Merge in trunk.
[official-gcc.git] / gcc / config / avr / avr.c
blob2edc78ac041e474c4f163fed9b376fc2a26d93c2
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "print-tree.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
39 #include "output.h"
40 #include "expr.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
43 #include "obstack.h"
44 #include "function.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "tm_p.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "params.h"
53 #include "df.h"
/* Maximal allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed: the body now uses the macro parameter SYM; it previously
   referenced a variable literally named `sym', which silently captured
   whatever `sym' happened to be in scope at the expansion site.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Same hygiene fix as above: use the parameter SYM, not `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                  \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)  \
   / SYMBOL_FLAG_MACH_DEP)
84 /* Known address spaces. The order must be the same as in the respective
85 enum from avr.h (or designated initialized must be used). */
86 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
88 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
89 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
90 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
91 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
92 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
93 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
94 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
95 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status.  */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA.  */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM.  */
  int rampz;

  /* SP: The stack pointer and its low and high byte.  */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override for the selected architecture.  */
static avr_addr_t avr_addr;
124 /* Prototypes for local helper functions. */
126 static const char* out_movqi_r_mr (rtx, rtx[], int*);
127 static const char* out_movhi_r_mr (rtx, rtx[], int*);
128 static const char* out_movsi_r_mr (rtx, rtx[], int*);
129 static const char* out_movqi_mr_r (rtx, rtx[], int*);
130 static const char* out_movhi_mr_r (rtx, rtx[], int*);
131 static const char* out_movsi_mr_r (rtx, rtx[], int*);
133 static int get_sequence_length (rtx insns);
134 static int sequent_regs_live (void);
135 static const char *ptrreg_to_str (int);
136 static const char *cond_string (enum rtx_code);
137 static int avr_num_arg_regs (enum machine_mode, const_tree);
138 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
139 int, bool);
140 static void output_reload_in_const (rtx*, rtx, int*, bool);
141 static struct machine_function * avr_init_machine_status (void);
144 /* Prototypes for hook implementors if needed before their implementation. */
146 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
149 /* Allocate registers from r25 to r8 for parameters for function calls. */
150 #define FIRST_CUM_REG 26
152 /* Implicit target register of LPM instruction (R0) */
153 extern GTY(()) rtx lpm_reg_rtx;
154 rtx lpm_reg_rtx;
156 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
157 extern GTY(()) rtx lpm_addr_reg_rtx;
158 rtx lpm_addr_reg_rtx;
160 /* Temporary register RTX (reg:QI TMP_REGNO) */
161 extern GTY(()) rtx tmp_reg_rtx;
162 rtx tmp_reg_rtx;
164 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
165 extern GTY(()) rtx zero_reg_rtx;
166 rtx zero_reg_rtx;
168 /* RTXs for all general purpose registers as QImode */
169 extern GTY(()) rtx all_regs_rtx[32];
170 rtx all_regs_rtx[32];
172 /* SREG, the processor status */
173 extern GTY(()) rtx sreg_rtx;
174 rtx sreg_rtx;
176 /* RAMP* special function registers */
177 extern GTY(()) rtx rampd_rtx;
178 extern GTY(()) rtx rampx_rtx;
179 extern GTY(()) rtx rampy_rtx;
180 extern GTY(()) rtx rampz_rtx;
181 rtx rampd_rtx;
182 rtx rampx_rtx;
183 rtx rampy_rtx;
184 rtx rampz_rtx;
186 /* RTX containing the strings "" and "e", respectively */
187 static GTY(()) rtx xstring_empty;
188 static GTY(()) rtx xstring_e;
190 /* Current architecture. */
191 const avr_arch_t *avr_current_arch;
193 /* Current device. */
194 const avr_mcu_t *avr_current_device;
196 /* Section to put switch tables in. */
197 static GTY(()) section *progmem_swtable_section;
199 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
200 or to address space __flash* or __memx. Only used as singletons inside
201 avr_asm_select_section, but it must not be local there because of GTY. */
202 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode = true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p = false;
209 bool avr_need_copy_data_p = false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *start = lo;

  while (*up)
    *lo++ = TOLOWER (*up++);

  *lo = '\0';

  return start;
}
/* Custom function to count number of set bits.  Uses Kernighan's trick:
   clearing the lowest set bit once per iteration.  */

static inline int
avr_popcount (unsigned int val)
{
  int pop;

  for (pop = 0; val != 0; pop++)
    val &= val - 1;

  return pop;
}
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
251 bool
252 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
254 int i;
256 enum machine_mode mode = GET_MODE (xval);
258 if (VOIDmode == mode)
259 mode = SImode;
261 for (i = 0; i < n_bytes; i++)
263 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
264 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
266 if (0 == (pop_mask & (1 << avr_popcount (val8))))
267 return false;
270 return true;
274 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
275 the bit representation of X by "casting" it to CONST_INT. */
278 avr_to_int_mode (rtx x)
280 enum machine_mode mode = GET_MODE (x);
282 return VOIDmode == mode
284 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
288 /* Implement `TARGET_OPTION_OVERRIDE'. */
290 static void
291 avr_option_override (void)
293 /* Disable -fdelete-null-pointer-checks option for AVR target.
294 This option compiler assumes that dereferencing of a null pointer
295 would halt the program. For AVR this assumption is not true and
296 programs can safely dereference null pointers. Changes made by this
297 option may not work properly for AVR. So disable this option. */
299 flag_delete_null_pointer_checks = 0;
301 /* caller-save.c looks for call-clobbered hard registers that are assigned
302 to pseudos that cross calls and tries so save-restore them around calls
303 in order to reduce the number of stack slots needed.
305 This might lead to situations where reload is no more able to cope
306 with the challenge of AVR's very few address registers and fails to
307 perform the requested spills. */
309 if (avr_strict_X)
310 flag_caller_saves = 0;
312 /* Unwind tables currently require a frame pointer for correctness,
313 see toplev.c:process_options(). */
315 if ((flag_unwind_tables
316 || flag_non_call_exceptions
317 || flag_asynchronous_unwind_tables)
318 && !ACCUMULATE_OUTGOING_ARGS)
320 flag_omit_frame_pointer = 0;
323 if (flag_pic == 1)
324 warning (OPT_fpic, "-fpic is not supported");
325 if (flag_pic == 2)
326 warning (OPT_fPIC, "-fPIC is not supported");
327 if (flag_pie == 1)
328 warning (OPT_fpie, "-fpie is not supported");
329 if (flag_pie == 2)
330 warning (OPT_fPIE, "-fPIE is not supported");
332 avr_current_device = &avr_mcu_types[avr_mcu_index];
333 avr_current_arch = &avr_arch_types[avr_current_device->arch];
335 /* RAM addresses of some SFRs common to all devices in respective arch. */
337 /* SREG: Status Register containing flags like I (global IRQ) */
338 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
340 /* RAMPZ: Address' high part when loading via ELPM */
341 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
343 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
344 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
345 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
346 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
348 /* SP: Stack Pointer (SP_H:SP_L) */
349 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
350 avr_addr.sp_h = avr_addr.sp_l + 1;
352 init_machine_status = avr_init_machine_status;
354 avr_log_set_avr_log();
/* Function to set up the backend function structure.
   Registered as init_machine_status in avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
366 /* Implement `INIT_EXPANDERS'. */
367 /* The function works like a singleton. */
369 void
370 avr_init_expanders (void)
372 int regno;
374 for (regno = 0; regno < 32; regno ++)
375 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
377 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
378 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
379 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
381 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
383 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
384 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
385 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
386 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
387 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
389 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
390 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
394 /* Implement `REGNO_REG_CLASS'. */
395 /* Return register class for register R. */
397 enum reg_class
398 avr_regno_reg_class (int r)
400 static const enum reg_class reg_class_tab[] =
402 R0_REG,
403 /* r1 - r15 */
404 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
405 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
406 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
407 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
408 /* r16 - r23 */
409 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
410 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
411 /* r24, r25 */
412 ADDW_REGS, ADDW_REGS,
413 /* X: r26, 27 */
414 POINTER_X_REGS, POINTER_X_REGS,
415 /* Y: r28, r29 */
416 POINTER_Y_REGS, POINTER_Y_REGS,
417 /* Z: r30, r31 */
418 POINTER_Z_REGS, POINTER_Z_REGS,
419 /* SP: SPL, SPH */
420 STACK_REG, STACK_REG
423 if (r <= 33)
424 return reg_class_tab[r];
426 return ALL_REGS;
430 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
432 static bool
433 avr_scalar_mode_supported_p (enum machine_mode mode)
435 if (ALL_FIXED_POINT_MODE_P (mode))
436 return true;
438 if (PSImode == mode)
439 return true;
441 return default_scalar_mode_supported_p (mode);
445 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
447 static bool
448 avr_decl_flash_p (tree decl)
450 if (TREE_CODE (decl) != VAR_DECL
451 || TREE_TYPE (decl) == error_mark_node)
453 return false;
456 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
460 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
461 address space and FALSE, otherwise. */
463 static bool
464 avr_decl_memx_p (tree decl)
466 if (TREE_CODE (decl) != VAR_DECL
467 || TREE_TYPE (decl) == error_mark_node)
469 return false;
472 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
476 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
478 bool
479 avr_mem_flash_p (rtx x)
481 return (MEM_P (x)
482 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
486 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
487 address space and FALSE, otherwise. */
489 bool
490 avr_mem_memx_p (rtx x)
492 return (MEM_P (x)
493 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
497 /* A helper for the subsequent function attribute used to dig for
498 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
500 static inline int
501 avr_lookup_function_attribute1 (const_tree func, const char *name)
503 if (FUNCTION_DECL == TREE_CODE (func))
505 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
507 return true;
510 func = TREE_TYPE (func);
513 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
514 || TREE_CODE (func) == METHOD_TYPE);
516 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
519 /* Return nonzero if FUNC is a naked function. */
521 static int
522 avr_naked_function_p (tree func)
524 return avr_lookup_function_attribute1 (func, "naked");
527 /* Return nonzero if FUNC is an interrupt function as specified
528 by the "interrupt" attribute. */
530 static int
531 avr_interrupt_function_p (tree func)
533 return avr_lookup_function_attribute1 (func, "interrupt");
536 /* Return nonzero if FUNC is a signal function as specified
537 by the "signal" attribute. */
539 static int
540 avr_signal_function_p (tree func)
542 return avr_lookup_function_attribute1 (func, "signal");
545 /* Return nonzero if FUNC is an OS_task function. */
547 static int
548 avr_OS_task_function_p (tree func)
550 return avr_lookup_function_attribute1 (func, "OS_task");
553 /* Return nonzero if FUNC is an OS_main function. */
555 static int
556 avr_OS_main_function_p (tree func)
558 return avr_lookup_function_attribute1 (func, "OS_main");
562 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
563 /* Sanity cheching for above function attributes. */
565 static void
566 avr_set_current_function (tree decl)
568 location_t loc;
569 const char *isr;
571 if (decl == NULL_TREE
572 || current_function_decl == NULL_TREE
573 || current_function_decl == error_mark_node
574 || ! cfun->machine
575 || cfun->machine->attributes_checked_p)
576 return;
578 loc = DECL_SOURCE_LOCATION (decl);
580 cfun->machine->is_naked = avr_naked_function_p (decl);
581 cfun->machine->is_signal = avr_signal_function_p (decl);
582 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
583 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
584 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
586 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
588 /* Too much attributes make no sense as they request conflicting features. */
590 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
591 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
592 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
593 " exclusive", "OS_task", "OS_main", isr);
595 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
597 if (cfun->machine->is_naked
598 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
599 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
600 " no effect on %qs function", "OS_task", "OS_main", "naked");
602 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
604 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
605 tree ret = TREE_TYPE (TREE_TYPE (decl));
606 const char *name;
608 name = DECL_ASSEMBLER_NAME_SET_P (decl)
609 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
610 : IDENTIFIER_POINTER (DECL_NAME (decl));
612 /* Skip a leading '*' that might still prefix the assembler name,
613 e.g. in non-LTO runs. */
615 name = default_strip_name_encoding (name);
617 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
618 using this when it switched from SIGNAL and INTERRUPT to ISR. */
620 if (cfun->machine->is_interrupt)
621 cfun->machine->is_signal = 0;
623 /* Interrupt handlers must be void __vector (void) functions. */
625 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
626 error_at (loc, "%qs function cannot have arguments", isr);
628 if (TREE_CODE (ret) != VOID_TYPE)
629 error_at (loc, "%qs function cannot return a value", isr);
631 /* If the function has the 'signal' or 'interrupt' attribute, ensure
632 that the name of the function is "__vector_NN" so as to catch
633 when the user misspells the vector name. */
635 if (!STR_PREFIX_P (name, "__vector"))
636 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
637 name, isr);
640 /* Don't print the above diagnostics more than once. */
642 cfun->machine->attributes_checked_p = 1;
646 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
649 avr_accumulate_outgoing_args (void)
651 if (!cfun)
652 return TARGET_ACCUMULATE_OUTGOING_ARGS;
654 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
655 what offset is correct. In some cases it is relative to
656 virtual_outgoing_args_rtx and in others it is relative to
657 virtual_stack_vars_rtx. For example code see
658 gcc.c-torture/execute/built-in-setjmp.c
659 gcc.c-torture/execute/builtins/sprintf-chk.c */
661 return (TARGET_ACCUMULATE_OUTGOING_ARGS
662 && !(cfun->calls_setjmp
663 || cfun->has_nonlocal_label));
667 /* Report contribution of accumulated outgoing arguments to stack size. */
669 static inline int
670 avr_outgoing_args_size (void)
672 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
676 /* Implement `STARTING_FRAME_OFFSET'. */
677 /* This is the offset from the frame pointer register to the first stack slot
678 that contains a variable living in the frame. */
681 avr_starting_frame_offset (void)
683 return 1 + avr_outgoing_args_size ();
687 /* Return the number of hard registers to push/pop in the prologue/epilogue
688 of the current function, and optionally store these registers in SET. */
690 static int
691 avr_regs_to_save (HARD_REG_SET *set)
693 int reg, count;
694 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
696 if (set)
697 CLEAR_HARD_REG_SET (*set);
698 count = 0;
700 /* No need to save any registers if the function never returns or
701 has the "OS_task" or "OS_main" attribute. */
703 if (TREE_THIS_VOLATILE (current_function_decl)
704 || cfun->machine->is_OS_task
705 || cfun->machine->is_OS_main)
706 return 0;
708 for (reg = 0; reg < 32; reg++)
710 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
711 any global register variables. */
713 if (fixed_regs[reg])
714 continue;
716 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
717 || (df_regs_ever_live_p (reg)
718 && (int_or_sig_p || !call_used_regs[reg])
719 /* Don't record frame pointer registers here. They are treated
720 indivitually in prologue. */
721 && !(frame_pointer_needed
722 && (reg == REG_Y || reg == (REG_Y+1)))))
724 if (set)
725 SET_HARD_REG_BIT (*set, reg);
726 count++;
729 return count;
733 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
735 static bool
736 avr_allocate_stack_slots_for_args (void)
738 return !cfun->machine->is_naked;
742 /* Return true if register FROM can be eliminated via register TO. */
744 static bool
745 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
747 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
748 || !frame_pointer_needed);
752 /* Implement `TARGET_WARN_FUNC_RETURN'. */
754 static bool
755 avr_warn_func_return (tree decl)
757 /* Naked functions are implemented entirely in assembly, including the
758 return sequence, so suppress warnings about this. */
760 return !avr_naked_function_p (decl);
763 /* Compute offset between arg_pointer and frame_pointer. */
766 avr_initial_elimination_offset (int from, int to)
768 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
769 return 0;
770 else
772 int offset = frame_pointer_needed ? 2 : 0;
773 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
775 offset += avr_regs_to_save (NULL);
776 return (get_frame_size () + avr_outgoing_args_size()
777 + avr_pc_size + 1 + offset);
782 /* Helper for the function below. */
784 static void
785 avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
787 *node = make_node (FIXED_POINT_TYPE);
788 TYPE_SATURATING (*node) = sat_p;
789 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
790 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
791 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
792 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
793 TYPE_ALIGN (*node) = 8;
794 SET_TYPE_MODE (*node, mode);
796 layout_type (*node);
800 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
802 static tree
803 avr_build_builtin_va_list (void)
805 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
806 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
807 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
808 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
809 to the long long accum modes instead of the desired [U]TAmode.
811 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
812 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
813 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
814 libgcc to detect IBIT and FBIT. */
816 avr_adjust_type_node (&ta_type_node, TAmode, 0);
817 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
818 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
819 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
821 unsigned_long_long_accum_type_node = uta_type_node;
822 long_long_accum_type_node = ta_type_node;
823 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
824 sat_long_long_accum_type_node = sat_ta_type_node;
826 /* Dispatch to the default handler. */
828 return std_build_builtin_va_list ();
832 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
833 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
834 frame pointer by +STARTING_FRAME_OFFSET.
835 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
836 avoids creating add/sub of offset in nonlocal goto and setjmp. */
838 static rtx
839 avr_builtin_setjmp_frame_value (void)
841 rtx xval = gen_reg_rtx (Pmode);
842 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
843 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
844 return xval;
848 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
849 This is return address of function. */
852 avr_return_addr_rtx (int count, rtx tem)
854 rtx r;
856 /* Can only return this function's return address. Others not supported. */
857 if (count)
858 return NULL;
860 if (AVR_3_BYTE_PC)
862 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
863 warning (0, "%<builtin_return_address%> contains only 2 bytes"
864 " of address");
866 else
867 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
869 r = gen_rtx_PLUS (Pmode, tem, r);
870 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
871 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
872 return r;
875 /* Return 1 if the function epilogue is just a single "ret". */
878 avr_simple_epilogue (void)
880 return (! frame_pointer_needed
881 && get_frame_size () == 0
882 && avr_outgoing_args_size() == 0
883 && avr_regs_to_save (NULL) == 0
884 && ! cfun->machine->is_interrupt
885 && ! cfun->machine->is_signal
886 && ! cfun->machine->is_naked
887 && ! TREE_THIS_VOLATILE (current_function_decl));
890 /* This function checks sequence of live registers. */
892 static int
893 sequent_regs_live (void)
895 int reg;
896 int live_seq = 0;
897 int cur_seq = 0;
899 for (reg = 0; reg < 18; ++reg)
901 if (fixed_regs[reg])
903 /* Don't recognize sequences that contain global register
904 variables. */
906 if (live_seq != 0)
907 return 0;
908 else
909 continue;
912 if (!call_used_regs[reg])
914 if (df_regs_ever_live_p (reg))
916 ++live_seq;
917 ++cur_seq;
919 else
920 cur_seq = 0;
924 if (!frame_pointer_needed)
926 if (df_regs_ever_live_p (REG_Y))
928 ++live_seq;
929 ++cur_seq;
931 else
932 cur_seq = 0;
934 if (df_regs_ever_live_p (REG_Y+1))
936 ++live_seq;
937 ++cur_seq;
939 else
940 cur_seq = 0;
942 else
944 cur_seq += 2;
945 live_seq += 2;
947 return (cur_seq == live_seq) ? live_seq : 0;
950 /* Obtain the length sequence of insns. */
953 get_sequence_length (rtx insns)
955 rtx insn;
956 int length;
958 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
959 length += get_attr_length (insn);
961 return length;
965 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
968 avr_incoming_return_addr_rtx (void)
970 /* The return address is at the top of the stack. Note that the push
971 was via post-decrement, which means the actual address is off by one. */
972 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
975 /* Helper for expand_prologue. Emit a push of a byte register. */
977 static void
978 emit_push_byte (unsigned regno, bool frame_related_p)
980 rtx mem, reg, insn;
982 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
983 mem = gen_frame_mem (QImode, mem);
984 reg = gen_rtx_REG (QImode, regno);
986 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
987 if (frame_related_p)
988 RTX_FRAME_RELATED_P (insn) = 1;
990 cfun->machine->stack_usage++;
994 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
995 SFR is a MEM representing the memory location of the SFR.
996 If CLR_P then clear the SFR after the push using zero_reg. */
998 static void
999 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1001 rtx insn;
1003 gcc_assert (MEM_P (sfr));
1005 /* IN __tmp_reg__, IO(SFR) */
1006 insn = emit_move_insn (tmp_reg_rtx, sfr);
1007 if (frame_related_p)
1008 RTX_FRAME_RELATED_P (insn) = 1;
1010 /* PUSH __tmp_reg__ */
1011 emit_push_byte (TMP_REGNO, frame_related_p);
1013 if (clr_p)
1015 /* OUT IO(SFR), __zero_reg__ */
1016 insn = emit_move_insn (sfr, const0_rtx);
1017 if (frame_related_p)
1018 RTX_FRAME_RELATED_P (insn) = 1;
1022 static void
1023 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1025 rtx insn;
1026 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1027 int live_seq = sequent_regs_live ();
1029 HOST_WIDE_INT size_max
1030 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1032 bool minimize = (TARGET_CALL_PROLOGUES
1033 && size < size_max
1034 && live_seq
1035 && !isr_p
1036 && !cfun->machine->is_OS_task
1037 && !cfun->machine->is_OS_main);
1039 if (minimize
1040 && (frame_pointer_needed
1041 || avr_outgoing_args_size() > 8
1042 || (AVR_2_BYTE_PC && live_seq > 6)
1043 || live_seq > 7))
1045 rtx pattern;
1046 int first_reg, reg, offset;
1048 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1049 gen_int_mode (size, HImode));
1051 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1052 gen_int_mode (live_seq+size, HImode));
1053 insn = emit_insn (pattern);
1054 RTX_FRAME_RELATED_P (insn) = 1;
1056 /* Describe the effect of the unspec_volatile call to prologue_saves.
1057 Note that this formulation assumes that add_reg_note pushes the
1058 notes to the front. Thus we build them in the reverse order of
1059 how we want dwarf2out to process them. */
1061 /* The function does always set frame_pointer_rtx, but whether that
1062 is going to be permanent in the function is frame_pointer_needed. */
1064 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1065 gen_rtx_SET (VOIDmode, (frame_pointer_needed
1066 ? frame_pointer_rtx
1067 : stack_pointer_rtx),
1068 plus_constant (Pmode, stack_pointer_rtx,
1069 -(size + live_seq))));
1071 /* Note that live_seq always contains r28+r29, but the other
1072 registers to be saved are all below 18. */
1074 first_reg = 18 - (live_seq - 2);
1076 for (reg = 29, offset = -live_seq + 1;
1077 reg >= first_reg;
1078 reg = (reg == 28 ? 17 : reg - 1), ++offset)
1080 rtx m, r;
1082 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1083 offset));
1084 r = gen_rtx_REG (QImode, reg);
1085 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1088 cfun->machine->stack_usage += size + live_seq;
1090 else /* !minimize */
1092 int reg;
1094 for (reg = 0; reg < 32; ++reg)
1095 if (TEST_HARD_REG_BIT (set, reg))
1096 emit_push_byte (reg, true);
1098 if (frame_pointer_needed
1099 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1101 /* Push frame pointer. Always be consistent about the
1102 ordering of pushes -- epilogue_restores expects the
1103 register pair to be pushed low byte first. */
1105 emit_push_byte (REG_Y, true);
1106 emit_push_byte (REG_Y + 1, true);
1109 if (frame_pointer_needed
1110 && size == 0)
1112 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1113 RTX_FRAME_RELATED_P (insn) = 1;
1116 if (size != 0)
1118 /* Creating a frame can be done by direct manipulation of the
1119 stack or via the frame pointer. These two methods are:
1120 fp = sp
1121 fp -= size
1122 sp = fp
1124 sp -= size
1125 fp = sp (*)
1126 the optimum method depends on function type, stack and
1127 frame size. To avoid a complex logic, both methods are
1128 tested and shortest is selected.
1130 There is also the case where SIZE != 0 and no frame pointer is
1131 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1132 In that case, insn (*) is not needed in that case.
1133 We use the X register as scratch. This is save because in X
1134 is call-clobbered.
1135 In an interrupt routine, the case of SIZE != 0 together with
1136 !frame_pointer_needed can only occur if the function is not a
1137 leaf function and thus X has already been saved. */
1139 int irq_state = -1;
1140 HOST_WIDE_INT size_cfa = size, neg_size;
1141 rtx fp_plus_insns, fp, my_fp;
1143 gcc_assert (frame_pointer_needed
1144 || !isr_p
1145 || !crtl->is_leaf);
1147 fp = my_fp = (frame_pointer_needed
1148 ? frame_pointer_rtx
1149 : gen_rtx_REG (Pmode, REG_X));
1151 if (AVR_HAVE_8BIT_SP)
1153 /* The high byte (r29) does not change:
1154 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1156 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1159 /* Cut down size and avoid size = 0 so that we don't run
1160 into ICE like PR52488 in the remainder. */
1162 if (size > size_max)
1164 /* Don't error so that insane code from newlib still compiles
1165 and does not break building newlib. As PR51345 is implemented
1166 now, there are multilib variants with -msp8.
1168 If user wants sanity checks he can use -Wstack-usage=
1169 or similar options.
1171 For CFA we emit the original, non-saturated size so that
1172 the generic machinery is aware of the real stack usage and
1173 will print the above diagnostic as expected. */
1175 size = size_max;
1178 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1179 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1181 /************ Method 1: Adjust frame pointer ************/
1183 start_sequence ();
1185 /* Normally, the dwarf2out frame-related-expr interpreter does
1186 not expect to have the CFA change once the frame pointer is
1187 set up. Thus, we avoid marking the move insn below and
1188 instead indicate that the entire operation is complete after
1189 the frame pointer subtraction is done. */
1191 insn = emit_move_insn (fp, stack_pointer_rtx);
1192 if (frame_pointer_needed)
1194 RTX_FRAME_RELATED_P (insn) = 1;
1195 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1196 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1199 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1200 my_fp, neg_size));
1202 if (frame_pointer_needed)
1204 RTX_FRAME_RELATED_P (insn) = 1;
1205 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1206 gen_rtx_SET (VOIDmode, fp,
1207 plus_constant (Pmode, fp,
1208 -size_cfa)));
1211 /* Copy to stack pointer. Note that since we've already
1212 changed the CFA to the frame pointer this operation
1213 need not be annotated if frame pointer is needed.
1214 Always move through unspec, see PR50063.
1215 For meaning of irq_state see movhi_sp_r insn. */
1217 if (cfun->machine->is_interrupt)
1218 irq_state = 1;
1220 if (TARGET_NO_INTERRUPTS
1221 || cfun->machine->is_signal
1222 || cfun->machine->is_OS_main)
1223 irq_state = 0;
1225 if (AVR_HAVE_8BIT_SP)
1226 irq_state = 2;
1228 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1229 fp, GEN_INT (irq_state)));
1230 if (!frame_pointer_needed)
1232 RTX_FRAME_RELATED_P (insn) = 1;
1233 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1234 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1235 plus_constant (Pmode,
1236 stack_pointer_rtx,
1237 -size_cfa)));
1240 fp_plus_insns = get_insns ();
1241 end_sequence ();
1243 /************ Method 2: Adjust Stack pointer ************/
1245 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1246 can only handle specific offsets. */
1248 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1250 rtx sp_plus_insns;
1252 start_sequence ();
1254 insn = emit_move_insn (stack_pointer_rtx,
1255 plus_constant (Pmode, stack_pointer_rtx,
1256 -size));
1257 RTX_FRAME_RELATED_P (insn) = 1;
1258 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1259 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1260 plus_constant (Pmode,
1261 stack_pointer_rtx,
1262 -size_cfa)));
1263 if (frame_pointer_needed)
1265 insn = emit_move_insn (fp, stack_pointer_rtx);
1266 RTX_FRAME_RELATED_P (insn) = 1;
1269 sp_plus_insns = get_insns ();
1270 end_sequence ();
1272 /************ Use shortest method ************/
1274 emit_insn (get_sequence_length (sp_plus_insns)
1275 < get_sequence_length (fp_plus_insns)
1276 ? sp_plus_insns
1277 : fp_plus_insns);
1279 else
1281 emit_insn (fp_plus_insns);
1284 cfun->machine->stack_usage += size_cfa;
1285 } /* !minimize && size != 0 */
1286 } /* !minimize */
/* Output function prologue.  Expander for `prologue' pattern: emits the
   RTL that saves registers, sets up the zero register and frame for the
   current function.  Actual frame construction is delegated to
   avr_prologue_setup_frame.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  /* Frame size = local frame plus space reserved for outgoing arguments
     (the latter is non-zero only with ACCUMULATE_OUTGOING_ARGS).  */
  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    return;

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          /* RAMPZ is cleared only when RAMPD exists (XMEGA-like cores),
             hence the AVR_HAVE_RAMPD argument for "clr".  */
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1368 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1369 /* Output summary at end of function prologue. */
1371 static void
1372 avr_asm_function_end_prologue (FILE *file)
1374 if (cfun->machine->is_naked)
1376 fputs ("/* prologue: naked */\n", file);
1378 else
1380 if (cfun->machine->is_interrupt)
1382 fputs ("/* prologue: Interrupt */\n", file);
1384 else if (cfun->machine->is_signal)
1386 fputs ("/* prologue: Signal */\n", file);
1388 else
1389 fputs ("/* prologue: function */\n", file);
1392 if (ACCUMULATE_OUTGOING_ARGS)
1393 fprintf (file, "/* outgoing args size = %d */\n",
1394 avr_outgoing_args_size());
1396 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1397 get_frame_size());
1398 fprintf (file, "/* stack size = %d */\n",
1399 cfun->machine->stack_usage);
1400 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1401 usage for offset so that SP + .L__stack_offset = return address. */
1402 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1406 /* Implement `EPILOGUE_USES'. */
1409 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1411 if (reload_completed
1412 && cfun->machine
1413 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1414 return 1;
1415 return 0;
1418 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1420 static void
1421 emit_pop_byte (unsigned regno)
1423 rtx mem, reg;
1425 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1426 mem = gen_frame_mem (QImode, mem);
1427 reg = gen_rtx_REG (QImode, regno);
1429 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
/* Output RTL epilogue.  Expander for `epilogue' (SIBCALL_P false) and
   `sibcall_epilogue' (SIBCALL_P true): tear down the frame, restore the
   saved registers and, unless this is a sibcall, emit the return.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* -mcall-prologues: restore registers through a library sequence,
     but never for ISRs or OS_task/OS_main functions.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      /* RCALL . / PUSH __tmp_reg__ style adjustment only handles
         specific small offsets.  */
      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  /* Mark the start of the epilogue in the assembler output.  */
  fputs ("/* epilogue start */\n", file);
}
1644 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1646 static bool
1647 avr_cannot_modify_jumps_p (void)
1650 /* Naked Functions must not have any instructions after
1651 their epilogue, see PR42240 */
1653 if (reload_completed
1654 && cfun->machine
1655 && cfun->machine->is_naked)
1657 return true;
1660 return false;
1664 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1666 static bool
1667 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1669 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1670 This hook just serves to hack around PR rtl-optimization/52543 by
1671 claiming that non-generic addresses were mode-dependent so that
1672 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1673 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1674 generic address space which is not true. */
1676 return !ADDR_SPACE_GENERIC_P (as);
1680 /* Helper function for `avr_legitimate_address_p'. */
1682 static inline bool
1683 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1684 RTX_CODE outer_code, bool strict)
1686 return (REG_P (reg)
1687 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1688 as, outer_code, UNKNOWN)
1689 || (!strict
1690 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* Plain constant addresses are always legitimate.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* X (r26/27) has no displacement addressing, so multi-word
         accesses > 4 bytes through X are rejected when strict.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Displacement must fit the LDD/STD offset range for MODE.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Too-large FP offsets are accepted here; presumably
                   fixed up later by reload (see
                   avr_legitimize_reload_address) -- confirm.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  /* Optional -mlog= debug dump of the decision.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1782 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1783 now only a helper for avr_addr_space_legitimize_address. */
1784 /* Attempts to replace X with a valid
1785 memory address for an operand of mode MODE */
1787 static rtx
1788 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1790 bool big_offset_p = false;
1792 x = oldx;
1794 if (GET_CODE (oldx) == PLUS
1795 && REG_P (XEXP (oldx, 0)))
1797 if (REG_P (XEXP (oldx, 1)))
1798 x = force_reg (GET_MODE (oldx), oldx);
1799 else if (CONST_INT_P (XEXP (oldx, 1)))
1801 int offs = INTVAL (XEXP (oldx, 1));
1802 if (frame_pointer_rtx != XEXP (oldx, 0)
1803 && offs > MAX_LD_OFFSET (mode))
1805 big_offset_p = true;
1806 x = force_reg (GET_MODE (oldx), oldx);
1811 if (avr_log.legitimize_address)
1813 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1815 if (x != oldx)
1816 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1819 return x;
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.

   Returns the (possibly modified) address through *PX when a reload was
   pushed, or NULL_RTX to let the generic machinery handle it.  */

rtx
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  /* NOTE(review): the "1 &&" is a vestigial always-true guard kept from
     the original source.  */
  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      /* Auto-inc/dec: reload the base register into POINTER_REGS.  */
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      /* Does the displacement fit the LDD/STD offset range?  */
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              /* Reload the inner address first ...  */
              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              /* ... then the memory location into a base pointer.  */
              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          /* Displacement too big and base is not the frame pointer:
             reload the complete sum.  */
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}
/* Implement `TARGET_SECONDARY_RELOAD'.
   Loads from the non-generic, non-MEMX 16-bit address spaces (__flash1
   etc.) need a d-class scratch register; request it by setting the
   mode-specific reload_in* icode.  No secondary reload class is ever
   needed, so always return NO_REGS.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      enum machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
1950 /* Helper function to print assembler resp. track instruction
1951 sequence lengths. Always return "".
1953 If PLEN == NULL:
1954 Output assembler code from template TPL with operands supplied
1955 by OPERANDS. This is just forwarding to output_asm_insn.
1957 If PLEN != NULL:
1958 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1959 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1960 Don't output anything.
1963 static const char*
1964 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1966 if (NULL == plen)
1968 output_asm_insn (tpl, operands);
1970 else
1972 if (n_words < 0)
1973 *plen = -n_words;
1974 else
1975 *plen += n_words;
1978 return "";
1982 /* Return a pointer register name as a string. */
1984 static const char*
1985 ptrreg_to_str (int regno)
1987 switch (regno)
1989 case REG_X: return "X";
1990 case REG_Y: return "Y";
1991 case REG_Z: return "Z";
1992 default:
1993 output_operand_lossage ("address operand requires constraint for"
1994 " X, Y, or Z register");
1996 return NULL;
1999 /* Return the condition name as a string.
2000 Used in conditional jump constructing */
2002 static const char*
2003 cond_string (enum rtx_code code)
2005 switch (code)
2007 case NE:
2008 return "ne";
2009 case EQ:
2010 return "eq";
2011 case GE:
2012 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2013 return "pl";
2014 else
2015 return "ge";
2016 case LT:
2017 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2018 return "mi";
2019 else
2020 return "lt";
2021 case GEU:
2022 return "sh";
2023 case LTU:
2024 return "lo";
2025 default:
2026 gcc_unreachable ();
2029 return "";
2033 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2034 /* Output ADDR to FILE as address. */
2036 static void
2037 avr_print_operand_address (FILE *file, rtx addr)
2039 switch (GET_CODE (addr))
2041 case REG:
2042 fprintf (file, ptrreg_to_str (REGNO (addr)));
2043 break;
2045 case PRE_DEC:
2046 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2047 break;
2049 case POST_INC:
2050 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2051 break;
2053 default:
2054 if (CONSTANT_ADDRESS_P (addr)
2055 && text_segment_operand (addr, VOIDmode))
2057 rtx x = addr;
2058 if (GET_CODE (x) == CONST)
2059 x = XEXP (x, 0);
2060 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2062 /* Assembler gs() will implant word address. Make offset
2063 a byte offset inside gs() for assembler. This is
2064 needed because the more logical (constant+gs(sym)) is not
2065 accepted by gas. For 128K and smaller devices this is ok.
2066 For large devices it will create a trampoline to offset
2067 from symbol which may not be what the user really wanted. */
2069 fprintf (file, "gs(");
2070 output_addr_const (file, XEXP (x,0));
2071 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2072 2 * INTVAL (XEXP (x, 1)));
2073 if (AVR_3_BYTE_PC)
2074 if (warning (0, "pointer offset from symbol maybe incorrect"))
2076 output_addr_const (stderr, addr);
2077 fprintf(stderr,"\n");
2080 else
2082 fprintf (file, "gs(");
2083 output_addr_const (file, addr);
2084 fprintf (file, ")");
2087 else
2088 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Only `~' (short/long jump selector) and `!' (EIJMP/EICALL prefix)
     are recognized punctuation codes.  */
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* %A..%D select successive bytes/registers of a multi-byte operand.  */
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      /* Emit 'r' to turn jmp/call into rjmp/rcall on devices without
         JMP/CALL.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* Emit 'e' for eijmp/eicall on devices that have them.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T/%t come in pairs: first a REG operand that is latched in
         static state, then a CONST_INT bit position that selects the
         byte (and, for %T, the bit within the byte).  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        fprintf (file, "%d", (int) REGNO (x));
      else
        /* NOTE(review): reg_names[] entries are fixed register names
           without '%', so using them as the format string is harmless
           here, but fputs would be cleaner.  */
        fprintf (file, reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i: print known SFR addresses symbolically.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if (AVR_XMEGA && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Other I/O addresses: print as RAM address minus the
                 architecture's SFR offset.  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_current_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: just the displacement of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error. Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      /* Fixed-point constants print as their underlying integer value.  */
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* Only SFmode float constants are expected here.  */
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error. Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
/* Worker function for `NOTICE_UPDATE_CC'.  */
/* Update the condition code in the INSN.  */

void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: map operand-dependent cc attributes (CC_PLUS, CC_LDI)
     to one of the standard CC_* values.  */
  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* avr_out_plus computes the effective cc as a side result.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: update the cc_status tracking state accordingly.  */
  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2389 /* Choose mode for jump insn:
2390 1 - relative jump in range -63 <= x <= 62 ;
2391 2 - relative jump in range -2046 <= x <= 2045 ;
2392 3 - absolute jump (only for ATmega[16]03). */
2395 avr_jump_mode (rtx x, rtx insn)
2397 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2398 ? XEXP (x, 0) : x));
2399 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2400 int jump_distance = cur_addr - dest_addr;
2402 if (-63 <= jump_distance && jump_distance <= 62)
2403 return 1;
2404 else if (-2046 <= jump_distance && jump_distance <= 2045)
2405 return 2;
2406 else if (AVR_HAVE_JMP_CALL)
2407 return 3;
2409 return 2;
/* Return an AVR condition jump commands.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   If REVERSE nonzero then condition code in X must be reversed.

   GT/GTU/LE/LEU have no single branch instruction on AVR, so they are
   synthesized from breq plus a signed/unsigned branch; longer LEN values
   invert the test and jump over an rjmp/jmp.  */

const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      /* With V unusable, use sign-bit branches (brpl/brmi) instead of
         brge/brlt.  */
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All remaining conditions have a direct branch instruction;
         %j1/%k1 print the (reversed) condition name.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }

  return "";
}
2515 /* Worker function for `FINAL_PRESCAN_INSN'. */
2516 /* Output insn cost for next insn. */
2518 void
2519 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2520 int num_operands ATTRIBUTE_UNUSED)
2522 if (avr_log.rtx_costs)
2524 rtx set = single_set (insn);
2526 if (set)
2527 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2528 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2529 else
2530 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2531 rtx_cost (PATTERN (insn), INSN, 0,
2532 optimize_insn_for_speed_p()));
2536 /* Return 0 if undefined, 1 if always true or always false. */
2539 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2541 unsigned int max = (mode == QImode ? 0xff :
2542 mode == HImode ? 0xffff :
2543 mode == PSImode ? 0xffffff :
2544 mode == SImode ? 0xffffffff : 0);
2545 if (max && op && CONST_INT_P (x))
2547 if (unsigned_condition (op) != op)
2548 max >>= 1;
2550 if (max != (INTVAL (x) & max)
2551 && INTVAL (x) != 0xff)
2552 return 1;
2554 return 0;
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
avr_function_arg_regno_p (int r)
{
  /* Arguments live in R8 ... R25 (cf. avr_function_arg).  */
  return 8 <= r && r <= 25;
}
2569 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2570 /* Initializing the variable cum for the state at the beginning
2571 of the argument list. */
2573 void
2574 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2575 tree fndecl ATTRIBUTE_UNUSED)
2577 cum->nregs = 18;
2578 cum->regno = FIRST_CUM_REG;
2579 if (!libname && stdarg_p (fntype))
2580 cum->nregs = 0;
2582 /* Assume the calle may be tail called */
2584 cfun->machine->sibcall_fails = 0;
2587 /* Returns the number of registers to allocate for a function argument. */
2589 static int
2590 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2592 int size;
2594 if (mode == BLKmode)
2595 size = int_size_in_bytes (type);
2596 else
2597 size = GET_MODE_SIZE (mode);
2599 /* Align all function arguments to start in even-numbered registers.
2600 Odd-sized arguments leave holes above them. */
2602 return (size + 1) & ~1;
2606 /* Implement `TARGET_FUNCTION_ARG'. */
2607 /* Controls whether a function argument is passed
2608 in a register, and which register. */
2610 static rtx
2611 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2612 const_tree type, bool named ATTRIBUTE_UNUSED)
2614 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2615 int bytes = avr_num_arg_regs (mode, type);
2617 if (cum->nregs && bytes <= cum->nregs)
2618 return gen_rtx_REG (mode, cum->regno - bytes);
2620 return NULL_RTX;
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Argument registers are allocated downwards; after this, cum->regno is
     the first register of the argument just consumed.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      /* Warn once for each fixed register inside the argument's span.  */
      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  if (cum->nregs <= 0)
    {
      /* Registers exhausted: remaining arguments go on the stack.  */
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* No decl available: strip indirections off the call expression's
         type until we reach the FUNCTION_TYPE / METHOD_TYPE itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
2735 /***********************************************************************
2736 Functions for outputting various mov's for a various modes
2737 ************************************************************************/
2739 /* Return true if a value of mode MODE is read from flash by
2740 __load_* function from libgcc. */
2742 bool
2743 avr_load_libgcc_p (rtx op)
2745 enum machine_mode mode = GET_MODE (op);
2746 int n_bytes = GET_MODE_SIZE (mode);
2748 return (n_bytes > 2
2749 && !AVR_HAVE_LPMX
2750 && avr_mem_flash_p (op));
2753 /* Return true if a value of mode MODE is read by __xload_* function. */
2755 bool
2756 avr_xload_libgcc_p (enum machine_mode mode)
2758 int n_bytes = GET_MODE_SIZE (mode);
2760 return (n_bytes > 1
2761 || avr_current_device->n_flash > 1);
/* Fixme: This is a hack because secondary reloads don't works as expected.

   Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
   of INSN.

   Return a QImode d-register or NULL_RTX if nothing found.  */

static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* The d-registers are R16 ... R31 (the ones LDI can target).  */
  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers that overlap EXCLUDE or that the user fixed.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.

   XOP uses the operand layout set up by avr_out_lpm:
   %0 = destination, %1 = address, %2 = Z (LPM address register),
   %3 = R0 (the implicit LPM target, set below), %4 = "e"/"" prefix
   so that "%4lpm" prints as ELPM or LPM.  */

static const char*
avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      /* Plain Z addressing handles 1- and 2-byte values only.  */
      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Copy out of R0 unless the destination already is R0.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* Destination overlaps Z: save the low byte on the stack while
             Z is still needed for the second LPM.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "push %3" CR_TAB
                                "adiw %2,1" CR_TAB
                                "%4lpm" CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1" CR_TAB
                       "%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if it is still live after this insn.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Byte 0: skip the copy-out when the destination is R0 itself.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Return "".  */

const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Flash is read-only from the program's point of view.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout, shared with avr_out_lpm_no_lpmx:
     %0 dest, %1 address, %2 Z, %3 scratch d-reg (segment case only),
     %4 ELPM prefix string, %5 tmp reg, %6 RAMPZ address.  */
  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* Best case: a free d-register can hold the segment number.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be built in the tmp reg without LDI.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* Worst case: borrow ZH around the LDI and restore it.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on "%4lpm" prints as ELPM.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Destination overlaps Z: buffer the low byte in the tmp reg.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Undo the post-increments if Z is still live.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* High word of destination overlaps Z: buffer byte C.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
/* Worker function for xload_8 insn.
   %0 = destination, %1 = high address byte, %2 = Z,
   %3 = intermediate target (the destination itself with LPMX,
   otherwise R0, LPM's implicit target).  */

const char*
avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  /* Read the byte from flash at Z.  */
  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* SBRC skips the LD when bit 7 of %1 is clear; when set, overwrite the
     LPM result with a RAM load from Z.  Bit 7 presumably discriminates
     RAM vs. flash in the extended address -- TODO confirm against the
     xload expander.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
/* Output instructions for a QImode (1-byte) move.  OPERANDS[0] is the
   destination, OPERANDS[1] the source; if PLEN != NULL only the sequence
   length is accumulated.  Returns the asm template or "".  */

const char*
output_movqi (rtx insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Reads from flash go through [E]LPM.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Moves involving SPL are done via the I/O space.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Storing 0 uses the zero register instead of a reload.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3161 const char *
3162 output_movhi (rtx insn, rtx xop[], int *plen)
3164 rtx dest = xop[0];
3165 rtx src = xop[1];
3167 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3169 if (avr_mem_flash_p (src)
3170 || avr_mem_flash_p (dest))
3172 return avr_out_lpm (insn, xop, plen);
3175 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3177 if (REG_P (dest))
3179 if (REG_P (src)) /* mov r,r */
3181 if (test_hard_reg_class (STACK_REG, dest))
3183 if (AVR_HAVE_8BIT_SP)
3184 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3186 if (AVR_XMEGA)
3187 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3188 "out __SP_H__,%B1", xop, plen, -2);
3190 /* Use simple load of SP if no interrupts are used. */
3192 return TARGET_NO_INTERRUPTS
3193 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3194 "out __SP_L__,%A1", xop, plen, -2)
3195 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3196 "cli" CR_TAB
3197 "out __SP_H__,%B1" CR_TAB
3198 "out __SREG__,__tmp_reg__" CR_TAB
3199 "out __SP_L__,%A1", xop, plen, -5);
3201 else if (test_hard_reg_class (STACK_REG, src))
3203 return !AVR_HAVE_SPH
3204 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3205 "clr %B0", xop, plen, -2)
3207 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3208 "in %B0,__SP_H__", xop, plen, -2);
3211 return AVR_HAVE_MOVW
3212 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3214 : avr_asm_len ("mov %A0,%A1" CR_TAB
3215 "mov %B0,%B1", xop, plen, -2);
3216 } /* REG_P (src) */
3217 else if (CONSTANT_P (src))
3219 return output_reload_inhi (xop, NULL, plen);
3221 else if (MEM_P (src))
3223 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3226 else if (MEM_P (dest))
3228 rtx xop[2];
3230 xop[0] = dest;
3231 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3233 return out_movhi_mr_r (insn, xop, plen);
3236 fatal_insn ("invalid insn:", insn);
3238 return "";
/* Output instructions for a QImode load from memory: OP[0] = register
   destination, OP[1] = memory source.  If PLEN != NULL only the length
   is accumulated.  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Addresses in I/O space may use the shorter IN instruction.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      /* Displacement beyond LDD's 0..63 range: adjust Y temporarily.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no displacement mode: add the offset, load, undo.  */
          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
/* Output instructions for a HImode load from memory: OP[0] = register
   destination, OP[1] = memory source.  If PLEN != NULL only the length
   is accumulated.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      /* Destination overlaps the base: buffer the low byte.  */
      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no displacement mode; post-increment and undo if needed.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* Intentionally shadows the outer reg_base with the inner base reg.  */
      int reg_base = true_regnum (XEXP (base, 0));

      /* Displacement beyond LDD's range: adjust Y temporarily.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      /* Non-volatile access may read high byte first.  */
      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Addresses in I/O space may use the shorter IN instruction.  */
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output instructions for a 4-byte (SImode/SFmode) load from memory:
   OP[0] = register destination, OP[1] = memory source.  If L != NULL
   the sequence length is stored through it as well.  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Load the upper bytes first, working downwards, so X is
               overwritten only at the very end.  */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Destination's high word overlaps X: buffer byte C.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          /* Base overlaps destination: order the loads to clobber the
             base register last, buffering one byte in __tmp_reg__.  */
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      /* Displacement beyond LDD's range: adjust Y temporarily.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }

          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }

      /* LDD with in-range displacement; handle base/dest overlap.  */
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output instructions for a 4-byte (SImode/SFmode) store to memory:
   OP[0] = memory destination, OP[1] = register source.  If L != NULL
   the sequence length is stored through it as well.  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,("sts %m0,%A1" CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0) /* (r) */
    {
      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              /* Source IS the pointer: save r27 in __tmp_reg__ and store
                 around the post-increments.  */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source's high word overlaps X: buffer bytes C/D first.
                 __zero_reg__ is borrowed and cleared again afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      /* Displacement beyond STD's range: adjust Y temporarily.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output instructions for a 4-byte (SImode/SFmode) move.  OPERANDS[0] is
   the destination, OPERANDS[1] the source; if L != NULL only the sequence
   length is stored through it.  Returns the asm template or "".  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Reads from flash go through [E]LPM.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Copy in an order that never clobbers a source byte before it
             has been read, depending on which register number is higher.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (MEM_P (src))
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      const char *templ;

      /* Storing 0 uses the zero register; operands[1] is restored below.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Handle loads of 24-bit types from memory to register.

   INSN is the load insn, OP[0] the register destination, OP[1] the
   memory source.  PLEN == NULL: output the assembler code;
   PLEN != NULL: only record the sequence length (a negative count
   passed to avr_asm_len initializes *PLEN — see avr_asm_len).
   Returns "".  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined; shuffle through r28 and
               __tmp_reg__ so X is not clobbered mid-sequence.  */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* A destination of r24..r26 overlaps X (r26 is its high
                 byte), so restoring X would clobber the result; also
                 skip the restore when X is dead after this insn.  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Load high-to-low via __tmp_reg__ so the base register
               is overwritten only after its last use.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Only Y may carry an out-of-range displacement here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Moderate overshoot: temporarily bump Y with ADIW and use
             the maximal LDD offsets; otherwise add/subtract the full
             16-bit displacement around plain loads.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* Restore X unless the destination is r24.. (REG_W, which
             overlaps X when loading 3 bytes) or X is dead here.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        /* Destination overlaps base: load high-to-low through
           __tmp_reg__ so the address survives until the last load.  */
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    /* LDS is 2 words each, hence length 6.  */
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen , -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3946 /* Handle store of 24-bit type from register or zero to memory. */
3948 static const char*
3949 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3951 rtx dest = op[0];
3952 rtx src = op[1];
3953 rtx base = XEXP (dest, 0);
3954 int reg_base = true_regnum (base);
3956 if (CONSTANT_ADDRESS_P (base))
3957 return avr_asm_len ("sts %m0,%A1" CR_TAB
3958 "sts %m0+1,%B1" CR_TAB
3959 "sts %m0+2,%C1", op, plen, -6);
3961 if (reg_base > 0) /* (r) */
3963 if (reg_base == REG_X) /* (R26) */
3965 gcc_assert (!reg_overlap_mentioned_p (base, src));
3967 avr_asm_len ("st %0+,%A1" CR_TAB
3968 "st %0+,%B1" CR_TAB
3969 "st %0,%C1", op, plen, -3);
3971 if (!reg_unused_after (insn, base))
3972 avr_asm_len ("sbiw r26,2", op, plen, 1);
3974 return "";
3976 else
3977 return avr_asm_len ("st %0,%A1" CR_TAB
3978 "std %0+1,%B1" CR_TAB
3979 "std %0+2,%C1", op, plen, -3);
3981 else if (GET_CODE (base) == PLUS) /* (R + i) */
3983 int disp = INTVAL (XEXP (base, 1));
3984 reg_base = REGNO (XEXP (base, 0));
3986 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3988 if (reg_base != REG_Y)
3989 fatal_insn ("incorrect insn:",insn);
3991 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3992 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3993 "std Y+61,%A1" CR_TAB
3994 "std Y+62,%B1" CR_TAB
3995 "std Y+63,%C1" CR_TAB
3996 "sbiw r28,%o0-60", op, plen, -5);
3998 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3999 "sbci r29,hi8(-%o0)" CR_TAB
4000 "st Y,%A1" CR_TAB
4001 "std Y+1,%B1" CR_TAB
4002 "std Y+2,%C1" CR_TAB
4003 "subi r28,lo8(%o0)" CR_TAB
4004 "sbci r29,hi8(%o0)", op, plen, -7);
4006 if (reg_base == REG_X)
4008 /* (X + d) = R */
4009 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4011 avr_asm_len ("adiw r26,%o0" CR_TAB
4012 "st X+,%A1" CR_TAB
4013 "st X+,%B1" CR_TAB
4014 "st X,%C1", op, plen, -4);
4016 if (!reg_unused_after (insn, XEXP (base, 0)))
4017 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4019 return "";
4022 return avr_asm_len ("std %A0,%A1" CR_TAB
4023 "std %B0,%B1" CR_TAB
4024 "std %C0,%C1", op, plen, -3);
4026 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4027 return avr_asm_len ("st %0,%C1" CR_TAB
4028 "st %0,%B1" CR_TAB
4029 "st %0,%A1", op, plen, -3);
4030 else if (GET_CODE (base) == POST_INC) /* (R++) */
4031 return avr_asm_len ("st %0,%A1" CR_TAB
4032 "st %0,%B1" CR_TAB
4033 "st %0,%C1", op, plen, -3);
4035 fatal_insn ("unknown move insn:",insn);
4036 return "";
4040 /* Move around 24-bit stuff. */
4042 const char *
4043 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4045 rtx dest = op[0];
4046 rtx src = op[1];
4048 if (avr_mem_flash_p (src)
4049 || avr_mem_flash_p (dest))
4051 return avr_out_lpm (insn, op, plen);
4054 if (register_operand (dest, VOIDmode))
4056 if (register_operand (src, VOIDmode)) /* mov r,r */
4058 if (true_regnum (dest) > true_regnum (src))
4060 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4062 if (AVR_HAVE_MOVW)
4063 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4064 else
4065 return avr_asm_len ("mov %B0,%B1" CR_TAB
4066 "mov %A0,%A1", op, plen, 2);
4068 else
4070 if (AVR_HAVE_MOVW)
4071 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4072 else
4073 avr_asm_len ("mov %A0,%A1" CR_TAB
4074 "mov %B0,%B1", op, plen, -2);
4076 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4079 else if (CONSTANT_P (src))
4081 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4083 else if (MEM_P (src))
4084 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4086 else if (MEM_P (dest))
4088 rtx xop[2];
4090 xop[0] = dest;
4091 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4093 return avr_out_store_psi (insn, xop, plen);
4096 fatal_insn ("invalid insn:", insn);
4097 return "";
/* Output an 8-bit store from register OP[1] to memory OP[0] for INSN.
   PLEN as for avr_asm_len.  Returns "".  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Prefer single-word OUT for I/O addresses when optimizing;
         otherwise a 2-word STS.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Only Y may carry an out-of-range displacement here.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Moderate overshoot: bump Y with ADIW around a maximal STD
             offset; otherwise add/subtract the full displacement.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement addressing; adjust X itself.  If SRC
             overlaps X, save it to __tmp_reg__ before clobbering X.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          /* Restore X if it is still live after this insn.  */
          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X if it is still live after this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Only Y may carry an out-of-range displacement here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);

      /* (X + d) = R.  If the source is X itself, park it in
         __tmp_reg__/__zero_reg__ first (and clear __zero_reg__
         again afterwards).  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: must emit the low byte first, so pre-adjust the
         pointer by hand instead of using -R addressing.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Output a 16-bit store from register OP[1] to memory OP[0] for INSN.
   PLEN as for avr_asm_len.  Returns "".  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      /* Non-volatile and X dead: cheap post-increment order;
         otherwise write the high byte first and restore X.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Only Y may carry an out-of-range displacement here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);

      /* (X + d) = R.  If the source is X itself, park it in
         __tmp_reg__/__zero_reg__ first (and clear __zero_reg__
         again afterwards).  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: must emit the high byte first, so adjust the
         pointer by hand instead of using R++ addressing.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4397 /* Return 1 if frame pointer for current function required. */
4399 static bool
4400 avr_frame_pointer_required_p (void)
4402 return (cfun->calls_alloca
4403 || cfun->calls_setjmp
4404 || cfun->has_nonlocal_label
4405 || crtl->args.info.nregs == 0
4406 || get_frame_size () > 0);
4409 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4411 static RTX_CODE
4412 compare_condition (rtx insn)
4414 rtx next = next_real_insn (insn);
4416 if (next && JUMP_P (next))
4418 rtx pat = PATTERN (next);
4419 rtx src = SET_SRC (pat);
4421 if (IF_THEN_ELSE == GET_CODE (src))
4422 return GET_CODE (XEXP (src, 0));
4425 return UNKNOWN;
4429 /* Returns true iff INSN is a tst insn that only tests the sign. */
4431 static bool
4432 compare_sign_p (rtx insn)
4434 RTX_CODE cond = compare_condition (insn);
4435 return (cond == GE || cond == LT);
4439 /* Returns true iff the next insn is a JUMP_INSN with a condition
4440 that needs to be swapped (GT, GTU, LE, LEU). */
4442 static bool
4443 compare_diff_p (rtx insn)
4445 RTX_CODE cond = compare_condition (insn);
4446 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4449 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4451 static bool
4452 compare_eq_p (rtx insn)
4454 RTX_CODE cond = compare_condition (insn);
4455 return (cond == EQ || cond == NE);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison. */
  enum machine_mode mode;

  /* Number of bytes to operate on. */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.
     NOTE(review): this path assumes n_bytes >= 2 (it always touches
     %B0) — presumably 8-bit compares never reach here; confirm against
     the callers.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* DEC turns the == 1 test into a == 0 test of the low byte.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* All bytes must be 0xff: AND them together and COM.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise. */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63. */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          /* SBIW handles the low two bytes at once, but clobbers the
             register pair unless the subtrahend is 0.  */
          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              i++;
              continue;
            }

          /* For EQ/NE, x == -val is equivalent to x + val == 0,
             so a single ADIW does the job for small negatives.  */
          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy. */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register. */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI destroys the byte, OK since the reg is dead.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register. */

      gcc_assert (REG_P (xop[2]));

      /* Reload the scratch only when the byte value changes.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4625 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4627 const char*
4628 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4630 rtx xop[3];
4632 xop[0] = gen_rtx_REG (DImode, 18);
4633 xop[1] = op[0];
4634 xop[2] = op[1];
4636 return avr_out_compare (insn, xop, plen);
4639 /* Output test instruction for HImode. */
4641 const char*
4642 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4644 if (compare_sign_p (insn))
4646 avr_asm_len ("tst %B0", op, plen, -1);
4648 else if (reg_unused_after (insn, op[0])
4649 && compare_eq_p (insn))
4651 /* Faster than sbiw if we can clobber the operand. */
4652 avr_asm_len ("or %A0,%B0", op, plen, -1);
4654 else
4656 avr_out_compare (insn, op, plen);
4659 return "";
4663 /* Output test instruction for PSImode. */
4665 const char*
4666 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4668 if (compare_sign_p (insn))
4670 avr_asm_len ("tst %C0", op, plen, -1);
4672 else if (reg_unused_after (insn, op[0])
4673 && compare_eq_p (insn))
4675 /* Faster than sbiw if we can clobber the operand. */
4676 avr_asm_len ("or %A0,%B0" CR_TAB
4677 "or %A0,%C0", op, plen, -2);
4679 else
4681 avr_out_compare (insn, op, plen);
4684 return "";
4688 /* Output test instruction for SImode. */
4690 const char*
4691 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4693 if (compare_sign_p (insn))
4695 avr_asm_len ("tst %D0", op, plen, -1);
4697 else if (reg_unused_after (insn, op[0])
4698 && compare_eq_p (insn))
4700 /* Faster than sbiw if we can clobber the operand. */
4701 avr_asm_len ("or %A0,%B0" CR_TAB
4702 "or %A0,%C0" CR_TAB
4703 "or %A0,%D0", op, plen, -3);
4705 else
4707 avr_out_compare (insn, op, plen);
4710 return "";
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A scratch is usable only when present in the insn PARALLEL.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      /* When optimizing for size, only inline if the shifts take no
         more room than the loop setup would.  */
      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster. */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.
             (Loop below then exits on BRPL once the bit falls out.)  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift. */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Counter is known non-zero: the loop needs no entry test.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if decrementing it would clobber a live value
         or the count overlaps the operand being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* With a runtime count the loop condition is tested first (jump to
     label 2); with a constant count the body runs at least once.  */
  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* __zero_reg__ counter: LSR until the planted bit is gone (BRPL);
     otherwise plain DEC/BRNE.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
/* 8bit shift left ((char)x << i)

   OPERANDS[0] is the QImode operand, OPERANDS[2] the shift count.
   If LEN is non-NULL, store the instruction count there instead of
   printing.  Hand-optimized sequences per constant count; anything
   else falls through to out_shift_with_cnt.  Returns "".  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          /* Counts 0 and 1..3 not listed below drop to the generic
             shifter; counts >= 8 zero the register.  */
          if (INTVAL (operands[2]) < 8)
            break;

          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP exchanges nibbles: <<4 is SWAP + mask for LD regs.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Rotate bit 0 through carry into bit 7 of a cleared reg.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable or small constant count: generic one-bit-at-a-time.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)

   OPERANDS[0] is the HImode operand, OPERANDS[2] the shift count,
   OPERANDS[3] an optional scratch (when the insn is a PARALLEL).
   If LEN is non-NULL, store the instruction count there instead of
   printing.  Hand-optimized sequences per constant count; `break'
   paths fall through to out_shift_with_cnt.  Returns "".  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Nibble-swap both bytes, then fix up the crossing bits.  */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift by one, then as case 4.  */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* <<6 == >>2 then move up one byte, via __tmp_reg__.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          /* MUL by 0x20 shifts by 5; result high part lands in r0.  */
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* SET + BLD builds the 0x20 multiplier in r1 without an
                 LDI register; r1 (__zero_reg__) is cleared afterwards.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Short loop: move up a byte, then 6 more single shifts.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* <<14 == >>2 of the low byte into the high position.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* Restore the caller's LEN (possibly NULL) for the generic path.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
/* 24-bit shift left */

/* Output a 24-bit (PSImode) left shift  OP[0] = OP[1] << OP[2].
   OP[2] may be a compile-time constant or a register.
   If PLEN == NULL, print the instructions; otherwise *PLEN is set to
   the length (in words) of the sequence by avr_asm_len.
   Always returns "".  */

const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shift count >= 24: result is zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* << 8 is a whole-byte move: pick the copy order that does
               not clobber source bytes before they are read.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* << 16: only the low source byte survives, in %C0.  If the
               destination's C byte already overlaps it, skip the move.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* << 23: move bit 0 into bit 23 via carry.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  /* General case: loop emitted by out_shift_with_cnt around a 3-word
     one-bit left shift.  */
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
/* 32bit shift left ((long)x << i) */

/* Output a 32-bit (SImode) left shift OPERANDS[0] = OPERANDS[1] << OPERANDS[2].
   If LEN != NULL, *LEN is set to the length of the sequence in words
   instead of (in addition to) printing it.  Always returns the template
   string directly for constant counts, or "" after out_shift_with_cnt.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Byte move: order chosen so overlapping registers are read
               before being overwritten.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* If the high destination word already aliases the low source
               word, just clear the low word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* << 31: move bit 0 into bit 31 via carry.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i) */

/* Output an 8-bit arithmetic (sign-propagating) right shift.
   OPERANDS[2] must be a CONST_INT or a register; any other constant is
   an internal error.  If LEN != NULL, *LEN receives the sequence length
   in words.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* >> 6: save bit 6 in T, replicate the sign bit everywhere
             (lsl + sbc), then put bit 6 back as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* >> 7 (and any count >= 8, which is the same for ASHIFTRT):
             all result bits equal the sign bit.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right ((signed short)x >> i) */

/* Output a 16-bit arithmetic right shift.  SCRATCH notes whether the
   insn pattern supplies a scratch register %3 (PARALLEL pattern) and
   LDI_OK whether %0 lives in LD_REGS (r16..r31) so that immediate
   instructions (ldi, andi, muls with ldi) are usable.
   If LEN != NULL, *LEN receives the sequence length in words.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* >> 8: high byte moves to low byte, high byte becomes the
               sign extension.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* >> 11 == signed multiply by 2^5 and take the high byte.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* >> 15 (and >= 16): replicate the sign bit into all bits.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit arithmetic shift right */

/* Output a 24-bit (PSImode) arithmetic right shift
   OP[0] = OP[1] >> OP[2].  If PLEN == NULL print the instructions,
   otherwise set *PLEN to the sequence length in words.
   Always returns "".  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Byte move with sign extension of the new high byte; copy
             order depends on register overlap direction.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          /* Sign-extend into the two high bytes.  */
          return avr_asm_len ("clr %B0" CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* >> 23 (and >= 24): all bits become the sign bit.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit arithmetic shift right ((signed long)x >> i) */

/* Output a 32-bit arithmetic right shift.  If LEN != NULL, *LEN receives
   the sequence length in words.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Byte move with sign extension; copy order chosen by
               overlap direction.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Low destination word aliasing the high source word saves
               the copy.  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* >> 31 (and >= 32): replicate the sign bit into all bits.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* 8-bit logic shift right ((unsigned char)x >> i) */

/* Output an 8-bit logical (zero-filling) right shift.  OPERANDS[2] must
   be a CONST_INT or a register; any other constant is an internal error.
   If LEN != NULL, *LEN receives the sequence length in words.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");

        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* SWAP + mask is shorter than 4 x LSR, but ANDI needs an
             upper (LD_REGS) register.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* >> 7: move bit 7 through carry into bit 0 of a cleared
             register.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16-bit logic shift right ((unsigned short)x >> i) */

/* Output a 16-bit logical right shift.  SCRATCH notes whether the insn
   pattern supplies a scratch register %3 (PARALLEL pattern) and LDI_OK
   whether %0 lives in LD_REGS (r16..r31) so immediate instructions are
   usable.  If LEN != NULL, *LEN receives the sequence length in words.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then shuffle the nibbles into
                 place with masked EORs.  */
              *len = 6;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* One bit shift, then the nibble trick from case 4.  */
              *len = 8;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left twice into a temporary, then treat it as a byte
             move (>> 6 == << 2 combined with >> 8).  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          /* >> 8 is a plain byte move.  */
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "andi %A0,0x07");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* >> 13 == multiply by 2^3 and take the high byte.  */
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x08 multiplier in r1 via the T flag, since
                 no immediate-capable register is available.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,3" CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* >> 14 == multiply by 2^2 and take the high byte.  */
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: 6 single-bit shifts of the high byte.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0" CR_TAB
                      "dec %B0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Shift the two relevant bits left into a cleared register.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit logic shift right */

/* Output a 24-bit (PSImode) logical right shift OP[0] = OP[1] >> OP[2].
   If PLEN == NULL print the instructions, otherwise set *PLEN to the
   sequence length in words.  Always returns "".  */

const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Byte move; copy order depends on overlap direction.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* >> 23 (and >= 24): result is bit 23 in bit 0, rest zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit logic shift right ((unsigned int)x >> i) */

/* Output a 32-bit logical right shift.  If LEN != NULL, *LEN receives
   the sequence length in words.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Byte move; copy order depends on overlap direction.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* If the low destination word aliases the high source word,
               only the high word needs clearing.  */
            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0" CR_TAB
                            "clr %C0" CR_TAB
                            "clr %D0");

        case 31:
          /* >> 31: result is bit 31 in bit 0, rest zero.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %D0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* Output addition of register XOP[0] and compile time constant XOP[2].
   CODE == PLUS:  perform addition by using ADD instructions or
   CODE == MINUS: perform addition by using SUB instructions:

      XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

      XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.

   If OUT_LABEL is true, print the final 0: label which is needed for
   saturated addition / subtraction.  The only case where OUT_LABEL = false
   is useful is for saturated addition / subtraction performed during
   fixed-point rounding, cf. `avr_out_round'.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  enum machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  /* Register operand: emit a straightforward byte-wise ADD/ADC
     (resp. SUB/SBC) chain, then go handle saturation.  */

  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x is 0; nothing left to saturate for MINUS.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  /* Canonicalize: a - C is emitted as a + (-C) below, byte by byte.  */

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      /* Try a word-wise ADIW/SBIW on an even-aligned pair of bytes
         before falling back to single-byte instructions.  */

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_ZN;
                }

              /* Skip the second byte of the word just handled.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Zero byte: only the carry needs propagating, and only once
             the operation has started.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* +/-1 on the last byte alone: a single INC or DEC does it.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] is the MSB, op[1] (if any) the byte below it; the saturation
     value is materialized there first and copied down afterwards.  */

  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:

      XOP[0] = XOP[0] +/- XOP[2]

   This is a helper for the function below.  The only insns that need this
   are additions/subtraction for pointer modes, i.e. HImode and PSImode.

   CODE is PLUS or MINUS and selects the direction of the operation.
   If PLEN != NULL, *PLEN is set to the length of the sequence in words
   (the -2 below resets the counter before adding 2); otherwise the
   instructions are printed.  *PCC receives the effect on cc0.  */

static const char*
avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
{
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Only pointer modes want to add symbols.  */

  gcc_assert (mode == HImode || mode == PSImode);

  /* SUBI/SBCI set C, Z and N usefully only for real subtraction;
     for addition-by-negation only N is meaningful.  */
  *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

  /* Addition is done by subtracting the negated symbol value:  AVR has
     SUBI/SBCI with immediate but no ADDI.  */
  avr_asm_len (PLUS == code
               ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
               : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
               xop, plen, -2);

  /* PSImode needs a third byte handled with the high-low part.  */
  if (PSImode == mode)
    avr_asm_len (PLUS == code
                 ? "sbci %C0,hlo8(-(%2))"
                 : "sbci %C0,hlo8(%2)", xop, plen, 1);
  return "";
}
/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.

   INSN is a single_set insn or an insn pattern with a binary operation as
   SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.

   XOP are the operands of INSN.  In the case of 64-bit operations with
   constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
   The non-saturating insns up to 32 bits may or may not supply a "d" class
   scratch as XOP[3].

   If PLEN == NULL output the instructions.
   If PLEN != NULL set *PLEN to the length of the sequence in words.

   PCC is a pointer to store the instructions' effect on cc0.
   PCC may be NULL.

   PLEN and PCC default to NULL.

   OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.

   Return ""  */

const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  /* INSN may be a full insn or just a SET pattern; normalize to the SET.  */
  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  rtx xdest = SET_DEST (xpattern);
  enum machine_mode mode = GET_MODE (xdest);
  enum machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  /* Underlying non-saturating direction of the operation.  */
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  /* Callers that don't care about cc0 pass NULL; redirect to a dummy.  */
  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register-register operation up to 32 bits:  hand through directly,
     no constant preprocessing is needed.  */
  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  /* 64-bit operations work on the fixed accumulator register pair.  */
  if (8 == n_bytes)
    {
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* Addend is neither register nor numeric constant, i.e. a symbol:
         handled by the dedicated helper above.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      /* Work in the integer view of the (possibly fixed-point) mode.  */
      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence:  try both emitting the constant as an
     addition and as a subtraction, then pick the shorter one.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.
   INSN is an INSN_P or a pattern of an insn.  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  Tracked across bytes so
     that SET/CLT need not be re-emitted for every single-bit change.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.
     Tracked so an LDI can be reused when consecutive bytes need the
     same immediate.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* OR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Exactly one bit:  set it via the T flag.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff:  just load 0xff, reusing a register that is
                 already known to hold 0xff if we have one.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case: materialize the constant in the clobber.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* AND with 0xff is a no-op for this byte.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Exactly one bit cleared:  clear it via the T flag.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* XOR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping only the sign bit: SUBI 0x80 toggles bit 7.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6995 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6996 PLEN != NULL: Set *PLEN to the length of that sequence.
6997 Return "". */
6999 const char*
7000 avr_out_addto_sp (rtx *op, int *plen)
7002 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7003 int addend = INTVAL (op[0]);
7005 if (plen)
7006 *plen = 0;
7008 if (addend < 0)
7010 if (flag_verbose_asm || flag_print_asm_name)
7011 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7013 while (addend <= -pc_len)
7015 addend += pc_len;
7016 avr_asm_len ("rcall .", op, plen, 1);
7019 while (addend++ < 0)
7020 avr_asm_len ("push __zero_reg__", op, plen, 1);
7022 else if (addend > 0)
7024 if (flag_verbose_asm || flag_print_asm_name)
7025 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7027 while (addend-- > 0)
7028 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7031 return "";
7035 /* Outputs instructions needed for fixed point type conversion.
7036 This includes converting between any fixed point type, as well
7037 as converting to any integer type. Conversion between integer
7038 types is not supported.
7040 Converting signed fractional types requires a bit shift if converting
7041 to or from any unsigned fractional type because the decimal place is
7042 shifted by 1 bit. When the destination is a signed fractional, the sign
7043 is stored in either the carry or T bit. */
7045 const char*
7046 avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
7048 size_t i;
7049 rtx xop[6];
7050 RTX_CODE shift = UNKNOWN;
7051 bool sign_in_carry = false;
7052 bool msb_in_carry = false;
7053 bool lsb_in_tmp_reg = false;
7054 bool lsb_in_carry = false;
7055 bool frac_rounded = false;
7056 const char *code_ashift = "lsl %0";
7059 #define MAY_CLOBBER(RR) \
7060 /* Shorthand used below. */ \
7061 ((sign_bytes \
7062 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7063 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7064 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7065 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7067 struct
7069 /* bytes : Length of operand in bytes.
7070 ibyte : Length of integral part in bytes.
7071 fbyte, fbit : Length of fractional part in bytes, bits. */
7073 bool sbit;
7074 unsigned fbit, bytes, ibyte, fbyte;
7075 unsigned regno, regno_msb;
7076 } dest, src, *val[2] = { &dest, &src };
7078 if (plen)
7079 *plen = 0;
7081 /* Step 0: Determine information on source and destination operand we
7082 ====== will need in the remainder. */
7084 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7086 enum machine_mode mode;
7088 xop[i] = operands[i];
7090 mode = GET_MODE (xop[i]);
7092 val[i]->bytes = GET_MODE_SIZE (mode);
7093 val[i]->regno = REGNO (xop[i]);
7094 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7096 if (SCALAR_INT_MODE_P (mode))
7098 val[i]->sbit = intsigned;
7099 val[i]->fbit = 0;
7101 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7103 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7104 val[i]->fbit = GET_MODE_FBIT (mode);
7106 else
7107 fatal_insn ("unsupported fixed-point conversion", insn);
7109 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7110 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7113 // Byte offset of the decimal point taking into account different place
7114 // of the decimal point in input and output and different register numbers
7115 // of input and output.
7116 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7118 // Number of destination bytes that will come from sign / zero extension.
7119 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7121 // Number of bytes at the low end to be filled with zeros.
7122 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7124 // Do we have a 16-Bit register that is cleared?
7125 rtx clrw = NULL_RTX;
7127 bool sign_extend = src.sbit && sign_bytes;
7129 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7130 shift = ASHIFT;
7131 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7132 shift = ASHIFTRT;
7133 else if (dest.fbit % 8 == src.fbit % 8)
7134 shift = UNKNOWN;
7135 else
7136 gcc_unreachable();
7138 /* If we need to round the fraction part, we might need to save/round it
7139 before clobbering any of it in Step 1. Also, we might to want to do
7140 the rounding now to make use of LD_REGS. */
7141 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7142 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7143 && !TARGET_FRACT_CONV_TRUNC)
7145 bool overlap
7146 = (src.regno <=
7147 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
7148 && dest.regno - offset -1 >= dest.regno);
7149 unsigned s0 = dest.regno - offset -1;
7150 bool use_src = true;
7151 unsigned sn;
7152 unsigned copied_msb = src.regno_msb;
7153 bool have_carry = false;
7155 if (src.ibyte > dest.ibyte)
7156 copied_msb -= src.ibyte - dest.ibyte;
7158 for (sn = s0; sn <= copied_msb; sn++)
7159 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
7160 && !reg_unused_after (insn, all_regs_rtx[sn]))
7161 use_src = false;
7162 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
7164 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7165 &all_regs_rtx[src.regno_msb], plen, 2);
7166 sn = src.regno;
7167 if (sn < s0)
7169 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
7170 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
7171 else
7172 avr_asm_len ("sec" CR_TAB "cpc %0,__zero_reg__",
7173 &all_regs_rtx[sn], plen, 2);
7174 have_carry = true;
7176 while (++sn < s0)
7177 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7178 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
7179 &all_regs_rtx[s0], plen, 1);
7180 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7181 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
7182 avr_asm_len ("\n0:", NULL, plen, 0);
7183 frac_rounded = true;
7185 else if (use_src && overlap)
7187 avr_asm_len ("clr __tmp_reg__" CR_TAB
7188 "sbrc %1,0" CR_TAB "dec __tmp_reg__", xop, plen, 1);
7189 sn = src.regno;
7190 if (sn < s0)
7192 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7193 have_carry = true;
7195 while (++sn < s0)
7196 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7197 if (have_carry)
7198 avr_asm_len ("clt" CR_TAB "bld __tmp_reg__,7" CR_TAB
7199 "adc %0,__tmp_reg__",
7200 &all_regs_rtx[s0], plen, 1);
7201 else
7202 avr_asm_len ("lsr __tmp_reg" CR_TAB "add %0,__tmp_reg__",
7203 &all_regs_rtx[s0], plen, 2);
7204 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7205 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7206 frac_rounded = true;
7208 else if (overlap)
7210 bool use_src
7211 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
7212 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
7213 || reg_unused_after (insn, all_regs_rtx[s0])));
7214 xop[2] = all_regs_rtx[s0];
7215 unsigned sn = src.regno;
7216 if (!use_src || sn == s0)
7217 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7218 /* We need to consider to-be-discarded bits
7219 if the value is negative. */
7220 if (sn < s0)
7222 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7223 &all_regs_rtx[src.regno_msb], plen, 2);
7224 /* Test to-be-discarded bytes for any nozero bits.
7225 ??? Could use OR or SBIW to test two registers at once. */
7226 if (sn < s0)
7227 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7228 while (++sn < s0)
7229 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7230 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7231 if (use_src)
7232 avr_asm_len ("breq 0f" CR_TAB
7233 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7234 xop, plen, 3);
7235 else
7236 avr_asm_len ("breq 0f" CR_TAB
7237 "set" CR_TAB "bld __tmp_reg__,0\n0:",
7238 xop, plen, 3);
7240 lsb_in_tmp_reg = true;
7244 /* Step 1: Clear bytes at the low end and copy payload bits from source
7245 ====== to destination. */
7247 int step = offset < 0 ? 1 : -1;
7248 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7250 // We cleared at least that number of registers.
7251 int clr_n = 0;
7253 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7255 // Next regno of destination is needed for MOVW
7256 unsigned d1 = d0 + step;
7258 // Current and next regno of source
7259 signed s0 = d0 - offset;
7260 signed s1 = s0 + step;
7262 // Must current resp. next regno be CLRed? This applies to the low
7263 // bytes of the destination that have no associated source bytes.
7264 bool clr0 = s0 < (signed) src.regno;
7265 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7267 // First gather what code to emit (if any) and additional step to
7268 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7269 // is the source rtx for the current loop iteration.
7270 const char *code = NULL;
7271 int stepw = 0;
7273 if (clr0)
7275 if (AVR_HAVE_MOVW && clr1 && clrw)
7277 xop[2] = all_regs_rtx[d0 & ~1];
7278 xop[3] = clrw;
7279 code = "movw %2,%3";
7280 stepw = step;
7282 else
7284 xop[2] = all_regs_rtx[d0];
7285 code = "clr %2";
7287 if (++clr_n >= 2
7288 && !clrw
7289 && d0 % 2 == (step > 0))
7291 clrw = all_regs_rtx[d0 & ~1];
7295 else if (offset && s0 <= (signed) src.regno_msb)
7297 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7298 && d0 % 2 == (offset > 0)
7299 && d1 <= dest.regno_msb && d1 >= dest.regno
7300 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7302 xop[2] = all_regs_rtx[d0 & ~movw];
7303 xop[3] = all_regs_rtx[s0 & ~movw];
7304 code = movw ? "movw %2,%3" : "mov %2,%3";
7305 stepw = step * movw;
7308 if (code)
7310 if (sign_extend && shift != ASHIFT && !sign_in_carry
7311 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7313 /* We are going to override the sign bit. If we sign-extend,
7314 store the sign in the Carry flag. This is not needed if
7315 the destination will be ASHIFT is the remainder because
7316 the ASHIFT will set Carry without extra instruction. */
7318 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7319 sign_in_carry = true;
7322 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7324 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7325 && src.ibyte > dest.ibyte
7326 && (d0 == src_msb || d0 + stepw == src_msb))
7328 /* We are going to override the MSB. If we shift right,
7329 store the MSB in the Carry flag. This is only needed if
7330 we don't sign-extend becaue with sign-extension the MSB
7331 (the sign) will be produced by the sign extension. */
7333 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7334 msb_in_carry = true;
7337 unsigned src_lsb = dest.regno - offset -1;
7339 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7340 && !lsb_in_tmp_reg
7341 && (d0 == src_lsb || d0 + stepw == src_lsb))
7343 /* We are going to override the new LSB; store it into carry. */
7345 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7346 code_ashift = "rol %0";
7347 lsb_in_carry = true;
7350 avr_asm_len (code, xop, plen, 1);
7351 d0 += stepw;
7355 /* Step 2: Shift destination left by 1 bit position. This might be needed
7356 ====== for signed input and unsigned output. */
7358 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7360 unsigned s0 = dest.regno - offset -1;
7362 /* n1169 4.1.4 says:
7363 "Conversions from a fixed-point to an integer type round toward zero."
7364 Hence, converting a fract type to integer only gives a non-zero result
7365 for -1. */
7366 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7367 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
7368 && !TARGET_FRACT_CONV_TRUNC)
7370 gcc_assert (s0 == src.regno_msb);
7371 /* Check if the input is -1. We do that by checking if negating
7372 the input causes an integer overflow. */
7373 unsigned sn = src.regno;
7374 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7375 while (sn <= s0)
7376 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7378 /* Overflow goes with set carry. Clear carry otherwise. */
7379 avr_asm_len ("brvs 0f" CR_TAB "clc\n0:", NULL, plen, 2);
7381 /* Likewise, when converting from accumulator types to integer, we
7382 need to round up negative values. */
7383 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7384 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7385 && !TARGET_FRACT_CONV_TRUNC
7386 && !frac_rounded)
7388 bool have_carry = false;
7390 xop[2] = all_regs_rtx[s0];
7391 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
7392 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7393 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7394 &all_regs_rtx[src.regno_msb], plen, 2);
7395 if (!lsb_in_tmp_reg)
7397 unsigned sn = src.regno;
7398 if (sn < s0)
7400 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
7401 plen, 1);
7402 have_carry = true;
7404 while (++sn < s0)
7405 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
7406 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
7408 /* Add in C and the rounding value 127. */
7409 /* If the destination msb is a sign byte, and in LD_REGS,
7410 grab it as a temporary. */
7411 if (sign_bytes
7412 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
7413 dest.regno_msb))
7415 xop[3] = all_regs_rtx[dest.regno_msb];
7416 avr_asm_len ("ldi %3,127", xop, plen, 1);
7417 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
7418 : have_carry ? "adc %2,%3"
7419 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
7420 : "add %2,%3"),
7421 xop, plen, 1);
7423 else
7425 /* Fall back to use __zero_reg__ as a temporary. */
7426 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
7427 if (have_carry)
7428 avr_asm_len ("clt" CR_TAB "bld __zero_reg__,7", NULL, plen, 2);
7429 else
7430 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
7431 avr_asm_len ((have_carry && lsb_in_tmp_reg
7432 ? "adc __tmp_reg__,__zero_reg__"
7433 : have_carry ? "adc %2,__zero_reg__"
7434 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
7435 : "add %2,__zero_reg__"),
7436 xop, plen, 1);
7437 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
7439 for (d0 = dest.regno + zero_bytes;
7440 d0 <= dest.regno_msb - sign_bytes; d0++)
7441 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
7442 avr_asm_len (lsb_in_tmp_reg
7443 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7444 xop, plen, 1);
7446 else if (MAY_CLOBBER (s0))
7447 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7448 else
7449 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7450 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7452 code_ashift = "rol %0";
7453 lsb_in_carry = true;
7456 if (shift == ASHIFT)
7458 for (d0 = dest.regno + zero_bytes;
7459 d0 <= dest.regno_msb - sign_bytes; d0++)
7461 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7462 code_ashift = "rol %0";
7465 lsb_in_carry = false;
7466 sign_in_carry = true;
7469 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7470 ======= it in sign-extension below. */
7472 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7473 && src.ibyte > dest.ibyte)
7475 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7477 if (MAY_CLOBBER (s0))
7478 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7479 else
7480 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7481 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7483 msb_in_carry = true;
7486 /* Step 3: Sign-extend or zero-extend the destination as needed.
7487 ====== */
7489 if (sign_extend && !sign_in_carry)
7491 unsigned s0 = src.regno_msb;
7493 if (MAY_CLOBBER (s0))
7494 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7495 else
7496 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7497 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7499 sign_in_carry = true;
7502 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7504 unsigned copies = 0;
7505 rtx movw = sign_extend ? NULL_RTX : clrw;
7507 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7509 if (AVR_HAVE_MOVW && movw
7510 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7512 xop[2] = all_regs_rtx[d0];
7513 xop[3] = movw;
7514 avr_asm_len ("movw %2,%3", xop, plen, 1);
7515 d0++;
7517 else
7519 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7520 &all_regs_rtx[d0], plen, 1);
7522 if (++copies >= 2 && !movw && d0 % 2 == 1)
7523 movw = all_regs_rtx[d0-1];
7525 } /* for */
7528 /* Step 4: Right shift the destination. This might be needed for
7529 ====== conversions from unsigned to signed. */
7531 if (shift == ASHIFTRT)
7533 const char *code_ashiftrt = "lsr %0";
7535 if (sign_extend || msb_in_carry)
7536 code_ashiftrt = "ror %0";
7538 if (src.sbit && src.ibyte == dest.ibyte)
7539 code_ashiftrt = "asr %0";
7541 for (d0 = dest.regno_msb - sign_bytes;
7542 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7544 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7545 code_ashiftrt = "ror %0";
7549 #undef MAY_CLOBBER
7551 return "";
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */

const char*
avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  enum machine_mode mode = GET_MODE (xop[0]);
  enum machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  // I_ADD and WI_ADD encode the same constant 2^(fbit-1-RP):  the
  // double_int form feeds const_fixed_from_double_int below, the
  // wide_int form is used to build the AND mask.
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
                                         GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  // If the addition saturated, the result is already all-ones in the kept
  // bits, so skip the masking below.
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:  2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                          ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  // -2 * wi_add == mask with all bits from 2^(-RP) upwards set.
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   OPERANDS[0] is the destination, OPERANDS[1] the source, OPERANDS[2] the
   rotate count in bits (a CONST_INT), OPERANDS[3] an optional scratch.
   Returns true (moves have been emitted).  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          /* Classic three-XOR in-place swap of the two bytes.  */
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;   /* Index of the move that must run first, or -1.  */
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove  conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of  blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
7767 /* Worker function for `ADJUST_INSN_LENGTH'. */
7768 /* Modifies the length assigned to instruction INSN
7769 LEN is the initially computed length of the insn. */
7772 avr_adjust_insn_length (rtx insn, int len)
7774 rtx *op = recog_data.operand;
7775 enum attr_adjust_len adjust_len;
7777 /* Some complex insns don't need length adjustment and therefore
7778 the length need not/must not be adjusted for these insns.
7779 It is easier to state this in an insn attribute "adjust_len" than
7780 to clutter up code here... */
7782 if (JUMP_TABLE_DATA_P (insn) || recog_memoized (insn) == -1)
7784 return len;
7787 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7789 adjust_len = get_attr_adjust_len (insn);
7791 if (adjust_len == ADJUST_LEN_NO)
7793 /* Nothing to adjust: The length from attribute "length" is fine.
7794 This is the default. */
7796 return len;
7799 /* Extract insn's operands. */
7801 extract_constrain_insn_cached (insn);
7803 /* Dispatch to right function. */
7805 switch (adjust_len)
7807 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7808 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7809 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7811 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7813 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7814 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7816 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7817 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7818 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7819 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7820 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7821 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7822 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7824 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7825 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7826 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
7828 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7829 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7830 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7831 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7832 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7834 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7835 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7836 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7838 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7839 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7840 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7842 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7843 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7844 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7846 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7847 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7848 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7850 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7852 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7854 default:
7855 gcc_unreachable();
7858 return len;
7861 /* Return nonzero if register REG dead after INSN. */
7864 reg_unused_after (rtx insn, rtx reg)
7866 return (dead_or_set_p (insn, reg)
7867 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7870 /* Return nonzero if REG is not used after INSN.
7871 We assume REG is a reload reg, and therefore does
7872 not live past labels. It may live past calls or jumps though. */
7875 _reg_unused_after (rtx insn, rtx reg)
7877 enum rtx_code code;
7878 rtx set;
7880 /* If the reg is set by this instruction, then it is safe for our
7881 case. Disregard the case where this is a store to memory, since
7882 we are checking a register used in the store address. */
7883 set = single_set (insn);
7884 if (set && GET_CODE (SET_DEST (set)) != MEM
7885 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7886 return 1;
7888 while ((insn = NEXT_INSN (insn)))
7890 rtx set;
7891 code = GET_CODE (insn);
7893 #if 0
7894 /* If this is a label that existed before reload, then the register
7895 if dead here. However, if this is a label added by reorg, then
7896 the register may still be live here. We can't tell the difference,
7897 so we just ignore labels completely. */
7898 if (code == CODE_LABEL)
7899 return 1;
7900 /* else */
7901 #endif
7903 if (!INSN_P (insn))
7904 continue;
7906 if (code == JUMP_INSN)
7907 return 0;
7909 /* If this is a sequence, we must handle them all at once.
7910 We could have for instance a call that sets the target register,
7911 and an insn in a delay slot that uses the register. In this case,
7912 we must return 0. */
7913 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7915 int i;
7916 int retval = 0;
7918 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7920 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7921 rtx set = single_set (this_insn);
7923 if (CALL_P (this_insn))
7924 code = CALL_INSN;
7925 else if (JUMP_P (this_insn))
7927 if (INSN_ANNULLED_BRANCH_P (this_insn))
7928 return 0;
7929 code = JUMP_INSN;
7932 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7933 return 0;
7934 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7936 if (GET_CODE (SET_DEST (set)) != MEM)
7937 retval = 1;
7938 else
7939 return 0;
7941 if (set == 0
7942 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7943 return 0;
7945 if (retval == 1)
7946 return 1;
7947 else if (code == JUMP_INSN)
7948 return 0;
7951 if (code == CALL_INSN)
7953 rtx tem;
7954 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7955 if (GET_CODE (XEXP (tem, 0)) == USE
7956 && REG_P (XEXP (XEXP (tem, 0), 0))
7957 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7958 return 0;
7959 if (call_used_regs[REGNO (reg)])
7960 return 1;
7963 set = single_set (insn);
7965 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7966 return 0;
7967 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7968 return GET_CODE (SET_DEST (set)) != MEM;
7969 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7970 return 0;
7972 return 1;
7976 /* Implement `TARGET_ASM_INTEGER'. */
7977 /* Target hook for assembling integer objects. The AVR version needs
7978 special handling for references to certain labels. */
7980 static bool
7981 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
7983 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
7984 && text_segment_operand (x, VOIDmode))
7986 fputs ("\t.word\tgs(", asm_out_file);
7987 output_addr_const (asm_out_file, x);
7988 fputs (")\n", asm_out_file);
7990 return true;
7992 else if (GET_MODE (x) == PSImode)
7994 /* This needs binutils 2.23+, see PR binutils/13503 */
7996 fputs ("\t.byte\tlo8(", asm_out_file);
7997 output_addr_const (asm_out_file, x);
7998 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8000 fputs ("\t.byte\thi8(", asm_out_file);
8001 output_addr_const (asm_out_file, x);
8002 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8004 fputs ("\t.byte\thh8(", asm_out_file);
8005 output_addr_const (asm_out_file, x);
8006 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8008 return true;
8010 else if (CONST_FIXED_P (x))
8012 unsigned n;
8014 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8016 for (n = 0; n < size; n++)
8018 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8019 default_assemble_integer (xn, 1, aligned_p);
8022 return true;
8025 return default_assemble_integer (x, size, aligned_p);
8029 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8030 /* Return value is nonzero if pseudos that have been
8031 assigned to registers of class CLASS would likely be spilled
8032 because registers of CLASS are needed for spill registers. */
8034 static bool
8035 avr_class_likely_spilled_p (reg_class_t c)
8037 return (c != ALL_REGS && c != ADDW_REGS);
8041 /* Valid attributes:
8042 progmem - Put data to program memory.
8043 signal - Make a function to be hardware interrupt.
8044 After function prologue interrupts remain disabled.
8045 interrupt - Make a function to be hardware interrupt. Before function
8046 prologue interrupts are enabled by means of SEI.
8047 naked - Don't generate function prologue/epilogue and RET
8048 instruction. */
8050 /* Handle a "progmem" attribute; arguments as in
8051 struct attribute_spec.handler. */
8053 static tree
8054 avr_handle_progmem_attribute (tree *node, tree name,
8055 tree args ATTRIBUTE_UNUSED,
8056 int flags ATTRIBUTE_UNUSED,
8057 bool *no_add_attrs)
8059 if (DECL_P (*node))
8061 if (TREE_CODE (*node) == TYPE_DECL)
8063 /* This is really a decl attribute, not a type attribute,
8064 but try to handle it for GCC 3.0 backwards compatibility. */
8066 tree type = TREE_TYPE (*node);
8067 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8068 tree newtype = build_type_attribute_variant (type, attr);
8070 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8071 TREE_TYPE (*node) = newtype;
8072 *no_add_attrs = true;
8074 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8076 *no_add_attrs = false;
8078 else
8080 warning (OPT_Wattributes, "%qE attribute ignored",
8081 name);
8082 *no_add_attrs = true;
8086 return NULL_TREE;
8089 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8090 struct attribute_spec.handler. */
8092 static tree
8093 avr_handle_fndecl_attribute (tree *node, tree name,
8094 tree args ATTRIBUTE_UNUSED,
8095 int flags ATTRIBUTE_UNUSED,
8096 bool *no_add_attrs)
8098 if (TREE_CODE (*node) != FUNCTION_DECL)
8100 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8101 name);
8102 *no_add_attrs = true;
8105 return NULL_TREE;
8108 static tree
8109 avr_handle_fntype_attribute (tree *node, tree name,
8110 tree args ATTRIBUTE_UNUSED,
8111 int flags ATTRIBUTE_UNUSED,
8112 bool *no_add_attrs)
8114 if (TREE_CODE (*node) != FUNCTION_TYPE)
8116 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8117 name);
8118 *no_add_attrs = true;
8121 return NULL_TREE;
8125 /* AVR attributes. */
8126 static const struct attribute_spec
8127 avr_attribute_table[] =
8129 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8130 affects_type_identity } */
8131 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
8132 false },
8133 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8134 false },
8135 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8136 false },
8137 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
8138 false },
8139 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
8140 false },
8141 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
8142 false },
8143 { NULL, 0, 0, false, false, false, NULL, false }
8147 /* Look if DECL shall be placed in program memory space by
8148 means of attribute `progmem' or some address-space qualifier.
8149 Return non-zero if DECL is data that must end up in Flash and
8150 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8152 Return 2 if DECL is located in 24-bit flash address-space
8153 Return 1 if DECL is located in 16-bit flash address-space
8154 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8155 Return 0 otherwise */
8158 avr_progmem_p (tree decl, tree attributes)
8160 tree a;
8162 if (TREE_CODE (decl) != VAR_DECL)
8163 return 0;
8165 if (avr_decl_memx_p (decl))
8166 return 2;
8168 if (avr_decl_flash_p (decl))
8169 return 1;
8171 if (NULL_TREE
8172 != lookup_attribute ("progmem", attributes))
8173 return -1;
8175 a = decl;
8178 a = TREE_TYPE(a);
8179 while (TREE_CODE (a) == ARRAY_TYPE);
8181 if (a == error_mark_node)
8182 return 0;
8184 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
8185 return -1;
8187 return 0;
8191 /* Scan type TYP for pointer references to address space ASn.
8192 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8193 the AS are also declared to be CONST.
8194 Otherwise, return the respective address space, i.e. a value != 0. */
8196 static addr_space_t
8197 avr_nonconst_pointer_addrspace (tree typ)
8199 while (ARRAY_TYPE == TREE_CODE (typ))
8200 typ = TREE_TYPE (typ);
8202 if (POINTER_TYPE_P (typ))
8204 addr_space_t as;
8205 tree target = TREE_TYPE (typ);
8207 /* Pointer to function: Test the function's return type. */
8209 if (FUNCTION_TYPE == TREE_CODE (target))
8210 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
8212 /* "Ordinary" pointers... */
8214 while (TREE_CODE (target) == ARRAY_TYPE)
8215 target = TREE_TYPE (target);
8217 /* Pointers to non-generic address space must be const.
8218 Refuse address spaces outside the device's flash. */
8220 as = TYPE_ADDR_SPACE (target);
8222 if (!ADDR_SPACE_GENERIC_P (as)
8223 && (!TYPE_READONLY (target)
8224 || avr_addrspace[as].segment >= avr_current_device->n_flash))
8226 return as;
8229 /* Scan pointer's target type. */
8231 return avr_nonconst_pointer_addrspace (target);
8234 return ADDR_SPACE_GENERIC;
8238 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8239 go along with CONST qualifier. Writing to these address spaces should
8240 be detected and complained about as early as possible. */
8242 static bool
8243 avr_pgm_check_var_decl (tree node)
8245 const char *reason = NULL;
8247 addr_space_t as = ADDR_SPACE_GENERIC;
8249 gcc_assert (as == 0);
8251 if (avr_log.progmem)
8252 avr_edump ("%?: %t\n", node);
8254 switch (TREE_CODE (node))
8256 default:
8257 break;
8259 case VAR_DECL:
8260 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8261 reason = "variable";
8262 break;
8264 case PARM_DECL:
8265 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8266 reason = "function parameter";
8267 break;
8269 case FIELD_DECL:
8270 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8271 reason = "structure field";
8272 break;
8274 case FUNCTION_DECL:
8275 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
8277 reason = "return type of function";
8278 break;
8280 case POINTER_TYPE:
8281 if (as = avr_nonconst_pointer_addrspace (node), as)
8282 reason = "pointer";
8283 break;
8286 if (reason)
8288 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8290 if (TYPE_P (node))
8291 error ("%qT uses address space %qs beyond flash of %qs",
8292 node, avr_addrspace[as].name, avr_current_device->name);
8293 else
8294 error ("%s %q+D uses address space %qs beyond flash of %qs",
8295 reason, node, avr_addrspace[as].name,
8296 avr_current_device->name);
8298 else
8300 if (TYPE_P (node))
8301 error ("pointer targeting address space %qs must be const in %qT",
8302 avr_addrspace[as].name, node);
8303 else
8304 error ("pointer targeting address space %qs must be const"
8305 " in %s %q+D",
8306 avr_addrspace[as].name, reason, node);
8310 return reason == NULL;
8314 /* Add the section attribute if the variable is in progmem. */
8316 static void
8317 avr_insert_attributes (tree node, tree *attributes)
8319 avr_pgm_check_var_decl (node);
8321 if (TREE_CODE (node) == VAR_DECL
8322 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8323 && avr_progmem_p (node, *attributes))
8325 addr_space_t as;
8326 tree node0 = node;
8328 /* For C++, we have to peel arrays in order to get correct
8329 determination of readonlyness. */
8332 node0 = TREE_TYPE (node0);
8333 while (TREE_CODE (node0) == ARRAY_TYPE);
8335 if (error_mark_node == node0)
8336 return;
8338 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8340 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8342 error ("variable %q+D located in address space %qs"
8343 " beyond flash of %qs",
8344 node, avr_addrspace[as].name, avr_current_device->name);
8347 if (!TYPE_READONLY (node0)
8348 && !TREE_READONLY (node))
8350 const char *reason = "__attribute__((progmem))";
8352 if (!ADDR_SPACE_GENERIC_P (as))
8353 reason = avr_addrspace[as].name;
8355 if (avr_log.progmem)
8356 avr_edump ("\n%?: %t\n%t\n", node, node0);
8358 error ("variable %q+D must be const in order to be put into"
8359 " read-only section by means of %qs", node, reason);
8365 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8366 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8367 /* Track need of __do_clear_bss. */
8369 void
8370 avr_asm_output_aligned_decl_common (FILE * stream,
8371 const_tree decl ATTRIBUTE_UNUSED,
8372 const char *name,
8373 unsigned HOST_WIDE_INT size,
8374 unsigned int align, bool local_p)
8376 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8377 There is no need to trigger __do_clear_bss code for them. */
8379 if (!STR_PREFIX_P (name, "__gnu_lto"))
8380 avr_need_clear_bss_p = true;
8382 if (local_p)
8383 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8384 else
8385 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8389 /* Unnamed section callback for data_section
8390 to track need of __do_copy_data. */
8392 static void
8393 avr_output_data_section_asm_op (const void *data)
8395 avr_need_copy_data_p = true;
8397 /* Dispatch to default. */
8398 output_section_asm_op (data);
8402 /* Unnamed section callback for bss_section
8403 to track need of __do_clear_bss. */
8405 static void
8406 avr_output_bss_section_asm_op (const void *data)
8408 avr_need_clear_bss_p = true;
8410 /* Dispatch to default. */
8411 output_section_asm_op (data);
8415 /* Unnamed section callback for progmem*.data sections. */
8417 static void
8418 avr_output_progmem_section_asm_op (const void *data)
8420 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8421 (const char*) data);
8425 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8427 static void
8428 avr_asm_init_sections (void)
8430 /* Set up a section for jump tables. Alignment is handled by
8431 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8433 if (AVR_HAVE_JMP_CALL)
8435 progmem_swtable_section
8436 = get_unnamed_section (0, output_section_asm_op,
8437 "\t.section\t.progmem.gcc_sw_table"
8438 ",\"a\",@progbits");
8440 else
8442 progmem_swtable_section
8443 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8444 "\t.section\t.progmem.gcc_sw_table"
8445 ",\"ax\",@progbits");
8448 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8449 resp. `avr_need_copy_data_p'. */
8451 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8452 data_section->unnamed.callback = avr_output_data_section_asm_op;
8453 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8457 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8459 static section*
8460 avr_asm_function_rodata_section (tree decl)
8462 /* If a function is unused and optimized out by -ffunction-sections
8463 and --gc-sections, ensure that the same will happen for its jump
8464 tables by putting them into individual sections. */
8466 unsigned int flags;
8467 section * frodata;
8469 /* Get the frodata section from the default function in varasm.c
8470 but treat function-associated data-like jump tables as code
8471 rather than as user defined data. AVR has no constant pools. */
8473 int fdata = flag_data_sections;
8475 flag_data_sections = flag_function_sections;
8476 frodata = default_function_rodata_section (decl);
8477 flag_data_sections = fdata;
8478 flags = frodata->common.flags;
8481 if (frodata != readonly_data_section
8482 && flags & SECTION_NAMED)
8484 /* Adjust section flags and replace section name prefix. */
8486 unsigned int i;
8488 static const char* const prefix[] =
8490 ".rodata", ".progmem.gcc_sw_table",
8491 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8494 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8496 const char * old_prefix = prefix[i];
8497 const char * new_prefix = prefix[i+1];
8498 const char * name = frodata->named.name;
8500 if (STR_PREFIX_P (name, old_prefix))
8502 const char *rname = ACONCAT ((new_prefix,
8503 name + strlen (old_prefix), NULL));
8504 flags &= ~SECTION_CODE;
8505 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8507 return get_section (rname, flags, frodata->named.decl);
8512 return progmem_swtable_section;
8516 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8517 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8519 static void
8520 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
8522 if (flags & AVR_SECTION_PROGMEM)
8524 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8525 const char *old_prefix = ".rodata";
8526 const char *new_prefix = avr_addrspace[as].section_name;
8528 if (STR_PREFIX_P (name, old_prefix))
8530 const char *sname = ACONCAT ((new_prefix,
8531 name + strlen (old_prefix), NULL));
8532 default_elf_asm_named_section (sname, flags, decl);
8533 return;
8536 default_elf_asm_named_section (new_prefix, flags, decl);
8537 return;
8540 if (!avr_need_copy_data_p)
8541 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8542 || STR_PREFIX_P (name, ".rodata")
8543 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
8545 if (!avr_need_clear_bss_p)
8546 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8548 default_elf_asm_named_section (name, flags, decl);
8552 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8554 static unsigned int
8555 avr_section_type_flags (tree decl, const char *name, int reloc)
8557 unsigned int flags = default_section_type_flags (decl, name, reloc);
8559 if (STR_PREFIX_P (name, ".noinit"))
8561 if (decl && TREE_CODE (decl) == VAR_DECL
8562 && DECL_INITIAL (decl) == NULL_TREE)
8563 flags |= SECTION_BSS; /* @nobits */
8564 else
8565 warning (0, "only uninitialized variables can be placed in the "
8566 ".noinit section");
8569 if (decl && DECL_P (decl)
8570 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8572 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8574 /* Attribute progmem puts data in generic address space.
8575 Set section flags as if it was in __flash to get the right
8576 section prefix in the remainder. */
8578 if (ADDR_SPACE_GENERIC_P (as))
8579 as = ADDR_SPACE_FLASH;
8581 flags |= as * SECTION_MACH_DEP;
8582 flags &= ~SECTION_WRITE;
8583 flags &= ~SECTION_BSS;
8586 return flags;
8590 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8592 static void
8593 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8595 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8596 readily available, see PR34734. So we postpone the warning
8597 about uninitialized data in program memory section until here. */
8599 if (new_decl_p
8600 && decl && DECL_P (decl)
8601 && NULL_TREE == DECL_INITIAL (decl)
8602 && !DECL_EXTERNAL (decl)
8603 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8605 warning (OPT_Wuninitialized,
8606 "uninitialized variable %q+D put into "
8607 "program memory area", decl);
8610 default_encode_section_info (decl, rtl, new_decl_p);
8612 if (decl && DECL_P (decl)
8613 && TREE_CODE (decl) != FUNCTION_DECL
8614 && MEM_P (rtl)
8615 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8617 rtx sym = XEXP (rtl, 0);
8618 tree type = TREE_TYPE (decl);
8619 if (type == error_mark_node)
8620 return;
8621 addr_space_t as = TYPE_ADDR_SPACE (type);
8623 /* PSTR strings are in generic space but located in flash:
8624 patch address space. */
8626 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8627 as = ADDR_SPACE_FLASH;
8629 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8634 /* Implement `TARGET_ASM_SELECT_SECTION' */
8636 static section *
8637 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8639 section * sect = default_elf_select_section (decl, reloc, align);
8641 if (decl && DECL_P (decl)
8642 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8644 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8646 /* __progmem__ goes in generic space but shall be allocated to
8647 .progmem.data */
8649 if (ADDR_SPACE_GENERIC_P (as))
8650 as = ADDR_SPACE_FLASH;
8652 if (sect->common.flags & SECTION_NAMED)
8654 const char * name = sect->named.name;
8655 const char * old_prefix = ".rodata";
8656 const char * new_prefix = avr_addrspace[as].section_name;
8658 if (STR_PREFIX_P (name, old_prefix))
8660 const char *sname = ACONCAT ((new_prefix,
8661 name + strlen (old_prefix), NULL));
8662 return get_section (sname, sect->common.flags, sect->named.decl);
8666 if (!progmem_section[as])
8668 progmem_section[as]
8669 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8670 avr_addrspace[as].section_name);
8673 return progmem_section[as];
8676 return sect;
8679 /* Implement `TARGET_ASM_FILE_START'. */
8680 /* Outputs some text at the start of each assembler file. */
8682 static void
8683 avr_file_start (void)
8685 int sfr_offset = avr_current_arch->sfr_offset;
8687 if (avr_current_arch->asm_only)
8688 error ("MCU %qs supported for assembler only", avr_current_device->name);
8690 default_file_start ();
8692 /* Print I/O addresses of some SFRs used with IN and OUT. */
8694 if (AVR_HAVE_SPH)
8695 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8697 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8698 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8699 if (AVR_HAVE_RAMPZ)
8700 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8701 if (AVR_HAVE_RAMPY)
8702 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8703 if (AVR_HAVE_RAMPX)
8704 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8705 if (AVR_HAVE_RAMPD)
8706 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8707 if (AVR_XMEGA)
8708 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
8709 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8710 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8714 /* Implement `TARGET_ASM_FILE_END'. */
8715 /* Outputs to the stdio stream FILE some
8716 appropriate text to go at the end of an assembler file. */
8718 static void
8719 avr_file_end (void)
8721 /* Output these only if there is anything in the
8722 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8723 input section(s) - some code size can be saved by not
8724 linking in the initialization code from libgcc if resp.
8725 sections are empty, see PR18145. */
8727 if (avr_need_copy_data_p)
8728 fputs (".global __do_copy_data\n", asm_out_file);
8730 if (avr_need_clear_bss_p)
8731 fputs (".global __do_clear_bss\n", asm_out_file);
8735 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8736 /* Choose the order in which to allocate hard registers for
8737 pseudo-registers local to a basic block.
8739 Store the desired register order in the array `reg_alloc_order'.
8740 Element 0 should be the register to allocate first; element 1, the
8741 next register; and so on. */
8743 void
8744 avr_adjust_reg_alloc_order (void)
8746 unsigned int i;
8747 static const int order_0[] =
8749 24, 25,
8750 18, 19, 20, 21, 22, 23,
8751 30, 31,
8752 26, 27, 28, 29,
8753 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8754 0, 1,
8755 32, 33, 34, 35
8757 static const int order_1[] =
8759 18, 19, 20, 21, 22, 23, 24, 25,
8760 30, 31,
8761 26, 27, 28, 29,
8762 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8763 0, 1,
8764 32, 33, 34, 35
8766 static const int order_2[] =
8768 25, 24, 23, 22, 21, 20, 19, 18,
8769 30, 31,
8770 26, 27, 28, 29,
8771 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8772 1, 0,
8773 32, 33, 34, 35
8776 const int *order = (TARGET_ORDER_1 ? order_1 :
8777 TARGET_ORDER_2 ? order_2 :
8778 order_0);
8779 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8780 reg_alloc_order[i] = order[i];
8784 /* Implement `TARGET_REGISTER_MOVE_COST' */
8786 static int
8787 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8788 reg_class_t from, reg_class_t to)
8790 return (from == STACK_REG ? 6
8791 : to == STACK_REG ? 12
8792 : 2);
8796 /* Implement `TARGET_MEMORY_MOVE_COST' */
8798 static int
8799 avr_memory_move_cost (enum machine_mode mode,
8800 reg_class_t rclass ATTRIBUTE_UNUSED,
8801 bool in ATTRIBUTE_UNUSED)
8803 return (mode == QImode ? 2
8804 : mode == HImode ? 4
8805 : mode == SImode ? 8
8806 : mode == SFmode ? 8
8807 : 16);
8811 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8812 cost of an RTX operand given its context. X is the rtx of the
8813 operand, MODE is its mode, and OUTER is the rtx_code of this
8814 operand's parent operator. */
8816 static int
8817 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
8818 int opno, bool speed)
8820 enum rtx_code code = GET_CODE (x);
8821 int total;
8823 switch (code)
8825 case REG:
8826 case SUBREG:
8827 return 0;
8829 case CONST_INT:
8830 case CONST_FIXED:
8831 case CONST_DOUBLE:
8832 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
8834 default:
8835 break;
8838 total = 0;
8839 avr_rtx_costs (x, code, outer, opno, &total, speed);
8840 return total;
8843 /* Worker function for AVR backend's rtx_cost function.
8844 X is rtx expression whose cost is to be calculated.
8845 Return true if the complete cost has been computed.
8846 Return false if subexpressions should be scanned.
8847 In either case, *TOTAL contains the cost result. */
8849 static bool
8850 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8851 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8853 enum rtx_code code = (enum rtx_code) codearg;
8854 enum machine_mode mode = GET_MODE (x);
8855 HOST_WIDE_INT val;
8857 switch (code)
8859 case CONST_INT:
8860 case CONST_FIXED:
8861 case CONST_DOUBLE:
8862 case SYMBOL_REF:
8863 case CONST:
8864 case LABEL_REF:
8865 /* Immediate constants are as cheap as registers. */
8866 *total = 0;
8867 return true;
8869 case MEM:
8870 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8871 return true;
8873 case NEG:
8874 switch (mode)
8876 case QImode:
8877 case SFmode:
8878 *total = COSTS_N_INSNS (1);
8879 break;
8881 case HImode:
8882 case PSImode:
8883 case SImode:
8884 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8885 break;
8887 default:
8888 return false;
8890 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8891 return true;
8893 case ABS:
8894 switch (mode)
8896 case QImode:
8897 case SFmode:
8898 *total = COSTS_N_INSNS (1);
8899 break;
8901 default:
8902 return false;
8904 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8905 return true;
8907 case NOT:
8908 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8909 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8910 return true;
8912 case ZERO_EXTEND:
8913 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8914 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8915 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8916 return true;
8918 case SIGN_EXTEND:
8919 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8920 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8921 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8922 return true;
8924 case PLUS:
8925 switch (mode)
8927 case QImode:
8928 if (AVR_HAVE_MUL
8929 && MULT == GET_CODE (XEXP (x, 0))
8930 && register_operand (XEXP (x, 1), QImode))
8932 /* multiply-add */
8933 *total = COSTS_N_INSNS (speed ? 4 : 3);
8934 /* multiply-add with constant: will be split and load constant. */
8935 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8936 *total = COSTS_N_INSNS (1) + *total;
8937 return true;
8939 *total = COSTS_N_INSNS (1);
8940 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8941 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8942 break;
8944 case HImode:
8945 if (AVR_HAVE_MUL
8946 && (MULT == GET_CODE (XEXP (x, 0))
8947 || ASHIFT == GET_CODE (XEXP (x, 0)))
8948 && register_operand (XEXP (x, 1), HImode)
8949 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8950 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8952 /* multiply-add */
8953 *total = COSTS_N_INSNS (speed ? 5 : 4);
8954 /* multiply-add with constant: will be split and load constant. */
8955 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8956 *total = COSTS_N_INSNS (1) + *total;
8957 return true;
8959 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8961 *total = COSTS_N_INSNS (2);
8962 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8963 speed);
8965 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8966 *total = COSTS_N_INSNS (1);
8967 else
8968 *total = COSTS_N_INSNS (2);
8969 break;
8971 case PSImode:
8972 if (!CONST_INT_P (XEXP (x, 1)))
8974 *total = COSTS_N_INSNS (3);
8975 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8976 speed);
8978 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8979 *total = COSTS_N_INSNS (2);
8980 else
8981 *total = COSTS_N_INSNS (3);
8982 break;
8984 case SImode:
8985 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8987 *total = COSTS_N_INSNS (4);
8988 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8989 speed);
8991 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8992 *total = COSTS_N_INSNS (1);
8993 else
8994 *total = COSTS_N_INSNS (4);
8995 break;
8997 default:
8998 return false;
9000 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9001 return true;
9003 case MINUS:
9004 if (AVR_HAVE_MUL
9005 && QImode == mode
9006 && register_operand (XEXP (x, 0), QImode)
9007 && MULT == GET_CODE (XEXP (x, 1)))
9009 /* multiply-sub */
9010 *total = COSTS_N_INSNS (speed ? 4 : 3);
9011 /* multiply-sub with constant: will be split and load constant. */
9012 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9013 *total = COSTS_N_INSNS (1) + *total;
9014 return true;
9016 if (AVR_HAVE_MUL
9017 && HImode == mode
9018 && register_operand (XEXP (x, 0), HImode)
9019 && (MULT == GET_CODE (XEXP (x, 1))
9020 || ASHIFT == GET_CODE (XEXP (x, 1)))
9021 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
9022 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
9024 /* multiply-sub */
9025 *total = COSTS_N_INSNS (speed ? 5 : 4);
9026 /* multiply-sub with constant: will be split and load constant. */
9027 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9028 *total = COSTS_N_INSNS (1) + *total;
9029 return true;
9031 /* FALLTHRU */
9032 case AND:
9033 case IOR:
9034 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9035 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9036 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9037 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9038 return true;
9040 case XOR:
9041 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9042 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9043 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9044 return true;
9046 case MULT:
9047 switch (mode)
9049 case QImode:
9050 if (AVR_HAVE_MUL)
9051 *total = COSTS_N_INSNS (!speed ? 3 : 4);
9052 else if (!speed)
9053 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9054 else
9055 return false;
9056 break;
9058 case HImode:
9059 if (AVR_HAVE_MUL)
9061 rtx op0 = XEXP (x, 0);
9062 rtx op1 = XEXP (x, 1);
9063 enum rtx_code code0 = GET_CODE (op0);
9064 enum rtx_code code1 = GET_CODE (op1);
9065 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
9066 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
9068 if (ex0
9069 && (u8_operand (op1, HImode)
9070 || s8_operand (op1, HImode)))
9072 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9073 return true;
9075 if (ex0
9076 && register_operand (op1, HImode))
9078 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9079 return true;
9081 else if (ex0 || ex1)
9083 *total = COSTS_N_INSNS (!speed ? 3 : 5);
9084 return true;
9086 else if (register_operand (op0, HImode)
9087 && (u8_operand (op1, HImode)
9088 || s8_operand (op1, HImode)))
9090 *total = COSTS_N_INSNS (!speed ? 6 : 9);
9091 return true;
9093 else
9094 *total = COSTS_N_INSNS (!speed ? 7 : 10);
9096 else if (!speed)
9097 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9098 else
9099 return false;
9100 break;
9102 case PSImode:
9103 if (!speed)
9104 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9105 else
9106 *total = 10;
9107 break;
9109 case SImode:
9110 if (AVR_HAVE_MUL)
9112 if (!speed)
9114 /* Add some additional costs besides CALL like moves etc. */
9116 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9118 else
9120 /* Just a rough estimate. Even with -O2 we don't want bulky
9121 code expanded inline. */
9123 *total = COSTS_N_INSNS (25);
9126 else
9128 if (speed)
9129 *total = COSTS_N_INSNS (300);
9130 else
9131 /* Add some additional costs besides CALL like moves etc. */
9132 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9135 return true;
9137 default:
9138 return false;
9140 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9141 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9142 return true;
9144 case DIV:
9145 case MOD:
9146 case UDIV:
9147 case UMOD:
9148 if (!speed)
9149 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9150 else
9151 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
9152 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9153 /* For div/mod with const-int divisor we have at least the cost of
9154 loading the divisor. */
9155 if (CONST_INT_P (XEXP (x, 1)))
9156 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
9158 *total += COSTS_N_INSNS (2);
9159 return true;
9161 case ROTATE:
9162 switch (mode)
9164 case QImode:
9165 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
9166 *total = COSTS_N_INSNS (1);
9168 break;
9170 case HImode:
9171 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
9172 *total = COSTS_N_INSNS (3);
9174 break;
9176 case SImode:
9177 if (CONST_INT_P (XEXP (x, 1)))
9178 switch (INTVAL (XEXP (x, 1)))
9180 case 8:
9181 case 24:
9182 *total = COSTS_N_INSNS (5);
9183 break;
9184 case 16:
9185 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
9186 break;
9188 break;
9190 default:
9191 return false;
9193 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9194 return true;
9196 case ASHIFT:
9197 switch (mode)
9199 case QImode:
9200 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9202 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9203 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9204 speed);
9206 else
9208 val = INTVAL (XEXP (x, 1));
9209 if (val == 7)
9210 *total = COSTS_N_INSNS (3);
9211 else if (val >= 0 && val <= 7)
9212 *total = COSTS_N_INSNS (val);
9213 else
9214 *total = COSTS_N_INSNS (1);
9216 break;
9218 case HImode:
9219 if (AVR_HAVE_MUL)
9221 if (const_2_to_7_operand (XEXP (x, 1), HImode)
9222 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9223 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9225 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9226 return true;
9230 if (const1_rtx == (XEXP (x, 1))
9231 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9233 *total = COSTS_N_INSNS (2);
9234 return true;
9237 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9239 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9241 speed);
9243 else
9244 switch (INTVAL (XEXP (x, 1)))
9246 case 0:
9247 *total = 0;
9248 break;
9249 case 1:
9250 case 8:
9251 *total = COSTS_N_INSNS (2);
9252 break;
9253 case 9:
9254 *total = COSTS_N_INSNS (3);
9255 break;
9256 case 2:
9257 case 3:
9258 case 10:
9259 case 15:
9260 *total = COSTS_N_INSNS (4);
9261 break;
9262 case 7:
9263 case 11:
9264 case 12:
9265 *total = COSTS_N_INSNS (5);
9266 break;
9267 case 4:
9268 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9269 break;
9270 case 6:
9271 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9272 break;
9273 case 5:
9274 *total = COSTS_N_INSNS (!speed ? 5 : 10);
9275 break;
9276 default:
9277 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9278 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9279 speed);
9281 break;
9283 case PSImode:
9284 if (!CONST_INT_P (XEXP (x, 1)))
9286 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9288 else
9289 switch (INTVAL (XEXP (x, 1)))
9291 case 0:
9292 *total = 0;
9293 break;
9294 case 1:
9295 case 8:
9296 case 16:
9297 *total = COSTS_N_INSNS (3);
9298 break;
9299 case 23:
9300 *total = COSTS_N_INSNS (5);
9301 break;
9302 default:
9303 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9304 break;
9306 break;
9308 case SImode:
9309 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9311 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9312 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9313 speed);
9315 else
9316 switch (INTVAL (XEXP (x, 1)))
9318 case 0:
9319 *total = 0;
9320 break;
9321 case 24:
9322 *total = COSTS_N_INSNS (3);
9323 break;
9324 case 1:
9325 case 8:
9326 case 16:
9327 *total = COSTS_N_INSNS (4);
9328 break;
9329 case 31:
9330 *total = COSTS_N_INSNS (6);
9331 break;
9332 case 2:
9333 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9334 break;
9335 default:
9336 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9337 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9338 speed);
9340 break;
9342 default:
9343 return false;
9345 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9346 return true;
9348 case ASHIFTRT:
9349 switch (mode)
9351 case QImode:
9352 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9354 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9355 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9356 speed);
9358 else
9360 val = INTVAL (XEXP (x, 1));
9361 if (val == 6)
9362 *total = COSTS_N_INSNS (4);
9363 else if (val == 7)
9364 *total = COSTS_N_INSNS (2);
9365 else if (val >= 0 && val <= 7)
9366 *total = COSTS_N_INSNS (val);
9367 else
9368 *total = COSTS_N_INSNS (1);
9370 break;
9372 case HImode:
9373 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9375 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9377 speed);
9379 else
9380 switch (INTVAL (XEXP (x, 1)))
9382 case 0:
9383 *total = 0;
9384 break;
9385 case 1:
9386 *total = COSTS_N_INSNS (2);
9387 break;
9388 case 15:
9389 *total = COSTS_N_INSNS (3);
9390 break;
9391 case 2:
9392 case 7:
9393 case 8:
9394 case 9:
9395 *total = COSTS_N_INSNS (4);
9396 break;
9397 case 10:
9398 case 14:
9399 *total = COSTS_N_INSNS (5);
9400 break;
9401 case 11:
9402 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9403 break;
9404 case 12:
9405 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9406 break;
9407 case 6:
9408 case 13:
9409 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9410 break;
9411 default:
9412 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9413 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9414 speed);
9416 break;
9418 case PSImode:
9419 if (!CONST_INT_P (XEXP (x, 1)))
9421 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9423 else
9424 switch (INTVAL (XEXP (x, 1)))
9426 case 0:
9427 *total = 0;
9428 break;
9429 case 1:
9430 *total = COSTS_N_INSNS (3);
9431 break;
9432 case 16:
9433 case 8:
9434 *total = COSTS_N_INSNS (5);
9435 break;
9436 case 23:
9437 *total = COSTS_N_INSNS (4);
9438 break;
9439 default:
9440 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9441 break;
9443 break;
9445 case SImode:
9446 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9448 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9449 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9450 speed);
9452 else
9453 switch (INTVAL (XEXP (x, 1)))
9455 case 0:
9456 *total = 0;
9457 break;
9458 case 1:
9459 *total = COSTS_N_INSNS (4);
9460 break;
9461 case 8:
9462 case 16:
9463 case 24:
9464 *total = COSTS_N_INSNS (6);
9465 break;
9466 case 2:
9467 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9468 break;
9469 case 31:
9470 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9471 break;
9472 default:
9473 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9474 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9475 speed);
9477 break;
9479 default:
9480 return false;
9482 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9483 return true;
9485 case LSHIFTRT:
9486 switch (mode)
9488 case QImode:
9489 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9491 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9492 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9493 speed);
9495 else
9497 val = INTVAL (XEXP (x, 1));
9498 if (val == 7)
9499 *total = COSTS_N_INSNS (3);
9500 else if (val >= 0 && val <= 7)
9501 *total = COSTS_N_INSNS (val);
9502 else
9503 *total = COSTS_N_INSNS (1);
9505 break;
9507 case HImode:
9508 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9510 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9511 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9512 speed);
9514 else
9515 switch (INTVAL (XEXP (x, 1)))
9517 case 0:
9518 *total = 0;
9519 break;
9520 case 1:
9521 case 8:
9522 *total = COSTS_N_INSNS (2);
9523 break;
9524 case 9:
9525 *total = COSTS_N_INSNS (3);
9526 break;
9527 case 2:
9528 case 10:
9529 case 15:
9530 *total = COSTS_N_INSNS (4);
9531 break;
9532 case 7:
9533 case 11:
9534 *total = COSTS_N_INSNS (5);
9535 break;
9536 case 3:
9537 case 12:
9538 case 13:
9539 case 14:
9540 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9541 break;
9542 case 4:
9543 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9544 break;
9545 case 5:
9546 case 6:
9547 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9548 break;
9549 default:
9550 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9551 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9552 speed);
9554 break;
9556 case PSImode:
9557 if (!CONST_INT_P (XEXP (x, 1)))
9559 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9561 else
9562 switch (INTVAL (XEXP (x, 1)))
9564 case 0:
9565 *total = 0;
9566 break;
9567 case 1:
9568 case 8:
9569 case 16:
9570 *total = COSTS_N_INSNS (3);
9571 break;
9572 case 23:
9573 *total = COSTS_N_INSNS (5);
9574 break;
9575 default:
9576 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9577 break;
9579 break;
9581 case SImode:
9582 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9584 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9585 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9586 speed);
9588 else
9589 switch (INTVAL (XEXP (x, 1)))
9591 case 0:
9592 *total = 0;
9593 break;
9594 case 1:
9595 *total = COSTS_N_INSNS (4);
9596 break;
9597 case 2:
9598 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9599 break;
9600 case 8:
9601 case 16:
9602 case 24:
9603 *total = COSTS_N_INSNS (4);
9604 break;
9605 case 31:
9606 *total = COSTS_N_INSNS (6);
9607 break;
9608 default:
9609 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9610 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9611 speed);
9613 break;
9615 default:
9616 return false;
9618 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9619 return true;
9621 case COMPARE:
9622 switch (GET_MODE (XEXP (x, 0)))
9624 case QImode:
9625 *total = COSTS_N_INSNS (1);
9626 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9628 break;
9630 case HImode:
9631 *total = COSTS_N_INSNS (2);
9632 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9633 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9634 else if (INTVAL (XEXP (x, 1)) != 0)
9635 *total += COSTS_N_INSNS (1);
9636 break;
9638 case PSImode:
9639 *total = COSTS_N_INSNS (3);
9640 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9641 *total += COSTS_N_INSNS (2);
9642 break;
9644 case SImode:
9645 *total = COSTS_N_INSNS (4);
9646 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9647 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9648 else if (INTVAL (XEXP (x, 1)) != 0)
9649 *total += COSTS_N_INSNS (3);
9650 break;
9652 default:
9653 return false;
9655 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9656 return true;
9658 case TRUNCATE:
9659 if (AVR_HAVE_MUL
9660 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9661 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9662 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9664 if (QImode == mode || HImode == mode)
9666 *total = COSTS_N_INSNS (2);
9667 return true;
9670 break;
9672 default:
9673 break;
9675 return false;
9679 /* Implement `TARGET_RTX_COSTS'. */
9681 static bool
9682 avr_rtx_costs (rtx x, int codearg, int outer_code,
9683 int opno, int *total, bool speed)
9685 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9686 opno, total, speed);
9688 if (avr_log.rtx_costs)
9690 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9691 done, speed ? "speed" : "size", *total, outer_code, x);
9694 return done;
9698 /* Implement `TARGET_ADDRESS_COST'. */
9700 static int
9701 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9702 addr_space_t as ATTRIBUTE_UNUSED,
9703 bool speed ATTRIBUTE_UNUSED)
9705 int cost = 4;
9707 if (GET_CODE (x) == PLUS
9708 && CONST_INT_P (XEXP (x, 1))
9709 && (REG_P (XEXP (x, 0))
9710 || GET_CODE (XEXP (x, 0)) == SUBREG))
9712 if (INTVAL (XEXP (x, 1)) >= 61)
9713 cost = 18;
9715 else if (CONSTANT_ADDRESS_P (x))
9717 if (optimize > 0
9718 && io_address_operand (x, QImode))
9719 cost = 2;
9722 if (avr_log.address_cost)
9723 avr_edump ("\n%?: %d = %r\n", cost, x);
9725 return cost;
9728 /* Test for extra memory constraint 'Q'.
9729 It's a memory address based on Y or Z pointer with valid displacement. */
9732 extra_constraint_Q (rtx x)
9734 int ok = 0;
9736 if (GET_CODE (XEXP (x,0)) == PLUS
9737 && REG_P (XEXP (XEXP (x,0), 0))
9738 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9739 && (INTVAL (XEXP (XEXP (x,0), 1))
9740 <= MAX_LD_OFFSET (GET_MODE (x))))
9742 rtx xx = XEXP (XEXP (x,0), 0);
9743 int regno = REGNO (xx);
9745 ok = (/* allocate pseudos */
9746 regno >= FIRST_PSEUDO_REGISTER
9747 /* strictly check */
9748 || regno == REG_Z || regno == REG_Y
9749 /* XXX frame & arg pointer checks */
9750 || xx == frame_pointer_rtx
9751 || xx == arg_pointer_rtx);
9753 if (avr_log.constraints)
9754 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9755 ok, reload_completed, reload_in_progress, x);
9758 return ok;
9761 /* Convert condition code CONDITION to the valid AVR condition code. */
9763 RTX_CODE
9764 avr_normalize_condition (RTX_CODE condition)
9766 switch (condition)
9768 case GT:
9769 return GE;
9770 case GTU:
9771 return GEU;
9772 case LE:
9773 return LT;
9774 case LEU:
9775 return LTU;
9776 default:
9777 gcc_unreachable ();
9781 /* Helper function for `avr_reorg'. */
9783 static rtx
9784 avr_compare_pattern (rtx insn)
9786 rtx pattern = single_set (insn);
9788 if (pattern
9789 && NONJUMP_INSN_P (insn)
9790 && SET_DEST (pattern) == cc0_rtx
9791 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9793 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9794 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9796 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9797 They must not be swapped, thus skip them. */
9799 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9800 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9801 return pattern;
9804 return NULL_RTX;
9807 /* Helper function for `avr_reorg'. */
9809 /* Expansion of switch/case decision trees leads to code like
9811 cc0 = compare (Reg, Num)
9812 if (cc0 == 0)
9813 goto L1
9815 cc0 = compare (Reg, Num)
9816 if (cc0 > 0)
9817 goto L2
9819 The second comparison is superfluous and can be deleted.
9820 The second jump condition can be transformed from a
9821 "difficult" one to a "simple" one because "cc0 > 0" and
9822 "cc0 >= 0" will have the same effect here.
   This function relies on the way switch/case is being expanded
9825 as binary decision tree. For example code see PR 49903.
9827 Return TRUE if optimization performed.
9828 Return FALSE if nothing changed.
9830 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9832 We don't want to do this in text peephole because it is
9833 tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
9836 RTL peephole won't do because peephole2 does not scan across
9837 basic blocks. */
static bool
avr_reorg_remove_redundant_compare (rtx insn1)
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2 */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both branches must be single sets of PC wrapping an IF_THEN_ELSE,
     and the two comparisons must be structurally identical.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
      return false;

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 (EQ), the comparison must be
     register vs. const_int, and both branches must jump to a label
     with fall-through (PC) as the else-arm.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
      return false;

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

     if (x == VAL) goto L1;
     if (x > VAL) goto L2;

     with easy

     if (x == VAL) goto L1;
     if (x >= VAL) goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
      case EQ:
      case LT: case LTU:
      case GE: case GEU:
        /* Already simple for AVR: keep the condition as is.  */
        break;

      case LE: case LEU:
      case GT: case GTU:
        /* Strict/non-strict swap is valid here because the EQ case
           has already been branched away by the first jump.  */
        code = avr_normalize_condition (code);
        break;

      default:
        return false;

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  */

static void
avr_reorg (void)
  rtx insn = get_insns();

  /* Scan every real insn looking for cc0 = compare patterns.  */

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      /* First try to delete a redundant back-to-back comparison.  */

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
          continue;

      if (compare_diff_p (insn))
	  /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
              /* Register-register compare: swap the operands and the
                 branch condition to turn the difficult branch into a
                 simple one.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              /* Force re-recognition since the pattern was mutated.  */
              INSN_CODE (next) = -1;
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
              /* Register-constant compare: if bumping the constant by
                 one lets us use a non-strict condition, do so.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
/* Return the number of the first hard register holding a function
   return value (R24 on AVR).  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
10060 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10062 static bool
10063 avr_function_value_regno_p (const unsigned int regno)
10065 return (regno == avr_ret_register ());
10069 /* Implement `TARGET_LIBCALL_VALUE'. */
10070 /* Create an RTX representing the place where a
10071 library function returns a value of mode MODE. */
10073 static rtx
10074 avr_libcall_value (enum machine_mode mode,
10075 const_rtx func ATTRIBUTE_UNUSED)
10077 int offs = GET_MODE_SIZE (mode);
10079 if (offs <= 4)
10080 offs = (offs + 1) & ~1;
10082 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
10086 /* Implement `TARGET_FUNCTION_VALUE'. */
10087 /* Create an RTX representing the place where a
10088 function returns a value of data type VALTYPE. */
10090 static rtx
10091 avr_function_value (const_tree type,
10092 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
10093 bool outgoing ATTRIBUTE_UNUSED)
10095 unsigned int offs;
10097 if (TYPE_MODE (type) != BLKmode)
10098 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
10100 offs = int_size_in_bytes (type);
10101 if (offs < 2)
10102 offs = 2;
10103 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
10104 offs = GET_MODE_SIZE (SImode);
10105 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
10106 offs = GET_MODE_SIZE (DImode);
10108 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
10112 test_hard_reg_class (enum reg_class rclass, rtx x)
10114 int regno = true_regnum (x);
10115 if (regno < 0)
10116 return 0;
10118 if (TEST_HARD_REG_CLASS (rclass, regno))
10119 return 1;
10121 return 0;
/* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx insn)
  /* Devices carrying the skip erratum must never skip a 2-word insn,
     and anything that is not exactly 2 words long does not qualify.  */
  if ((avr_current_device->dev_attribute & AVR_ERRATA_SKIP)
      || !insn
      || 2 != get_attr_length (insn))
      return false;

  /* Only a few named patterns can expand to 2-word instructions.  */
  switch (INSN_CODE (insn))
      default:
        return false;

      case CODE_FOR_movqi_insn:
      case CODE_FOR_movuqq_insn:
      case CODE_FOR_movqq_insn:
          rtx set = single_set (insn);
          rtx src = SET_SRC (set);
          rtx dest = SET_DEST (set);

          /* Factor out LDS and STS from movqi_insn.  */

          if (MEM_P (dest)
              && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
              /* Store to a constant address: STS (2 words).  */
              return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          else if (REG_P (dest)
                   && MEM_P (src))
              /* Load from a constant address: LDS (2 words).  */
              return CONSTANT_ADDRESS_P (XEXP (src, 0));

          return false;

      case CODE_FOR_call_insn:
      case CODE_FOR_call_value_insn:
        /* A 2-word call/call_value is always skippable here.  */
        return true;
10175 jump_over_one_insn_p (rtx insn, rtx dest)
10177 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
10178 ? XEXP (dest, 0)
10179 : dest);
10180 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
10181 int dest_addr = INSN_ADDRESSES (uid);
10182 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
10184 return (jump_offset == 1
10185 || (jump_offset == 2
10186 && avr_2word_insn_p (next_active_insn (insn))));
10190 /* Worker function for `HARD_REGNO_MODE_OK'. */
10191 /* Returns 1 if a value of mode MODE can be stored starting with hard
10192 register number REGNO. On the enhanced core, anything larger than
10193 1 byte must start in even numbered register for "movw" to work
10194 (this way we don't have to check for odd registers everywhere). */
10197 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
10199 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10200 Disallowing QI et al. in these regs might lead to code like
10201 (set (subreg:QI (reg:HI 28) n) ...)
10202 which will result in wrong code because reload does not
10203 handle SUBREGs of hard regsisters like this.
10204 This could be fixed in reload. However, it appears
10205 that fixing reload is not wanted by reload people. */
10207 /* Any GENERAL_REGS register can hold 8-bit values. */
10209 if (GET_MODE_SIZE (mode) == 1)
10210 return 1;
10212 /* FIXME: Ideally, the following test is not needed.
10213 However, it turned out that it can reduce the number
10214 of spill fails. AVR and it's poor endowment with
10215 address registers is extreme stress test for reload. */
10217 if (GET_MODE_SIZE (mode) >= 4
10218 && regno >= REG_X)
10219 return 0;
10221 /* All modes larger than 8 bits should start in an even register. */
10223 return !(regno & 1);
10227 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10230 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10232 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10233 represent valid hard registers like, e.g. HI:29. Returning TRUE
10234 for such registers can lead to performance degradation as mentioned
10235 in PR53595. Thus, report invalid hard registers as FALSE. */
10237 if (!avr_hard_regno_mode_ok (regno, mode))
10238 return 0;
10240 /* Return true if any of the following boundaries is crossed:
10241 17/18, 27/28 and 29/30. */
10243 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10244 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10245 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
10249 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10251 enum reg_class
10252 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10253 addr_space_t as, RTX_CODE outer_code,
10254 RTX_CODE index_code ATTRIBUTE_UNUSED)
10256 if (!ADDR_SPACE_GENERIC_P (as))
10258 return POINTER_Z_REGS;
10261 if (!avr_strict_X)
10262 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10264 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10268 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10270 bool
10271 avr_regno_mode_code_ok_for_base_p (int regno,
10272 enum machine_mode mode ATTRIBUTE_UNUSED,
10273 addr_space_t as ATTRIBUTE_UNUSED,
10274 RTX_CODE outer_code,
10275 RTX_CODE index_code ATTRIBUTE_UNUSED)
10277 bool ok = false;
10279 if (!ADDR_SPACE_GENERIC_P (as))
10281 if (regno < FIRST_PSEUDO_REGISTER
10282 && regno == REG_Z)
10284 return true;
10287 if (reg_renumber)
10289 regno = reg_renumber[regno];
10291 if (regno == REG_Z)
10293 return true;
10297 return false;
10300 if (regno < FIRST_PSEUDO_REGISTER
10301 && (regno == REG_X
10302 || regno == REG_Y
10303 || regno == REG_Z
10304 || regno == ARG_POINTER_REGNUM))
10306 ok = true;
10308 else if (reg_renumber)
10310 regno = reg_renumber[regno];
10312 if (regno == REG_X
10313 || regno == REG_Y
10314 || regno == REG_Z
10315 || regno == ARG_POINTER_REGNUM)
10317 ok = true;
10321 if (avr_strict_X
10322 && PLUS == outer_code
10323 && regno == REG_X)
10325 ok = false;
10328 return ok;
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Value currently held in CLOBBER_REG; start with a value no byte
     can have so the "reuse clobber" test below fails initially.  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  /* Whether the T flag has already been SET for bit insertion.  */
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants: materialize each byte via lo8/hi8/hlo8/hhi8,
         through the clobber register if the target byte is no LD_REG.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              /* High word equals low word: done with all bytes.  */
              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The wanted value already sits in this very byte (it is the
         clobber register): nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* SET the T flag once; it can then seed any number of BLDs.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
10563 /* Reload the constant OP[1] into the HI register OP[0].
10564 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10565 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10566 need a clobber reg or have to cook one up.
10568 PLEN == NULL: Output instructions.
10569 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10570 by the insns printed.
10572 Return "". */
10574 const char*
10575 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10577 output_reload_in_const (op, clobber_reg, plen, false);
10578 return "";
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Dry-run both variants (LEN != NULL only computes lengths).  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the 3 words of the pre-clear sequence above.  */

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
10643 const char*
10644 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10646 output_reload_in_const (op, clobber_reg, len, false);
10647 return "";
10651 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10653 void
10654 avr_output_addr_vec_elt (FILE *stream, int value)
10656 if (AVR_HAVE_JMP_CALL)
10657 fprintf (stream, "\t.word gs(.L%d)\n", value);
10658 else
10659 fprintf (stream, "\trjmp .L%d\n", value);
10663 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10664 /* Returns true if SCRATCH are safe to be allocated as a scratch
10665 registers (for a define_peephole2) in the current function. */
10667 static bool
10668 avr_hard_regno_scratch_ok (unsigned int regno)
10670 /* Interrupt functions can only use registers that have already been saved
10671 by the prologue, even if they would normally be call-clobbered. */
10673 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10674 && !df_regs_ever_live_p (regno))
10675 return false;
10677 /* Don't allow hard registers that might be part of the frame pointer.
10678 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10679 and don't care for a frame pointer that spans more than one register. */
10681 if ((!reload_completed || frame_pointer_needed)
10682 && (regno == REG_Y || regno == REG_Y + 1))
10684 return false;
10687 return true;
10691 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10692 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10695 avr_hard_regno_rename_ok (unsigned int old_reg,
10696 unsigned int new_reg)
10698 /* Interrupt functions can only use registers that have already been
10699 saved by the prologue, even if they would normally be
10700 call-clobbered. */
10702 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10703 && !df_regs_ever_live_p (new_reg))
10704 return 0;
10706 /* Don't allow hard registers that might be part of the frame pointer.
10707 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10708 and don't care for a frame pointer that spans more than one register. */
10710 if ((!reload_completed || frame_pointer_needed)
10711 && (old_reg == REG_Y || old_reg == REG_Y + 1
10712 || new_reg == REG_Y || new_reg == REG_Y + 1))
10714 return 0;
10717 return 1;
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* A jump longer than one word needs JMP and hence a reversed skip.  */
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* Normalize sign tests to bit tests against zero.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Low I/O addresses can be tested directly with SBIS/SBIC;
         other I/O locations must be read into __tmp_reg__ first.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  /* The skip above jumps over the first word of the sequence below.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
10788 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10790 static void
10791 avr_asm_out_ctor (rtx symbol, int priority)
10793 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10794 default_ctor_section_asm_out_constructor (symbol, priority);
10798 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10800 static void
10801 avr_asm_out_dtor (rtx symbol, int priority)
10803 fputs ("\t.global __do_global_dtors\n", asm_out_file);
10804 default_dtor_section_asm_out_destructor (symbol, priority);
10808 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10810 static bool
10811 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10813 if (TYPE_MODE (type) == BLKmode)
10815 HOST_WIDE_INT size = int_size_in_bytes (type);
10816 return (size == -1 || size > 8);
10818 else
10819 return false;
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0:  the minimum
   number of case labels for which a jump table is preferred.  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10839 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10841 static enum machine_mode
10842 avr_addr_space_address_mode (addr_space_t as)
10844 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10848 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10850 static enum machine_mode
10851 avr_addr_space_pointer_mode (addr_space_t as)
10853 return avr_addr_space_address_mode (as);
10857 /* Helper for following function. */
10859 static bool
10860 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10862 gcc_assert (REG_P (reg));
10864 if (strict)
10866 return REGNO (reg) == REG_Z;
10869 /* Avoid combine to propagate hard regs. */
10871 if (can_create_pseudo_p()
10872 && REGNO (reg) < REG_Z)
10874 return false;
10877 return true;
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* Return true if X is a valid address for an access in MODE to address
   space AS, applying STRICT register checking if requested.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is addressed through Z, optionally with post-increment.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* A (lo_sum hi lo) address: the low part must be Z.  */

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump of the decision, controlled by -mlog=.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
10963 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10965 static rtx
10966 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10967 enum machine_mode mode, addr_space_t as)
10969 if (ADDR_SPACE_GENERIC_P (as))
10970 return avr_legitimize_address (x, old_x, mode);
10972 if (avr_log.legitimize_address)
10974 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10977 return old_x;
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* Convert pointer SRC from TYPE_FROM to TYPE_TO, emitting the RTL that
   widens (16 -> 24 bit) or narrows (24 -> 16 bit) it as needed.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to find an underlying symbol.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Segment 0 can use a plain zero-extension; otherwise the MSB of
         the 24-bit pointer is loaded with the segment number.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */

  return src;
}
11050 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11052 static bool
11053 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
11054 addr_space_t superset ATTRIBUTE_UNUSED)
11056 /* Allow any kind of pointer mess. */
11058 return true;
/* Implement `TARGET_CONVERT_TO_TYPE'.  */
/* If EXPR is a pointer conversion into TYPE that changes the address
   space, possibly diagnose it; return the converted tree or NULL_TREE
   to fall back to the default conversion.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

        (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

        void f (const __flash char*);

        void g (const char *p)
        {
          f ((const __flash*) p);
        }

     under the assumption that an explicit casts means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* MEMX encloses all other spaces, so a conversion into MEMX
         is always fine and never warned about.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot copy into flash; and only constant byte counts are handled.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: a __memx access.  The loop counter is
         pinned to R24 because the copy is done by a libgcc-style helper
         insn with fixed registers.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into low 16 bits and high 8 bits.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_current_device->n_flash > 1)
        {
          /* Multi-segment flash: select the segment via RAMPZ.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single flash segment: plain LPM addressing suffices.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* __memx: the load instruction is selected at run time, so call
         the out-of-line worker; the high address byte goes into R23.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: output the instructions;
   PLEN != NULL: only compute the length (in words) into *PLEN.  */

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW only works on the upper register pairs (ADDW_REGS).  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* No LPM with post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
11317 /* Helper for __builtin_avr_delay_cycles */
11319 static rtx
11320 avr_mem_clobber (void)
11322 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11323 MEM_VOLATILE_P (mem) = 1;
11324 return mem;
/* Expand __builtin_avr_delay_cycles: emit a sequence of delay loops and
   NOPs that together burn exactly the number of cycles requested by the
   (constant) OPERANDS0, truncated to 32 bits.  The tiers below use
   progressively narrower loop counters (SI, 24-bit, HI, QI), each loop
   iteration costing 6/5/4/3 cycles respectively; the remainder (< 6
   cycles) is padded with 2- and 1-cycle NOP insns.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* Tier 4: SImode counter, 6 cycles per iteration + 9 overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Tier 3: 24-bit counter, 5 cycles per iteration + 7 overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Tier 2: HImode counter, 4 cycles per iteration + 5 overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Tier 1: QImode counter, 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Pad remaining cycles with 2-cycle and 1-cycle NOPs.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
/* Compute the image of x under f, i.e. perform   x --> f(x)  */
/* F encodes a map on { 0 ... 7 } as 8 nibbles: nibble X holds f(X).
   Arguments outside that domain yield 0.  */

static int
avr_map (unsigned int f, int x)
{
  if (x >= 8)
    return 0;

  return 0xf & (f >> (4 * x));
}
/* Return some metrics of map A.  */
/* Mode selectors for avr_map_metric below.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
11419 static unsigned
11420 avr_map_metric (unsigned int a, int mode)
11422 unsigned i, metric = 0;
11424 for (i = 0; i < 8; i++)
11426 unsigned ai = avr_map (a, i);
11428 if (mode == MAP_FIXED_0_7)
11429 metric += ai == i;
11430 else if (mode == MAP_NONFIXED_0_7)
11431 metric += ai < 8 && ai != i;
11432 else if (mode == MAP_MASK_FIXED_0_7)
11433 metric |= ((unsigned) (ai == i)) << i;
11434 else if (mode == MAP_PREIMAGE_0_7)
11435 metric += ai < 8;
11436 else if (mode == MAP_MASK_PREIMAGE_F)
11437 metric |= ((unsigned) (ai == 0xf)) << i;
11438 else
11439 gcc_unreachable();
11442 return metric;
11446 /* Return true if IVAL has a 0xf in its hexadecimal representation
11447 and false, otherwise. Only nibbles 0..7 are taken into account.
11448 Used as constraint helper for C0f and Cxf. */
11450 bool
11451 avr_has_nibble_0xf (rtx ival)
11453 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
11454 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G)  <  cost (F)

   Example: Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
/* Candidate operations G (rotations and shifts with their inverses and
   costs) tried by avr_map_decompose; the MAP field is filled in there.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   VAL_CONST_P tells whether the value to be inserted is a known
   constant (which makes the fold-to-logic variant cheaper).  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  /* Whether F copies any bit from the target value OP[3].  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      /* Accumulate the composed map nibble by nibble, MSB first.  */

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.
   PLEN == NULL: print the BST/BLD instructions;
   PLEN != NULL: only add their length (in words) to *PLEN.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     That way all destination bits fed from the same source bit share
     a single BST.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value

   Returns "" because the output (if any) is emitted through
   avr_asm_len, not through the returned template.  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder operands for avr_move_bits: %0 = result, %1 = bits to
     insert, %2 = target value; %3 is scratch for bit numbers.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted. */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If skipping fixed points saves more than 3 words, preset the
         fixed bits with the EOR/ANDI/EOR merge below instead.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits. */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    /* One AVR_BUILTIN_<NAME> enumerator per entry of builtins.def.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    /* Number of built-ins; also serves as the out-of-range bound
       checked in avr_builtin_decl and avr_expand_builtin.  */
    AVR_BUILTIN_COUNT
  };
/* Static description of one AVR built-in function.  */

struct GTY(()) avr_builtin_description
{
  /* Insn code implementing the built-in, or CODE_FOR_nothing if the
     built-in is expanded by other means (e.g. a libgcc call, see
     avr_expand_builtin).  */
  enum insn_code icode;

  /* Number of arguments the built-in takes.  */
  int n_args;

  /* FUNCTION_DECL registered by avr_init_builtins; NULL_TREE until
     then.  Returned by avr_builtin_decl.  */
  tree fndecl;
};
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID]  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
    /* The fndecl slots start out NULL_TREE and are filled in later by
       avr_init_builtins.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
11756 /* Implement `TARGET_BUILTIN_DECL'. */
11758 static tree
11759 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11761 if (id < AVR_BUILTIN_COUNT)
11762 return avr_bdesc[id].fndecl;
11764 return error_mark_node;
11768 static void
11769 avr_init_builtin_int24 (void)
11771 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11772 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11774 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11775 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  Builds the function
   types needed by builtins.def, registers the __int24/__uint24 types,
   and records each built-in's FUNCTION_DECL in avr_bdesc[].fndecl.  */

static void
avr_init_builtins (void)
{
  /* Function types for the "plain" (non fixed-point) built-ins.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Pointer into the __memx address space, used by the *_memx loads.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* ITYP (T): the integer type with the same precision and signedness
     as fixed-point type T.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

  /* The following helper macros declare, for a fixed-point "size"
     prefix FX (h, n, l, ll, optionally with leading u), the function
     types used by absfx, roundfx, countlsfx, bitsfx and fxbits.  */

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Short aliases for the fract (..r) and accum (..k) type nodes that
     the macros above paste together.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register each built-in under its lower-cased "__builtin_avr_"
     name and remember its decl in avr_bdesc[] so avr_builtin_decl
     can find it again.  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.

   ICODE is the insn code implementing the built-in, EXP the CALL_EXPR
   and TARGET a suggested result register (may be NULL_RTX or of the
   wrong mode, in which case a fresh pseudo is used).  Returns the rtx
   holding the result, or NULL_RTX if no insn pattern was generated.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      enum machine_mode opmode = GET_MODE (op);
      enum machine_mode mode = insn_data[icode].operand[n+1].mode;

      /* SImode (or mode-less constant) argument feeding an HImode
         operand: use its low part.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Built-ins needing special treatment (NOP, DELAY_CYCLES, INSERT_BITS,
   the roundfx family) are handled in the switch; everything else goes
   through a libgcc call or avr_default_expand_builtin.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* The cycle count must be known at compile time.  */

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* Only the first argument (the map) must be constant; the
           actual expansion happens through the insn below.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:
      {
        /* Warn about odd rounding.  Rounding points >= FBIT will have
           no effect.  */

        if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
          break;

        int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

        if (rbit >= (int) GET_MODE_FBIT (mode))
          {
            warning (OPT_Wextra, "rounding to %d bits has no effect for "
                     "fixed-point value with %d fractional bits",
                     rbit, GET_MODE_FBIT (mode));

            /* Rounding is a no-op: just return the first argument.  */

            return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                                EXPAND_NORMAL);
          }
        else if (rbit <= - (int) GET_MODE_IBIT (mode))
          {
            warning (0, "rounding result will always be 0");
            return CONST0_RTX (mode);
          }

        /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

           TR 18037 only specifies results for RP > 0.  However, the
           remaining cases of -IBIT < RP <= 0 can easily be supported
           without any additional overhead.  */

        break; /* round */
      }
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
12144 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12146 static tree
12147 avr_fold_absfx (tree tval)
12149 if (FIXED_CST != TREE_CODE (tval))
12150 return NULL_TREE;
12152 /* Our fixed-points have no padding: Use double_int payload directly. */
12154 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
12155 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
12156 double_int ival = fval.data.sext (bits);
12158 if (!ival.is_negative())
12159 return tval;
12161 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12163 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
12164 ? double_int::max_value (bits, false)
12165 : -ival;
12167 return build_fixed (TREE_TYPE (tval), fval);
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Fold calls to AVR built-ins at the tree level where possible:
   SWAP becomes a rotate, absfx on constants is computed directly,
   bitsfx/fxbits become view-conversions, and insert_bits is folded
   or strength-reduced.  Returns NULL_TREE if no fold applies.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap == rotate left by 4.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* bitsfx/fxbits just reinterpret the bit pattern; same
         precision on both sides, so a VIEW_CONVERT_EXPR suffices.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (tbits ^ tval) & ~F-mask ^ tval merges the two values
               bitwise according to the map.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
/* Initialize the GCC target structure.  */

/* Assembler output.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and calling convention.  */

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Sections.  */

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs.  */

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (see avr_init_builtins etc. above).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx, ...).  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"