gcc/
[official-gcc.git] / gcc / config / avr / avr.c
blob0f1d2c1647a785efbaa82d6be201ef05751ddba8
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2013 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "c-family/c-common.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
58 address space where data is to be located.
59 As the only non-generic address spaces are all located in flash,
60 this can be used to test if data shall go into some .progmem* section.
61 This must be the rightmost field of machine dependent section flags. */
62 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
64 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
65 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
67 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
68 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
69 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
70 do { \
71 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
72 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
73 } while (0)
75 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
76 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
77 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
78 / SYMBOL_FLAG_MACH_DEP)
80 /* Known address spaces. The order must be the same as in the respective
81 enum from avr.h (or designated initialized must be used). */
82 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
84 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
85 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
86 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
87 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
88 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
89 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
90 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
91 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
95 /* Holding RAM addresses of some SFRs used by the compiler and that
96 are unique over all devices in an architecture like 'avr4'. */
98 typedef struct
100 /* SREG: The processor status */
101 int sreg;
103 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
104 int ccp;
105 int rampd;
106 int rampx;
107 int rampy;
109 /* RAMPZ: The high byte of 24-bit address used with ELPM */
110 int rampz;
112 /* SP: The stack pointer and its low and high byte */
113 int sp_l;
114 int sp_h;
115 } avr_addr_t;
117 static avr_addr_t avr_addr;
120 /* Prototypes for local helper functions. */
122 static const char* out_movqi_r_mr (rtx, rtx[], int*);
123 static const char* out_movhi_r_mr (rtx, rtx[], int*);
124 static const char* out_movsi_r_mr (rtx, rtx[], int*);
125 static const char* out_movqi_mr_r (rtx, rtx[], int*);
126 static const char* out_movhi_mr_r (rtx, rtx[], int*);
127 static const char* out_movsi_mr_r (rtx, rtx[], int*);
129 static int get_sequence_length (rtx insns);
130 static int sequent_regs_live (void);
131 static const char *ptrreg_to_str (int);
132 static const char *cond_string (enum rtx_code);
133 static int avr_num_arg_regs (enum machine_mode, const_tree);
134 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
135 int, bool);
136 static void output_reload_in_const (rtx*, rtx, int*, bool);
137 static struct machine_function * avr_init_machine_status (void);
140 /* Prototypes for hook implementors if needed before their implementation. */
142 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
145 /* Allocate registers from r25 to r8 for parameters for function calls. */
146 #define FIRST_CUM_REG 26
148 /* Implicit target register of LPM instruction (R0) */
149 extern GTY(()) rtx lpm_reg_rtx;
150 rtx lpm_reg_rtx;
152 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
153 extern GTY(()) rtx lpm_addr_reg_rtx;
154 rtx lpm_addr_reg_rtx;
156 /* Temporary register RTX (reg:QI TMP_REGNO) */
157 extern GTY(()) rtx tmp_reg_rtx;
158 rtx tmp_reg_rtx;
160 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
161 extern GTY(()) rtx zero_reg_rtx;
162 rtx zero_reg_rtx;
164 /* RTXs for all general purpose registers as QImode */
165 extern GTY(()) rtx all_regs_rtx[32];
166 rtx all_regs_rtx[32];
168 /* SREG, the processor status */
169 extern GTY(()) rtx sreg_rtx;
170 rtx sreg_rtx;
172 /* RAMP* special function registers */
173 extern GTY(()) rtx rampd_rtx;
174 extern GTY(()) rtx rampx_rtx;
175 extern GTY(()) rtx rampy_rtx;
176 extern GTY(()) rtx rampz_rtx;
177 rtx rampd_rtx;
178 rtx rampx_rtx;
179 rtx rampy_rtx;
180 rtx rampz_rtx;
182 /* RTX containing the strings "" and "e", respectively */
183 static GTY(()) rtx xstring_empty;
184 static GTY(()) rtx xstring_e;
186 /* Current architecture. */
187 const avr_arch_t *avr_current_arch;
189 /* Current device. */
190 const avr_mcu_t *avr_current_device;
192 /* Section to put switch tables in. */
193 static GTY(()) section *progmem_swtable_section;
195 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
196 or to address space __flash* or __memx. Only used as singletons inside
197 avr_asm_select_section, but it must not be local there because of GTY. */
198 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
200 /* Condition for insns/expanders from avr-dimode.md. */
201 bool avr_have_dimode = true;
203 /* To track if code will use .bss and/or .data. */
204 bool avr_need_clear_bss_p = false;
205 bool avr_need_copy_data_p = false;
208 /* Transform UP into lowercase and write the result to LO.
209 You must provide enough space for LO. Return LO. */
211 static char*
212 avr_tolower (char *lo, const char *up)
214 char *lo0 = lo;
216 for (; *up; up++, lo++)
217 *lo = TOLOWER (*up);
219 *lo = '\0';
221 return lo0;
225 /* Custom function to count number of set bits. */
227 static inline int
228 avr_popcount (unsigned int val)
230 int pop = 0;
232 while (val)
234 val &= val-1;
235 pop++;
238 return pop;
242 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
243 Return true if the least significant N_BYTES bytes of XVAL all have a
244 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
245 of integers which contains an integer N iff bit N of POP_MASK is set. */
247 bool
248 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
250 int i;
252 enum machine_mode mode = GET_MODE (xval);
254 if (VOIDmode == mode)
255 mode = SImode;
257 for (i = 0; i < n_bytes; i++)
259 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
260 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
262 if (0 == (pop_mask & (1 << avr_popcount (val8))))
263 return false;
266 return true;
270 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
271 the bit representation of X by "casting" it to CONST_INT. */
274 avr_to_int_mode (rtx x)
276 enum machine_mode mode = GET_MODE (x);
278 return VOIDmode == mode
280 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
284 /* Implement `TARGET_OPTION_OVERRIDE'. */
286 static void
287 avr_option_override (void)
289 flag_delete_null_pointer_checks = 0;
291 /* caller-save.c looks for call-clobbered hard registers that are assigned
292 to pseudos that cross calls and tries so save-restore them around calls
293 in order to reduce the number of stack slots needed.
295 This might lead to situations where reload is no more able to cope
296 with the challenge of AVR's very few address registers and fails to
297 perform the requested spills. */
299 if (avr_strict_X)
300 flag_caller_saves = 0;
302 /* Unwind tables currently require a frame pointer for correctness,
303 see toplev.c:process_options(). */
305 if ((flag_unwind_tables
306 || flag_non_call_exceptions
307 || flag_asynchronous_unwind_tables)
308 && !ACCUMULATE_OUTGOING_ARGS)
310 flag_omit_frame_pointer = 0;
313 avr_current_device = &avr_mcu_types[avr_mcu_index];
314 avr_current_arch = &avr_arch_types[avr_current_device->arch];
316 /* RAM addresses of some SFRs common to all devices in respective arch. */
318 /* SREG: Status Register containing flags like I (global IRQ) */
319 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
321 /* RAMPZ: Address' high part when loading via ELPM */
322 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
324 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
325 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
326 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
327 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
329 /* SP: Stack Pointer (SP_H:SP_L) */
330 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
331 avr_addr.sp_h = avr_addr.sp_l + 1;
333 init_machine_status = avr_init_machine_status;
335 avr_log_set_avr_log();
338 /* Function to set up the backend function structure. */
340 static struct machine_function *
341 avr_init_machine_status (void)
343 return ggc_alloc_cleared_machine_function ();
347 /* Implement `INIT_EXPANDERS'. */
348 /* The function works like a singleton. */
350 void
351 avr_init_expanders (void)
353 int regno;
355 for (regno = 0; regno < 32; regno ++)
356 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
358 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
359 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
360 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
362 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
364 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
365 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
366 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
367 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
368 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
370 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
371 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
375 /* Implement `REGNO_REG_CLASS'. */
376 /* Return register class for register R. */
378 enum reg_class
379 avr_regno_reg_class (int r)
381 static const enum reg_class reg_class_tab[] =
383 R0_REG,
384 /* r1 - r15 */
385 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
386 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
387 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
388 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
389 /* r16 - r23 */
390 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
391 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
392 /* r24, r25 */
393 ADDW_REGS, ADDW_REGS,
394 /* X: r26, 27 */
395 POINTER_X_REGS, POINTER_X_REGS,
396 /* Y: r28, r29 */
397 POINTER_Y_REGS, POINTER_Y_REGS,
398 /* Z: r30, r31 */
399 POINTER_Z_REGS, POINTER_Z_REGS,
400 /* SP: SPL, SPH */
401 STACK_REG, STACK_REG
404 if (r <= 33)
405 return reg_class_tab[r];
407 return ALL_REGS;
411 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
413 static bool
414 avr_scalar_mode_supported_p (enum machine_mode mode)
416 if (ALL_FIXED_POINT_MODE_P (mode))
417 return true;
419 if (PSImode == mode)
420 return true;
422 return default_scalar_mode_supported_p (mode);
426 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
428 static bool
429 avr_decl_flash_p (tree decl)
431 if (TREE_CODE (decl) != VAR_DECL
432 || TREE_TYPE (decl) == error_mark_node)
434 return false;
437 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
441 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
442 address space and FALSE, otherwise. */
444 static bool
445 avr_decl_memx_p (tree decl)
447 if (TREE_CODE (decl) != VAR_DECL
448 || TREE_TYPE (decl) == error_mark_node)
450 return false;
453 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
457 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
459 bool
460 avr_mem_flash_p (rtx x)
462 return (MEM_P (x)
463 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
467 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
468 address space and FALSE, otherwise. */
470 bool
471 avr_mem_memx_p (rtx x)
473 return (MEM_P (x)
474 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
478 /* A helper for the subsequent function attribute used to dig for
479 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
481 static inline int
482 avr_lookup_function_attribute1 (const_tree func, const char *name)
484 if (FUNCTION_DECL == TREE_CODE (func))
486 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
488 return true;
491 func = TREE_TYPE (func);
494 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
495 || TREE_CODE (func) == METHOD_TYPE);
497 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
500 /* Return nonzero if FUNC is a naked function. */
502 static int
503 avr_naked_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "naked");
508 /* Return nonzero if FUNC is an interrupt function as specified
509 by the "interrupt" attribute. */
511 static int
512 avr_interrupt_function_p (tree func)
514 return avr_lookup_function_attribute1 (func, "interrupt");
517 /* Return nonzero if FUNC is a signal function as specified
518 by the "signal" attribute. */
520 static int
521 avr_signal_function_p (tree func)
523 return avr_lookup_function_attribute1 (func, "signal");
526 /* Return nonzero if FUNC is an OS_task function. */
528 static int
529 avr_OS_task_function_p (tree func)
531 return avr_lookup_function_attribute1 (func, "OS_task");
534 /* Return nonzero if FUNC is an OS_main function. */
536 static int
537 avr_OS_main_function_p (tree func)
539 return avr_lookup_function_attribute1 (func, "OS_main");
543 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
544 /* Sanity cheching for above function attributes. */
546 static void
547 avr_set_current_function (tree decl)
549 location_t loc;
550 const char *isr;
552 if (decl == NULL_TREE
553 || current_function_decl == NULL_TREE
554 || current_function_decl == error_mark_node
555 || ! cfun->machine
556 || cfun->machine->attributes_checked_p)
557 return;
559 loc = DECL_SOURCE_LOCATION (decl);
561 cfun->machine->is_naked = avr_naked_function_p (decl);
562 cfun->machine->is_signal = avr_signal_function_p (decl);
563 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
564 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
565 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
567 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
569 /* Too much attributes make no sense as they request conflicting features. */
571 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
572 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
573 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
574 " exclusive", "OS_task", "OS_main", isr);
576 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
578 if (cfun->machine->is_naked
579 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
580 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
581 " no effect on %qs function", "OS_task", "OS_main", "naked");
583 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
585 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
586 tree ret = TREE_TYPE (TREE_TYPE (decl));
587 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
589 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
590 using this when it switched from SIGNAL and INTERRUPT to ISR. */
592 if (cfun->machine->is_interrupt)
593 cfun->machine->is_signal = 0;
595 /* Interrupt handlers must be void __vector (void) functions. */
597 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
598 error_at (loc, "%qs function cannot have arguments", isr);
600 if (TREE_CODE (ret) != VOID_TYPE)
601 error_at (loc, "%qs function cannot return a value", isr);
603 /* If the function has the 'signal' or 'interrupt' attribute, ensure
604 that the name of the function is "__vector_NN" so as to catch
605 when the user misspells the vector name. */
607 if (!STR_PREFIX_P (name, "__vector"))
608 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
609 name, isr);
612 /* Don't print the above diagnostics more than once. */
614 cfun->machine->attributes_checked_p = 1;
618 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
621 avr_accumulate_outgoing_args (void)
623 if (!cfun)
624 return TARGET_ACCUMULATE_OUTGOING_ARGS;
626 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
627 what offset is correct. In some cases it is relative to
628 virtual_outgoing_args_rtx and in others it is relative to
629 virtual_stack_vars_rtx. For example code see
630 gcc.c-torture/execute/built-in-setjmp.c
631 gcc.c-torture/execute/builtins/sprintf-chk.c */
633 return (TARGET_ACCUMULATE_OUTGOING_ARGS
634 && !(cfun->calls_setjmp
635 || cfun->has_nonlocal_label));
639 /* Report contribution of accumulated outgoing arguments to stack size. */
641 static inline int
642 avr_outgoing_args_size (void)
644 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
648 /* Implement `STARTING_FRAME_OFFSET'. */
649 /* This is the offset from the frame pointer register to the first stack slot
650 that contains a variable living in the frame. */
653 avr_starting_frame_offset (void)
655 return 1 + avr_outgoing_args_size ();
659 /* Return the number of hard registers to push/pop in the prologue/epilogue
660 of the current function, and optionally store these registers in SET. */
662 static int
663 avr_regs_to_save (HARD_REG_SET *set)
665 int reg, count;
666 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
668 if (set)
669 CLEAR_HARD_REG_SET (*set);
670 count = 0;
672 /* No need to save any registers if the function never returns or
673 has the "OS_task" or "OS_main" attribute. */
675 if (TREE_THIS_VOLATILE (current_function_decl)
676 || cfun->machine->is_OS_task
677 || cfun->machine->is_OS_main)
678 return 0;
680 for (reg = 0; reg < 32; reg++)
682 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
683 any global register variables. */
685 if (fixed_regs[reg])
686 continue;
688 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
689 || (df_regs_ever_live_p (reg)
690 && (int_or_sig_p || !call_used_regs[reg])
691 /* Don't record frame pointer registers here. They are treated
692 indivitually in prologue. */
693 && !(frame_pointer_needed
694 && (reg == REG_Y || reg == (REG_Y+1)))))
696 if (set)
697 SET_HARD_REG_BIT (*set, reg);
698 count++;
701 return count;
705 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
707 static bool
708 avr_allocate_stack_slots_for_args (void)
710 return !cfun->machine->is_naked;
714 /* Return true if register FROM can be eliminated via register TO. */
716 static bool
717 avr_can_eliminate (const int from, const int to)
719 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
720 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
721 || ((from == FRAME_POINTER_REGNUM
722 || from == FRAME_POINTER_REGNUM + 1)
723 && !frame_pointer_needed));
727 /* Implement `TARGET_WARN_FUNC_RETURN'. */
729 static bool
730 avr_warn_func_return (tree decl)
732 /* Naked functions are implemented entirely in assembly, including the
733 return sequence, so suppress warnings about this. */
735 return !avr_naked_function_p (decl);
738 /* Compute offset between arg_pointer and frame_pointer. */
741 avr_initial_elimination_offset (int from, int to)
743 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
744 return 0;
745 else
747 int offset = frame_pointer_needed ? 2 : 0;
748 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
750 offset += avr_regs_to_save (NULL);
751 return (get_frame_size () + avr_outgoing_args_size()
752 + avr_pc_size + 1 + offset);
757 /* Helper for the function below. */
759 static void
760 avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
762 *node = make_node (FIXED_POINT_TYPE);
763 TYPE_SATURATING (*node) = sat_p;
764 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
765 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
766 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
767 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
768 TYPE_ALIGN (*node) = 8;
769 SET_TYPE_MODE (*node, mode);
771 layout_type (*node);
775 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
777 static tree
778 avr_build_builtin_va_list (void)
780 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
781 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
782 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
783 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
784 to the long long accum modes instead of the desired [U]TAmode.
786 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
787 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
788 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
789 libgcc to detect IBIT and FBIT. */
791 avr_adjust_type_node (&ta_type_node, TAmode, 0);
792 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
793 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
794 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
796 unsigned_long_long_accum_type_node = uta_type_node;
797 long_long_accum_type_node = ta_type_node;
798 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
799 sat_long_long_accum_type_node = sat_ta_type_node;
801 /* Dispatch to the default handler. */
803 return std_build_builtin_va_list ();
807 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
808 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
809 frame pointer by +STARTING_FRAME_OFFSET.
810 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
811 avoids creating add/sub of offset in nonlocal goto and setjmp. */
813 static rtx
814 avr_builtin_setjmp_frame_value (void)
816 rtx xval = gen_reg_rtx (Pmode);
817 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
818 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
819 return xval;
823 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
824 This is return address of function. */
827 avr_return_addr_rtx (int count, rtx tem)
829 rtx r;
831 /* Can only return this function's return address. Others not supported. */
832 if (count)
833 return NULL;
835 if (AVR_3_BYTE_PC)
837 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
838 warning (0, "%<builtin_return_address%> contains only 2 bytes"
839 " of address");
841 else
842 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
844 r = gen_rtx_PLUS (Pmode, tem, r);
845 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
846 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
847 return r;
850 /* Return 1 if the function epilogue is just a single "ret". */
853 avr_simple_epilogue (void)
855 return (! frame_pointer_needed
856 && get_frame_size () == 0
857 && avr_outgoing_args_size() == 0
858 && avr_regs_to_save (NULL) == 0
859 && ! cfun->machine->is_interrupt
860 && ! cfun->machine->is_signal
861 && ! cfun->machine->is_naked
862 && ! TREE_THIS_VOLATILE (current_function_decl));
865 /* This function checks sequence of live registers. */
867 static int
868 sequent_regs_live (void)
870 int reg;
871 int live_seq = 0;
872 int cur_seq = 0;
874 for (reg = 0; reg < 18; ++reg)
876 if (fixed_regs[reg])
878 /* Don't recognize sequences that contain global register
879 variables. */
881 if (live_seq != 0)
882 return 0;
883 else
884 continue;
887 if (!call_used_regs[reg])
889 if (df_regs_ever_live_p (reg))
891 ++live_seq;
892 ++cur_seq;
894 else
895 cur_seq = 0;
899 if (!frame_pointer_needed)
901 if (df_regs_ever_live_p (REG_Y))
903 ++live_seq;
904 ++cur_seq;
906 else
907 cur_seq = 0;
909 if (df_regs_ever_live_p (REG_Y+1))
911 ++live_seq;
912 ++cur_seq;
914 else
915 cur_seq = 0;
917 else
919 cur_seq += 2;
920 live_seq += 2;
922 return (cur_seq == live_seq) ? live_seq : 0;
925 /* Obtain the length sequence of insns. */
928 get_sequence_length (rtx insns)
930 rtx insn;
931 int length;
933 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
934 length += get_attr_length (insn);
936 return length;
940 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
943 avr_incoming_return_addr_rtx (void)
945 /* The return address is at the top of the stack. Note that the push
946 was via post-decrement, which means the actual address is off by one. */
947 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
950 /* Helper for expand_prologue. Emit a push of a byte register. */
952 static void
953 emit_push_byte (unsigned regno, bool frame_related_p)
955 rtx mem, reg, insn;
957 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
958 mem = gen_frame_mem (QImode, mem);
959 reg = gen_rtx_REG (QImode, regno);
961 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
962 if (frame_related_p)
963 RTX_FRAME_RELATED_P (insn) = 1;
965 cfun->machine->stack_usage++;
969 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
970 SFR is a MEM representing the memory location of the SFR.
971 If CLR_P then clear the SFR after the push using zero_reg. */
973 static void
974 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
976 rtx insn;
978 gcc_assert (MEM_P (sfr));
980 /* IN __tmp_reg__, IO(SFR) */
981 insn = emit_move_insn (tmp_reg_rtx, sfr);
982 if (frame_related_p)
983 RTX_FRAME_RELATED_P (insn) = 1;
985 /* PUSH __tmp_reg__ */
986 emit_push_byte (TMP_REGNO, frame_related_p);
988 if (clr_p)
990 /* OUT IO(SFR), __zero_reg__ */
991 insn = emit_move_insn (sfr, const0_rtx);
992 if (frame_related_p)
993 RTX_FRAME_RELATED_P (insn) = 1;
997 static void
998 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1000 rtx insn;
1001 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1002 int live_seq = sequent_regs_live ();
1004 HOST_WIDE_INT size_max
1005 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1007 bool minimize = (TARGET_CALL_PROLOGUES
1008 && size < size_max
1009 && live_seq
1010 && !isr_p
1011 && !cfun->machine->is_OS_task
1012 && !cfun->machine->is_OS_main);
1014 if (minimize
1015 && (frame_pointer_needed
1016 || avr_outgoing_args_size() > 8
1017 || (AVR_2_BYTE_PC && live_seq > 6)
1018 || live_seq > 7))
1020 rtx pattern;
1021 int first_reg, reg, offset;
1023 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1024 gen_int_mode (size, HImode));
1026 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1027 gen_int_mode (live_seq+size, HImode));
1028 insn = emit_insn (pattern);
1029 RTX_FRAME_RELATED_P (insn) = 1;
1031 /* Describe the effect of the unspec_volatile call to prologue_saves.
1032 Note that this formulation assumes that add_reg_note pushes the
1033 notes to the front. Thus we build them in the reverse order of
1034 how we want dwarf2out to process them. */
1036 /* The function does always set frame_pointer_rtx, but whether that
1037 is going to be permanent in the function is frame_pointer_needed. */
1039 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1040 gen_rtx_SET (VOIDmode, (frame_pointer_needed
1041 ? frame_pointer_rtx
1042 : stack_pointer_rtx),
1043 plus_constant (Pmode, stack_pointer_rtx,
1044 -(size + live_seq))));
1046 /* Note that live_seq always contains r28+r29, but the other
1047 registers to be saved are all below 18. */
1049 first_reg = 18 - (live_seq - 2);
1051 for (reg = 29, offset = -live_seq + 1;
1052 reg >= first_reg;
1053 reg = (reg == 28 ? 17 : reg - 1), ++offset)
1055 rtx m, r;
1057 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1058 offset));
1059 r = gen_rtx_REG (QImode, reg);
1060 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1063 cfun->machine->stack_usage += size + live_seq;
1065 else /* !minimize */
1067 int reg;
1069 for (reg = 0; reg < 32; ++reg)
1070 if (TEST_HARD_REG_BIT (set, reg))
1071 emit_push_byte (reg, true);
1073 if (frame_pointer_needed
1074 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1076 /* Push frame pointer. Always be consistent about the
1077 ordering of pushes -- epilogue_restores expects the
1078 register pair to be pushed low byte first. */
1080 emit_push_byte (REG_Y, true);
1081 emit_push_byte (REG_Y + 1, true);
1084 if (frame_pointer_needed
1085 && size == 0)
1087 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1088 RTX_FRAME_RELATED_P (insn) = 1;
1091 if (size != 0)
1093 /* Creating a frame can be done by direct manipulation of the
1094 stack or via the frame pointer. These two methods are:
1095 fp = sp
1096 fp -= size
1097 sp = fp
1099 sp -= size
1100 fp = sp (*)
1101 the optimum method depends on function type, stack and
1102 frame size. To avoid a complex logic, both methods are
1103 tested and shortest is selected.
1105 There is also the case where SIZE != 0 and no frame pointer is
1106 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1107 In that case, insn (*) is not needed in that case.
1108 We use the X register as scratch. This is save because in X
1109 is call-clobbered.
1110 In an interrupt routine, the case of SIZE != 0 together with
1111 !frame_pointer_needed can only occur if the function is not a
1112 leaf function and thus X has already been saved. */
1114 int irq_state = -1;
1115 HOST_WIDE_INT size_cfa = size;
1116 rtx fp_plus_insns, fp, my_fp;
1118 gcc_assert (frame_pointer_needed
1119 || !isr_p
1120 || !crtl->is_leaf);
1122 fp = my_fp = (frame_pointer_needed
1123 ? frame_pointer_rtx
1124 : gen_rtx_REG (Pmode, REG_X));
1126 if (AVR_HAVE_8BIT_SP)
1128 /* The high byte (r29) does not change:
1129 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1131 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1134 /* Cut down size and avoid size = 0 so that we don't run
1135 into ICE like PR52488 in the remainder. */
1137 if (size > size_max)
1139 /* Don't error so that insane code from newlib still compiles
1140 and does not break building newlib. As PR51345 is implemented
1141 now, there are multilib variants with -msp8.
1143 If user wants sanity checks he can use -Wstack-usage=
1144 or similar options.
1146 For CFA we emit the original, non-saturated size so that
1147 the generic machinery is aware of the real stack usage and
1148 will print the above diagnostic as expected. */
1150 size = size_max;
1153 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1155 /************ Method 1: Adjust frame pointer ************/
1157 start_sequence ();
1159 /* Normally, the dwarf2out frame-related-expr interpreter does
1160 not expect to have the CFA change once the frame pointer is
1161 set up. Thus, we avoid marking the move insn below and
1162 instead indicate that the entire operation is complete after
1163 the frame pointer subtraction is done. */
1165 insn = emit_move_insn (fp, stack_pointer_rtx);
1166 if (frame_pointer_needed)
1168 RTX_FRAME_RELATED_P (insn) = 1;
1169 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1170 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1173 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1174 my_fp, -size));
1175 if (frame_pointer_needed)
1177 RTX_FRAME_RELATED_P (insn) = 1;
1178 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1179 gen_rtx_SET (VOIDmode, fp,
1180 plus_constant (Pmode, fp,
1181 -size_cfa)));
1184 /* Copy to stack pointer. Note that since we've already
1185 changed the CFA to the frame pointer this operation
1186 need not be annotated if frame pointer is needed.
1187 Always move through unspec, see PR50063.
1188 For meaning of irq_state see movhi_sp_r insn. */
1190 if (cfun->machine->is_interrupt)
1191 irq_state = 1;
1193 if (TARGET_NO_INTERRUPTS
1194 || cfun->machine->is_signal
1195 || cfun->machine->is_OS_main)
1196 irq_state = 0;
1198 if (AVR_HAVE_8BIT_SP)
1199 irq_state = 2;
1201 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1202 fp, GEN_INT (irq_state)));
1203 if (!frame_pointer_needed)
1205 RTX_FRAME_RELATED_P (insn) = 1;
1206 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1207 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1208 plus_constant (Pmode,
1209 stack_pointer_rtx,
1210 -size_cfa)));
1213 fp_plus_insns = get_insns ();
1214 end_sequence ();
1216 /************ Method 2: Adjust Stack pointer ************/
1218 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1219 can only handle specific offsets. */
1221 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1223 rtx sp_plus_insns;
1225 start_sequence ();
1227 insn = emit_move_insn (stack_pointer_rtx,
1228 plus_constant (Pmode, stack_pointer_rtx,
1229 -size));
1230 RTX_FRAME_RELATED_P (insn) = 1;
1231 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1232 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1233 plus_constant (Pmode,
1234 stack_pointer_rtx,
1235 -size_cfa)));
1236 if (frame_pointer_needed)
1238 insn = emit_move_insn (fp, stack_pointer_rtx);
1239 RTX_FRAME_RELATED_P (insn) = 1;
1242 sp_plus_insns = get_insns ();
1243 end_sequence ();
1245 /************ Use shortest method ************/
1247 emit_insn (get_sequence_length (sp_plus_insns)
1248 < get_sequence_length (fp_plus_insns)
1249 ? sp_plus_insns
1250 : fp_plus_insns);
1252 else
1254 emit_insn (fp_plus_insns);
1257 cfun->machine->stack_usage += size_cfa;
1258 } /* !minimize && size != 0 */
1259 } /* !minimize */
1263 /* Output function prologue. */
1265 void
1266 avr_expand_prologue (void)
1268 HARD_REG_SET set;
1269 HOST_WIDE_INT size;
1271 size = get_frame_size() + avr_outgoing_args_size();
1273 cfun->machine->stack_usage = 0;
1275 /* Prologue: naked. */
1276 if (cfun->machine->is_naked)
1278 return;
1281 avr_regs_to_save (&set);
1283 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1285 /* Enable interrupts. */
1286 if (cfun->machine->is_interrupt)
1287 emit_insn (gen_enable_interrupt ());
1289 /* Push zero reg. */
1290 emit_push_byte (ZERO_REGNO, true);
1292 /* Push tmp reg. */
1293 emit_push_byte (TMP_REGNO, true);
1295 /* Push SREG. */
1296 /* ??? There's no dwarf2 column reserved for SREG. */
1297 emit_push_sfr (sreg_rtx, false, false /* clr */);
1299 /* Clear zero reg. */
1300 emit_move_insn (zero_reg_rtx, const0_rtx);
1302 /* Prevent any attempt to delete the setting of ZERO_REG! */
1303 emit_use (zero_reg_rtx);
1305 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1306 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1308 if (AVR_HAVE_RAMPD)
1309 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1311 if (AVR_HAVE_RAMPX
1312 && TEST_HARD_REG_BIT (set, REG_X)
1313 && TEST_HARD_REG_BIT (set, REG_X + 1))
1315 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1318 if (AVR_HAVE_RAMPY
1319 && (frame_pointer_needed
1320 || (TEST_HARD_REG_BIT (set, REG_Y)
1321 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1323 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1326 if (AVR_HAVE_RAMPZ
1327 && TEST_HARD_REG_BIT (set, REG_Z)
1328 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1330 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1332 } /* is_interrupt is_signal */
1334 avr_prologue_setup_frame (size, set);
1336 if (flag_stack_usage_info)
1337 current_function_static_stack_size = cfun->machine->stack_usage;
1341 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1342 /* Output summary at end of function prologue. */
1344 static void
1345 avr_asm_function_end_prologue (FILE *file)
1347 if (cfun->machine->is_naked)
1349 fputs ("/* prologue: naked */\n", file);
1351 else
1353 if (cfun->machine->is_interrupt)
1355 fputs ("/* prologue: Interrupt */\n", file);
1357 else if (cfun->machine->is_signal)
1359 fputs ("/* prologue: Signal */\n", file);
1361 else
1362 fputs ("/* prologue: function */\n", file);
1365 if (ACCUMULATE_OUTGOING_ARGS)
1366 fprintf (file, "/* outgoing args size = %d */\n",
1367 avr_outgoing_args_size());
1369 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1370 get_frame_size());
1371 fprintf (file, "/* stack size = %d */\n",
1372 cfun->machine->stack_usage);
1373 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1374 usage for offset so that SP + .L__stack_offset = return address. */
1375 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1379 /* Implement `EPILOGUE_USES'. */
1382 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1384 if (reload_completed
1385 && cfun->machine
1386 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1387 return 1;
1388 return 0;
1391 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1393 static void
1394 emit_pop_byte (unsigned regno)
1396 rtx mem, reg;
1398 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1399 mem = gen_frame_mem (QImode, mem);
1400 reg = gen_rtx_REG (QImode, regno);
1402 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1405 /* Output RTL epilogue. */
1407 void
1408 avr_expand_epilogue (bool sibcall_p)
1410 int reg;
1411 int live_seq;
1412 HARD_REG_SET set;
1413 int minimize;
1414 HOST_WIDE_INT size;
1415 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1417 size = get_frame_size() + avr_outgoing_args_size();
1419 /* epilogue: naked */
1420 if (cfun->machine->is_naked)
1422 gcc_assert (!sibcall_p);
1424 emit_jump_insn (gen_return ());
1425 return;
1428 avr_regs_to_save (&set);
1429 live_seq = sequent_regs_live ();
1431 minimize = (TARGET_CALL_PROLOGUES
1432 && live_seq
1433 && !isr_p
1434 && !cfun->machine->is_OS_task
1435 && !cfun->machine->is_OS_main);
1437 if (minimize
1438 && (live_seq > 4
1439 || frame_pointer_needed
1440 || size))
1442 /* Get rid of frame. */
1444 if (!frame_pointer_needed)
1446 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1449 if (size)
1451 emit_move_insn (frame_pointer_rtx,
1452 plus_constant (Pmode, frame_pointer_rtx, size));
1455 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1456 return;
1459 if (size)
1461 /* Try two methods to adjust stack and select shortest. */
1463 int irq_state = -1;
1464 rtx fp, my_fp;
1465 rtx fp_plus_insns;
1466 HOST_WIDE_INT size_max;
1468 gcc_assert (frame_pointer_needed
1469 || !isr_p
1470 || !crtl->is_leaf);
1472 fp = my_fp = (frame_pointer_needed
1473 ? frame_pointer_rtx
1474 : gen_rtx_REG (Pmode, REG_X));
1476 if (AVR_HAVE_8BIT_SP)
1478 /* The high byte (r29) does not change:
1479 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1481 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1484 /* For rationale see comment in prologue generation. */
1486 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1487 if (size > size_max)
1488 size = size_max;
1489 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1491 /********** Method 1: Adjust fp register **********/
1493 start_sequence ();
1495 if (!frame_pointer_needed)
1496 emit_move_insn (fp, stack_pointer_rtx);
1498 emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1500 /* Copy to stack pointer. */
1502 if (TARGET_NO_INTERRUPTS)
1503 irq_state = 0;
1505 if (AVR_HAVE_8BIT_SP)
1506 irq_state = 2;
1508 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1509 GEN_INT (irq_state)));
1511 fp_plus_insns = get_insns ();
1512 end_sequence ();
1514 /********** Method 2: Adjust Stack pointer **********/
1516 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1518 rtx sp_plus_insns;
1520 start_sequence ();
1522 emit_move_insn (stack_pointer_rtx,
1523 plus_constant (Pmode, stack_pointer_rtx, size));
1525 sp_plus_insns = get_insns ();
1526 end_sequence ();
1528 /************ Use shortest method ************/
1530 emit_insn (get_sequence_length (sp_plus_insns)
1531 < get_sequence_length (fp_plus_insns)
1532 ? sp_plus_insns
1533 : fp_plus_insns);
1535 else
1536 emit_insn (fp_plus_insns);
1537 } /* size != 0 */
1539 if (frame_pointer_needed
1540 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1542 /* Restore previous frame_pointer. See avr_expand_prologue for
1543 rationale for not using pophi. */
1545 emit_pop_byte (REG_Y + 1);
1546 emit_pop_byte (REG_Y);
1549 /* Restore used registers. */
1551 for (reg = 31; reg >= 0; --reg)
1552 if (TEST_HARD_REG_BIT (set, reg))
1553 emit_pop_byte (reg);
1555 if (isr_p)
1557 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1558 The conditions to restore them must be tha same as in prologue. */
1560 if (AVR_HAVE_RAMPZ
1561 && TEST_HARD_REG_BIT (set, REG_Z)
1562 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1564 emit_pop_byte (TMP_REGNO);
1565 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1568 if (AVR_HAVE_RAMPY
1569 && (frame_pointer_needed
1570 || (TEST_HARD_REG_BIT (set, REG_Y)
1571 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1573 emit_pop_byte (TMP_REGNO);
1574 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1577 if (AVR_HAVE_RAMPX
1578 && TEST_HARD_REG_BIT (set, REG_X)
1579 && TEST_HARD_REG_BIT (set, REG_X + 1))
1581 emit_pop_byte (TMP_REGNO);
1582 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1585 if (AVR_HAVE_RAMPD)
1587 emit_pop_byte (TMP_REGNO);
1588 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1591 /* Restore SREG using tmp_reg as scratch. */
1593 emit_pop_byte (TMP_REGNO);
1594 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1596 /* Restore tmp REG. */
1597 emit_pop_byte (TMP_REGNO);
1599 /* Restore zero REG. */
1600 emit_pop_byte (ZERO_REGNO);
1603 if (!sibcall_p)
1604 emit_jump_insn (gen_return ());
1608 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1610 static void
1611 avr_asm_function_begin_epilogue (FILE *file)
1613 fprintf (file, "/* epilogue start */\n");
1617 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1619 static bool
1620 avr_cannot_modify_jumps_p (void)
1623 /* Naked Functions must not have any instructions after
1624 their epilogue, see PR42240 */
1626 if (reload_completed
1627 && cfun->machine
1628 && cfun->machine->is_naked)
1630 return true;
1633 return false;
1637 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1639 static bool
1640 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1642 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1643 This hook just serves to hack around PR rtl-optimization/52543 by
1644 claiming that non-generic addresses were mode-dependent so that
1645 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1646 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1647 generic address space which is not true. */
1649 return !ADDR_SPACE_GENERIC_P (as);
1653 /* Helper function for `avr_legitimate_address_p'. */
1655 static inline bool
1656 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1657 RTX_CODE outer_code, bool strict)
1659 return (REG_P (reg)
1660 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1661 as, outer_code, UNKNOWN)
1662 || (!strict
1663 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1667 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1668 machine for a memory operand of mode MODE. */
1670 static bool
1671 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1673 bool ok = CONSTANT_ADDRESS_P (x);
1675 switch (GET_CODE (x))
1677 case REG:
1678 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1679 MEM, strict);
1681 if (strict
1682 && GET_MODE_SIZE (mode) > 4
1683 && REG_X == REGNO (x))
1685 ok = false;
1687 break;
1689 case POST_INC:
1690 case PRE_DEC:
1691 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1692 GET_CODE (x), strict);
1693 break;
1695 case PLUS:
1697 rtx reg = XEXP (x, 0);
1698 rtx op1 = XEXP (x, 1);
1700 if (REG_P (reg)
1701 && CONST_INT_P (op1)
1702 && INTVAL (op1) >= 0)
1704 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1706 if (fit)
1708 ok = (! strict
1709 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1710 PLUS, strict));
1712 if (reg == frame_pointer_rtx
1713 || reg == arg_pointer_rtx)
1715 ok = true;
1718 else if (frame_pointer_needed
1719 && reg == frame_pointer_rtx)
1721 ok = true;
1725 break;
1727 default:
1728 break;
1731 if (avr_log.legitimate_address_p)
1733 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1734 "reload_completed=%d reload_in_progress=%d %s:",
1735 ok, mode, strict, reload_completed, reload_in_progress,
1736 reg_renumber ? "(reg_renumber)" : "");
1738 if (GET_CODE (x) == PLUS
1739 && REG_P (XEXP (x, 0))
1740 && CONST_INT_P (XEXP (x, 1))
1741 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1742 && reg_renumber)
1744 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1745 true_regnum (XEXP (x, 0)));
1748 avr_edump ("\n%r\n", x);
1751 return ok;
1755 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1756 now only a helper for avr_addr_space_legitimize_address. */
1757 /* Attempts to replace X with a valid
1758 memory address for an operand of mode MODE */
1760 static rtx
1761 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1763 bool big_offset_p = false;
1765 x = oldx;
1767 if (GET_CODE (oldx) == PLUS
1768 && REG_P (XEXP (oldx, 0)))
1770 if (REG_P (XEXP (oldx, 1)))
1771 x = force_reg (GET_MODE (oldx), oldx);
1772 else if (CONST_INT_P (XEXP (oldx, 1)))
1774 int offs = INTVAL (XEXP (oldx, 1));
1775 if (frame_pointer_rtx != XEXP (oldx, 0)
1776 && offs > MAX_LD_OFFSET (mode))
1778 big_offset_p = true;
1779 x = force_reg (GET_MODE (oldx), oldx);
1784 if (avr_log.legitimize_address)
1786 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1788 if (x != oldx)
1789 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1792 return x;
1796 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1797 /* This will allow register R26/27 to be used where it is no worse than normal
1798 base pointers R28/29 or R30/31. For example, if base offset is greater
1799 than 63 bytes or for R++ or --R addressing. */
1802 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1803 int opnum, int type, int addr_type,
1804 int ind_levels ATTRIBUTE_UNUSED,
1805 rtx (*mk_memloc)(rtx,int))
1807 rtx x = *px;
1809 if (avr_log.legitimize_reload_address)
1810 avr_edump ("\n%?:%m %r\n", mode, x);
1812 if (1 && (GET_CODE (x) == POST_INC
1813 || GET_CODE (x) == PRE_DEC))
1815 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1816 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1817 opnum, RELOAD_OTHER);
1819 if (avr_log.legitimize_reload_address)
1820 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1821 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1823 return x;
1826 if (GET_CODE (x) == PLUS
1827 && REG_P (XEXP (x, 0))
1828 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1829 && CONST_INT_P (XEXP (x, 1))
1830 && INTVAL (XEXP (x, 1)) >= 1)
1832 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1834 if (fit)
1836 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1838 int regno = REGNO (XEXP (x, 0));
1839 rtx mem = mk_memloc (x, regno);
1841 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1842 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1843 1, (enum reload_type) addr_type);
1845 if (avr_log.legitimize_reload_address)
1846 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1847 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1849 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1850 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1851 opnum, (enum reload_type) type);
1853 if (avr_log.legitimize_reload_address)
1854 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1855 BASE_POINTER_REGS, mem, NULL_RTX);
1857 return x;
1860 else if (! (frame_pointer_needed
1861 && XEXP (x, 0) == frame_pointer_rtx))
1863 push_reload (x, NULL_RTX, px, NULL,
1864 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1865 opnum, (enum reload_type) type);
1867 if (avr_log.legitimize_reload_address)
1868 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1869 POINTER_REGS, x, NULL_RTX);
1871 return x;
1875 return NULL_RTX;
1879 /* Implement `TARGET_SECONDARY_RELOAD' */
1881 static reg_class_t
1882 avr_secondary_reload (bool in_p, rtx x,
1883 reg_class_t reload_class ATTRIBUTE_UNUSED,
1884 enum machine_mode mode, secondary_reload_info *sri)
1886 if (in_p
1887 && MEM_P (x)
1888 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1889 && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
1891 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1893 switch (mode)
1895 default:
1896 gcc_unreachable();
1898 case QImode: sri->icode = CODE_FOR_reload_inqi; break;
1899 case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
1900 case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
1902 case HImode: sri->icode = CODE_FOR_reload_inhi; break;
1903 case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
1904 case HAmode: sri->icode = CODE_FOR_reload_inha; break;
1905 case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
1906 case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
1908 case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
1910 case SImode: sri->icode = CODE_FOR_reload_insi; break;
1911 case SFmode: sri->icode = CODE_FOR_reload_insf; break;
1912 case SQmode: sri->icode = CODE_FOR_reload_insq; break;
1913 case SAmode: sri->icode = CODE_FOR_reload_insa; break;
1914 case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
1915 case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
1919 return NO_REGS;
1923 /* Helper function to print assembler resp. track instruction
1924 sequence lengths. Always return "".
1926 If PLEN == NULL:
1927 Output assembler code from template TPL with operands supplied
1928 by OPERANDS. This is just forwarding to output_asm_insn.
1930 If PLEN != NULL:
1931 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1932 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1933 Don't output anything.
1936 static const char*
1937 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1939 if (NULL == plen)
1941 output_asm_insn (tpl, operands);
1943 else
1945 if (n_words < 0)
1946 *plen = -n_words;
1947 else
1948 *plen += n_words;
1951 return "";
1955 /* Return a pointer register name as a string. */
1957 static const char*
1958 ptrreg_to_str (int regno)
1960 switch (regno)
1962 case REG_X: return "X";
1963 case REG_Y: return "Y";
1964 case REG_Z: return "Z";
1965 default:
1966 output_operand_lossage ("address operand requires constraint for"
1967 " X, Y, or Z register");
1969 return NULL;
1972 /* Return the condition name as a string.
1973 Used in conditional jump constructing */
1975 static const char*
1976 cond_string (enum rtx_code code)
1978 switch (code)
1980 case NE:
1981 return "ne";
1982 case EQ:
1983 return "eq";
1984 case GE:
1985 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1986 return "pl";
1987 else
1988 return "ge";
1989 case LT:
1990 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1991 return "mi";
1992 else
1993 return "lt";
1994 case GEU:
1995 return "sh";
1996 case LTU:
1997 return "lo";
1998 default:
1999 gcc_unreachable ();
2002 return "";
2006 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2007 /* Output ADDR to FILE as address. */
2009 static void
2010 avr_print_operand_address (FILE *file, rtx addr)
2012 switch (GET_CODE (addr))
2014 case REG:
2015 fprintf (file, ptrreg_to_str (REGNO (addr)));
2016 break;
2018 case PRE_DEC:
2019 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2020 break;
2022 case POST_INC:
2023 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2024 break;
2026 default:
2027 if (CONSTANT_ADDRESS_P (addr)
2028 && text_segment_operand (addr, VOIDmode))
2030 rtx x = addr;
2031 if (GET_CODE (x) == CONST)
2032 x = XEXP (x, 0);
2033 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2035 /* Assembler gs() will implant word address. Make offset
2036 a byte offset inside gs() for assembler. This is
2037 needed because the more logical (constant+gs(sym)) is not
2038 accepted by gas. For 128K and smaller devices this is ok.
2039 For large devices it will create a trampoline to offset
2040 from symbol which may not be what the user really wanted. */
2042 fprintf (file, "gs(");
2043 output_addr_const (file, XEXP (x,0));
2044 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2045 2 * INTVAL (XEXP (x, 1)));
2046 if (AVR_3_BYTE_PC)
2047 if (warning (0, "pointer offset from symbol maybe incorrect"))
2049 output_addr_const (stderr, addr);
2050 fprintf(stderr,"\n");
2053 else
2055 fprintf (file, "gs(");
2056 output_addr_const (file, addr);
2057 fprintf (file, ")");
2060 else
2061 output_addr_const (file, addr);
2066 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2068 static bool
2069 avr_print_operand_punct_valid_p (unsigned char code)
2071 return code == '~' || code == '!';
2075 /* Implement `TARGET_PRINT_OPERAND'. */
2076 /* Output X as assembler operand to file FILE.
2077 For a description of supported %-codes, see top of avr.md. */
2079 static void
2080 avr_print_operand (FILE *file, rtx x, int code)
2082 int abcd = 0;
2084 if (code >= 'A' && code <= 'D')
2085 abcd = code - 'A';
2087 if (code == '~')
2089 if (!AVR_HAVE_JMP_CALL)
2090 fputc ('r', file);
2092 else if (code == '!')
2094 if (AVR_HAVE_EIJMP_EICALL)
2095 fputc ('e', file);
2097 else if (code == 't'
2098 || code == 'T')
2100 static int t_regno = -1;
2101 static int t_nbits = -1;
2103 if (REG_P (x) && t_regno < 0 && code == 'T')
2105 t_regno = REGNO (x);
2106 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2108 else if (CONST_INT_P (x) && t_regno >= 0
2109 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2111 int bpos = INTVAL (x);
2113 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2114 if (code == 'T')
2115 fprintf (file, ",%d", bpos % 8);
2117 t_regno = -1;
2119 else
2120 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2122 else if (REG_P (x))
2124 if (x == zero_reg_rtx)
2125 fprintf (file, "__zero_reg__");
2126 else if (code == 'r' && REGNO (x) < 32)
2127 fprintf (file, "%d", (int) REGNO (x));
2128 else
2129 fprintf (file, reg_names[REGNO (x) + abcd]);
2131 else if (CONST_INT_P (x))
2133 HOST_WIDE_INT ival = INTVAL (x);
2135 if ('i' != code)
2136 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2137 else if (low_io_address_operand (x, VOIDmode)
2138 || high_io_address_operand (x, VOIDmode))
2140 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2141 fprintf (file, "__RAMPZ__");
2142 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2143 fprintf (file, "__RAMPY__");
2144 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2145 fprintf (file, "__RAMPX__");
2146 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2147 fprintf (file, "__RAMPD__");
2148 else if (AVR_XMEGA && ival == avr_addr.ccp)
2149 fprintf (file, "__CCP__");
2150 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2151 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2152 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2153 else
2155 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2156 ival - avr_current_arch->sfr_offset);
2159 else
2160 fatal_insn ("bad address, not an I/O address:", x);
2162 else if (MEM_P (x))
2164 rtx addr = XEXP (x, 0);
2166 if (code == 'm')
2168 if (!CONSTANT_P (addr))
2169 fatal_insn ("bad address, not a constant:", addr);
2170 /* Assembler template with m-code is data - not progmem section */
2171 if (text_segment_operand (addr, VOIDmode))
2172 if (warning (0, "accessing data memory with"
2173 " program memory address"))
2175 output_addr_const (stderr, addr);
2176 fprintf(stderr,"\n");
2178 output_addr_const (file, addr);
2180 else if (code == 'i')
2182 avr_print_operand (file, addr, 'i');
2184 else if (code == 'o')
2186 if (GET_CODE (addr) != PLUS)
2187 fatal_insn ("bad address, not (reg+disp):", addr);
2189 avr_print_operand (file, XEXP (addr, 1), 0);
2191 else if (code == 'p' || code == 'r')
2193 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2194 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2196 if (code == 'p')
2197 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2198 else
2199 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2201 else if (GET_CODE (addr) == PLUS)
2203 avr_print_operand_address (file, XEXP (addr,0));
2204 if (REGNO (XEXP (addr, 0)) == REG_X)
2205 fatal_insn ("internal compiler error. Bad address:"
2206 ,addr);
2207 fputc ('+', file);
2208 avr_print_operand (file, XEXP (addr,1), code);
2210 else
2211 avr_print_operand_address (file, addr);
2213 else if (code == 'i')
2215 fatal_insn ("bad address, not an I/O address:", x);
2217 else if (code == 'x')
2219 /* Constant progmem address - like used in jmp or call */
2220 if (0 == text_segment_operand (x, VOIDmode))
2221 if (warning (0, "accessing program memory"
2222 " with data memory address"))
2224 output_addr_const (stderr, x);
2225 fprintf(stderr,"\n");
2227 /* Use normal symbol for direct address no linker trampoline needed */
2228 output_addr_const (file, x);
2230 else if (CONST_FIXED_P (x))
2232 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2233 if (code != 0)
2234 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2235 code);
2236 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2238 else if (GET_CODE (x) == CONST_DOUBLE)
2240 long val;
2241 REAL_VALUE_TYPE rv;
2242 if (GET_MODE (x) != SFmode)
2243 fatal_insn ("internal compiler error. Unknown mode:", x);
2244 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2245 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2246 fprintf (file, "0x%lx", val);
2248 else if (GET_CODE (x) == CONST_STRING)
2249 fputs (XSTR (x, 0), file);
2250 else if (code == 'j')
2251 fputs (cond_string (GET_CODE (x)), file);
2252 else if (code == 'k')
2253 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2254 else
2255 avr_print_operand_address (file, x);
2259 /* Worker function for `NOTICE_UPDATE_CC'. */
2260 /* Update the condition code in the INSN. */
2262 void
2263 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2265 rtx set;
2266 enum attr_cc cc = get_attr_cc (insn);
2268 switch (cc)
2270 default:
2271 break;
2273 case CC_PLUS:
2274 case CC_LDI:
2276 rtx *op = recog_data.operand;
2277 int len_dummy, icc;
2279 /* Extract insn's operands. */
2280 extract_constrain_insn_cached (insn);
2282 switch (cc)
2284 default:
2285 gcc_unreachable();
2287 case CC_PLUS:
2288 avr_out_plus (insn, op, &len_dummy, &icc);
2289 cc = (enum attr_cc) icc;
2290 break;
2292 case CC_LDI:
2294 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2295 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2296 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2297 ? CC_CLOBBER
2298 /* Any other "r,rL" combination does not alter cc0. */
2299 : CC_NONE;
2301 break;
2302 } /* inner switch */
2304 break;
2306 } /* outer swicth */
2308 switch (cc)
2310 default:
2311 /* Special values like CC_OUT_PLUS from above have been
2312 mapped to "standard" CC_* values so we never come here. */
2314 gcc_unreachable();
2315 break;
2317 case CC_NONE:
2318 /* Insn does not affect CC at all. */
2319 break;
2321 case CC_SET_N:
2322 CC_STATUS_INIT;
2323 break;
2325 case CC_SET_ZN:
2326 set = single_set (insn);
2327 CC_STATUS_INIT;
2328 if (set)
2330 cc_status.flags |= CC_NO_OVERFLOW;
2331 cc_status.value1 = SET_DEST (set);
2333 break;
2335 case CC_SET_CZN:
2336 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2337 The V flag may or may not be known but that's ok because
2338 alter_cond will change tests to use EQ/NE. */
2339 set = single_set (insn);
2340 CC_STATUS_INIT;
2341 if (set)
2343 cc_status.value1 = SET_DEST (set);
2344 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2346 break;
2348 case CC_COMPARE:
2349 set = single_set (insn);
2350 CC_STATUS_INIT;
2351 if (set)
2352 cc_status.value1 = SET_SRC (set);
2353 break;
2355 case CC_CLOBBER:
2356 /* Insn doesn't leave CC in a usable state. */
2357 CC_STATUS_INIT;
2358 break;
2362 /* Choose mode for jump insn:
2363 1 - relative jump in range -63 <= x <= 62 ;
2364 2 - relative jump in range -2046 <= x <= 2045 ;
2365 3 - absolute jump (only for ATmega[16]03). */
2368 avr_jump_mode (rtx x, rtx insn)
2370 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2371 ? XEXP (x, 0) : x));
2372 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2373 int jump_distance = cur_addr - dest_addr;
2375 if (-63 <= jump_distance && jump_distance <= 62)
2376 return 1;
2377 else if (-2046 <= jump_distance && jump_distance <= 2045)
2378 return 2;
2379 else if (AVR_HAVE_JMP_CALL)
2380 return 3;
2382 return 2;
2385 /* Return an AVR condition jump commands.
2386 X is a comparison RTX.
2387 LEN is a number returned by avr_jump_mode function.
2388 If REVERSE nonzero then condition code in X must be reversed. */
2390 const char*
2391 ret_cond_branch (rtx x, int len, int reverse)
2393 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2395 switch (cond)
2397 case GT:
2398 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2399 return (len == 1 ? ("breq .+2" CR_TAB
2400 "brpl %0") :
2401 len == 2 ? ("breq .+4" CR_TAB
2402 "brmi .+2" CR_TAB
2403 "rjmp %0") :
2404 ("breq .+6" CR_TAB
2405 "brmi .+4" CR_TAB
2406 "jmp %0"));
2408 else
2409 return (len == 1 ? ("breq .+2" CR_TAB
2410 "brge %0") :
2411 len == 2 ? ("breq .+4" CR_TAB
2412 "brlt .+2" CR_TAB
2413 "rjmp %0") :
2414 ("breq .+6" CR_TAB
2415 "brlt .+4" CR_TAB
2416 "jmp %0"));
2417 case GTU:
2418 return (len == 1 ? ("breq .+2" CR_TAB
2419 "brsh %0") :
2420 len == 2 ? ("breq .+4" CR_TAB
2421 "brlo .+2" CR_TAB
2422 "rjmp %0") :
2423 ("breq .+6" CR_TAB
2424 "brlo .+4" CR_TAB
2425 "jmp %0"));
2426 case LE:
2427 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2428 return (len == 1 ? ("breq %0" CR_TAB
2429 "brmi %0") :
2430 len == 2 ? ("breq .+2" CR_TAB
2431 "brpl .+2" CR_TAB
2432 "rjmp %0") :
2433 ("breq .+2" CR_TAB
2434 "brpl .+4" CR_TAB
2435 "jmp %0"));
2436 else
2437 return (len == 1 ? ("breq %0" CR_TAB
2438 "brlt %0") :
2439 len == 2 ? ("breq .+2" CR_TAB
2440 "brge .+2" CR_TAB
2441 "rjmp %0") :
2442 ("breq .+2" CR_TAB
2443 "brge .+4" CR_TAB
2444 "jmp %0"));
2445 case LEU:
2446 return (len == 1 ? ("breq %0" CR_TAB
2447 "brlo %0") :
2448 len == 2 ? ("breq .+2" CR_TAB
2449 "brsh .+2" CR_TAB
2450 "rjmp %0") :
2451 ("breq .+2" CR_TAB
2452 "brsh .+4" CR_TAB
2453 "jmp %0"));
2454 default:
2455 if (reverse)
2457 switch (len)
2459 case 1:
2460 return "br%k1 %0";
2461 case 2:
2462 return ("br%j1 .+2" CR_TAB
2463 "rjmp %0");
2464 default:
2465 return ("br%j1 .+4" CR_TAB
2466 "jmp %0");
2469 else
2471 switch (len)
2473 case 1:
2474 return "br%j1 %0";
2475 case 2:
2476 return ("br%k1 .+2" CR_TAB
2477 "rjmp %0");
2478 default:
2479 return ("br%k1 .+4" CR_TAB
2480 "jmp %0");
2484 return "";
2488 /* Worker function for `FINAL_PRESCAN_INSN'. */
2489 /* Output insn cost for next insn. */
2491 void
2492 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2493 int num_operands ATTRIBUTE_UNUSED)
2495 if (avr_log.rtx_costs)
2497 rtx set = single_set (insn);
2499 if (set)
2500 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2501 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2502 else
2503 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2504 rtx_cost (PATTERN (insn), INSN, 0,
2505 optimize_insn_for_speed_p()));
2509 /* Return 0 if undefined, 1 if always true or always false. */
2512 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2514 unsigned int max = (mode == QImode ? 0xff :
2515 mode == HImode ? 0xffff :
2516 mode == PSImode ? 0xffffff :
2517 mode == SImode ? 0xffffffff : 0);
2518 if (max && op && CONST_INT_P (x))
2520 if (unsigned_condition (op) != op)
2521 max >>= 1;
2523 if (max != (INTVAL (x) & max)
2524 && INTVAL (x) != 0xff)
2525 return 1;
2527 return 0;
2531 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2532 /* Returns nonzero if REGNO is the number of a hard
2533 register in which function arguments are sometimes passed. */
2536 avr_function_arg_regno_p(int r)
2538 return (r >= 8 && r <= 25);
2542 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2543 /* Initializing the variable cum for the state at the beginning
2544 of the argument list. */
2546 void
2547 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2548 tree fndecl ATTRIBUTE_UNUSED)
2550 cum->nregs = 18;
2551 cum->regno = FIRST_CUM_REG;
2552 if (!libname && stdarg_p (fntype))
2553 cum->nregs = 0;
2555 /* Assume the calle may be tail called */
2557 cfun->machine->sibcall_fails = 0;
2560 /* Returns the number of registers to allocate for a function argument. */
2562 static int
2563 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2565 int size;
2567 if (mode == BLKmode)
2568 size = int_size_in_bytes (type);
2569 else
2570 size = GET_MODE_SIZE (mode);
2572 /* Align all function arguments to start in even-numbered registers.
2573 Odd-sized arguments leave holes above them. */
2575 return (size + 1) & ~1;
2579 /* Implement `TARGET_FUNCTION_ARG'. */
2580 /* Controls whether a function argument is passed
2581 in a register, and which register. */
2583 static rtx
2584 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2585 const_tree type, bool named ATTRIBUTE_UNUSED)
2587 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2588 int bytes = avr_num_arg_regs (mode, type);
2590 if (cum->nregs && bytes <= cum->nregs)
2591 return gen_rtx_REG (mode, cum->regno - bytes);
2593 return NULL_RTX;
2597 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2598 /* Update the summarizer variable CUM to advance past an argument
2599 in the argument list. */
2601 static void
2602 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2603 const_tree type, bool named ATTRIBUTE_UNUSED)
2605 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2606 int bytes = avr_num_arg_regs (mode, type);
2608 cum->nregs -= bytes;
2609 cum->regno -= bytes;
2611 /* A parameter is being passed in a call-saved register. As the original
2612 contents of these regs has to be restored before leaving the function,
2613 a function must not pass arguments in call-saved regs in order to get
2614 tail-called. */
2616 if (cum->regno >= 8
2617 && cum->nregs >= 0
2618 && !call_used_regs[cum->regno])
2620 /* FIXME: We ship info on failing tail-call in struct machine_function.
2621 This uses internals of calls.c:expand_call() and the way args_so_far
2622 is used. targetm.function_ok_for_sibcall() needs to be extended to
2623 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2624 dependent so that such an extension is not wanted. */
2626 cfun->machine->sibcall_fails = 1;
2629 /* Test if all registers needed by the ABI are actually available. If the
2630 user has fixed a GPR needed to pass an argument, an (implicit) function
2631 call will clobber that fixed register. See PR45099 for an example. */
2633 if (cum->regno >= 8
2634 && cum->nregs >= 0)
2636 int regno;
2638 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2639 if (fixed_regs[regno])
2640 warning (0, "fixed register %s used to pass parameter to function",
2641 reg_names[regno]);
2644 if (cum->nregs <= 0)
2646 cum->nregs = 0;
2647 cum->regno = FIRST_CUM_REG;
2651 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2652 /* Decide whether we can make a sibling call to a function. DECL is the
2653 declaration of the function being targeted by the call and EXP is the
2654 CALL_EXPR representing the call. */
2656 static bool
2657 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2659 tree fntype_callee;
2661 /* Tail-calling must fail if callee-saved regs are used to pass
2662 function args. We must not tail-call when `epilogue_restores'
2663 is used. Unfortunately, we cannot tell at this point if that
2664 actually will happen or not, and we cannot step back from
2665 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2667 if (cfun->machine->sibcall_fails
2668 || TARGET_CALL_PROLOGUES)
2670 return false;
2673 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2675 if (decl_callee)
2677 decl_callee = TREE_TYPE (decl_callee);
2679 else
2681 decl_callee = fntype_callee;
2683 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2684 && METHOD_TYPE != TREE_CODE (decl_callee))
2686 decl_callee = TREE_TYPE (decl_callee);
2690 /* Ensure that caller and callee have compatible epilogues */
2692 if (cfun->machine->is_interrupt
2693 || cfun->machine->is_signal
2694 || cfun->machine->is_naked
2695 || avr_naked_function_p (decl_callee)
2696 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2697 || (avr_OS_task_function_p (decl_callee)
2698 != cfun->machine->is_OS_task)
2699 || (avr_OS_main_function_p (decl_callee)
2700 != cfun->machine->is_OS_main))
2702 return false;
2705 return true;
2708 /***********************************************************************
2709 Functions for outputting various mov's for a various modes
2710 ************************************************************************/
2712 /* Return true if a value of mode MODE is read from flash by
2713 __load_* function from libgcc. */
2715 bool
2716 avr_load_libgcc_p (rtx op)
2718 enum machine_mode mode = GET_MODE (op);
2719 int n_bytes = GET_MODE_SIZE (mode);
2721 return (n_bytes > 2
2722 && !AVR_HAVE_LPMX
2723 && avr_mem_flash_p (op));
2726 /* Return true if a value of mode MODE is read by __xload_* function. */
2728 bool
2729 avr_xload_libgcc_p (enum machine_mode mode)
2731 int n_bytes = GET_MODE_SIZE (mode);
2733 return (n_bytes > 1
2734 || avr_current_device->n_flash > 1);
2738 /* Fixme: This is a hack because secondary reloads don't works as expected.
2740 Find an unused d-register to be used as scratch in INSN.
2741 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2742 is a register, skip all possible return values that overlap EXCLUDE.
2743 The policy for the returned register is similar to that of
2744 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2745 of INSN.
2747 Return a QImode d-register or NULL_RTX if nothing found. */
2749 static rtx
2750 avr_find_unused_d_reg (rtx insn, rtx exclude)
2752 int regno;
2753 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2754 || avr_signal_function_p (current_function_decl));
2756 for (regno = 16; regno < 32; regno++)
2758 rtx reg = all_regs_rtx[regno];
2760 if ((exclude
2761 && reg_overlap_mentioned_p (exclude, reg))
2762 || fixed_regs[regno])
2764 continue;
2767 /* Try non-live register */
2769 if (!df_regs_ever_live_p (regno)
2770 && (TREE_THIS_VOLATILE (current_function_decl)
2771 || cfun->machine->is_OS_task
2772 || cfun->machine->is_OS_main
2773 || (!isr_p && call_used_regs[regno])))
2775 return reg;
2778 /* Any live register can be used if it is unused after.
2779 Prologue/epilogue will care for it as needed. */
2781 if (df_regs_ever_live_p (regno)
2782 && reg_unused_after (insn, reg))
2784 return reg;
2788 return NULL_RTX;
2792 /* Helper function for the next function in the case where only restricted
2793 version of LPM instruction is available. */
2795 static const char*
2796 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2798 rtx dest = xop[0];
2799 rtx addr = xop[1];
2800 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2801 int regno_dest;
2803 regno_dest = REGNO (dest);
2805 /* The implicit target register of LPM. */
2806 xop[3] = lpm_reg_rtx;
2808 switch (GET_CODE (addr))
2810 default:
2811 gcc_unreachable();
2813 case REG:
2815 gcc_assert (REG_Z == REGNO (addr));
2817 switch (n_bytes)
2819 default:
2820 gcc_unreachable();
2822 case 1:
2823 avr_asm_len ("%4lpm", xop, plen, 1);
2825 if (regno_dest != LPM_REGNO)
2826 avr_asm_len ("mov %0,%3", xop, plen, 1);
2828 return "";
2830 case 2:
2831 if (REGNO (dest) == REG_Z)
2832 return avr_asm_len ("%4lpm" CR_TAB
2833 "push %3" CR_TAB
2834 "adiw %2,1" CR_TAB
2835 "%4lpm" CR_TAB
2836 "mov %B0,%3" CR_TAB
2837 "pop %A0", xop, plen, 6);
2839 avr_asm_len ("%4lpm" CR_TAB
2840 "mov %A0,%3" CR_TAB
2841 "adiw %2,1" CR_TAB
2842 "%4lpm" CR_TAB
2843 "mov %B0,%3", xop, plen, 5);
2845 if (!reg_unused_after (insn, addr))
2846 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2848 break; /* 2 */
2851 break; /* REG */
2853 case POST_INC:
2855 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2856 && n_bytes <= 4);
2858 if (regno_dest == LPM_REGNO)
2859 avr_asm_len ("%4lpm" CR_TAB
2860 "adiw %2,1", xop, plen, 2);
2861 else
2862 avr_asm_len ("%4lpm" CR_TAB
2863 "mov %A0,%3" CR_TAB
2864 "adiw %2,1", xop, plen, 3);
2866 if (n_bytes >= 2)
2867 avr_asm_len ("%4lpm" CR_TAB
2868 "mov %B0,%3" CR_TAB
2869 "adiw %2,1", xop, plen, 3);
2871 if (n_bytes >= 3)
2872 avr_asm_len ("%4lpm" CR_TAB
2873 "mov %C0,%3" CR_TAB
2874 "adiw %2,1", xop, plen, 3);
2876 if (n_bytes >= 4)
2877 avr_asm_len ("%4lpm" CR_TAB
2878 "mov %D0,%3" CR_TAB
2879 "adiw %2,1", xop, plen, 3);
2881 break; /* POST_INC */
2883 } /* switch CODE (addr) */
2885 return "";
2889 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2890 OP[1] in AS1 to register OP[0].
2891 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2892 Return "". */
2894 const char*
2895 avr_out_lpm (rtx insn, rtx *op, int *plen)
2897 rtx xop[7];
2898 rtx dest = op[0];
2899 rtx src = SET_SRC (single_set (insn));
2900 rtx addr;
2901 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2902 int segment;
2903 RTX_CODE code;
2904 addr_space_t as = MEM_ADDR_SPACE (src);
2906 if (plen)
2907 *plen = 0;
2909 if (MEM_P (dest))
2911 warning (0, "writing to address space %qs not supported",
2912 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2914 return "";
2917 addr = XEXP (src, 0);
2918 code = GET_CODE (addr);
2920 gcc_assert (REG_P (dest));
2921 gcc_assert (REG == code || POST_INC == code);
2923 xop[0] = dest;
2924 xop[1] = addr;
2925 xop[2] = lpm_addr_reg_rtx;
2926 xop[4] = xstring_empty;
2927 xop[5] = tmp_reg_rtx;
2928 xop[6] = XEXP (rampz_rtx, 0);
2930 segment = avr_addrspace[as].segment;
2932 /* Set RAMPZ as needed. */
2934 if (segment)
2936 xop[4] = GEN_INT (segment);
2937 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
2939 if (xop[3] != NULL_RTX)
2941 avr_asm_len ("ldi %3,%4" CR_TAB
2942 "out %i6,%3", xop, plen, 2);
2944 else if (segment == 1)
2946 avr_asm_len ("clr %5" CR_TAB
2947 "inc %5" CR_TAB
2948 "out %i6,%5", xop, plen, 3);
2950 else
2952 avr_asm_len ("mov %5,%2" CR_TAB
2953 "ldi %2,%4" CR_TAB
2954 "out %i6,%2" CR_TAB
2955 "mov %2,%5", xop, plen, 4);
2958 xop[4] = xstring_e;
2960 if (!AVR_HAVE_ELPMX)
2961 return avr_out_lpm_no_lpmx (insn, xop, plen);
2963 else if (!AVR_HAVE_LPMX)
2965 return avr_out_lpm_no_lpmx (insn, xop, plen);
2968 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2970 switch (GET_CODE (addr))
2972 default:
2973 gcc_unreachable();
2975 case REG:
2977 gcc_assert (REG_Z == REGNO (addr));
2979 switch (n_bytes)
2981 default:
2982 gcc_unreachable();
2984 case 1:
2985 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2987 case 2:
2988 if (REGNO (dest) == REG_Z)
2989 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2990 "%4lpm %B0,%a2" CR_TAB
2991 "mov %A0,%5", xop, plen, 3);
2992 else
2994 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2995 "%4lpm %B0,%a2", xop, plen, 2);
2997 if (!reg_unused_after (insn, addr))
2998 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3001 break; /* 2 */
3003 case 3:
3005 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3006 "%4lpm %B0,%a2+" CR_TAB
3007 "%4lpm %C0,%a2", xop, plen, 3);
3009 if (!reg_unused_after (insn, addr))
3010 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3012 break; /* 3 */
3014 case 4:
3016 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3017 "%4lpm %B0,%a2+", xop, plen, 2);
3019 if (REGNO (dest) == REG_Z - 2)
3020 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3021 "%4lpm %C0,%a2" CR_TAB
3022 "mov %D0,%5", xop, plen, 3);
3023 else
3025 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3026 "%4lpm %D0,%a2", xop, plen, 2);
3028 if (!reg_unused_after (insn, addr))
3029 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3032 break; /* 4 */
3033 } /* n_bytes */
3035 break; /* REG */
3037 case POST_INC:
3039 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3040 && n_bytes <= 4);
3042 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3043 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3044 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3045 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3047 break; /* POST_INC */
3049 } /* switch CODE (addr) */
3051 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3053 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3055 xop[0] = zero_reg_rtx;
3056 avr_asm_len ("out %i6,%0", xop, plen, 1);
3059 return "";
3063 /* Worker function for xload_8 insn. */
3065 const char*
3066 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3068 rtx xop[4];
3070 xop[0] = op[0];
3071 xop[1] = op[1];
3072 xop[2] = lpm_addr_reg_rtx;
3073 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3075 if (plen)
3076 *plen = 0;
3078 avr_asm_len ("sbrc %1,7" CR_TAB
3079 "ld %3,%a2" CR_TAB
3080 "sbrs %1,7", xop, plen, 3);
3082 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
3084 if (REGNO (xop[0]) != REGNO (xop[3]))
3085 avr_asm_len ("mov %0,%3", xop, plen, 1);
3087 return "";
3091 const char*
3092 output_movqi (rtx insn, rtx operands[], int *plen)
3094 rtx dest = operands[0];
3095 rtx src = operands[1];
3097 if (avr_mem_flash_p (src)
3098 || avr_mem_flash_p (dest))
3100 return avr_out_lpm (insn, operands, plen);
3103 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3105 if (REG_P (dest))
3107 if (REG_P (src)) /* mov r,r */
3109 if (test_hard_reg_class (STACK_REG, dest))
3110 return avr_asm_len ("out %0,%1", operands, plen, -1);
3111 else if (test_hard_reg_class (STACK_REG, src))
3112 return avr_asm_len ("in %0,%1", operands, plen, -1);
3114 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3116 else if (CONSTANT_P (src))
3118 output_reload_in_const (operands, NULL_RTX, plen, false);
3119 return "";
3121 else if (MEM_P (src))
3122 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3124 else if (MEM_P (dest))
3126 rtx xop[2];
3128 xop[0] = dest;
3129 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3131 return out_movqi_mr_r (insn, xop, plen);
3134 return "";
3138 const char *
3139 output_movhi (rtx insn, rtx xop[], int *plen)
3141 rtx dest = xop[0];
3142 rtx src = xop[1];
3144 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3146 if (avr_mem_flash_p (src)
3147 || avr_mem_flash_p (dest))
3149 return avr_out_lpm (insn, xop, plen);
3152 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3154 if (REG_P (dest))
3156 if (REG_P (src)) /* mov r,r */
3158 if (test_hard_reg_class (STACK_REG, dest))
3160 if (AVR_HAVE_8BIT_SP)
3161 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3163 if (AVR_XMEGA)
3164 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3165 "out __SP_H__,%B1", xop, plen, -2);
3167 /* Use simple load of SP if no interrupts are used. */
3169 return TARGET_NO_INTERRUPTS
3170 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3171 "out __SP_L__,%A1", xop, plen, -2)
3172 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3173 "cli" CR_TAB
3174 "out __SP_H__,%B1" CR_TAB
3175 "out __SREG__,__tmp_reg__" CR_TAB
3176 "out __SP_L__,%A1", xop, plen, -5);
3178 else if (test_hard_reg_class (STACK_REG, src))
3180 return !AVR_HAVE_SPH
3181 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3182 "clr %B0", xop, plen, -2)
3184 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3185 "in %B0,__SP_H__", xop, plen, -2);
3188 return AVR_HAVE_MOVW
3189 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3191 : avr_asm_len ("mov %A0,%A1" CR_TAB
3192 "mov %B0,%B1", xop, plen, -2);
3193 } /* REG_P (src) */
3194 else if (CONSTANT_P (src))
3196 return output_reload_inhi (xop, NULL, plen);
3198 else if (MEM_P (src))
3200 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3203 else if (MEM_P (dest))
3205 rtx xop[2];
3207 xop[0] = dest;
3208 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3210 return out_movhi_mr_r (insn, xop, plen);
3213 fatal_insn ("invalid insn:", insn);
3215 return "";
3218 static const char*
3219 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
3221 rtx dest = op[0];
3222 rtx src = op[1];
3223 rtx x = XEXP (src, 0);
3225 if (CONSTANT_ADDRESS_P (x))
3227 return optimize > 0 && io_address_operand (x, QImode)
3228 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3229 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3231 else if (GET_CODE (x) == PLUS
3232 && REG_P (XEXP (x, 0))
3233 && CONST_INT_P (XEXP (x, 1)))
3235 /* memory access by reg+disp */
3237 int disp = INTVAL (XEXP (x, 1));
3239 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3241 if (REGNO (XEXP (x, 0)) != REG_Y)
3242 fatal_insn ("incorrect insn:",insn);
3244 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3245 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3246 "ldd %0,Y+63" CR_TAB
3247 "sbiw r28,%o1-63", op, plen, -3);
3249 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3250 "sbci r29,hi8(-%o1)" CR_TAB
3251 "ld %0,Y" CR_TAB
3252 "subi r28,lo8(%o1)" CR_TAB
3253 "sbci r29,hi8(%o1)", op, plen, -5);
3255 else if (REGNO (XEXP (x, 0)) == REG_X)
3257 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3258 it but I have this situation with extremal optimizing options. */
3260 avr_asm_len ("adiw r26,%o1" CR_TAB
3261 "ld %0,X", op, plen, -2);
3263 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3264 && !reg_unused_after (insn, XEXP (x,0)))
3266 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3269 return "";
3272 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3275 return avr_asm_len ("ld %0,%1", op, plen, -1);
3278 static const char*
3279 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3281 rtx dest = op[0];
3282 rtx src = op[1];
3283 rtx base = XEXP (src, 0);
3284 int reg_dest = true_regnum (dest);
3285 int reg_base = true_regnum (base);
3286 /* "volatile" forces reading low byte first, even if less efficient,
3287 for correct operation with 16-bit I/O registers. */
3288 int mem_volatile_p = MEM_VOLATILE_P (src);
3290 if (reg_base > 0)
3292 if (reg_dest == reg_base) /* R = (R) */
3293 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3294 "ld %B0,%1" CR_TAB
3295 "mov %A0,__tmp_reg__", op, plen, -3);
3297 if (reg_base != REG_X)
3298 return avr_asm_len ("ld %A0,%1" CR_TAB
3299 "ldd %B0,%1+1", op, plen, -2);
3301 avr_asm_len ("ld %A0,X+" CR_TAB
3302 "ld %B0,X", op, plen, -2);
3304 if (!reg_unused_after (insn, base))
3305 avr_asm_len ("sbiw r26,1", op, plen, 1);
3307 return "";
3309 else if (GET_CODE (base) == PLUS) /* (R + i) */
3311 int disp = INTVAL (XEXP (base, 1));
3312 int reg_base = true_regnum (XEXP (base, 0));
3314 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3316 if (REGNO (XEXP (base, 0)) != REG_Y)
3317 fatal_insn ("incorrect insn:",insn);
3319 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3320 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3321 "ldd %A0,Y+62" CR_TAB
3322 "ldd %B0,Y+63" CR_TAB
3323 "sbiw r28,%o1-62", op, plen, -4)
3325 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3326 "sbci r29,hi8(-%o1)" CR_TAB
3327 "ld %A0,Y" CR_TAB
3328 "ldd %B0,Y+1" CR_TAB
3329 "subi r28,lo8(%o1)" CR_TAB
3330 "sbci r29,hi8(%o1)", op, plen, -6);
3333 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3334 it but I have this situation with extremal
3335 optimization options. */
3337 if (reg_base == REG_X)
3338 return reg_base == reg_dest
3339 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3340 "ld __tmp_reg__,X+" CR_TAB
3341 "ld %B0,X" CR_TAB
3342 "mov %A0,__tmp_reg__", op, plen, -4)
3344 : avr_asm_len ("adiw r26,%o1" CR_TAB
3345 "ld %A0,X+" CR_TAB
3346 "ld %B0,X" CR_TAB
3347 "sbiw r26,%o1+1", op, plen, -4);
3349 return reg_base == reg_dest
3350 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3351 "ldd %B0,%B1" CR_TAB
3352 "mov %A0,__tmp_reg__", op, plen, -3)
3354 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3355 "ldd %B0,%B1", op, plen, -2);
3357 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3359 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3360 fatal_insn ("incorrect insn:", insn);
3362 if (!mem_volatile_p)
3363 return avr_asm_len ("ld %B0,%1" CR_TAB
3364 "ld %A0,%1", op, plen, -2);
3366 return REGNO (XEXP (base, 0)) == REG_X
3367 ? avr_asm_len ("sbiw r26,2" CR_TAB
3368 "ld %A0,X+" CR_TAB
3369 "ld %B0,X" CR_TAB
3370 "sbiw r26,1", op, plen, -4)
3372 : avr_asm_len ("sbiw %r1,2" CR_TAB
3373 "ld %A0,%p1" CR_TAB
3374 "ldd %B0,%p1+1", op, plen, -3);
3376 else if (GET_CODE (base) == POST_INC) /* (R++) */
3378 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3379 fatal_insn ("incorrect insn:", insn);
3381 return avr_asm_len ("ld %A0,%1" CR_TAB
3382 "ld %B0,%1", op, plen, -2);
3384 else if (CONSTANT_ADDRESS_P (base))
3386 return optimize > 0 && io_address_operand (base, HImode)
3387 ? avr_asm_len ("in %A0,%i1" CR_TAB
3388 "in %B0,%i1+1", op, plen, -2)
3390 : avr_asm_len ("lds %A0,%m1" CR_TAB
3391 "lds %B0,%m1+1", op, plen, -4);
3394 fatal_insn ("unknown move insn:",insn);
3395 return "";
3398 static const char*
3399 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3401 rtx dest = op[0];
3402 rtx src = op[1];
3403 rtx base = XEXP (src, 0);
3404 int reg_dest = true_regnum (dest);
3405 int reg_base = true_regnum (base);
3406 int tmp;
3408 if (!l)
3409 l = &tmp;
3411 if (reg_base > 0)
3413 if (reg_base == REG_X) /* (R26) */
3415 if (reg_dest == REG_X)
3416 /* "ld r26,-X" is undefined */
3417 return *l=7, ("adiw r26,3" CR_TAB
3418 "ld r29,X" CR_TAB
3419 "ld r28,-X" CR_TAB
3420 "ld __tmp_reg__,-X" CR_TAB
3421 "sbiw r26,1" CR_TAB
3422 "ld r26,X" CR_TAB
3423 "mov r27,__tmp_reg__");
3424 else if (reg_dest == REG_X - 2)
3425 return *l=5, ("ld %A0,X+" CR_TAB
3426 "ld %B0,X+" CR_TAB
3427 "ld __tmp_reg__,X+" CR_TAB
3428 "ld %D0,X" CR_TAB
3429 "mov %C0,__tmp_reg__");
3430 else if (reg_unused_after (insn, base))
3431 return *l=4, ("ld %A0,X+" CR_TAB
3432 "ld %B0,X+" CR_TAB
3433 "ld %C0,X+" CR_TAB
3434 "ld %D0,X");
3435 else
3436 return *l=5, ("ld %A0,X+" CR_TAB
3437 "ld %B0,X+" CR_TAB
3438 "ld %C0,X+" CR_TAB
3439 "ld %D0,X" CR_TAB
3440 "sbiw r26,3");
3442 else
3444 if (reg_dest == reg_base)
3445 return *l=5, ("ldd %D0,%1+3" CR_TAB
3446 "ldd %C0,%1+2" CR_TAB
3447 "ldd __tmp_reg__,%1+1" CR_TAB
3448 "ld %A0,%1" CR_TAB
3449 "mov %B0,__tmp_reg__");
3450 else if (reg_base == reg_dest + 2)
3451 return *l=5, ("ld %A0,%1" CR_TAB
3452 "ldd %B0,%1+1" CR_TAB
3453 "ldd __tmp_reg__,%1+2" CR_TAB
3454 "ldd %D0,%1+3" CR_TAB
3455 "mov %C0,__tmp_reg__");
3456 else
3457 return *l=4, ("ld %A0,%1" CR_TAB
3458 "ldd %B0,%1+1" CR_TAB
3459 "ldd %C0,%1+2" CR_TAB
3460 "ldd %D0,%1+3");
3463 else if (GET_CODE (base) == PLUS) /* (R + i) */
3465 int disp = INTVAL (XEXP (base, 1));
3467 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3469 if (REGNO (XEXP (base, 0)) != REG_Y)
3470 fatal_insn ("incorrect insn:",insn);
3472 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3473 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3474 "ldd %A0,Y+60" CR_TAB
3475 "ldd %B0,Y+61" CR_TAB
3476 "ldd %C0,Y+62" CR_TAB
3477 "ldd %D0,Y+63" CR_TAB
3478 "sbiw r28,%o1-60");
3480 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3481 "sbci r29,hi8(-%o1)" CR_TAB
3482 "ld %A0,Y" CR_TAB
3483 "ldd %B0,Y+1" CR_TAB
3484 "ldd %C0,Y+2" CR_TAB
3485 "ldd %D0,Y+3" CR_TAB
3486 "subi r28,lo8(%o1)" CR_TAB
3487 "sbci r29,hi8(%o1)");
3490 reg_base = true_regnum (XEXP (base, 0));
3491 if (reg_base == REG_X)
3493 /* R = (X + d) */
3494 if (reg_dest == REG_X)
3496 *l = 7;
3497 /* "ld r26,-X" is undefined */
3498 return ("adiw r26,%o1+3" CR_TAB
3499 "ld r29,X" CR_TAB
3500 "ld r28,-X" CR_TAB
3501 "ld __tmp_reg__,-X" CR_TAB
3502 "sbiw r26,1" CR_TAB
3503 "ld r26,X" CR_TAB
3504 "mov r27,__tmp_reg__");
3506 *l = 6;
3507 if (reg_dest == REG_X - 2)
3508 return ("adiw r26,%o1" CR_TAB
3509 "ld r24,X+" CR_TAB
3510 "ld r25,X+" CR_TAB
3511 "ld __tmp_reg__,X+" CR_TAB
3512 "ld r27,X" CR_TAB
3513 "mov r26,__tmp_reg__");
3515 return ("adiw r26,%o1" CR_TAB
3516 "ld %A0,X+" CR_TAB
3517 "ld %B0,X+" CR_TAB
3518 "ld %C0,X+" CR_TAB
3519 "ld %D0,X" CR_TAB
3520 "sbiw r26,%o1+3");
3522 if (reg_dest == reg_base)
3523 return *l=5, ("ldd %D0,%D1" CR_TAB
3524 "ldd %C0,%C1" CR_TAB
3525 "ldd __tmp_reg__,%B1" CR_TAB
3526 "ldd %A0,%A1" CR_TAB
3527 "mov %B0,__tmp_reg__");
3528 else if (reg_dest == reg_base - 2)
3529 return *l=5, ("ldd %A0,%A1" CR_TAB
3530 "ldd %B0,%B1" CR_TAB
3531 "ldd __tmp_reg__,%C1" CR_TAB
3532 "ldd %D0,%D1" CR_TAB
3533 "mov %C0,__tmp_reg__");
3534 return *l=4, ("ldd %A0,%A1" CR_TAB
3535 "ldd %B0,%B1" CR_TAB
3536 "ldd %C0,%C1" CR_TAB
3537 "ldd %D0,%D1");
3539 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3540 return *l=4, ("ld %D0,%1" CR_TAB
3541 "ld %C0,%1" CR_TAB
3542 "ld %B0,%1" CR_TAB
3543 "ld %A0,%1");
3544 else if (GET_CODE (base) == POST_INC) /* (R++) */
3545 return *l=4, ("ld %A0,%1" CR_TAB
3546 "ld %B0,%1" CR_TAB
3547 "ld %C0,%1" CR_TAB
3548 "ld %D0,%1");
3549 else if (CONSTANT_ADDRESS_P (base))
3550 return *l=8, ("lds %A0,%m1" CR_TAB
3551 "lds %B0,%m1+1" CR_TAB
3552 "lds %C0,%m1+2" CR_TAB
3553 "lds %D0,%m1+3");
3555 fatal_insn ("unknown move insn:",insn);
3556 return "";
3559 static const char*
3560 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3562 rtx dest = op[0];
3563 rtx src = op[1];
3564 rtx base = XEXP (dest, 0);
3565 int reg_base = true_regnum (base);
3566 int reg_src = true_regnum (src);
3567 int tmp;
3569 if (!l)
3570 l = &tmp;
3572 if (CONSTANT_ADDRESS_P (base))
3573 return *l=8,("sts %m0,%A1" CR_TAB
3574 "sts %m0+1,%B1" CR_TAB
3575 "sts %m0+2,%C1" CR_TAB
3576 "sts %m0+3,%D1");
3577 if (reg_base > 0) /* (r) */
3579 if (reg_base == REG_X) /* (R26) */
3581 if (reg_src == REG_X)
3583 /* "st X+,r26" is undefined */
3584 if (reg_unused_after (insn, base))
3585 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3586 "st X,r26" CR_TAB
3587 "adiw r26,1" CR_TAB
3588 "st X+,__tmp_reg__" CR_TAB
3589 "st X+,r28" CR_TAB
3590 "st X,r29");
3591 else
3592 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3593 "st X,r26" CR_TAB
3594 "adiw r26,1" CR_TAB
3595 "st X+,__tmp_reg__" CR_TAB
3596 "st X+,r28" CR_TAB
3597 "st X,r29" CR_TAB
3598 "sbiw r26,3");
3600 else if (reg_base == reg_src + 2)
3602 if (reg_unused_after (insn, base))
3603 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3604 "mov __tmp_reg__,%D1" CR_TAB
3605 "st %0+,%A1" CR_TAB
3606 "st %0+,%B1" CR_TAB
3607 "st %0+,__zero_reg__" CR_TAB
3608 "st %0,__tmp_reg__" CR_TAB
3609 "clr __zero_reg__");
3610 else
3611 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3612 "mov __tmp_reg__,%D1" CR_TAB
3613 "st %0+,%A1" CR_TAB
3614 "st %0+,%B1" CR_TAB
3615 "st %0+,__zero_reg__" CR_TAB
3616 "st %0,__tmp_reg__" CR_TAB
3617 "clr __zero_reg__" CR_TAB
3618 "sbiw r26,3");
3620 return *l=5, ("st %0+,%A1" CR_TAB
3621 "st %0+,%B1" CR_TAB
3622 "st %0+,%C1" CR_TAB
3623 "st %0,%D1" CR_TAB
3624 "sbiw r26,3");
3626 else
3627 return *l=4, ("st %0,%A1" CR_TAB
3628 "std %0+1,%B1" CR_TAB
3629 "std %0+2,%C1" CR_TAB
3630 "std %0+3,%D1");
3632 else if (GET_CODE (base) == PLUS) /* (R + i) */
3634 int disp = INTVAL (XEXP (base, 1));
3635 reg_base = REGNO (XEXP (base, 0));
3636 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3638 if (reg_base != REG_Y)
3639 fatal_insn ("incorrect insn:",insn);
3641 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3642 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3643 "std Y+60,%A1" CR_TAB
3644 "std Y+61,%B1" CR_TAB
3645 "std Y+62,%C1" CR_TAB
3646 "std Y+63,%D1" CR_TAB
3647 "sbiw r28,%o0-60");
3649 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3650 "sbci r29,hi8(-%o0)" CR_TAB
3651 "st Y,%A1" CR_TAB
3652 "std Y+1,%B1" CR_TAB
3653 "std Y+2,%C1" CR_TAB
3654 "std Y+3,%D1" CR_TAB
3655 "subi r28,lo8(%o0)" CR_TAB
3656 "sbci r29,hi8(%o0)");
3658 if (reg_base == REG_X)
3660 /* (X + d) = R */
3661 if (reg_src == REG_X)
3663 *l = 9;
3664 return ("mov __tmp_reg__,r26" CR_TAB
3665 "mov __zero_reg__,r27" CR_TAB
3666 "adiw r26,%o0" CR_TAB
3667 "st X+,__tmp_reg__" CR_TAB
3668 "st X+,__zero_reg__" CR_TAB
3669 "st X+,r28" CR_TAB
3670 "st X,r29" CR_TAB
3671 "clr __zero_reg__" CR_TAB
3672 "sbiw r26,%o0+3");
3674 else if (reg_src == REG_X - 2)
3676 *l = 9;
3677 return ("mov __tmp_reg__,r26" CR_TAB
3678 "mov __zero_reg__,r27" CR_TAB
3679 "adiw r26,%o0" CR_TAB
3680 "st X+,r24" CR_TAB
3681 "st X+,r25" CR_TAB
3682 "st X+,__tmp_reg__" CR_TAB
3683 "st X,__zero_reg__" CR_TAB
3684 "clr __zero_reg__" CR_TAB
3685 "sbiw r26,%o0+3");
3687 *l = 6;
3688 return ("adiw r26,%o0" CR_TAB
3689 "st X+,%A1" CR_TAB
3690 "st X+,%B1" CR_TAB
3691 "st X+,%C1" CR_TAB
3692 "st X,%D1" CR_TAB
3693 "sbiw r26,%o0+3");
3695 return *l=4, ("std %A0,%A1" CR_TAB
3696 "std %B0,%B1" CR_TAB
3697 "std %C0,%C1" CR_TAB
3698 "std %D0,%D1");
3700 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3701 return *l=4, ("st %0,%D1" CR_TAB
3702 "st %0,%C1" CR_TAB
3703 "st %0,%B1" CR_TAB
3704 "st %0,%A1");
3705 else if (GET_CODE (base) == POST_INC) /* (R++) */
3706 return *l=4, ("st %0,%A1" CR_TAB
3707 "st %0,%B1" CR_TAB
3708 "st %0,%C1" CR_TAB
3709 "st %0,%D1");
3710 fatal_insn ("unknown move insn:",insn);
3711 return "";
3714 const char *
3715 output_movsisf (rtx insn, rtx operands[], int *l)
3717 int dummy;
3718 rtx dest = operands[0];
3719 rtx src = operands[1];
3720 int *real_l = l;
3722 if (avr_mem_flash_p (src)
3723 || avr_mem_flash_p (dest))
3725 return avr_out_lpm (insn, operands, real_l);
3728 if (!l)
3729 l = &dummy;
3731 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
3732 if (REG_P (dest))
3734 if (REG_P (src)) /* mov r,r */
3736 if (true_regnum (dest) > true_regnum (src))
3738 if (AVR_HAVE_MOVW)
3740 *l = 2;
3741 return ("movw %C0,%C1" CR_TAB
3742 "movw %A0,%A1");
3744 *l = 4;
3745 return ("mov %D0,%D1" CR_TAB
3746 "mov %C0,%C1" CR_TAB
3747 "mov %B0,%B1" CR_TAB
3748 "mov %A0,%A1");
3750 else
3752 if (AVR_HAVE_MOVW)
3754 *l = 2;
3755 return ("movw %A0,%A1" CR_TAB
3756 "movw %C0,%C1");
3758 *l = 4;
3759 return ("mov %A0,%A1" CR_TAB
3760 "mov %B0,%B1" CR_TAB
3761 "mov %C0,%C1" CR_TAB
3762 "mov %D0,%D1");
3765 else if (CONSTANT_P (src))
3767 return output_reload_insisf (operands, NULL_RTX, real_l);
3769 else if (MEM_P (src))
3770 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3772 else if (MEM_P (dest))
3774 const char *templ;
3776 if (src == CONST0_RTX (GET_MODE (dest)))
3777 operands[1] = zero_reg_rtx;
3779 templ = out_movsi_mr_r (insn, operands, real_l);
3781 if (!real_l)
3782 output_asm_insn (templ, operands);
3784 operands[1] = src;
3785 return "";
3787 fatal_insn ("invalid insn:", insn);
3788 return "";
3792 /* Handle loads of 24-bit types from memory to register. */
3794 static const char*
3795 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3797 rtx dest = op[0];
3798 rtx src = op[1];
3799 rtx base = XEXP (src, 0);
3800 int reg_dest = true_regnum (dest);
3801 int reg_base = true_regnum (base);
3803 if (reg_base > 0)
3805 if (reg_base == REG_X) /* (R26) */
3807 if (reg_dest == REG_X)
3808 /* "ld r26,-X" is undefined */
3809 return avr_asm_len ("adiw r26,2" CR_TAB
3810 "ld r28,X" CR_TAB
3811 "ld __tmp_reg__,-X" CR_TAB
3812 "sbiw r26,1" CR_TAB
3813 "ld r26,X" CR_TAB
3814 "mov r27,__tmp_reg__", op, plen, -6);
3815 else
3817 avr_asm_len ("ld %A0,X+" CR_TAB
3818 "ld %B0,X+" CR_TAB
3819 "ld %C0,X", op, plen, -3);
3821 if (reg_dest != REG_X - 2
3822 && !reg_unused_after (insn, base))
3824 avr_asm_len ("sbiw r26,2", op, plen, 1);
3827 return "";
3830 else /* reg_base != REG_X */
3832 if (reg_dest == reg_base)
3833 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3834 "ldd __tmp_reg__,%1+1" CR_TAB
3835 "ld %A0,%1" CR_TAB
3836 "mov %B0,__tmp_reg__", op, plen, -4);
3837 else
3838 return avr_asm_len ("ld %A0,%1" CR_TAB
3839 "ldd %B0,%1+1" CR_TAB
3840 "ldd %C0,%1+2", op, plen, -3);
3843 else if (GET_CODE (base) == PLUS) /* (R + i) */
3845 int disp = INTVAL (XEXP (base, 1));
3847 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3849 if (REGNO (XEXP (base, 0)) != REG_Y)
3850 fatal_insn ("incorrect insn:",insn);
3852 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3853 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3854 "ldd %A0,Y+61" CR_TAB
3855 "ldd %B0,Y+62" CR_TAB
3856 "ldd %C0,Y+63" CR_TAB
3857 "sbiw r28,%o1-61", op, plen, -5);
3859 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3860 "sbci r29,hi8(-%o1)" CR_TAB
3861 "ld %A0,Y" CR_TAB
3862 "ldd %B0,Y+1" CR_TAB
3863 "ldd %C0,Y+2" CR_TAB
3864 "subi r28,lo8(%o1)" CR_TAB
3865 "sbci r29,hi8(%o1)", op, plen, -7);
3868 reg_base = true_regnum (XEXP (base, 0));
3869 if (reg_base == REG_X)
3871 /* R = (X + d) */
3872 if (reg_dest == REG_X)
3874 /* "ld r26,-X" is undefined */
3875 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3876 "ld r28,X" CR_TAB
3877 "ld __tmp_reg__,-X" CR_TAB
3878 "sbiw r26,1" CR_TAB
3879 "ld r26,X" CR_TAB
3880 "mov r27,__tmp_reg__", op, plen, -6);
3883 avr_asm_len ("adiw r26,%o1" CR_TAB
3884 "ld %A0,X+" CR_TAB
3885 "ld %B0,X+" CR_TAB
3886 "ld %C0,X", op, plen, -4);
3888 if (reg_dest != REG_W
3889 && !reg_unused_after (insn, XEXP (base, 0)))
3890 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3892 return "";
3895 if (reg_dest == reg_base)
3896 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3897 "ldd __tmp_reg__,%B1" CR_TAB
3898 "ldd %A0,%A1" CR_TAB
3899 "mov %B0,__tmp_reg__", op, plen, -4);
3901 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3902 "ldd %B0,%B1" CR_TAB
3903 "ldd %C0,%C1", op, plen, -3);
3905 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3906 return avr_asm_len ("ld %C0,%1" CR_TAB
3907 "ld %B0,%1" CR_TAB
3908 "ld %A0,%1", op, plen, -3);
3909 else if (GET_CODE (base) == POST_INC) /* (R++) */
3910 return avr_asm_len ("ld %A0,%1" CR_TAB
3911 "ld %B0,%1" CR_TAB
3912 "ld %C0,%1", op, plen, -3);
3914 else if (CONSTANT_ADDRESS_P (base))
3915 return avr_asm_len ("lds %A0,%m1" CR_TAB
3916 "lds %B0,%m1+1" CR_TAB
3917 "lds %C0,%m1+2", op, plen , -6);
3919 fatal_insn ("unknown move insn:",insn);
3920 return "";
3923 /* Handle store of 24-bit type from register or zero to memory. */
3925 static const char*
3926 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3928 rtx dest = op[0];
3929 rtx src = op[1];
3930 rtx base = XEXP (dest, 0);
3931 int reg_base = true_regnum (base);
3933 if (CONSTANT_ADDRESS_P (base))
3934 return avr_asm_len ("sts %m0,%A1" CR_TAB
3935 "sts %m0+1,%B1" CR_TAB
3936 "sts %m0+2,%C1", op, plen, -6);
3938 if (reg_base > 0) /* (r) */
3940 if (reg_base == REG_X) /* (R26) */
3942 gcc_assert (!reg_overlap_mentioned_p (base, src));
3944 avr_asm_len ("st %0+,%A1" CR_TAB
3945 "st %0+,%B1" CR_TAB
3946 "st %0,%C1", op, plen, -3);
3948 if (!reg_unused_after (insn, base))
3949 avr_asm_len ("sbiw r26,2", op, plen, 1);
3951 return "";
3953 else
3954 return avr_asm_len ("st %0,%A1" CR_TAB
3955 "std %0+1,%B1" CR_TAB
3956 "std %0+2,%C1", op, plen, -3);
3958 else if (GET_CODE (base) == PLUS) /* (R + i) */
3960 int disp = INTVAL (XEXP (base, 1));
3961 reg_base = REGNO (XEXP (base, 0));
3963 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3965 if (reg_base != REG_Y)
3966 fatal_insn ("incorrect insn:",insn);
3968 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3969 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3970 "std Y+61,%A1" CR_TAB
3971 "std Y+62,%B1" CR_TAB
3972 "std Y+63,%C1" CR_TAB
3973 "sbiw r28,%o0-60", op, plen, -5);
3975 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3976 "sbci r29,hi8(-%o0)" CR_TAB
3977 "st Y,%A1" CR_TAB
3978 "std Y+1,%B1" CR_TAB
3979 "std Y+2,%C1" CR_TAB
3980 "subi r28,lo8(%o0)" CR_TAB
3981 "sbci r29,hi8(%o0)", op, plen, -7);
3983 if (reg_base == REG_X)
3985 /* (X + d) = R */
3986 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3988 avr_asm_len ("adiw r26,%o0" CR_TAB
3989 "st X+,%A1" CR_TAB
3990 "st X+,%B1" CR_TAB
3991 "st X,%C1", op, plen, -4);
3993 if (!reg_unused_after (insn, XEXP (base, 0)))
3994 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3996 return "";
3999 return avr_asm_len ("std %A0,%A1" CR_TAB
4000 "std %B0,%B1" CR_TAB
4001 "std %C0,%C1", op, plen, -3);
4003 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4004 return avr_asm_len ("st %0,%C1" CR_TAB
4005 "st %0,%B1" CR_TAB
4006 "st %0,%A1", op, plen, -3);
4007 else if (GET_CODE (base) == POST_INC) /* (R++) */
4008 return avr_asm_len ("st %0,%A1" CR_TAB
4009 "st %0,%B1" CR_TAB
4010 "st %0,%C1", op, plen, -3);
4012 fatal_insn ("unknown move insn:",insn);
4013 return "";
4017 /* Move around 24-bit stuff. */
4019 const char *
4020 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4022 rtx dest = op[0];
4023 rtx src = op[1];
4025 if (avr_mem_flash_p (src)
4026 || avr_mem_flash_p (dest))
4028 return avr_out_lpm (insn, op, plen);
4031 if (register_operand (dest, VOIDmode))
4033 if (register_operand (src, VOIDmode)) /* mov r,r */
4035 if (true_regnum (dest) > true_regnum (src))
4037 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4039 if (AVR_HAVE_MOVW)
4040 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4041 else
4042 return avr_asm_len ("mov %B0,%B1" CR_TAB
4043 "mov %A0,%A1", op, plen, 2);
4045 else
4047 if (AVR_HAVE_MOVW)
4048 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4049 else
4050 avr_asm_len ("mov %A0,%A1" CR_TAB
4051 "mov %B0,%B1", op, plen, -2);
4053 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4056 else if (CONSTANT_P (src))
4058 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4060 else if (MEM_P (src))
4061 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4063 else if (MEM_P (dest))
4065 rtx xop[2];
4067 xop[0] = dest;
4068 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4070 return avr_out_store_psi (insn, xop, plen);
4073 fatal_insn ("invalid insn:", insn);
4074 return "";
4078 static const char*
4079 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
4081 rtx dest = op[0];
4082 rtx src = op[1];
4083 rtx x = XEXP (dest, 0);
4085 if (CONSTANT_ADDRESS_P (x))
4087 return optimize > 0 && io_address_operand (x, QImode)
4088 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4089 : avr_asm_len ("sts %m0,%1", op, plen, -2);
4091 else if (GET_CODE (x) == PLUS
4092 && REG_P (XEXP (x, 0))
4093 && CONST_INT_P (XEXP (x, 1)))
4095 /* memory access by reg+disp */
4097 int disp = INTVAL (XEXP (x, 1));
4099 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4101 if (REGNO (XEXP (x, 0)) != REG_Y)
4102 fatal_insn ("incorrect insn:",insn);
4104 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4105 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4106 "std Y+63,%1" CR_TAB
4107 "sbiw r28,%o0-63", op, plen, -3);
4109 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4110 "sbci r29,hi8(-%o0)" CR_TAB
4111 "st Y,%1" CR_TAB
4112 "subi r28,lo8(%o0)" CR_TAB
4113 "sbci r29,hi8(%o0)", op, plen, -5);
4115 else if (REGNO (XEXP (x,0)) == REG_X)
4117 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4119 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4120 "adiw r26,%o0" CR_TAB
4121 "st X,__tmp_reg__", op, plen, -3);
4123 else
4125 avr_asm_len ("adiw r26,%o0" CR_TAB
4126 "st X,%1", op, plen, -2);
4129 if (!reg_unused_after (insn, XEXP (x,0)))
4130 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4132 return "";
4135 return avr_asm_len ("std %0,%1", op, plen, -1);
4138 return avr_asm_len ("st %0,%1", op, plen, -1);
4142 /* Helper for the next function for XMEGA. It does the same
4143 but with low byte first. */
4145 static const char*
4146 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
4148 rtx dest = op[0];
4149 rtx src = op[1];
4150 rtx base = XEXP (dest, 0);
4151 int reg_base = true_regnum (base);
4152 int reg_src = true_regnum (src);
4154 /* "volatile" forces writing low byte first, even if less efficient,
4155 for correct operation with 16-bit I/O registers like SP. */
4156 int mem_volatile_p = MEM_VOLATILE_P (dest);
4158 if (CONSTANT_ADDRESS_P (base))
4159 return optimize > 0 && io_address_operand (base, HImode)
4160 ? avr_asm_len ("out %i0,%A1" CR_TAB
4161 "out %i0+1,%B1", op, plen, -2)
4163 : avr_asm_len ("sts %m0,%A1" CR_TAB
4164 "sts %m0+1,%B1", op, plen, -4);
4166 if (reg_base > 0)
4168 if (reg_base != REG_X)
4169 return avr_asm_len ("st %0,%A1" CR_TAB
4170 "std %0+1,%B1", op, plen, -2);
4172 if (reg_src == REG_X)
4173 /* "st X+,r26" and "st -X,r26" are undefined. */
4174 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4175 "st X,r26" CR_TAB
4176 "adiw r26,1" CR_TAB
4177 "st X,__tmp_reg__", op, plen, -4);
4178 else
4179 avr_asm_len ("st X+,%A1" CR_TAB
4180 "st X,%B1", op, plen, -2);
4182 return reg_unused_after (insn, base)
4183 ? ""
4184 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4186 else if (GET_CODE (base) == PLUS)
4188 int disp = INTVAL (XEXP (base, 1));
4189 reg_base = REGNO (XEXP (base, 0));
4190 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4192 if (reg_base != REG_Y)
4193 fatal_insn ("incorrect insn:",insn);
4195 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4196 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4197 "std Y+62,%A1" CR_TAB
4198 "std Y+63,%B1" CR_TAB
4199 "sbiw r28,%o0-62", op, plen, -4)
4201 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4202 "sbci r29,hi8(-%o0)" CR_TAB
4203 "st Y,%A1" CR_TAB
4204 "std Y+1,%B1" CR_TAB
4205 "subi r28,lo8(%o0)" CR_TAB
4206 "sbci r29,hi8(%o0)", op, plen, -6);
4209 if (reg_base != REG_X)
4210 return avr_asm_len ("std %A0,%A1" CR_TAB
4211 "std %B0,%B1", op, plen, -2);
4212 /* (X + d) = R */
4213 return reg_src == REG_X
4214 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4215 "mov __zero_reg__,r27" CR_TAB
4216 "adiw r26,%o0" CR_TAB
4217 "st X+,__tmp_reg__" CR_TAB
4218 "st X,__zero_reg__" CR_TAB
4219 "clr __zero_reg__" CR_TAB
4220 "sbiw r26,%o0+1", op, plen, -7)
4222 : avr_asm_len ("adiw r26,%o0" CR_TAB
4223 "st X+,%A1" CR_TAB
4224 "st X,%B1" CR_TAB
4225 "sbiw r26,%o0+1", op, plen, -4);
4227 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4229 if (!mem_volatile_p)
4230 return avr_asm_len ("st %0,%B1" CR_TAB
4231 "st %0,%A1", op, plen, -2);
4233 return REGNO (XEXP (base, 0)) == REG_X
4234 ? avr_asm_len ("sbiw r26,2" CR_TAB
4235 "st X+,%A1" CR_TAB
4236 "st X,%B1" CR_TAB
4237 "sbiw r26,1", op, plen, -4)
4239 : avr_asm_len ("sbiw %r0,2" CR_TAB
4240 "st %p0,%A1" CR_TAB
4241 "std %p0+1,%B1", op, plen, -3);
4243 else if (GET_CODE (base) == POST_INC) /* (R++) */
4245 return avr_asm_len ("st %0,%A1" CR_TAB
4246 "st %0,%B1", op, plen, -2);
4249 fatal_insn ("unknown move insn:",insn);
4250 return "";
4254 static const char*
4255 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
4257 rtx dest = op[0];
4258 rtx src = op[1];
4259 rtx base = XEXP (dest, 0);
4260 int reg_base = true_regnum (base);
4261 int reg_src = true_regnum (src);
4262 int mem_volatile_p;
4264 /* "volatile" forces writing high-byte first (no-xmega) resp.
4265 low-byte first (xmega) even if less efficient, for correct
4266 operation with 16-bit I/O registers like. */
4268 if (AVR_XMEGA)
4269 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4271 mem_volatile_p = MEM_VOLATILE_P (dest);
4273 if (CONSTANT_ADDRESS_P (base))
4274 return optimize > 0 && io_address_operand (base, HImode)
4275 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4276 "out %i0,%A1", op, plen, -2)
4278 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4279 "sts %m0,%A1", op, plen, -4);
4281 if (reg_base > 0)
4283 if (reg_base != REG_X)
4284 return avr_asm_len ("std %0+1,%B1" CR_TAB
4285 "st %0,%A1", op, plen, -2);
4287 if (reg_src == REG_X)
4288 /* "st X+,r26" and "st -X,r26" are undefined. */
4289 return !mem_volatile_p && reg_unused_after (insn, src)
4290 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4291 "st X,r26" CR_TAB
4292 "adiw r26,1" CR_TAB
4293 "st X,__tmp_reg__", op, plen, -4)
4295 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4296 "adiw r26,1" CR_TAB
4297 "st X,__tmp_reg__" CR_TAB
4298 "sbiw r26,1" CR_TAB
4299 "st X,r26", op, plen, -5);
4301 return !mem_volatile_p && reg_unused_after (insn, base)
4302 ? avr_asm_len ("st X+,%A1" CR_TAB
4303 "st X,%B1", op, plen, -2)
4304 : avr_asm_len ("adiw r26,1" CR_TAB
4305 "st X,%B1" CR_TAB
4306 "st -X,%A1", op, plen, -3);
4308 else if (GET_CODE (base) == PLUS)
4310 int disp = INTVAL (XEXP (base, 1));
4311 reg_base = REGNO (XEXP (base, 0));
4312 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4314 if (reg_base != REG_Y)
4315 fatal_insn ("incorrect insn:",insn);
4317 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4318 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4319 "std Y+63,%B1" CR_TAB
4320 "std Y+62,%A1" CR_TAB
4321 "sbiw r28,%o0-62", op, plen, -4)
4323 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4324 "sbci r29,hi8(-%o0)" CR_TAB
4325 "std Y+1,%B1" CR_TAB
4326 "st Y,%A1" CR_TAB
4327 "subi r28,lo8(%o0)" CR_TAB
4328 "sbci r29,hi8(%o0)", op, plen, -6);
4331 if (reg_base != REG_X)
4332 return avr_asm_len ("std %B0,%B1" CR_TAB
4333 "std %A0,%A1", op, plen, -2);
4334 /* (X + d) = R */
4335 return reg_src == REG_X
4336 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4337 "mov __zero_reg__,r27" CR_TAB
4338 "adiw r26,%o0+1" CR_TAB
4339 "st X,__zero_reg__" CR_TAB
4340 "st -X,__tmp_reg__" CR_TAB
4341 "clr __zero_reg__" CR_TAB
4342 "sbiw r26,%o0", op, plen, -7)
4344 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4345 "st X,%B1" CR_TAB
4346 "st -X,%A1" CR_TAB
4347 "sbiw r26,%o0", op, plen, -4);
4349 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4351 return avr_asm_len ("st %0,%B1" CR_TAB
4352 "st %0,%A1", op, plen, -2);
4354 else if (GET_CODE (base) == POST_INC) /* (R++) */
4356 if (!mem_volatile_p)
4357 return avr_asm_len ("st %0,%A1" CR_TAB
4358 "st %0,%B1", op, plen, -2);
4360 return REGNO (XEXP (base, 0)) == REG_X
4361 ? avr_asm_len ("adiw r26,1" CR_TAB
4362 "st X,%B1" CR_TAB
4363 "st -X,%A1" CR_TAB
4364 "adiw r26,2", op, plen, -4)
4366 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4367 "st %p0,%A1" CR_TAB
4368 "adiw %r0,2", op, plen, -3);
4370 fatal_insn ("unknown move insn:",insn);
4371 return "";
4374 /* Return 1 if frame pointer for current function required. */
4376 static bool
4377 avr_frame_pointer_required_p (void)
4379 return (cfun->calls_alloca
4380 || cfun->calls_setjmp
4381 || cfun->has_nonlocal_label
4382 || crtl->args.info.nregs == 0
4383 || get_frame_size () > 0);
4386 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4388 static RTX_CODE
4389 compare_condition (rtx insn)
4391 rtx next = next_real_insn (insn);
4393 if (next && JUMP_P (next))
4395 rtx pat = PATTERN (next);
4396 rtx src = SET_SRC (pat);
4398 if (IF_THEN_ELSE == GET_CODE (src))
4399 return GET_CODE (XEXP (src, 0));
4402 return UNKNOWN;
4406 /* Returns true iff INSN is a tst insn that only tests the sign. */
4408 static bool
4409 compare_sign_p (rtx insn)
4411 RTX_CODE cond = compare_condition (insn);
4412 return (cond == GE || cond == LT);
4416 /* Returns true iff the next insn is a JUMP_INSN with a condition
4417 that needs to be swapped (GT, GTU, LE, LEU). */
4419 static bool
4420 compare_diff_p (rtx insn)
4422 RTX_CODE cond = compare_condition (insn);
4423 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4426 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4428 static bool
4429 compare_eq_p (rtx insn)
4431 RTX_CODE cond = compare_condition (insn);
4432 return (cond == EQ || cond == NE);
4436 /* Output compare instruction
4438 compare (XOP[0], XOP[1])
4440 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4441 XOP[2] is an 8-bit scratch register as needed.
4443 PLEN == NULL: Output instructions.
4444 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4445 Don't output anything. */
4447 const char*
4448 avr_out_compare (rtx insn, rtx *xop, int *plen)
4450 /* Register to compare and value to compare against. */
4451 rtx xreg = xop[0];
4452 rtx xval = xop[1];
4454 /* MODE of the comparison. */
4455 enum machine_mode mode;
4457 /* Number of bytes to operate on. */
4458 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4460 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4461 int clobber_val = -1;
4463 /* Map fixed mode operands to integer operands with the same binary
4464 representation. They are easier to handle in the remainder. */
4466 if (CONST_FIXED_P (xval))
4468 xreg = avr_to_int_mode (xop[0]);
4469 xval = avr_to_int_mode (xop[1]);
4472 mode = GET_MODE (xreg);
4474 gcc_assert (REG_P (xreg));
4475 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4476 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4478 if (plen)
4479 *plen = 0;
4481 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4482 against 0 by ORing the bytes. This is one instruction shorter.
4483 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4484 and therefore don't use this. */
4486 if (!test_hard_reg_class (LD_REGS, xreg)
4487 && compare_eq_p (insn)
4488 && reg_unused_after (insn, xreg))
4490 if (xval == const1_rtx)
4492 avr_asm_len ("dec %A0" CR_TAB
4493 "or %A0,%B0", xop, plen, 2);
4495 if (n_bytes >= 3)
4496 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4498 if (n_bytes >= 4)
4499 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4501 return "";
4503 else if (xval == constm1_rtx)
4505 if (n_bytes >= 4)
4506 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4508 if (n_bytes >= 3)
4509 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4511 return avr_asm_len ("and %A0,%B0" CR_TAB
4512 "com %A0", xop, plen, 2);
4516 for (i = 0; i < n_bytes; i++)
4518 /* We compare byte-wise. */
4519 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4520 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4522 /* 8-bit value to compare with this byte. */
4523 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4525 /* Registers R16..R31 can operate with immediate. */
4526 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4528 xop[0] = reg8;
4529 xop[1] = gen_int_mode (val8, QImode);
4531 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4533 if (i == 0
4534 && test_hard_reg_class (ADDW_REGS, reg8))
4536 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4538 if (IN_RANGE (val16, 0, 63)
4539 && (val8 == 0
4540 || reg_unused_after (insn, xreg)))
4542 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4543 i++;
4544 continue;
4547 if (n_bytes == 2
4548 && IN_RANGE (val16, -63, -1)
4549 && compare_eq_p (insn)
4550 && reg_unused_after (insn, xreg))
4552 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4556 /* Comparing against 0 is easy. */
4558 if (val8 == 0)
4560 avr_asm_len (i == 0
4561 ? "cp %0,__zero_reg__"
4562 : "cpc %0,__zero_reg__", xop, plen, 1);
4563 continue;
4566 /* Upper registers can compare and subtract-with-carry immediates.
4567 Notice that compare instructions do the same as respective subtract
4568 instruction; the only difference is that comparisons don't write
4569 the result back to the target register. */
4571 if (ld_reg_p)
4573 if (i == 0)
4575 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4576 continue;
4578 else if (reg_unused_after (insn, xreg))
4580 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4581 continue;
4585 /* Must load the value into the scratch register. */
4587 gcc_assert (REG_P (xop[2]));
4589 if (clobber_val != (int) val8)
4590 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4591 clobber_val = (int) val8;
4593 avr_asm_len (i == 0
4594 ? "cp %0,%2"
4595 : "cpc %0,%2", xop, plen, 1);
4598 return "";
4602 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4604 const char*
4605 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4607 rtx xop[3];
4609 xop[0] = gen_rtx_REG (DImode, 18);
4610 xop[1] = op[0];
4611 xop[2] = op[1];
4613 return avr_out_compare (insn, xop, plen);
4616 /* Output test instruction for HImode. */
4618 const char*
4619 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4621 if (compare_sign_p (insn))
4623 avr_asm_len ("tst %B0", op, plen, -1);
4625 else if (reg_unused_after (insn, op[0])
4626 && compare_eq_p (insn))
4628 /* Faster than sbiw if we can clobber the operand. */
4629 avr_asm_len ("or %A0,%B0", op, plen, -1);
4631 else
4633 avr_out_compare (insn, op, plen);
4636 return "";
4640 /* Output test instruction for PSImode. */
4642 const char*
4643 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4645 if (compare_sign_p (insn))
4647 avr_asm_len ("tst %C0", op, plen, -1);
4649 else if (reg_unused_after (insn, op[0])
4650 && compare_eq_p (insn))
4652 /* Faster than sbiw if we can clobber the operand. */
4653 avr_asm_len ("or %A0,%B0" CR_TAB
4654 "or %A0,%C0", op, plen, -2);
4656 else
4658 avr_out_compare (insn, op, plen);
4661 return "";
4665 /* Output test instruction for SImode. */
4667 const char*
4668 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4670 if (compare_sign_p (insn))
4672 avr_asm_len ("tst %D0", op, plen, -1);
4674 else if (reg_unused_after (insn, op[0])
4675 && compare_eq_p (insn))
4677 /* Faster than sbiw if we can clobber the operand. */
4678 avr_asm_len ("or %A0,%B0" CR_TAB
4679 "or %A0,%C0" CR_TAB
4680 "or %A0,%D0", op, plen, -3);
4682 else
4684 avr_out_compare (insn, op, plen);
4687 return "";
4691 /* Generate asm equivalent for various shifts. This only handles cases
4692 that are not already carefully hand-optimized in ?sh??i3_out.
4694 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4695 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4696 OPERANDS[3] is a QImode scratch register from LD regs if
4697 available and SCRATCH, otherwise (no scratch available)
4699 TEMPL is an assembler template that shifts by one position.
4700 T_LEN is the length of this template. */
4702 void
4703 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4704 int *plen, int t_len)
4706 bool second_label = true;
4707 bool saved_in_tmp = false;
4708 bool use_zero_reg = false;
4709 rtx op[5];
4711 op[0] = operands[0];
4712 op[1] = operands[1];
4713 op[2] = operands[2];
4714 op[3] = operands[3];
4716 if (plen)
4717 *plen = 0;
4719 if (CONST_INT_P (operands[2]))
4721 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4722 && REG_P (operands[3]));
4723 int count = INTVAL (operands[2]);
4724 int max_len = 10; /* If larger than this, always use a loop. */
4726 if (count <= 0)
4727 return;
4729 if (count < 8 && !scratch)
4730 use_zero_reg = true;
4732 if (optimize_size)
4733 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4735 if (t_len * count <= max_len)
4737 /* Output shifts inline with no loop - faster. */
4739 while (count-- > 0)
4740 avr_asm_len (templ, op, plen, t_len);
4742 return;
4745 if (scratch)
4747 avr_asm_len ("ldi %3,%2", op, plen, 1);
4749 else if (use_zero_reg)
4751 /* Hack to save one word: use __zero_reg__ as loop counter.
4752 Set one bit, then shift in a loop until it is 0 again. */
4754 op[3] = zero_reg_rtx;
4756 avr_asm_len ("set" CR_TAB
4757 "bld %3,%2-1", op, plen, 2);
4759 else
4761 /* No scratch register available, use one from LD_REGS (saved in
4762 __tmp_reg__) that doesn't overlap with registers to shift. */
4764 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4765 op[4] = tmp_reg_rtx;
4766 saved_in_tmp = true;
4768 avr_asm_len ("mov %4,%3" CR_TAB
4769 "ldi %3,%2", op, plen, 2);
4772 second_label = false;
4774 else if (MEM_P (op[2]))
4776 rtx op_mov[2];
4778 op_mov[0] = op[3] = tmp_reg_rtx;
4779 op_mov[1] = op[2];
4781 out_movqi_r_mr (insn, op_mov, plen);
4783 else if (register_operand (op[2], QImode))
4785 op[3] = op[2];
4787 if (!reg_unused_after (insn, op[2])
4788 || reg_overlap_mentioned_p (op[0], op[2]))
4790 op[3] = tmp_reg_rtx;
4791 avr_asm_len ("mov %3,%2", op, plen, 1);
4794 else
4795 fatal_insn ("bad shift insn:", insn);
4797 if (second_label)
4798 avr_asm_len ("rjmp 2f", op, plen, 1);
4800 avr_asm_len ("1:", op, plen, 0);
4801 avr_asm_len (templ, op, plen, t_len);
4803 if (second_label)
4804 avr_asm_len ("2:", op, plen, 0);
4806 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4807 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4809 if (saved_in_tmp)
4810 avr_asm_len ("mov %3,%4", op, plen, 1);
4814 /* 8bit shift left ((char)x << i) */
4816 const char *
4817 ashlqi3_out (rtx insn, rtx operands[], int *len)
4819 if (GET_CODE (operands[2]) == CONST_INT)
4821 int k;
4823 if (!len)
4824 len = &k;
4826 switch (INTVAL (operands[2]))
4828 default:
4829 if (INTVAL (operands[2]) < 8)
4830 break;
4832 *len = 1;
4833 return "clr %0";
4835 case 1:
4836 *len = 1;
4837 return "lsl %0";
4839 case 2:
4840 *len = 2;
4841 return ("lsl %0" CR_TAB
4842 "lsl %0");
4844 case 3:
4845 *len = 3;
4846 return ("lsl %0" CR_TAB
4847 "lsl %0" CR_TAB
4848 "lsl %0");
4850 case 4:
4851 if (test_hard_reg_class (LD_REGS, operands[0]))
4853 *len = 2;
4854 return ("swap %0" CR_TAB
4855 "andi %0,0xf0");
4857 *len = 4;
4858 return ("lsl %0" CR_TAB
4859 "lsl %0" CR_TAB
4860 "lsl %0" CR_TAB
4861 "lsl %0");
4863 case 5:
4864 if (test_hard_reg_class (LD_REGS, operands[0]))
4866 *len = 3;
4867 return ("swap %0" CR_TAB
4868 "lsl %0" CR_TAB
4869 "andi %0,0xe0");
4871 *len = 5;
4872 return ("lsl %0" CR_TAB
4873 "lsl %0" CR_TAB
4874 "lsl %0" CR_TAB
4875 "lsl %0" CR_TAB
4876 "lsl %0");
4878 case 6:
4879 if (test_hard_reg_class (LD_REGS, operands[0]))
4881 *len = 4;
4882 return ("swap %0" CR_TAB
4883 "lsl %0" CR_TAB
4884 "lsl %0" CR_TAB
4885 "andi %0,0xc0");
4887 *len = 6;
4888 return ("lsl %0" CR_TAB
4889 "lsl %0" CR_TAB
4890 "lsl %0" CR_TAB
4891 "lsl %0" CR_TAB
4892 "lsl %0" CR_TAB
4893 "lsl %0");
4895 case 7:
4896 *len = 3;
4897 return ("ror %0" CR_TAB
4898 "clr %0" CR_TAB
4899 "ror %0");
4902 else if (CONSTANT_P (operands[2]))
4903 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4905 out_shift_with_cnt ("lsl %0",
4906 insn, operands, len, 1);
4907 return "";
4911 /* 16bit shift left ((short)x << i) */
4913 const char *
4914 ashlhi3_out (rtx insn, rtx operands[], int *len)
4916 if (GET_CODE (operands[2]) == CONST_INT)
4918 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4919 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4920 int k;
4921 int *t = len;
4923 if (!len)
4924 len = &k;
4926 switch (INTVAL (operands[2]))
4928 default:
4929 if (INTVAL (operands[2]) < 16)
4930 break;
4932 *len = 2;
4933 return ("clr %B0" CR_TAB
4934 "clr %A0");
4936 case 4:
4937 if (optimize_size && scratch)
4938 break; /* 5 */
4939 if (ldi_ok)
4941 *len = 6;
4942 return ("swap %A0" CR_TAB
4943 "swap %B0" CR_TAB
4944 "andi %B0,0xf0" CR_TAB
4945 "eor %B0,%A0" CR_TAB
4946 "andi %A0,0xf0" CR_TAB
4947 "eor %B0,%A0");
4949 if (scratch)
4951 *len = 7;
4952 return ("swap %A0" CR_TAB
4953 "swap %B0" CR_TAB
4954 "ldi %3,0xf0" CR_TAB
4955 "and %B0,%3" CR_TAB
4956 "eor %B0,%A0" CR_TAB
4957 "and %A0,%3" CR_TAB
4958 "eor %B0,%A0");
4960 break; /* optimize_size ? 6 : 8 */
4962 case 5:
4963 if (optimize_size)
4964 break; /* scratch ? 5 : 6 */
4965 if (ldi_ok)
4967 *len = 8;
4968 return ("lsl %A0" CR_TAB
4969 "rol %B0" CR_TAB
4970 "swap %A0" CR_TAB
4971 "swap %B0" CR_TAB
4972 "andi %B0,0xf0" CR_TAB
4973 "eor %B0,%A0" CR_TAB
4974 "andi %A0,0xf0" CR_TAB
4975 "eor %B0,%A0");
4977 if (scratch)
4979 *len = 9;
4980 return ("lsl %A0" CR_TAB
4981 "rol %B0" CR_TAB
4982 "swap %A0" CR_TAB
4983 "swap %B0" CR_TAB
4984 "ldi %3,0xf0" CR_TAB
4985 "and %B0,%3" CR_TAB
4986 "eor %B0,%A0" CR_TAB
4987 "and %A0,%3" CR_TAB
4988 "eor %B0,%A0");
4990 break; /* 10 */
4992 case 6:
4993 if (optimize_size)
4994 break; /* scratch ? 5 : 6 */
4995 *len = 9;
4996 return ("clr __tmp_reg__" CR_TAB
4997 "lsr %B0" CR_TAB
4998 "ror %A0" CR_TAB
4999 "ror __tmp_reg__" CR_TAB
5000 "lsr %B0" CR_TAB
5001 "ror %A0" CR_TAB
5002 "ror __tmp_reg__" CR_TAB
5003 "mov %B0,%A0" CR_TAB
5004 "mov %A0,__tmp_reg__");
5006 case 7:
5007 *len = 5;
5008 return ("lsr %B0" CR_TAB
5009 "mov %B0,%A0" CR_TAB
5010 "clr %A0" CR_TAB
5011 "ror %B0" CR_TAB
5012 "ror %A0");
5014 case 8:
5015 return *len = 2, ("mov %B0,%A1" CR_TAB
5016 "clr %A0");
5018 case 9:
5019 *len = 3;
5020 return ("mov %B0,%A0" CR_TAB
5021 "clr %A0" CR_TAB
5022 "lsl %B0");
5024 case 10:
5025 *len = 4;
5026 return ("mov %B0,%A0" CR_TAB
5027 "clr %A0" CR_TAB
5028 "lsl %B0" CR_TAB
5029 "lsl %B0");
5031 case 11:
5032 *len = 5;
5033 return ("mov %B0,%A0" CR_TAB
5034 "clr %A0" CR_TAB
5035 "lsl %B0" CR_TAB
5036 "lsl %B0" CR_TAB
5037 "lsl %B0");
5039 case 12:
5040 if (ldi_ok)
5042 *len = 4;
5043 return ("mov %B0,%A0" CR_TAB
5044 "clr %A0" CR_TAB
5045 "swap %B0" CR_TAB
5046 "andi %B0,0xf0");
5048 if (scratch)
5050 *len = 5;
5051 return ("mov %B0,%A0" CR_TAB
5052 "clr %A0" CR_TAB
5053 "swap %B0" CR_TAB
5054 "ldi %3,0xf0" CR_TAB
5055 "and %B0,%3");
5057 *len = 6;
5058 return ("mov %B0,%A0" CR_TAB
5059 "clr %A0" CR_TAB
5060 "lsl %B0" CR_TAB
5061 "lsl %B0" CR_TAB
5062 "lsl %B0" CR_TAB
5063 "lsl %B0");
5065 case 13:
5066 if (ldi_ok)
5068 *len = 5;
5069 return ("mov %B0,%A0" CR_TAB
5070 "clr %A0" CR_TAB
5071 "swap %B0" CR_TAB
5072 "lsl %B0" CR_TAB
5073 "andi %B0,0xe0");
5075 if (AVR_HAVE_MUL && scratch)
5077 *len = 5;
5078 return ("ldi %3,0x20" CR_TAB
5079 "mul %A0,%3" CR_TAB
5080 "mov %B0,r0" CR_TAB
5081 "clr %A0" CR_TAB
5082 "clr __zero_reg__");
5084 if (optimize_size && scratch)
5085 break; /* 5 */
5086 if (scratch)
5088 *len = 6;
5089 return ("mov %B0,%A0" CR_TAB
5090 "clr %A0" CR_TAB
5091 "swap %B0" CR_TAB
5092 "lsl %B0" CR_TAB
5093 "ldi %3,0xe0" CR_TAB
5094 "and %B0,%3");
5096 if (AVR_HAVE_MUL)
5098 *len = 6;
5099 return ("set" CR_TAB
5100 "bld r1,5" CR_TAB
5101 "mul %A0,r1" CR_TAB
5102 "mov %B0,r0" CR_TAB
5103 "clr %A0" CR_TAB
5104 "clr __zero_reg__");
5106 *len = 7;
5107 return ("mov %B0,%A0" CR_TAB
5108 "clr %A0" CR_TAB
5109 "lsl %B0" CR_TAB
5110 "lsl %B0" CR_TAB
5111 "lsl %B0" CR_TAB
5112 "lsl %B0" CR_TAB
5113 "lsl %B0");
5115 case 14:
5116 if (AVR_HAVE_MUL && ldi_ok)
5118 *len = 5;
5119 return ("ldi %B0,0x40" CR_TAB
5120 "mul %A0,%B0" CR_TAB
5121 "mov %B0,r0" CR_TAB
5122 "clr %A0" CR_TAB
5123 "clr __zero_reg__");
5125 if (AVR_HAVE_MUL && scratch)
5127 *len = 5;
5128 return ("ldi %3,0x40" CR_TAB
5129 "mul %A0,%3" CR_TAB
5130 "mov %B0,r0" CR_TAB
5131 "clr %A0" CR_TAB
5132 "clr __zero_reg__");
5134 if (optimize_size && ldi_ok)
5136 *len = 5;
5137 return ("mov %B0,%A0" CR_TAB
5138 "ldi %A0,6" "\n1:\t"
5139 "lsl %B0" CR_TAB
5140 "dec %A0" CR_TAB
5141 "brne 1b");
5143 if (optimize_size && scratch)
5144 break; /* 5 */
5145 *len = 6;
5146 return ("clr %B0" CR_TAB
5147 "lsr %A0" CR_TAB
5148 "ror %B0" CR_TAB
5149 "lsr %A0" CR_TAB
5150 "ror %B0" CR_TAB
5151 "clr %A0");
5153 case 15:
5154 *len = 4;
5155 return ("clr %B0" CR_TAB
5156 "lsr %A0" CR_TAB
5157 "ror %B0" CR_TAB
5158 "clr %A0");
5160 len = t;
5162 out_shift_with_cnt ("lsl %A0" CR_TAB
5163 "rol %B0", insn, operands, len, 2);
5164 return "";
5168 /* 24-bit shift left */
5170 const char*
5171 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5173 if (plen)
5174 *plen = 0;
5176 if (CONST_INT_P (op[2]))
5178 switch (INTVAL (op[2]))
5180 default:
5181 if (INTVAL (op[2]) < 24)
5182 break;
5184 return avr_asm_len ("clr %A0" CR_TAB
5185 "clr %B0" CR_TAB
5186 "clr %C0", op, plen, 3);
5188 case 8:
5190 int reg0 = REGNO (op[0]);
5191 int reg1 = REGNO (op[1]);
5193 if (reg0 >= reg1)
5194 return avr_asm_len ("mov %C0,%B1" CR_TAB
5195 "mov %B0,%A1" CR_TAB
5196 "clr %A0", op, plen, 3);
5197 else
5198 return avr_asm_len ("clr %A0" CR_TAB
5199 "mov %B0,%A1" CR_TAB
5200 "mov %C0,%B1", op, plen, 3);
5203 case 16:
5205 int reg0 = REGNO (op[0]);
5206 int reg1 = REGNO (op[1]);
5208 if (reg0 + 2 != reg1)
5209 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5211 return avr_asm_len ("clr %B0" CR_TAB
5212 "clr %A0", op, plen, 2);
5215 case 23:
5216 return avr_asm_len ("clr %C0" CR_TAB
5217 "lsr %A0" CR_TAB
5218 "ror %C0" CR_TAB
5219 "clr %B0" CR_TAB
5220 "clr %A0", op, plen, 5);
5224 out_shift_with_cnt ("lsl %A0" CR_TAB
5225 "rol %B0" CR_TAB
5226 "rol %C0", insn, op, plen, 3);
5227 return "";
5231 /* 32bit shift left ((long)x << i) */
5233 const char *
5234 ashlsi3_out (rtx insn, rtx operands[], int *len)
5236 if (GET_CODE (operands[2]) == CONST_INT)
5238 int k;
5239 int *t = len;
5241 if (!len)
5242 len = &k;
5244 switch (INTVAL (operands[2]))
5246 default:
5247 if (INTVAL (operands[2]) < 32)
5248 break;
5250 if (AVR_HAVE_MOVW)
5251 return *len = 3, ("clr %D0" CR_TAB
5252 "clr %C0" CR_TAB
5253 "movw %A0,%C0");
5254 *len = 4;
5255 return ("clr %D0" CR_TAB
5256 "clr %C0" CR_TAB
5257 "clr %B0" CR_TAB
5258 "clr %A0");
5260 case 8:
5262 int reg0 = true_regnum (operands[0]);
5263 int reg1 = true_regnum (operands[1]);
5264 *len = 4;
5265 if (reg0 >= reg1)
5266 return ("mov %D0,%C1" CR_TAB
5267 "mov %C0,%B1" CR_TAB
5268 "mov %B0,%A1" CR_TAB
5269 "clr %A0");
5270 else
5271 return ("clr %A0" CR_TAB
5272 "mov %B0,%A1" CR_TAB
5273 "mov %C0,%B1" CR_TAB
5274 "mov %D0,%C1");
5277 case 16:
5279 int reg0 = true_regnum (operands[0]);
5280 int reg1 = true_regnum (operands[1]);
5281 if (reg0 + 2 == reg1)
5282 return *len = 2, ("clr %B0" CR_TAB
5283 "clr %A0");
5284 if (AVR_HAVE_MOVW)
5285 return *len = 3, ("movw %C0,%A1" CR_TAB
5286 "clr %B0" CR_TAB
5287 "clr %A0");
5288 else
5289 return *len = 4, ("mov %C0,%A1" CR_TAB
5290 "mov %D0,%B1" CR_TAB
5291 "clr %B0" CR_TAB
5292 "clr %A0");
5295 case 24:
5296 *len = 4;
5297 return ("mov %D0,%A1" CR_TAB
5298 "clr %C0" CR_TAB
5299 "clr %B0" CR_TAB
5300 "clr %A0");
5302 case 31:
5303 *len = 6;
5304 return ("clr %D0" CR_TAB
5305 "lsr %A0" CR_TAB
5306 "ror %D0" CR_TAB
5307 "clr %C0" CR_TAB
5308 "clr %B0" CR_TAB
5309 "clr %A0");
5311 len = t;
5313 out_shift_with_cnt ("lsl %A0" CR_TAB
5314 "rol %B0" CR_TAB
5315 "rol %C0" CR_TAB
5316 "rol %D0", insn, operands, len, 4);
5317 return "";
5320 /* 8bit arithmetic shift right ((signed char)x >> i) */
5322 const char *
5323 ashrqi3_out (rtx insn, rtx operands[], int *len)
5325 if (GET_CODE (operands[2]) == CONST_INT)
5327 int k;
5329 if (!len)
5330 len = &k;
5332 switch (INTVAL (operands[2]))
5334 case 1:
5335 *len = 1;
5336 return "asr %0";
5338 case 2:
5339 *len = 2;
5340 return ("asr %0" CR_TAB
5341 "asr %0");
5343 case 3:
5344 *len = 3;
5345 return ("asr %0" CR_TAB
5346 "asr %0" CR_TAB
5347 "asr %0");
5349 case 4:
5350 *len = 4;
5351 return ("asr %0" CR_TAB
5352 "asr %0" CR_TAB
5353 "asr %0" CR_TAB
5354 "asr %0");
5356 case 5:
5357 *len = 5;
5358 return ("asr %0" CR_TAB
5359 "asr %0" CR_TAB
5360 "asr %0" CR_TAB
5361 "asr %0" CR_TAB
5362 "asr %0");
5364 case 6:
5365 *len = 4;
5366 return ("bst %0,6" CR_TAB
5367 "lsl %0" CR_TAB
5368 "sbc %0,%0" CR_TAB
5369 "bld %0,0");
5371 default:
5372 if (INTVAL (operands[2]) < 8)
5373 break;
5375 /* fall through */
5377 case 7:
5378 *len = 2;
5379 return ("lsl %0" CR_TAB
5380 "sbc %0,%0");
5383 else if (CONSTANT_P (operands[2]))
5384 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5386 out_shift_with_cnt ("asr %0",
5387 insn, operands, len, 1);
5388 return "";
5392 /* 16bit arithmetic shift right ((signed short)x >> i) */
5394 const char *
5395 ashrhi3_out (rtx insn, rtx operands[], int *len)
5397 if (GET_CODE (operands[2]) == CONST_INT)
5399 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5400 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5401 int k;
5402 int *t = len;
5404 if (!len)
5405 len = &k;
5407 switch (INTVAL (operands[2]))
5409 case 4:
5410 case 5:
5411 /* XXX try to optimize this too? */
5412 break;
5414 case 6:
5415 if (optimize_size)
5416 break; /* scratch ? 5 : 6 */
5417 *len = 8;
5418 return ("mov __tmp_reg__,%A0" CR_TAB
5419 "mov %A0,%B0" CR_TAB
5420 "lsl __tmp_reg__" CR_TAB
5421 "rol %A0" CR_TAB
5422 "sbc %B0,%B0" CR_TAB
5423 "lsl __tmp_reg__" CR_TAB
5424 "rol %A0" CR_TAB
5425 "rol %B0");
5427 case 7:
5428 *len = 4;
5429 return ("lsl %A0" CR_TAB
5430 "mov %A0,%B0" CR_TAB
5431 "rol %A0" CR_TAB
5432 "sbc %B0,%B0");
5434 case 8:
5436 int reg0 = true_regnum (operands[0]);
5437 int reg1 = true_regnum (operands[1]);
5439 if (reg0 == reg1)
5440 return *len = 3, ("mov %A0,%B0" CR_TAB
5441 "lsl %B0" CR_TAB
5442 "sbc %B0,%B0");
5443 else
5444 return *len = 4, ("mov %A0,%B1" CR_TAB
5445 "clr %B0" CR_TAB
5446 "sbrc %A0,7" CR_TAB
5447 "dec %B0");
5450 case 9:
5451 *len = 4;
5452 return ("mov %A0,%B0" CR_TAB
5453 "lsl %B0" CR_TAB
5454 "sbc %B0,%B0" CR_TAB
5455 "asr %A0");
5457 case 10:
5458 *len = 5;
5459 return ("mov %A0,%B0" CR_TAB
5460 "lsl %B0" CR_TAB
5461 "sbc %B0,%B0" CR_TAB
5462 "asr %A0" CR_TAB
5463 "asr %A0");
5465 case 11:
5466 if (AVR_HAVE_MUL && ldi_ok)
5468 *len = 5;
5469 return ("ldi %A0,0x20" CR_TAB
5470 "muls %B0,%A0" CR_TAB
5471 "mov %A0,r1" CR_TAB
5472 "sbc %B0,%B0" CR_TAB
5473 "clr __zero_reg__");
5475 if (optimize_size && scratch)
5476 break; /* 5 */
5477 *len = 6;
5478 return ("mov %A0,%B0" CR_TAB
5479 "lsl %B0" CR_TAB
5480 "sbc %B0,%B0" CR_TAB
5481 "asr %A0" CR_TAB
5482 "asr %A0" CR_TAB
5483 "asr %A0");
5485 case 12:
5486 if (AVR_HAVE_MUL && ldi_ok)
5488 *len = 5;
5489 return ("ldi %A0,0x10" CR_TAB
5490 "muls %B0,%A0" CR_TAB
5491 "mov %A0,r1" CR_TAB
5492 "sbc %B0,%B0" CR_TAB
5493 "clr __zero_reg__");
5495 if (optimize_size && scratch)
5496 break; /* 5 */
5497 *len = 7;
5498 return ("mov %A0,%B0" CR_TAB
5499 "lsl %B0" CR_TAB
5500 "sbc %B0,%B0" CR_TAB
5501 "asr %A0" CR_TAB
5502 "asr %A0" CR_TAB
5503 "asr %A0" CR_TAB
5504 "asr %A0");
5506 case 13:
5507 if (AVR_HAVE_MUL && ldi_ok)
5509 *len = 5;
5510 return ("ldi %A0,0x08" CR_TAB
5511 "muls %B0,%A0" CR_TAB
5512 "mov %A0,r1" CR_TAB
5513 "sbc %B0,%B0" CR_TAB
5514 "clr __zero_reg__");
5516 if (optimize_size)
5517 break; /* scratch ? 5 : 7 */
5518 *len = 8;
5519 return ("mov %A0,%B0" CR_TAB
5520 "lsl %B0" CR_TAB
5521 "sbc %B0,%B0" CR_TAB
5522 "asr %A0" CR_TAB
5523 "asr %A0" CR_TAB
5524 "asr %A0" CR_TAB
5525 "asr %A0" CR_TAB
5526 "asr %A0");
5528 case 14:
5529 *len = 5;
5530 return ("lsl %B0" CR_TAB
5531 "sbc %A0,%A0" CR_TAB
5532 "lsl %B0" CR_TAB
5533 "mov %B0,%A0" CR_TAB
5534 "rol %A0");
5536 default:
5537 if (INTVAL (operands[2]) < 16)
5538 break;
5540 /* fall through */
5542 case 15:
5543 return *len = 3, ("lsl %B0" CR_TAB
5544 "sbc %A0,%A0" CR_TAB
5545 "mov %B0,%A0");
5547 len = t;
5549 out_shift_with_cnt ("asr %B0" CR_TAB
5550 "ror %A0", insn, operands, len, 2);
5551 return "";
5555 /* 24-bit arithmetic shift right */
5557 const char*
5558 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5560 int dest = REGNO (op[0]);
5561 int src = REGNO (op[1]);
5563 if (CONST_INT_P (op[2]))
5565 if (plen)
5566 *plen = 0;
5568 switch (INTVAL (op[2]))
5570 case 8:
5571 if (dest <= src)
5572 return avr_asm_len ("mov %A0,%B1" CR_TAB
5573 "mov %B0,%C1" CR_TAB
5574 "clr %C0" CR_TAB
5575 "sbrc %B0,7" CR_TAB
5576 "dec %C0", op, plen, 5);
5577 else
5578 return avr_asm_len ("clr %C0" CR_TAB
5579 "sbrc %C1,7" CR_TAB
5580 "dec %C0" CR_TAB
5581 "mov %B0,%C1" CR_TAB
5582 "mov %A0,%B1", op, plen, 5);
5584 case 16:
5585 if (dest != src + 2)
5586 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5588 return avr_asm_len ("clr %B0" CR_TAB
5589 "sbrc %A0,7" CR_TAB
5590 "com %B0" CR_TAB
5591 "mov %C0,%B0", op, plen, 4);
5593 default:
5594 if (INTVAL (op[2]) < 24)
5595 break;
5597 /* fall through */
5599 case 23:
5600 return avr_asm_len ("lsl %C0" CR_TAB
5601 "sbc %A0,%A0" CR_TAB
5602 "mov %B0,%A0" CR_TAB
5603 "mov %C0,%A0", op, plen, 4);
5604 } /* switch */
5607 out_shift_with_cnt ("asr %C0" CR_TAB
5608 "ror %B0" CR_TAB
5609 "ror %A0", insn, op, plen, 3);
5610 return "";
5614 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5616 const char *
5617 ashrsi3_out (rtx insn, rtx operands[], int *len)
5619 if (GET_CODE (operands[2]) == CONST_INT)
5621 int k;
5622 int *t = len;
5624 if (!len)
5625 len = &k;
5627 switch (INTVAL (operands[2]))
5629 case 8:
5631 int reg0 = true_regnum (operands[0]);
5632 int reg1 = true_regnum (operands[1]);
5633 *len=6;
5634 if (reg0 <= reg1)
5635 return ("mov %A0,%B1" CR_TAB
5636 "mov %B0,%C1" CR_TAB
5637 "mov %C0,%D1" CR_TAB
5638 "clr %D0" CR_TAB
5639 "sbrc %C0,7" CR_TAB
5640 "dec %D0");
5641 else
5642 return ("clr %D0" CR_TAB
5643 "sbrc %D1,7" CR_TAB
5644 "dec %D0" CR_TAB
5645 "mov %C0,%D1" CR_TAB
5646 "mov %B0,%C1" CR_TAB
5647 "mov %A0,%B1");
5650 case 16:
5652 int reg0 = true_regnum (operands[0]);
5653 int reg1 = true_regnum (operands[1]);
5655 if (reg0 == reg1 + 2)
5656 return *len = 4, ("clr %D0" CR_TAB
5657 "sbrc %B0,7" CR_TAB
5658 "com %D0" CR_TAB
5659 "mov %C0,%D0");
5660 if (AVR_HAVE_MOVW)
5661 return *len = 5, ("movw %A0,%C1" CR_TAB
5662 "clr %D0" CR_TAB
5663 "sbrc %B0,7" CR_TAB
5664 "com %D0" CR_TAB
5665 "mov %C0,%D0");
5666 else
5667 return *len = 6, ("mov %B0,%D1" CR_TAB
5668 "mov %A0,%C1" CR_TAB
5669 "clr %D0" CR_TAB
5670 "sbrc %B0,7" CR_TAB
5671 "com %D0" CR_TAB
5672 "mov %C0,%D0");
5675 case 24:
5676 return *len = 6, ("mov %A0,%D1" CR_TAB
5677 "clr %D0" CR_TAB
5678 "sbrc %A0,7" CR_TAB
5679 "com %D0" CR_TAB
5680 "mov %B0,%D0" CR_TAB
5681 "mov %C0,%D0");
5683 default:
5684 if (INTVAL (operands[2]) < 32)
5685 break;
5687 /* fall through */
5689 case 31:
5690 if (AVR_HAVE_MOVW)
5691 return *len = 4, ("lsl %D0" CR_TAB
5692 "sbc %A0,%A0" CR_TAB
5693 "mov %B0,%A0" CR_TAB
5694 "movw %C0,%A0");
5695 else
5696 return *len = 5, ("lsl %D0" CR_TAB
5697 "sbc %A0,%A0" CR_TAB
5698 "mov %B0,%A0" CR_TAB
5699 "mov %C0,%A0" CR_TAB
5700 "mov %D0,%A0");
5702 len = t;
5704 out_shift_with_cnt ("asr %D0" CR_TAB
5705 "ror %C0" CR_TAB
5706 "ror %B0" CR_TAB
5707 "ror %A0", insn, operands, len, 4);
5708 return "";
5711 /* 8-bit logic shift right ((unsigned char)x >> i) */
5713 const char *
5714 lshrqi3_out (rtx insn, rtx operands[], int *len)
5716 if (GET_CODE (operands[2]) == CONST_INT)
5718 int k;
5720 if (!len)
5721 len = &k;
5723 switch (INTVAL (operands[2]))
5725 default:
5726 if (INTVAL (operands[2]) < 8)
5727 break;
5729 *len = 1;
5730 return "clr %0";
5732 case 1:
5733 *len = 1;
5734 return "lsr %0";
5736 case 2:
5737 *len = 2;
5738 return ("lsr %0" CR_TAB
5739 "lsr %0");
5740 case 3:
5741 *len = 3;
5742 return ("lsr %0" CR_TAB
5743 "lsr %0" CR_TAB
5744 "lsr %0");
5746 case 4:
5747 if (test_hard_reg_class (LD_REGS, operands[0]))
5749 *len=2;
5750 return ("swap %0" CR_TAB
5751 "andi %0,0x0f");
5753 *len = 4;
5754 return ("lsr %0" CR_TAB
5755 "lsr %0" CR_TAB
5756 "lsr %0" CR_TAB
5757 "lsr %0");
5759 case 5:
5760 if (test_hard_reg_class (LD_REGS, operands[0]))
5762 *len = 3;
5763 return ("swap %0" CR_TAB
5764 "lsr %0" CR_TAB
5765 "andi %0,0x7");
5767 *len = 5;
5768 return ("lsr %0" CR_TAB
5769 "lsr %0" CR_TAB
5770 "lsr %0" CR_TAB
5771 "lsr %0" CR_TAB
5772 "lsr %0");
5774 case 6:
5775 if (test_hard_reg_class (LD_REGS, operands[0]))
5777 *len = 4;
5778 return ("swap %0" CR_TAB
5779 "lsr %0" CR_TAB
5780 "lsr %0" CR_TAB
5781 "andi %0,0x3");
5783 *len = 6;
5784 return ("lsr %0" CR_TAB
5785 "lsr %0" CR_TAB
5786 "lsr %0" CR_TAB
5787 "lsr %0" CR_TAB
5788 "lsr %0" CR_TAB
5789 "lsr %0");
5791 case 7:
5792 *len = 3;
5793 return ("rol %0" CR_TAB
5794 "clr %0" CR_TAB
5795 "rol %0");
5798 else if (CONSTANT_P (operands[2]))
5799 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5801 out_shift_with_cnt ("lsr %0",
5802 insn, operands, len, 1);
5803 return "";
5806 /* 16-bit logic shift right ((unsigned short)x >> i) */
5808 const char *
5809 lshrhi3_out (rtx insn, rtx operands[], int *len)
5811 if (GET_CODE (operands[2]) == CONST_INT)
5813 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5814 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5815 int k;
5816 int *t = len;
5818 if (!len)
5819 len = &k;
5821 switch (INTVAL (operands[2]))
5823 default:
5824 if (INTVAL (operands[2]) < 16)
5825 break;
5827 *len = 2;
5828 return ("clr %B0" CR_TAB
5829 "clr %A0");
5831 case 4:
5832 if (optimize_size && scratch)
5833 break; /* 5 */
5834 if (ldi_ok)
5836 *len = 6;
5837 return ("swap %B0" CR_TAB
5838 "swap %A0" CR_TAB
5839 "andi %A0,0x0f" CR_TAB
5840 "eor %A0,%B0" CR_TAB
5841 "andi %B0,0x0f" CR_TAB
5842 "eor %A0,%B0");
5844 if (scratch)
5846 *len = 7;
5847 return ("swap %B0" CR_TAB
5848 "swap %A0" CR_TAB
5849 "ldi %3,0x0f" CR_TAB
5850 "and %A0,%3" CR_TAB
5851 "eor %A0,%B0" CR_TAB
5852 "and %B0,%3" CR_TAB
5853 "eor %A0,%B0");
5855 break; /* optimize_size ? 6 : 8 */
5857 case 5:
5858 if (optimize_size)
5859 break; /* scratch ? 5 : 6 */
5860 if (ldi_ok)
5862 *len = 8;
5863 return ("lsr %B0" CR_TAB
5864 "ror %A0" CR_TAB
5865 "swap %B0" CR_TAB
5866 "swap %A0" CR_TAB
5867 "andi %A0,0x0f" CR_TAB
5868 "eor %A0,%B0" CR_TAB
5869 "andi %B0,0x0f" CR_TAB
5870 "eor %A0,%B0");
5872 if (scratch)
5874 *len = 9;
5875 return ("lsr %B0" CR_TAB
5876 "ror %A0" CR_TAB
5877 "swap %B0" CR_TAB
5878 "swap %A0" CR_TAB
5879 "ldi %3,0x0f" CR_TAB
5880 "and %A0,%3" CR_TAB
5881 "eor %A0,%B0" CR_TAB
5882 "and %B0,%3" CR_TAB
5883 "eor %A0,%B0");
5885 break; /* 10 */
5887 case 6:
5888 if (optimize_size)
5889 break; /* scratch ? 5 : 6 */
5890 *len = 9;
5891 return ("clr __tmp_reg__" CR_TAB
5892 "lsl %A0" CR_TAB
5893 "rol %B0" CR_TAB
5894 "rol __tmp_reg__" CR_TAB
5895 "lsl %A0" CR_TAB
5896 "rol %B0" CR_TAB
5897 "rol __tmp_reg__" CR_TAB
5898 "mov %A0,%B0" CR_TAB
5899 "mov %B0,__tmp_reg__");
5901 case 7:
5902 *len = 5;
5903 return ("lsl %A0" CR_TAB
5904 "mov %A0,%B0" CR_TAB
5905 "rol %A0" CR_TAB
5906 "sbc %B0,%B0" CR_TAB
5907 "neg %B0");
5909 case 8:
5910 return *len = 2, ("mov %A0,%B1" CR_TAB
5911 "clr %B0");
5913 case 9:
5914 *len = 3;
5915 return ("mov %A0,%B0" CR_TAB
5916 "clr %B0" CR_TAB
5917 "lsr %A0");
5919 case 10:
5920 *len = 4;
5921 return ("mov %A0,%B0" CR_TAB
5922 "clr %B0" CR_TAB
5923 "lsr %A0" CR_TAB
5924 "lsr %A0");
5926 case 11:
5927 *len = 5;
5928 return ("mov %A0,%B0" CR_TAB
5929 "clr %B0" CR_TAB
5930 "lsr %A0" CR_TAB
5931 "lsr %A0" CR_TAB
5932 "lsr %A0");
5934 case 12:
5935 if (ldi_ok)
5937 *len = 4;
5938 return ("mov %A0,%B0" CR_TAB
5939 "clr %B0" CR_TAB
5940 "swap %A0" CR_TAB
5941 "andi %A0,0x0f");
5943 if (scratch)
5945 *len = 5;
5946 return ("mov %A0,%B0" CR_TAB
5947 "clr %B0" CR_TAB
5948 "swap %A0" CR_TAB
5949 "ldi %3,0x0f" CR_TAB
5950 "and %A0,%3");
5952 *len = 6;
5953 return ("mov %A0,%B0" CR_TAB
5954 "clr %B0" CR_TAB
5955 "lsr %A0" CR_TAB
5956 "lsr %A0" CR_TAB
5957 "lsr %A0" CR_TAB
5958 "lsr %A0");
5960 case 13:
5961 if (ldi_ok)
5963 *len = 5;
5964 return ("mov %A0,%B0" CR_TAB
5965 "clr %B0" CR_TAB
5966 "swap %A0" CR_TAB
5967 "lsr %A0" CR_TAB
5968 "andi %A0,0x07");
5970 if (AVR_HAVE_MUL && scratch)
5972 *len = 5;
5973 return ("ldi %3,0x08" CR_TAB
5974 "mul %B0,%3" CR_TAB
5975 "mov %A0,r1" CR_TAB
5976 "clr %B0" CR_TAB
5977 "clr __zero_reg__");
5979 if (optimize_size && scratch)
5980 break; /* 5 */
5981 if (scratch)
5983 *len = 6;
5984 return ("mov %A0,%B0" CR_TAB
5985 "clr %B0" CR_TAB
5986 "swap %A0" CR_TAB
5987 "lsr %A0" CR_TAB
5988 "ldi %3,0x07" CR_TAB
5989 "and %A0,%3");
5991 if (AVR_HAVE_MUL)
5993 *len = 6;
5994 return ("set" CR_TAB
5995 "bld r1,3" CR_TAB
5996 "mul %B0,r1" CR_TAB
5997 "mov %A0,r1" CR_TAB
5998 "clr %B0" CR_TAB
5999 "clr __zero_reg__");
6001 *len = 7;
6002 return ("mov %A0,%B0" CR_TAB
6003 "clr %B0" CR_TAB
6004 "lsr %A0" CR_TAB
6005 "lsr %A0" CR_TAB
6006 "lsr %A0" CR_TAB
6007 "lsr %A0" CR_TAB
6008 "lsr %A0");
6010 case 14:
6011 if (AVR_HAVE_MUL && ldi_ok)
6013 *len = 5;
6014 return ("ldi %A0,0x04" CR_TAB
6015 "mul %B0,%A0" CR_TAB
6016 "mov %A0,r1" CR_TAB
6017 "clr %B0" CR_TAB
6018 "clr __zero_reg__");
6020 if (AVR_HAVE_MUL && scratch)
6022 *len = 5;
6023 return ("ldi %3,0x04" CR_TAB
6024 "mul %B0,%3" CR_TAB
6025 "mov %A0,r1" CR_TAB
6026 "clr %B0" CR_TAB
6027 "clr __zero_reg__");
6029 if (optimize_size && ldi_ok)
6031 *len = 5;
6032 return ("mov %A0,%B0" CR_TAB
6033 "ldi %B0,6" "\n1:\t"
6034 "lsr %A0" CR_TAB
6035 "dec %B0" CR_TAB
6036 "brne 1b");
6038 if (optimize_size && scratch)
6039 break; /* 5 */
6040 *len = 6;
6041 return ("clr %A0" CR_TAB
6042 "lsl %B0" CR_TAB
6043 "rol %A0" CR_TAB
6044 "lsl %B0" CR_TAB
6045 "rol %A0" CR_TAB
6046 "clr %B0");
6048 case 15:
6049 *len = 4;
6050 return ("clr %A0" CR_TAB
6051 "lsl %B0" CR_TAB
6052 "rol %A0" CR_TAB
6053 "clr %B0");
6055 len = t;
6057 out_shift_with_cnt ("lsr %B0" CR_TAB
6058 "ror %A0", insn, operands, len, 2);
6059 return "";
6063 /* 24-bit logic shift right */
6065 const char*
6066 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
6068 int dest = REGNO (op[0]);
6069 int src = REGNO (op[1]);
6071 if (CONST_INT_P (op[2]))
6073 if (plen)
6074 *plen = 0;
6076 switch (INTVAL (op[2]))
6078 case 8:
6079 if (dest <= src)
6080 return avr_asm_len ("mov %A0,%B1" CR_TAB
6081 "mov %B0,%C1" CR_TAB
6082 "clr %C0", op, plen, 3);
6083 else
6084 return avr_asm_len ("clr %C0" CR_TAB
6085 "mov %B0,%C1" CR_TAB
6086 "mov %A0,%B1", op, plen, 3);
6088 case 16:
6089 if (dest != src + 2)
6090 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6092 return avr_asm_len ("clr %B0" CR_TAB
6093 "clr %C0", op, plen, 2);
6095 default:
6096 if (INTVAL (op[2]) < 24)
6097 break;
6099 /* fall through */
6101 case 23:
6102 return avr_asm_len ("clr %A0" CR_TAB
6103 "sbrc %C0,7" CR_TAB
6104 "inc %A0" CR_TAB
6105 "clr %B0" CR_TAB
6106 "clr %C0", op, plen, 5);
6107 } /* switch */
6110 out_shift_with_cnt ("lsr %C0" CR_TAB
6111 "ror %B0" CR_TAB
6112 "ror %A0", insn, op, plen, 3);
6113 return "";
6117 /* 32-bit logic shift right ((unsigned int)x >> i) */
6119 const char *
6120 lshrsi3_out (rtx insn, rtx operands[], int *len)
6122 if (GET_CODE (operands[2]) == CONST_INT)
6124 int k;
6125 int *t = len;
6127 if (!len)
6128 len = &k;
6130 switch (INTVAL (operands[2]))
6132 default:
6133 if (INTVAL (operands[2]) < 32)
6134 break;
6136 if (AVR_HAVE_MOVW)
6137 return *len = 3, ("clr %D0" CR_TAB
6138 "clr %C0" CR_TAB
6139 "movw %A0,%C0");
6140 *len = 4;
6141 return ("clr %D0" CR_TAB
6142 "clr %C0" CR_TAB
6143 "clr %B0" CR_TAB
6144 "clr %A0");
6146 case 8:
6148 int reg0 = true_regnum (operands[0]);
6149 int reg1 = true_regnum (operands[1]);
6150 *len = 4;
6151 if (reg0 <= reg1)
6152 return ("mov %A0,%B1" CR_TAB
6153 "mov %B0,%C1" CR_TAB
6154 "mov %C0,%D1" CR_TAB
6155 "clr %D0");
6156 else
6157 return ("clr %D0" CR_TAB
6158 "mov %C0,%D1" CR_TAB
6159 "mov %B0,%C1" CR_TAB
6160 "mov %A0,%B1");
6163 case 16:
6165 int reg0 = true_regnum (operands[0]);
6166 int reg1 = true_regnum (operands[1]);
6168 if (reg0 == reg1 + 2)
6169 return *len = 2, ("clr %C0" CR_TAB
6170 "clr %D0");
6171 if (AVR_HAVE_MOVW)
6172 return *len = 3, ("movw %A0,%C1" CR_TAB
6173 "clr %C0" CR_TAB
6174 "clr %D0");
6175 else
6176 return *len = 4, ("mov %B0,%D1" CR_TAB
6177 "mov %A0,%C1" CR_TAB
6178 "clr %C0" CR_TAB
6179 "clr %D0");
6182 case 24:
6183 return *len = 4, ("mov %A0,%D1" CR_TAB
6184 "clr %B0" CR_TAB
6185 "clr %C0" CR_TAB
6186 "clr %D0");
6188 case 31:
6189 *len = 6;
6190 return ("clr %A0" CR_TAB
6191 "sbrc %D0,7" CR_TAB
6192 "inc %A0" CR_TAB
6193 "clr %B0" CR_TAB
6194 "clr %C0" CR_TAB
6195 "clr %D0");
6197 len = t;
6199 out_shift_with_cnt ("lsr %D0" CR_TAB
6200 "ror %C0" CR_TAB
6201 "ror %B0" CR_TAB
6202 "ror %A0", insn, operands, len, 4);
6203 return "";
6207 /* Output addition of register XOP[0] and compile time constant XOP[2].
6208 CODE == PLUS: perform addition by using ADD instructions or
6209 CODE == MINUS: perform addition by using SUB instructions:
6211 XOP[0] = XOP[0] + XOP[2]
6213 Or perform addition/subtraction with register XOP[2] depending on CODE:
6215 XOP[0] = XOP[0] +/- XOP[2]
6217 If PLEN == NULL, print assembler instructions to perform the operation;
6218 otherwise, set *PLEN to the length of the instruction sequence (in words)
6219 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6220 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6222 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6223 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6224 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6225 the subtrahend in the original insn, provided it is a compile time constant.
6226 In all other cases, SIGN is 0.
6228 Return "". */
6230 static void
6231 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
6232 enum rtx_code code_sat = UNKNOWN, int sign = 0)
6234 /* MODE of the operation. */
6235 enum machine_mode mode = GET_MODE (xop[0]);
6237 /* INT_MODE of the same size. */
6238 enum machine_mode imode = int_mode_for_mode (mode);
6240 /* Number of bytes to operate on. */
6241 int i, n_bytes = GET_MODE_SIZE (mode);
6243 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6244 int clobber_val = -1;
6246 /* op[0]: 8-bit destination register
6247 op[1]: 8-bit const int
6248 op[2]: 8-bit scratch register */
6249 rtx op[3];
6251 /* Started the operation? Before starting the operation we may skip
6252 adding 0. This is no more true after the operation started because
6253 carry must be taken into account. */
6254 bool started = false;
6256 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6257 rtx xval = xop[2];
6259 /* Output a BRVC instruction. Only needed with saturation. */
6260 bool out_brvc = true;
6262 if (plen)
6263 *plen = 0;
6265 if (REG_P (xop[2]))
6267 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6269 for (i = 0; i < n_bytes; i++)
6271 /* We operate byte-wise on the destination. */
6272 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
6273 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
6275 if (i == 0)
6276 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
6277 op, plen, 1);
6278 else
6279 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
6280 op, plen, 1);
6283 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6285 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
6287 if (MINUS == code)
6288 return;
6291 goto saturate;
6294 /* Except in the case of ADIW with 16-bit register (see below)
6295 addition does not set cc0 in a usable way. */
6297 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
6299 if (CONST_FIXED_P (xval))
6300 xval = avr_to_int_mode (xval);
6302 /* Adding/Subtracting zero is a no-op. */
6304 if (xval == const0_rtx)
6306 *pcc = CC_NONE;
6307 return;
6310 if (MINUS == code)
6311 xval = simplify_unary_operation (NEG, imode, xval, imode);
6313 op[2] = xop[3];
6315 if (SS_PLUS == code_sat && MINUS == code
6316 && sign < 0
6317 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
6318 & GET_MODE_MASK (QImode)))
6320 /* We compute x + 0x80 by means of SUB instructions. We negated the
6321 constant subtrahend above and are left with x - (-128) so that we
6322 need something like SUBI r,128 which does not exist because SUBI sets
6323 V according to the sign of the subtrahend. Notice the only case
6324 where this must be done is when NEG overflowed in case [2s] because
6325 the V computation needs the right sign of the subtrahend. */
6327 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6329 avr_asm_len ("subi %0,128" CR_TAB
6330 "brmi 0f", &msb, plen, 2);
6331 out_brvc = false;
6333 goto saturate;
6336 for (i = 0; i < n_bytes; i++)
6338 /* We operate byte-wise on the destination. */
6339 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6340 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
6342 /* 8-bit value to operate with this byte. */
6343 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6345 /* Registers R16..R31 can operate with immediate. */
6346 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6348 op[0] = reg8;
6349 op[1] = gen_int_mode (val8, QImode);
6351 /* To get usable cc0 no low-bytes must have been skipped. */
6353 if (i && !started)
6354 *pcc = CC_CLOBBER;
6356 if (!started
6357 && i % 2 == 0
6358 && i + 2 <= n_bytes
6359 && test_hard_reg_class (ADDW_REGS, reg8))
6361 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
6362 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
6364 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6365 i.e. operate word-wise. */
6367 if (val16 < 64)
6369 if (val16 != 0)
6371 started = true;
6372 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
6373 op, plen, 1);
6375 if (n_bytes == 2 && PLUS == code)
6376 *pcc = CC_SET_ZN;
6379 i++;
6380 continue;
6384 if (val8 == 0)
6386 if (started)
6387 avr_asm_len (code == PLUS
6388 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6389 op, plen, 1);
6390 continue;
6392 else if ((val8 == 1 || val8 == 0xff)
6393 && UNKNOWN == code_sat
6394 && !started
6395 && i == n_bytes - 1)
6397 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
6398 op, plen, 1);
6399 break;
6402 switch (code)
6404 case PLUS:
6406 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
6408 if (plen != NULL && UNKNOWN != code_sat)
6410 /* This belongs to the x + 0x80 corner case. The code with
6411 ADD instruction is not smaller, thus make this case
6412 expensive so that the caller won't pick it. */
6414 *plen += 10;
6415 break;
6418 if (clobber_val != (int) val8)
6419 avr_asm_len ("ldi %2,%1", op, plen, 1);
6420 clobber_val = (int) val8;
6422 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6424 break; /* PLUS */
6426 case MINUS:
6428 if (ld_reg_p)
6429 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6430 else
6432 gcc_assert (plen != NULL || REG_P (op[2]));
6434 if (clobber_val != (int) val8)
6435 avr_asm_len ("ldi %2,%1", op, plen, 1);
6436 clobber_val = (int) val8;
6438 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6441 break; /* MINUS */
6443 default:
6444 /* Unknown code */
6445 gcc_unreachable();
6448 started = true;
6450 } /* for all sub-bytes */
6452 saturate:
6454 if (UNKNOWN == code_sat)
6455 return;
6457 *pcc = (int) CC_CLOBBER;
6459 /* Vanilla addition/subtraction is done. We are left with saturation.
6461 We have to compute A = A <op> B where A is a register and
6462 B is a register or a non-zero compile time constant CONST.
6463 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6464 B stands for the original operand $2 in INSN. In the case of B = CONST,
6465 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6467 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6470 unsigned
6471 operation | code | sat if | b is | sat value | case
6472 -----------------+-------+----------+--------------+-----------+-------
6473 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6474 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6475 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6476 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6479 signed
6480 operation | code | sat if | b is | sat value | case
6481 -----------------+-------+----------+--------------+-----------+-------
6482 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6483 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6484 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6485 - as a + (-b) | add | V == 1 | const | s- | [4s]
6487 s+ = b < 0 ? -0x80 : 0x7f
6488 s- = b < 0 ? 0x7f : -0x80
6490 The cases a - b actually perform a - (-(-b)) if B is CONST.
6493 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6494 op[1] = n_bytes > 1
6495 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
6496 : NULL_RTX;
6498 bool need_copy = true;
6499 int len_call = 1 + AVR_HAVE_JMP_CALL;
6501 switch (code_sat)
6503 default:
6504 gcc_unreachable();
6506 case SS_PLUS:
6507 case SS_MINUS:
6509 if (out_brvc)
6510 avr_asm_len ("brvc 0f", op, plen, 1);
6512 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6514 /* [1s,reg] */
6516 if (n_bytes == 1)
6517 avr_asm_len ("ldi %0,0x7f" CR_TAB
6518 "adc %0,__zero_reg__", op, plen, 2);
6519 else
6520 avr_asm_len ("ldi %0,0x7f" CR_TAB
6521 "ldi %1,0xff" CR_TAB
6522 "adc %1,__zero_reg__" CR_TAB
6523 "adc %0,__zero_reg__", op, plen, 4);
6525 else if (sign == 0 && PLUS == code)
6527 /* [1s,reg] */
6529 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6531 if (n_bytes == 1)
6532 avr_asm_len ("ldi %0,0x80" CR_TAB
6533 "sbrs %2,7" CR_TAB
6534 "dec %0", op, plen, 3);
6535 else
6536 avr_asm_len ("ldi %0,0x80" CR_TAB
6537 "cp %2,%0" CR_TAB
6538 "sbc %1,%1" CR_TAB
6539 "sbci %0,0", op, plen, 4);
6541 else if (sign == 0 && MINUS == code)
6543 /* [3s,reg] */
6545 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6547 if (n_bytes == 1)
6548 avr_asm_len ("ldi %0,0x7f" CR_TAB
6549 "sbrs %2,7" CR_TAB
6550 "inc %0", op, plen, 3);
6551 else
6552 avr_asm_len ("ldi %0,0x7f" CR_TAB
6553 "cp %0,%2" CR_TAB
6554 "sbc %1,%1" CR_TAB
6555 "sbci %0,-1", op, plen, 4);
6557 else if ((sign < 0) ^ (SS_MINUS == code_sat))
6559 /* [1s,const,B < 0] [2s,B < 0] */
6560 /* [3s,const,B > 0] [4s,B > 0] */
6562 if (n_bytes == 8)
6564 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6565 need_copy = false;
6568 avr_asm_len ("ldi %0,0x80", op, plen, 1);
6569 if (n_bytes > 1 && need_copy)
6570 avr_asm_len ("clr %1", op, plen, 1);
6572 else if ((sign > 0) ^ (SS_MINUS == code_sat))
6574 /* [1s,const,B > 0] [2s,B > 0] */
6575 /* [3s,const,B < 0] [4s,B < 0] */
6577 if (n_bytes == 8)
6579 avr_asm_len ("sec" CR_TAB
6580 "%~call __sbc_8", op, plen, 1 + len_call);
6581 need_copy = false;
6584 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
6585 if (n_bytes > 1 && need_copy)
6586 avr_asm_len ("ldi %1,0xff", op, plen, 1);
6588 else
6589 gcc_unreachable();
6591 break;
6593 case US_PLUS:
6594 /* [1u] : [2u] */
6596 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
6598 if (n_bytes == 8)
6600 if (MINUS == code)
6601 avr_asm_len ("sec", op, plen, 1);
6602 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
6604 need_copy = false;
6606 else
6608 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
6609 avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
6610 else
6611 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
6612 op, plen, 1);
6614 break; /* US_PLUS */
6616 case US_MINUS:
6617 /* [4u] : [3u] */
6619 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
6621 if (n_bytes == 8)
6623 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6624 need_copy = false;
6626 else
6627 avr_asm_len ("clr %0", op, plen, 1);
6629 break;
6632 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6633 Now copy the right value to the LSBs. */
6635 if (need_copy && n_bytes > 1)
6637 if (US_MINUS == code_sat || US_PLUS == code_sat)
6639 avr_asm_len ("mov %1,%0", op, plen, 1);
6641 if (n_bytes > 2)
6643 op[0] = xop[0];
6644 if (AVR_HAVE_MOVW)
6645 avr_asm_len ("movw %0,%1", op, plen, 1);
6646 else
6647 avr_asm_len ("mov %A0,%1" CR_TAB
6648 "mov %B0,%1", op, plen, 2);
6651 else if (n_bytes > 2)
6653 op[0] = xop[0];
6654 avr_asm_len ("mov %A0,%1" CR_TAB
6655 "mov %B0,%1", op, plen, 2);
6659 if (need_copy && n_bytes == 8)
6661 if (AVR_HAVE_MOVW)
6662 avr_asm_len ("movw %r0+2,%0" CR_TAB
6663 "movw %r0+4,%0", xop, plen, 2);
6664 else
6665 avr_asm_len ("mov %r0+2,%0" CR_TAB
6666 "mov %r0+3,%0" CR_TAB
6667 "mov %r0+4,%0" CR_TAB
6668 "mov %r0+5,%0", xop, plen, 4);
6671 avr_asm_len ("0:", op, plen, 0);
6675 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6676 is ont a compile-time constant:
6678 XOP[0] = XOP[0] +/- XOP[2]
6680 This is a helper for the function below. The only insns that need this
6681 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6683 static const char*
6684 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6686 enum machine_mode mode = GET_MODE (xop[0]);
6688 /* Only pointer modes want to add symbols. */
6690 gcc_assert (mode == HImode || mode == PSImode);
6692 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6694 avr_asm_len (PLUS == code
6695 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6696 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
6697 xop, plen, -2);
6699 if (PSImode == mode)
6700 avr_asm_len (PLUS == code
6701 ? "sbci %C0,hlo8(-(%2))"
6702 : "sbci %C0,hlo8(%2)", xop, plen, 1);
6703 return "";
6707 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6709 INSN is a single_set insn with a binary operation as SET_SRC that is
6710 one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6712 XOP are the operands of INSN. In the case of 64-bit operations with
6713 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6714 The non-saturating insns up to 32 bits may or may not supply a "d" class
6715 scratch as XOP[3].
6717 If PLEN == NULL output the instructions.
6718 If PLEN != NULL set *PLEN to the length of the sequence in words.
6720 PCC is a pointer to store the instructions' effect on cc0.
6721 PCC may be NULL.
6723 PLEN and PCC default to NULL.
6725 Return "" */
6727 const char*
6728 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc)
6730 int cc_plus, cc_minus, cc_dummy;
6731 int len_plus, len_minus;
6732 rtx op[4];
6733 rtx xdest = SET_DEST (single_set (insn));
6734 enum machine_mode mode = GET_MODE (xdest);
6735 enum machine_mode imode = int_mode_for_mode (mode);
6736 int n_bytes = GET_MODE_SIZE (mode);
6737 enum rtx_code code_sat = GET_CODE (SET_SRC (single_set (insn)));
6738 enum rtx_code code
6739 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6740 ? PLUS : MINUS);
6742 if (!pcc)
6743 pcc = &cc_dummy;
6745 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6747 if (PLUS == code_sat || MINUS == code_sat)
6748 code_sat = UNKNOWN;
6750 if (n_bytes <= 4 && REG_P (xop[2]))
6752 avr_out_plus_1 (xop, plen, code, pcc, code_sat);
6753 return "";
6756 if (8 == n_bytes)
6758 op[0] = gen_rtx_REG (DImode, ACC_A);
6759 op[1] = gen_rtx_REG (DImode, ACC_A);
6760 op[2] = avr_to_int_mode (xop[0]);
6762 else
6764 if (!REG_P (xop[2])
6765 && !CONST_INT_P (xop[2])
6766 && !CONST_FIXED_P (xop[2]))
6768 return avr_out_plus_symbol (xop, code, plen, pcc);
6771 op[0] = avr_to_int_mode (xop[0]);
6772 op[1] = avr_to_int_mode (xop[1]);
6773 op[2] = avr_to_int_mode (xop[2]);
6776 /* Saturations and 64-bit operations don't have a clobber operand.
6777 For the other cases, the caller will provide a proper XOP[3]. */
6779 op[3] = PARALLEL == GET_CODE (PATTERN (insn)) ? xop[3] : NULL_RTX;
6781 /* Saturation will need the sign of the original operand. */
6783 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6784 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
6786 /* If we subtract and the subtrahend is a constant, then negate it
6787 so that avr_out_plus_1 can be used. */
6789 if (MINUS == code)
6790 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
6792 /* Work out the shortest sequence. */
6794 avr_out_plus_1 (op, &len_minus, MINUS, &cc_plus, code_sat, sign);
6795 avr_out_plus_1 (op, &len_plus, PLUS, &cc_minus, code_sat, sign);
6797 if (plen)
6799 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6800 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6802 else if (len_minus <= len_plus)
6803 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign);
6804 else
6805 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign);
6807 return "";
6811 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6812 time constant XOP[2]:
6814 XOP[0] = XOP[0] <op> XOP[2]
6816 and return "". If PLEN == NULL, print assembler instructions to perform the
6817 operation; otherwise, set *PLEN to the length of the instruction sequence
6818 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6819 register or SCRATCH if no clobber register is needed for the operation. */
6821 const char*
6822 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6824 /* CODE and MODE of the operation. */
6825 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6826 enum machine_mode mode = GET_MODE (xop[0]);
6828 /* Number of bytes to operate on. */
6829 int i, n_bytes = GET_MODE_SIZE (mode);
6831 /* Value of T-flag (0 or 1) or -1 if unknow. */
6832 int set_t = -1;
6834 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6835 int clobber_val = -1;
6837 /* op[0]: 8-bit destination register
6838 op[1]: 8-bit const int
6839 op[2]: 8-bit clobber register or SCRATCH
6840 op[3]: 8-bit register containing 0xff or NULL_RTX */
6841 rtx op[4];
6843 op[2] = xop[3];
6844 op[3] = NULL_RTX;
6846 if (plen)
6847 *plen = 0;
6849 for (i = 0; i < n_bytes; i++)
6851 /* We operate byte-wise on the destination. */
6852 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6853 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6855 /* 8-bit value to operate with this byte. */
6856 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6858 /* Number of bits set in the current byte of the constant. */
6859 int pop8 = avr_popcount (val8);
6861 /* Registers R16..R31 can operate with immediate. */
6862 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6864 op[0] = reg8;
6865 op[1] = GEN_INT (val8);
6867 switch (code)
6869 case IOR:
6871 if (0 == pop8)
6872 continue;
6873 else if (ld_reg_p)
6874 avr_asm_len ("ori %0,%1", op, plen, 1);
6875 else if (1 == pop8)
6877 if (set_t != 1)
6878 avr_asm_len ("set", op, plen, 1);
6879 set_t = 1;
6881 op[1] = GEN_INT (exact_log2 (val8));
6882 avr_asm_len ("bld %0,%1", op, plen, 1);
6884 else if (8 == pop8)
6886 if (op[3] != NULL_RTX)
6887 avr_asm_len ("mov %0,%3", op, plen, 1);
6888 else
6889 avr_asm_len ("clr %0" CR_TAB
6890 "dec %0", op, plen, 2);
6892 op[3] = op[0];
6894 else
6896 if (clobber_val != (int) val8)
6897 avr_asm_len ("ldi %2,%1", op, plen, 1);
6898 clobber_val = (int) val8;
6900 avr_asm_len ("or %0,%2", op, plen, 1);
6903 continue; /* IOR */
6905 case AND:
6907 if (8 == pop8)
6908 continue;
6909 else if (0 == pop8)
6910 avr_asm_len ("clr %0", op, plen, 1);
6911 else if (ld_reg_p)
6912 avr_asm_len ("andi %0,%1", op, plen, 1);
6913 else if (7 == pop8)
6915 if (set_t != 0)
6916 avr_asm_len ("clt", op, plen, 1);
6917 set_t = 0;
6919 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6920 avr_asm_len ("bld %0,%1", op, plen, 1);
6922 else
6924 if (clobber_val != (int) val8)
6925 avr_asm_len ("ldi %2,%1", op, plen, 1);
6926 clobber_val = (int) val8;
6928 avr_asm_len ("and %0,%2", op, plen, 1);
6931 continue; /* AND */
6933 case XOR:
6935 if (0 == pop8)
6936 continue;
6937 else if (8 == pop8)
6938 avr_asm_len ("com %0", op, plen, 1);
6939 else if (ld_reg_p && val8 == (1 << 7))
6940 avr_asm_len ("subi %0,%1", op, plen, 1);
6941 else
6943 if (clobber_val != (int) val8)
6944 avr_asm_len ("ldi %2,%1", op, plen, 1);
6945 clobber_val = (int) val8;
6947 avr_asm_len ("eor %0,%2", op, plen, 1);
6950 continue; /* XOR */
6952 default:
6953 /* Unknown rtx_code */
6954 gcc_unreachable();
6956 } /* for all sub-bytes */
6958 return "";
6962 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6963 PLEN != NULL: Set *PLEN to the length of that sequence.
6964 Return "". */
6966 const char*
6967 avr_out_addto_sp (rtx *op, int *plen)
6969 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6970 int addend = INTVAL (op[0]);
6972 if (plen)
6973 *plen = 0;
6975 if (addend < 0)
6977 if (flag_verbose_asm || flag_print_asm_name)
6978 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6980 while (addend <= -pc_len)
6982 addend += pc_len;
6983 avr_asm_len ("rcall .", op, plen, 1);
6986 while (addend++ < 0)
6987 avr_asm_len ("push __zero_reg__", op, plen, 1);
6989 else if (addend > 0)
6991 if (flag_verbose_asm || flag_print_asm_name)
6992 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6994 while (addend-- > 0)
6995 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6998 return "";
7002 /* Outputs instructions needed for fixed point type conversion.
7003 This includes converting between any fixed point type, as well
7004 as converting to any integer type. Conversion between integer
7005 types is not supported.
7007 Converting signed fractional types requires a bit shift if converting
7008 to or from any unsigned fractional type because the decimal place is
7009 shifted by 1 bit. When the destination is a signed fractional, the sign
7010 is stored in either the carry or T bit. */
7012 const char*
7013 avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
7015 size_t i;
7016 rtx xop[6];
7017 RTX_CODE shift = UNKNOWN;
7018 bool sign_in_carry = false;
7019 bool msb_in_carry = false;
7020 bool lsb_in_carry = false;
7021 const char *code_ashift = "lsl %0";
7024 #define MAY_CLOBBER(RR) \
7025 /* Shorthand used below. */ \
7026 ((sign_bytes \
7027 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7028 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7029 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7031 struct
7033 /* bytes : Length of operand in bytes.
7034 ibyte : Length of integral part in bytes.
7035 fbyte, fbit : Length of fractional part in bytes, bits. */
7037 bool sbit;
7038 unsigned fbit, bytes, ibyte, fbyte;
7039 unsigned regno, regno_msb;
7040 } dest, src, *val[2] = { &dest, &src };
7042 if (plen)
7043 *plen = 0;
7045 /* Step 0: Determine information on source and destination operand we
7046 ====== will need in the remainder. */
7048 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7050 enum machine_mode mode;
7052 xop[i] = operands[i];
7054 mode = GET_MODE (xop[i]);
7056 val[i]->bytes = GET_MODE_SIZE (mode);
7057 val[i]->regno = REGNO (xop[i]);
7058 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7060 if (SCALAR_INT_MODE_P (mode))
7062 val[i]->sbit = intsigned;
7063 val[i]->fbit = 0;
7065 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7067 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7068 val[i]->fbit = GET_MODE_FBIT (mode);
7070 else
7071 fatal_insn ("unsupported fixed-point conversion", insn);
7073 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7074 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7077 // Byte offset of the decimal point taking into account different place
7078 // of the decimal point in input and output and different register numbers
7079 // of input and output.
7080 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7082 // Number of destination bytes that will come from sign / zero extension.
7083 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7085 // Number of bytes at the low end to be filled with zeros.
7086 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7088 // Do we have a 16-Bit register that is cleared?
7089 rtx clrw = NULL_RTX;
7091 bool sign_extend = src.sbit && sign_bytes;
7093 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7094 shift = ASHIFT;
7095 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7096 shift = ASHIFTRT;
7097 else if (dest.fbit % 8 == src.fbit % 8)
7098 shift = UNKNOWN;
7099 else
7100 gcc_unreachable();
7102 /* Step 1: Clear bytes at the low end and copy payload bits from source
7103 ====== to destination. */
7105 int step = offset < 0 ? 1 : -1;
7106 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7108 // We leared at least that number of registers.
7109 int clr_n = 0;
7111 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7113 // Next regno of destination is needed for MOVW
7114 unsigned d1 = d0 + step;
7116 // Current and next regno of source
7117 signed s0 = d0 - offset;
7118 signed s1 = s0 + step;
7120 // Must current resp. next regno be CLRed? This applies to the low
7121 // bytes of the destination that have no associated source bytes.
7122 bool clr0 = s0 < (signed) src.regno;
7123 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7125 // First gather what code to emit (if any) and additional step to
7126 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7127 // is the source rtx for the current loop iteration.
7128 const char *code = NULL;
7129 int stepw = 0;
7131 if (clr0)
7133 if (AVR_HAVE_MOVW && clr1 && clrw)
7135 xop[2] = all_regs_rtx[d0 & ~1];
7136 xop[3] = clrw;
7137 code = "movw %2,%3";
7138 stepw = step;
7140 else
7142 xop[2] = all_regs_rtx[d0];
7143 code = "clr %2";
7145 if (++clr_n >= 2
7146 && !clrw
7147 && d0 % 2 == (step > 0))
7149 clrw = all_regs_rtx[d0 & ~1];
7153 else if (offset && s0 <= (signed) src.regno_msb)
7155 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7156 && d0 % 2 == (offset > 0)
7157 && d1 <= dest.regno_msb && d1 >= dest.regno
7158 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7160 xop[2] = all_regs_rtx[d0 & ~movw];
7161 xop[3] = all_regs_rtx[s0 & ~movw];
7162 code = movw ? "movw %2,%3" : "mov %2,%3";
7163 stepw = step * movw;
7166 if (code)
7168 if (sign_extend && shift != ASHIFT && !sign_in_carry
7169 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7171 /* We are going to override the sign bit. If we sign-extend,
7172 store the sign in the Carry flag. This is not needed if
7173 the destination will be ASHIFT is the remainder because
7174 the ASHIFT will set Carry without extra instruction. */
7176 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7177 sign_in_carry = true;
7180 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7182 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7183 && src.ibyte > dest.ibyte
7184 && (d0 == src_msb || d0 + stepw == src_msb))
7186 /* We are going to override the MSB. If we shift right,
7187 store the MSB in the Carry flag. This is only needed if
7188 we don't sign-extend becaue with sign-extension the MSB
7189 (the sign) will be produced by the sign extension. */
7191 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7192 msb_in_carry = true;
7195 unsigned src_lsb = dest.regno - offset -1;
7197 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7198 && (d0 == src_lsb || d0 + stepw == src_lsb))
7200 /* We are going to override the new LSB; store it into carry. */
7202 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7203 code_ashift = "rol %0";
7204 lsb_in_carry = true;
7207 avr_asm_len (code, xop, plen, 1);
7208 d0 += stepw;
7212 /* Step 2: Shift destination left by 1 bit position. This might be needed
7213 ====== for signed input and unsigned output. */
7215 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7217 unsigned s0 = dest.regno - offset -1;
7219 if (MAY_CLOBBER (s0))
7220 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7221 else
7222 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7223 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7225 code_ashift = "rol %0";
7226 lsb_in_carry = true;
7229 if (shift == ASHIFT)
7231 for (d0 = dest.regno + zero_bytes;
7232 d0 <= dest.regno_msb - sign_bytes; d0++)
7234 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7235 code_ashift = "rol %0";
7238 lsb_in_carry = false;
7239 sign_in_carry = true;
7242 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7243 ======= it in sign-extension below. */
7245 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7246 && src.ibyte > dest.ibyte)
7248 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7250 if (MAY_CLOBBER (s0))
7251 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7252 else
7253 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7254 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7256 msb_in_carry = true;
7259 /* Step 3: Sign-extend or zero-extend the destination as needed.
7260 ====== */
7262 if (sign_extend && !sign_in_carry)
7264 unsigned s0 = src.regno_msb;
7266 if (MAY_CLOBBER (s0))
7267 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7268 else
7269 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7270 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7272 sign_in_carry = true;
7275 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7277 unsigned copies = 0;
7278 rtx movw = sign_extend ? NULL_RTX : clrw;
7280 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7282 if (AVR_HAVE_MOVW && movw
7283 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7285 xop[2] = all_regs_rtx[d0];
7286 xop[3] = movw;
7287 avr_asm_len ("movw %2,%3", xop, plen, 1);
7288 d0++;
7290 else
7292 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7293 &all_regs_rtx[d0], plen, 1);
7295 if (++copies >= 2 && !movw && d0 % 2 == 1)
7296 movw = all_regs_rtx[d0-1];
7298 } /* for */
7301 /* Step 4: Right shift the destination. This might be needed for
7302 ====== conversions from unsigned to signed. */
7304 if (shift == ASHIFTRT)
7306 const char *code_ashiftrt = "lsr %0";
7308 if (sign_extend || msb_in_carry)
7309 code_ashiftrt = "ror %0";
7311 if (src.sbit && src.ibyte == dest.ibyte)
7312 code_ashiftrt = "asr %0";
7314 for (d0 = dest.regno_msb - sign_bytes;
7315 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7317 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7318 code_ashiftrt = "ror %0";
7322 #undef MAY_CLOBBER
7324 return "";
7328 /* Create RTL split patterns for byte sized rotate expressions. This
7329 produces a series of move instructions and considers overlap situations.
7330 Overlapping non-HImode operands need a scratch register. */
7332 bool
7333 avr_rotate_bytes (rtx operands[])
7335 int i, j;
7336 enum machine_mode mode = GET_MODE (operands[0]);
7337 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
7338 bool same_reg = rtx_equal_p (operands[0], operands[1]);
7339 int num = INTVAL (operands[2]);
7340 rtx scratch = operands[3];
7341 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7342 Word move if no scratch is needed, otherwise use size of scratch. */
7343 enum machine_mode move_mode = QImode;
7344 int move_size, offset, size;
7346 if (num & 0xf)
7347 move_mode = QImode;
7348 else if ((mode == SImode && !same_reg) || !overlapped)
7349 move_mode = HImode;
7350 else
7351 move_mode = GET_MODE (scratch);
7353 /* Force DI rotate to use QI moves since other DI moves are currently split
7354 into QI moves so forward propagation works better. */
7355 if (mode == DImode)
7356 move_mode = QImode;
7357 /* Make scratch smaller if needed. */
7358 if (SCRATCH != GET_CODE (scratch)
7359 && HImode == GET_MODE (scratch)
7360 && QImode == move_mode)
7361 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
7363 move_size = GET_MODE_SIZE (move_mode);
7364 /* Number of bytes/words to rotate. */
7365 offset = (num >> 3) / move_size;
7366 /* Number of moves needed. */
7367 size = GET_MODE_SIZE (mode) / move_size;
7368 /* Himode byte swap is special case to avoid a scratch register. */
7369 if (mode == HImode && same_reg)
7371 /* HImode byte swap, using xor. This is as quick as using scratch. */
7372 rtx src, dst;
7373 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
7374 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
7375 if (!rtx_equal_p (dst, src))
7377 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7378 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
7379 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7382 else
7384 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7385 /* Create linked list of moves to determine move order. */
7386 struct {
7387 rtx src, dst;
7388 int links;
7389 } move[MAX_SIZE + 8];
7390 int blocked, moves;
7392 gcc_assert (size <= MAX_SIZE);
7393 /* Generate list of subreg moves. */
7394 for (i = 0; i < size; i++)
7396 int from = i;
7397 int to = (from + offset) % size;
7398 move[i].src = simplify_gen_subreg (move_mode, operands[1],
7399 mode, from * move_size);
7400 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
7401 mode, to * move_size);
7402 move[i].links = -1;
7404 /* Mark dependence where a dst of one move is the src of another move.
7405 The first move is a conflict as it must wait until second is
7406 performed. We ignore moves to self - we catch this later. */
7407 if (overlapped)
7408 for (i = 0; i < size; i++)
7409 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
7410 for (j = 0; j < size; j++)
7411 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
7413 /* The dst of move i is the src of move j. */
7414 move[i].links = j;
7415 break;
7418 blocked = -1;
7419 moves = 0;
7420 /* Go through move list and perform non-conflicting moves. As each
7421 non-overlapping move is made, it may remove other conflicts
7422 so the process is repeated until no conflicts remain. */
7425 blocked = -1;
7426 moves = 0;
7427 /* Emit move where dst is not also a src or we have used that
7428 src already. */
7429 for (i = 0; i < size; i++)
7430 if (move[i].src != NULL_RTX)
7432 if (move[i].links == -1
7433 || move[move[i].links].src == NULL_RTX)
7435 moves++;
7436 /* Ignore NOP moves to self. */
7437 if (!rtx_equal_p (move[i].dst, move[i].src))
7438 emit_move_insn (move[i].dst, move[i].src);
7440 /* Remove conflict from list. */
7441 move[i].src = NULL_RTX;
7443 else
7444 blocked = i;
7447 /* Check for deadlock. This is when no moves occurred and we have
7448 at least one blocked move. */
7449 if (moves == 0 && blocked != -1)
7451 /* Need to use scratch register to break deadlock.
7452 Add move to put dst of blocked move into scratch.
7453 When this move occurs, it will break chain deadlock.
7454 The scratch register is substituted for real move. */
7456 gcc_assert (SCRATCH != GET_CODE (scratch));
7458 move[size].src = move[blocked].dst;
7459 move[size].dst = scratch;
7460 /* Scratch move is never blocked. */
7461 move[size].links = -1;
7462 /* Make sure we have valid link. */
7463 gcc_assert (move[blocked].links != -1);
7464 /* Replace src of blocking move with scratch reg. */
7465 move[move[blocked].links].src = scratch;
7466 /* Make dependent on scratch move occuring. */
7467 move[blocked].links = size;
7468 size=size+1;
7471 while (blocked != -1);
7473 return true;
7477 /* Worker function for `ADJUST_INSN_LENGTH'. */
7478 /* Modifies the length assigned to instruction INSN
7479 LEN is the initially computed length of the insn. */
7482 avr_adjust_insn_length (rtx insn, int len)
7484 rtx *op = recog_data.operand;
7485 enum attr_adjust_len adjust_len;
7487 /* Some complex insns don't need length adjustment and therefore
7488 the length need not/must not be adjusted for these insns.
7489 It is easier to state this in an insn attribute "adjust_len" than
7490 to clutter up code here... */
7492 if (-1 == recog_memoized (insn))
7494 return len;
7497 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7499 adjust_len = get_attr_adjust_len (insn);
7501 if (adjust_len == ADJUST_LEN_NO)
7503 /* Nothing to adjust: The length from attribute "length" is fine.
7504 This is the default. */
7506 return len;
7509 /* Extract insn's operands. */
7511 extract_constrain_insn_cached (insn);
7513 /* Dispatch to right function. */
7515 switch (adjust_len)
7517 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7518 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7519 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7521 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7523 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7524 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7526 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7527 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7528 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7529 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7530 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7531 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7532 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7534 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7535 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7537 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7538 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7539 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7540 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7541 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7543 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7544 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7545 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7547 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7548 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7549 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7551 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7552 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7553 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7555 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7556 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7557 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7559 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7561 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7563 default:
7564 gcc_unreachable();
7567 return len;
7570 /* Return nonzero if register REG dead after INSN. */
7573 reg_unused_after (rtx insn, rtx reg)
7575 return (dead_or_set_p (insn, reg)
7576 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7579 /* Return nonzero if REG is not used after INSN.
7580 We assume REG is a reload reg, and therefore does
7581 not live past labels. It may live past calls or jumps though. */
7584 _reg_unused_after (rtx insn, rtx reg)
7586 enum rtx_code code;
7587 rtx set;
7589 /* If the reg is set by this instruction, then it is safe for our
7590 case. Disregard the case where this is a store to memory, since
7591 we are checking a register used in the store address. */
7592 set = single_set (insn);
7593 if (set && GET_CODE (SET_DEST (set)) != MEM
7594 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7595 return 1;
7597 while ((insn = NEXT_INSN (insn)))
7599 rtx set;
7600 code = GET_CODE (insn);
7602 #if 0
7603 /* If this is a label that existed before reload, then the register
7604 if dead here. However, if this is a label added by reorg, then
7605 the register may still be live here. We can't tell the difference,
7606 so we just ignore labels completely. */
7607 if (code == CODE_LABEL)
7608 return 1;
7609 /* else */
7610 #endif
7612 if (!INSN_P (insn))
7613 continue;
7615 if (code == JUMP_INSN)
7616 return 0;
7618 /* If this is a sequence, we must handle them all at once.
7619 We could have for instance a call that sets the target register,
7620 and an insn in a delay slot that uses the register. In this case,
7621 we must return 0. */
7622 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7624 int i;
7625 int retval = 0;
7627 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7629 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7630 rtx set = single_set (this_insn);
7632 if (GET_CODE (this_insn) == CALL_INSN)
7633 code = CALL_INSN;
7634 else if (GET_CODE (this_insn) == JUMP_INSN)
7636 if (INSN_ANNULLED_BRANCH_P (this_insn))
7637 return 0;
7638 code = JUMP_INSN;
7641 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7642 return 0;
7643 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7645 if (GET_CODE (SET_DEST (set)) != MEM)
7646 retval = 1;
7647 else
7648 return 0;
7650 if (set == 0
7651 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7652 return 0;
7654 if (retval == 1)
7655 return 1;
7656 else if (code == JUMP_INSN)
7657 return 0;
7660 if (code == CALL_INSN)
7662 rtx tem;
7663 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7664 if (GET_CODE (XEXP (tem, 0)) == USE
7665 && REG_P (XEXP (XEXP (tem, 0), 0))
7666 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7667 return 0;
7668 if (call_used_regs[REGNO (reg)])
7669 return 1;
7672 set = single_set (insn);
7674 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7675 return 0;
7676 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7677 return GET_CODE (SET_DEST (set)) != MEM;
7678 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7679 return 0;
7681 return 1;
7685 /* Implement `TARGET_ASM_INTEGER'. */
7686 /* Target hook for assembling integer objects. The AVR version needs
7687 special handling for references to certain labels. */
7689 static bool
7690 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
7692 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
7693 && text_segment_operand (x, VOIDmode))
7695 fputs ("\t.word\tgs(", asm_out_file);
7696 output_addr_const (asm_out_file, x);
7697 fputs (")\n", asm_out_file);
7699 return true;
7701 else if (GET_MODE (x) == PSImode)
7703 /* This needs binutils 2.23+, see PR binutils/13503 */
7705 fputs ("\t.byte\tlo8(", asm_out_file);
7706 output_addr_const (asm_out_file, x);
7707 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7709 fputs ("\t.byte\thi8(", asm_out_file);
7710 output_addr_const (asm_out_file, x);
7711 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7713 fputs ("\t.byte\thh8(", asm_out_file);
7714 output_addr_const (asm_out_file, x);
7715 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7717 return true;
7719 else if (CONST_FIXED_P (x))
7721 unsigned n;
7723 /* varasm fails to handle big fixed modes that don't fit in hwi. */
7725 for (n = 0; n < size; n++)
7727 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
7728 default_assemble_integer (xn, 1, aligned_p);
7731 return true;
7734 return default_assemble_integer (x, size, aligned_p);
7738 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
7739 /* Return value is nonzero if pseudos that have been
7740 assigned to registers of class CLASS would likely be spilled
7741 because registers of CLASS are needed for spill registers. */
7743 static bool
7744 avr_class_likely_spilled_p (reg_class_t c)
7746 return (c != ALL_REGS && c != ADDW_REGS);
7750 /* Valid attributes:
7751 progmem - Put data to program memory.
7752 signal - Make a function to be hardware interrupt.
7753 After function prologue interrupts remain disabled.
7754 interrupt - Make a function to be hardware interrupt. Before function
7755 prologue interrupts are enabled by means of SEI.
7756 naked - Don't generate function prologue/epilogue and RET
7757 instruction. */
7759 /* Handle a "progmem" attribute; arguments as in
7760 struct attribute_spec.handler. */
7762 static tree
7763 avr_handle_progmem_attribute (tree *node, tree name,
7764 tree args ATTRIBUTE_UNUSED,
7765 int flags ATTRIBUTE_UNUSED,
7766 bool *no_add_attrs)
7768 if (DECL_P (*node))
7770 if (TREE_CODE (*node) == TYPE_DECL)
7772 /* This is really a decl attribute, not a type attribute,
7773 but try to handle it for GCC 3.0 backwards compatibility. */
7775 tree type = TREE_TYPE (*node);
7776 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
7777 tree newtype = build_type_attribute_variant (type, attr);
7779 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
7780 TREE_TYPE (*node) = newtype;
7781 *no_add_attrs = true;
7783 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
7785 *no_add_attrs = false;
7787 else
7789 warning (OPT_Wattributes, "%qE attribute ignored",
7790 name);
7791 *no_add_attrs = true;
7795 return NULL_TREE;
7798 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7799 struct attribute_spec.handler. */
7801 static tree
7802 avr_handle_fndecl_attribute (tree *node, tree name,
7803 tree args ATTRIBUTE_UNUSED,
7804 int flags ATTRIBUTE_UNUSED,
7805 bool *no_add_attrs)
7807 if (TREE_CODE (*node) != FUNCTION_DECL)
7809 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7810 name);
7811 *no_add_attrs = true;
7814 return NULL_TREE;
7817 static tree
7818 avr_handle_fntype_attribute (tree *node, tree name,
7819 tree args ATTRIBUTE_UNUSED,
7820 int flags ATTRIBUTE_UNUSED,
7821 bool *no_add_attrs)
7823 if (TREE_CODE (*node) != FUNCTION_TYPE)
7825 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7826 name);
7827 *no_add_attrs = true;
7830 return NULL_TREE;
7834 /* AVR attributes. */
7835 static const struct attribute_spec
7836 avr_attribute_table[] =
7838 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7839 affects_type_identity } */
7840 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
7841 false },
7842 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7843 false },
7844 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
7845 false },
7846 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
7847 false },
7848 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
7849 false },
7850 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
7851 false },
7852 { NULL, 0, 0, false, false, false, NULL, false }
7856 /* Look if DECL shall be placed in program memory space by
7857 means of attribute `progmem' or some address-space qualifier.
7858 Return non-zero if DECL is data that must end up in Flash and
7859 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7861 Return 2 if DECL is located in 24-bit flash address-space
7862 Return 1 if DECL is located in 16-bit flash address-space
7863 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7864 Return 0 otherwise */
7867 avr_progmem_p (tree decl, tree attributes)
7869 tree a;
7871 if (TREE_CODE (decl) != VAR_DECL)
7872 return 0;
7874 if (avr_decl_memx_p (decl))
7875 return 2;
7877 if (avr_decl_flash_p (decl))
7878 return 1;
7880 if (NULL_TREE
7881 != lookup_attribute ("progmem", attributes))
7882 return -1;
7884 a = decl;
7887 a = TREE_TYPE(a);
7888 while (TREE_CODE (a) == ARRAY_TYPE);
7890 if (a == error_mark_node)
7891 return 0;
7893 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
7894 return -1;
7896 return 0;
7900 /* Scan type TYP for pointer references to address space ASn.
7901 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7902 the AS are also declared to be CONST.
7903 Otherwise, return the respective address space, i.e. a value != 0. */
7905 static addr_space_t
7906 avr_nonconst_pointer_addrspace (tree typ)
7908 while (ARRAY_TYPE == TREE_CODE (typ))
7909 typ = TREE_TYPE (typ);
7911 if (POINTER_TYPE_P (typ))
7913 addr_space_t as;
7914 tree target = TREE_TYPE (typ);
7916 /* Pointer to function: Test the function's return type. */
7918 if (FUNCTION_TYPE == TREE_CODE (target))
7919 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
7921 /* "Ordinary" pointers... */
7923 while (TREE_CODE (target) == ARRAY_TYPE)
7924 target = TREE_TYPE (target);
7926 /* Pointers to non-generic address space must be const.
7927 Refuse address spaces outside the device's flash. */
7929 as = TYPE_ADDR_SPACE (target);
7931 if (!ADDR_SPACE_GENERIC_P (as)
7932 && (!TYPE_READONLY (target)
7933 || avr_addrspace[as].segment >= avr_current_device->n_flash))
7935 return as;
7938 /* Scan pointer's target type. */
7940 return avr_nonconst_pointer_addrspace (target);
7943 return ADDR_SPACE_GENERIC;
7947 /* Sanity check NODE so that all pointers targeting non-generic address spaces
7948 go along with CONST qualifier. Writing to these address spaces should
7949 be detected and complained about as early as possible. */
7951 static bool
7952 avr_pgm_check_var_decl (tree node)
7954 const char *reason = NULL;
7956 addr_space_t as = ADDR_SPACE_GENERIC;
7958 gcc_assert (as == 0);
7960 if (avr_log.progmem)
7961 avr_edump ("%?: %t\n", node);
7963 switch (TREE_CODE (node))
7965 default:
7966 break;
7968 case VAR_DECL:
7969 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7970 reason = "variable";
7971 break;
7973 case PARM_DECL:
7974 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7975 reason = "function parameter";
7976 break;
7978 case FIELD_DECL:
7979 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7980 reason = "structure field";
7981 break;
7983 case FUNCTION_DECL:
7984 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
7986 reason = "return type of function";
7987 break;
7989 case POINTER_TYPE:
7990 if (as = avr_nonconst_pointer_addrspace (node), as)
7991 reason = "pointer";
7992 break;
7995 if (reason)
7997 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7999 if (TYPE_P (node))
8000 error ("%qT uses address space %qs beyond flash of %qs",
8001 node, avr_addrspace[as].name, avr_current_device->name);
8002 else
8003 error ("%s %q+D uses address space %qs beyond flash of %qs",
8004 reason, node, avr_addrspace[as].name,
8005 avr_current_device->name);
8007 else
8009 if (TYPE_P (node))
8010 error ("pointer targeting address space %qs must be const in %qT",
8011 avr_addrspace[as].name, node);
8012 else
8013 error ("pointer targeting address space %qs must be const"
8014 " in %s %q+D",
8015 avr_addrspace[as].name, reason, node);
8019 return reason == NULL;
8023 /* Add the section attribute if the variable is in progmem. */
8025 static void
8026 avr_insert_attributes (tree node, tree *attributes)
8028 avr_pgm_check_var_decl (node);
8030 if (TREE_CODE (node) == VAR_DECL
8031 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8032 && avr_progmem_p (node, *attributes))
8034 addr_space_t as;
8035 tree node0 = node;
8037 /* For C++, we have to peel arrays in order to get correct
8038 determination of readonlyness. */
8041 node0 = TREE_TYPE (node0);
8042 while (TREE_CODE (node0) == ARRAY_TYPE);
8044 if (error_mark_node == node0)
8045 return;
8047 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8049 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8051 error ("variable %q+D located in address space %qs"
8052 " beyond flash of %qs",
8053 node, avr_addrspace[as].name, avr_current_device->name);
8056 if (!TYPE_READONLY (node0)
8057 && !TREE_READONLY (node))
8059 const char *reason = "__attribute__((progmem))";
8061 if (!ADDR_SPACE_GENERIC_P (as))
8062 reason = avr_addrspace[as].name;
8064 if (avr_log.progmem)
8065 avr_edump ("\n%?: %t\n%t\n", node, node0);
8067 error ("variable %q+D must be const in order to be put into"
8068 " read-only section by means of %qs", node, reason);
8074 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8075 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8076 /* Track need of __do_clear_bss. */
8078 void
8079 avr_asm_output_aligned_decl_common (FILE * stream,
8080 const_tree decl ATTRIBUTE_UNUSED,
8081 const char *name,
8082 unsigned HOST_WIDE_INT size,
8083 unsigned int align, bool local_p)
8085 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8086 There is no need to trigger __do_clear_bss code for them. */
8088 if (!STR_PREFIX_P (name, "__gnu_lto"))
8089 avr_need_clear_bss_p = true;
8091 if (local_p)
8092 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8093 else
8094 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8098 /* Unnamed section callback for data_section
8099 to track need of __do_copy_data. */
8101 static void
8102 avr_output_data_section_asm_op (const void *data)
8104 avr_need_copy_data_p = true;
8106 /* Dispatch to default. */
8107 output_section_asm_op (data);
8111 /* Unnamed section callback for bss_section
8112 to track need of __do_clear_bss. */
8114 static void
8115 avr_output_bss_section_asm_op (const void *data)
8117 avr_need_clear_bss_p = true;
8119 /* Dispatch to default. */
8120 output_section_asm_op (data);
8124 /* Unnamed section callback for progmem*.data sections. */
8126 static void
8127 avr_output_progmem_section_asm_op (const void *data)
8129 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8130 (const char*) data);
8134 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8136 static void
8137 avr_asm_init_sections (void)
8139 /* Set up a section for jump tables. Alignment is handled by
8140 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8142 if (AVR_HAVE_JMP_CALL)
8144 progmem_swtable_section
8145 = get_unnamed_section (0, output_section_asm_op,
8146 "\t.section\t.progmem.gcc_sw_table"
8147 ",\"a\",@progbits");
8149 else
8151 progmem_swtable_section
8152 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8153 "\t.section\t.progmem.gcc_sw_table"
8154 ",\"ax\",@progbits");
8157 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8158 resp. `avr_need_copy_data_p'. */
8160 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8161 data_section->unnamed.callback = avr_output_data_section_asm_op;
8162 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8166 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8168 static section*
8169 avr_asm_function_rodata_section (tree decl)
8171 /* If a function is unused and optimized out by -ffunction-sections
8172 and --gc-sections, ensure that the same will happen for its jump
8173 tables by putting them into individual sections. */
8175 unsigned int flags;
8176 section * frodata;
8178 /* Get the frodata section from the default function in varasm.c
8179 but treat function-associated data-like jump tables as code
8180 rather than as user defined data. AVR has no constant pools. */
8182 int fdata = flag_data_sections;
8184 flag_data_sections = flag_function_sections;
8185 frodata = default_function_rodata_section (decl);
8186 flag_data_sections = fdata;
8187 flags = frodata->common.flags;
8190 if (frodata != readonly_data_section
8191 && flags & SECTION_NAMED)
8193 /* Adjust section flags and replace section name prefix. */
8195 unsigned int i;
8197 static const char* const prefix[] =
8199 ".rodata", ".progmem.gcc_sw_table",
8200 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8203 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8205 const char * old_prefix = prefix[i];
8206 const char * new_prefix = prefix[i+1];
8207 const char * name = frodata->named.name;
8209 if (STR_PREFIX_P (name, old_prefix))
8211 const char *rname = ACONCAT ((new_prefix,
8212 name + strlen (old_prefix), NULL));
8213 flags &= ~SECTION_CODE;
8214 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8216 return get_section (rname, flags, frodata->named.decl);
8221 return progmem_swtable_section;
8225 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8226 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8228 static void
8229 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
8231 if (flags & AVR_SECTION_PROGMEM)
8233 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8234 const char *old_prefix = ".rodata";
8235 const char *new_prefix = avr_addrspace[as].section_name;
8237 if (STR_PREFIX_P (name, old_prefix))
8239 const char *sname = ACONCAT ((new_prefix,
8240 name + strlen (old_prefix), NULL));
8241 default_elf_asm_named_section (sname, flags, decl);
8242 return;
8245 default_elf_asm_named_section (new_prefix, flags, decl);
8246 return;
8249 if (!avr_need_copy_data_p)
8250 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8251 || STR_PREFIX_P (name, ".rodata")
8252 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
8254 if (!avr_need_clear_bss_p)
8255 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8257 default_elf_asm_named_section (name, flags, decl);
8261 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8263 static unsigned int
8264 avr_section_type_flags (tree decl, const char *name, int reloc)
8266 unsigned int flags = default_section_type_flags (decl, name, reloc);
8268 if (STR_PREFIX_P (name, ".noinit"))
8270 if (decl && TREE_CODE (decl) == VAR_DECL
8271 && DECL_INITIAL (decl) == NULL_TREE)
8272 flags |= SECTION_BSS; /* @nobits */
8273 else
8274 warning (0, "only uninitialized variables can be placed in the "
8275 ".noinit section");
8278 if (decl && DECL_P (decl)
8279 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8281 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8283 /* Attribute progmem puts data in generic address space.
8284 Set section flags as if it was in __flash to get the right
8285 section prefix in the remainder. */
8287 if (ADDR_SPACE_GENERIC_P (as))
8288 as = ADDR_SPACE_FLASH;
8290 flags |= as * SECTION_MACH_DEP;
8291 flags &= ~SECTION_WRITE;
8292 flags &= ~SECTION_BSS;
8295 return flags;
8299 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8301 static void
8302 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8304 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8305 readily available, see PR34734. So we postpone the warning
8306 about uninitialized data in program memory section until here. */
8308 if (new_decl_p
8309 && decl && DECL_P (decl)
8310 && NULL_TREE == DECL_INITIAL (decl)
8311 && !DECL_EXTERNAL (decl)
8312 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8314 warning (OPT_Wuninitialized,
8315 "uninitialized variable %q+D put into "
8316 "program memory area", decl);
8319 default_encode_section_info (decl, rtl, new_decl_p);
8321 if (decl && DECL_P (decl)
8322 && TREE_CODE (decl) != FUNCTION_DECL
8323 && MEM_P (rtl)
8324 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8326 rtx sym = XEXP (rtl, 0);
8327 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8329 /* PSTR strings are in generic space but located in flash:
8330 patch address space. */
8332 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8333 as = ADDR_SPACE_FLASH;
8335 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8340 /* Implement `TARGET_ASM_SELECT_SECTION' */
8342 static section *
8343 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8345 section * sect = default_elf_select_section (decl, reloc, align);
8347 if (decl && DECL_P (decl)
8348 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8350 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8352 /* __progmem__ goes in generic space but shall be allocated to
8353 .progmem.data */
8355 if (ADDR_SPACE_GENERIC_P (as))
8356 as = ADDR_SPACE_FLASH;
8358 if (sect->common.flags & SECTION_NAMED)
8360 const char * name = sect->named.name;
8361 const char * old_prefix = ".rodata";
8362 const char * new_prefix = avr_addrspace[as].section_name;
8364 if (STR_PREFIX_P (name, old_prefix))
8366 const char *sname = ACONCAT ((new_prefix,
8367 name + strlen (old_prefix), NULL));
8368 return get_section (sname, sect->common.flags, sect->named.decl);
8372 if (!progmem_section[as])
8374 progmem_section[as]
8375 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8376 avr_addrspace[as].section_name);
8379 return progmem_section[as];
8382 return sect;
8385 /* Implement `TARGET_ASM_FILE_START'. */
8386 /* Outputs some text at the start of each assembler file. */
8388 static void
8389 avr_file_start (void)
8391 int sfr_offset = avr_current_arch->sfr_offset;
8393 if (avr_current_arch->asm_only)
8394 error ("MCU %qs supported for assembler only", avr_current_device->name);
8396 default_file_start ();
8398 /* Print I/O addresses of some SFRs used with IN and OUT. */
8400 if (AVR_HAVE_SPH)
8401 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8403 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8404 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8405 if (AVR_HAVE_RAMPZ)
8406 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8407 if (AVR_HAVE_RAMPY)
8408 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8409 if (AVR_HAVE_RAMPX)
8410 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8411 if (AVR_HAVE_RAMPD)
8412 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8413 if (AVR_XMEGA)
8414 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
8415 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8416 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8420 /* Implement `TARGET_ASM_FILE_END'. */
8421 /* Outputs to the stdio stream FILE some
8422 appropriate text to go at the end of an assembler file. */
8424 static void
8425 avr_file_end (void)
8427 /* Output these only if there is anything in the
8428 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8429 input section(s) - some code size can be saved by not
8430 linking in the initialization code from libgcc if resp.
8431 sections are empty, see PR18145. */
8433 if (avr_need_copy_data_p)
8434 fputs (".global __do_copy_data\n", asm_out_file);
8436 if (avr_need_clear_bss_p)
8437 fputs (".global __do_clear_bss\n", asm_out_file);
8441 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8442 /* Choose the order in which to allocate hard registers for
8443 pseudo-registers local to a basic block.
8445 Store the desired register order in the array `reg_alloc_order'.
8446 Element 0 should be the register to allocate first; element 1, the
8447 next register; and so on. */
8449 void
8450 avr_adjust_reg_alloc_order (void)
8452 unsigned int i;
8453 static const int order_0[] =
8455 24, 25,
8456 18, 19, 20, 21, 22, 23,
8457 30, 31,
8458 26, 27, 28, 29,
8459 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8460 0, 1,
8461 32, 33, 34, 35
8463 static const int order_1[] =
8465 18, 19, 20, 21, 22, 23, 24, 25,
8466 30, 31,
8467 26, 27, 28, 29,
8468 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8469 0, 1,
8470 32, 33, 34, 35
8472 static const int order_2[] =
8474 25, 24, 23, 22, 21, 20, 19, 18,
8475 30, 31,
8476 26, 27, 28, 29,
8477 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8478 1, 0,
8479 32, 33, 34, 35
8482 const int *order = (TARGET_ORDER_1 ? order_1 :
8483 TARGET_ORDER_2 ? order_2 :
8484 order_0);
8485 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8486 reg_alloc_order[i] = order[i];
8490 /* Implement `TARGET_REGISTER_MOVE_COST' */
8492 static int
8493 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8494 reg_class_t from, reg_class_t to)
8496 return (from == STACK_REG ? 6
8497 : to == STACK_REG ? 12
8498 : 2);
8502 /* Implement `TARGET_MEMORY_MOVE_COST' */
8504 static int
8505 avr_memory_move_cost (enum machine_mode mode,
8506 reg_class_t rclass ATTRIBUTE_UNUSED,
8507 bool in ATTRIBUTE_UNUSED)
8509 return (mode == QImode ? 2
8510 : mode == HImode ? 4
8511 : mode == SImode ? 8
8512 : mode == SFmode ? 8
8513 : 16);
8517 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8518 cost of an RTX operand given its context. X is the rtx of the
8519 operand, MODE is its mode, and OUTER is the rtx_code of this
8520 operand's parent operator. */
8522 static int
8523 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
8524 int opno, bool speed)
8526 enum rtx_code code = GET_CODE (x);
8527 int total;
8529 switch (code)
8531 case REG:
8532 case SUBREG:
8533 return 0;
8535 case CONST_INT:
8536 case CONST_FIXED:
8537 case CONST_DOUBLE:
8538 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
8540 default:
8541 break;
8544 total = 0;
8545 avr_rtx_costs (x, code, outer, opno, &total, speed);
8546 return total;
8549 /* Worker function for AVR backend's rtx_cost function.
8550 X is rtx expression whose cost is to be calculated.
8551 Return true if the complete cost has been computed.
8552 Return false if subexpressions should be scanned.
8553 In either case, *TOTAL contains the cost result. */
8555 static bool
8556 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8557 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8559 enum rtx_code code = (enum rtx_code) codearg;
8560 enum machine_mode mode = GET_MODE (x);
8561 HOST_WIDE_INT val;
8563 switch (code)
8565 case CONST_INT:
8566 case CONST_FIXED:
8567 case CONST_DOUBLE:
8568 case SYMBOL_REF:
8569 case CONST:
8570 case LABEL_REF:
8571 /* Immediate constants are as cheap as registers. */
8572 *total = 0;
8573 return true;
8575 case MEM:
8576 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8577 return true;
8579 case NEG:
8580 switch (mode)
8582 case QImode:
8583 case SFmode:
8584 *total = COSTS_N_INSNS (1);
8585 break;
8587 case HImode:
8588 case PSImode:
8589 case SImode:
8590 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8591 break;
8593 default:
8594 return false;
8596 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8597 return true;
8599 case ABS:
8600 switch (mode)
8602 case QImode:
8603 case SFmode:
8604 *total = COSTS_N_INSNS (1);
8605 break;
8607 default:
8608 return false;
8610 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8611 return true;
8613 case NOT:
8614 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8615 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8616 return true;
8618 case ZERO_EXTEND:
8619 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8620 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8621 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8622 return true;
8624 case SIGN_EXTEND:
8625 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8626 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8627 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8628 return true;
8630 case PLUS:
8631 switch (mode)
8633 case QImode:
8634 if (AVR_HAVE_MUL
8635 && MULT == GET_CODE (XEXP (x, 0))
8636 && register_operand (XEXP (x, 1), QImode))
8638 /* multiply-add */
8639 *total = COSTS_N_INSNS (speed ? 4 : 3);
8640 /* multiply-add with constant: will be split and load constant. */
8641 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8642 *total = COSTS_N_INSNS (1) + *total;
8643 return true;
8645 *total = COSTS_N_INSNS (1);
8646 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8647 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8648 break;
8650 case HImode:
8651 if (AVR_HAVE_MUL
8652 && (MULT == GET_CODE (XEXP (x, 0))
8653 || ASHIFT == GET_CODE (XEXP (x, 0)))
8654 && register_operand (XEXP (x, 1), HImode)
8655 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8656 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8658 /* multiply-add */
8659 *total = COSTS_N_INSNS (speed ? 5 : 4);
8660 /* multiply-add with constant: will be split and load constant. */
8661 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8662 *total = COSTS_N_INSNS (1) + *total;
8663 return true;
8665 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8667 *total = COSTS_N_INSNS (2);
8668 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8669 speed);
8671 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8672 *total = COSTS_N_INSNS (1);
8673 else
8674 *total = COSTS_N_INSNS (2);
8675 break;
8677 case PSImode:
8678 if (!CONST_INT_P (XEXP (x, 1)))
8680 *total = COSTS_N_INSNS (3);
8681 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8682 speed);
8684 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8685 *total = COSTS_N_INSNS (2);
8686 else
8687 *total = COSTS_N_INSNS (3);
8688 break;
8690 case SImode:
8691 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8693 *total = COSTS_N_INSNS (4);
8694 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8695 speed);
8697 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8698 *total = COSTS_N_INSNS (1);
8699 else
8700 *total = COSTS_N_INSNS (4);
8701 break;
8703 default:
8704 return false;
8706 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8707 return true;
8709 case MINUS:
8710 if (AVR_HAVE_MUL
8711 && QImode == mode
8712 && register_operand (XEXP (x, 0), QImode)
8713 && MULT == GET_CODE (XEXP (x, 1)))
8715 /* multiply-sub */
8716 *total = COSTS_N_INSNS (speed ? 4 : 3);
8717 /* multiply-sub with constant: will be split and load constant. */
8718 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8719 *total = COSTS_N_INSNS (1) + *total;
8720 return true;
8722 if (AVR_HAVE_MUL
8723 && HImode == mode
8724 && register_operand (XEXP (x, 0), HImode)
8725 && (MULT == GET_CODE (XEXP (x, 1))
8726 || ASHIFT == GET_CODE (XEXP (x, 1)))
8727 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
8728 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
8730 /* multiply-sub */
8731 *total = COSTS_N_INSNS (speed ? 5 : 4);
8732 /* multiply-sub with constant: will be split and load constant. */
8733 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8734 *total = COSTS_N_INSNS (1) + *total;
8735 return true;
8737 /* FALLTHRU */
8738 case AND:
8739 case IOR:
8740 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8741 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8742 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8743 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8744 return true;
8746 case XOR:
8747 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8748 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8749 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8750 return true;
8752 case MULT:
8753 switch (mode)
8755 case QImode:
8756 if (AVR_HAVE_MUL)
8757 *total = COSTS_N_INSNS (!speed ? 3 : 4);
8758 else if (!speed)
8759 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8760 else
8761 return false;
8762 break;
8764 case HImode:
8765 if (AVR_HAVE_MUL)
8767 rtx op0 = XEXP (x, 0);
8768 rtx op1 = XEXP (x, 1);
8769 enum rtx_code code0 = GET_CODE (op0);
8770 enum rtx_code code1 = GET_CODE (op1);
8771 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
8772 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
8774 if (ex0
8775 && (u8_operand (op1, HImode)
8776 || s8_operand (op1, HImode)))
8778 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8779 return true;
8781 if (ex0
8782 && register_operand (op1, HImode))
8784 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8785 return true;
8787 else if (ex0 || ex1)
8789 *total = COSTS_N_INSNS (!speed ? 3 : 5);
8790 return true;
8792 else if (register_operand (op0, HImode)
8793 && (u8_operand (op1, HImode)
8794 || s8_operand (op1, HImode)))
8796 *total = COSTS_N_INSNS (!speed ? 6 : 9);
8797 return true;
8799 else
8800 *total = COSTS_N_INSNS (!speed ? 7 : 10);
8802 else if (!speed)
8803 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8804 else
8805 return false;
8806 break;
8808 case PSImode:
8809 if (!speed)
8810 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8811 else
8812 *total = 10;
8813 break;
8815 case SImode:
8816 if (AVR_HAVE_MUL)
8818 if (!speed)
8820 /* Add some additional costs besides CALL like moves etc. */
8822 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8824 else
8826 /* Just a rough estimate. Even with -O2 we don't want bulky
8827 code expanded inline. */
8829 *total = COSTS_N_INSNS (25);
8832 else
8834 if (speed)
8835 *total = COSTS_N_INSNS (300);
8836 else
8837 /* Add some additional costs besides CALL like moves etc. */
8838 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8841 return true;
8843 default:
8844 return false;
8846 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8847 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8848 return true;
8850 case DIV:
8851 case MOD:
8852 case UDIV:
8853 case UMOD:
8854 if (!speed)
8855 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8856 else
8857 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
8858 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8859 /* For div/mod with const-int divisor we have at least the cost of
8860 loading the divisor. */
8861 if (CONST_INT_P (XEXP (x, 1)))
8862 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
8863 /* Add some overall penaly for clobbering and moving around registers */
8864 *total += COSTS_N_INSNS (2);
8865 return true;
8867 case ROTATE:
8868 switch (mode)
8870 case QImode:
8871 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
8872 *total = COSTS_N_INSNS (1);
8874 break;
8876 case HImode:
8877 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
8878 *total = COSTS_N_INSNS (3);
8880 break;
8882 case SImode:
8883 if (CONST_INT_P (XEXP (x, 1)))
8884 switch (INTVAL (XEXP (x, 1)))
8886 case 8:
8887 case 24:
8888 *total = COSTS_N_INSNS (5);
8889 break;
8890 case 16:
8891 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
8892 break;
8894 break;
8896 default:
8897 return false;
8899 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8900 return true;
8902 case ASHIFT:
8903 switch (mode)
8905 case QImode:
8906 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8908 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8909 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8910 speed);
8912 else
8914 val = INTVAL (XEXP (x, 1));
8915 if (val == 7)
8916 *total = COSTS_N_INSNS (3);
8917 else if (val >= 0 && val <= 7)
8918 *total = COSTS_N_INSNS (val);
8919 else
8920 *total = COSTS_N_INSNS (1);
8922 break;
8924 case HImode:
8925 if (AVR_HAVE_MUL)
8927 if (const_2_to_7_operand (XEXP (x, 1), HImode)
8928 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
8929 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
8931 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8932 return true;
8936 if (const1_rtx == (XEXP (x, 1))
8937 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8939 *total = COSTS_N_INSNS (2);
8940 return true;
8943 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8945 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8946 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8947 speed);
8949 else
8950 switch (INTVAL (XEXP (x, 1)))
8952 case 0:
8953 *total = 0;
8954 break;
8955 case 1:
8956 case 8:
8957 *total = COSTS_N_INSNS (2);
8958 break;
8959 case 9:
8960 *total = COSTS_N_INSNS (3);
8961 break;
8962 case 2:
8963 case 3:
8964 case 10:
8965 case 15:
8966 *total = COSTS_N_INSNS (4);
8967 break;
8968 case 7:
8969 case 11:
8970 case 12:
8971 *total = COSTS_N_INSNS (5);
8972 break;
8973 case 4:
8974 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8975 break;
8976 case 6:
8977 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8978 break;
8979 case 5:
8980 *total = COSTS_N_INSNS (!speed ? 5 : 10);
8981 break;
8982 default:
8983 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8984 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8985 speed);
8987 break;
8989 case PSImode:
8990 if (!CONST_INT_P (XEXP (x, 1)))
8992 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8994 else
8995 switch (INTVAL (XEXP (x, 1)))
8997 case 0:
8998 *total = 0;
8999 break;
9000 case 1:
9001 case 8:
9002 case 16:
9003 *total = COSTS_N_INSNS (3);
9004 break;
9005 case 23:
9006 *total = COSTS_N_INSNS (5);
9007 break;
9008 default:
9009 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9010 break;
9012 break;
9014 case SImode:
9015 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9017 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9018 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9019 speed);
9021 else
9022 switch (INTVAL (XEXP (x, 1)))
9024 case 0:
9025 *total = 0;
9026 break;
9027 case 24:
9028 *total = COSTS_N_INSNS (3);
9029 break;
9030 case 1:
9031 case 8:
9032 case 16:
9033 *total = COSTS_N_INSNS (4);
9034 break;
9035 case 31:
9036 *total = COSTS_N_INSNS (6);
9037 break;
9038 case 2:
9039 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9040 break;
9041 default:
9042 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9043 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9044 speed);
9046 break;
9048 default:
9049 return false;
9051 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9052 return true;
9054 case ASHIFTRT:
9055 switch (mode)
9057 case QImode:
9058 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9060 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9061 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9062 speed);
9064 else
9066 val = INTVAL (XEXP (x, 1));
9067 if (val == 6)
9068 *total = COSTS_N_INSNS (4);
9069 else if (val == 7)
9070 *total = COSTS_N_INSNS (2);
9071 else if (val >= 0 && val <= 7)
9072 *total = COSTS_N_INSNS (val);
9073 else
9074 *total = COSTS_N_INSNS (1);
9076 break;
9078 case HImode:
9079 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9081 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9082 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9083 speed);
9085 else
9086 switch (INTVAL (XEXP (x, 1)))
9088 case 0:
9089 *total = 0;
9090 break;
9091 case 1:
9092 *total = COSTS_N_INSNS (2);
9093 break;
9094 case 15:
9095 *total = COSTS_N_INSNS (3);
9096 break;
9097 case 2:
9098 case 7:
9099 case 8:
9100 case 9:
9101 *total = COSTS_N_INSNS (4);
9102 break;
9103 case 10:
9104 case 14:
9105 *total = COSTS_N_INSNS (5);
9106 break;
9107 case 11:
9108 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9109 break;
9110 case 12:
9111 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9112 break;
9113 case 6:
9114 case 13:
9115 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9116 break;
9117 default:
9118 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9119 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9120 speed);
9122 break;
9124 case PSImode:
9125 if (!CONST_INT_P (XEXP (x, 1)))
9127 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9129 else
9130 switch (INTVAL (XEXP (x, 1)))
9132 case 0:
9133 *total = 0;
9134 break;
9135 case 1:
9136 *total = COSTS_N_INSNS (3);
9137 break;
9138 case 16:
9139 case 8:
9140 *total = COSTS_N_INSNS (5);
9141 break;
9142 case 23:
9143 *total = COSTS_N_INSNS (4);
9144 break;
9145 default:
9146 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9147 break;
9149 break;
9151 case SImode:
9152 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9154 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9155 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9156 speed);
9158 else
9159 switch (INTVAL (XEXP (x, 1)))
9161 case 0:
9162 *total = 0;
9163 break;
9164 case 1:
9165 *total = COSTS_N_INSNS (4);
9166 break;
9167 case 8:
9168 case 16:
9169 case 24:
9170 *total = COSTS_N_INSNS (6);
9171 break;
9172 case 2:
9173 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9174 break;
9175 case 31:
9176 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9177 break;
9178 default:
9179 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9180 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9181 speed);
9183 break;
9185 default:
9186 return false;
9188 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9189 return true;
9191 case LSHIFTRT:
9192 switch (mode)
9194 case QImode:
9195 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9197 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9198 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9199 speed);
9201 else
9203 val = INTVAL (XEXP (x, 1));
9204 if (val == 7)
9205 *total = COSTS_N_INSNS (3);
9206 else if (val >= 0 && val <= 7)
9207 *total = COSTS_N_INSNS (val);
9208 else
9209 *total = COSTS_N_INSNS (1);
9211 break;
9213 case HImode:
9214 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9216 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9217 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9218 speed);
9220 else
9221 switch (INTVAL (XEXP (x, 1)))
9223 case 0:
9224 *total = 0;
9225 break;
9226 case 1:
9227 case 8:
9228 *total = COSTS_N_INSNS (2);
9229 break;
9230 case 9:
9231 *total = COSTS_N_INSNS (3);
9232 break;
9233 case 2:
9234 case 10:
9235 case 15:
9236 *total = COSTS_N_INSNS (4);
9237 break;
9238 case 7:
9239 case 11:
9240 *total = COSTS_N_INSNS (5);
9241 break;
9242 case 3:
9243 case 12:
9244 case 13:
9245 case 14:
9246 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9247 break;
9248 case 4:
9249 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9250 break;
9251 case 5:
9252 case 6:
9253 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9254 break;
9255 default:
9256 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9257 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9258 speed);
9260 break;
9262 case PSImode:
9263 if (!CONST_INT_P (XEXP (x, 1)))
9265 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9267 else
9268 switch (INTVAL (XEXP (x, 1)))
9270 case 0:
9271 *total = 0;
9272 break;
9273 case 1:
9274 case 8:
9275 case 16:
9276 *total = COSTS_N_INSNS (3);
9277 break;
9278 case 23:
9279 *total = COSTS_N_INSNS (5);
9280 break;
9281 default:
9282 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9283 break;
9285 break;
9287 case SImode:
9288 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9290 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9291 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9292 speed);
9294 else
9295 switch (INTVAL (XEXP (x, 1)))
9297 case 0:
9298 *total = 0;
9299 break;
9300 case 1:
9301 *total = COSTS_N_INSNS (4);
9302 break;
9303 case 2:
9304 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9305 break;
9306 case 8:
9307 case 16:
9308 case 24:
9309 *total = COSTS_N_INSNS (4);
9310 break;
9311 case 31:
9312 *total = COSTS_N_INSNS (6);
9313 break;
9314 default:
9315 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9316 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9317 speed);
9319 break;
9321 default:
9322 return false;
9324 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9325 return true;
9327 case COMPARE:
9328 switch (GET_MODE (XEXP (x, 0)))
9330 case QImode:
9331 *total = COSTS_N_INSNS (1);
9332 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9333 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9334 break;
9336 case HImode:
9337 *total = COSTS_N_INSNS (2);
9338 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9339 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9340 else if (INTVAL (XEXP (x, 1)) != 0)
9341 *total += COSTS_N_INSNS (1);
9342 break;
9344 case PSImode:
9345 *total = COSTS_N_INSNS (3);
9346 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9347 *total += COSTS_N_INSNS (2);
9348 break;
9350 case SImode:
9351 *total = COSTS_N_INSNS (4);
9352 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9353 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9354 else if (INTVAL (XEXP (x, 1)) != 0)
9355 *total += COSTS_N_INSNS (3);
9356 break;
9358 default:
9359 return false;
9361 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9362 return true;
9364 case TRUNCATE:
9365 if (AVR_HAVE_MUL
9366 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9367 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9368 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9370 if (QImode == mode || HImode == mode)
9372 *total = COSTS_N_INSNS (2);
9373 return true;
9376 break;
9378 default:
9379 break;
9381 return false;
9385 /* Implement `TARGET_RTX_COSTS'. */
9387 static bool
9388 avr_rtx_costs (rtx x, int codearg, int outer_code,
9389 int opno, int *total, bool speed)
9391 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9392 opno, total, speed);
9394 if (avr_log.rtx_costs)
9396 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9397 done, speed ? "speed" : "size", *total, outer_code, x);
9400 return done;
9404 /* Implement `TARGET_ADDRESS_COST'. */
9406 static int
9407 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9408 addr_space_t as ATTRIBUTE_UNUSED,
9409 bool speed ATTRIBUTE_UNUSED)
9411 int cost = 4;
9413 if (GET_CODE (x) == PLUS
9414 && CONST_INT_P (XEXP (x, 1))
9415 && (REG_P (XEXP (x, 0))
9416 || GET_CODE (XEXP (x, 0)) == SUBREG))
9418 if (INTVAL (XEXP (x, 1)) >= 61)
9419 cost = 18;
9421 else if (CONSTANT_ADDRESS_P (x))
9423 if (optimize > 0
9424 && io_address_operand (x, QImode))
9425 cost = 2;
9428 if (avr_log.address_cost)
9429 avr_edump ("\n%?: %d = %r\n", cost, x);
9431 return cost;
9434 /* Test for extra memory constraint 'Q'.
9435 It's a memory address based on Y or Z pointer with valid displacement. */
9438 extra_constraint_Q (rtx x)
9440 int ok = 0;
9442 if (GET_CODE (XEXP (x,0)) == PLUS
9443 && REG_P (XEXP (XEXP (x,0), 0))
9444 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9445 && (INTVAL (XEXP (XEXP (x,0), 1))
9446 <= MAX_LD_OFFSET (GET_MODE (x))))
9448 rtx xx = XEXP (XEXP (x,0), 0);
9449 int regno = REGNO (xx);
9451 ok = (/* allocate pseudos */
9452 regno >= FIRST_PSEUDO_REGISTER
9453 /* strictly check */
9454 || regno == REG_Z || regno == REG_Y
9455 /* XXX frame & arg pointer checks */
9456 || xx == frame_pointer_rtx
9457 || xx == arg_pointer_rtx);
9459 if (avr_log.constraints)
9460 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9461 ok, reload_completed, reload_in_progress, x);
9464 return ok;
9467 /* Convert condition code CONDITION to the valid AVR condition code. */
9469 RTX_CODE
9470 avr_normalize_condition (RTX_CODE condition)
9472 switch (condition)
9474 case GT:
9475 return GE;
9476 case GTU:
9477 return GEU;
9478 case LE:
9479 return LT;
9480 case LEU:
9481 return LTU;
9482 default:
9483 gcc_unreachable ();
9487 /* Helper function for `avr_reorg'. */
9489 static rtx
9490 avr_compare_pattern (rtx insn)
9492 rtx pattern = single_set (insn);
9494 if (pattern
9495 && NONJUMP_INSN_P (insn)
9496 && SET_DEST (pattern) == cc0_rtx
9497 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9499 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9500 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9502 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9503 They must not be swapped, thus skip them. */
9505 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9506 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9507 return pattern;
9510 return NULL_RTX;
9513 /* Helper function for `avr_reorg'. */
9515 /* Expansion of switch/case decision trees leads to code like
9517 cc0 = compare (Reg, Num)
9518 if (cc0 == 0)
9519 goto L1
9521 cc0 = compare (Reg, Num)
9522 if (cc0 > 0)
9523 goto L2
9525 The second comparison is superfluous and can be deleted.
9526 The second jump condition can be transformed from a
9527 "difficult" one to a "simple" one because "cc0 > 0" and
9528 "cc0 >= 0" will have the same effect here.
9530 This function relies on the way switch/case is being expaned
9531 as binary decision tree. For example code see PR 49903.
9533 Return TRUE if optimization performed.
9534 Return FALSE if nothing changed.
9536 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9538 We don't want to do this in text peephole because it is
9539 tedious to work out jump offsets there and the second comparison
9540 might have been transormed by `avr_reorg'.
9542 RTL peephole won't do because peephole2 does not scan across
9543 basic blocks. */
9545 static bool
9546 avr_reorg_remove_redundant_compare (rtx insn1)
9548 rtx comp1, ifelse1, xcond1, branch1;
9549 rtx comp2, ifelse2, xcond2, branch2, insn2;
9550 enum rtx_code code;
9551 rtx jump, target, cond;
9553 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9555 branch1 = next_nonnote_nondebug_insn (insn1);
9556 if (!branch1 || !JUMP_P (branch1))
9557 return false;
9559 insn2 = next_nonnote_nondebug_insn (branch1);
9560 if (!insn2 || !avr_compare_pattern (insn2))
9561 return false;
9563 branch2 = next_nonnote_nondebug_insn (insn2);
9564 if (!branch2 || !JUMP_P (branch2))
9565 return false;
9567 comp1 = avr_compare_pattern (insn1);
9568 comp2 = avr_compare_pattern (insn2);
9569 xcond1 = single_set (branch1);
9570 xcond2 = single_set (branch2);
9572 if (!comp1 || !comp2
9573 || !rtx_equal_p (comp1, comp2)
9574 || !xcond1 || SET_DEST (xcond1) != pc_rtx
9575 || !xcond2 || SET_DEST (xcond2) != pc_rtx
9576 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
9577 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
9579 return false;
9582 comp1 = SET_SRC (comp1);
9583 ifelse1 = SET_SRC (xcond1);
9584 ifelse2 = SET_SRC (xcond2);
9586 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9588 if (EQ != GET_CODE (XEXP (ifelse1, 0))
9589 || !REG_P (XEXP (comp1, 0))
9590 || !CONST_INT_P (XEXP (comp1, 1))
9591 || XEXP (ifelse1, 2) != pc_rtx
9592 || XEXP (ifelse2, 2) != pc_rtx
9593 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
9594 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
9595 || !COMPARISON_P (XEXP (ifelse2, 0))
9596 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
9597 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
9598 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
9599 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
9601 return false;
9604 /* We filtered the insn sequence to look like
9606 (set (cc0)
9607 (compare (reg:M N)
9608 (const_int VAL)))
9609 (set (pc)
9610 (if_then_else (eq (cc0)
9611 (const_int 0))
9612 (label_ref L1)
9613 (pc)))
9615 (set (cc0)
9616 (compare (reg:M N)
9617 (const_int VAL)))
9618 (set (pc)
9619 (if_then_else (CODE (cc0)
9620 (const_int 0))
9621 (label_ref L2)
9622 (pc)))
9625 code = GET_CODE (XEXP (ifelse2, 0));
9627 /* Map GT/GTU to GE/GEU which is easier for AVR.
9628 The first two instructions compare/branch on EQ
9629 so we may replace the difficult
9631 if (x == VAL) goto L1;
9632 if (x > VAL) goto L2;
9634 with easy
9636 if (x == VAL) goto L1;
9637 if (x >= VAL) goto L2;
9639 Similarly, replace LE/LEU by LT/LTU. */
9641 switch (code)
9643 case EQ:
9644 case LT: case LTU:
9645 case GE: case GEU:
9646 break;
9648 case LE: case LEU:
9649 case GT: case GTU:
9650 code = avr_normalize_condition (code);
9651 break;
9653 default:
9654 return false;
9657 /* Wrap the branches into UNSPECs so they won't be changed or
9658 optimized in the remainder. */
9660 target = XEXP (XEXP (ifelse1, 1), 0);
9661 cond = XEXP (ifelse1, 0);
9662 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
9664 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
9666 target = XEXP (XEXP (ifelse2, 1), 0);
9667 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9668 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
9670 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
9672 /* The comparisons in insn1 and insn2 are exactly the same;
9673 insn2 is superfluous so delete it. */
9675 delete_insn (insn2);
9676 delete_insn (branch1);
9677 delete_insn (branch2);
9679 return true;
9683 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9684 /* Optimize conditional jumps. */
9686 static void
9687 avr_reorg (void)
9689 rtx insn = get_insns();
9691 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
9693 rtx pattern = avr_compare_pattern (insn);
9695 if (!pattern)
9696 continue;
9698 if (optimize
9699 && avr_reorg_remove_redundant_compare (insn))
9701 continue;
9704 if (compare_diff_p (insn))
9706 /* Now we work under compare insn with difficult branch. */
9708 rtx next = next_real_insn (insn);
9709 rtx pat = PATTERN (next);
9711 pattern = SET_SRC (pattern);
9713 if (true_regnum (XEXP (pattern, 0)) >= 0
9714 && true_regnum (XEXP (pattern, 1)) >= 0)
9716 rtx x = XEXP (pattern, 0);
9717 rtx src = SET_SRC (pat);
9718 rtx t = XEXP (src,0);
9719 PUT_CODE (t, swap_condition (GET_CODE (t)));
9720 XEXP (pattern, 0) = XEXP (pattern, 1);
9721 XEXP (pattern, 1) = x;
9722 INSN_CODE (next) = -1;
9724 else if (true_regnum (XEXP (pattern, 0)) >= 0
9725 && XEXP (pattern, 1) == const0_rtx)
9727 /* This is a tst insn, we can reverse it. */
9728 rtx src = SET_SRC (pat);
9729 rtx t = XEXP (src,0);
9731 PUT_CODE (t, swap_condition (GET_CODE (t)));
9732 XEXP (pattern, 1) = XEXP (pattern, 0);
9733 XEXP (pattern, 0) = const0_rtx;
9734 INSN_CODE (next) = -1;
9735 INSN_CODE (insn) = -1;
9737 else if (true_regnum (XEXP (pattern, 0)) >= 0
9738 && CONST_INT_P (XEXP (pattern, 1)))
9740 rtx x = XEXP (pattern, 1);
9741 rtx src = SET_SRC (pat);
9742 rtx t = XEXP (src,0);
9743 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
9745 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
9747 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
9748 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
9749 INSN_CODE (next) = -1;
9750 INSN_CODE (insn) = -1;
9757 /* Returns register number for function return value.*/
9759 static inline unsigned int
9760 avr_ret_register (void)
9762 return 24;
9766 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
9768 static bool
9769 avr_function_value_regno_p (const unsigned int regno)
9771 return (regno == avr_ret_register ());
9775 /* Implement `TARGET_LIBCALL_VALUE'. */
9776 /* Create an RTX representing the place where a
9777 library function returns a value of mode MODE. */
9779 static rtx
9780 avr_libcall_value (enum machine_mode mode,
9781 const_rtx func ATTRIBUTE_UNUSED)
9783 int offs = GET_MODE_SIZE (mode);
9785 if (offs <= 4)
9786 offs = (offs + 1) & ~1;
9788 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
9792 /* Implement `TARGET_FUNCTION_VALUE'. */
9793 /* Create an RTX representing the place where a
9794 function returns a value of data type VALTYPE. */
9796 static rtx
9797 avr_function_value (const_tree type,
9798 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
9799 bool outgoing ATTRIBUTE_UNUSED)
9801 unsigned int offs;
9803 if (TYPE_MODE (type) != BLKmode)
9804 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
9806 offs = int_size_in_bytes (type);
9807 if (offs < 2)
9808 offs = 2;
9809 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
9810 offs = GET_MODE_SIZE (SImode);
9811 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
9812 offs = GET_MODE_SIZE (DImode);
9814 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
9818 test_hard_reg_class (enum reg_class rclass, rtx x)
9820 int regno = true_regnum (x);
9821 if (regno < 0)
9822 return 0;
9824 if (TEST_HARD_REG_CLASS (rclass, regno))
9825 return 1;
9827 return 0;
9831 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9832 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9834 static bool
9835 avr_2word_insn_p (rtx insn)
9837 if (avr_current_device->errata_skip
9838 || !insn
9839 || 2 != get_attr_length (insn))
9841 return false;
9844 switch (INSN_CODE (insn))
9846 default:
9847 return false;
9849 case CODE_FOR_movqi_insn:
9850 case CODE_FOR_movuqq_insn:
9851 case CODE_FOR_movqq_insn:
9853 rtx set = single_set (insn);
9854 rtx src = SET_SRC (set);
9855 rtx dest = SET_DEST (set);
9857 /* Factor out LDS and STS from movqi_insn. */
9859 if (MEM_P (dest)
9860 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
9862 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
9864 else if (REG_P (dest)
9865 && MEM_P (src))
9867 return CONSTANT_ADDRESS_P (XEXP (src, 0));
9870 return false;
9873 case CODE_FOR_call_insn:
9874 case CODE_FOR_call_value_insn:
9875 return true;
9881 jump_over_one_insn_p (rtx insn, rtx dest)
9883 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
9884 ? XEXP (dest, 0)
9885 : dest);
9886 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
9887 int dest_addr = INSN_ADDRESSES (uid);
9888 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
9890 return (jump_offset == 1
9891 || (jump_offset == 2
9892 && avr_2word_insn_p (next_active_insn (insn))));
9896 /* Worker function for `HARD_REGNO_MODE_OK'. */
9897 /* Returns 1 if a value of mode MODE can be stored starting with hard
9898 register number REGNO. On the enhanced core, anything larger than
9899 1 byte must start in even numbered register for "movw" to work
9900 (this way we don't have to check for odd registers everywhere). */
9903 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
9905 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9906 Disallowing QI et al. in these regs might lead to code like
9907 (set (subreg:QI (reg:HI 28) n) ...)
9908 which will result in wrong code because reload does not
9909 handle SUBREGs of hard regsisters like this.
9910 This could be fixed in reload. However, it appears
9911 that fixing reload is not wanted by reload people. */
9913 /* Any GENERAL_REGS register can hold 8-bit values. */
9915 if (GET_MODE_SIZE (mode) == 1)
9916 return 1;
9918 /* FIXME: Ideally, the following test is not needed.
9919 However, it turned out that it can reduce the number
9920 of spill fails. AVR and it's poor endowment with
9921 address registers is extreme stress test for reload. */
9923 if (GET_MODE_SIZE (mode) >= 4
9924 && regno >= REG_X)
9925 return 0;
9927 /* All modes larger than 8 bits should start in an even register. */
9929 return !(regno & 1);
9933 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
9936 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
9938 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
9939 represent valid hard registers like, e.g. HI:29. Returning TRUE
9940 for such registers can lead to performance degradation as mentioned
9941 in PR53595. Thus, report invalid hard registers as FALSE. */
9943 if (!avr_hard_regno_mode_ok (regno, mode))
9944 return 0;
9946 /* Return true if any of the following boundaries is crossed:
9947 17/18, 27/28 and 29/30. */
9949 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
9950 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
9951 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
9955 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9957 enum reg_class
9958 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
9959 addr_space_t as, RTX_CODE outer_code,
9960 RTX_CODE index_code ATTRIBUTE_UNUSED)
9962 if (!ADDR_SPACE_GENERIC_P (as))
9964 return POINTER_Z_REGS;
9967 if (!avr_strict_X)
9968 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
9970 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
9974 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9976 bool
9977 avr_regno_mode_code_ok_for_base_p (int regno,
9978 enum machine_mode mode ATTRIBUTE_UNUSED,
9979 addr_space_t as ATTRIBUTE_UNUSED,
9980 RTX_CODE outer_code,
9981 RTX_CODE index_code ATTRIBUTE_UNUSED)
9983 bool ok = false;
9985 if (!ADDR_SPACE_GENERIC_P (as))
9987 if (regno < FIRST_PSEUDO_REGISTER
9988 && regno == REG_Z)
9990 return true;
9993 if (reg_renumber)
9995 regno = reg_renumber[regno];
9997 if (regno == REG_Z)
9999 return true;
10003 return false;
10006 if (regno < FIRST_PSEUDO_REGISTER
10007 && (regno == REG_X
10008 || regno == REG_Y
10009 || regno == REG_Z
10010 || regno == ARG_POINTER_REGNUM))
10012 ok = true;
10014 else if (reg_renumber)
10016 regno = reg_renumber[regno];
10018 if (regno == REG_X
10019 || regno == REG_Y
10020 || regno == REG_Z
10021 || regno == ARG_POINTER_REGNUM)
10023 ok = true;
10027 if (avr_strict_X
10028 && PLUS == outer_code
10029 && regno == REG_X)
10031 ok = false;
10034 return ok;
10038 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10039 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10040 CLOBBER_REG is a QI clobber register or NULL_RTX.
10041 LEN == NULL: output instructions.
10042 LEN != NULL: set *LEN to the length of the instruction sequence
10043 (in words) printed with LEN = NULL.
10044 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10045 If CLEAR_P is false, nothing is known about OP[0].
10047 The effect on cc0 is as follows:
10049 Load 0 to any register except ZERO_REG : NONE
10050 Load ld register with any value : NONE
10051 Anything else: : CLOBBER */
10053 static void
10054 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
10056 rtx src = op[1];
10057 rtx dest = op[0];
10058 rtx xval, xdest[4];
10059 int ival[4];
10060 int clobber_val = 1234;
10061 bool cooked_clobber_p = false;
10062 bool set_p = false;
10063 enum machine_mode mode = GET_MODE (dest);
10064 int n, n_bytes = GET_MODE_SIZE (mode);
10066 gcc_assert (REG_P (dest)
10067 && CONSTANT_P (src));
10069 if (len)
10070 *len = 0;
10072 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10073 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10075 if (REGNO (dest) < 16
10076 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
10078 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
10081 /* We might need a clobber reg but don't have one. Look at the value to
10082 be loaded more closely. A clobber is only needed if it is a symbol
10083 or contains a byte that is neither 0, -1 or a power of 2. */
10085 if (NULL_RTX == clobber_reg
10086 && !test_hard_reg_class (LD_REGS, dest)
10087 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
10088 || !avr_popcount_each_byte (src, n_bytes,
10089 (1 << 0) | (1 << 1) | (1 << 8))))
10091 /* We have no clobber register but need one. Cook one up.
10092 That's cheaper than loading from constant pool. */
10094 cooked_clobber_p = true;
10095 clobber_reg = all_regs_rtx[REG_Z + 1];
10096 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
10099 /* Now start filling DEST from LSB to MSB. */
10101 for (n = 0; n < n_bytes; n++)
10103 int ldreg_p;
10104 bool done_byte = false;
10105 int j;
10106 rtx xop[3];
10108 /* Crop the n-th destination byte. */
10110 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
10111 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
10113 if (!CONST_INT_P (src)
10114 && !CONST_FIXED_P (src)
10115 && !CONST_DOUBLE_P (src))
10117 static const char* const asm_code[][2] =
10119 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
10120 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
10121 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
10122 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
10125 xop[0] = xdest[n];
10126 xop[1] = src;
10127 xop[2] = clobber_reg;
10129 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
10131 continue;
10134 /* Crop the n-th source byte. */
10136 xval = simplify_gen_subreg (QImode, src, mode, n);
10137 ival[n] = INTVAL (xval);
10139 /* Look if we can reuse the low word by means of MOVW. */
10141 if (n == 2
10142 && n_bytes >= 4
10143 && AVR_HAVE_MOVW)
10145 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
10146 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
10148 if (INTVAL (lo16) == INTVAL (hi16))
10150 if (0 != INTVAL (lo16)
10151 || !clear_p)
10153 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
10156 break;
10160 /* Don't use CLR so that cc0 is set as expected. */
10162 if (ival[n] == 0)
10164 if (!clear_p)
10165 avr_asm_len (ldreg_p ? "ldi %0,0"
10166 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
10167 : "mov %0,__zero_reg__",
10168 &xdest[n], len, 1);
10169 continue;
10172 if (clobber_val == ival[n]
10173 && REGNO (clobber_reg) == REGNO (xdest[n]))
10175 continue;
10178 /* LD_REGS can use LDI to move a constant value */
10180 if (ldreg_p)
10182 xop[0] = xdest[n];
10183 xop[1] = xval;
10184 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
10185 continue;
10188 /* Try to reuse value already loaded in some lower byte. */
10190 for (j = 0; j < n; j++)
10191 if (ival[j] == ival[n])
10193 xop[0] = xdest[n];
10194 xop[1] = xdest[j];
10196 avr_asm_len ("mov %0,%1", xop, len, 1);
10197 done_byte = true;
10198 break;
10201 if (done_byte)
10202 continue;
10204 /* Need no clobber reg for -1: Use CLR/DEC */
10206 if (-1 == ival[n])
10208 if (!clear_p)
10209 avr_asm_len ("clr %0", &xdest[n], len, 1);
10211 avr_asm_len ("dec %0", &xdest[n], len, 1);
10212 continue;
10214 else if (1 == ival[n])
10216 if (!clear_p)
10217 avr_asm_len ("clr %0", &xdest[n], len, 1);
10219 avr_asm_len ("inc %0", &xdest[n], len, 1);
10220 continue;
10223 /* Use T flag or INC to manage powers of 2 if we have
10224 no clobber reg. */
10226 if (NULL_RTX == clobber_reg
10227 && single_one_operand (xval, QImode))
10229 xop[0] = xdest[n];
10230 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
10232 gcc_assert (constm1_rtx != xop[1]);
10234 if (!set_p)
10236 set_p = true;
10237 avr_asm_len ("set", xop, len, 1);
10240 if (!clear_p)
10241 avr_asm_len ("clr %0", xop, len, 1);
10243 avr_asm_len ("bld %0,%1", xop, len, 1);
10244 continue;
10247 /* We actually need the LD_REGS clobber reg. */
10249 gcc_assert (NULL_RTX != clobber_reg);
10251 xop[0] = xdest[n];
10252 xop[1] = xval;
10253 xop[2] = clobber_reg;
10254 clobber_val = ival[n];
10256 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10257 "mov %0,%2", xop, len, 2);
10260 /* If we cooked up a clobber reg above, restore it. */
10262 if (cooked_clobber_p)
10264 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
10269 /* Reload the constant OP[1] into the HI register OP[0].
10270 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10271 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10272 need a clobber reg or have to cook one up.
10274 PLEN == NULL: Output instructions.
10275 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10276 by the insns printed.
10278 Return "". */
10280 const char*
10281 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10283 output_reload_in_const (op, clobber_reg, plen, false);
10284 return "";
10288 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10289 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10290 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10291 need a clobber reg or have to cook one up.
10293 LEN == NULL: Output instructions.
10295 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10296 by the insns printed.
10298 Return "". */
10300 const char *
10301 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
10303 if (AVR_HAVE_MOVW
10304 && !test_hard_reg_class (LD_REGS, op[0])
10305 && (CONST_INT_P (op[1])
10306 || CONST_FIXED_P (op[1])
10307 || CONST_DOUBLE_P (op[1])))
10309 int len_clr, len_noclr;
10311 /* In some cases it is better to clear the destination beforehand, e.g.
10313 CLR R2 CLR R3 MOVW R4,R2 INC R2
10315 is shorther than
10317 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10319 We find it too tedious to work that out in the print function.
10320 Instead, we call the print function twice to get the lengths of
10321 both methods and use the shortest one. */
10323 output_reload_in_const (op, clobber_reg, &len_clr, true);
10324 output_reload_in_const (op, clobber_reg, &len_noclr, false);
10326 if (len_noclr - len_clr == 4)
10328 /* Default needs 4 CLR instructions: clear register beforehand. */
10330 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10331 "mov %B0,__zero_reg__" CR_TAB
10332 "movw %C0,%A0", &op[0], len, 3);
10334 output_reload_in_const (op, clobber_reg, len, true);
10336 if (len)
10337 *len += 3;
10339 return "";
10343 /* Default: destination not pre-cleared. */
10345 output_reload_in_const (op, clobber_reg, len, false);
10346 return "";
10349 const char*
10350 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10352 output_reload_in_const (op, clobber_reg, len, false);
10353 return "";
10357 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10359 void
10360 avr_output_addr_vec_elt (FILE *stream, int value)
10362 if (AVR_HAVE_JMP_CALL)
10363 fprintf (stream, "\t.word gs(.L%d)\n", value);
10364 else
10365 fprintf (stream, "\trjmp .L%d\n", value);
10369 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10370 /* Returns true if SCRATCH are safe to be allocated as a scratch
10371 registers (for a define_peephole2) in the current function. */
10373 static bool
10374 avr_hard_regno_scratch_ok (unsigned int regno)
10376 /* Interrupt functions can only use registers that have already been saved
10377 by the prologue, even if they would normally be call-clobbered. */
10379 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10380 && !df_regs_ever_live_p (regno))
10381 return false;
10383 /* Don't allow hard registers that might be part of the frame pointer.
10384 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10385 and don't care for a frame pointer that spans more than one register. */
10387 if ((!reload_completed || frame_pointer_needed)
10388 && (regno == REG_Y || regno == REG_Y + 1))
10390 return false;
10393 return true;
10397 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10398 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10401 avr_hard_regno_rename_ok (unsigned int old_reg,
10402 unsigned int new_reg)
10404 /* Interrupt functions can only use registers that have already been
10405 saved by the prologue, even if they would normally be
10406 call-clobbered. */
10408 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10409 && !df_regs_ever_live_p (new_reg))
10410 return 0;
10412 /* Don't allow hard registers that might be part of the frame pointer.
10413 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10414 and don't care for a frame pointer that spans more than one register. */
10416 if ((!reload_completed || frame_pointer_needed)
10417 && (old_reg == REG_Y || old_reg == REG_Y + 1
10418 || new_reg == REG_Y || new_reg == REG_Y + 1))
10420 return 0;
10423 return 1;
10426 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10427 or memory location in the I/O space (QImode only).
10429 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10430 Operand 1: register operand to test, or CONST_INT memory address.
10431 Operand 2: bit number.
10432 Operand 3: label to jump to if the test is true. */
10434 const char*
10435 avr_out_sbxx_branch (rtx insn, rtx operands[])
10437 enum rtx_code comp = GET_CODE (operands[0]);
10438 bool long_jump = get_attr_length (insn) >= 4;
10439 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
10441 if (comp == GE)
10442 comp = EQ;
10443 else if (comp == LT)
10444 comp = NE;
10446 if (reverse)
10447 comp = reverse_condition (comp);
10449 switch (GET_CODE (operands[1]))
10451 default:
10452 gcc_unreachable();
10454 case CONST_INT:
10456 if (low_io_address_operand (operands[1], QImode))
10458 if (comp == EQ)
10459 output_asm_insn ("sbis %i1,%2", operands);
10460 else
10461 output_asm_insn ("sbic %i1,%2", operands);
10463 else
10465 output_asm_insn ("in __tmp_reg__,%i1", operands);
10466 if (comp == EQ)
10467 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
10468 else
10469 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
10472 break; /* CONST_INT */
10474 case REG:
10476 if (comp == EQ)
10477 output_asm_insn ("sbrs %T1%T2", operands);
10478 else
10479 output_asm_insn ("sbrc %T1%T2", operands);
10481 break; /* REG */
10482 } /* switch */
10484 if (long_jump)
10485 return ("rjmp .+4" CR_TAB
10486 "jmp %x3");
10488 if (!reverse)
10489 return "rjmp %x3";
10491 return "";
10494 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10496 static void
10497 avr_asm_out_ctor (rtx symbol, int priority)
10499 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10500 default_ctor_section_asm_out_constructor (symbol, priority);
10504 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10506 static void
10507 avr_asm_out_dtor (rtx symbol, int priority)
10509 fputs ("\t.global __do_global_dtors\n", asm_out_file);
10510 default_dtor_section_asm_out_destructor (symbol, priority);
10514 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10516 static bool
10517 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10519 if (TYPE_MODE (type) == BLKmode)
10521 HOST_WIDE_INT size = int_size_in_bytes (type);
10522 return (size == -1 || size > 8);
10524 else
10525 return false;
10529 /* Implement `CASE_VALUES_THRESHOLD'. */
10530 /* Supply the default for --param case-values-threshold=0 */
10532 static unsigned int
10533 avr_case_values_threshold (void)
10535 /* The exact break-even point between a jump table and an if-else tree
10536 depends on several factors not available here like, e.g. if 8-bit
10537 comparisons can be used in the if-else tree or not, on the
10538 range of the case values, if the case value can be reused, on the
10539 register allocation, etc. '7' appears to be a good choice. */
10541 return 7;
10545 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10547 static enum machine_mode
10548 avr_addr_space_address_mode (addr_space_t as)
10550 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10554 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10556 static enum machine_mode
10557 avr_addr_space_pointer_mode (addr_space_t as)
10559 return avr_addr_space_address_mode (as);
10563 /* Helper for following function. */
10565 static bool
10566 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10568 gcc_assert (REG_P (reg));
10570 if (strict)
10572 return REGNO (reg) == REG_Z;
10575 /* Avoid combine to propagate hard regs. */
10577 if (can_create_pseudo_p()
10578 && REGNO (reg) < REG_Z)
10580 return false;
10583 return true;
10587 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10589 static bool
10590 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
10591 bool strict, addr_space_t as)
10593 bool ok = false;
10595 switch (as)
10597 default:
10598 gcc_unreachable();
10600 case ADDR_SPACE_GENERIC:
10601 return avr_legitimate_address_p (mode, x, strict);
10603 case ADDR_SPACE_FLASH:
10604 case ADDR_SPACE_FLASH1:
10605 case ADDR_SPACE_FLASH2:
10606 case ADDR_SPACE_FLASH3:
10607 case ADDR_SPACE_FLASH4:
10608 case ADDR_SPACE_FLASH5:
10610 switch (GET_CODE (x))
10612 case REG:
10613 ok = avr_reg_ok_for_pgm_addr (x, strict);
10614 break;
10616 case POST_INC:
10617 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
10618 break;
10620 default:
10621 break;
10624 break; /* FLASH */
10626 case ADDR_SPACE_MEMX:
10627 if (REG_P (x))
10628 ok = (!strict
10629 && can_create_pseudo_p());
10631 if (LO_SUM == GET_CODE (x))
10633 rtx hi = XEXP (x, 0);
10634 rtx lo = XEXP (x, 1);
10636 ok = (REG_P (hi)
10637 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
10638 && REG_P (lo)
10639 && REGNO (lo) == REG_Z);
10642 break; /* MEMX */
10645 if (avr_log.legitimate_address_p)
10647 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10648 "reload_completed=%d reload_in_progress=%d %s:",
10649 ok, mode, strict, reload_completed, reload_in_progress,
10650 reg_renumber ? "(reg_renumber)" : "");
10652 if (GET_CODE (x) == PLUS
10653 && REG_P (XEXP (x, 0))
10654 && CONST_INT_P (XEXP (x, 1))
10655 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
10656 && reg_renumber)
10658 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
10659 true_regnum (XEXP (x, 0)));
10662 avr_edump ("\n%r\n", x);
10665 return ok;
10669 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10671 static rtx
10672 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10673 enum machine_mode mode, addr_space_t as)
10675 if (ADDR_SPACE_GENERIC_P (as))
10676 return avr_legitimize_address (x, old_x, mode);
10678 if (avr_log.legitimize_address)
10680 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10683 return old_x;
10687 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10689 static rtx
10690 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
10692 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
10693 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
10695 if (avr_log.progmem)
10696 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10697 src, type_from, type_to);
10699 /* Up-casting from 16-bit to 24-bit pointer. */
10701 if (as_from != ADDR_SPACE_MEMX
10702 && as_to == ADDR_SPACE_MEMX)
10704 int msb;
10705 rtx sym = src;
10706 rtx reg = gen_reg_rtx (PSImode);
10708 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
10709 sym = XEXP (sym, 0);
10711 /* Look at symbol flags: avr_encode_section_info set the flags
10712 also if attribute progmem was seen so that we get the right
10713 promotion for, e.g. PSTR-like strings that reside in generic space
10714 but are located in flash. In that case we patch the incoming
10715 address space. */
10717 if (SYMBOL_REF == GET_CODE (sym)
10718 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
10720 as_from = ADDR_SPACE_FLASH;
10723 /* Linearize memory: RAM has bit 23 set. */
10725 msb = ADDR_SPACE_GENERIC_P (as_from)
10726 ? 0x80
10727 : avr_addrspace[as_from].segment;
10729 src = force_reg (Pmode, src);
10731 emit_insn (msb == 0
10732 ? gen_zero_extendhipsi2 (reg, src)
10733 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
10735 return reg;
10738 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
10740 if (as_from == ADDR_SPACE_MEMX
10741 && as_to != ADDR_SPACE_MEMX)
10743 rtx new_src = gen_reg_rtx (Pmode);
10745 src = force_reg (PSImode, src);
10747 emit_move_insn (new_src,
10748 simplify_gen_subreg (Pmode, src, PSImode, 0));
10749 return new_src;
10752 return src;
10756 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
10758 static bool
10759 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
10760 addr_space_t superset ATTRIBUTE_UNUSED)
10762 /* Allow any kind of pointer mess. */
10764 return true;
10768 /* Worker function for movmemhi expander.
10769 XOP[0] Destination as MEM:BLK
10770 XOP[1] Source " "
10771 XOP[2] # Bytes to copy
10773 Return TRUE if the expansion is accomplished.
10774 Return FALSE if the operand compination is not supported. */
10776 bool
10777 avr_emit_movmemhi (rtx *xop)
10779 HOST_WIDE_INT count;
10780 enum machine_mode loop_mode;
10781 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
10782 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
10783 rtx a_hi8 = NULL_RTX;
10785 if (avr_mem_flash_p (xop[0]))
10786 return false;
10788 if (!CONST_INT_P (xop[2]))
10789 return false;
10791 count = INTVAL (xop[2]);
10792 if (count <= 0)
10793 return false;
10795 a_src = XEXP (xop[1], 0);
10796 a_dest = XEXP (xop[0], 0);
10798 if (PSImode == GET_MODE (a_src))
10800 gcc_assert (as == ADDR_SPACE_MEMX);
10802 loop_mode = (count < 0x100) ? QImode : HImode;
10803 loop_reg = gen_rtx_REG (loop_mode, 24);
10804 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
10806 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
10807 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
10809 else
10811 int segment = avr_addrspace[as].segment;
10813 if (segment
10814 && avr_current_device->n_flash > 1)
10816 a_hi8 = GEN_INT (segment);
10817 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
10819 else if (!ADDR_SPACE_GENERIC_P (as))
10821 as = ADDR_SPACE_FLASH;
10824 addr1 = a_src;
10826 loop_mode = (count <= 0x100) ? QImode : HImode;
10827 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
10830 xas = GEN_INT (as);
10832 /* FIXME: Register allocator might come up with spill fails if it is left
10833 on its own. Thus, we allocate the pointer registers by hand:
10834 Z = source address
10835 X = destination address */
10837 emit_move_insn (lpm_addr_reg_rtx, addr1);
10838 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
10840 /* FIXME: Register allocator does a bad job and might spill address
10841 register(s) inside the loop leading to additional move instruction
10842 to/from stack which could clobber tmp_reg. Thus, do *not* emit
10843 load and store as separate insns. Instead, we perform the copy
10844 by means of one monolithic insn. */
10846 gcc_assert (TMP_REGNO == LPM_REGNO);
10848 if (as != ADDR_SPACE_MEMX)
10850 /* Load instruction ([E]LPM or LD) is known at compile time:
10851 Do the copy-loop inline. */
10853 rtx (*fun) (rtx, rtx, rtx)
10854 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
10856 insn = fun (xas, loop_reg, loop_reg);
10858 else
10860 rtx (*fun) (rtx, rtx)
10861 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
10863 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
10865 insn = fun (xas, GEN_INT (avr_addr.rampz));
10868 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
10869 emit_insn (insn);
10871 return true;
10875 /* Print assembler for movmem_qi, movmem_hi insns...
10876 $0 : Address Space
10877 $1, $2 : Loop register
10878 Z : Source address
10879 X : Destination address
10882 const char*
10883 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
10885 addr_space_t as = (addr_space_t) INTVAL (op[0]);
10886 enum machine_mode loop_mode = GET_MODE (op[1]);
10887 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
10888 rtx xop[3];
10890 if (plen)
10891 *plen = 0;
10893 xop[0] = op[0];
10894 xop[1] = op[1];
10895 xop[2] = tmp_reg_rtx;
10897 /* Loop label */
10899 avr_asm_len ("0:", xop, plen, 0);
10901 /* Load with post-increment */
10903 switch (as)
10905 default:
10906 gcc_unreachable();
10908 case ADDR_SPACE_GENERIC:
10910 avr_asm_len ("ld %2,Z+", xop, plen, 1);
10911 break;
10913 case ADDR_SPACE_FLASH:
10915 if (AVR_HAVE_LPMX)
10916 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
10917 else
10918 avr_asm_len ("lpm" CR_TAB
10919 "adiw r30,1", xop, plen, 2);
10920 break;
10922 case ADDR_SPACE_FLASH1:
10923 case ADDR_SPACE_FLASH2:
10924 case ADDR_SPACE_FLASH3:
10925 case ADDR_SPACE_FLASH4:
10926 case ADDR_SPACE_FLASH5:
10928 if (AVR_HAVE_ELPMX)
10929 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
10930 else
10931 avr_asm_len ("elpm" CR_TAB
10932 "adiw r30,1", xop, plen, 2);
10933 break;
10936 /* Store with post-increment */
10938 avr_asm_len ("st X+,%2", xop, plen, 1);
10940 /* Decrement loop-counter and set Z-flag */
10942 if (QImode == loop_mode)
10944 avr_asm_len ("dec %1", xop, plen, 1);
10946 else if (sbiw_p)
10948 avr_asm_len ("sbiw %1,1", xop, plen, 1);
10950 else
10952 avr_asm_len ("subi %A1,1" CR_TAB
10953 "sbci %B1,0", xop, plen, 2);
10956 /* Loop until zero */
10958 return avr_asm_len ("brne 0b", xop, plen, 1);
10963 /* Helper for __builtin_avr_delay_cycles */
10965 static rtx
10966 avr_mem_clobber (void)
10968 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
10969 MEM_VOLATILE_P (mem) = 1;
10970 return mem;
10973 static void
10974 avr_expand_delay_cycles (rtx operands0)
10976 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
10977 unsigned HOST_WIDE_INT cycles_used;
10978 unsigned HOST_WIDE_INT loop_count;
10980 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
10982 loop_count = ((cycles - 9) / 6) + 1;
10983 cycles_used = ((loop_count - 1) * 6) + 9;
10984 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
10985 avr_mem_clobber()));
10986 cycles -= cycles_used;
10989 if (IN_RANGE (cycles, 262145, 83886081))
10991 loop_count = ((cycles - 7) / 5) + 1;
10992 if (loop_count > 0xFFFFFF)
10993 loop_count = 0xFFFFFF;
10994 cycles_used = ((loop_count - 1) * 5) + 7;
10995 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
10996 avr_mem_clobber()));
10997 cycles -= cycles_used;
11000 if (IN_RANGE (cycles, 768, 262144))
11002 loop_count = ((cycles - 5) / 4) + 1;
11003 if (loop_count > 0xFFFF)
11004 loop_count = 0xFFFF;
11005 cycles_used = ((loop_count - 1) * 4) + 5;
11006 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
11007 avr_mem_clobber()));
11008 cycles -= cycles_used;
11011 if (IN_RANGE (cycles, 6, 767))
11013 loop_count = cycles / 3;
11014 if (loop_count > 255)
11015 loop_count = 255;
11016 cycles_used = loop_count * 3;
11017 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
11018 avr_mem_clobber()));
11019 cycles -= cycles_used;
11022 while (cycles >= 2)
11024 emit_insn (gen_nopv (GEN_INT(2)));
11025 cycles -= 2;
11028 if (cycles == 1)
11030 emit_insn (gen_nopv (GEN_INT(1)));
11031 cycles--;
11036 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
11038 static double_int
11039 avr_double_int_push_digit (double_int val, int base,
11040 unsigned HOST_WIDE_INT digit)
11042 val = 0 == base
11043 ? val.llshift (32, 64)
11044 : val * double_int::from_uhwi (base);
11046 return val + double_int::from_uhwi (digit);
11050 /* Compute the image of x under f, i.e. perform x --> f(x) */
11052 static int
11053 avr_map (double_int f, int x)
11055 return 0xf & f.lrshift (4*x, 64).to_uhwi ();
11059 /* Return some metrics of map A. */
11061 enum
11063 /* Number of fixed points in { 0 ... 7 } */
11064 MAP_FIXED_0_7,
11066 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11067 MAP_NONFIXED_0_7,
11069 /* Mask representing the fixed points in { 0 ... 7 } */
11070 MAP_MASK_FIXED_0_7,
11072 /* Size of the preimage of { 0 ... 7 } */
11073 MAP_PREIMAGE_0_7,
11075 /* Mask that represents the preimage of { f } */
11076 MAP_MASK_PREIMAGE_F
11079 static unsigned
11080 avr_map_metric (double_int a, int mode)
11082 unsigned i, metric = 0;
11084 for (i = 0; i < 8; i++)
11086 unsigned ai = avr_map (a, i);
11088 if (mode == MAP_FIXED_0_7)
11089 metric += ai == i;
11090 else if (mode == MAP_NONFIXED_0_7)
11091 metric += ai < 8 && ai != i;
11092 else if (mode == MAP_MASK_FIXED_0_7)
11093 metric |= ((unsigned) (ai == i)) << i;
11094 else if (mode == MAP_PREIMAGE_0_7)
11095 metric += ai < 8;
11096 else if (mode == MAP_MASK_PREIMAGE_F)
11097 metric |= ((unsigned) (ai == 0xf)) << i;
11098 else
11099 gcc_unreachable();
11102 return metric;
11106 /* Return true if IVAL has a 0xf in its hexadecimal representation
11107 and false, otherwise. Only nibbles 0..7 are taken into account.
11108 Used as constraint helper for C0f and Cxf. */
11110 bool
11111 avr_has_nibble_0xf (rtx ival)
11113 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
11117 /* We have a set of bits that are mapped by a function F.
11118 Try to decompose F by means of a second function G so that
11120 F = F o G^-1 o G
11124 cost (F o G^-1) + cost (G) < cost (F)
11126 Example: Suppose builtin insert_bits supplies us with the map
11127 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11128 nibble of the result, we can just as well rotate the bits before inserting
11129 them and use the map 0x7654ffff which is cheaper than the original map.
11130 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11132 typedef struct
11134 /* tree code of binary function G */
11135 enum tree_code code;
11137 /* The constant second argument of G */
11138 int arg;
11140 /* G^-1, the inverse of G (*, arg) */
11141 unsigned ginv;
11143 /* The cost of appplying G (*, arg) */
11144 int cost;
11146 /* The composition F o G^-1 (*, arg) for some function F */
11147 double_int map;
11149 /* For debug purpose only */
11150 const char *str;
11151 } avr_map_op_t;
11153 static const avr_map_op_t avr_map_op[] =
11155 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
11156 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
11157 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
11158 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
11159 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
11160 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
11161 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
11162 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
11163 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
11164 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
11165 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
11166 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
11167 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
11168 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
11169 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
11173 /* Try to decompose F as F = (F o G^-1) o G as described above.
11174 The result is a struct representing F o G^-1 and G.
11175 If result.cost < 0 then such a decomposition does not exist. */
11177 static avr_map_op_t
11178 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
11180 int i;
11181 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
11182 avr_map_op_t f_ginv = *g;
11183 double_int ginv = double_int::from_uhwi (g->ginv);
11185 f_ginv.cost = -1;
11187 /* Step 1: Computing F o G^-1 */
11189 for (i = 7; i >= 0; i--)
11191 int x = avr_map (f, i);
11193 if (x <= 7)
11195 x = avr_map (ginv, x);
11197 /* The bit is no element of the image of G: no avail (cost = -1) */
11199 if (x > 7)
11200 return f_ginv;
11203 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
11206 /* Step 2: Compute the cost of the operations.
11207 The overall cost of doing an operation prior to the insertion is
11208 the cost of the insertion plus the cost of the operation. */
11210 /* Step 2a: Compute cost of F o G^-1 */
11212 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
11214 /* The mapping consists only of fixed points and can be folded
11215 to AND/OR logic in the remainder. Reasonable cost is 3. */
11217 f_ginv.cost = 2 + (val_used_p && !val_const_p);
11219 else
11221 rtx xop[4];
11223 /* Get the cost of the insn by calling the output worker with some
11224 fake values. Mimic effect of reloading xop[3]: Unused operands
11225 are mapped to 0 and used operands are reloaded to xop[0]. */
11227 xop[0] = all_regs_rtx[24];
11228 xop[1] = gen_int_mode (f_ginv.map.to_uhwi (), SImode);
11229 xop[2] = all_regs_rtx[25];
11230 xop[3] = val_used_p ? xop[0] : const0_rtx;
11232 avr_out_insert_bits (xop, &f_ginv.cost);
11234 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
11237 /* Step 2b: Add cost of G */
11239 f_ginv.cost += g->cost;
11241 if (avr_log.builtin)
11242 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
11244 return f_ginv;
11248 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11249 XOP[0] and XOP[1] don't overlap.
11250 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11251 If FIXP_P = false: Just move the bit if its position in the destination
11252 is different to its source position. */
11254 static void
11255 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
11257 int bit_dest, b;
11259 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11260 int t_bit_src = -1;
11262 /* We order the operations according to the requested source bit b. */
11264 for (b = 0; b < 8; b++)
11265 for (bit_dest = 0; bit_dest < 8; bit_dest++)
11267 int bit_src = avr_map (map, bit_dest);
11269 if (b != bit_src
11270 || bit_src >= 8
11271 /* Same position: No need to copy as requested by FIXP_P. */
11272 || (bit_dest == bit_src && !fixp_p))
11273 continue;
11275 if (t_bit_src != bit_src)
11277 /* Source bit is not yet in T: Store it to T. */
11279 t_bit_src = bit_src;
11281 xop[3] = GEN_INT (bit_src);
11282 avr_asm_len ("bst %T1%T3", xop, plen, 1);
11285 /* Load destination bit with T. */
11287 xop[3] = GEN_INT (bit_dest);
11288 avr_asm_len ("bld %T0%T3", xop, plen, 1);
11293 /* PLEN == 0: Print assembler code for `insert_bits'.
11294 PLEN != 0: Compute code length in bytes.
11296 OP[0]: Result
11297 OP[1]: The mapping composed of nibbles. If nibble no. N is
11298 0: Bit N of result is copied from bit OP[2].0
11299 ... ...
11300 7: Bit N of result is copied from bit OP[2].7
11301 0xf: Bit N of result is copied from bit OP[3].N
11302 OP[2]: Bits to be inserted
11303 OP[3]: Target value */
11305 const char*
11306 avr_out_insert_bits (rtx *op, int *plen)
11308 double_int map = rtx_to_double_int (op[1]);
11309 unsigned mask_fixed;
11310 bool fixp_p = true;
11311 rtx xop[4];
11313 xop[0] = op[0];
11314 xop[1] = op[2];
11315 xop[2] = op[3];
11317 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
11319 if (plen)
11320 *plen = 0;
11321 else if (flag_print_asm_name)
11322 fprintf (asm_out_file,
11323 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
11324 map.to_uhwi () & GET_MODE_MASK (SImode));
11326 /* If MAP has fixed points it might be better to initialize the result
11327 with the bits to be inserted instead of moving all bits by hand. */
11329 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
11331 if (REGNO (xop[0]) == REGNO (xop[1]))
11333 /* Avoid early-clobber conflicts */
11335 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
11336 xop[1] = tmp_reg_rtx;
11337 fixp_p = false;
11340 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
11342 /* XOP[2] is used and reloaded to XOP[0] already */
11344 int n_fix = 0, n_nofix = 0;
11346 gcc_assert (REG_P (xop[2]));
11348 /* Get the code size of the bit insertions; once with all bits
11349 moved and once with fixed points omitted. */
11351 avr_move_bits (xop, map, true, &n_fix);
11352 avr_move_bits (xop, map, false, &n_nofix);
11354 if (fixp_p && n_fix - n_nofix > 3)
11356 xop[3] = gen_int_mode (~mask_fixed, QImode);
11358 avr_asm_len ("eor %0,%1" CR_TAB
11359 "andi %0,%3" CR_TAB
11360 "eor %0,%1", xop, plen, 3);
11361 fixp_p = false;
11364 else
11366 /* XOP[2] is unused */
11368 if (fixp_p && mask_fixed)
11370 avr_asm_len ("mov %0,%1", xop, plen, 1);
11371 fixp_p = false;
11375 /* Move/insert remaining bits. */
11377 avr_move_bits (xop, map, fixp_p, plen);
11379 return "";
11383 /* IDs for all the AVR builtins. */
11385 enum avr_builtin_id
11387 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11388 AVR_BUILTIN_ ## NAME,
11389 #include "builtins.def"
11390 #undef DEF_BUILTIN
11392 AVR_BUILTIN_COUNT
11395 struct GTY(()) avr_builtin_description
11397 enum insn_code icode;
11398 int n_args;
11399 tree fndecl;
11403 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11404 that a built-in's ID can be used to access the built-in by means of
11405 avr_bdesc[ID] */
11407 static GTY(()) struct avr_builtin_description
11408 avr_bdesc[AVR_BUILTIN_COUNT] =
11410 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11411 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11412 #include "builtins.def"
11413 #undef DEF_BUILTIN
11417 /* Implement `TARGET_BUILTIN_DECL'. */
11419 static tree
11420 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11422 if (id < AVR_BUILTIN_COUNT)
11423 return avr_bdesc[id].fndecl;
11425 return error_mark_node;
11429 static void
11430 avr_init_builtin_int24 (void)
11432 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11433 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11435 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11436 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
11440 /* Implement `TARGET_INIT_BUILTINS' */
11441 /* Set up all builtin functions for this target. */
11443 static void
11444 avr_init_builtins (void)
11446 tree void_ftype_void
11447 = build_function_type_list (void_type_node, NULL_TREE);
11448 tree uchar_ftype_uchar
11449 = build_function_type_list (unsigned_char_type_node,
11450 unsigned_char_type_node,
11451 NULL_TREE);
11452 tree uint_ftype_uchar_uchar
11453 = build_function_type_list (unsigned_type_node,
11454 unsigned_char_type_node,
11455 unsigned_char_type_node,
11456 NULL_TREE);
11457 tree int_ftype_char_char
11458 = build_function_type_list (integer_type_node,
11459 char_type_node,
11460 char_type_node,
11461 NULL_TREE);
11462 tree int_ftype_char_uchar
11463 = build_function_type_list (integer_type_node,
11464 char_type_node,
11465 unsigned_char_type_node,
11466 NULL_TREE);
11467 tree void_ftype_ulong
11468 = build_function_type_list (void_type_node,
11469 long_unsigned_type_node,
11470 NULL_TREE);
11472 tree uchar_ftype_ulong_uchar_uchar
11473 = build_function_type_list (unsigned_char_type_node,
11474 long_unsigned_type_node,
11475 unsigned_char_type_node,
11476 unsigned_char_type_node,
11477 NULL_TREE);
11479 tree const_memx_void_node
11480 = build_qualified_type (void_type_node,
11481 TYPE_QUAL_CONST
11482 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
11484 tree const_memx_ptr_type_node
11485 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
11487 tree char_ftype_const_memx_ptr
11488 = build_function_type_list (char_type_node,
11489 const_memx_ptr_type_node,
11490 NULL);
11492 #define ITYP(T) \
11493 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11495 #define FX_FTYPE_FX(fx) \
11496 tree fx##r_ftype_##fx##r \
11497 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11498 tree fx##k_ftype_##fx##k \
11499 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11501 #define FX_FTYPE_FX_INT(fx) \
11502 tree fx##r_ftype_##fx##r_int \
11503 = build_function_type_list (node_##fx##r, node_##fx##r, \
11504 integer_type_node, NULL); \
11505 tree fx##k_ftype_##fx##k_int \
11506 = build_function_type_list (node_##fx##k, node_##fx##k, \
11507 integer_type_node, NULL)
11509 #define INT_FTYPE_FX(fx) \
11510 tree int_ftype_##fx##r \
11511 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11512 tree int_ftype_##fx##k \
11513 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11515 #define INTX_FTYPE_FX(fx) \
11516 tree int##fx##r_ftype_##fx##r \
11517 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11518 tree int##fx##k_ftype_##fx##k \
11519 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11521 #define FX_FTYPE_INTX(fx) \
11522 tree fx##r_ftype_int##fx##r \
11523 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11524 tree fx##k_ftype_int##fx##k \
11525 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
11527 tree node_hr = short_fract_type_node;
11528 tree node_r = fract_type_node;
11529 tree node_lr = long_fract_type_node;
11530 tree node_llr = long_long_fract_type_node;
11532 tree node_uhr = unsigned_short_fract_type_node;
11533 tree node_ur = unsigned_fract_type_node;
11534 tree node_ulr = unsigned_long_fract_type_node;
11535 tree node_ullr = unsigned_long_long_fract_type_node;
11537 tree node_hk = short_accum_type_node;
11538 tree node_k = accum_type_node;
11539 tree node_lk = long_accum_type_node;
11540 tree node_llk = long_long_accum_type_node;
11542 tree node_uhk = unsigned_short_accum_type_node;
11543 tree node_uk = unsigned_accum_type_node;
11544 tree node_ulk = unsigned_long_accum_type_node;
11545 tree node_ullk = unsigned_long_long_accum_type_node;
11548 /* For absfx builtins. */
11550 FX_FTYPE_FX (h);
11551 FX_FTYPE_FX ();
11552 FX_FTYPE_FX (l);
11553 FX_FTYPE_FX (ll);
11555 /* For roundfx builtins. */
11557 FX_FTYPE_FX_INT (h);
11558 FX_FTYPE_FX_INT ();
11559 FX_FTYPE_FX_INT (l);
11560 FX_FTYPE_FX_INT (ll);
11562 FX_FTYPE_FX_INT (uh);
11563 FX_FTYPE_FX_INT (u);
11564 FX_FTYPE_FX_INT (ul);
11565 FX_FTYPE_FX_INT (ull);
11567 /* For countlsfx builtins. */
11569 INT_FTYPE_FX (h);
11570 INT_FTYPE_FX ();
11571 INT_FTYPE_FX (l);
11572 INT_FTYPE_FX (ll);
11574 INT_FTYPE_FX (uh);
11575 INT_FTYPE_FX (u);
11576 INT_FTYPE_FX (ul);
11577 INT_FTYPE_FX (ull);
11579 /* For bitsfx builtins. */
11581 INTX_FTYPE_FX (h);
11582 INTX_FTYPE_FX ();
11583 INTX_FTYPE_FX (l);
11584 INTX_FTYPE_FX (ll);
11586 INTX_FTYPE_FX (uh);
11587 INTX_FTYPE_FX (u);
11588 INTX_FTYPE_FX (ul);
11589 INTX_FTYPE_FX (ull);
11591 /* For fxbits builtins. */
11593 FX_FTYPE_INTX (h);
11594 FX_FTYPE_INTX ();
11595 FX_FTYPE_INTX (l);
11596 FX_FTYPE_INTX (ll);
11598 FX_FTYPE_INTX (uh);
11599 FX_FTYPE_INTX (u);
11600 FX_FTYPE_INTX (ul);
11601 FX_FTYPE_INTX (ull);
11604 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11606 int id = AVR_BUILTIN_ ## NAME; \
11607 const char *Name = "__builtin_avr_" #NAME; \
11608 char *name = (char*) alloca (1 + strlen (Name)); \
11610 gcc_assert (id < AVR_BUILTIN_COUNT); \
11611 avr_bdesc[id].fndecl \
11612 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11613 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11615 #include "builtins.def"
11616 #undef DEF_BUILTIN
11618 avr_init_builtin_int24 ();
11622 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11623 with non-void result and 1 ... 3 arguments. */
11625 static rtx
11626 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
11628 rtx pat, xop[3];
11629 int n, n_args = call_expr_nargs (exp);
11630 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11632 gcc_assert (n_args >= 1 && n_args <= 3);
11634 if (target == NULL_RTX
11635 || GET_MODE (target) != tmode
11636 || !insn_data[icode].operand[0].predicate (target, tmode))
11638 target = gen_reg_rtx (tmode);
11641 for (n = 0; n < n_args; n++)
11643 tree arg = CALL_EXPR_ARG (exp, n);
11644 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11645 enum machine_mode opmode = GET_MODE (op);
11646 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
11648 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
11650 opmode = HImode;
11651 op = gen_lowpart (HImode, op);
11654 /* In case the insn wants input operands in modes different from
11655 the result, abort. */
11657 gcc_assert (opmode == mode || opmode == VOIDmode);
11659 if (!insn_data[icode].operand[n+1].predicate (op, mode))
11660 op = copy_to_mode_reg (mode, op);
11662 xop[n] = op;
11665 switch (n_args)
11667 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
11668 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
11669 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
11671 default:
11672 gcc_unreachable();
11675 if (pat == NULL_RTX)
11676 return NULL_RTX;
11678 emit_insn (pat);
11680 return target;
11684 /* Implement `TARGET_EXPAND_BUILTIN'. */
11685 /* Expand an expression EXP that calls a built-in function,
11686 with result going to TARGET if that's convenient
11687 (and in mode MODE if that's convenient).
11688 SUBTARGET may be used as the target for computing one of EXP's operands.
11689 IGNORE is nonzero if the value is to be ignored. */
11691 static rtx
11692 avr_expand_builtin (tree exp, rtx target,
11693 rtx subtarget ATTRIBUTE_UNUSED,
11694 enum machine_mode mode ATTRIBUTE_UNUSED,
11695 int ignore)
11697 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
11698 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
11699 unsigned int id = DECL_FUNCTION_CODE (fndecl);
11700 const struct avr_builtin_description *d = &avr_bdesc[id];
11701 tree arg0;
11702 rtx op0;
11704 gcc_assert (id < AVR_BUILTIN_COUNT);
11706 switch (id)
11708 case AVR_BUILTIN_NOP:
11709 emit_insn (gen_nopv (GEN_INT(1)));
11710 return 0;
11712 case AVR_BUILTIN_DELAY_CYCLES:
11714 arg0 = CALL_EXPR_ARG (exp, 0);
11715 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11717 if (!CONST_INT_P (op0))
11718 error ("%s expects a compile time integer constant", bname);
11719 else
11720 avr_expand_delay_cycles (op0);
11722 return NULL_RTX;
11725 case AVR_BUILTIN_INSERT_BITS:
11727 arg0 = CALL_EXPR_ARG (exp, 0);
11728 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11730 if (!CONST_INT_P (op0))
11732 error ("%s expects a compile time long integer constant"
11733 " as first argument", bname);
11734 return target;
11737 break;
11740 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
11741 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
11742 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
11743 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
11745 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
11746 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
11747 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
11748 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
11750 /* Warn about odd rounding. Rounding points >= FBIT will have
11751 no effect. */
11753 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
11754 break;
11756 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
11758 if (rbit >= (int) GET_MODE_FBIT (mode))
11760 warning (OPT_Wextra, "rounding to %d bits has no effect for "
11761 "fixed-point value with %d fractional bits",
11762 rbit, GET_MODE_FBIT (mode));
11764 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
11765 EXPAND_NORMAL);
11767 else if (rbit <= - (int) GET_MODE_IBIT (mode))
11769 warning (0, "rounding result will always be 0");
11770 return CONST0_RTX (mode);
11773 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
11775 TR 18037 only specifies results for RP > 0. However, the
11776 remaining cases of -IBIT < RP <= 0 can easily be supported
11777 without any additional overhead. */
11779 break; /* round */
11782 /* No fold found and no insn: Call support function from libgcc. */
11784 if (d->icode == CODE_FOR_nothing
11785 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
11787 return expand_call (exp, target, ignore);
11790 /* No special treatment needed: vanilla expand. */
11792 gcc_assert (d->icode != CODE_FOR_nothing);
11793 gcc_assert (d->n_args == call_expr_nargs (exp));
11795 if (d->n_args == 0)
11797 emit_insn ((GEN_FCN (d->icode)) (target));
11798 return NULL_RTX;
11801 return avr_default_expand_builtin (d->icode, exp, target);
11805 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
11807 static tree
11808 avr_fold_absfx (tree tval)
11810 if (FIXED_CST != TREE_CODE (tval))
11811 return NULL_TREE;
11813 /* Our fixed-points have no padding: Use double_int payload directly. */
11815 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
11816 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
11817 double_int ival = fval.data.sext (bits);
11819 if (!ival.is_negative())
11820 return tval;
11822 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
11824 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
11825 ? double_int::max_value (bits, false)
11826 : -ival;
11828 return build_fixed (TREE_TYPE (tval), fval);
11832 /* Implement `TARGET_FOLD_BUILTIN'. */
11834 static tree
11835 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
11836 bool ignore ATTRIBUTE_UNUSED)
11838 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
11839 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
11841 if (!optimize)
11842 return NULL_TREE;
11844 switch (fcode)
11846 default:
11847 break;
11849 case AVR_BUILTIN_SWAP:
11851 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
11852 build_int_cst (val_type, 4));
11855 case AVR_BUILTIN_ABSHR:
11856 case AVR_BUILTIN_ABSR:
11857 case AVR_BUILTIN_ABSLR:
11858 case AVR_BUILTIN_ABSLLR:
11860 case AVR_BUILTIN_ABSHK:
11861 case AVR_BUILTIN_ABSK:
11862 case AVR_BUILTIN_ABSLK:
11863 case AVR_BUILTIN_ABSLLK:
11864 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
11866 return avr_fold_absfx (arg[0]);
11868 case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
11869 case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
11870 case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
11871 case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:
11873 case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
11874 case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
11875 case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
11876 case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:
11878 case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
11879 case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
11880 case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
11881 case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:
11883 case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
11884 case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
11885 case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
11886 case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:
11888 gcc_assert (TYPE_PRECISION (val_type)
11889 == TYPE_PRECISION (TREE_TYPE (arg[0])));
11891 return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
11893 case AVR_BUILTIN_INSERT_BITS:
11895 tree tbits = arg[1];
11896 tree tval = arg[2];
11897 tree tmap;
11898 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
11899 double_int map;
11900 bool changed = false;
11901 unsigned i;
11902 avr_map_op_t best_g;
11904 if (TREE_CODE (arg[0]) != INTEGER_CST)
11906 /* No constant as first argument: Don't fold this and run into
11907 error in avr_expand_builtin. */
11909 break;
11912 map = tree_to_double_int (arg[0]);
11913 tmap = double_int_to_tree (map_type, map);
11915 if (TREE_CODE (tval) != INTEGER_CST
11916 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
11918 /* There are no F in the map, i.e. 3rd operand is unused.
11919 Replace that argument with some constant to render
11920 respective input unused. */
11922 tval = build_int_cst (val_type, 0);
11923 changed = true;
11926 if (TREE_CODE (tbits) != INTEGER_CST
11927 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
11929 /* Similar for the bits to be inserted. If they are unused,
11930 we can just as well pass 0. */
11932 tbits = build_int_cst (val_type, 0);
11935 if (TREE_CODE (tbits) == INTEGER_CST)
11937 /* Inserting bits known at compile time is easy and can be
11938 performed by AND and OR with appropriate masks. */
11940 int bits = TREE_INT_CST_LOW (tbits);
11941 int mask_ior = 0, mask_and = 0xff;
11943 for (i = 0; i < 8; i++)
11945 int mi = avr_map (map, i);
11947 if (mi < 8)
11949 if (bits & (1 << mi)) mask_ior |= (1 << i);
11950 else mask_and &= ~(1 << i);
11954 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
11955 build_int_cst (val_type, mask_ior));
11956 return fold_build2 (BIT_AND_EXPR, val_type, tval,
11957 build_int_cst (val_type, mask_and));
11960 if (changed)
11961 return build_call_expr (fndecl, 3, tmap, tbits, tval);
11963 /* If bits don't change their position we can use vanilla logic
11964 to merge the two arguments. */
11966 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
11968 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
11969 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
11971 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
11972 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
11973 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
11976 /* Try to decomposing map to reduce overall cost. */
11978 if (avr_log.builtin)
11979 avr_edump ("\n%?: %X\n%?: ROL cost: ", map);
11981 best_g = avr_map_op[0];
11982 best_g.cost = 1000;
11984 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
11986 avr_map_op_t g
11987 = avr_map_decompose (map, avr_map_op + i,
11988 TREE_CODE (tval) == INTEGER_CST);
11990 if (g.cost >= 0 && g.cost < best_g.cost)
11991 best_g = g;
11994 if (avr_log.builtin)
11995 avr_edump ("\n");
11997 if (best_g.arg == 0)
11998 /* No optimization found */
11999 break;
12001 /* Apply operation G to the 2nd argument. */
12003 if (avr_log.builtin)
12004 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
12005 best_g.str, best_g.arg, best_g.map, best_g.cost);
12007 /* Do right-shifts arithmetically: They copy the MSB instead of
12008 shifting in a non-usable value (0) as with logic right-shift. */
12010 tbits = fold_convert (signed_char_type_node, tbits);
12011 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
12012 build_int_cst (val_type, best_g.arg));
12013 tbits = fold_convert (val_type, tbits);
12015 /* Use map o G^-1 instead of original map to undo the effect of G. */
12017 tmap = double_int_to_tree (map_type, best_g.map);
12019 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12020 } /* AVR_BUILTIN_INSERT_BITS */
12023 return NULL_TREE;
12028 /* Initialize the GCC target structure. */
12030 #undef TARGET_ASM_ALIGNED_HI_OP
12031 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12032 #undef TARGET_ASM_ALIGNED_SI_OP
12033 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12034 #undef TARGET_ASM_UNALIGNED_HI_OP
12035 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12036 #undef TARGET_ASM_UNALIGNED_SI_OP
12037 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12038 #undef TARGET_ASM_INTEGER
12039 #define TARGET_ASM_INTEGER avr_assemble_integer
12040 #undef TARGET_ASM_FILE_START
12041 #define TARGET_ASM_FILE_START avr_file_start
12042 #undef TARGET_ASM_FILE_END
12043 #define TARGET_ASM_FILE_END avr_file_end
12045 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12046 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12047 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12048 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12050 #undef TARGET_FUNCTION_VALUE
12051 #define TARGET_FUNCTION_VALUE avr_function_value
12052 #undef TARGET_LIBCALL_VALUE
12053 #define TARGET_LIBCALL_VALUE avr_libcall_value
12054 #undef TARGET_FUNCTION_VALUE_REGNO_P
12055 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12057 #undef TARGET_ATTRIBUTE_TABLE
12058 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12059 #undef TARGET_INSERT_ATTRIBUTES
12060 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12061 #undef TARGET_SECTION_TYPE_FLAGS
12062 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12064 #undef TARGET_ASM_NAMED_SECTION
12065 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12066 #undef TARGET_ASM_INIT_SECTIONS
12067 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12068 #undef TARGET_ENCODE_SECTION_INFO
12069 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12070 #undef TARGET_ASM_SELECT_SECTION
12071 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12073 #undef TARGET_REGISTER_MOVE_COST
12074 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12075 #undef TARGET_MEMORY_MOVE_COST
12076 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12077 #undef TARGET_RTX_COSTS
12078 #define TARGET_RTX_COSTS avr_rtx_costs
12079 #undef TARGET_ADDRESS_COST
12080 #define TARGET_ADDRESS_COST avr_address_cost
12081 #undef TARGET_MACHINE_DEPENDENT_REORG
12082 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12083 #undef TARGET_FUNCTION_ARG
12084 #define TARGET_FUNCTION_ARG avr_function_arg
12085 #undef TARGET_FUNCTION_ARG_ADVANCE
12086 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12088 #undef TARGET_SET_CURRENT_FUNCTION
12089 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12091 #undef TARGET_RETURN_IN_MEMORY
12092 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12094 #undef TARGET_STRICT_ARGUMENT_NAMING
12095 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12097 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12098 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12100 #undef TARGET_HARD_REGNO_SCRATCH_OK
12101 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12102 #undef TARGET_CASE_VALUES_THRESHOLD
12103 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12105 #undef TARGET_FRAME_POINTER_REQUIRED
12106 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12107 #undef TARGET_CAN_ELIMINATE
12108 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12110 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12111 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12113 #undef TARGET_WARN_FUNC_RETURN
12114 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12116 #undef TARGET_CLASS_LIKELY_SPILLED_P
12117 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12119 #undef TARGET_OPTION_OVERRIDE
12120 #define TARGET_OPTION_OVERRIDE avr_option_override
12122 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12123 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12125 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12126 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12128 #undef TARGET_INIT_BUILTINS
12129 #define TARGET_INIT_BUILTINS avr_init_builtins
12131 #undef TARGET_BUILTIN_DECL
12132 #define TARGET_BUILTIN_DECL avr_builtin_decl
12134 #undef TARGET_EXPAND_BUILTIN
12135 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12137 #undef TARGET_FOLD_BUILTIN
12138 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12140 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12141 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12143 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12144 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12146 #undef TARGET_BUILD_BUILTIN_VA_LIST
12147 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12149 #undef TARGET_FIXED_POINT_SUPPORTED_P
12150 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12152 #undef TARGET_ADDR_SPACE_SUBSET_P
12153 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12155 #undef TARGET_ADDR_SPACE_CONVERT
12156 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12158 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12159 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12161 #undef TARGET_ADDR_SPACE_POINTER_MODE
12162 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12164 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12165 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12166 avr_addr_space_legitimate_address_p
12168 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12169 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12171 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12172 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12174 #undef TARGET_SECONDARY_RELOAD
12175 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12177 #undef TARGET_PRINT_OPERAND
12178 #define TARGET_PRINT_OPERAND avr_print_operand
12179 #undef TARGET_PRINT_OPERAND_ADDRESS
12180 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12181 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12182 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12184 struct gcc_target targetm = TARGET_INITIALIZER;
12187 #include "gt-avr.h"