Merged revisions 208012,208018-208019,208021,208023-208030,208033,208037,208040-20804...
[official-gcc.git] / main / gcc / config / avr / avr.c
blob8ca7de0b36ddc56cd8566d220d43f66732b654ca
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "print-tree.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
39 #include "output.h"
40 #include "expr.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
43 #include "obstack.h"
44 #include "function.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "tm_p.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "params.h"
53 #include "df.h"
/* Maximal allowed offset for an address in the LD command.
   NOTE(review): presumably because LDD supports displacements 0..63,
   so the last byte of a MODE-sized access must stay below 64 -- confirm
   against the AVR instruction set manual.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Return true if STR starts with PREFIX and false, otherwise. */
#define STR_PREFIX_P(STR,PREFIX) (strncmp (STR, PREFIX, strlen (PREFIX)) == 0)
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.
   (0xf * FLAG yields a 4-bit mask anchored at FLAG's bit position.)  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fix: expand the macro parameter SYM instead of a hard-coded `sym';
   the original expansion only worked when the caller's variable
   happened to be named `sym' (macro-hygiene bug).  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM (same fix as above).  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initialized must be used).
   NOTE(review): field order appears to be { id, in-flash flag, pointer
   size in bytes, keyword, flash segment number, section name } -- confirm
   against the avr_addrspace_t declaration in avr.h.  */

const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "",         0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override from the selected architecture's
   SFR offset.  */
static avr_addr_t avr_addr;
124 /* Prototypes for local helper functions. */
126 static const char* out_movqi_r_mr (rtx, rtx[], int*);
127 static const char* out_movhi_r_mr (rtx, rtx[], int*);
128 static const char* out_movsi_r_mr (rtx, rtx[], int*);
129 static const char* out_movqi_mr_r (rtx, rtx[], int*);
130 static const char* out_movhi_mr_r (rtx, rtx[], int*);
131 static const char* out_movsi_mr_r (rtx, rtx[], int*);
133 static int get_sequence_length (rtx insns);
134 static int sequent_regs_live (void);
135 static const char *ptrreg_to_str (int);
136 static const char *cond_string (enum rtx_code);
137 static int avr_num_arg_regs (enum machine_mode, const_tree);
138 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
139 int, bool);
140 static void output_reload_in_const (rtx*, rtx, int*, bool);
141 static struct machine_function * avr_init_machine_status (void);
144 /* Prototypes for hook implementors if needed before their implementation. */
146 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
/* Allocate registers from r25 to r8 for parameters for function calls.
   NOTE(review): 26 is one past R25 -- argument registers are presumably
   assigned downwards starting at R25; confirm against the argument
   passing code.  */
#define FIRST_CUM_REG 26

/* Implicit target register of LPM instruction (R0) */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Current architecture. */
const avr_arch_t *avr_current_arch;

/* Current device. */
const avr_mcu_t *avr_current_device;

/* Section to put switch tables in. */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but it must not be local there because of GTY. */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md. */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data. */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *start = lo;

  while (*up)
    *lo++ = TOLOWER (*up++);

  *lo = '\0';

  return start;
}
/* Custom function to count number of set bits. */

static inline int
avr_popcount (unsigned int val)
{
  int count;

  /* Kernighan's trick: each iteration clears the lowest set bit.  */
  for (count = 0; val != 0; count++)
    val &= val - 1;

  return count;
}
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  /* CONST_INT carries VOIDmode; treat it as a 32-bit (SImode) value.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      /* Extract byte I of XVAL and check its popcount against POP_MASK.  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
274 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
275 the bit representation of X by "casting" it to CONST_INT. */
278 avr_to_int_mode (rtx x)
280 enum machine_mode mode = GET_MODE (x);
282 return VOIDmode == mode
284 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
/* Implement `TARGET_OPTION_OVERRIDE'. */

static void
avr_option_override (void)
{
  /* NOTE(review): presumably disabled because address 0 is a valid
     RAM/SFR address on AVR, so null-pointer accesses must not be
     optimized away -- confirm.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries so save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* Position-independent code is not supported on AVR.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];

  /* RAM addresses of some SFRs common to all devices in respective arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  /* GC-allocated and zero-initialized, so all machine flags start as 0.  */
  return ggc_alloc_cleared_machine_function ();
}
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  int regno;

  /* One QImode REG rtx per general purpose register.  */
  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* SFRs are represented as MEMs of their RAM address (set up in
     avr_option_override).  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
/* Implement `REGNO_REG_CLASS'. */
/* Return register class for register R. */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  /* The table covers hard registers 0..33: the 32 GPRs plus SPL/SPH.  */
  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
424 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
426 static bool
427 avr_scalar_mode_supported_p (enum machine_mode mode)
429 if (ALL_FIXED_POINT_MODE_P (mode))
430 return true;
432 if (PSImode == mode)
433 return true;
435 return default_scalar_mode_supported_p (mode);
439 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
441 static bool
442 avr_decl_flash_p (tree decl)
444 if (TREE_CODE (decl) != VAR_DECL
445 || TREE_TYPE (decl) == error_mark_node)
447 return false;
450 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
454 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
455 address space and FALSE, otherwise. */
457 static bool
458 avr_decl_memx_p (tree decl)
460 if (TREE_CODE (decl) != VAR_DECL
461 || TREE_TYPE (decl) == error_mark_node)
463 return false;
466 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
470 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
472 bool
473 avr_mem_flash_p (rtx x)
475 return (MEM_P (x)
476 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
480 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
481 address space and FALSE, otherwise. */
483 bool
484 avr_mem_memx_p (rtx x)
486 return (MEM_P (x)
487 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
491 /* A helper for the subsequent function attribute used to dig for
492 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
494 static inline int
495 avr_lookup_function_attribute1 (const_tree func, const char *name)
497 if (FUNCTION_DECL == TREE_CODE (func))
499 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
501 return true;
504 func = TREE_TYPE (func);
507 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
508 || TREE_CODE (func) == METHOD_TYPE);
510 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  /* Cache the attribute lookups on the per-function machine struct.  */
  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting
     features.  */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC
         started using this when it switched from SIGNAL and INTERRUPT
         to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */

int
avr_accumulate_outgoing_args (void)
{
  /* Before cfun is set up, fall back to the plain target switch.  */
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
     gcc.c-torture/execute/built-in-setjmp.c
     gcc.c-torture/execute/builtins/sprintf-chk.c  */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
661 /* Report contribution of accumulated outgoing arguments to stack size. */
663 static inline int
664 avr_outgoing_args_size (void)
666 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
/* Implement `STARTING_FRAME_OFFSET'. */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* ISRs must save any call-used register that a non-leaf body may
         clobber; otherwise only live call-saved registers are pushed.  */
      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */

static bool
avr_allocate_stack_slots_for_args (void)
{
  /* Naked functions get no prologue, so don't set up argument slots.  */
  return !cfun->machine->is_naked;
}
736 /* Return true if register FROM can be eliminated via register TO. */
738 static bool
739 avr_can_eliminate (const int from, const int to)
741 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
742 || !frame_pointer_needed);
/* Implement `TARGET_WARN_FUNC_RETURN'.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}
/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      /* 2 bytes for the saved frame pointer, if one is set up.  */
      int offset = frame_pointer_needed ? 2 : 0;
      /* Size of the return address pushed by CALL: 3 bytes on devices
         with EIJMP/EICALL, 2 bytes otherwise.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      /* Plus the registers pushed in the prologue.  */
      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
/* Helper for the function below.  Build a fixed-point type node for
   MODE, saturating iff SAT_P, byte-aligned, and store it in *NODE.  */

static void
avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  /* Everything is byte-aligned on AVR.  */
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is return address of function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 bytes of the 3-byte return address are reachable
         this way; warn the user about the truncation.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* The address was pushed big-endian; swap the two bytes.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  /* True iff nothing was pushed and no frame was set up, and no
     special return sequence (RETI etc.) is required.  */
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! cfun->machine->is_interrupt
          && ! cfun->machine->is_signal
          && ! cfun->machine->is_naked
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.
   Returns the length of a maximal sequence of live call-saved registers
   ending at the frame pointer (r28/r29), or 0 if the live registers do
   not form one contiguous sequence -- used to decide whether the
   out-of-line prologue_saves helper can be used.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* The frame pointer pair r28/r29 is always part of the sequence.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* Nonzero only if the live registers form one unbroken run.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length sequence of insns.
   Sums the `length' insn attribute over all insns in INSNS.  */

int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement `INCOMING_RETURN_ADDR_RTX'.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}
/* Helper for expand_prologue.  Emit a push of a byte register REGNO;
   mark the insn frame-related iff FRAME_RELATED_P.  Also accounts the
   push in cfun->machine->stack_usage.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* PUSH uses post-decrement addressing on the stack pointer.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
1016 static void
1017 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1019 rtx insn;
1020 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1021 int live_seq = sequent_regs_live ();
1023 HOST_WIDE_INT size_max
1024 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1026 bool minimize = (TARGET_CALL_PROLOGUES
1027 && size < size_max
1028 && live_seq
1029 && !isr_p
1030 && !cfun->machine->is_OS_task
1031 && !cfun->machine->is_OS_main);
1033 if (minimize
1034 && (frame_pointer_needed
1035 || avr_outgoing_args_size() > 8
1036 || (AVR_2_BYTE_PC && live_seq > 6)
1037 || live_seq > 7))
1039 rtx pattern;
1040 int first_reg, reg, offset;
1042 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1043 gen_int_mode (size, HImode));
1045 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1046 gen_int_mode (live_seq+size, HImode));
1047 insn = emit_insn (pattern);
1048 RTX_FRAME_RELATED_P (insn) = 1;
1050 /* Describe the effect of the unspec_volatile call to prologue_saves.
1051 Note that this formulation assumes that add_reg_note pushes the
1052 notes to the front. Thus we build them in the reverse order of
1053 how we want dwarf2out to process them. */
1055 /* The function does always set frame_pointer_rtx, but whether that
1056 is going to be permanent in the function is frame_pointer_needed. */
1058 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1059 gen_rtx_SET (VOIDmode, (frame_pointer_needed
1060 ? frame_pointer_rtx
1061 : stack_pointer_rtx),
1062 plus_constant (Pmode, stack_pointer_rtx,
1063 -(size + live_seq))));
1065 /* Note that live_seq always contains r28+r29, but the other
1066 registers to be saved are all below 18. */
1068 first_reg = 18 - (live_seq - 2);
1070 for (reg = 29, offset = -live_seq + 1;
1071 reg >= first_reg;
1072 reg = (reg == 28 ? 17 : reg - 1), ++offset)
1074 rtx m, r;
1076 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1077 offset));
1078 r = gen_rtx_REG (QImode, reg);
1079 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1082 cfun->machine->stack_usage += size + live_seq;
1084 else /* !minimize */
1086 int reg;
1088 for (reg = 0; reg < 32; ++reg)
1089 if (TEST_HARD_REG_BIT (set, reg))
1090 emit_push_byte (reg, true);
1092 if (frame_pointer_needed
1093 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1095 /* Push frame pointer. Always be consistent about the
1096 ordering of pushes -- epilogue_restores expects the
1097 register pair to be pushed low byte first. */
1099 emit_push_byte (REG_Y, true);
1100 emit_push_byte (REG_Y + 1, true);
1103 if (frame_pointer_needed
1104 && size == 0)
1106 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1107 RTX_FRAME_RELATED_P (insn) = 1;
1110 if (size != 0)
1112 /* Creating a frame can be done by direct manipulation of the
1113 stack or via the frame pointer. These two methods are:
1114 fp = sp
1115 fp -= size
1116 sp = fp
1118 sp -= size
1119 fp = sp (*)
1120 the optimum method depends on function type, stack and
1121 frame size. To avoid a complex logic, both methods are
1122 tested and shortest is selected.
1124 There is also the case where SIZE != 0 and no frame pointer is
1125 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1126 In that case, insn (*) is not needed in that case.
1127 We use the X register as scratch. This is save because in X
1128 is call-clobbered.
1129 In an interrupt routine, the case of SIZE != 0 together with
1130 !frame_pointer_needed can only occur if the function is not a
1131 leaf function and thus X has already been saved. */
1133 int irq_state = -1;
1134 HOST_WIDE_INT size_cfa = size, neg_size;
1135 rtx fp_plus_insns, fp, my_fp;
1137 gcc_assert (frame_pointer_needed
1138 || !isr_p
1139 || !crtl->is_leaf);
1141 fp = my_fp = (frame_pointer_needed
1142 ? frame_pointer_rtx
1143 : gen_rtx_REG (Pmode, REG_X));
1145 if (AVR_HAVE_8BIT_SP)
1147 /* The high byte (r29) does not change:
1148 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1150 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1153 /* Cut down size and avoid size = 0 so that we don't run
1154 into ICE like PR52488 in the remainder. */
1156 if (size > size_max)
1158 /* Don't error so that insane code from newlib still compiles
1159 and does not break building newlib. As PR51345 is implemented
1160 now, there are multilib variants with -msp8.
1162 If user wants sanity checks he can use -Wstack-usage=
1163 or similar options.
1165 For CFA we emit the original, non-saturated size so that
1166 the generic machinery is aware of the real stack usage and
1167 will print the above diagnostic as expected. */
1169 size = size_max;
1172 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1173 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1175 /************ Method 1: Adjust frame pointer ************/
1177 start_sequence ();
1179 /* Normally, the dwarf2out frame-related-expr interpreter does
1180 not expect to have the CFA change once the frame pointer is
1181 set up. Thus, we avoid marking the move insn below and
1182 instead indicate that the entire operation is complete after
1183 the frame pointer subtraction is done. */
1185 insn = emit_move_insn (fp, stack_pointer_rtx);
1186 if (frame_pointer_needed)
1188 RTX_FRAME_RELATED_P (insn) = 1;
1189 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1190 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1193 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1194 my_fp, neg_size));
1196 if (frame_pointer_needed)
1198 RTX_FRAME_RELATED_P (insn) = 1;
1199 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1200 gen_rtx_SET (VOIDmode, fp,
1201 plus_constant (Pmode, fp,
1202 -size_cfa)));
1205 /* Copy to stack pointer. Note that since we've already
1206 changed the CFA to the frame pointer this operation
1207 need not be annotated if frame pointer is needed.
1208 Always move through unspec, see PR50063.
1209 For meaning of irq_state see movhi_sp_r insn. */
1211 if (cfun->machine->is_interrupt)
1212 irq_state = 1;
1214 if (TARGET_NO_INTERRUPTS
1215 || cfun->machine->is_signal
1216 || cfun->machine->is_OS_main)
1217 irq_state = 0;
1219 if (AVR_HAVE_8BIT_SP)
1220 irq_state = 2;
1222 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1223 fp, GEN_INT (irq_state)));
1224 if (!frame_pointer_needed)
1226 RTX_FRAME_RELATED_P (insn) = 1;
1227 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1228 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1229 plus_constant (Pmode,
1230 stack_pointer_rtx,
1231 -size_cfa)));
1234 fp_plus_insns = get_insns ();
1235 end_sequence ();
1237 /************ Method 2: Adjust Stack pointer ************/
1239 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1240 can only handle specific offsets. */
1242 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1244 rtx sp_plus_insns;
1246 start_sequence ();
1248 insn = emit_move_insn (stack_pointer_rtx,
1249 plus_constant (Pmode, stack_pointer_rtx,
1250 -size));
1251 RTX_FRAME_RELATED_P (insn) = 1;
1252 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1253 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1254 plus_constant (Pmode,
1255 stack_pointer_rtx,
1256 -size_cfa)));
1257 if (frame_pointer_needed)
1259 insn = emit_move_insn (fp, stack_pointer_rtx);
1260 RTX_FRAME_RELATED_P (insn) = 1;
1263 sp_plus_insns = get_insns ();
1264 end_sequence ();
1266 /************ Use shortest method ************/
1268 emit_insn (get_sequence_length (sp_plus_insns)
1269 < get_sequence_length (fp_plus_insns)
1270 ? sp_plus_insns
1271 : fp_plus_insns);
1273 else
1275 emit_insn (fp_plus_insns);
1278 cfun->machine->stack_usage += size_cfa;
1279 } /* !minimize && size != 0 */
1280 } /* !minimize */
1284 /* Output function prologue. */
/* Emit RTL for the function prologue.  For interrupt/signal handlers,
   first save SREG, the tmp/zero registers and any RAMP registers, then
   delegate saving of call-saved registers and frame setup to
   avr_prologue_setup_frame.  Naked functions get no prologue at all.  */
1286 void
1287 avr_expand_prologue (void)
1289 HARD_REG_SET set;
1290 HOST_WIDE_INT size;
/* Total stack demand: local frame plus space for outgoing arguments.  */
1292 size = get_frame_size() + avr_outgoing_args_size();
1294 cfun->machine->stack_usage = 0;
1296 /* Prologue: naked. */
1297 if (cfun->machine->is_naked)
1299 return;
1302 avr_regs_to_save (&set);
1304 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1306 /* Enable interrupts. */
1307 if (cfun->machine->is_interrupt)
1308 emit_insn (gen_enable_interrupt ());
1310 /* Push zero reg. */
1311 emit_push_byte (ZERO_REGNO, true);
1313 /* Push tmp reg. */
1314 emit_push_byte (TMP_REGNO, true);
1316 /* Push SREG. */
1317 /* ??? There's no dwarf2 column reserved for SREG. */
1318 emit_push_sfr (sreg_rtx, false, false /* clr */);
1320 /* Clear zero reg. */
1321 emit_move_insn (zero_reg_rtx, const0_rtx);
1323 /* Prevent any attempt to delete the setting of ZERO_REG! */
1324 emit_use (zero_reg_rtx);
1326 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1327 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1329 if (AVR_HAVE_RAMPD)
1330 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1332 if (AVR_HAVE_RAMPX
1333 && TEST_HARD_REG_BIT (set, REG_X)
1334 && TEST_HARD_REG_BIT (set, REG_X + 1))
1336 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1339 if (AVR_HAVE_RAMPY
1340 && (frame_pointer_needed
1341 || (TEST_HARD_REG_BIT (set, REG_Y)
1342 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1344 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1347 if (AVR_HAVE_RAMPZ
1348 && TEST_HARD_REG_BIT (set, REG_Z)
1349 && TEST_HARD_REG_BIT (set, REG_Z + 1))
/* NOTE(review): the clear argument here is AVR_HAVE_RAMPD rather than a
   literal true as for the other RAMP registers -- presumably RAMPZ only
   needs clearing on devices that also have RAMPD; confirm against ISA.  */
1351 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1353 } /* is_interrupt is_signal */
1355 avr_prologue_setup_frame (size, set);
1357 if (flag_stack_usage_info)
1358 current_function_static_stack_size = cfun->machine->stack_usage;
1362 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1363 /* Output summary at end of function prologue. */
/* Writes human-readable comments (prologue kind, frame/stack sizes) into
   the assembler output, plus the .L__stack_usage symbol consumed by
   external stack-analysis tools.  Pure output; no RTL is changed.  */
1365 static void
1366 avr_asm_function_end_prologue (FILE *file)
1368 if (cfun->machine->is_naked)
1370 fputs ("/* prologue: naked */\n", file);
1372 else
1374 if (cfun->machine->is_interrupt)
1376 fputs ("/* prologue: Interrupt */\n", file);
1378 else if (cfun->machine->is_signal)
1380 fputs ("/* prologue: Signal */\n", file);
1382 else
1383 fputs ("/* prologue: function */\n", file);
1386 if (ACCUMULATE_OUTGOING_ARGS)
1387 fprintf (file, "/* outgoing args size = %d */\n",
1388 avr_outgoing_args_size());
1390 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1391 get_frame_size());
1392 fprintf (file, "/* stack size = %d */\n",
1393 cfun->machine->stack_usage);
1394 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1395 usage for offset so that SP + .L__stack_offset = return address. */
/* NOTE(review): the comment above says "Add 1 to stack usage" but the
   code below emits stack_usage unmodified -- confirm which is intended.  */
1396 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1400 /* Implement `EPILOGUE_USES'. */
1403 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1405 if (reload_completed
1406 && cfun->machine
1407 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1408 return 1;
1409 return 0;
1412 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1414 static void
1415 emit_pop_byte (unsigned regno)
1417 rtx mem, reg;
1419 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1420 mem = gen_frame_mem (QImode, mem);
1421 reg = gen_rtx_REG (QImode, regno);
1423 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1426 /* Output RTL epilogue. */
/* Emit RTL for the function epilogue: tear down the stack frame, restore
   call-saved registers, and for interrupt/signal handlers also restore
   RAMPZ/Y/X/D, SREG, tmp and zero registers -- the exact reverse order of
   avr_expand_prologue.  SIBCALL_P true suppresses the final return insn.  */
1428 void
1429 avr_expand_epilogue (bool sibcall_p)
1431 int reg;
1432 int live_seq;
1433 HARD_REG_SET set;
1434 int minimize;
1435 HOST_WIDE_INT size;
1436 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1438 size = get_frame_size() + avr_outgoing_args_size();
1440 /* epilogue: naked */
1441 if (cfun->machine->is_naked)
1443 gcc_assert (!sibcall_p);
1445 emit_jump_insn (gen_return ());
1446 return;
1449 avr_regs_to_save (&set);
1450 live_seq = sequent_regs_live ();
/* With -mcall-prologues a library stub can restore a contiguous run of
   registers more compactly than inline pops; not usable in ISRs or
   OS_task/OS_main functions.  */
1452 minimize = (TARGET_CALL_PROLOGUES
1453 && live_seq
1454 && !isr_p
1455 && !cfun->machine->is_OS_task
1456 && !cfun->machine->is_OS_main);
1458 if (minimize
1459 && (live_seq > 4
1460 || frame_pointer_needed
1461 || size))
1463 /* Get rid of frame. */
1465 if (!frame_pointer_needed)
1467 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1470 if (size)
1472 emit_move_insn (frame_pointer_rtx,
1473 plus_constant (Pmode, frame_pointer_rtx, size));
1476 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1477 return;
1480 if (size)
1482 /* Try two methods to adjust stack and select shortest. */
1484 int irq_state = -1;
1485 rtx fp, my_fp;
1486 rtx fp_plus_insns;
1487 HOST_WIDE_INT size_max;
1489 gcc_assert (frame_pointer_needed
1490 || !isr_p
1491 || !crtl->is_leaf);
/* Use the frame pointer if we have one, else scratch through X (X is
   call-clobbered, and in a non-leaf ISR it has already been saved).  */
1493 fp = my_fp = (frame_pointer_needed
1494 ? frame_pointer_rtx
1495 : gen_rtx_REG (Pmode, REG_X));
1497 if (AVR_HAVE_8BIT_SP)
1499 /* The high byte (r29) does not change:
1500 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1502 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1505 /* For rationale see comment in prologue generation. */
1507 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1508 if (size > size_max)
1509 size = size_max;
1510 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1512 /********** Method 1: Adjust fp register **********/
1514 start_sequence ();
1516 if (!frame_pointer_needed)
1517 emit_move_insn (fp, stack_pointer_rtx);
1519 emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1521 /* Copy to stack pointer. */
1523 if (TARGET_NO_INTERRUPTS)
1524 irq_state = 0;
1526 if (AVR_HAVE_8BIT_SP)
1527 irq_state = 2;
1529 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1530 GEN_INT (irq_state)));
1532 fp_plus_insns = get_insns ();
1533 end_sequence ();
1535 /********** Method 2: Adjust Stack pointer **********/
/* Direct SP adjustment only handles the small offsets accepted by
   avr_sp_immediate_operand.  */
1537 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1539 rtx sp_plus_insns;
1541 start_sequence ();
1543 emit_move_insn (stack_pointer_rtx,
1544 plus_constant (Pmode, stack_pointer_rtx, size));
1546 sp_plus_insns = get_insns ();
1547 end_sequence ();
1549 /************ Use shortest method ************/
1551 emit_insn (get_sequence_length (sp_plus_insns)
1552 < get_sequence_length (fp_plus_insns)
1553 ? sp_plus_insns
1554 : fp_plus_insns);
1556 else
1557 emit_insn (fp_plus_insns);
1558 } /* size != 0 */
1560 if (frame_pointer_needed
1561 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1563 /* Restore previous frame_pointer. See avr_expand_prologue for
1564 rationale for not using pophi. */
1566 emit_pop_byte (REG_Y + 1);
1567 emit_pop_byte (REG_Y);
1570 /* Restore used registers. */
1572 for (reg = 31; reg >= 0; --reg)
1573 if (TEST_HARD_REG_BIT (set, reg))
1574 emit_pop_byte (reg);
1576 if (isr_p)
1578 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1579 The conditions to restore them must be the same as in prologue. */
1581 if (AVR_HAVE_RAMPZ
1582 && TEST_HARD_REG_BIT (set, REG_Z)
1583 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1585 emit_pop_byte (TMP_REGNO);
1586 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1589 if (AVR_HAVE_RAMPY
1590 && (frame_pointer_needed
1591 || (TEST_HARD_REG_BIT (set, REG_Y)
1592 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1594 emit_pop_byte (TMP_REGNO);
1595 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1598 if (AVR_HAVE_RAMPX
1599 && TEST_HARD_REG_BIT (set, REG_X)
1600 && TEST_HARD_REG_BIT (set, REG_X + 1))
1602 emit_pop_byte (TMP_REGNO);
1603 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1606 if (AVR_HAVE_RAMPD)
1608 emit_pop_byte (TMP_REGNO);
1609 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1612 /* Restore SREG using tmp_reg as scratch. */
1614 emit_pop_byte (TMP_REGNO);
1615 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1617 /* Restore tmp REG. */
1618 emit_pop_byte (TMP_REGNO);
1620 /* Restore zero REG. */
1621 emit_pop_byte (ZERO_REGNO);
1624 if (!sibcall_p)
1625 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Emit a marker comment into the assembler dump so the epilogue is easy
   to spot when reading the generated code.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1638 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1640 static bool
1641 avr_cannot_modify_jumps_p (void)
1644 /* Naked Functions must not have any instructions after
1645 their epilogue, see PR42240 */
1647 if (reload_completed
1648 && cfun->machine
1649 && cfun->machine->is_naked)
1651 return true;
1654 return false;
1658 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
/* Claim every non-generic address-space address is mode-dependent; see
   the FIXME below for why this deliberate over-approximation exists.  */
1660 static bool
1661 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1663 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1664 This hook just serves to hack around PR rtl-optimization/52543 by
1665 claiming that non-generic addresses were mode-dependent so that
1666 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1667 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1668 generic address space which is not true. */
1670 return !ADDR_SPACE_GENERIC_P (as);
1674 /* Helper function for `avr_legitimate_address_p'. */
/* Return true if REG may serve as a base register in address space AS
   within an address of outer code OUTER_CODE.  With !STRICT, any pseudo
   (not-yet-allocated) register is also acceptable.  */
1676 static inline bool
1677 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1678 RTX_CODE outer_code, bool strict)
1680 return (REG_P (reg)
1681 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1682 as, outer_code, UNKNOWN)
1683 || (!strict
1684 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1688 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1689 machine for a memory operand of mode MODE. */
/* Accepted forms: plain REG, POST_INC/PRE_DEC of a pointer register, and
   REG + small nonnegative constant within the LD/ST displacement range.
   Bare constants are accepted via the CONSTANT_ADDRESS_P default.  */
1691 static bool
1692 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1694 bool ok = CONSTANT_ADDRESS_P (x);
1696 switch (GET_CODE (x))
1698 case REG:
1699 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1700 MEM, strict);
/* X (r26/r27) has no displacement addressing, so wide accesses through
   it are rejected under strict checking.  */
1702 if (strict
1703 && GET_MODE_SIZE (mode) > 4
1704 && REG_X == REGNO (x))
1706 ok = false;
1708 break;
1710 case POST_INC:
1711 case PRE_DEC:
1712 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1713 GET_CODE (x), strict);
1714 break;
1716 case PLUS:
1718 rtx reg = XEXP (x, 0);
1719 rtx op1 = XEXP (x, 1);
1721 if (REG_P (reg)
1722 && CONST_INT_P (op1)
1723 && INTVAL (op1) >= 0)
1725 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1727 if (fit)
1729 ok = (! strict
1730 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1731 PLUS, strict));
/* Frame/arg pointer bases are always fine; elimination will
   rewrite them later.  */
1733 if (reg == frame_pointer_rtx
1734 || reg == arg_pointer_rtx)
1736 ok = true;
1739 else if (frame_pointer_needed
1740 && reg == frame_pointer_rtx)
1742 ok = true;
1746 break;
1748 default:
1749 break;
/* Optional debug dump controlled by -mlog=legitimate_address_p.  */
1752 if (avr_log.legitimate_address_p)
1754 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1755 "reload_completed=%d reload_in_progress=%d %s:",
1756 ok, mode, strict, reload_completed, reload_in_progress,
1757 reg_renumber ? "(reg_renumber)" : "");
1759 if (GET_CODE (x) == PLUS
1760 && REG_P (XEXP (x, 0))
1761 && CONST_INT_P (XEXP (x, 1))
1762 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1763 && reg_renumber)
1765 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1766 true_regnum (XEXP (x, 0)));
1769 avr_edump ("\n%r\n", x);
1772 return ok;
1776 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1777 now only a helper for avr_addr_space_legitimize_address. */
1778 /* Attempts to replace X with a valid
1779 memory address for an operand of mode MODE */
/* Strategy: force reg+reg sums and reg+big-constant sums (too large for a
   displacement, and not frame-pointer based) into a fresh register;
   everything else is returned unchanged.  */
1781 static rtx
1782 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1784 bool big_offset_p = false;
1786 x = oldx;
1788 if (GET_CODE (oldx) == PLUS
1789 && REG_P (XEXP (oldx, 0)))
1791 if (REG_P (XEXP (oldx, 1)))
1792 x = force_reg (GET_MODE (oldx), oldx);
1793 else if (CONST_INT_P (XEXP (oldx, 1)))
1795 int offs = INTVAL (XEXP (oldx, 1));
1796 if (frame_pointer_rtx != XEXP (oldx, 0)
1797 && offs > MAX_LD_OFFSET (mode))
1799 big_offset_p = true;
1800 x = force_reg (GET_MODE (oldx), oldx);
/* Debug dump controlled by -mlog=legitimize_address.  */
1805 if (avr_log.legitimize_address)
1807 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1809 if (x != oldx)
1810 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1813 return x;
1817 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1818 /* This will allow register R26/27 to be used where it is no worse than normal
1819 base pointers R28/29 or R30/31. For example, if base offset is greater
1820 than 63 bytes or for R++ or --R addressing. */
/* Returns X when a reload was pushed for it, NULL_RTX to fall back to the
   generic reload machinery.  MK_MEMLOC builds a stack-slot MEM for a
   register that lives in memory (has a reg_equiv_address).  */
1823 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1824 int opnum, int type, int addr_type,
1825 int ind_levels ATTRIBUTE_UNUSED,
1826 rtx (*mk_memloc)(rtx,int))
1828 rtx x = *px;
1830 if (avr_log.legitimize_reload_address)
1831 avr_edump ("\n%?:%m %r\n", mode, x);
/* R++ / --R addressing: reload the base into a pointer register.  */
1833 if (1 && (GET_CODE (x) == POST_INC
1834 || GET_CODE (x) == PRE_DEC))
1836 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1837 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1838 opnum, RELOAD_OTHER);
1840 if (avr_log.legitimize_reload_address)
1841 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1842 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1844 return x;
/* Base + positive constant offset.  */
1847 if (GET_CODE (x) == PLUS
1848 && REG_P (XEXP (x, 0))
1849 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1850 && CONST_INT_P (XEXP (x, 1))
1851 && INTVAL (XEXP (x, 1)) >= 1)
1853 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1855 if (fit)
1857 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
/* Base register lives in memory: first reload the address of its
   stack slot, then the slot contents into a base pointer.  */
1859 int regno = REGNO (XEXP (x, 0));
1860 rtx mem = mk_memloc (x, regno);
1862 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1863 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1864 1, (enum reload_type) addr_type);
1866 if (avr_log.legitimize_reload_address)
1867 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1868 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1870 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1871 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1872 opnum, (enum reload_type) type);
1874 if (avr_log.legitimize_reload_address)
1875 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1876 BASE_POINTER_REGS, mem, NULL_RTX);
1878 return x;
/* Offset too large for displacement addressing: reload the whole sum
   unless it is frame-pointer based (elimination handles that).  */
1881 else if (! (frame_pointer_needed
1882 && XEXP (x, 0) == frame_pointer_rtx))
1884 push_reload (x, NULL_RTX, px, NULL,
1885 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1886 opnum, (enum reload_type) type);
1888 if (avr_log.legitimize_reload_address)
1889 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1890 POINTER_REGS, x, NULL_RTX);
1892 return x;
1896 return NULL_RTX;
1900 /* Implement `TARGET_SECONDARY_RELOAD' */
/* Input reloads from non-generic, non-MEMX address spaces need a special
   reload insn per mode (which supplies a d-class scratch register); no
   extra reload register class is required, hence NO_REGS.  */
1902 static reg_class_t
1903 avr_secondary_reload (bool in_p, rtx x,
1904 reg_class_t reload_class ATTRIBUTE_UNUSED,
1905 enum machine_mode mode, secondary_reload_info *sri)
1907 if (in_p
1908 && MEM_P (x)
1909 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1910 && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
1912 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1914 switch (mode)
1916 default:
1917 gcc_unreachable();
1919 case QImode: sri->icode = CODE_FOR_reload_inqi; break;
1920 case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
1921 case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
1923 case HImode: sri->icode = CODE_FOR_reload_inhi; break;
1924 case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
1925 case HAmode: sri->icode = CODE_FOR_reload_inha; break;
1926 case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
1927 case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
1929 case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
1931 case SImode: sri->icode = CODE_FOR_reload_insi; break;
1932 case SFmode: sri->icode = CODE_FOR_reload_insf; break;
1933 case SQmode: sri->icode = CODE_FOR_reload_insq; break;
1934 case SAmode: sri->icode = CODE_FOR_reload_insa; break;
1935 case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
1936 case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
1940 return NO_REGS;
1944 /* Helper function to print assembler resp. track instruction
1945 sequence lengths. Always return "".
1947 If PLEN == NULL:
1948 Output assembler code from template TPL with operands supplied
1949 by OPERANDS. This is just forwarding to output_asm_insn.
1951 If PLEN != NULL:
1952 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1953 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1954 Don't output anything.
1957 static const char*
1958 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1960 if (NULL == plen)
1962 output_asm_insn (tpl, operands);
1964 else
1966 if (n_words < 0)
1967 *plen = -n_words;
1968 else
1969 *plen += n_words;
1972 return "";
1976 /* Return a pointer register name as a string. */
1978 static const char*
1979 ptrreg_to_str (int regno)
1981 switch (regno)
1983 case REG_X: return "X";
1984 case REG_Y: return "Y";
1985 case REG_Z: return "Z";
1986 default:
1987 output_operand_lossage ("address operand requires constraint for"
1988 " X, Y, or Z register");
1990 return NULL;
1993 /* Return the condition name as a string.
1994 Used in conditional jump constructing */
/* Maps an RTX comparison code to the AVR branch-condition mnemonic
   suffix.  For signed GE/LT the choice between pl/mi and ge/lt depends
   on whether the previous insn left the V flag unusable.  */
1996 static const char*
1997 cond_string (enum rtx_code code)
1999 switch (code)
2001 case NE:
2002 return "ne";
2003 case EQ:
2004 return "eq";
2005 case GE:
2006 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2007 return "pl";
2008 else
2009 return "ge";
2010 case LT:
2011 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2012 return "mi";
2013 else
2014 return "lt";
2015 case GEU:
2016 return "sh";
2017 case LTU:
2018 return "lo";
2019 default:
2020 gcc_unreachable ();
2023 return "";
2027 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2028 /* Output ADDR to FILE as address. */
2030 static void
2031 avr_print_operand_address (FILE *file, rtx addr)
2033 switch (GET_CODE (addr))
2035 case REG:
2036 fprintf (file, ptrreg_to_str (REGNO (addr)));
2037 break;
2039 case PRE_DEC:
2040 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2041 break;
2043 case POST_INC:
2044 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2045 break;
2047 default:
2048 if (CONSTANT_ADDRESS_P (addr)
2049 && text_segment_operand (addr, VOIDmode))
2051 rtx x = addr;
2052 if (GET_CODE (x) == CONST)
2053 x = XEXP (x, 0);
2054 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2056 /* Assembler gs() will implant word address. Make offset
2057 a byte offset inside gs() for assembler. This is
2058 needed because the more logical (constant+gs(sym)) is not
2059 accepted by gas. For 128K and smaller devices this is ok.
2060 For large devices it will create a trampoline to offset
2061 from symbol which may not be what the user really wanted. */
2063 fprintf (file, "gs(");
2064 output_addr_const (file, XEXP (x,0));
2065 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2066 2 * INTVAL (XEXP (x, 1)));
2067 if (AVR_3_BYTE_PC)
2068 if (warning (0, "pointer offset from symbol maybe incorrect"))
2070 output_addr_const (stderr, addr);
2071 fprintf(stderr,"\n");
2074 else
2076 fprintf (file, "gs(");
2077 output_addr_const (file, addr);
2078 fprintf (file, ")");
2081 else
2082 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* The only punctuation codes used by avr.md are `~' and `!'.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;
    default:
      return false;
    }
}
2096 /* Implement `TARGET_PRINT_OPERAND'. */
2097 /* Output X as assembler operand to file FILE.
2098 For a description of supported %-codes, see top of avr.md. */
2100 static void
2101 avr_print_operand (FILE *file, rtx x, int code)
/* Codes 'A'..'D' select byte 0..3 of a multi-byte operand.  */
2103 int abcd = 0;
2105 if (code >= 'A' && code <= 'D')
2106 abcd = code - 'A';
2108 if (code == '~')
2110 if (!AVR_HAVE_JMP_CALL)
2111 fputc ('r', file);
2113 else if (code == '!')
2115 if (AVR_HAVE_EIJMP_EICALL)
2116 fputc ('e', file);
2118 else if (code == 't'
2119 || code == 'T')
/* %T remembers a register operand; the following %T/%t with a
   const_int prints "reg_of_bit[,bitpos]".  State is kept across the
   two calls in function-local statics.  */
2121 static int t_regno = -1;
2122 static int t_nbits = -1;
2124 if (REG_P (x) && t_regno < 0 && code == 'T')
2126 t_regno = REGNO (x);
2127 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2129 else if (CONST_INT_P (x) && t_regno >= 0
2130 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2132 int bpos = INTVAL (x);
2134 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2135 if (code == 'T')
2136 fprintf (file, ",%d", bpos % 8);
2138 t_regno = -1;
2140 else
2141 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2143 else if (REG_P (x))
2145 if (x == zero_reg_rtx)
2146 fprintf (file, "__zero_reg__");
2147 else if (code == 'r' && REGNO (x) < 32)
2148 fprintf (file, "%d", (int) REGNO (x));
2149 else
/* NOTE(review): reg_names[] entry used directly as fprintf format
   string; safe only while register names contain no '%'.  Consider
   fputs here, as done elsewhere in GCC.  */
2150 fprintf (file, reg_names[REGNO (x) + abcd]);
2152 else if (CONST_INT_P (x))
2154 HOST_WIDE_INT ival = INTVAL (x);
2156 if ('i' != code)
2157 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
/* %i: print a constant as an I/O address, using the symbolic SFR
   name where one is known.  */
2158 else if (low_io_address_operand (x, VOIDmode)
2159 || high_io_address_operand (x, VOIDmode))
2161 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2162 fprintf (file, "__RAMPZ__");
2163 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2164 fprintf (file, "__RAMPY__");
2165 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2166 fprintf (file, "__RAMPX__");
2167 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2168 fprintf (file, "__RAMPD__");
2169 else if (AVR_XMEGA && ival == avr_addr.ccp)
2170 fprintf (file, "__CCP__");
2171 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2172 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2173 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2174 else
2176 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2177 ival - avr_current_arch->sfr_offset);
2180 else
2181 fatal_insn ("bad address, not an I/O address:", x);
2183 else if (MEM_P (x))
2185 rtx addr = XEXP (x, 0);
2187 if (code == 'm')
2189 if (!CONSTANT_P (addr))
2190 fatal_insn ("bad address, not a constant:", addr);
2191 /* Assembler template with m-code is data - not progmem section */
2192 if (text_segment_operand (addr, VOIDmode))
2193 if (warning (0, "accessing data memory with"
2194 " program memory address"))
2196 output_addr_const (stderr, addr);
2197 fprintf(stderr,"\n");
2199 output_addr_const (file, addr);
2201 else if (code == 'i')
2203 avr_print_operand (file, addr, 'i');
2205 else if (code == 'o')
2207 if (GET_CODE (addr) != PLUS)
2208 fatal_insn ("bad address, not (reg+disp):", addr);
2210 avr_print_operand (file, XEXP (addr, 1), 0);
2212 else if (code == 'p' || code == 'r')
2214 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2215 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2217 if (code == 'p')
2218 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2219 else
2220 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2222 else if (GET_CODE (addr) == PLUS)
2224 avr_print_operand_address (file, XEXP (addr,0));
2225 if (REGNO (XEXP (addr, 0)) == REG_X)
2226 fatal_insn ("internal compiler error. Bad address:"
2227 ,addr);
2228 fputc ('+', file);
2229 avr_print_operand (file, XEXP (addr,1), code);
2231 else
2232 avr_print_operand_address (file, addr);
2234 else if (code == 'i')
2236 fatal_insn ("bad address, not an I/O address:", x);
2238 else if (code == 'x')
2240 /* Constant progmem address - like used in jmp or call */
2241 if (0 == text_segment_operand (x, VOIDmode))
2242 if (warning (0, "accessing program memory"
2243 " with data memory address"))
2245 output_addr_const (stderr, x);
2246 fprintf(stderr,"\n");
2248 /* Use normal symbol for direct address no linker trampoline needed */
2249 output_addr_const (file, x);
2251 else if (CONST_FIXED_P (x))
2253 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2254 if (code != 0)
2255 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2256 code);
2257 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2259 else if (GET_CODE (x) == CONST_DOUBLE)
2261 long val;
2262 REAL_VALUE_TYPE rv;
2263 if (GET_MODE (x) != SFmode)
2264 fatal_insn ("internal compiler error. Unknown mode:", x);
2265 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2266 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2267 fprintf (file, "0x%lx", val);
2269 else if (GET_CODE (x) == CONST_STRING)
2270 fputs (XSTR (x, 0), file);
2271 else if (code == 'j')
2272 fputs (cond_string (GET_CODE (x)), file);
2273 else if (code == 'k')
2274 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2275 else
2276 avr_print_operand_address (file, x);
2280 /* Worker function for `NOTICE_UPDATE_CC'. */
2281 /* Update the condition code in the INSN. */
/* Maps INSN's "cc" attribute to the effect on cc_status.  CC_PLUS and
   CC_LDI are resolved first to a concrete CC value by inspecting the
   actual operands.  */
2283 void
2284 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2286 rtx set;
2287 enum attr_cc cc = get_attr_cc (insn);
2289 switch (cc)
2291 default:
2292 break;
2294 case CC_PLUS:
2295 case CC_LDI:
2297 rtx *op = recog_data.operand;
2298 int len_dummy, icc;
2300 /* Extract insn's operands. */
2301 extract_constrain_insn_cached (insn);
2303 switch (cc)
2305 default:
2306 gcc_unreachable();
2308 case CC_PLUS:
/* avr_out_plus computes the effective CC as a side result.  */
2309 avr_out_plus (insn, op, &len_dummy, &icc);
2310 cc = (enum attr_cc) icc;
2311 break;
2313 case CC_LDI:
2315 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2316 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2317 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2318 ? CC_CLOBBER
2319 /* Any other "r,rL" combination does not alter cc0. */
2320 : CC_NONE;
2322 break;
2323 } /* inner switch */
2325 break;
2327 } /* outer switch */
2329 switch (cc)
2331 default:
2332 /* Special values like CC_OUT_PLUS from above have been
2333 mapped to "standard" CC_* values so we never come here. */
2335 gcc_unreachable();
2336 break;
2338 case CC_NONE:
2339 /* Insn does not affect CC at all. */
2340 break;
2342 case CC_SET_N:
2343 CC_STATUS_INIT;
2344 break;
2346 case CC_SET_ZN:
2347 set = single_set (insn);
2348 CC_STATUS_INIT;
2349 if (set)
2351 cc_status.flags |= CC_NO_OVERFLOW;
2352 cc_status.value1 = SET_DEST (set);
2354 break;
2356 case CC_SET_CZN:
2357 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2358 The V flag may or may not be known but that's ok because
2359 alter_cond will change tests to use EQ/NE. */
2360 set = single_set (insn);
2361 CC_STATUS_INIT;
2362 if (set)
2364 cc_status.value1 = SET_DEST (set);
2365 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2367 break;
2369 case CC_COMPARE:
2370 set = single_set (insn);
2371 CC_STATUS_INIT;
2372 if (set)
2373 cc_status.value1 = SET_SRC (set);
2374 break;
2376 case CC_CLOBBER:
2377 /* Insn doesn't leave CC in a usable state. */
2378 CC_STATUS_INIT;
2379 break;
2383 /* Choose mode for jump insn:
2384 1 - relative jump in range -63 <= x <= 62 ;
2385 2 - relative jump in range -2046 <= x <= 2045 ;
2386 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a label_ref or label), INSN the jump itself;
   distances come from the insn-length pass (INSN_ADDRESSES).  */
2389 avr_jump_mode (rtx x, rtx insn)
2391 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2392 ? XEXP (x, 0) : x));
2393 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2394 int jump_distance = cur_addr - dest_addr;
2396 if (-63 <= jump_distance && jump_distance <= 62)
2397 return 1;
2398 else if (-2046 <= jump_distance && jump_distance <= 2045)
2399 return 2;
2400 else if (AVR_HAVE_JMP_CALL)
2401 return 3;
/* No JMP on this device: fall back to RJMP and hope the linker's
   wrap-around covers the distance.  */
2403 return 2;
2406 /* Return an AVR condition jump commands.
2407 X is a comparison RTX.
2408 LEN is a number returned by avr_jump_mode function.
2409 If REVERSE nonzero then condition code in X must be reversed. */
/* GT/GTU/LE/LEU have no single branch insn on AVR, so they are composed
   from BREQ plus a signed/unsigned branch; for LEN 2 and 3 the sense is
   inverted and a branch-over-RJMP/JMP sequence is emitted instead.  */
2411 const char*
2412 ret_cond_branch (rtx x, int len, int reverse)
2414 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2416 switch (cond)
2418 case GT:
2419 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2420 return (len == 1 ? ("breq .+2" CR_TAB
2421 "brpl %0") :
2422 len == 2 ? ("breq .+4" CR_TAB
2423 "brmi .+2" CR_TAB
2424 "rjmp %0") :
2425 ("breq .+6" CR_TAB
2426 "brmi .+4" CR_TAB
2427 "jmp %0"));
2429 else
2430 return (len == 1 ? ("breq .+2" CR_TAB
2431 "brge %0") :
2432 len == 2 ? ("breq .+4" CR_TAB
2433 "brlt .+2" CR_TAB
2434 "rjmp %0") :
2435 ("breq .+6" CR_TAB
2436 "brlt .+4" CR_TAB
2437 "jmp %0"));
2438 case GTU:
2439 return (len == 1 ? ("breq .+2" CR_TAB
2440 "brsh %0") :
2441 len == 2 ? ("breq .+4" CR_TAB
2442 "brlo .+2" CR_TAB
2443 "rjmp %0") :
2444 ("breq .+6" CR_TAB
2445 "brlo .+4" CR_TAB
2446 "jmp %0"));
2447 case LE:
2448 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2449 return (len == 1 ? ("breq %0" CR_TAB
2450 "brmi %0") :
2451 len == 2 ? ("breq .+2" CR_TAB
2452 "brpl .+2" CR_TAB
2453 "rjmp %0") :
2454 ("breq .+2" CR_TAB
2455 "brpl .+4" CR_TAB
2456 "jmp %0"));
2457 else
2458 return (len == 1 ? ("breq %0" CR_TAB
2459 "brlt %0") :
2460 len == 2 ? ("breq .+2" CR_TAB
2461 "brge .+2" CR_TAB
2462 "rjmp %0") :
2463 ("breq .+2" CR_TAB
2464 "brge .+4" CR_TAB
2465 "jmp %0"));
2466 case LEU:
2467 return (len == 1 ? ("breq %0" CR_TAB
2468 "brlo %0") :
2469 len == 2 ? ("breq .+2" CR_TAB
2470 "brsh .+2" CR_TAB
2471 "rjmp %0") :
2472 ("breq .+2" CR_TAB
2473 "brsh .+4" CR_TAB
2474 "jmp %0"));
2475 default:
/* Simple conditions map directly to br%j1 (condition) or br%k1
   (reversed condition) templates.  */
2476 if (reverse)
2478 switch (len)
2480 case 1:
2481 return "br%k1 %0";
2482 case 2:
2483 return ("br%j1 .+2" CR_TAB
2484 "rjmp %0");
2485 default:
2486 return ("br%j1 .+4" CR_TAB
2487 "jmp %0");
2490 else
2492 switch (len)
2494 case 1:
2495 return "br%j1 %0";
2496 case 2:
2497 return ("br%k1 .+2" CR_TAB
2498 "rjmp %0");
2499 default:
2500 return ("br%k1 .+4" CR_TAB
2501 "jmp %0");
2505 return "";
2509 /* Worker function for `FINAL_PRESCAN_INSN'. */
2510 /* Output insn cost for next insn. */
2512 void
2513 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2514 int num_operands ATTRIBUTE_UNUSED)
2516 if (avr_log.rtx_costs)
2518 rtx set = single_set (insn);
2520 if (set)
2521 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2522 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2523 else
2524 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2525 rtx_cost (PATTERN (insn), INSN, 0,
2526 optimize_insn_for_speed_p()));
2530 /* Return 0 if undefined, 1 if always true or always false. */
2533 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2535 unsigned int max = (mode == QImode ? 0xff :
2536 mode == HImode ? 0xffff :
2537 mode == PSImode ? 0xffffff :
2538 mode == SImode ? 0xffffffff : 0);
2539 if (max && op && CONST_INT_P (x))
2541 if (unsigned_condition (op) != op)
2542 max >>= 1;
2544 if (max != (INTVAL (x) & max)
2545 && INTVAL (x) != 0xff)
2546 return 1;
2548 return 0;
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Return nonzero if register number R is one of the hard registers in
   which function arguments are sometimes passed (R8 .. R25 on AVR).  */

int
avr_function_arg_regno_p (int r)
{
  return r >= 8 && r <= 25;
}
2563 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2564 /* Initializing the variable cum for the state at the beginning
2565 of the argument list. */
2567 void
2568 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2569 tree fndecl ATTRIBUTE_UNUSED)
2571 cum->nregs = 18;
2572 cum->regno = FIRST_CUM_REG;
2573 if (!libname && stdarg_p (fntype))
2574 cum->nregs = 0;
2576 /* Assume the calle may be tail called */
2578 cfun->machine->sibcall_fails = 0;
2581 /* Returns the number of registers to allocate for a function argument. */
2583 static int
2584 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2586 int size;
2588 if (mode == BLKmode)
2589 size = int_size_in_bytes (type);
2590 else
2591 size = GET_MODE_SIZE (mode);
2593 /* Align all function arguments to start in even-numbered registers.
2594 Odd-sized arguments leave holes above them. */
2596 return (size + 1) & ~1;
2600 /* Implement `TARGET_FUNCTION_ARG'. */
2601 /* Controls whether a function argument is passed
2602 in a register, and which register. */
2604 static rtx
2605 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2606 const_tree type, bool named ATTRIBUTE_UNUSED)
2608 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2609 int bytes = avr_num_arg_regs (mode, type);
2611 if (cum->nregs && bytes <= cum->nregs)
2612 return gen_rtx_REG (mode, cum->regno - bytes);
2614 return NULL_RTX;
2618 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2619 /* Update the summarizer variable CUM to advance past an argument
2620 in the argument list. */
2622 static void
2623 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2624 const_tree type, bool named ATTRIBUTE_UNUSED)
2626 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2627 int bytes = avr_num_arg_regs (mode, type);
2629 cum->nregs -= bytes;
2630 cum->regno -= bytes;
2632 /* A parameter is being passed in a call-saved register. As the original
2633 contents of these regs has to be restored before leaving the function,
2634 a function must not pass arguments in call-saved regs in order to get
2635 tail-called. */
2637 if (cum->regno >= 8
2638 && cum->nregs >= 0
2639 && !call_used_regs[cum->regno])
2641 /* FIXME: We ship info on failing tail-call in struct machine_function.
2642 This uses internals of calls.c:expand_call() and the way args_so_far
2643 is used. targetm.function_ok_for_sibcall() needs to be extended to
2644 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2645 dependent so that such an extension is not wanted. */
2647 cfun->machine->sibcall_fails = 1;
2650 /* Test if all registers needed by the ABI are actually available. If the
2651 user has fixed a GPR needed to pass an argument, an (implicit) function
2652 call will clobber that fixed register. See PR45099 for an example. */
2654 if (cum->regno >= 8
2655 && cum->nregs >= 0)
2657 int regno;
2659 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2660 if (fixed_regs[regno])
2661 warning (0, "fixed register %s used to pass parameter to function",
2662 reg_names[regno]);
2665 if (cum->nregs <= 0)
2667 cum->nregs = 0;
2668 cum->regno = FIRST_CUM_REG;
2672 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2673 /* Decide whether we can make a sibling call to a function. DECL is the
2674 declaration of the function being targeted by the call and EXP is the
2675 CALL_EXPR representing the call. */
2677 static bool
2678 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2680 tree fntype_callee;
2682 /* Tail-calling must fail if callee-saved regs are used to pass
2683 function args. We must not tail-call when `epilogue_restores'
2684 is used. Unfortunately, we cannot tell at this point if that
2685 actually will happen or not, and we cannot step back from
2686 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2688 if (cfun->machine->sibcall_fails
2689 || TARGET_CALL_PROLOGUES)
2691 return false;
2694 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2696 if (decl_callee)
2698 decl_callee = TREE_TYPE (decl_callee);
2700 else
2702 decl_callee = fntype_callee;
2704 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2705 && METHOD_TYPE != TREE_CODE (decl_callee))
2707 decl_callee = TREE_TYPE (decl_callee);
2711 /* Ensure that caller and callee have compatible epilogues */
2713 if (cfun->machine->is_interrupt
2714 || cfun->machine->is_signal
2715 || cfun->machine->is_naked
2716 || avr_naked_function_p (decl_callee)
2717 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2718 || (avr_OS_task_function_p (decl_callee)
2719 != cfun->machine->is_OS_task)
2720 || (avr_OS_main_function_p (decl_callee)
2721 != cfun->machine->is_OS_main))
2723 return false;
2726 return true;
2729 /***********************************************************************
2730 Functions for outputting various mov's for a various modes
2731 ************************************************************************/
2733 /* Return true if a value of mode MODE is read from flash by
2734 __load_* function from libgcc. */
2736 bool
2737 avr_load_libgcc_p (rtx op)
2739 enum machine_mode mode = GET_MODE (op);
2740 int n_bytes = GET_MODE_SIZE (mode);
2742 return (n_bytes > 2
2743 && !AVR_HAVE_LPMX
2744 && avr_mem_flash_p (op));
2747 /* Return true if a value of mode MODE is read by __xload_* function. */
2749 bool
2750 avr_xload_libgcc_p (enum machine_mode mode)
2752 int n_bytes = GET_MODE_SIZE (mode);
2754 return (n_bytes > 1
2755 || avr_current_device->n_flash > 1);
2759 /* Fixme: This is a hack because secondary reloads don't works as expected.
2761 Find an unused d-register to be used as scratch in INSN.
2762 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2763 is a register, skip all possible return values that overlap EXCLUDE.
2764 The policy for the returned register is similar to that of
2765 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2766 of INSN.
2768 Return a QImode d-register or NULL_RTX if nothing found. */
2770 static rtx
2771 avr_find_unused_d_reg (rtx insn, rtx exclude)
2773 int regno;
2774 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2775 || avr_signal_function_p (current_function_decl));
2777 for (regno = 16; regno < 32; regno++)
2779 rtx reg = all_regs_rtx[regno];
2781 if ((exclude
2782 && reg_overlap_mentioned_p (exclude, reg))
2783 || fixed_regs[regno])
2785 continue;
2788 /* Try non-live register */
2790 if (!df_regs_ever_live_p (regno)
2791 && (TREE_THIS_VOLATILE (current_function_decl)
2792 || cfun->machine->is_OS_task
2793 || cfun->machine->is_OS_main
2794 || (!isr_p && call_used_regs[regno])))
2796 return reg;
2799 /* Any live register can be used if it is unused after.
2800 Prologue/epilogue will care for it as needed. */
2802 if (df_regs_ever_live_p (regno)
2803 && reg_unused_after (insn, reg))
2805 return reg;
2809 return NULL_RTX;
2813 /* Helper function for the next function in the case where only restricted
2814 version of LPM instruction is available. */
2816 static const char*
2817 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2819 rtx dest = xop[0];
2820 rtx addr = xop[1];
2821 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2822 int regno_dest;
2824 regno_dest = REGNO (dest);
2826 /* The implicit target register of LPM. */
2827 xop[3] = lpm_reg_rtx;
2829 switch (GET_CODE (addr))
2831 default:
2832 gcc_unreachable();
2834 case REG:
2836 gcc_assert (REG_Z == REGNO (addr));
2838 switch (n_bytes)
2840 default:
2841 gcc_unreachable();
2843 case 1:
2844 avr_asm_len ("%4lpm", xop, plen, 1);
2846 if (regno_dest != LPM_REGNO)
2847 avr_asm_len ("mov %0,%3", xop, plen, 1);
2849 return "";
2851 case 2:
2852 if (REGNO (dest) == REG_Z)
2853 return avr_asm_len ("%4lpm" CR_TAB
2854 "push %3" CR_TAB
2855 "adiw %2,1" CR_TAB
2856 "%4lpm" CR_TAB
2857 "mov %B0,%3" CR_TAB
2858 "pop %A0", xop, plen, 6);
2860 avr_asm_len ("%4lpm" CR_TAB
2861 "mov %A0,%3" CR_TAB
2862 "adiw %2,1" CR_TAB
2863 "%4lpm" CR_TAB
2864 "mov %B0,%3", xop, plen, 5);
2866 if (!reg_unused_after (insn, addr))
2867 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2869 break; /* 2 */
2872 break; /* REG */
2874 case POST_INC:
2876 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2877 && n_bytes <= 4);
2879 if (regno_dest == LPM_REGNO)
2880 avr_asm_len ("%4lpm" CR_TAB
2881 "adiw %2,1", xop, plen, 2);
2882 else
2883 avr_asm_len ("%4lpm" CR_TAB
2884 "mov %A0,%3" CR_TAB
2885 "adiw %2,1", xop, plen, 3);
2887 if (n_bytes >= 2)
2888 avr_asm_len ("%4lpm" CR_TAB
2889 "mov %B0,%3" CR_TAB
2890 "adiw %2,1", xop, plen, 3);
2892 if (n_bytes >= 3)
2893 avr_asm_len ("%4lpm" CR_TAB
2894 "mov %C0,%3" CR_TAB
2895 "adiw %2,1", xop, plen, 3);
2897 if (n_bytes >= 4)
2898 avr_asm_len ("%4lpm" CR_TAB
2899 "mov %D0,%3" CR_TAB
2900 "adiw %2,1", xop, plen, 3);
2902 break; /* POST_INC */
2904 } /* switch CODE (addr) */
2906 return "";
2910 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2911 OP[1] in AS1 to register OP[0].
2912 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2913 Return "". */
2915 const char*
2916 avr_out_lpm (rtx insn, rtx *op, int *plen)
2918 rtx xop[7];
2919 rtx dest = op[0];
2920 rtx src = SET_SRC (single_set (insn));
2921 rtx addr;
2922 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2923 int segment;
2924 RTX_CODE code;
2925 addr_space_t as = MEM_ADDR_SPACE (src);
2927 if (plen)
2928 *plen = 0;
2930 if (MEM_P (dest))
2932 warning (0, "writing to address space %qs not supported",
2933 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2935 return "";
2938 addr = XEXP (src, 0);
2939 code = GET_CODE (addr);
2941 gcc_assert (REG_P (dest));
2942 gcc_assert (REG == code || POST_INC == code);
2944 xop[0] = dest;
2945 xop[1] = addr;
2946 xop[2] = lpm_addr_reg_rtx;
2947 xop[4] = xstring_empty;
2948 xop[5] = tmp_reg_rtx;
2949 xop[6] = XEXP (rampz_rtx, 0);
2951 segment = avr_addrspace[as].segment;
2953 /* Set RAMPZ as needed. */
2955 if (segment)
2957 xop[4] = GEN_INT (segment);
2958 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
2960 if (xop[3] != NULL_RTX)
2962 avr_asm_len ("ldi %3,%4" CR_TAB
2963 "out %i6,%3", xop, plen, 2);
2965 else if (segment == 1)
2967 avr_asm_len ("clr %5" CR_TAB
2968 "inc %5" CR_TAB
2969 "out %i6,%5", xop, plen, 3);
2971 else
2973 avr_asm_len ("mov %5,%2" CR_TAB
2974 "ldi %2,%4" CR_TAB
2975 "out %i6,%2" CR_TAB
2976 "mov %2,%5", xop, plen, 4);
2979 xop[4] = xstring_e;
2981 if (!AVR_HAVE_ELPMX)
2982 return avr_out_lpm_no_lpmx (insn, xop, plen);
2984 else if (!AVR_HAVE_LPMX)
2986 return avr_out_lpm_no_lpmx (insn, xop, plen);
2989 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2991 switch (GET_CODE (addr))
2993 default:
2994 gcc_unreachable();
2996 case REG:
2998 gcc_assert (REG_Z == REGNO (addr));
3000 switch (n_bytes)
3002 default:
3003 gcc_unreachable();
3005 case 1:
3006 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3008 case 2:
3009 if (REGNO (dest) == REG_Z)
3010 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3011 "%4lpm %B0,%a2" CR_TAB
3012 "mov %A0,%5", xop, plen, 3);
3013 else
3015 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3016 "%4lpm %B0,%a2", xop, plen, 2);
3018 if (!reg_unused_after (insn, addr))
3019 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3022 break; /* 2 */
3024 case 3:
3026 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3027 "%4lpm %B0,%a2+" CR_TAB
3028 "%4lpm %C0,%a2", xop, plen, 3);
3030 if (!reg_unused_after (insn, addr))
3031 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3033 break; /* 3 */
3035 case 4:
3037 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3038 "%4lpm %B0,%a2+", xop, plen, 2);
3040 if (REGNO (dest) == REG_Z - 2)
3041 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3042 "%4lpm %C0,%a2" CR_TAB
3043 "mov %D0,%5", xop, plen, 3);
3044 else
3046 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3047 "%4lpm %D0,%a2", xop, plen, 2);
3049 if (!reg_unused_after (insn, addr))
3050 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3053 break; /* 4 */
3054 } /* n_bytes */
3056 break; /* REG */
3058 case POST_INC:
3060 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3061 && n_bytes <= 4);
3063 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3064 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3065 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3066 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3068 break; /* POST_INC */
3070 } /* switch CODE (addr) */
3072 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3074 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3076 xop[0] = zero_reg_rtx;
3077 avr_asm_len ("out %i6,%0", xop, plen, 1);
3080 return "";
3084 /* Worker function for xload_8 insn. */
3086 const char*
3087 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3089 rtx xop[4];
3091 xop[0] = op[0];
3092 xop[1] = op[1];
3093 xop[2] = lpm_addr_reg_rtx;
3094 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3096 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3098 avr_asm_len ("sbrc %1,7" CR_TAB
3099 "ld %3,%a2", xop, plen, 2);
3101 if (REGNO (xop[0]) != REGNO (xop[3]))
3102 avr_asm_len ("mov %0,%3", xop, plen, 1);
3104 return "";
3108 const char*
3109 output_movqi (rtx insn, rtx operands[], int *plen)
3111 rtx dest = operands[0];
3112 rtx src = operands[1];
3114 if (avr_mem_flash_p (src)
3115 || avr_mem_flash_p (dest))
3117 return avr_out_lpm (insn, operands, plen);
3120 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3122 if (REG_P (dest))
3124 if (REG_P (src)) /* mov r,r */
3126 if (test_hard_reg_class (STACK_REG, dest))
3127 return avr_asm_len ("out %0,%1", operands, plen, -1);
3128 else if (test_hard_reg_class (STACK_REG, src))
3129 return avr_asm_len ("in %0,%1", operands, plen, -1);
3131 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3133 else if (CONSTANT_P (src))
3135 output_reload_in_const (operands, NULL_RTX, plen, false);
3136 return "";
3138 else if (MEM_P (src))
3139 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3141 else if (MEM_P (dest))
3143 rtx xop[2];
3145 xop[0] = dest;
3146 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3148 return out_movqi_mr_r (insn, xop, plen);
3151 return "";
3155 const char *
3156 output_movhi (rtx insn, rtx xop[], int *plen)
3158 rtx dest = xop[0];
3159 rtx src = xop[1];
3161 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3163 if (avr_mem_flash_p (src)
3164 || avr_mem_flash_p (dest))
3166 return avr_out_lpm (insn, xop, plen);
3169 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3171 if (REG_P (dest))
3173 if (REG_P (src)) /* mov r,r */
3175 if (test_hard_reg_class (STACK_REG, dest))
3177 if (AVR_HAVE_8BIT_SP)
3178 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3180 if (AVR_XMEGA)
3181 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3182 "out __SP_H__,%B1", xop, plen, -2);
3184 /* Use simple load of SP if no interrupts are used. */
3186 return TARGET_NO_INTERRUPTS
3187 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3188 "out __SP_L__,%A1", xop, plen, -2)
3189 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3190 "cli" CR_TAB
3191 "out __SP_H__,%B1" CR_TAB
3192 "out __SREG__,__tmp_reg__" CR_TAB
3193 "out __SP_L__,%A1", xop, plen, -5);
3195 else if (test_hard_reg_class (STACK_REG, src))
3197 return !AVR_HAVE_SPH
3198 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3199 "clr %B0", xop, plen, -2)
3201 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3202 "in %B0,__SP_H__", xop, plen, -2);
3205 return AVR_HAVE_MOVW
3206 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3208 : avr_asm_len ("mov %A0,%A1" CR_TAB
3209 "mov %B0,%B1", xop, plen, -2);
3210 } /* REG_P (src) */
3211 else if (CONSTANT_P (src))
3213 return output_reload_inhi (xop, NULL, plen);
3215 else if (MEM_P (src))
3217 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3220 else if (MEM_P (dest))
3222 rtx xop[2];
3224 xop[0] = dest;
3225 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3227 return out_movhi_mr_r (insn, xop, plen);
3230 fatal_insn ("invalid insn:", insn);
3232 return "";
3235 static const char*
3236 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
3238 rtx dest = op[0];
3239 rtx src = op[1];
3240 rtx x = XEXP (src, 0);
3242 if (CONSTANT_ADDRESS_P (x))
3244 return optimize > 0 && io_address_operand (x, QImode)
3245 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3246 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3248 else if (GET_CODE (x) == PLUS
3249 && REG_P (XEXP (x, 0))
3250 && CONST_INT_P (XEXP (x, 1)))
3252 /* memory access by reg+disp */
3254 int disp = INTVAL (XEXP (x, 1));
3256 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3258 if (REGNO (XEXP (x, 0)) != REG_Y)
3259 fatal_insn ("incorrect insn:",insn);
3261 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3262 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3263 "ldd %0,Y+63" CR_TAB
3264 "sbiw r28,%o1-63", op, plen, -3);
3266 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3267 "sbci r29,hi8(-%o1)" CR_TAB
3268 "ld %0,Y" CR_TAB
3269 "subi r28,lo8(%o1)" CR_TAB
3270 "sbci r29,hi8(%o1)", op, plen, -5);
3272 else if (REGNO (XEXP (x, 0)) == REG_X)
3274 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3275 it but I have this situation with extremal optimizing options. */
3277 avr_asm_len ("adiw r26,%o1" CR_TAB
3278 "ld %0,X", op, plen, -2);
3280 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3281 && !reg_unused_after (insn, XEXP (x,0)))
3283 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3286 return "";
3289 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3292 return avr_asm_len ("ld %0,%1", op, plen, -1);
3295 static const char*
3296 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3298 rtx dest = op[0];
3299 rtx src = op[1];
3300 rtx base = XEXP (src, 0);
3301 int reg_dest = true_regnum (dest);
3302 int reg_base = true_regnum (base);
3303 /* "volatile" forces reading low byte first, even if less efficient,
3304 for correct operation with 16-bit I/O registers. */
3305 int mem_volatile_p = MEM_VOLATILE_P (src);
3307 if (reg_base > 0)
3309 if (reg_dest == reg_base) /* R = (R) */
3310 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3311 "ld %B0,%1" CR_TAB
3312 "mov %A0,__tmp_reg__", op, plen, -3);
3314 if (reg_base != REG_X)
3315 return avr_asm_len ("ld %A0,%1" CR_TAB
3316 "ldd %B0,%1+1", op, plen, -2);
3318 avr_asm_len ("ld %A0,X+" CR_TAB
3319 "ld %B0,X", op, plen, -2);
3321 if (!reg_unused_after (insn, base))
3322 avr_asm_len ("sbiw r26,1", op, plen, 1);
3324 return "";
3326 else if (GET_CODE (base) == PLUS) /* (R + i) */
3328 int disp = INTVAL (XEXP (base, 1));
3329 int reg_base = true_regnum (XEXP (base, 0));
3331 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3333 if (REGNO (XEXP (base, 0)) != REG_Y)
3334 fatal_insn ("incorrect insn:",insn);
3336 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3337 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3338 "ldd %A0,Y+62" CR_TAB
3339 "ldd %B0,Y+63" CR_TAB
3340 "sbiw r28,%o1-62", op, plen, -4)
3342 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3343 "sbci r29,hi8(-%o1)" CR_TAB
3344 "ld %A0,Y" CR_TAB
3345 "ldd %B0,Y+1" CR_TAB
3346 "subi r28,lo8(%o1)" CR_TAB
3347 "sbci r29,hi8(%o1)", op, plen, -6);
3350 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3351 it but I have this situation with extremal
3352 optimization options. */
3354 if (reg_base == REG_X)
3355 return reg_base == reg_dest
3356 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3357 "ld __tmp_reg__,X+" CR_TAB
3358 "ld %B0,X" CR_TAB
3359 "mov %A0,__tmp_reg__", op, plen, -4)
3361 : avr_asm_len ("adiw r26,%o1" CR_TAB
3362 "ld %A0,X+" CR_TAB
3363 "ld %B0,X" CR_TAB
3364 "sbiw r26,%o1+1", op, plen, -4);
3366 return reg_base == reg_dest
3367 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3368 "ldd %B0,%B1" CR_TAB
3369 "mov %A0,__tmp_reg__", op, plen, -3)
3371 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3372 "ldd %B0,%B1", op, plen, -2);
3374 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3376 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3377 fatal_insn ("incorrect insn:", insn);
3379 if (!mem_volatile_p)
3380 return avr_asm_len ("ld %B0,%1" CR_TAB
3381 "ld %A0,%1", op, plen, -2);
3383 return REGNO (XEXP (base, 0)) == REG_X
3384 ? avr_asm_len ("sbiw r26,2" CR_TAB
3385 "ld %A0,X+" CR_TAB
3386 "ld %B0,X" CR_TAB
3387 "sbiw r26,1", op, plen, -4)
3389 : avr_asm_len ("sbiw %r1,2" CR_TAB
3390 "ld %A0,%p1" CR_TAB
3391 "ldd %B0,%p1+1", op, plen, -3);
3393 else if (GET_CODE (base) == POST_INC) /* (R++) */
3395 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3396 fatal_insn ("incorrect insn:", insn);
3398 return avr_asm_len ("ld %A0,%1" CR_TAB
3399 "ld %B0,%1", op, plen, -2);
3401 else if (CONSTANT_ADDRESS_P (base))
3403 return optimize > 0 && io_address_operand (base, HImode)
3404 ? avr_asm_len ("in %A0,%i1" CR_TAB
3405 "in %B0,%i1+1", op, plen, -2)
3407 : avr_asm_len ("lds %A0,%m1" CR_TAB
3408 "lds %B0,%m1+1", op, plen, -4);
3411 fatal_insn ("unknown move insn:",insn);
3412 return "";
3415 static const char*
3416 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3418 rtx dest = op[0];
3419 rtx src = op[1];
3420 rtx base = XEXP (src, 0);
3421 int reg_dest = true_regnum (dest);
3422 int reg_base = true_regnum (base);
3423 int tmp;
3425 if (!l)
3426 l = &tmp;
3428 if (reg_base > 0)
3430 if (reg_base == REG_X) /* (R26) */
3432 if (reg_dest == REG_X)
3433 /* "ld r26,-X" is undefined */
3434 return *l=7, ("adiw r26,3" CR_TAB
3435 "ld r29,X" CR_TAB
3436 "ld r28,-X" CR_TAB
3437 "ld __tmp_reg__,-X" CR_TAB
3438 "sbiw r26,1" CR_TAB
3439 "ld r26,X" CR_TAB
3440 "mov r27,__tmp_reg__");
3441 else if (reg_dest == REG_X - 2)
3442 return *l=5, ("ld %A0,X+" CR_TAB
3443 "ld %B0,X+" CR_TAB
3444 "ld __tmp_reg__,X+" CR_TAB
3445 "ld %D0,X" CR_TAB
3446 "mov %C0,__tmp_reg__");
3447 else if (reg_unused_after (insn, base))
3448 return *l=4, ("ld %A0,X+" CR_TAB
3449 "ld %B0,X+" CR_TAB
3450 "ld %C0,X+" CR_TAB
3451 "ld %D0,X");
3452 else
3453 return *l=5, ("ld %A0,X+" CR_TAB
3454 "ld %B0,X+" CR_TAB
3455 "ld %C0,X+" CR_TAB
3456 "ld %D0,X" CR_TAB
3457 "sbiw r26,3");
3459 else
3461 if (reg_dest == reg_base)
3462 return *l=5, ("ldd %D0,%1+3" CR_TAB
3463 "ldd %C0,%1+2" CR_TAB
3464 "ldd __tmp_reg__,%1+1" CR_TAB
3465 "ld %A0,%1" CR_TAB
3466 "mov %B0,__tmp_reg__");
3467 else if (reg_base == reg_dest + 2)
3468 return *l=5, ("ld %A0,%1" CR_TAB
3469 "ldd %B0,%1+1" CR_TAB
3470 "ldd __tmp_reg__,%1+2" CR_TAB
3471 "ldd %D0,%1+3" CR_TAB
3472 "mov %C0,__tmp_reg__");
3473 else
3474 return *l=4, ("ld %A0,%1" CR_TAB
3475 "ldd %B0,%1+1" CR_TAB
3476 "ldd %C0,%1+2" CR_TAB
3477 "ldd %D0,%1+3");
3480 else if (GET_CODE (base) == PLUS) /* (R + i) */
3482 int disp = INTVAL (XEXP (base, 1));
3484 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3486 if (REGNO (XEXP (base, 0)) != REG_Y)
3487 fatal_insn ("incorrect insn:",insn);
3489 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3490 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3491 "ldd %A0,Y+60" CR_TAB
3492 "ldd %B0,Y+61" CR_TAB
3493 "ldd %C0,Y+62" CR_TAB
3494 "ldd %D0,Y+63" CR_TAB
3495 "sbiw r28,%o1-60");
3497 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3498 "sbci r29,hi8(-%o1)" CR_TAB
3499 "ld %A0,Y" CR_TAB
3500 "ldd %B0,Y+1" CR_TAB
3501 "ldd %C0,Y+2" CR_TAB
3502 "ldd %D0,Y+3" CR_TAB
3503 "subi r28,lo8(%o1)" CR_TAB
3504 "sbci r29,hi8(%o1)");
3507 reg_base = true_regnum (XEXP (base, 0));
3508 if (reg_base == REG_X)
3510 /* R = (X + d) */
3511 if (reg_dest == REG_X)
3513 *l = 7;
3514 /* "ld r26,-X" is undefined */
3515 return ("adiw r26,%o1+3" CR_TAB
3516 "ld r29,X" CR_TAB
3517 "ld r28,-X" CR_TAB
3518 "ld __tmp_reg__,-X" CR_TAB
3519 "sbiw r26,1" CR_TAB
3520 "ld r26,X" CR_TAB
3521 "mov r27,__tmp_reg__");
3523 *l = 6;
3524 if (reg_dest == REG_X - 2)
3525 return ("adiw r26,%o1" CR_TAB
3526 "ld r24,X+" CR_TAB
3527 "ld r25,X+" CR_TAB
3528 "ld __tmp_reg__,X+" CR_TAB
3529 "ld r27,X" CR_TAB
3530 "mov r26,__tmp_reg__");
3532 return ("adiw r26,%o1" CR_TAB
3533 "ld %A0,X+" CR_TAB
3534 "ld %B0,X+" CR_TAB
3535 "ld %C0,X+" CR_TAB
3536 "ld %D0,X" CR_TAB
3537 "sbiw r26,%o1+3");
3539 if (reg_dest == reg_base)
3540 return *l=5, ("ldd %D0,%D1" CR_TAB
3541 "ldd %C0,%C1" CR_TAB
3542 "ldd __tmp_reg__,%B1" CR_TAB
3543 "ldd %A0,%A1" CR_TAB
3544 "mov %B0,__tmp_reg__");
3545 else if (reg_dest == reg_base - 2)
3546 return *l=5, ("ldd %A0,%A1" CR_TAB
3547 "ldd %B0,%B1" CR_TAB
3548 "ldd __tmp_reg__,%C1" CR_TAB
3549 "ldd %D0,%D1" CR_TAB
3550 "mov %C0,__tmp_reg__");
3551 return *l=4, ("ldd %A0,%A1" CR_TAB
3552 "ldd %B0,%B1" CR_TAB
3553 "ldd %C0,%C1" CR_TAB
3554 "ldd %D0,%D1");
3556 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3557 return *l=4, ("ld %D0,%1" CR_TAB
3558 "ld %C0,%1" CR_TAB
3559 "ld %B0,%1" CR_TAB
3560 "ld %A0,%1");
3561 else if (GET_CODE (base) == POST_INC) /* (R++) */
3562 return *l=4, ("ld %A0,%1" CR_TAB
3563 "ld %B0,%1" CR_TAB
3564 "ld %C0,%1" CR_TAB
3565 "ld %D0,%1");
3566 else if (CONSTANT_ADDRESS_P (base))
3567 return *l=8, ("lds %A0,%m1" CR_TAB
3568 "lds %B0,%m1+1" CR_TAB
3569 "lds %C0,%m1+2" CR_TAB
3570 "lds %D0,%m1+3");
3572 fatal_insn ("unknown move insn:",insn);
3573 return "";
3576 static const char*
3577 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3579 rtx dest = op[0];
3580 rtx src = op[1];
3581 rtx base = XEXP (dest, 0);
3582 int reg_base = true_regnum (base);
3583 int reg_src = true_regnum (src);
3584 int tmp;
3586 if (!l)
3587 l = &tmp;
3589 if (CONSTANT_ADDRESS_P (base))
3590 return *l=8,("sts %m0,%A1" CR_TAB
3591 "sts %m0+1,%B1" CR_TAB
3592 "sts %m0+2,%C1" CR_TAB
3593 "sts %m0+3,%D1");
3594 if (reg_base > 0) /* (r) */
3596 if (reg_base == REG_X) /* (R26) */
3598 if (reg_src == REG_X)
3600 /* "st X+,r26" is undefined */
3601 if (reg_unused_after (insn, base))
3602 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3603 "st X,r26" CR_TAB
3604 "adiw r26,1" CR_TAB
3605 "st X+,__tmp_reg__" CR_TAB
3606 "st X+,r28" CR_TAB
3607 "st X,r29");
3608 else
3609 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3610 "st X,r26" CR_TAB
3611 "adiw r26,1" CR_TAB
3612 "st X+,__tmp_reg__" CR_TAB
3613 "st X+,r28" CR_TAB
3614 "st X,r29" CR_TAB
3615 "sbiw r26,3");
3617 else if (reg_base == reg_src + 2)
3619 if (reg_unused_after (insn, base))
3620 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3621 "mov __tmp_reg__,%D1" CR_TAB
3622 "st %0+,%A1" CR_TAB
3623 "st %0+,%B1" CR_TAB
3624 "st %0+,__zero_reg__" CR_TAB
3625 "st %0,__tmp_reg__" CR_TAB
3626 "clr __zero_reg__");
3627 else
3628 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3629 "mov __tmp_reg__,%D1" CR_TAB
3630 "st %0+,%A1" CR_TAB
3631 "st %0+,%B1" CR_TAB
3632 "st %0+,__zero_reg__" CR_TAB
3633 "st %0,__tmp_reg__" CR_TAB
3634 "clr __zero_reg__" CR_TAB
3635 "sbiw r26,3");
3637 return *l=5, ("st %0+,%A1" CR_TAB
3638 "st %0+,%B1" CR_TAB
3639 "st %0+,%C1" CR_TAB
3640 "st %0,%D1" CR_TAB
3641 "sbiw r26,3");
3643 else
3644 return *l=4, ("st %0,%A1" CR_TAB
3645 "std %0+1,%B1" CR_TAB
3646 "std %0+2,%C1" CR_TAB
3647 "std %0+3,%D1");
3649 else if (GET_CODE (base) == PLUS) /* (R + i) */
3651 int disp = INTVAL (XEXP (base, 1));
3652 reg_base = REGNO (XEXP (base, 0));
3653 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3655 if (reg_base != REG_Y)
3656 fatal_insn ("incorrect insn:",insn);
3658 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3659 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3660 "std Y+60,%A1" CR_TAB
3661 "std Y+61,%B1" CR_TAB
3662 "std Y+62,%C1" CR_TAB
3663 "std Y+63,%D1" CR_TAB
3664 "sbiw r28,%o0-60");
3666 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3667 "sbci r29,hi8(-%o0)" CR_TAB
3668 "st Y,%A1" CR_TAB
3669 "std Y+1,%B1" CR_TAB
3670 "std Y+2,%C1" CR_TAB
3671 "std Y+3,%D1" CR_TAB
3672 "subi r28,lo8(%o0)" CR_TAB
3673 "sbci r29,hi8(%o0)");
3675 if (reg_base == REG_X)
3677 /* (X + d) = R */
3678 if (reg_src == REG_X)
3680 *l = 9;
3681 return ("mov __tmp_reg__,r26" CR_TAB
3682 "mov __zero_reg__,r27" CR_TAB
3683 "adiw r26,%o0" CR_TAB
3684 "st X+,__tmp_reg__" CR_TAB
3685 "st X+,__zero_reg__" CR_TAB
3686 "st X+,r28" CR_TAB
3687 "st X,r29" CR_TAB
3688 "clr __zero_reg__" CR_TAB
3689 "sbiw r26,%o0+3");
3691 else if (reg_src == REG_X - 2)
3693 *l = 9;
3694 return ("mov __tmp_reg__,r26" CR_TAB
3695 "mov __zero_reg__,r27" CR_TAB
3696 "adiw r26,%o0" CR_TAB
3697 "st X+,r24" CR_TAB
3698 "st X+,r25" CR_TAB
3699 "st X+,__tmp_reg__" CR_TAB
3700 "st X,__zero_reg__" CR_TAB
3701 "clr __zero_reg__" CR_TAB
3702 "sbiw r26,%o0+3");
3704 *l = 6;
3705 return ("adiw r26,%o0" CR_TAB
3706 "st X+,%A1" CR_TAB
3707 "st X+,%B1" CR_TAB
3708 "st X+,%C1" CR_TAB
3709 "st X,%D1" CR_TAB
3710 "sbiw r26,%o0+3");
3712 return *l=4, ("std %A0,%A1" CR_TAB
3713 "std %B0,%B1" CR_TAB
3714 "std %C0,%C1" CR_TAB
3715 "std %D0,%D1");
3717 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3718 return *l=4, ("st %0,%D1" CR_TAB
3719 "st %0,%C1" CR_TAB
3720 "st %0,%B1" CR_TAB
3721 "st %0,%A1");
3722 else if (GET_CODE (base) == POST_INC) /* (R++) */
3723 return *l=4, ("st %0,%A1" CR_TAB
3724 "st %0,%B1" CR_TAB
3725 "st %0,%C1" CR_TAB
3726 "st %0,%D1");
3727 fatal_insn ("unknown move insn:",insn);
3728 return "";
3731 const char *
3732 output_movsisf (rtx insn, rtx operands[], int *l)
3734 int dummy;
3735 rtx dest = operands[0];
3736 rtx src = operands[1];
3737 int *real_l = l;
3739 if (avr_mem_flash_p (src)
3740 || avr_mem_flash_p (dest))
3742 return avr_out_lpm (insn, operands, real_l);
3745 if (!l)
3746 l = &dummy;
3748 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
3749 if (REG_P (dest))
3751 if (REG_P (src)) /* mov r,r */
3753 if (true_regnum (dest) > true_regnum (src))
3755 if (AVR_HAVE_MOVW)
3757 *l = 2;
3758 return ("movw %C0,%C1" CR_TAB
3759 "movw %A0,%A1");
3761 *l = 4;
3762 return ("mov %D0,%D1" CR_TAB
3763 "mov %C0,%C1" CR_TAB
3764 "mov %B0,%B1" CR_TAB
3765 "mov %A0,%A1");
3767 else
3769 if (AVR_HAVE_MOVW)
3771 *l = 2;
3772 return ("movw %A0,%A1" CR_TAB
3773 "movw %C0,%C1");
3775 *l = 4;
3776 return ("mov %A0,%A1" CR_TAB
3777 "mov %B0,%B1" CR_TAB
3778 "mov %C0,%C1" CR_TAB
3779 "mov %D0,%D1");
3782 else if (CONSTANT_P (src))
3784 return output_reload_insisf (operands, NULL_RTX, real_l);
3786 else if (MEM_P (src))
3787 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3789 else if (MEM_P (dest))
3791 const char *templ;
3793 if (src == CONST0_RTX (GET_MODE (dest)))
3794 operands[1] = zero_reg_rtx;
3796 templ = out_movsi_mr_r (insn, operands, real_l);
3798 if (!real_l)
3799 output_asm_insn (templ, operands);
3801 operands[1] = src;
3802 return "";
3804 fatal_insn ("invalid insn:", insn);
3805 return "";
3809 /* Handle loads of 24-bit types from memory to register. */
3811 static const char*
3812 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3814 rtx dest = op[0];
3815 rtx src = op[1];
/* SRC is a MEM (see caller avr_out_movpsi); BASE is its address.  */
3816 rtx base = XEXP (src, 0);
3817 int reg_dest = true_regnum (dest);
3818 int reg_base = true_regnum (base);
/* NOTE(review): negative length arguments to avr_asm_len apparently
   reset *plen before accumulating -- confirm against avr_asm_len.  */
3820 if (reg_base > 0)
3822 if (reg_base == REG_X) /* (R26) */
3824 if (reg_dest == REG_X)
3825 /* "ld r26,-X" is undefined */
3826 return avr_asm_len ("adiw r26,2" CR_TAB
3827 "ld r28,X" CR_TAB
3828 "ld __tmp_reg__,-X" CR_TAB
3829 "sbiw r26,1" CR_TAB
3830 "ld r26,X" CR_TAB
3831 "mov r27,__tmp_reg__", op, plen, -6);
3832 else
3834 avr_asm_len ("ld %A0,X+" CR_TAB
3835 "ld %B0,X+" CR_TAB
3836 "ld %C0,X", op, plen, -3);
/* Restore X unless it was the destination's own bytes or dies here.  */
3838 if (reg_dest != REG_X - 2
3839 && !reg_unused_after (insn, base))
3841 avr_asm_len ("sbiw r26,2", op, plen, 1);
3844 return "";
3847 else /* reg_base != REG_X */
/* When DEST overlaps BASE, load high-to-low through __tmp_reg__
   so the base pointer survives until the last load.  */
3849 if (reg_dest == reg_base)
3850 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3851 "ldd __tmp_reg__,%1+1" CR_TAB
3852 "ld %A0,%1" CR_TAB
3853 "mov %B0,__tmp_reg__", op, plen, -4);
3854 else
3855 return avr_asm_len ("ld %A0,%1" CR_TAB
3856 "ldd %B0,%1+1" CR_TAB
3857 "ldd %C0,%1+2", op, plen, -3);
3860 else if (GET_CODE (base) == PLUS) /* (R + i) */
3862 int disp = INTVAL (XEXP (base, 1));
/* Displacement too big for LDD: temporarily adjust Y.  */
3864 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3866 if (REGNO (XEXP (base, 0)) != REG_Y)
3867 fatal_insn ("incorrect insn:",insn);
3869 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3870 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3871 "ldd %A0,Y+61" CR_TAB
3872 "ldd %B0,Y+62" CR_TAB
3873 "ldd %C0,Y+63" CR_TAB
3874 "sbiw r28,%o1-61", op, plen, -5);
3876 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3877 "sbci r29,hi8(-%o1)" CR_TAB
3878 "ld %A0,Y" CR_TAB
3879 "ldd %B0,Y+1" CR_TAB
3880 "ldd %C0,Y+2" CR_TAB
3881 "subi r28,lo8(%o1)" CR_TAB
3882 "sbci r29,hi8(%o1)", op, plen, -7);
3885 reg_base = true_regnum (XEXP (base, 0));
3886 if (reg_base == REG_X)
3888 /* R = (X + d) */
3889 if (reg_dest == REG_X)
3891 /* "ld r26,-X" is undefined */
3892 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3893 "ld r28,X" CR_TAB
3894 "ld __tmp_reg__,-X" CR_TAB
3895 "sbiw r26,1" CR_TAB
3896 "ld r26,X" CR_TAB
3897 "mov r27,__tmp_reg__", op, plen, -6);
3900 avr_asm_len ("adiw r26,%o1" CR_TAB
3901 "ld %A0,X+" CR_TAB
3902 "ld %B0,X+" CR_TAB
3903 "ld %C0,X", op, plen, -4);
3905 if (reg_dest != REG_W
3906 && !reg_unused_after (insn, XEXP (base, 0)))
3907 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3909 return "";
3912 if (reg_dest == reg_base)
3913 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3914 "ldd __tmp_reg__,%B1" CR_TAB
3915 "ldd %A0,%A1" CR_TAB
3916 "mov %B0,__tmp_reg__", op, plen, -4);
3918 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3919 "ldd %B0,%B1" CR_TAB
3920 "ldd %C0,%C1", op, plen, -3);
3922 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3923 return avr_asm_len ("ld %C0,%1" CR_TAB
3924 "ld %B0,%1" CR_TAB
3925 "ld %A0,%1", op, plen, -3);
3926 else if (GET_CODE (base) == POST_INC) /* (R++) */
3927 return avr_asm_len ("ld %A0,%1" CR_TAB
3928 "ld %B0,%1" CR_TAB
3929 "ld %C0,%1", op, plen, -3);
3931 else if (CONSTANT_ADDRESS_P (base))
3932 return avr_asm_len ("lds %A0,%m1" CR_TAB
3933 "lds %B0,%m1+1" CR_TAB
3934 "lds %C0,%m1+2", op, plen , -6);
3936 fatal_insn ("unknown move insn:",insn);
3937 return "";
3940 /* Handle store of 24-bit type from register or zero to memory. */
3942 static const char*
3943 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3945 rtx dest = op[0];
3946 rtx src = op[1];
/* DEST is a MEM (see caller avr_out_movpsi); BASE is its address.  */
3947 rtx base = XEXP (dest, 0);
3948 int reg_base = true_regnum (base);
3950 if (CONSTANT_ADDRESS_P (base))
3951 return avr_asm_len ("sts %m0,%A1" CR_TAB
3952 "sts %m0+1,%B1" CR_TAB
3953 "sts %m0+2,%C1", op, plen, -6);
3955 if (reg_base > 0) /* (r) */
3957 if (reg_base == REG_X) /* (R26) */
/* X has no displacement addressing; SRC overlapping X is ruled out.  */
3959 gcc_assert (!reg_overlap_mentioned_p (base, src));
3961 avr_asm_len ("st %0+,%A1" CR_TAB
3962 "st %0+,%B1" CR_TAB
3963 "st %0,%C1", op, plen, -3);
/* Undo the post-increments if X is still live after this insn.  */
3965 if (!reg_unused_after (insn, base))
3966 avr_asm_len ("sbiw r26,2", op, plen, 1);
3968 return "";
3970 else
3971 return avr_asm_len ("st %0,%A1" CR_TAB
3972 "std %0+1,%B1" CR_TAB
3973 "std %0+2,%C1", op, plen, -3);
3975 else if (GET_CODE (base) == PLUS) /* (R + i) */
3977 int disp = INTVAL (XEXP (base, 1));
3978 reg_base = REGNO (XEXP (base, 0));
/* Displacement too big for STD: temporarily adjust Y.  */
3980 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3982 if (reg_base != REG_Y)
3983 fatal_insn ("incorrect insn:",insn);
3985 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3986 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3987 "std Y+61,%A1" CR_TAB
3988 "std Y+62,%B1" CR_TAB
3989 "std Y+63,%C1" CR_TAB
3990 "sbiw r28,%o0-60", op, plen, -5);
3992 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3993 "sbci r29,hi8(-%o0)" CR_TAB
3994 "st Y,%A1" CR_TAB
3995 "std Y+1,%B1" CR_TAB
3996 "std Y+2,%C1" CR_TAB
3997 "subi r28,lo8(%o0)" CR_TAB
3998 "sbci r29,hi8(%o0)", op, plen, -7);
4000 if (reg_base == REG_X)
4002 /* (X + d) = R */
4003 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4005 avr_asm_len ("adiw r26,%o0" CR_TAB
4006 "st X+,%A1" CR_TAB
4007 "st X+,%B1" CR_TAB
4008 "st X,%C1", op, plen, -4);
4010 if (!reg_unused_after (insn, XEXP (base, 0)))
4011 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4013 return "";
4016 return avr_asm_len ("std %A0,%A1" CR_TAB
4017 "std %B0,%B1" CR_TAB
4018 "std %C0,%C1", op, plen, -3);
4020 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4021 return avr_asm_len ("st %0,%C1" CR_TAB
4022 "st %0,%B1" CR_TAB
4023 "st %0,%A1", op, plen, -3);
4024 else if (GET_CODE (base) == POST_INC) /* (R++) */
4025 return avr_asm_len ("st %0,%A1" CR_TAB
4026 "st %0,%B1" CR_TAB
4027 "st %0,%C1", op, plen, -3);
4029 fatal_insn ("unknown move insn:",insn);
4030 return "";
4034 /* Move around 24-bit stuff.  OP[0] is the destination, OP[1] the source.
     Dispatches to the LPM / load / store / reload helpers as appropriate.
     PLEN, when non-NULL, receives the length in words instead of output.  */
4036 const char *
4037 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4039 rtx dest = op[0];
4040 rtx src = op[1];
/* Program-memory accesses are handled by the LPM helper.  */
4042 if (avr_mem_flash_p (src)
4043 || avr_mem_flash_p (dest))
4045 return avr_out_lpm (insn, op, plen);
4048 if (register_operand (dest, VOIDmode))
4050 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction chosen so overlapping registers are not clobbered.  */
4052 if (true_regnum (dest) > true_regnum (src))
4054 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4056 if (AVR_HAVE_MOVW)
4057 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4058 else
4059 return avr_asm_len ("mov %B0,%B1" CR_TAB
4060 "mov %A0,%A1", op, plen, 2);
4062 else
4064 if (AVR_HAVE_MOVW)
4065 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4066 else
4067 avr_asm_len ("mov %A0,%A1" CR_TAB
4068 "mov %B0,%B1", op, plen, -2);
4070 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4073 else if (CONSTANT_P (src))
4075 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4077 else if (MEM_P (src))
4078 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4080 else if (MEM_P (dest))
4082 rtx xop[2];
4084 xop[0] = dest;
/* Store constant 0 from the fixed zero register.  */
4085 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4087 return avr_out_store_psi (insn, xop, plen);
4090 fatal_insn ("invalid insn:", insn);
4091 return "";
/* Output an 8-bit store from register OP[1] to memory OP[0].
   PLEN, when non-NULL, receives the length in words instead of output.  */
4095 static const char*
4096 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
4098 rtx dest = op[0];
4099 rtx src = op[1];
4100 rtx x = XEXP (dest, 0);
/* Prefer OUT for I/O addresses when optimizing: shorter than STS.  */
4102 if (CONSTANT_ADDRESS_P (x))
4104 return optimize > 0 && io_address_operand (x, QImode)
4105 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4106 : avr_asm_len ("sts %m0,%1", op, plen, -2);
4108 else if (GET_CODE (x) == PLUS
4109 && REG_P (XEXP (x, 0))
4110 && CONST_INT_P (XEXP (x, 1)))
4112 /* memory access by reg+disp */
4114 int disp = INTVAL (XEXP (x, 1));
/* Displacement out of STD's 0..63 range: temporarily adjust Y.  */
4116 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4118 if (REGNO (XEXP (x, 0)) != REG_Y)
4119 fatal_insn ("incorrect insn:",insn);
4121 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4122 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4123 "std Y+63,%1" CR_TAB
4124 "sbiw r28,%o0-63", op, plen, -3);
4126 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4127 "sbci r29,hi8(-%o0)" CR_TAB
4128 "st Y,%1" CR_TAB
4129 "subi r28,lo8(%o0)" CR_TAB
4130 "sbci r29,hi8(%o0)", op, plen, -5);
4132 else if (REGNO (XEXP (x,0)) == REG_X)
/* X has no displacement mode; save SRC first if it overlaps X.  */
4134 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4136 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4137 "adiw r26,%o0" CR_TAB
4138 "st X,__tmp_reg__", op, plen, -3);
4140 else
4142 avr_asm_len ("adiw r26,%o0" CR_TAB
4143 "st X,%1", op, plen, -2);
4146 if (!reg_unused_after (insn, XEXP (x,0)))
4147 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4149 return "";
4152 return avr_asm_len ("std %0,%1", op, plen, -1);
4155 return avr_asm_len ("st %0,%1", op, plen, -1);
4159 /* Helper for the next function for XMEGA. It does the same
4160 but with low byte first. */
4162 static const char*
4163 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
4165 rtx dest = op[0];
4166 rtx src = op[1];
4167 rtx base = XEXP (dest, 0);
4168 int reg_base = true_regnum (base);
4169 int reg_src = true_regnum (src);
4171 /* "volatile" forces writing low byte first, even if less efficient,
4172 for correct operation with 16-bit I/O registers like SP. */
4173 int mem_volatile_p = MEM_VOLATILE_P (dest);
4175 if (CONSTANT_ADDRESS_P (base))
4176 return optimize > 0 && io_address_operand (base, HImode)
4177 ? avr_asm_len ("out %i0,%A1" CR_TAB
4178 "out %i0+1,%B1", op, plen, -2)
4180 : avr_asm_len ("sts %m0,%A1" CR_TAB
4181 "sts %m0+1,%B1", op, plen, -4);
4183 if (reg_base > 0)
4185 if (reg_base != REG_X)
4186 return avr_asm_len ("st %0,%A1" CR_TAB
4187 "std %0+1,%B1", op, plen, -2);
4189 if (reg_src == REG_X)
4190 /* "st X+,r26" and "st -X,r26" are undefined. */
4191 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4192 "st X,r26" CR_TAB
4193 "adiw r26,1" CR_TAB
4194 "st X,__tmp_reg__", op, plen, -4);
4195 else
4196 avr_asm_len ("st X+,%A1" CR_TAB
4197 "st X,%B1", op, plen, -2);
/* Restore X unless it dies with this insn.  */
4199 return reg_unused_after (insn, base)
4200 ? ""
4201 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4203 else if (GET_CODE (base) == PLUS)
4205 int disp = INTVAL (XEXP (base, 1));
4206 reg_base = REGNO (XEXP (base, 0));
/* Displacement too big for STD: temporarily adjust Y.  */
4207 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4209 if (reg_base != REG_Y)
4210 fatal_insn ("incorrect insn:",insn);
4212 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4213 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4214 "std Y+62,%A1" CR_TAB
4215 "std Y+63,%B1" CR_TAB
4216 "sbiw r28,%o0-62", op, plen, -4)
4218 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4219 "sbci r29,hi8(-%o0)" CR_TAB
4220 "st Y,%A1" CR_TAB
4221 "std Y+1,%B1" CR_TAB
4222 "subi r28,lo8(%o0)" CR_TAB
4223 "sbci r29,hi8(%o0)", op, plen, -6);
4226 if (reg_base != REG_X)
4227 return avr_asm_len ("std %A0,%A1" CR_TAB
4228 "std %B0,%B1", op, plen, -2);
4229 /* (X + d) = R */
/* __zero_reg__ is borrowed as a scratch and cleared again below.  */
4230 return reg_src == REG_X
4231 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4232 "mov __zero_reg__,r27" CR_TAB
4233 "adiw r26,%o0" CR_TAB
4234 "st X+,__tmp_reg__" CR_TAB
4235 "st X,__zero_reg__" CR_TAB
4236 "clr __zero_reg__" CR_TAB
4237 "sbiw r26,%o0+1", op, plen, -7)
4239 : avr_asm_len ("adiw r26,%o0" CR_TAB
4240 "st X+,%A1" CR_TAB
4241 "st X,%B1" CR_TAB
4242 "sbiw r26,%o0+1", op, plen, -4);
4244 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
/* Volatile access must still write the low byte first.  */
4246 if (!mem_volatile_p)
4247 return avr_asm_len ("st %0,%B1" CR_TAB
4248 "st %0,%A1", op, plen, -2);
4250 return REGNO (XEXP (base, 0)) == REG_X
4251 ? avr_asm_len ("sbiw r26,2" CR_TAB
4252 "st X+,%A1" CR_TAB
4253 "st X,%B1" CR_TAB
4254 "sbiw r26,1", op, plen, -4)
4256 : avr_asm_len ("sbiw %r0,2" CR_TAB
4257 "st %p0,%A1" CR_TAB
4258 "std %p0+1,%B1", op, plen, -3);
4260 else if (GET_CODE (base) == POST_INC) /* (R++) */
4262 return avr_asm_len ("st %0,%A1" CR_TAB
4263 "st %0,%B1", op, plen, -2);
4266 fatal_insn ("unknown move insn:",insn);
4267 return "";
/* Output a 16-bit store from register OP[1] to memory OP[0].
   Non-XMEGA devices write the HIGH byte first; XMEGA is delegated to
   avr_out_movhi_mr_r_xmega which writes low byte first.  */
4271 static const char*
4272 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
4274 rtx dest = op[0];
4275 rtx src = op[1];
4276 rtx base = XEXP (dest, 0);
4277 int reg_base = true_regnum (base);
4278 int reg_src = true_regnum (src);
4279 int mem_volatile_p;
4281 /* "volatile" forces writing high-byte first (no-xmega) resp.
4282 low-byte first (xmega) even if less efficient, for correct
4283 operation with 16-bit I/O registers like. */
4285 if (AVR_XMEGA)
4286 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4288 mem_volatile_p = MEM_VOLATILE_P (dest);
4290 if (CONSTANT_ADDRESS_P (base))
4291 return optimize > 0 && io_address_operand (base, HImode)
4292 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4293 "out %i0,%A1", op, plen, -2)
4295 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4296 "sts %m0,%A1", op, plen, -4);
4298 if (reg_base > 0)
4300 if (reg_base != REG_X)
4301 return avr_asm_len ("std %0+1,%B1" CR_TAB
4302 "st %0,%A1", op, plen, -2);
4304 if (reg_src == REG_X)
4305 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Shorter sequence allowed only when order doesn't matter (not
   volatile) and SRC dies here.  */
4306 return !mem_volatile_p && reg_unused_after (insn, src)
4307 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4308 "st X,r26" CR_TAB
4309 "adiw r26,1" CR_TAB
4310 "st X,__tmp_reg__", op, plen, -4)
4312 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4313 "adiw r26,1" CR_TAB
4314 "st X,__tmp_reg__" CR_TAB
4315 "sbiw r26,1" CR_TAB
4316 "st X,r26", op, plen, -5);
4318 return !mem_volatile_p && reg_unused_after (insn, base)
4319 ? avr_asm_len ("st X+,%A1" CR_TAB
4320 "st X,%B1", op, plen, -2)
4321 : avr_asm_len ("adiw r26,1" CR_TAB
4322 "st X,%B1" CR_TAB
4323 "st -X,%A1", op, plen, -3);
4325 else if (GET_CODE (base) == PLUS)
4327 int disp = INTVAL (XEXP (base, 1));
4328 reg_base = REGNO (XEXP (base, 0));
/* Displacement too big for STD: temporarily adjust Y.  */
4329 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4331 if (reg_base != REG_Y)
4332 fatal_insn ("incorrect insn:",insn);
4334 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4335 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4336 "std Y+63,%B1" CR_TAB
4337 "std Y+62,%A1" CR_TAB
4338 "sbiw r28,%o0-62", op, plen, -4)
4340 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4341 "sbci r29,hi8(-%o0)" CR_TAB
4342 "std Y+1,%B1" CR_TAB
4343 "st Y,%A1" CR_TAB
4344 "subi r28,lo8(%o0)" CR_TAB
4345 "sbci r29,hi8(%o0)", op, plen, -6);
4348 if (reg_base != REG_X)
4349 return avr_asm_len ("std %B0,%B1" CR_TAB
4350 "std %A0,%A1", op, plen, -2);
4351 /* (X + d) = R */
/* __zero_reg__ is borrowed as a scratch and cleared again below.  */
4352 return reg_src == REG_X
4353 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4354 "mov __zero_reg__,r27" CR_TAB
4355 "adiw r26,%o0+1" CR_TAB
4356 "st X,__zero_reg__" CR_TAB
4357 "st -X,__tmp_reg__" CR_TAB
4358 "clr __zero_reg__" CR_TAB
4359 "sbiw r26,%o0", op, plen, -7)
4361 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4362 "st X,%B1" CR_TAB
4363 "st -X,%A1" CR_TAB
4364 "sbiw r26,%o0", op, plen, -4);
4366 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4368 return avr_asm_len ("st %0,%B1" CR_TAB
4369 "st %0,%A1", op, plen, -2);
4371 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile access must still write the high byte first.  */
4373 if (!mem_volatile_p)
4374 return avr_asm_len ("st %0,%A1" CR_TAB
4375 "st %0,%B1", op, plen, -2);
4377 return REGNO (XEXP (base, 0)) == REG_X
4378 ? avr_asm_len ("adiw r26,1" CR_TAB
4379 "st X,%B1" CR_TAB
4380 "st -X,%A1" CR_TAB
4381 "adiw r26,2", op, plen, -4)
4383 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4384 "st %p0,%A1" CR_TAB
4385 "adiw %r0,2", op, plen, -3);
4387 fatal_insn ("unknown move insn:",insn);
4388 return "";
4391 /* Return 1 if frame pointer for current function required. */
4393 static bool
4394 avr_frame_pointer_required_p (void)
4396 return (cfun->calls_alloca
4397 || cfun->calls_setjmp
4398 || cfun->has_nonlocal_label
4399 || crtl->args.info.nregs == 0
4400 || get_frame_size () > 0);
4403 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4405 static RTX_CODE
4406 compare_condition (rtx insn)
4408 rtx next = next_real_insn (insn);
4410 if (next && JUMP_P (next))
4412 rtx pat = PATTERN (next);
4413 rtx src = SET_SRC (pat);
4415 if (IF_THEN_ELSE == GET_CODE (src))
4416 return GET_CODE (XEXP (src, 0));
4419 return UNKNOWN;
4423 /* Returns true iff INSN is a tst insn that only tests the sign. */
4425 static bool
4426 compare_sign_p (rtx insn)
4428 RTX_CODE cond = compare_condition (insn);
4429 return (cond == GE || cond == LT);
4433 /* Returns true iff the next insn is a JUMP_INSN with a condition
4434 that needs to be swapped (GT, GTU, LE, LEU). */
4436 static bool
4437 compare_diff_p (rtx insn)
4439 RTX_CODE cond = compare_condition (insn);
4440 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4443 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4445 static bool
4446 compare_eq_p (rtx insn)
4448 RTX_CODE cond = compare_condition (insn);
4449 return (cond == EQ || cond == NE);
4453 /* Output compare instruction
4455 compare (XOP[0], XOP[1])
4457 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4458 XOP[2] is an 8-bit scratch register as needed.
4460 PLEN == NULL: Output instructions.
4461 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4462 Don't output anything. */
4464 const char*
4465 avr_out_compare (rtx insn, rtx *xop, int *plen)
4467 /* Register to compare and value to compare against. */
4468 rtx xreg = xop[0];
4469 rtx xval = xop[1];
4471 /* MODE of the comparison. */
4472 enum machine_mode mode;
4474 /* Number of bytes to operate on. */
4475 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4477 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4478 int clobber_val = -1;
4480 /* Map fixed mode operands to integer operands with the same binary
4481 representation. They are easier to handle in the remainder. */
4483 if (CONST_FIXED_P (xval))
4485 xreg = avr_to_int_mode (xop[0]);
4486 xval = avr_to_int_mode (xop[1]);
4489 mode = GET_MODE (xreg);
4491 gcc_assert (REG_P (xreg));
4492 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4493 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4495 if (plen)
4496 *plen = 0;
4498 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4499 against 0 by ORing the bytes. This is one instruction shorter.
4500 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4501 and therefore don't use this. */
4503 if (!test_hard_reg_class (LD_REGS, xreg)
4504 && compare_eq_p (insn)
4505 && reg_unused_after (insn, xreg))
/* DEC then OR: the register may be clobbered since it dies here.  */
4507 if (xval == const1_rtx)
4509 avr_asm_len ("dec %A0" CR_TAB
4510 "or %A0,%B0", xop, plen, 2);
4512 if (n_bytes >= 3)
4513 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4515 if (n_bytes >= 4)
4516 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4518 return "";
4520 else if (xval == constm1_rtx)
4522 if (n_bytes >= 4)
4523 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4525 if (n_bytes >= 3)
4526 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4528 return avr_asm_len ("and %A0,%B0" CR_TAB
4529 "com %A0", xop, plen, 2);
4533 for (i = 0; i < n_bytes; i++)
4535 /* We compare byte-wise. */
4536 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4537 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4539 /* 8-bit value to compare with this byte. */
4540 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4542 /* Registers R16..R31 can operate with immediate. */
4543 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4545 xop[0] = reg8;
4546 xop[1] = gen_int_mode (val8, QImode);
4548 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4550 if (i == 0
4551 && test_hard_reg_class (ADDW_REGS, reg8))
4553 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4555 if (IN_RANGE (val16, 0, 63)
4556 && (val8 == 0
4557 || reg_unused_after (insn, xreg)))
/* SBIW handles the low two bytes at once, so skip the next byte.  */
4559 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4560 i++;
4561 continue;
4564 if (n_bytes == 2
4565 && IN_RANGE (val16, -63, -1)
4566 && compare_eq_p (insn)
4567 && reg_unused_after (insn, xreg))
/* ADIW with the negated value gives the same Z flag for ==/!=.  */
4569 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4573 /* Comparing against 0 is easy. */
4575 if (val8 == 0)
4577 avr_asm_len (i == 0
4578 ? "cp %0,__zero_reg__"
4579 : "cpc %0,__zero_reg__", xop, plen, 1);
4580 continue;
4583 /* Upper registers can compare and subtract-with-carry immediates.
4584 Notice that compare instructions do the same as respective subtract
4585 instruction; the only difference is that comparisons don't write
4586 the result back to the target register. */
4588 if (ld_reg_p)
4590 if (i == 0)
4592 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4593 continue;
4595 else if (reg_unused_after (insn, xreg))
/* SBCI writes the register back, allowed only if it dies here.  */
4597 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4598 continue;
4602 /* Must load the value into the scratch register. */
4604 gcc_assert (REG_P (xop[2]));
/* Reuse the scratch if it already holds this byte value.  */
4606 if (clobber_val != (int) val8)
4607 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4608 clobber_val = (int) val8;
4610 avr_asm_len (i == 0
4611 ? "cp %0,%2"
4612 : "cpc %0,%2", xop, plen, 1);
4615 return "";
4619 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4621 const char*
4622 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4624 rtx xop[3];
4626 xop[0] = gen_rtx_REG (DImode, 18);
4627 xop[1] = op[0];
4628 xop[2] = op[1];
4630 return avr_out_compare (insn, xop, plen);
4633 /* Output test instruction for HImode. */
4635 const char*
4636 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4638 if (compare_sign_p (insn))
4640 avr_asm_len ("tst %B0", op, plen, -1);
4642 else if (reg_unused_after (insn, op[0])
4643 && compare_eq_p (insn))
4645 /* Faster than sbiw if we can clobber the operand. */
4646 avr_asm_len ("or %A0,%B0", op, plen, -1);
4648 else
4650 avr_out_compare (insn, op, plen);
4653 return "";
4657 /* Output test instruction for PSImode. */
4659 const char*
4660 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4662 if (compare_sign_p (insn))
4664 avr_asm_len ("tst %C0", op, plen, -1);
4666 else if (reg_unused_after (insn, op[0])
4667 && compare_eq_p (insn))
4669 /* Faster than sbiw if we can clobber the operand. */
4670 avr_asm_len ("or %A0,%B0" CR_TAB
4671 "or %A0,%C0", op, plen, -2);
4673 else
4675 avr_out_compare (insn, op, plen);
4678 return "";
4682 /* Output test instruction for SImode. */
4684 const char*
4685 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4687 if (compare_sign_p (insn))
4689 avr_asm_len ("tst %D0", op, plen, -1);
4691 else if (reg_unused_after (insn, op[0])
4692 && compare_eq_p (insn))
4694 /* Faster than sbiw if we can clobber the operand. */
4695 avr_asm_len ("or %A0,%B0" CR_TAB
4696 "or %A0,%C0" CR_TAB
4697 "or %A0,%D0", op, plen, -3);
4699 else
4701 avr_out_compare (insn, op, plen);
4704 return "";
4708 /* Generate asm equivalent for various shifts. This only handles cases
4709 that are not already carefully hand-optimized in ?sh??i3_out.
4711 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4712 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4713 OPERANDS[3] is a QImode scratch register from LD regs if
4714 available and SCRATCH, otherwise (no scratch available)
4716 TEMPL is an assembler template that shifts by one position.
4717 T_LEN is the length of this template. */
4719 void
4720 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4721 int *plen, int t_len)
4723 bool second_label = true;
4724 bool saved_in_tmp = false;
4725 bool use_zero_reg = false;
4726 rtx op[5];
4728 op[0] = operands[0];
4729 op[1] = operands[1];
4730 op[2] = operands[2];
4731 op[3] = operands[3];
4733 if (plen)
4734 *plen = 0;
4736 if (CONST_INT_P (operands[2]))
4738 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4739 && REG_P (operands[3]));
4740 int count = INTVAL (operands[2]);
4741 int max_len = 10; /* If larger than this, always use a loop. */
4743 if (count <= 0)
4744 return;
4746 if (count < 8 && !scratch)
4747 use_zero_reg = true;
/* With -Os, compare inline cost against the loop's setup cost.  */
4749 if (optimize_size)
4750 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4752 if (t_len * count <= max_len)
4754 /* Output shifts inline with no loop - faster. */
4756 while (count-- > 0)
4757 avr_asm_len (templ, op, plen, t_len);
4759 return;
4762 if (scratch)
4764 avr_asm_len ("ldi %3,%2", op, plen, 1);
4766 else if (use_zero_reg)
4768 /* Hack to save one word: use __zero_reg__ as loop counter.
4769 Set one bit, then shift in a loop until it is 0 again. */
4771 op[3] = zero_reg_rtx;
4773 avr_asm_len ("set" CR_TAB
4774 "bld %3,%2-1", op, plen, 2);
4776 else
4778 /* No scratch register available, use one from LD_REGS (saved in
4779 __tmp_reg__) that doesn't overlap with registers to shift. */
4781 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4782 op[4] = tmp_reg_rtx;
4783 saved_in_tmp = true;
4785 avr_asm_len ("mov %4,%3" CR_TAB
4786 "ldi %3,%2", op, plen, 2);
/* Count is a known constant > 0, so no need to test before first pass.  */
4789 second_label = false;
4791 else if (MEM_P (op[2]))
4793 rtx op_mov[2];
/* Load the shift count from memory into __tmp_reg__ first.  */
4795 op_mov[0] = op[3] = tmp_reg_rtx;
4796 op_mov[1] = op[2];
4798 out_movqi_r_mr (insn, op_mov, plen);
4800 else if (register_operand (op[2], QImode))
4802 op[3] = op[2];
/* Copy the count if it is still live or aliases the shifted value.  */
4804 if (!reg_unused_after (insn, op[2])
4805 || reg_overlap_mentioned_p (op[0], op[2]))
4807 op[3] = tmp_reg_rtx;
4808 avr_asm_len ("mov %3,%2", op, plen, 1);
4811 else
4812 fatal_insn ("bad shift insn:", insn);
/* Enter the loop at the test when the count may be zero.  */
4814 if (second_label)
4815 avr_asm_len ("rjmp 2f", op, plen, 1);
4817 avr_asm_len ("1:", op, plen, 0);
4818 avr_asm_len (templ, op, plen, t_len);
4820 if (second_label)
4821 avr_asm_len ("2:", op, plen, 0);
4823 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4824 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4826 if (saved_in_tmp)
4827 avr_asm_len ("mov %3,%4", op, plen, 1);
4831 /* 8bit shift left ((char)x << i) */
/* OPERANDS[0] is shifted by OPERANDS[2].  LEN, when non-NULL, receives
   the length in words instead of output being emitted.  */
4833 const char *
4834 ashlqi3_out (rtx insn, rtx operands[], int *len)
4836 if (GET_CODE (operands[2]) == CONST_INT)
4838 int k;
4840 if (!len)
4841 len = &k;
4843 switch (INTVAL (operands[2]))
4845 default:
/* Shift >= 8 clears the byte entirely.  */
4846 if (INTVAL (operands[2]) < 8)
4847 break;
4849 *len = 1;
4850 return "clr %0";
4852 case 1:
4853 *len = 1;
4854 return "lsl %0";
4856 case 2:
4857 *len = 2;
4858 return ("lsl %0" CR_TAB
4859 "lsl %0");
4861 case 3:
4862 *len = 3;
4863 return ("lsl %0" CR_TAB
4864 "lsl %0" CR_TAB
4865 "lsl %0");
/* Shifts 4..6: SWAP + ANDI is shorter but needs an upper register.  */
4867 case 4:
4868 if (test_hard_reg_class (LD_REGS, operands[0]))
4870 *len = 2;
4871 return ("swap %0" CR_TAB
4872 "andi %0,0xf0");
4874 *len = 4;
4875 return ("lsl %0" CR_TAB
4876 "lsl %0" CR_TAB
4877 "lsl %0" CR_TAB
4878 "lsl %0");
4880 case 5:
4881 if (test_hard_reg_class (LD_REGS, operands[0]))
4883 *len = 3;
4884 return ("swap %0" CR_TAB
4885 "lsl %0" CR_TAB
4886 "andi %0,0xe0");
4888 *len = 5;
4889 return ("lsl %0" CR_TAB
4890 "lsl %0" CR_TAB
4891 "lsl %0" CR_TAB
4892 "lsl %0" CR_TAB
4893 "lsl %0");
4895 case 6:
4896 if (test_hard_reg_class (LD_REGS, operands[0]))
4898 *len = 4;
4899 return ("swap %0" CR_TAB
4900 "lsl %0" CR_TAB
4901 "lsl %0" CR_TAB
4902 "andi %0,0xc0");
4904 *len = 6;
4905 return ("lsl %0" CR_TAB
4906 "lsl %0" CR_TAB
4907 "lsl %0" CR_TAB
4908 "lsl %0" CR_TAB
4909 "lsl %0" CR_TAB
4910 "lsl %0");
/* Shift 7: rotate bit 0 into carry, clear, rotate carry into bit 7.  */
4912 case 7:
4913 *len = 3;
4914 return ("ror %0" CR_TAB
4915 "clr %0" CR_TAB
4916 "ror %0");
4919 else if (CONSTANT_P (operands[2]))
4920 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Variable shift count: emit a generic shift loop.  */
4922 out_shift_with_cnt ("lsl %0",
4923 insn, operands, len, 1);
4924 return "";
4928 /* 16bit shift left ((short)x << i) */
/* OPERANDS[0] is shifted by OPERANDS[2]; OPERANDS[3] may be a scratch
   register (when the insn is a PARALLEL).  LEN, when non-NULL, receives
   the length in words instead of output being emitted.  */
4930 const char *
4931 ashlhi3_out (rtx insn, rtx operands[], int *len)
4933 if (GET_CODE (operands[2]) == CONST_INT)
4935 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4936 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4937 int k;
/* Remember the caller's (possibly NULL) len pointer: cases that
   "break" fall through to the generic loop emitter below.  */
4938 int *t = len;
4940 if (!len)
4941 len = &k;
4943 switch (INTVAL (operands[2]))
4945 default:
4946 if (INTVAL (operands[2]) < 16)
4947 break;
/* Shift >= 16 clears the whole word.  */
4949 *len = 2;
4950 return ("clr %B0" CR_TAB
4951 "clr %A0");
4953 case 4:
4954 if (optimize_size && scratch)
4955 break; /* 5 */
4956 if (ldi_ok)
4958 *len = 6;
4959 return ("swap %A0" CR_TAB
4960 "swap %B0" CR_TAB
4961 "andi %B0,0xf0" CR_TAB
4962 "eor %B0,%A0" CR_TAB
4963 "andi %A0,0xf0" CR_TAB
4964 "eor %B0,%A0");
4966 if (scratch)
4968 *len = 7;
4969 return ("swap %A0" CR_TAB
4970 "swap %B0" CR_TAB
4971 "ldi %3,0xf0" CR_TAB
4972 "and %B0,%3" CR_TAB
4973 "eor %B0,%A0" CR_TAB
4974 "and %A0,%3" CR_TAB
4975 "eor %B0,%A0");
4977 break; /* optimize_size ? 6 : 8 */
4979 case 5:
4980 if (optimize_size)
4981 break; /* scratch ? 5 : 6 */
4982 if (ldi_ok)
4984 *len = 8;
4985 return ("lsl %A0" CR_TAB
4986 "rol %B0" CR_TAB
4987 "swap %A0" CR_TAB
4988 "swap %B0" CR_TAB
4989 "andi %B0,0xf0" CR_TAB
4990 "eor %B0,%A0" CR_TAB
4991 "andi %A0,0xf0" CR_TAB
4992 "eor %B0,%A0");
4994 if (scratch)
4996 *len = 9;
4997 return ("lsl %A0" CR_TAB
4998 "rol %B0" CR_TAB
4999 "swap %A0" CR_TAB
5000 "swap %B0" CR_TAB
5001 "ldi %3,0xf0" CR_TAB
5002 "and %B0,%3" CR_TAB
5003 "eor %B0,%A0" CR_TAB
5004 "and %A0,%3" CR_TAB
5005 "eor %B0,%A0");
5007 break; /* 10 */
/* Shift 6 done as a right shift by 2 through __tmp_reg__.  */
5009 case 6:
5010 if (optimize_size)
5011 break; /* scratch ? 5 : 6 */
5012 *len = 9;
5013 return ("clr __tmp_reg__" CR_TAB
5014 "lsr %B0" CR_TAB
5015 "ror %A0" CR_TAB
5016 "ror __tmp_reg__" CR_TAB
5017 "lsr %B0" CR_TAB
5018 "ror %A0" CR_TAB
5019 "ror __tmp_reg__" CR_TAB
5020 "mov %B0,%A0" CR_TAB
5021 "mov %A0,__tmp_reg__");
5023 case 7:
5024 *len = 5;
5025 return ("lsr %B0" CR_TAB
5026 "mov %B0,%A0" CR_TAB
5027 "clr %A0" CR_TAB
5028 "ror %B0" CR_TAB
5029 "ror %A0");
/* Shift 8 is a plain byte move.  */
5031 case 8:
5032 return *len = 2, ("mov %B0,%A1" CR_TAB
5033 "clr %A0");
5035 case 9:
5036 *len = 3;
5037 return ("mov %B0,%A0" CR_TAB
5038 "clr %A0" CR_TAB
5039 "lsl %B0");
5041 case 10:
5042 *len = 4;
5043 return ("mov %B0,%A0" CR_TAB
5044 "clr %A0" CR_TAB
5045 "lsl %B0" CR_TAB
5046 "lsl %B0");
5048 case 11:
5049 *len = 5;
5050 return ("mov %B0,%A0" CR_TAB
5051 "clr %A0" CR_TAB
5052 "lsl %B0" CR_TAB
5053 "lsl %B0" CR_TAB
5054 "lsl %B0");
5056 case 12:
5057 if (ldi_ok)
5059 *len = 4;
5060 return ("mov %B0,%A0" CR_TAB
5061 "clr %A0" CR_TAB
5062 "swap %B0" CR_TAB
5063 "andi %B0,0xf0");
5065 if (scratch)
5067 *len = 5;
5068 return ("mov %B0,%A0" CR_TAB
5069 "clr %A0" CR_TAB
5070 "swap %B0" CR_TAB
5071 "ldi %3,0xf0" CR_TAB
5072 "and %B0,%3");
5074 *len = 6;
5075 return ("mov %B0,%A0" CR_TAB
5076 "clr %A0" CR_TAB
5077 "lsl %B0" CR_TAB
5078 "lsl %B0" CR_TAB
5079 "lsl %B0" CR_TAB
5080 "lsl %B0");
5082 case 13:
5083 if (ldi_ok)
5085 *len = 5;
5086 return ("mov %B0,%A0" CR_TAB
5087 "clr %A0" CR_TAB
5088 "swap %B0" CR_TAB
5089 "lsl %B0" CR_TAB
5090 "andi %B0,0xe0");
/* With a hardware multiplier, multiply by 0x20 instead; __zero_reg__
   (r1) is clobbered by MUL and must be cleared again.  */
5092 if (AVR_HAVE_MUL && scratch)
5094 *len = 5;
5095 return ("ldi %3,0x20" CR_TAB
5096 "mul %A0,%3" CR_TAB
5097 "mov %B0,r0" CR_TAB
5098 "clr %A0" CR_TAB
5099 "clr __zero_reg__");
5101 if (optimize_size && scratch)
5102 break; /* 5 */
5103 if (scratch)
5105 *len = 6;
5106 return ("mov %B0,%A0" CR_TAB
5107 "clr %A0" CR_TAB
5108 "swap %B0" CR_TAB
5109 "lsl %B0" CR_TAB
5110 "ldi %3,0xe0" CR_TAB
5111 "and %B0,%3");
5113 if (AVR_HAVE_MUL)
5115 *len = 6;
5116 return ("set" CR_TAB
5117 "bld r1,5" CR_TAB
5118 "mul %A0,r1" CR_TAB
5119 "mov %B0,r0" CR_TAB
5120 "clr %A0" CR_TAB
5121 "clr __zero_reg__");
5123 *len = 7;
5124 return ("mov %B0,%A0" CR_TAB
5125 "clr %A0" CR_TAB
5126 "lsl %B0" CR_TAB
5127 "lsl %B0" CR_TAB
5128 "lsl %B0" CR_TAB
5129 "lsl %B0" CR_TAB
5130 "lsl %B0");
5132 case 14:
5133 if (AVR_HAVE_MUL && ldi_ok)
5135 *len = 5;
5136 return ("ldi %B0,0x40" CR_TAB
5137 "mul %A0,%B0" CR_TAB
5138 "mov %B0,r0" CR_TAB
5139 "clr %A0" CR_TAB
5140 "clr __zero_reg__");
5142 if (AVR_HAVE_MUL && scratch)
5144 *len = 5;
5145 return ("ldi %3,0x40" CR_TAB
5146 "mul %A0,%3" CR_TAB
5147 "mov %B0,r0" CR_TAB
5148 "clr %A0" CR_TAB
5149 "clr __zero_reg__");
/* -Os fallback: a small local shift loop.  */
5151 if (optimize_size && ldi_ok)
5153 *len = 5;
5154 return ("mov %B0,%A0" CR_TAB
5155 "ldi %A0,6" "\n1:\t"
5156 "lsl %B0" CR_TAB
5157 "dec %A0" CR_TAB
5158 "brne 1b");
5160 if (optimize_size && scratch)
5161 break; /* 5 */
5162 *len = 6;
5163 return ("clr %B0" CR_TAB
5164 "lsr %A0" CR_TAB
5165 "ror %B0" CR_TAB
5166 "lsr %A0" CR_TAB
5167 "ror %B0" CR_TAB
5168 "clr %A0");
5170 case 15:
5171 *len = 4;
5172 return ("clr %B0" CR_TAB
5173 "lsr %A0" CR_TAB
5174 "ror %B0" CR_TAB
5175 "clr %A0");
/* Restore the caller's NULL len for the generic loop path.  */
5177 len = t;
5179 out_shift_with_cnt ("lsl %A0" CR_TAB
5180 "rol %B0", insn, operands, len, 2);
5181 return "";
5185 /* 24-bit shift left */
/* OP[0] is shifted by OP[2].  PLEN, when non-NULL, receives the length
   in words instead of output being emitted.  */
5187 const char*
5188 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5190 if (plen)
5191 *plen = 0;
5193 if (CONST_INT_P (op[2]))
5195 switch (INTVAL (op[2]))
5197 default:
5198 if (INTVAL (op[2]) < 24)
5199 break;
/* Shift >= 24 clears all three bytes.  */
5201 return avr_asm_len ("clr %A0" CR_TAB
5202 "clr %B0" CR_TAB
5203 "clr %C0", op, plen, 3);
5205 case 8:
5207 int reg0 = REGNO (op[0]);
5208 int reg1 = REGNO (op[1]);
/* Byte-move order depends on register overlap direction.  */
5210 if (reg0 >= reg1)
5211 return avr_asm_len ("mov %C0,%B1" CR_TAB
5212 "mov %B0,%A1" CR_TAB
5213 "clr %A0", op, plen, 3);
5214 else
5215 return avr_asm_len ("clr %A0" CR_TAB
5216 "mov %B0,%A1" CR_TAB
5217 "mov %C0,%B1", op, plen, 3);
5220 case 16:
5222 int reg0 = REGNO (op[0]);
5223 int reg1 = REGNO (op[1]);
/* If %C0 already aliases %A1, no move is needed.  */
5225 if (reg0 + 2 != reg1)
5226 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5228 return avr_asm_len ("clr %B0" CR_TAB
5229 "clr %A0", op, plen, 2);
/* Shift 23: only bit 0 survives, moved into the top bit via carry.  */
5232 case 23:
5233 return avr_asm_len ("clr %C0" CR_TAB
5234 "lsr %A0" CR_TAB
5235 "ror %C0" CR_TAB
5236 "clr %B0" CR_TAB
5237 "clr %A0", op, plen, 5);
5241 out_shift_with_cnt ("lsl %A0" CR_TAB
5242 "rol %B0" CR_TAB
5243 "rol %C0", insn, op, plen, 3);
5244 return "";
/* 32bit shift left ((long)x << i) */

/* Output assembler for an SImode shift left.  OPERANDS[2] is the count.
   Byte-aligned counts (8/16/24) are done with register moves, count 31
   keeps only the LSB, counts >= 32 clear the result; all other counts
   go through out_shift_with_cnt.  If LEN != NULL, store the sequence
   length (in words) there instead of relying on the returned template
   length.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: zero the destination.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy order chosen so overlapping registers survive.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0"     CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* High word of dest already holds low word of source?  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0"      CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0"     CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0"     CR_TAB
                  "clr %B0"     CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, moved into bit 7 of the MSB.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i) */

/* Output assembler for a QImode arithmetic shift right.  Counts 1..5
   are straight ASR chains; counts 6 and 7 use sign-extension tricks;
   counts >= 8 behave like count 7 (all bits become the sign bit).
   If LEN != NULL, the sequence length in words is stored there.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6 in T, fill with the sign via SBC, restore bit 6
             as the new bit 0.  */
          *len = 4;
          return ("bst %0,6"  CR_TAB
                  "lsl %0"    CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Shift the sign into carry, then replicate it to all bits.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right ((signed short)x >> i) */

/* Output assembler for an HImode arithmetic shift right.  SCRATCH is
   nonzero when the insn pattern supplies a scratch register %3;
   LDI_OK is nonzero when %0 is in LD_REGS (R16..R31) and can take
   immediates.  Many counts have hand-tuned sequences; those marked
   "break" fall back to the generic loop, with the expected loop length
   noted in a comment.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "sbc %B0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Move high byte down and sign-extend into the high byte.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0"     CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0"     CR_TAB
                                "sbrc %A0,7"  CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 2^5 is a right shift by 11 of the
                 high byte (result in r1).  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 14:
          /* Extract the top two bits via carry and sign-fill.  */
          *len = 5;
          return ("lsl %B0"     CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Only the sign bit remains; replicate it everywhere.  */
          return *len = 3, ("lsl %B0"     CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }

  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit arithmetic shift right */

/* Output assembler for a PSImode arithmetic shift right of OP[1] into
   OP[0] by OP[2].  Byte-aligned counts and counts >= 23 have dedicated
   sequences; other counts use the generic loop.  *PLEN accumulates the
   length in words when PLEN != NULL.  Returns "".  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Copy order depends on register overlap direction; the
             high byte is sign-extended with SBRC/DEC.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1"  CR_TAB
                                "mov %B0,%C1"  CR_TAB
                                "clr %C0"      CR_TAB
                                "sbrc %B0,7"   CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0"      CR_TAB
                                "sbrc %C1,7"   CR_TAB
                                "dec %C0"      CR_TAB
                                "mov %B0,%C1"  CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          /* Sign-extend into the two upper bytes.  */
          return avr_asm_len ("clr %B0"     CR_TAB
                              "sbrc %A0,7"  CR_TAB
                              "com %B0"     CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Only the sign bit remains; replicate it into all bytes.  */
          return avr_asm_len ("lsl %C0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit arithmetic shift right ((signed long)x >> i) */

/* Output assembler for an SImode arithmetic shift right.  Byte-aligned
   counts use register moves plus sign extension; counts >= 31 leave
   only the replicated sign bit.  If LEN != NULL, store the length of
   the emitted sequence (in words) there.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Copy order depends on overlap direction.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0"     CR_TAB
                      "sbrc %C0,7"  CR_TAB
                      "dec %D0");
            else
              return ("clr %D0"     CR_TAB
                      "sbrc %D1,7"  CR_TAB
                      "dec %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Low word of dest already holds high word of source?  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0"      CR_TAB
                                "sbrc %B0,7"   CR_TAB
                                "com %D0"      CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0"     CR_TAB
                            "sbrc %A0,7"  CR_TAB
                            "com %D0"     CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Replicate the sign bit into all four bytes.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }

  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* 8-bit logic shift right ((unsigned char)x >> i) */

/* Output assembler for a QImode logical shift right.  For counts 4..6
   a SWAP + mask sequence is shorter when %0 is an LD register (can take
   ANDI); count 7 extracts the MSB via carry.  Counts >= 8 clear the
   register.  If LEN != NULL, store the sequence length there.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              /* SWAP exchanges nibbles; mask off the old low nibble.  */
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* Move bit 7 into carry, clear, rotate carry into bit 0.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16-bit logic shift right ((unsigned short)x >> i) */

/* Output assembler for an HImode logical shift right.  SCRATCH is
   nonzero when the insn pattern supplies a scratch register %3;
   LDI_OK is nonzero when %0 is in LD_REGS and can take immediates.
   Cases that "break" fall back to the generic loop; the comment after
   a break notes the loop length it would produce.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then merge with masks.  */
              *len = 6;
              return ("swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* One plain shift, then the nibble trick from case 4.  */
              *len = 8;
              return ("lsr %B0"       CR_TAB
                      "ror %A0"       CR_TAB
                      "swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0"     CR_TAB
                      "ror %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left by 2 into __tmp_reg__, then move bytes down.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0"     CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "andi %A0,0x07");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 2^3 shifts the high byte's top bits into r1.  */
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the constant 8 in r1 via SET/BLD, then multiply.  */
              *len = 6;
              return ("set"        CR_TAB
                      "bld r1,3"   CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0"  CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "clr %B0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Compact counted loop of six 1-bit shifts.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0"    CR_TAB
                      "dec %B0"    CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Shift the two top bits left into %A0 instead.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          /* Only the MSB survives, rotated into bit 0.  */
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      len = t;
    }

  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit logic shift right */

/* Output assembler for a PSImode logical shift right of OP[1] into
   OP[0] by OP[2].  Byte-aligned counts are register moves; counts
   >= 23 keep only the MSB.  *PLEN accumulates the length in words when
   PLEN != NULL.  Returns "".  */

const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Copy order depends on register overlap direction.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Only bit 23 survives, moved into bit 0.  */
          return avr_asm_len ("clr %A0"    CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0"    CR_TAB
                              "clr %B0"    CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit logic shift right ((unsigned int)x >> i) */

/* Output assembler for an SImode logical shift right.  Byte-aligned
   counts are register moves; count 31 keeps only the MSB; counts >= 32
   clear the result.  If LEN != NULL, store the sequence length (in
   words) there.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: zero the destination.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy order depends on overlap direction.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Low word of dest already holds high word of source?  */
            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0"      CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0"     CR_TAB
                            "clr %C0"     CR_TAB
                            "clr %D0");

        case 31:
          /* Only bit 31 survives, moved into bit 0.  */
          *len = 6;
          return ("clr %A0"    CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0"    CR_TAB
                  "clr %B0"    CR_TAB
                  "clr %C0"    CR_TAB
                  "clr %D0");
        }
      len = t;
    }

  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* Output addition of register XOP[0] and compile time constant XOP[2]:

      CODE == PLUS:  perform addition by using ADD instructions or
      CODE == MINUS: perform addition by using SUB instructions:

         XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

         XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If  CODE_SAT != UNKNOWN  then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.

   If OUT_LABEL is true, print the final 0: label which is needed for
   saturated addition / subtraction.  The only case where  OUT_LABEL = false
   is useful is for saturated addition / subtraction performed during
   fixed-point rounding, cf. `avr_out_round'.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  enum machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  if (REG_P (xop[2]))
    {
      /* Register operand: plain byte-wise ADD/ADC or SUB/SBC chain.  */
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x is 0; nothing left to saturate in the MINUS case.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  /* Subtraction of a constant is performed as addition of the negated
     constant.  */
  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_ZN;
                }

              /* ADIW/SBIW consumed two bytes; skip the partner byte.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Zero byte: only propagate carry once the operation started.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* +/- 1 in the most significant byte only: INC/DEC suffices.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] is the MSB, op[1] the byte below it (if any).  */
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
6696 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6697 is ont a compile-time constant:
6699 XOP[0] = XOP[0] +/- XOP[2]
6701 This is a helper for the function below. The only insns that need this
6702 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6704 static const char*
6705 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6707 enum machine_mode mode = GET_MODE (xop[0]);
6709 /* Only pointer modes want to add symbols. */
6711 gcc_assert (mode == HImode || mode == PSImode);
6713 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6715 avr_asm_len (PLUS == code
6716 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6717 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
6718 xop, plen, -2);
6720 if (PSImode == mode)
6721 avr_asm_len (PLUS == code
6722 ? "sbci %C0,hlo8(-(%2))"
6723 : "sbci %C0,hlo8(%2)", xop, plen, 1);
6724 return "";
6728 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6730 INSN is a single_set insn or an insn pattern with a binary operation as
6731 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6733 XOP are the operands of INSN. In the case of 64-bit operations with
6734 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6735 The non-saturating insns up to 32 bits may or may not supply a "d" class
6736 scratch as XOP[3].
6738 If PLEN == NULL output the instructions.
6739 If PLEN != NULL set *PLEN to the length of the sequence in words.
6741 PCC is a pointer to store the instructions' effect on cc0.
6742 PCC may be NULL.
6744 PLEN and PCC default to NULL.
6746 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6748 Return "" */
6750 const char*
6751 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
6753 int cc_plus, cc_minus, cc_dummy;
6754 int len_plus, len_minus;
6755 rtx op[4];
/* INSN may be a full insn or a bare pattern; either way extract the
   single SET to learn mode and operation.  */
6756 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6757 rtx xdest = SET_DEST (xpattern);
6758 enum machine_mode mode = GET_MODE (xdest);
6759 enum machine_mode imode = int_mode_for_mode (mode);
6760 int n_bytes = GET_MODE_SIZE (mode);
/* CODE_SAT distinguishes plain from saturating plus/minus; CODE is the
   underlying direction (PLUS or MINUS) of the operation.  */
6761 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
6762 enum rtx_code code
6763 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6764 ? PLUS : MINUS);
/* Callers that don't care about the cc0 effect pass PCC == NULL;
   redirect to a dummy so the workers may store unconditionally.  */
6766 if (!pcc)
6767 pcc = &cc_dummy;
6769 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6771 if (PLUS == code_sat || MINUS == code_sat)
6772 code_sat = UNKNOWN;
/* Register summand: no constant to negate or analyze, hand through.  */
6774 if (n_bytes <= 4 && REG_P (xop[2]))
6776 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
6777 return "";
/* 64-bit arithmetic works on the fixed accumulator ACC_A (library
   call "__sbc_8" etc. in avr_out_plus_1); only the constant operand
   of the insn is needed here.  */
6780 if (8 == n_bytes)
6782 op[0] = gen_rtx_REG (DImode, ACC_A)
6783 op[1] = gen_rtx_REG (DImode, ACC_A);
6784 op[2] = avr_to_int_mode (xop[0]);
6786 else
/* Neither register nor numeric constant: a symbolic address, which
   gets its own SUBI/SBCI sequence.  */
6788 if (!REG_P (xop[2])
6789 && !CONST_INT_P (xop[2])
6790 && !CONST_FIXED_P (xop[2]))
6792 return avr_out_plus_symbol (xop, code, plen, pcc);
/* Work on the integer view of the (possibly fixed-point) operands.  */
6795 op[0] = avr_to_int_mode (xop[0]);
6796 op[1] = avr_to_int_mode (xop[1]);
6797 op[2] = avr_to_int_mode (xop[2]);
6800 /* Saturations and 64-bit operations don't have a clobber operand.
6801 For the other cases, the caller will provide a proper XOP[3]. */
6803 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
6804 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
6806 /* Saturation will need the sign of the original operand. */
6808 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6809 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
6811 /* If we subtract and the subtrahend is a constant, then negate it
6812 so that avr_out_plus_1 can be used. */
6814 if (MINUS == code)
6815 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
6817 /* Work out the shortest sequence. */
/* Dry-run both the SUBI/SBCI and the (negated) ADD forms, then emit
   or report whichever is shorter.  */
6819 avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
6820 avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
6822 if (plen)
6824 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6825 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6827 else if (len_minus <= len_plus)
6828 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
6829 else
6830 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
6832 return "";
6836 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6837 time constant XOP[2]:
6839 XOP[0] = XOP[0] <op> XOP[2]
6841 and return "". If PLEN == NULL, print assembler instructions to perform the
6842 operation; otherwise, set *PLEN to the length of the instruction sequence
6843 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6844 register or SCRATCH if no clobber register is needed for the operation.
6845 INSN is an INSN_P or a pattern of an insn. */
6847 const char*
6848 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6850 /* CODE and MODE of the operation. */
6851 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6852 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
6853 enum machine_mode mode = GET_MODE (xop[0]);
6855 /* Number of bytes to operate on. */
6856 int i, n_bytes = GET_MODE_SIZE (mode);
6858 /* Value of T-flag (0 or 1) or -1 if unknown. */
6859 int set_t = -1;
6861 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6862 int clobber_val = -1;
6864 /* op[0]: 8-bit destination register
6865 op[1]: 8-bit const int
6866 op[2]: 8-bit clobber register or SCRATCH
6867 op[3]: 8-bit register containing 0xff or NULL_RTX */
6868 rtx op[4];
6870 op[2] = xop[3];
6871 op[3] = NULL_RTX;
6873 if (plen)
6874 *plen = 0;
/* Process the destination one byte at a time; SET_T and CLOBBER_VAL
   cache state across iterations to avoid re-emitting SET/CLT/LDI.  */
6876 for (i = 0; i < n_bytes; i++)
6878 /* We operate byte-wise on the destination. */
6879 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6880 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6882 /* 8-bit value to operate with this byte. */
6883 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6885 /* Number of bits set in the current byte of the constant. */
6886 int pop8 = avr_popcount (val8);
6888 /* Registers R16..R31 can operate with immediate. */
6889 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6891 op[0] = reg8;
6892 op[1] = GEN_INT (val8);
6894 switch (code)
6896 case IOR:
/* OR with 0x00 is a no-op; with exactly one bit set, route the bit
   through the T flag (SET once, then BLD); with 0xff, load 0xff into
   the byte (reusing a register already known to hold 0xff if any);
   otherwise materialize the mask in the clobber register.  */
6898 if (0 == pop8)
6899 continue;
6900 else if (ld_reg_p)
6901 avr_asm_len ("ori %0,%1", op, plen, 1);
6902 else if (1 == pop8)
6904 if (set_t != 1)
6905 avr_asm_len ("set", op, plen, 1);
6906 set_t = 1;
6908 op[1] = GEN_INT (exact_log2 (val8));
6909 avr_asm_len ("bld %0,%1", op, plen, 1);
6911 else if (8 == pop8)
6913 if (op[3] != NULL_RTX)
6914 avr_asm_len ("mov %0,%3", op, plen, 1);
6915 else
6916 avr_asm_len ("clr %0" CR_TAB
6917 "dec %0", op, plen, 2);
/* This byte now holds 0xff; remember it for later 0xff bytes.  */
6919 op[3] = op[0];
6921 else
6923 if (clobber_val != (int) val8)
6924 avr_asm_len ("ldi %2,%1", op, plen, 1);
6925 clobber_val = (int) val8;
6927 avr_asm_len ("or %0,%2", op, plen, 1);
6930 continue; /* IOR */
6932 case AND:
/* AND with 0xff is a no-op; with 0x00 clear the byte; with exactly
   one bit cleared, clear that bit via the T flag (CLT + BLD).  */
6934 if (8 == pop8)
6935 continue;
6936 else if (0 == pop8)
6937 avr_asm_len ("clr %0", op, plen, 1);
6938 else if (ld_reg_p)
6939 avr_asm_len ("andi %0,%1", op, plen, 1);
6940 else if (7 == pop8)
6942 if (set_t != 0)
6943 avr_asm_len ("clt", op, plen, 1);
6944 set_t = 0;
6946 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6947 avr_asm_len ("bld %0,%1", op, plen, 1);
6949 else
6951 if (clobber_val != (int) val8)
6952 avr_asm_len ("ldi %2,%1", op, plen, 1);
6953 clobber_val = (int) val8;
6955 avr_asm_len ("and %0,%2", op, plen, 1);
6958 continue; /* AND */
6960 case XOR:
/* XOR with 0x00 is a no-op; with 0xff it is a complement (COM).
   XOR with 0x80 flips only the MSB, which modulo 256 equals
   subtracting 0x80 — usable as SUBI on an upper (LD) register.  */
6962 if (0 == pop8)
6963 continue;
6964 else if (8 == pop8)
6965 avr_asm_len ("com %0", op, plen, 1);
6966 else if (ld_reg_p && val8 == (1 << 7))
6967 avr_asm_len ("subi %0,%1", op, plen, 1);
6968 else
6970 if (clobber_val != (int) val8)
6971 avr_asm_len ("ldi %2,%1", op, plen, 1);
6972 clobber_val = (int) val8;
6974 avr_asm_len ("eor %0,%2", op, plen, 1);
6977 continue; /* XOR */
6979 default:
6980 /* Unknown rtx_code */
6981 gcc_unreachable();
6983 } /* for all sub-bytes */
6985 return "";
6989 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6990 PLEN != NULL: Set *PLEN to the length of that sequence.
6991 Return "". */
6993 const char*
6994 avr_out_addto_sp (rtx *op, int *plen)
6996 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6997 int addend = INTVAL (op[0]);
6999 if (plen)
7000 *plen = 0;
7002 if (addend < 0)
7004 if (flag_verbose_asm || flag_print_asm_name)
7005 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7007 while (addend <= -pc_len)
7009 addend += pc_len;
7010 avr_asm_len ("rcall .", op, plen, 1);
7013 while (addend++ < 0)
7014 avr_asm_len ("push __zero_reg__", op, plen, 1);
7016 else if (addend > 0)
7018 if (flag_verbose_asm || flag_print_asm_name)
7019 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7021 while (addend-- > 0)
7022 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7025 return "";
7029 /* Outputs instructions needed for fixed point type conversion.
7030 This includes converting between any fixed point type, as well
7031 as converting to any integer type. Conversion between integer
7032 types is not supported.
7034 Converting signed fractional types requires a bit shift if converting
7035 to or from any unsigned fractional type because the decimal place is
7036 shifted by 1 bit. When the destination is a signed fractional, the sign
7037 is stored in either the carry or T bit. */
7039 const char*
7040 avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
7042 size_t i;
7043 rtx xop[6];
7044 RTX_CODE shift = UNKNOWN;
7045 bool sign_in_carry = false;
7046 bool msb_in_carry = false;
7047 bool lsb_in_tmp_reg = false;
7048 bool lsb_in_carry = false;
7049 bool frac_rounded = false;
7050 const char *code_ashift = "lsl %0";
7053 #define MAY_CLOBBER(RR) \
7054 /* Shorthand used below. */ \
7055 ((sign_bytes \
7056 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7057 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7058 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7059 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7061 struct
7063 /* bytes : Length of operand in bytes.
7064 ibyte : Length of integral part in bytes.
7065 fbyte, fbit : Length of fractional part in bytes, bits. */
7067 bool sbit;
7068 unsigned fbit, bytes, ibyte, fbyte;
7069 unsigned regno, regno_msb;
7070 } dest, src, *val[2] = { &dest, &src };
7072 if (plen)
7073 *plen = 0;
7075 /* Step 0: Determine information on source and destination operand we
7076 ====== will need in the remainder. */
7078 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7080 enum machine_mode mode;
7082 xop[i] = operands[i];
7084 mode = GET_MODE (xop[i]);
7086 val[i]->bytes = GET_MODE_SIZE (mode);
7087 val[i]->regno = REGNO (xop[i]);
7088 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7090 if (SCALAR_INT_MODE_P (mode))
7092 val[i]->sbit = intsigned;
7093 val[i]->fbit = 0;
7095 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7097 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7098 val[i]->fbit = GET_MODE_FBIT (mode);
7100 else
7101 fatal_insn ("unsupported fixed-point conversion", insn);
7103 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7104 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7107 // Byte offset of the decimal point taking into account different place
7108 // of the decimal point in input and output and different register numbers
7109 // of input and output.
7110 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7112 // Number of destination bytes that will come from sign / zero extension.
7113 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7115 // Number of bytes at the low end to be filled with zeros.
7116 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7118 // Do we have a 16-Bit register that is cleared?
7119 rtx clrw = NULL_RTX;
7121 bool sign_extend = src.sbit && sign_bytes;
7123 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7124 shift = ASHIFT;
7125 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7126 shift = ASHIFTRT;
7127 else if (dest.fbit % 8 == src.fbit % 8)
7128 shift = UNKNOWN;
7129 else
7130 gcc_unreachable();
7132 /* If we need to round the fraction part, we might need to save/round it
7133 before clobbering any of it in Step 1. Also, we might to want to do
7134 the rounding now to make use of LD_REGS. */
7135 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7136 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7137 && !TARGET_FRACT_CONV_TRUNC)
7139 bool overlap
7140 = (src.regno <=
7141 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
7142 && dest.regno - offset -1 >= dest.regno);
7143 unsigned s0 = dest.regno - offset -1;
7144 bool use_src = true;
7145 unsigned sn;
7146 unsigned copied_msb = src.regno_msb;
7147 bool have_carry = false;
7149 if (src.ibyte > dest.ibyte)
7150 copied_msb -= src.ibyte - dest.ibyte;
7152 for (sn = s0; sn <= copied_msb; sn++)
7153 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
7154 && !reg_unused_after (insn, all_regs_rtx[sn]))
7155 use_src = false;
7156 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
7158 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7159 &all_regs_rtx[src.regno_msb], plen, 2);
7160 sn = src.regno;
7161 if (sn < s0)
7163 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
7164 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
7165 else
7166 avr_asm_len ("sec" CR_TAB "cpc %0,__zero_reg__",
7167 &all_regs_rtx[sn], plen, 2);
7168 have_carry = true;
7170 while (++sn < s0)
7171 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7172 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
7173 &all_regs_rtx[s0], plen, 1);
7174 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7175 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
7176 avr_asm_len ("\n0:", NULL, plen, 0);
7177 frac_rounded = true;
7179 else if (use_src && overlap)
7181 avr_asm_len ("clr __tmp_reg__" CR_TAB
7182 "sbrc %1,0" CR_TAB "dec __tmp_reg__", xop, plen, 1);
7183 sn = src.regno;
7184 if (sn < s0)
7186 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7187 have_carry = true;
7189 while (++sn < s0)
7190 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7191 if (have_carry)
7192 avr_asm_len ("clt" CR_TAB "bld __tmp_reg__,7" CR_TAB
7193 "adc %0,__tmp_reg__",
7194 &all_regs_rtx[s0], plen, 1);
7195 else
7196 avr_asm_len ("lsr __tmp_reg" CR_TAB "add %0,__tmp_reg__",
7197 &all_regs_rtx[s0], plen, 2);
7198 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7199 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7200 frac_rounded = true;
7202 else if (overlap)
7204 bool use_src
7205 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
7206 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
7207 || reg_unused_after (insn, all_regs_rtx[s0])));
7208 xop[2] = all_regs_rtx[s0];
7209 unsigned sn = src.regno;
7210 if (!use_src || sn == s0)
7211 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7212 /* We need to consider to-be-discarded bits
7213 if the value is negative. */
7214 if (sn < s0)
7216 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7217 &all_regs_rtx[src.regno_msb], plen, 2);
7218 /* Test to-be-discarded bytes for any nozero bits.
7219 ??? Could use OR or SBIW to test two registers at once. */
7220 if (sn < s0)
7221 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7222 while (++sn < s0)
7223 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7224 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7225 if (use_src)
7226 avr_asm_len ("breq 0f" CR_TAB
7227 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7228 xop, plen, 3);
7229 else
7230 avr_asm_len ("breq 0f" CR_TAB
7231 "set" CR_TAB "bld __tmp_reg__,0\n0:",
7232 xop, plen, 3);
7234 lsb_in_tmp_reg = true;
7238 /* Step 1: Clear bytes at the low end and copy payload bits from source
7239 ====== to destination. */
7241 int step = offset < 0 ? 1 : -1;
7242 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7244 // We cleared at least that number of registers.
7245 int clr_n = 0;
7247 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7249 // Next regno of destination is needed for MOVW
7250 unsigned d1 = d0 + step;
7252 // Current and next regno of source
7253 signed s0 = d0 - offset;
7254 signed s1 = s0 + step;
7256 // Must current resp. next regno be CLRed? This applies to the low
7257 // bytes of the destination that have no associated source bytes.
7258 bool clr0 = s0 < (signed) src.regno;
7259 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7261 // First gather what code to emit (if any) and additional step to
7262 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7263 // is the source rtx for the current loop iteration.
7264 const char *code = NULL;
7265 int stepw = 0;
7267 if (clr0)
7269 if (AVR_HAVE_MOVW && clr1 && clrw)
7271 xop[2] = all_regs_rtx[d0 & ~1];
7272 xop[3] = clrw;
7273 code = "movw %2,%3";
7274 stepw = step;
7276 else
7278 xop[2] = all_regs_rtx[d0];
7279 code = "clr %2";
7281 if (++clr_n >= 2
7282 && !clrw
7283 && d0 % 2 == (step > 0))
7285 clrw = all_regs_rtx[d0 & ~1];
7289 else if (offset && s0 <= (signed) src.regno_msb)
7291 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7292 && d0 % 2 == (offset > 0)
7293 && d1 <= dest.regno_msb && d1 >= dest.regno
7294 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7296 xop[2] = all_regs_rtx[d0 & ~movw];
7297 xop[3] = all_regs_rtx[s0 & ~movw];
7298 code = movw ? "movw %2,%3" : "mov %2,%3";
7299 stepw = step * movw;
7302 if (code)
7304 if (sign_extend && shift != ASHIFT && !sign_in_carry
7305 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7307 /* We are going to override the sign bit. If we sign-extend,
7308 store the sign in the Carry flag. This is not needed if
7309 the destination will be ASHIFT is the remainder because
7310 the ASHIFT will set Carry without extra instruction. */
7312 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7313 sign_in_carry = true;
7316 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7318 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7319 && src.ibyte > dest.ibyte
7320 && (d0 == src_msb || d0 + stepw == src_msb))
7322 /* We are going to override the MSB. If we shift right,
7323 store the MSB in the Carry flag. This is only needed if
7324 we don't sign-extend becaue with sign-extension the MSB
7325 (the sign) will be produced by the sign extension. */
7327 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7328 msb_in_carry = true;
7331 unsigned src_lsb = dest.regno - offset -1;
7333 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7334 && !lsb_in_tmp_reg
7335 && (d0 == src_lsb || d0 + stepw == src_lsb))
7337 /* We are going to override the new LSB; store it into carry. */
7339 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7340 code_ashift = "rol %0";
7341 lsb_in_carry = true;
7344 avr_asm_len (code, xop, plen, 1);
7345 d0 += stepw;
7349 /* Step 2: Shift destination left by 1 bit position. This might be needed
7350 ====== for signed input and unsigned output. */
7352 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7354 unsigned s0 = dest.regno - offset -1;
7356 /* n1169 4.1.4 says:
7357 "Conversions from a fixed-point to an integer type round toward zero."
7358 Hence, converting a fract type to integer only gives a non-zero result
7359 for -1. */
7360 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7361 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
7362 && !TARGET_FRACT_CONV_TRUNC)
7364 gcc_assert (s0 == src.regno_msb);
7365 /* Check if the input is -1. We do that by checking if negating
7366 the input causes an integer overflow. */
7367 unsigned sn = src.regno;
7368 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7369 while (sn <= s0)
7370 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7372 /* Overflow goes with set carry. Clear carry otherwise. */
7373 avr_asm_len ("brvs 0f" CR_TAB "clc\n0:", NULL, plen, 2);
7375 /* Likewise, when converting from accumulator types to integer, we
7376 need to round up negative values. */
7377 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7378 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7379 && !TARGET_FRACT_CONV_TRUNC
7380 && !frac_rounded)
7382 bool have_carry = false;
7384 xop[2] = all_regs_rtx[s0];
7385 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
7386 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7387 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7388 &all_regs_rtx[src.regno_msb], plen, 2);
7389 if (!lsb_in_tmp_reg)
7391 unsigned sn = src.regno;
7392 if (sn < s0)
7394 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
7395 plen, 1);
7396 have_carry = true;
7398 while (++sn < s0)
7399 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
7400 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
7402 /* Add in C and the rounding value 127. */
7403 /* If the destination msb is a sign byte, and in LD_REGS,
7404 grab it as a temporary. */
7405 if (sign_bytes
7406 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
7407 dest.regno_msb))
7409 xop[3] = all_regs_rtx[dest.regno_msb];
7410 avr_asm_len ("ldi %3,127", xop, plen, 1);
7411 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
7412 : have_carry ? "adc %2,%3"
7413 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
7414 : "add %2,%3"),
7415 xop, plen, 1);
7417 else
7419 /* Fall back to use __zero_reg__ as a temporary. */
7420 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
7421 if (have_carry)
7422 avr_asm_len ("clt" CR_TAB "bld __zero_reg__,7", NULL, plen, 2);
7423 else
7424 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
7425 avr_asm_len ((have_carry && lsb_in_tmp_reg
7426 ? "adc __tmp_reg__,__zero_reg__"
7427 : have_carry ? "adc %2,__zero_reg__"
7428 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
7429 : "add %2,__zero_reg__"),
7430 xop, plen, 1);
7431 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
7433 for (d0 = dest.regno + zero_bytes;
7434 d0 <= dest.regno_msb - sign_bytes; d0++)
7435 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
7436 avr_asm_len (lsb_in_tmp_reg
7437 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7438 xop, plen, 1);
7440 else if (MAY_CLOBBER (s0))
7441 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7442 else
7443 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7444 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7446 code_ashift = "rol %0";
7447 lsb_in_carry = true;
7450 if (shift == ASHIFT)
7452 for (d0 = dest.regno + zero_bytes;
7453 d0 <= dest.regno_msb - sign_bytes; d0++)
7455 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7456 code_ashift = "rol %0";
7459 lsb_in_carry = false;
7460 sign_in_carry = true;
7463 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7464 ======= it in sign-extension below. */
7466 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7467 && src.ibyte > dest.ibyte)
7469 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7471 if (MAY_CLOBBER (s0))
7472 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7473 else
7474 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7475 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7477 msb_in_carry = true;
7480 /* Step 3: Sign-extend or zero-extend the destination as needed.
7481 ====== */
7483 if (sign_extend && !sign_in_carry)
7485 unsigned s0 = src.regno_msb;
7487 if (MAY_CLOBBER (s0))
7488 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7489 else
7490 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7491 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7493 sign_in_carry = true;
7496 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7498 unsigned copies = 0;
7499 rtx movw = sign_extend ? NULL_RTX : clrw;
7501 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7503 if (AVR_HAVE_MOVW && movw
7504 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7506 xop[2] = all_regs_rtx[d0];
7507 xop[3] = movw;
7508 avr_asm_len ("movw %2,%3", xop, plen, 1);
7509 d0++;
7511 else
7513 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7514 &all_regs_rtx[d0], plen, 1);
7516 if (++copies >= 2 && !movw && d0 % 2 == 1)
7517 movw = all_regs_rtx[d0-1];
7519 } /* for */
7522 /* Step 4: Right shift the destination. This might be needed for
7523 ====== conversions from unsigned to signed. */
7525 if (shift == ASHIFTRT)
7527 const char *code_ashiftrt = "lsr %0";
7529 if (sign_extend || msb_in_carry)
7530 code_ashiftrt = "ror %0";
7532 if (src.sbit && src.ibyte == dest.ibyte)
7533 code_ashiftrt = "asr %0";
7535 for (d0 = dest.regno_msb - sign_bytes;
7536 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7538 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7539 code_ashiftrt = "ror %0";
7543 #undef MAY_CLOBBER
7545 return "";
7549 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7550 XOP[2] is the rounding point, a CONST_INT. The function prints the
7551 instruction sequence if PLEN == NULL and computes the length in words
7552 of the sequence if PLEN != NULL. Most of this function deals with
7553 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7555 const char*
7556 avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
7558 enum machine_mode mode = GET_MODE (xop[0]);
7559 enum machine_mode imode = int_mode_for_mode (mode);
7560 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7561 int fbit = (int) GET_MODE_FBIT (mode);
// I_ADD = 1/2 * 2^(-RP), the value added before truncation.
7562 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
7563 // Lengths of PLUS and AND parts.
7564 int len_add = 0, *plen_add = plen ? &len_add : NULL;
7565 int len_and = 0, *plen_and = plen ? &len_and : NULL;
7567 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7568 // the saturated addition so that we can emit the "rjmp 1f" before the
7569 // "0:" below.
7571 rtx xadd = const_fixed_from_double_int (i_add, mode);
7572 rtx xpattern, xsrc, op[4];
// Use the saturating addition matching the mode's signedness.
7574 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
7575 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
7576 : gen_rtx_US_PLUS (mode, xop[1], xadd);
7577 xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);
7579 op[0] = xop[0];
7580 op[1] = xop[1];
7581 op[2] = xadd;
7582 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
// If the addition saturated, it branched to "0:"; skip the masking then.
7584 avr_asm_len ("rjmp 1f" CR_TAB
7585 "0:", NULL, plen_add, 1);
7587 // Keep all bits from RP and higher: ... 2^(-RP)
7588 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7589 // Rounding point ^^^^^^^
7590 // Added above ^^^^^^^^^
// Masking is done in the integer view of the mode; the AND mask is
// -2 * I_ADD, i.e. all bits from the rounding point upwards.
7591 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
7592 rtx xmask = immed_double_int_const (-i_add - i_add, imode);
7594 xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));
7596 op[0] = xreg;
7597 op[1] = xreg;
7598 op[2] = xmask;
7599 op[3] = gen_rtx_SCRATCH (QImode);
7600 avr_out_bitop (xpattern, op, plen_and);
7601 avr_asm_len ("1:", NULL, plen, 0);
// Total length is the sum of both emitted parts.
7603 if (plen)
7604 *plen = len_add + len_and;
7606 return "";
7610 /* Create RTL split patterns for byte sized rotate expressions. This
7611 produces a series of move instructions and considers overlap situations.
7612 Overlapping non-HImode operands need a scratch register. */
7614 bool
7615 avr_rotate_bytes (rtx operands[])
7617 int i, j;
7618 enum machine_mode mode = GET_MODE (operands[0]);
7619 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
7620 bool same_reg = rtx_equal_p (operands[0], operands[1]);
/* Rotate count in bits; operands[3] is an optional scratch register.  */
7621 int num = INTVAL (operands[2]);
7622 rtx scratch = operands[3];
7623 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7624 Word move if no scratch is needed, otherwise use size of scratch. */
7625 enum machine_mode move_mode = QImode;
7626 int move_size, offset, size;
7628 if (num & 0xf)
7629 move_mode = QImode;
7630 else if ((mode == SImode && !same_reg) || !overlapped)
7631 move_mode = HImode;
7632 else
7633 move_mode = GET_MODE (scratch);
7635 /* Force DI rotate to use QI moves since other DI moves are currently split
7636 into QI moves so forward propagation works better. */
7637 if (mode == DImode)
7638 move_mode = QImode;
7639 /* Make scratch smaller if needed. */
7640 if (SCRATCH != GET_CODE (scratch)
7641 && HImode == GET_MODE (scratch)
7642 && QImode == move_mode)
7643 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
7645 move_size = GET_MODE_SIZE (move_mode);
7646 /* Number of bytes/words to rotate. */
7647 offset = (num >> 3) / move_size;
7648 /* Number of moves needed. */
7649 size = GET_MODE_SIZE (mode) / move_size;
7650 /* Himode byte swap is special case to avoid a scratch register. */
7651 if (mode == HImode && same_reg)
7653 /* HImode byte swap, using xor. This is as quick as using scratch. */
7654 rtx src, dst;
7655 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
7656 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
/* Classic three-XOR swap of the two bytes; a move to self would be
   a no-op and is skipped.  */
7657 if (!rtx_equal_p (dst, src))
7659 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7660 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
7661 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7664 else
7666 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7667 /* Create linked list of moves to determine move order. */
7668 struct {
7669 rtx src, dst;
7670 int links;
7671 } move[MAX_SIZE + 8];
7672 int blocked, moves;
7674 gcc_assert (size <= MAX_SIZE);
7675 /* Generate list of subreg moves. */
7676 for (i = 0; i < size; i++)
7678 int from = i;
7679 int to = (from + offset) % size;
7680 move[i].src = simplify_gen_subreg (move_mode, operands[1],
7681 mode, from * move_size);
7682 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
7683 mode, to * move_size);
/* links == -1 means this move does not block on any other move.  */
7684 move[i].links = -1;
7686 /* Mark dependence where a dst of one move is the src of another move.
7687 The first move is a conflict as it must wait until second is
7688 performed. We ignore moves to self - we catch this later. */
7689 if (overlapped)
7690 for (i = 0; i < size; i++)
7691 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
7692 for (j = 0; j < size; j++)
7693 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
7695 /* The dst of move i is the src of move j. */
7696 move[i].links = j;
7697 break;
7700 blocked = -1;
7701 moves = 0;
7702 /* Go through move list and perform non-conflicting moves. As each
7703 non-overlapping move is made, it may remove other conflicts
7704 so the process is repeated until no conflicts remain. */
7707 blocked = -1;
7708 moves = 0;
7709 /* Emit move where dst is not also a src or we have used that
7710 src already. */
7711 for (i = 0; i < size; i++)
7712 if (move[i].src != NULL_RTX)
7714 if (move[i].links == -1
7715 || move[move[i].links].src == NULL_RTX)
7717 moves++;
7718 /* Ignore NOP moves to self. */
7719 if (!rtx_equal_p (move[i].dst, move[i].src))
7720 emit_move_insn (move[i].dst, move[i].src);
7722 /* Remove conflict from list. */
7723 move[i].src = NULL_RTX;
7725 else
7726 blocked = i;
7729 /* Check for deadlock. This is when no moves occurred and we have
7730 at least one blocked move. */
7731 if (moves == 0 && blocked != -1)
7733 /* Need to use scratch register to break deadlock.
7734 Add move to put dst of blocked move into scratch.
7735 When this move occurs, it will break chain deadlock.
7736 The scratch register is substituted for real move. */
7738 gcc_assert (SCRATCH != GET_CODE (scratch));
7740 move[size].src = move[blocked].dst;
7741 move[size].dst = scratch;
7742 /* Scratch move is never blocked. */
7743 move[size].links = -1;
7744 /* Make sure we have valid link. */
7745 gcc_assert (move[blocked].links != -1);
7746 /* Replace src of blocking move with scratch reg. */
7747 move[move[blocked].links].src = scratch;
7748 /* Make dependent on scratch move occurring. */
7749 move[blocked].links = size;
/* The list grows by one entry per broken deadlock cycle; the array
   headroom (MAX_SIZE + 8) accounts for these extra scratch moves.  */
7750 size=size+1;
7753 while (blocked != -1);
7755 return true;
7759 /* Worker function for `ADJUST_INSN_LENGTH'. */
7760 /* Modifies the length assigned to instruction INSN
7761 LEN is the initially computed length of the insn. */
/* Dispatch on the insn attribute "adjust_len": each case below calls
   the output function that knows the exact instruction sequence being
   emitted and lets it overwrite LEN through the &len pointer.  */
7764 avr_adjust_insn_length (rtx insn, int len)
7766 rtx *op = recog_data.operand;
7767 enum attr_adjust_len adjust_len;
7769 /* Some complex insns don't need length adjustment and therefore
7770 the length need not/must not be adjusted for these insns.
7771 It is easier to state this in an insn attribute "adjust_len" than
7772 to clutter up code here... */
7774 if (-1 == recog_memoized (insn))
/* Insn was not recognized: leave the pre-computed length untouched.  */
7776 return len;
7779 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7781 adjust_len = get_attr_adjust_len (insn);
7783 if (adjust_len == ADJUST_LEN_NO)
7785 /* Nothing to adjust: The length from attribute "length" is fine.
7786 This is the default. */
7788 return len;
7791 /* Extract insn's operands. */
7793 extract_constrain_insn_cached (insn);
7795 /* Dispatch to right function. */
7797 switch (adjust_len)
7799 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7800 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7801 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7803 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7805 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7806 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7808 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7809 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7810 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7811 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7812 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7813 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7814 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7816 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7817 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7818 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
7820 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7821 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7822 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7823 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7824 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7826 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7827 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7828 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7830 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7831 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7832 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7834 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7835 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7836 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7838 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7839 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7840 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7842 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7844 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7846 default:
/* Every "adjust_len" value must be handled above.  */
7847 gcc_unreachable();
/* The output functions have updated LEN in place via &len.  */
7850 return len;
7853 /* Return nonzero if register REG dead after INSN. */
/* True if INSN itself sets or kills REG, or if the conservative
   forward scan in _reg_unused_after shows no later use of REG.  */
7856 reg_unused_after (rtx insn, rtx reg)
7858 return (dead_or_set_p (insn, reg)
/* _reg_unused_after is only meaningful for REG rtxes, hence the guard.  */
7859 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7862 /* Return nonzero if REG is not used after INSN.
7863 We assume REG is a reload reg, and therefore does
7864 not live past labels. It may live past calls or jumps though. */
7867 _reg_unused_after (rtx insn, rtx reg)
7869 enum rtx_code code;
7870 rtx set;
7872 /* If the reg is set by this instruction, then it is safe for our
7873 case. Disregard the case where this is a store to memory, since
7874 we are checking a register used in the store address. */
7875 set = single_set (insn);
7876 if (set && GET_CODE (SET_DEST (set)) != MEM
7877 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7878 return 1;
/* Scan forward over the following insns until a verdict is reached.  */
7880 while ((insn = NEXT_INSN (insn)))
7882 rtx set;
7883 code = GET_CODE (insn);
7885 #if 0
7886 /* If this is a label that existed before reload, then the register
7887 if dead here. However, if this is a label added by reorg, then
7888 the register may still be live here. We can't tell the difference,
7889 so we just ignore labels completely. */
7890 if (code == CODE_LABEL)
7891 return 1;
7892 /* else */
7893 #endif
7895 if (!INSN_P (insn))
7896 continue;
/* A jump: REG might be live at the target, be conservative.  */
7898 if (code == JUMP_INSN)
7899 return 0;
7901 /* If this is a sequence, we must handle them all at once.
7902 We could have for instance a call that sets the target register,
7903 and an insn in a delay slot that uses the register. In this case,
7904 we must return 0. */
7905 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7907 int i;
7908 int retval = 0;
7910 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7912 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7913 rtx set = single_set (this_insn);
7915 if (CALL_P (this_insn))
7916 code = CALL_INSN;
7917 else if (JUMP_P (this_insn))
7919 if (INSN_ANNULLED_BRANCH_P (this_insn))
7920 return 0;
7921 code = JUMP_INSN;
/* Any read of REG inside the sequence means it is still live.  */
7924 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7925 return 0;
7926 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7928 if (GET_CODE (SET_DEST (set)) != MEM)
7929 retval = 1;
7930 else
7931 return 0;
7932 if (set == 0
7934 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7935 return 0;
7937 if (retval == 1)
7938 return 1;
7939 else if (code == JUMP_INSN)
7940 return 0;
/* A call: REG survives unless it is a call-used (clobbered) register
   or appears in the call's USE list.  */
7943 if (code == CALL_INSN)
7945 rtx tem;
7946 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7947 if (GET_CODE (XEXP (tem, 0)) == USE
7948 && REG_P (XEXP (XEXP (tem, 0), 0))
7949 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7950 return 0;
7951 if (call_used_regs[REGNO (reg)])
7952 return 1;
/* Ordinary insn: REG dies if it is overwritten (not via MEM) without
   being read first; any other mention keeps it live.  */
7955 set = single_set (insn);
7957 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7958 return 0;
7959 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7960 return GET_CODE (SET_DEST (set)) != MEM;
7961 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7962 return 0;
/* Fell off the end of the insn stream: REG is unused.  */
7964 return 1;
7968 /* Implement `TARGET_ASM_INTEGER'. */
7969 /* Target hook for assembling integer objects. The AVR version needs
7970 special handling for references to certain labels. */
7972 static bool
7973 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Code addresses are emitted with the gs() modifier so the linker can
   generate stubs / resolve word addresses in program memory.  */
7975 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
7976 && text_segment_operand (x, VOIDmode))
7978 fputs ("\t.word\tgs(", asm_out_file);
7979 output_addr_const (asm_out_file, x);
7980 fputs (")\n", asm_out_file);
7982 return true;
/* 24-bit objects are emitted byte-wise with lo8/hi8/hh8 relocations.  */
7984 else if (GET_MODE (x) == PSImode)
7986 /* This needs binutils 2.23+, see PR binutils/13503 */
7988 fputs ("\t.byte\tlo8(", asm_out_file);
7989 output_addr_const (asm_out_file, x);
7990 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7992 fputs ("\t.byte\thi8(", asm_out_file);
7993 output_addr_const (asm_out_file, x);
7994 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7996 fputs ("\t.byte\thh8(", asm_out_file);
7997 output_addr_const (asm_out_file, x);
7998 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8000 return true;
8002 else if (CONST_FIXED_P (x))
8004 unsigned n;
8006 /* varasm fails to handle big fixed modes that don't fit in hwi. */
/* Emit the fixed-point constant one byte at a time instead.  */
8008 for (n = 0; n < size; n++)
8010 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8011 default_assemble_integer (xn, 1, aligned_p);
8014 return true;
/* Everything else is handled by the generic implementation.  */
8017 return default_assemble_integer (x, size, aligned_p);
8021 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8022 /* Return value is nonzero if pseudos that have been
8023 assigned to registers of class CLASS would likely be spilled
8024 because registers of CLASS are needed for spill registers. */
8026 static bool
8027 avr_class_likely_spilled_p (reg_class_t c)
8029 return (c != ALL_REGS && c != ADDW_REGS);
8033 /* Valid attributes:
8034 progmem - Put data to program memory.
8035 signal - Make a function to be hardware interrupt.
8036 After function prologue interrupts remain disabled.
8037 interrupt - Make a function to be hardware interrupt. Before function
8038 prologue interrupts are enabled by means of SEI.
8039 naked - Don't generate function prologue/epilogue and RET
8040 instruction. */
8042 /* Handle a "progmem" attribute; arguments as in
8043 struct attribute_spec.handler. */
8045 static tree
8046 avr_handle_progmem_attribute (tree *node, tree name,
8047 tree args ATTRIBUTE_UNUSED,
8048 int flags ATTRIBUTE_UNUSED,
8049 bool *no_add_attrs)
8051 if (DECL_P (*node))
8053 if (TREE_CODE (*node) == TYPE_DECL)
8055 /* This is really a decl attribute, not a type attribute,
8056 but try to handle it for GCC 3.0 backwards compatibility. */
8058 tree type = TREE_TYPE (*node);
8059 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8060 tree newtype = build_type_attribute_variant (type, attr);
8062 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8063 TREE_TYPE (*node) = newtype;
8064 *no_add_attrs = true;
8066 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8068 *no_add_attrs = false;
8070 else
8072 warning (OPT_Wattributes, "%qE attribute ignored",
8073 name);
8074 *no_add_attrs = true;
8078 return NULL_TREE;
8081 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8082 struct attribute_spec.handler. */
8084 static tree
8085 avr_handle_fndecl_attribute (tree *node, tree name,
8086 tree args ATTRIBUTE_UNUSED,
8087 int flags ATTRIBUTE_UNUSED,
8088 bool *no_add_attrs)
8090 if (TREE_CODE (*node) != FUNCTION_DECL)
8092 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8093 name);
8094 *no_add_attrs = true;
8097 return NULL_TREE;
8100 static tree
8101 avr_handle_fntype_attribute (tree *node, tree name,
8102 tree args ATTRIBUTE_UNUSED,
8103 int flags ATTRIBUTE_UNUSED,
8104 bool *no_add_attrs)
8106 if (TREE_CODE (*node) != FUNCTION_TYPE)
8108 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8109 name);
8110 *no_add_attrs = true;
8113 return NULL_TREE;
8117 /* AVR attributes. */
/* Table of machine attributes handled by this backend; terminated by
   the all-NULL sentinel entry.  "signal"/"interrupt" require a decl,
   "naked"/"OS_task"/"OS_main" attach to the function type.  */
8118 static const struct attribute_spec
8119 avr_attribute_table[] =
8121 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8122 affects_type_identity } */
8123 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
8124 false },
8125 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8126 false },
8127 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8128 false },
8129 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
8130 false },
8131 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
8132 false },
8133 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
8134 false },
8135 { NULL, 0, 0, false, false, false, NULL, false }
8139 /* Look if DECL shall be placed in program memory space by
8140 means of attribute `progmem' or some address-space qualifier.
8141 Return non-zero if DECL is data that must end up in Flash and
8142 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8144 Return 2 if DECL is located in 24-bit flash address-space
8145 Return 1 if DECL is located in 16-bit flash address-space
8146 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8147 Return 0 otherwise */
8150 avr_progmem_p (tree decl, tree attributes)
8152 tree a;
/* Only variables can be placed in program memory.  */
8154 if (TREE_CODE (decl) != VAR_DECL)
8155 return 0;
8157 if (avr_decl_memx_p (decl))
8158 return 2;
8160 if (avr_decl_flash_p (decl))
8161 return 1;
8163 if (NULL_TREE
8164 != lookup_attribute ("progmem", attributes))
8165 return -1;
/* Peel arrays so the element type's attributes are inspected.  */
8167 a = decl;
8170 a = TREE_TYPE(a);
8171 while (TREE_CODE (a) == ARRAY_TYPE);
8173 if (a == error_mark_node)
8174 return 0;
/* `progmem' may also live on the (element) type.  */
8176 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
8177 return -1;
8179 return 0;
8183 /* Scan type TYP for pointer references to address space ASn.
8184 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8185 the AS are also declared to be CONST.
8186 Otherwise, return the respective address space, i.e. a value != 0. */
8188 static addr_space_t
8189 avr_nonconst_pointer_addrspace (tree typ)
/* Arrays are transparent: look at the element type.  */
8191 while (ARRAY_TYPE == TREE_CODE (typ))
8192 typ = TREE_TYPE (typ);
8194 if (POINTER_TYPE_P (typ))
8196 addr_space_t as;
8197 tree target = TREE_TYPE (typ);
8199 /* Pointer to function: Test the function's return type. */
8201 if (FUNCTION_TYPE == TREE_CODE (target))
8202 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
8204 /* "Ordinary" pointers... */
8206 while (TREE_CODE (target) == ARRAY_TYPE)
8207 target = TREE_TYPE (target);
8209 /* Pointers to non-generic address space must be const.
8210 Refuse address spaces outside the device's flash. */
8212 as = TYPE_ADDR_SPACE (target);
8214 if (!ADDR_SPACE_GENERIC_P (as)
8215 && (!TYPE_READONLY (target)
8216 || avr_addrspace[as].segment >= avr_current_device->n_flash))
/* Offending address space found: report it to the caller.  */
8218 return as;
8221 /* Scan pointer's target type. */
8223 return avr_nonconst_pointer_addrspace (target);
/* No non-const non-generic pointer anywhere in TYP.  */
8226 return ADDR_SPACE_GENERIC;
8230 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8231 go along with CONST qualifier. Writing to these address spaces should
8232 be detected and complained about as early as possible. */
/* Returns true if NODE is fine, false after issuing an error.  */
8234 static bool
8235 avr_pgm_check_var_decl (tree node)
8237 const char *reason = NULL;
8239 addr_space_t as = ADDR_SPACE_GENERIC;
8241 gcc_assert (as == 0);
8243 if (avr_log.progmem)
8244 avr_edump ("%?: %t\n", node);
/* The comma expressions below assign the offending address space to
   AS and test it in one go; REASON records what kind of entity the
   bad pointer was found in, for the diagnostics further down.  */
8246 switch (TREE_CODE (node))
8248 default:
8249 break;
8251 case VAR_DECL:
8252 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8253 reason = "variable";
8254 break;
8256 case PARM_DECL:
8257 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8258 reason = "function parameter";
8259 break;
8261 case FIELD_DECL:
8262 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8263 reason = "structure field";
8264 break;
8266 case FUNCTION_DECL:
8267 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
8269 reason = "return type of function";
8270 break;
8272 case POINTER_TYPE:
8273 if (as = avr_nonconst_pointer_addrspace (node), as)
8274 reason = "pointer";
8275 break;
/* Emit a diagnostic tailored to whether the address space simply does
   not exist on this device or merely lacks the const qualifier.  */
8278 if (reason)
8280 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8282 if (TYPE_P (node))
8283 error ("%qT uses address space %qs beyond flash of %qs",
8284 node, avr_addrspace[as].name, avr_current_device->name)
8285 else
8286 error ("%s %q+D uses address space %qs beyond flash of %qs",
8287 reason, node, avr_addrspace[as].name,
8288 avr_current_device->name);
8290 else
8292 if (TYPE_P (node))
8293 error ("pointer targeting address space %qs must be const in %qT",
8294 avr_addrspace[as].name, node);
8295 else
8296 error ("pointer targeting address space %qs must be const"
8297 " in %s %q+D",
8298 avr_addrspace[as].name, reason, node);
8302 return reason == NULL;
8306 /* Add the section attribute if the variable is in progmem. */
/* Implements `TARGET_INSERT_ATTRIBUTES'; also diagnoses non-const
   progmem variables and address spaces beyond the device's flash.  */
8308 static void
8309 avr_insert_attributes (tree node, tree *attributes)
8311 avr_pgm_check_var_decl (node);
8313 if (TREE_CODE (node) == VAR_DECL
8314 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8315 && avr_progmem_p (node, *attributes))
8317 addr_space_t as;
8318 tree node0 = node;
8320 /* For C++, we have to peel arrays in order to get correct
8321 determination of readonlyness. */
8324 node0 = TREE_TYPE (node0);
8325 while (TREE_CODE (node0) == ARRAY_TYPE);
8327 if (error_mark_node == node0)
8328 return;
8330 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8332 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8334 error ("variable %q+D located in address space %qs"
8335 " beyond flash of %qs",
8336 node, avr_addrspace[as].name, avr_current_device->name);
/* Data in flash must be read-only; complain otherwise.  */
8339 if (!TYPE_READONLY (node0)
8340 && !TREE_READONLY (node))
8342 const char *reason = "__attribute__((progmem))";
8344 if (!ADDR_SPACE_GENERIC_P (as))
8345 reason = avr_addrspace[as].name;
8347 if (avr_log.progmem)
8348 avr_edump ("\n%?: %t\n%t\n", node, node0);
8350 error ("variable %q+D must be const in order to be put into"
8351 " read-only section by means of %qs", node, reason);
8357 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8358 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8359 /* Track need of __do_clear_bss. */
/* Emits NAME as aligned local/common storage of SIZE bytes and flags
   that the __do_clear_bss startup code will be needed.  */
8361 void
8362 avr_asm_output_aligned_decl_common (FILE * stream,
8363 const_tree decl ATTRIBUTE_UNUSED,
8364 const char *name,
8365 unsigned HOST_WIDE_INT size,
8366 unsigned int align, bool local_p)
8368 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8369 There is no need to trigger __do_clear_bss code for them. */
8371 if (!STR_PREFIX_P (name, "__gnu_lto"))
8372 avr_need_clear_bss_p = true;
/* LOCAL_P selects .lcomm-style vs. .comm-style output.  */
8374 if (local_p)
8375 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8376 else
8377 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8381 /* Unnamed section callback for data_section
8382 to track need of __do_copy_data. */
8384 static void
8385 avr_output_data_section_asm_op (const void *data)
/* Anything emitted into .data needs the __do_copy_data startup code.  */
8387 avr_need_copy_data_p = true;
8389 /* Dispatch to default. */
8390 output_section_asm_op (data);
8394 /* Unnamed section callback for bss_section
8395 to track need of __do_clear_bss. */
8397 static void
8398 avr_output_bss_section_asm_op (const void *data)
/* Anything emitted into .bss needs the __do_clear_bss startup code.  */
8400 avr_need_clear_bss_p = true;
8402 /* Dispatch to default. */
8403 output_section_asm_op (data);
8407 /* Unnamed section callback for progmem*.data sections. */
/* DATA is the section name string; emit the .section directive with
   allocatable progbits flags.  */
8409 static void
8410 avr_output_progmem_section_asm_op (const void *data)
8412 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8413 (const char*) data);
8417 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8419 static void
8420 avr_asm_init_sections (void)
8422 /* Set up a section for jump tables. Alignment is handled by
8423 ASM_OUTPUT_BEFORE_CASE_LABEL. */
/* With JMP/CALL the table holds gs() word addresses and may live in
   a pure data ("a") section; otherwise it is executed-from and must
   be flagged as code ("ax").  */
8425 if (AVR_HAVE_JMP_CALL)
8427 progmem_swtable_section
8428 = get_unnamed_section (0, output_section_asm_op,
8429 "\t.section\t.progmem.gcc_sw_table"
8430 ",\"a\",@progbits");
8432 else
8434 progmem_swtable_section
8435 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8436 "\t.section\t.progmem.gcc_sw_table"
8437 ",\"ax\",@progbits");
8440 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8441 resp. `avr_need_copy_data_p'. */
8443 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8444 data_section->unnamed.callback = avr_output_data_section_asm_op;
8445 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8449 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8451 static section*
8452 avr_asm_function_rodata_section (tree decl)
8454 /* If a function is unused and optimized out by -ffunction-sections
8455 and --gc-sections, ensure that the same will happen for its jump
8456 tables by putting them into individual sections. */
8458 unsigned int flags;
8459 section * frodata;
8461 /* Get the frodata section from the default function in varasm.c
8462 but treat function-associated data-like jump tables as code
8463 rather than as user defined data. AVR has no constant pools. */
/* Temporarily let -ffunction-sections drive the rodata naming so jump
   tables get per-function sections exactly when functions do.  */
8465 int fdata = flag_data_sections;
8467 flag_data_sections = flag_function_sections;
8468 frodata = default_function_rodata_section (decl);
8469 flag_data_sections = fdata;
8470 flags = frodata->common.flags;
8473 if (frodata != readonly_data_section
8474 && flags & SECTION_NAMED)
8476 /* Adjust section flags and replace section name prefix. */
/* Prefix pairs: even index = old prefix, odd index = replacement.  */
8478 unsigned int i;
8480 static const char* const prefix[] =
8482 ".rodata", ".progmem.gcc_sw_table",
8483 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8486 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8488 const char * old_prefix = prefix[i];
8489 const char * new_prefix = prefix[i+1];
8490 const char * name = frodata->named.name;
8492 if (STR_PREFIX_P (name, old_prefix))
8494 const char *rname = ACONCAT ((new_prefix,
8495 name + strlen (old_prefix), NULL));
/* Mark as code only if the table is actually executed from, i.e.
   when the device has no JMP/CALL.  */
8496 flags &= ~SECTION_CODE;
8497 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8499 return get_section (rname, flags, frodata->named.decl);
/* Fallback: the shared jump-table section set up at init time.  */
8504 return progmem_swtable_section;
8508 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8509 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8511 static void
8512 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
/* Sections carrying a progmem address-space flag get their ".rodata"
   prefix replaced by the address space's own section name.  */
8514 if (flags & AVR_SECTION_PROGMEM)
8516 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8517 const char *old_prefix = ".rodata";
8518 const char *new_prefix = avr_addrspace[as].section_name;
8520 if (STR_PREFIX_P (name, old_prefix))
8522 const char *sname = ACONCAT ((new_prefix,
8523 name + strlen (old_prefix), NULL));
8524 default_elf_asm_named_section (sname, flags, decl);
8525 return;
8528 default_elf_asm_named_section (new_prefix, flags, decl);
8529 return;
/* Initialized data and read-only data need __do_copy_data ...  */
8532 if (!avr_need_copy_data_p)
8533 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8534 || STR_PREFIX_P (name, ".rodata")
8535 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
/* ... while zero-initialized data needs __do_clear_bss.  */
8537 if (!avr_need_clear_bss_p)
8538 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8540 default_elf_asm_named_section (name, flags, decl);
8544 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8546 static unsigned int
8547 avr_section_type_flags (tree decl, const char *name, int reloc)
8549 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* .noinit holds uninitialized data that the startup code skips.  */
8551 if (STR_PREFIX_P (name, ".noinit"))
8553 if (decl && TREE_CODE (decl) == VAR_DECL
8554 && DECL_INITIAL (decl) == NULL_TREE)
8555 flags |= SECTION_BSS; /* @nobits */
8556 else
8557 warning (0, "only uninitialized variables can be placed in the "
8558 ".noinit section");
8561 if (decl && DECL_P (decl)
8562 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8564 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8566 /* Attribute progmem puts data in generic address space.
8567 Set section flags as if it was in __flash to get the right
8568 section prefix in the remainder. */
8570 if (ADDR_SPACE_GENERIC_P (as))
8571 as = ADDR_SPACE_FLASH;
/* Encode the address space in the machine-dependent flag bits and
   strip write/bss: flash data is read-only and initialized.  */
8573 flags |= as * SECTION_MACH_DEP;
8574 flags &= ~SECTION_WRITE;
8575 flags &= ~SECTION_BSS;
8578 return flags;
8582 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8584 static void
8585 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8587 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8588 readily available, see PR34734. So we postpone the warning
8589 about uninitialized data in program memory section until here. */
8591 if (new_decl_p
8592 && decl && DECL_P (decl)
8593 && NULL_TREE == DECL_INITIAL (decl)
8594 && !DECL_EXTERNAL (decl)
8595 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8597 warning (OPT_Wuninitialized,
8598 "uninitialized variable %q+D put into "
8599 "program memory area", decl);
8602 default_encode_section_info (decl, rtl, new_decl_p);
/* For data symbols, record the decl's address space in the SYMBOL_REF
   so later address computations know where the object lives.  */
8604 if (decl && DECL_P (decl)
8605 && TREE_CODE (decl) != FUNCTION_DECL
8606 && MEM_P (rtl)
8607 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8609 rtx sym = XEXP (rtl, 0);
8610 tree type = TREE_TYPE (decl);
8611 if (type == error_mark_node)
8612 return;
8613 addr_space_t as = TYPE_ADDR_SPACE (type);
8615 /* PSTR strings are in generic space but located in flash:
8616 patch address space. */
8618 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8619 as = ADDR_SPACE_FLASH;
8621 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8626 /* Implement `TARGET_ASM_SELECT_SECTION' */
8628 static section *
8629 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8631 section * sect = default_elf_select_section (decl, reloc, align);
8633 if (decl && DECL_P (decl)
8634 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8636 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8638 /* __progmem__ goes in generic space but shall be allocated to
8639 .progmem.data */
8641 if (ADDR_SPACE_GENERIC_P (as))
8642 as = ADDR_SPACE_FLASH;
/* Named section: swap the ".rodata" prefix for the address space's
   own section name, keeping flags and decl association.  */
8644 if (sect->common.flags & SECTION_NAMED)
8646 const char * name = sect->named.name;
8647 const char * old_prefix = ".rodata";
8648 const char * new_prefix = avr_addrspace[as].section_name;
8650 if (STR_PREFIX_P (name, old_prefix))
8652 const char *sname = ACONCAT ((new_prefix,
8653 name + strlen (old_prefix), NULL));
8654 return get_section (sname, sect->common.flags, sect->named.decl);
/* Otherwise fall back to one lazily created unnamed section per
   address space.  */
8658 if (!progmem_section[as])
8660 progmem_section[as]
8661 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8662 avr_addrspace[as].section_name);
8665 return progmem_section[as];
8668 return sect;
8671 /* Implement `TARGET_ASM_FILE_START'. */
8672 /* Outputs some text at the start of each assembler file. */
8674 static void
8675 avr_file_start (void)
8677 int sfr_offset = avr_current_arch->sfr_offset;
8679 if (avr_current_arch->asm_only)
8680 error ("MCU %qs supported for assembler only", avr_current_device->name);
8682 default_file_start ();
8684 /* Print I/O addresses of some SFRs used with IN and OUT. */
/* The memory-mapped SFR addresses are converted to I/O addresses by
   subtracting the architecture's SFR offset.  Registers that do not
   exist on the device are simply omitted.  */
8686 if (AVR_HAVE_SPH)
8687 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8689 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8690 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8691 if (AVR_HAVE_RAMPZ)
8692 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8693 if (AVR_HAVE_RAMPY)
8694 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8695 if (AVR_HAVE_RAMPX)
8696 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8697 if (AVR_HAVE_RAMPD)
8698 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8699 if (AVR_XMEGA)
8700 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
/* Fixed register numbers used throughout the generated assembly.  */
8701 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8702 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8706 /* Implement `TARGET_ASM_FILE_END'. */
8707 /* Outputs to the stdio stream FILE some
8708 appropriate text to go at the end of an assembler file. */
8710 static void
8711 avr_file_end (void)
8713 /* Output these only if there is anything in the
8714 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8715 input section(s) - some code size can be saved by not
8716 linking in the initialization code from libgcc if resp.
8717 sections are empty, see PR18145. */
/* The two flags were accumulated by the section callbacks above.  */
8719 if (avr_need_copy_data_p)
8720 fputs (".global __do_copy_data\n", asm_out_file);
8722 if (avr_need_clear_bss_p)
8723 fputs (".global __do_clear_bss\n", asm_out_file);
8727 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8728 /* Choose the order in which to allocate hard registers for
8729 pseudo-registers local to a basic block.
8731 Store the desired register order in the array `reg_alloc_order'.
8732 Element 0 should be the register to allocate first; element 1, the
8733 next register; and so on. */
8735 void
8736 avr_adjust_reg_alloc_order (void)
8738 unsigned int i;
/* Default order: prefer r24/r25, then the upper call-clobbered
   registers, pointer registers last among the preferred ones.  */
8739 static const int order_0[] =
8741 24, 25,
8742 18, 19, 20, 21, 22, 23,
8743 30, 31,
8744 26, 27, 28, 29,
8745 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8746 0, 1,
8747 32, 33, 34, 35
/* -morder1: start at r18 counting upwards.  */
8749 static const int order_1[] =
8751 18, 19, 20, 21, 22, 23, 24, 25,
8752 30, 31,
8753 26, 27, 28, 29,
8754 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8755 0, 1,
8756 32, 33, 34, 35
/* -morder2: start at r25 counting downwards.  */
8758 static const int order_2[] =
8760 25, 24, 23, 22, 21, 20, 19, 18,
8761 30, 31,
8762 26, 27, 28, 29,
8763 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8764 1, 0,
8765 32, 33, 34, 35
8768 const int *order = (TARGET_ORDER_1 ? order_1 :
8769 TARGET_ORDER_2 ? order_2 :
8770 order_0);
/* All three tables have the same length, so order_0's size is safe.  */
8771 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8772 reg_alloc_order[i] = order[i];
8776 /* Implement `TARGET_REGISTER_MOVE_COST' */
8778 static int
8779 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8780 reg_class_t from, reg_class_t to)
8782 return (from == STACK_REG ? 6
8783 : to == STACK_REG ? 12
8784 : 2);
8788 /* Implement `TARGET_MEMORY_MOVE_COST' */
8790 static int
8791 avr_memory_move_cost (enum machine_mode mode,
8792 reg_class_t rclass ATTRIBUTE_UNUSED,
8793 bool in ATTRIBUTE_UNUSED)
8795 return (mode == QImode ? 2
8796 : mode == HImode ? 4
8797 : mode == SImode ? 8
8798 : mode == SFmode ? 8
8799 : 16);
8803 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8804 cost of an RTX operand given its context. X is the rtx of the
8805 operand, MODE is its mode, and OUTER is the rtx_code of this
8806 operand's parent operator. */
8808 static int
8809 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
8810 int opno, bool speed)
8812 enum rtx_code code = GET_CODE (x);
8813 int total;
8815 switch (code)
/* Registers (and subregs of them) are free as operands.  */
8817 case REG:
8818 case SUBREG:
8819 return 0;
/* Loading a constant costs one insn per byte of MODE.  */
8821 case CONST_INT:
8822 case CONST_FIXED:
8823 case CONST_DOUBLE:
8824 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
8826 default:
8827 break;
/* Anything else: recurse into the full cost computation.  */
8830 total = 0;
8831 avr_rtx_costs (x, code, outer, opno, &total, speed);
8832 return total;
8835 /* Worker function for AVR backend's rtx_cost function.
8836 X is rtx expression whose cost is to be calculated.
8837 Return true if the complete cost has been computed.
8838 Return false if subexpressions should be scanned.
8839 In either case, *TOTAL contains the cost result. */
8841 static bool
8842 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8843 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8845 enum rtx_code code = (enum rtx_code) codearg;
8846 enum machine_mode mode = GET_MODE (x);
8847 HOST_WIDE_INT val;
8849 switch (code)
8851 case CONST_INT:
8852 case CONST_FIXED:
8853 case CONST_DOUBLE:
8854 case SYMBOL_REF:
8855 case CONST:
8856 case LABEL_REF:
8857 /* Immediate constants are as cheap as registers. */
8858 *total = 0;
8859 return true;
8861 case MEM:
8862 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8863 return true;
8865 case NEG:
8866 switch (mode)
8868 case QImode:
8869 case SFmode:
8870 *total = COSTS_N_INSNS (1);
8871 break;
8873 case HImode:
8874 case PSImode:
8875 case SImode:
8876 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8877 break;
8879 default:
8880 return false;
8882 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8883 return true;
8885 case ABS:
8886 switch (mode)
8888 case QImode:
8889 case SFmode:
8890 *total = COSTS_N_INSNS (1);
8891 break;
8893 default:
8894 return false;
8896 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8897 return true;
8899 case NOT:
8900 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8901 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8902 return true;
8904 case ZERO_EXTEND:
8905 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8906 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8907 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8908 return true;
8910 case SIGN_EXTEND:
8911 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8912 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8913 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8914 return true;
8916 case PLUS:
8917 switch (mode)
8919 case QImode:
8920 if (AVR_HAVE_MUL
8921 && MULT == GET_CODE (XEXP (x, 0))
8922 && register_operand (XEXP (x, 1), QImode))
8924 /* multiply-add */
8925 *total = COSTS_N_INSNS (speed ? 4 : 3);
8926 /* multiply-add with constant: will be split and load constant. */
8927 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8928 *total = COSTS_N_INSNS (1) + *total;
8929 return true;
8931 *total = COSTS_N_INSNS (1);
8932 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8933 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8934 break;
8936 case HImode:
8937 if (AVR_HAVE_MUL
8938 && (MULT == GET_CODE (XEXP (x, 0))
8939 || ASHIFT == GET_CODE (XEXP (x, 0)))
8940 && register_operand (XEXP (x, 1), HImode)
8941 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8942 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8944 /* multiply-add */
8945 *total = COSTS_N_INSNS (speed ? 5 : 4);
8946 /* multiply-add with constant: will be split and load constant. */
8947 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8948 *total = COSTS_N_INSNS (1) + *total;
8949 return true;
8951 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8953 *total = COSTS_N_INSNS (2);
8954 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8955 speed);
8957 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8958 *total = COSTS_N_INSNS (1);
8959 else
8960 *total = COSTS_N_INSNS (2);
8961 break;
8963 case PSImode:
8964 if (!CONST_INT_P (XEXP (x, 1)))
8966 *total = COSTS_N_INSNS (3);
8967 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8968 speed);
8970 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8971 *total = COSTS_N_INSNS (2);
8972 else
8973 *total = COSTS_N_INSNS (3);
8974 break;
8976 case SImode:
8977 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8979 *total = COSTS_N_INSNS (4);
8980 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8981 speed);
8983 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8984 *total = COSTS_N_INSNS (1);
8985 else
8986 *total = COSTS_N_INSNS (4);
8987 break;
8989 default:
8990 return false;
8992 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8993 return true;
8995 case MINUS:
8996 if (AVR_HAVE_MUL
8997 && QImode == mode
8998 && register_operand (XEXP (x, 0), QImode)
8999 && MULT == GET_CODE (XEXP (x, 1)))
9001 /* multiply-sub */
9002 *total = COSTS_N_INSNS (speed ? 4 : 3);
9003 /* multiply-sub with constant: will be split and load constant. */
9004 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9005 *total = COSTS_N_INSNS (1) + *total;
9006 return true;
9008 if (AVR_HAVE_MUL
9009 && HImode == mode
9010 && register_operand (XEXP (x, 0), HImode)
9011 && (MULT == GET_CODE (XEXP (x, 1))
9012 || ASHIFT == GET_CODE (XEXP (x, 1)))
9013 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
9014 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
9016 /* multiply-sub */
9017 *total = COSTS_N_INSNS (speed ? 5 : 4);
9018 /* multiply-sub with constant: will be split and load constant. */
9019 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9020 *total = COSTS_N_INSNS (1) + *total;
9021 return true;
9023 /* FALLTHRU */
9024 case AND:
9025 case IOR:
9026 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9027 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9028 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9029 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9030 return true;
9032 case XOR:
9033 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9034 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9035 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9036 return true;
9038 case MULT:
9039 switch (mode)
9041 case QImode:
9042 if (AVR_HAVE_MUL)
9043 *total = COSTS_N_INSNS (!speed ? 3 : 4);
9044 else if (!speed)
9045 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9046 else
9047 return false;
9048 break;
9050 case HImode:
9051 if (AVR_HAVE_MUL)
9053 rtx op0 = XEXP (x, 0);
9054 rtx op1 = XEXP (x, 1);
9055 enum rtx_code code0 = GET_CODE (op0);
9056 enum rtx_code code1 = GET_CODE (op1);
9057 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
9058 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
9060 if (ex0
9061 && (u8_operand (op1, HImode)
9062 || s8_operand (op1, HImode)))
9064 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9065 return true;
9067 if (ex0
9068 && register_operand (op1, HImode))
9070 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9071 return true;
9073 else if (ex0 || ex1)
9075 *total = COSTS_N_INSNS (!speed ? 3 : 5);
9076 return true;
9078 else if (register_operand (op0, HImode)
9079 && (u8_operand (op1, HImode)
9080 || s8_operand (op1, HImode)))
9082 *total = COSTS_N_INSNS (!speed ? 6 : 9);
9083 return true;
9085 else
9086 *total = COSTS_N_INSNS (!speed ? 7 : 10);
9088 else if (!speed)
9089 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9090 else
9091 return false;
9092 break;
9094 case PSImode:
9095 if (!speed)
9096 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9097 else
9098 *total = 10;
9099 break;
9101 case SImode:
9102 if (AVR_HAVE_MUL)
9104 if (!speed)
9106 /* Add some additional costs besides CALL like moves etc. */
9108 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9110 else
9112 /* Just a rough estimate. Even with -O2 we don't want bulky
9113 code expanded inline. */
9115 *total = COSTS_N_INSNS (25);
9118 else
9120 if (speed)
9121 *total = COSTS_N_INSNS (300);
9122 else
9123 /* Add some additional costs besides CALL like moves etc. */
9124 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9127 return true;
9129 default:
9130 return false;
9132 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9133 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9134 return true;
9136 case DIV:
9137 case MOD:
9138 case UDIV:
9139 case UMOD:
9140 if (!speed)
9141 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9142 else
9143 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
9144 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9145 /* For div/mod with const-int divisor we have at least the cost of
9146 loading the divisor. */
9147 if (CONST_INT_P (XEXP (x, 1)))
9148 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
9149 /* Add some overall penalty for clobbering and moving around registers */
9150 *total += COSTS_N_INSNS (2);
9151 return true;
9153 case ROTATE:
9154 switch (mode)
9156 case QImode:
9157 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
9158 *total = COSTS_N_INSNS (1);
9160 break;
9162 case HImode:
9163 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
9164 *total = COSTS_N_INSNS (3);
9166 break;
9168 case SImode:
9169 if (CONST_INT_P (XEXP (x, 1)))
9170 switch (INTVAL (XEXP (x, 1)))
9172 case 8:
9173 case 24:
9174 *total = COSTS_N_INSNS (5);
9175 break;
9176 case 16:
9177 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
9178 break;
9180 break;
9182 default:
9183 return false;
9185 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9186 return true;
9188 case ASHIFT:
9189 switch (mode)
9191 case QImode:
9192 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9194 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9195 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9196 speed);
9198 else
9200 val = INTVAL (XEXP (x, 1));
9201 if (val == 7)
9202 *total = COSTS_N_INSNS (3);
9203 else if (val >= 0 && val <= 7)
9204 *total = COSTS_N_INSNS (val);
9205 else
9206 *total = COSTS_N_INSNS (1);
9208 break;
9210 case HImode:
9211 if (AVR_HAVE_MUL)
9213 if (const_2_to_7_operand (XEXP (x, 1), HImode)
9214 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9215 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9217 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9218 return true;
9222 if (const1_rtx == (XEXP (x, 1))
9223 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9225 *total = COSTS_N_INSNS (2);
9226 return true;
9229 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9231 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9233 speed);
9235 else
9236 switch (INTVAL (XEXP (x, 1)))
9238 case 0:
9239 *total = 0;
9240 break;
9241 case 1:
9242 case 8:
9243 *total = COSTS_N_INSNS (2);
9244 break;
9245 case 9:
9246 *total = COSTS_N_INSNS (3);
9247 break;
9248 case 2:
9249 case 3:
9250 case 10:
9251 case 15:
9252 *total = COSTS_N_INSNS (4);
9253 break;
9254 case 7:
9255 case 11:
9256 case 12:
9257 *total = COSTS_N_INSNS (5);
9258 break;
9259 case 4:
9260 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9261 break;
9262 case 6:
9263 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9264 break;
9265 case 5:
9266 *total = COSTS_N_INSNS (!speed ? 5 : 10);
9267 break;
9268 default:
9269 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9270 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9271 speed);
9273 break;
9275 case PSImode:
9276 if (!CONST_INT_P (XEXP (x, 1)))
9278 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9280 else
9281 switch (INTVAL (XEXP (x, 1)))
9283 case 0:
9284 *total = 0;
9285 break;
9286 case 1:
9287 case 8:
9288 case 16:
9289 *total = COSTS_N_INSNS (3);
9290 break;
9291 case 23:
9292 *total = COSTS_N_INSNS (5);
9293 break;
9294 default:
9295 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9296 break;
9298 break;
9300 case SImode:
9301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9303 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9304 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9305 speed);
9307 else
9308 switch (INTVAL (XEXP (x, 1)))
9310 case 0:
9311 *total = 0;
9312 break;
9313 case 24:
9314 *total = COSTS_N_INSNS (3);
9315 break;
9316 case 1:
9317 case 8:
9318 case 16:
9319 *total = COSTS_N_INSNS (4);
9320 break;
9321 case 31:
9322 *total = COSTS_N_INSNS (6);
9323 break;
9324 case 2:
9325 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9326 break;
9327 default:
9328 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9329 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9330 speed);
9332 break;
9334 default:
9335 return false;
9337 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9338 return true;
9340 case ASHIFTRT:
9341 switch (mode)
9343 case QImode:
9344 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9346 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9347 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9348 speed);
9350 else
9352 val = INTVAL (XEXP (x, 1));
9353 if (val == 6)
9354 *total = COSTS_N_INSNS (4);
9355 else if (val == 7)
9356 *total = COSTS_N_INSNS (2);
9357 else if (val >= 0 && val <= 7)
9358 *total = COSTS_N_INSNS (val);
9359 else
9360 *total = COSTS_N_INSNS (1);
9362 break;
9364 case HImode:
9365 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9367 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9368 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9369 speed);
9371 else
9372 switch (INTVAL (XEXP (x, 1)))
9374 case 0:
9375 *total = 0;
9376 break;
9377 case 1:
9378 *total = COSTS_N_INSNS (2);
9379 break;
9380 case 15:
9381 *total = COSTS_N_INSNS (3);
9382 break;
9383 case 2:
9384 case 7:
9385 case 8:
9386 case 9:
9387 *total = COSTS_N_INSNS (4);
9388 break;
9389 case 10:
9390 case 14:
9391 *total = COSTS_N_INSNS (5);
9392 break;
9393 case 11:
9394 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9395 break;
9396 case 12:
9397 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9398 break;
9399 case 6:
9400 case 13:
9401 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9402 break;
9403 default:
9404 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9405 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9406 speed);
9408 break;
9410 case PSImode:
9411 if (!CONST_INT_P (XEXP (x, 1)))
9413 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9415 else
9416 switch (INTVAL (XEXP (x, 1)))
9418 case 0:
9419 *total = 0;
9420 break;
9421 case 1:
9422 *total = COSTS_N_INSNS (3);
9423 break;
9424 case 16:
9425 case 8:
9426 *total = COSTS_N_INSNS (5);
9427 break;
9428 case 23:
9429 *total = COSTS_N_INSNS (4);
9430 break;
9431 default:
9432 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9433 break;
9435 break;
9437 case SImode:
9438 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9440 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9441 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9442 speed);
9444 else
9445 switch (INTVAL (XEXP (x, 1)))
9447 case 0:
9448 *total = 0;
9449 break;
9450 case 1:
9451 *total = COSTS_N_INSNS (4);
9452 break;
9453 case 8:
9454 case 16:
9455 case 24:
9456 *total = COSTS_N_INSNS (6);
9457 break;
9458 case 2:
9459 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9460 break;
9461 case 31:
9462 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9463 break;
9464 default:
9465 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9466 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9467 speed);
9469 break;
9471 default:
9472 return false;
9474 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9475 return true;
9477 case LSHIFTRT:
9478 switch (mode)
9480 case QImode:
9481 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9483 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9484 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9485 speed);
9487 else
9489 val = INTVAL (XEXP (x, 1));
9490 if (val == 7)
9491 *total = COSTS_N_INSNS (3);
9492 else if (val >= 0 && val <= 7)
9493 *total = COSTS_N_INSNS (val);
9494 else
9495 *total = COSTS_N_INSNS (1);
9497 break;
9499 case HImode:
9500 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9502 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9503 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9504 speed);
9506 else
9507 switch (INTVAL (XEXP (x, 1)))
9509 case 0:
9510 *total = 0;
9511 break;
9512 case 1:
9513 case 8:
9514 *total = COSTS_N_INSNS (2);
9515 break;
9516 case 9:
9517 *total = COSTS_N_INSNS (3);
9518 break;
9519 case 2:
9520 case 10:
9521 case 15:
9522 *total = COSTS_N_INSNS (4);
9523 break;
9524 case 7:
9525 case 11:
9526 *total = COSTS_N_INSNS (5);
9527 break;
9528 case 3:
9529 case 12:
9530 case 13:
9531 case 14:
9532 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9533 break;
9534 case 4:
9535 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9536 break;
9537 case 5:
9538 case 6:
9539 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9540 break;
9541 default:
9542 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9543 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9544 speed);
9546 break;
9548 case PSImode:
9549 if (!CONST_INT_P (XEXP (x, 1)))
9551 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9553 else
9554 switch (INTVAL (XEXP (x, 1)))
9556 case 0:
9557 *total = 0;
9558 break;
9559 case 1:
9560 case 8:
9561 case 16:
9562 *total = COSTS_N_INSNS (3);
9563 break;
9564 case 23:
9565 *total = COSTS_N_INSNS (5);
9566 break;
9567 default:
9568 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9569 break;
9571 break;
9573 case SImode:
9574 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9576 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9577 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9578 speed);
9580 else
9581 switch (INTVAL (XEXP (x, 1)))
9583 case 0:
9584 *total = 0;
9585 break;
9586 case 1:
9587 *total = COSTS_N_INSNS (4);
9588 break;
9589 case 2:
9590 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9591 break;
9592 case 8:
9593 case 16:
9594 case 24:
9595 *total = COSTS_N_INSNS (4);
9596 break;
9597 case 31:
9598 *total = COSTS_N_INSNS (6);
9599 break;
9600 default:
9601 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9602 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9603 speed);
9605 break;
9607 default:
9608 return false;
9610 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9611 return true;
9613 case COMPARE:
9614 switch (GET_MODE (XEXP (x, 0)))
9616 case QImode:
9617 *total = COSTS_N_INSNS (1);
9618 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9619 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9620 break;
9622 case HImode:
9623 *total = COSTS_N_INSNS (2);
9624 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9625 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9626 else if (INTVAL (XEXP (x, 1)) != 0)
9627 *total += COSTS_N_INSNS (1);
9628 break;
9630 case PSImode:
9631 *total = COSTS_N_INSNS (3);
9632 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9633 *total += COSTS_N_INSNS (2);
9634 break;
9636 case SImode:
9637 *total = COSTS_N_INSNS (4);
9638 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9639 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9640 else if (INTVAL (XEXP (x, 1)) != 0)
9641 *total += COSTS_N_INSNS (3);
9642 break;
9644 default:
9645 return false;
9647 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9648 return true;
9650 case TRUNCATE:
9651 if (AVR_HAVE_MUL
9652 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9653 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9654 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9656 if (QImode == mode || HImode == mode)
9658 *total = COSTS_N_INSNS (2);
9659 return true;
9662 break;
9664 default:
9665 break;
9667 return false;
9671 /* Implement `TARGET_RTX_COSTS'. */
9673 static bool
9674 avr_rtx_costs (rtx x, int codearg, int outer_code,
9675 int opno, int *total, bool speed)
9677 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9678 opno, total, speed);
9680 if (avr_log.rtx_costs)
9682 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9683 done, speed ? "speed" : "size", *total, outer_code, x);
9686 return done;
9690 /* Implement `TARGET_ADDRESS_COST'. */
9692 static int
9693 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9694 addr_space_t as ATTRIBUTE_UNUSED,
9695 bool speed ATTRIBUTE_UNUSED)
9697 int cost = 4;
9699 if (GET_CODE (x) == PLUS
9700 && CONST_INT_P (XEXP (x, 1))
9701 && (REG_P (XEXP (x, 0))
9702 || GET_CODE (XEXP (x, 0)) == SUBREG))
9704 if (INTVAL (XEXP (x, 1)) >= 61)
9705 cost = 18;
9707 else if (CONSTANT_ADDRESS_P (x))
9709 if (optimize > 0
9710 && io_address_operand (x, QImode))
9711 cost = 2;
9714 if (avr_log.address_cost)
9715 avr_edump ("\n%?: %d = %r\n", cost, x);
9717 return cost;
9720 /* Test for extra memory constraint 'Q'.
9721 It's a memory address based on Y or Z pointer with valid displacement. */
9724 extra_constraint_Q (rtx x)
9726 int ok = 0;
9728 if (GET_CODE (XEXP (x,0)) == PLUS
9729 && REG_P (XEXP (XEXP (x,0), 0))
9730 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9731 && (INTVAL (XEXP (XEXP (x,0), 1))
9732 <= MAX_LD_OFFSET (GET_MODE (x))))
9734 rtx xx = XEXP (XEXP (x,0), 0);
9735 int regno = REGNO (xx);
9737 ok = (/* allocate pseudos */
9738 regno >= FIRST_PSEUDO_REGISTER
9739 /* strictly check */
9740 || regno == REG_Z || regno == REG_Y
9741 /* XXX frame & arg pointer checks */
9742 || xx == frame_pointer_rtx
9743 || xx == arg_pointer_rtx);
9745 if (avr_log.constraints)
9746 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9747 ok, reload_completed, reload_in_progress, x);
9750 return ok;
9753 /* Convert condition code CONDITION to the valid AVR condition code. */
9755 RTX_CODE
9756 avr_normalize_condition (RTX_CODE condition)
9758 switch (condition)
9760 case GT:
9761 return GE;
9762 case GTU:
9763 return GEU;
9764 case LE:
9765 return LT;
9766 case LEU:
9767 return LTU;
9768 default:
9769 gcc_unreachable ();
9773 /* Helper function for `avr_reorg'. */
9775 static rtx
9776 avr_compare_pattern (rtx insn)
9778 rtx pattern = single_set (insn);
9780 if (pattern
9781 && NONJUMP_INSN_P (insn)
9782 && SET_DEST (pattern) == cc0_rtx
9783 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9785 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9786 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9788 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9789 They must not be swapped, thus skip them. */
9791 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9792 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9793 return pattern;
9796 return NULL_RTX;
9799 /* Helper function for `avr_reorg'. */
9801 /* Expansion of switch/case decision trees leads to code like
9803 cc0 = compare (Reg, Num)
9804 if (cc0 == 0)
9805 goto L1
9807 cc0 = compare (Reg, Num)
9808 if (cc0 > 0)
9809 goto L2
9811 The second comparison is superfluous and can be deleted.
9812 The second jump condition can be transformed from a
9813 "difficult" one to a "simple" one because "cc0 > 0" and
9814 "cc0 >= 0" will have the same effect here.
9816 This function relies on the way switch/case is being expanded
9817 as binary decision tree. For example code see PR 49903.
9819 Return TRUE if optimization performed.
9820 Return FALSE if nothing changed.
9822 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9824 We don't want to do this in text peephole because it is
9825 tedious to work out jump offsets there and the second comparison
9826 might have been transformed by `avr_reorg'.
9828 RTL peephole won't do because peephole2 does not scan across
9829 basic blocks. */
/* Collapse a compare1-branch1-compare2-branch2 sequence in which the
   second comparison duplicates the first; return true iff changed.  */
9831 static bool
9832 avr_reorg_remove_redundant_compare (rtx insn1)
9834 rtx comp1, ifelse1, xcond1, branch1;
9835 rtx comp2, ifelse2, xcond2, branch2, insn2;
9836 enum rtx_code code;
9837 rtx jump, target, cond;
9839 /* Look out for: compare1 - branch1 - compare2 - branch2 */
/* Bail out early unless the exact 4-insn shape follows INSN1.  */
9841 branch1 = next_nonnote_nondebug_insn (insn1);
9842 if (!branch1 || !JUMP_P (branch1))
9843 return false;
9845 insn2 = next_nonnote_nondebug_insn (branch1);
9846 if (!insn2 || !avr_compare_pattern (insn2))
9847 return false;
9849 branch2 = next_nonnote_nondebug_insn (insn2);
9850 if (!branch2 || !JUMP_P (branch2))
9851 return false;
9853 comp1 = avr_compare_pattern (insn1);
9854 comp2 = avr_compare_pattern (insn2);
9855 xcond1 = single_set (branch1);
9856 xcond2 = single_set (branch2);
/* Both branches must be single sets of the pc from an IF_THEN_ELSE,
   and the two comparisons must be rtx-identical.  */
9858 if (!comp1 || !comp2
9859 || !rtx_equal_p (comp1, comp2)
9860 || !xcond1 || SET_DEST (xcond1) != pc_rtx
9861 || !xcond2 || SET_DEST (xcond2) != pc_rtx
9862 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
9863 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
9865 return false;
9868 comp1 = SET_SRC (comp1);
9869 ifelse1 = SET_SRC (xcond1);
9870 ifelse2 = SET_SRC (xcond2);
9872 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
/* First branch must be "if (cc0 == 0) goto L1", second must branch
   on some cc0-against-zero comparison; both fall through via (pc).  */
9874 if (EQ != GET_CODE (XEXP (ifelse1, 0))
9875 || !REG_P (XEXP (comp1, 0))
9876 || !CONST_INT_P (XEXP (comp1, 1))
9877 || XEXP (ifelse1, 2) != pc_rtx
9878 || XEXP (ifelse2, 2) != pc_rtx
9879 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
9880 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
9881 || !COMPARISON_P (XEXP (ifelse2, 0))
9882 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
9883 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
9884 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
9885 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
9887 return false;
9890 /* We filtered the insn sequence to look like
9892 (set (cc0)
9893 (compare (reg:M N)
9894 (const_int VAL)))
9895 (set (pc)
9896 (if_then_else (eq (cc0)
9897 (const_int 0))
9898 (label_ref L1)
9899 (pc)))
9901 (set (cc0)
9902 (compare (reg:M N)
9903 (const_int VAL)))
9904 (set (pc)
9905 (if_then_else (CODE (cc0)
9906 (const_int 0))
9907 (label_ref L2)
9908 (pc)))
9911 code = GET_CODE (XEXP (ifelse2, 0));
9913 /* Map GT/GTU to GE/GEU which is easier for AVR.
9914 The first two instructions compare/branch on EQ
9915 so we may replace the difficult
9917 if (x == VAL) goto L1;
9918 if (x > VAL) goto L2;
9920 with easy
9922 if (x == VAL) goto L1;
9923 if (x >= VAL) goto L2;
9925 Similarly, replace LE/LEU by LT/LTU. */
9927 switch (code)
9929 case EQ:
9930 case LT: case LTU:
9931 case GE: case GEU:
9932 break;
9934 case LE: case LEU:
9935 case GT: case GTU:
9936 code = avr_normalize_condition (code);
9937 break;
9939 default:
9940 return false;
9943 /* Wrap the branches into UNSPECs so they won't be changed or
9944 optimized in the remainder. */
/* Re-emit branch1 as an unspec-branch after INSN1 ...  */
9946 target = XEXP (XEXP (ifelse1, 1), 0);
9947 cond = XEXP (ifelse1, 0);
9948 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
9950 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
/* ... and branch2 likewise, with the normalized condition code.  */
9952 target = XEXP (XEXP (ifelse2, 1), 0);
9953 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9954 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
9956 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
9958 /* The comparisons in insn1 and insn2 are exactly the same;
9959 insn2 is superfluous so delete it. */
9961 delete_insn (insn2);
9962 delete_insn (branch1);
9963 delete_insn (branch2);
9965 return true;
9969 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9970 /* Optimize conditional jumps. */
9972 static void
9973 avr_reorg (void)
9975 rtx insn = get_insns();
9977 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
/* Only cc0-compare insns (per avr_compare_pattern) are of interest.  */
9979 rtx pattern = avr_compare_pattern (insn);
9981 if (!pattern)
9982 continue;
9984 if (optimize
9985 && avr_reorg_remove_redundant_compare (insn))
9987 continue;
9990 if (compare_diff_p (insn))
9992 /* Now we work under compare insn with difficult branch. */
/* NOTE(review): next_real_insn's result is not NULL-checked before
   PATTERN (next) — presumably a branch always follows a difficult
   compare; confirm against compare_diff_p's contract.  */
9994 rtx next = next_real_insn (insn);
9995 rtx pat = PATTERN (next);
9997 pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and invert the branch's
   condition so the branch becomes a "simple" one.  */
9999 if (true_regnum (XEXP (pattern, 0)) >= 0
10000 && true_regnum (XEXP (pattern, 1)) >= 0)
10002 rtx x = XEXP (pattern, 0);
10003 rtx src = SET_SRC (pat);
10004 rtx t = XEXP (src,0);
10005 PUT_CODE (t, swap_condition (GET_CODE (t)));
10006 XEXP (pattern, 0) = XEXP (pattern, 1);
10007 XEXP (pattern, 1) = x;
10008 INSN_CODE (next) = -1;
10010 else if (true_regnum (XEXP (pattern, 0)) >= 0
10011 && XEXP (pattern, 1) == const0_rtx)
10013 /* This is a tst insn, we can reverse it. */
10014 rtx src = SET_SRC (pat);
10015 rtx t = XEXP (src,0);
10017 PUT_CODE (t, swap_condition (GET_CODE (t)));
10018 XEXP (pattern, 1) = XEXP (pattern, 0);
10019 XEXP (pattern, 0) = const0_rtx;
10020 INSN_CODE (next) = -1;
10021 INSN_CODE (insn) = -1;
/* reg-const compare: rewrite x <cond> N as x <cond'> N+1 when
   avr_simplify_comparison_p says that is safe and simpler.  */
10023 else if (true_regnum (XEXP (pattern, 0)) >= 0
10024 && CONST_INT_P (XEXP (pattern, 1)))
10026 rtx x = XEXP (pattern, 1);
10027 rtx src = SET_SRC (pat);
10028 rtx t = XEXP (src,0);
10029 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
10031 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
10033 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
10034 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
10035 INSN_CODE (next) = -1;
10036 INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   The LSB of a return value lives in R24 by convention here.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
10052 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10054 static bool
10055 avr_function_value_regno_p (const unsigned int regno)
10057 return (regno == avr_ret_register ());
10061 /* Implement `TARGET_LIBCALL_VALUE'. */
10062 /* Create an RTX representing the place where a
10063 library function returns a value of mode MODE. */
10065 static rtx
10066 avr_libcall_value (enum machine_mode mode,
10067 const_rtx func ATTRIBUTE_UNUSED)
10069 int offs = GET_MODE_SIZE (mode);
10071 if (offs <= 4)
10072 offs = (offs + 1) & ~1;
10074 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
10078 /* Implement `TARGET_FUNCTION_VALUE'. */
10079 /* Create an RTX representing the place where a
10080 function returns a value of data type VALTYPE. */
10082 static rtx
10083 avr_function_value (const_tree type,
10084 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
10085 bool outgoing ATTRIBUTE_UNUSED)
10087 unsigned int offs;
10089 if (TYPE_MODE (type) != BLKmode)
10090 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
10092 offs = int_size_in_bytes (type);
10093 if (offs < 2)
10094 offs = 2;
10095 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
10096 offs = GET_MODE_SIZE (SImode);
10097 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
10098 offs = GET_MODE_SIZE (DImode);
10100 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
10104 test_hard_reg_class (enum reg_class rclass, rtx x)
10106 int regno = true_regnum (x);
10107 if (regno < 0)
10108 return 0;
10110 if (TEST_HARD_REG_CLASS (rclass, regno))
10111 return 1;
10113 return 0;
10117 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10118 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10120 static bool
10121 avr_2word_insn_p (rtx insn)
/* errata_skip presumably flags devices with the skip-of-2-word-insn
   erratum, on which no 2-word insn may be skipped — TODO confirm.
   Also reject a missing insn and insns that are not exactly 2 words.  */
10123 if (avr_current_device->errata_skip
10124 || !insn
10125 || 2 != get_attr_length (insn))
10127 return false;
10130 switch (INSN_CODE (insn))
10132 default:
10133 return false;
10135 case CODE_FOR_movqi_insn:
10136 case CODE_FOR_movuqq_insn:
10137 case CODE_FOR_movqq_insn:
10139 rtx set = single_set (insn);
10140 rtx src = SET_SRC (set);
10141 rtx dest = SET_DEST (set);
10143 /* Factor out LDS and STS from movqi_insn. */
/* A 2-word QI move is only a store (reg or zero) to a constant
   address, or a load from a constant address.  */
10145 if (MEM_P (dest)
10146 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
10148 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
10150 else if (REG_P (dest)
10151 && MEM_P (src))
10153 return CONSTANT_ADDRESS_P (XEXP (src, 0));
10156 return false;
/* Calls that passed the 2-word length test above are fine to skip.  */
10159 case CODE_FOR_call_insn:
10160 case CODE_FOR_call_value_insn:
10161 return true;
10167 jump_over_one_insn_p (rtx insn, rtx dest)
10169 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
10170 ? XEXP (dest, 0)
10171 : dest);
10172 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
10173 int dest_addr = INSN_ADDRESSES (uid);
10174 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
10176 return (jump_offset == 1
10177 || (jump_offset == 2
10178 && avr_2word_insn_p (next_active_insn (insn))));
10182 /* Worker function for `HARD_REGNO_MODE_OK'. */
10183 /* Returns 1 if a value of mode MODE can be stored starting with hard
10184 register number REGNO. On the enhanced core, anything larger than
10185 1 byte must start in even numbered register for "movw" to work
10186 (this way we don't have to check for odd registers everywhere). */
10189 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
10191 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10192 Disallowing QI et al. in these regs might lead to code like
10193 (set (subreg:QI (reg:HI 28) n) ...)
10194 which will result in wrong code because reload does not
10195 handle SUBREGs of hard regsisters like this.
10196 This could be fixed in reload. However, it appears
10197 that fixing reload is not wanted by reload people. */
10199 /* Any GENERAL_REGS register can hold 8-bit values. */
10201 if (GET_MODE_SIZE (mode) == 1)
10202 return 1;
10204 /* FIXME: Ideally, the following test is not needed.
10205 However, it turned out that it can reduce the number
10206 of spill fails. AVR and it's poor endowment with
10207 address registers is extreme stress test for reload. */
10209 if (GET_MODE_SIZE (mode) >= 4
10210 && regno >= REG_X)
10211 return 0;
10213 /* All modes larger than 8 bits should start in an even register. */
10215 return !(regno & 1);
10219 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10222 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10224 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10225 represent valid hard registers like, e.g. HI:29. Returning TRUE
10226 for such registers can lead to performance degradation as mentioned
10227 in PR53595. Thus, report invalid hard registers as FALSE. */
10229 if (!avr_hard_regno_mode_ok (regno, mode))
10230 return 0;
10232 /* Return true if any of the following boundaries is crossed:
10233 17/18, 27/28 and 29/30. */
10235 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10236 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10237 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
10241 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10243 enum reg_class
10244 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10245 addr_space_t as, RTX_CODE outer_code,
10246 RTX_CODE index_code ATTRIBUTE_UNUSED)
10248 if (!ADDR_SPACE_GENERIC_P (as))
10250 return POINTER_Z_REGS;
10253 if (!avr_strict_X)
10254 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10256 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10260 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10262 bool
10263 avr_regno_mode_code_ok_for_base_p (int regno,
10264 enum machine_mode mode ATTRIBUTE_UNUSED,
10265 addr_space_t as ATTRIBUTE_UNUSED,
10266 RTX_CODE outer_code,
10267 RTX_CODE index_code ATTRIBUTE_UNUSED)
10269 bool ok = false;
10271 if (!ADDR_SPACE_GENERIC_P (as))
10273 if (regno < FIRST_PSEUDO_REGISTER
10274 && regno == REG_Z)
10276 return true;
10279 if (reg_renumber)
10281 regno = reg_renumber[regno];
10283 if (regno == REG_Z)
10285 return true;
10289 return false;
10292 if (regno < FIRST_PSEUDO_REGISTER
10293 && (regno == REG_X
10294 || regno == REG_Y
10295 || regno == REG_Z
10296 || regno == ARG_POINTER_REGNUM))
10298 ok = true;
10300 else if (reg_renumber)
10302 regno = reg_renumber[regno];
10304 if (regno == REG_X
10305 || regno == REG_Y
10306 || regno == REG_Z
10307 || regno == ARG_POINTER_REGNUM)
10309 ok = true;
10313 if (avr_strict_X
10314 && PLUS == outer_code
10315 && regno == REG_X)
10317 ok = false;
10320 return ok;
10324 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10325 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10326 CLOBBER_REG is a QI clobber register or NULL_RTX.
10327 LEN == NULL: output instructions.
10328 LEN != NULL: set *LEN to the length of the instruction sequence
10329 (in words) printed with LEN = NULL.
10330 If CLEAR_P is true, OP[0] had been cleared to zero already.
10331 If CLEAR_P is false, nothing is known about OP[0].
10333 The effect on cc0 is as follows:
10335 Load 0 to any register except ZERO_REG : NONE
10336 Load ld register with any value : NONE
10337 Anything else: : CLOBBER */
10339 static void
10340 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
10342 rtx src = op[1];
10343 rtx dest = op[0];
10344 rtx xval, xdest[4];
10345 int ival[4];
10346 int clobber_val = 1234;
10347 bool cooked_clobber_p = false;
10348 bool set_p = false;
10349 enum machine_mode mode = GET_MODE (dest);
10350 int n, n_bytes = GET_MODE_SIZE (mode);
10352 gcc_assert (REG_P (dest)
10353 && CONSTANT_P (src));
10355 if (len)
10356 *len = 0;
10358 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10359 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10361 if (REGNO (dest) < 16
10362 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
10364 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
10367 /* We might need a clobber reg but don't have one. Look at the value to
10368 be loaded more closely. A clobber is only needed if it is a symbol
10369 or contains a byte that is neither 0, -1 or a power of 2. */
10371 if (NULL_RTX == clobber_reg
10372 && !test_hard_reg_class (LD_REGS, dest)
10373 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
10374 || !avr_popcount_each_byte (src, n_bytes,
10375 (1 << 0) | (1 << 1) | (1 << 8))))
10377 /* We have no clobber register but need one. Cook one up.
10378 That's cheaper than loading from constant pool. */
10380 cooked_clobber_p = true;
10381 clobber_reg = all_regs_rtx[REG_Z + 1];
10382 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
10385 /* Now start filling DEST from LSB to MSB. */
10387 for (n = 0; n < n_bytes; n++)
10389 int ldreg_p;
10390 bool done_byte = false;
10391 int j;
10392 rtx xop[3];
10394 /* Crop the n-th destination byte. */
10396 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
10397 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
10399 if (!CONST_INT_P (src)
10400 && !CONST_FIXED_P (src)
10401 && !CONST_DOUBLE_P (src))
10403 static const char* const asm_code[][2] =
10405 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
10406 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
10407 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
10408 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
10411 xop[0] = xdest[n];
10412 xop[1] = src;
10413 xop[2] = clobber_reg;
10415 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
10417 continue;
10420 /* Crop the n-th source byte. */
10422 xval = simplify_gen_subreg (QImode, src, mode, n);
10423 ival[n] = INTVAL (xval);
10425 /* Look if we can reuse the low word by means of MOVW. */
10427 if (n == 2
10428 && n_bytes >= 4
10429 && AVR_HAVE_MOVW)
10431 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
10432 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
10434 if (INTVAL (lo16) == INTVAL (hi16))
10436 if (0 != INTVAL (lo16)
10437 || !clear_p)
10439 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
10442 break;
10446 /* Don't use CLR so that cc0 is set as expected. */
10448 if (ival[n] == 0)
10450 if (!clear_p)
10451 avr_asm_len (ldreg_p ? "ldi %0,0"
10452 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
10453 : "mov %0,__zero_reg__",
10454 &xdest[n], len, 1);
10455 continue;
10458 if (clobber_val == ival[n]
10459 && REGNO (clobber_reg) == REGNO (xdest[n]))
10461 continue;
10464 /* LD_REGS can use LDI to move a constant value */
10466 if (ldreg_p)
10468 xop[0] = xdest[n];
10469 xop[1] = xval;
10470 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
10471 continue;
10474 /* Try to reuse value already loaded in some lower byte. */
10476 for (j = 0; j < n; j++)
10477 if (ival[j] == ival[n])
10479 xop[0] = xdest[n];
10480 xop[1] = xdest[j];
10482 avr_asm_len ("mov %0,%1", xop, len, 1);
10483 done_byte = true;
10484 break;
10487 if (done_byte)
10488 continue;
10490 /* Need no clobber reg for -1: Use CLR/DEC */
10492 if (-1 == ival[n])
10494 if (!clear_p)
10495 avr_asm_len ("clr %0", &xdest[n], len, 1);
10497 avr_asm_len ("dec %0", &xdest[n], len, 1);
10498 continue;
10500 else if (1 == ival[n])
10502 if (!clear_p)
10503 avr_asm_len ("clr %0", &xdest[n], len, 1);
10505 avr_asm_len ("inc %0", &xdest[n], len, 1);
10506 continue;
10509 /* Use T flag or INC to manage powers of 2 if we have
10510 no clobber reg. */
10512 if (NULL_RTX == clobber_reg
10513 && single_one_operand (xval, QImode))
10515 xop[0] = xdest[n];
10516 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
10518 gcc_assert (constm1_rtx != xop[1]);
10520 if (!set_p)
10522 set_p = true;
10523 avr_asm_len ("set", xop, len, 1);
10526 if (!clear_p)
10527 avr_asm_len ("clr %0", xop, len, 1);
10529 avr_asm_len ("bld %0,%1", xop, len, 1);
10530 continue;
10533 /* We actually need the LD_REGS clobber reg. */
10535 gcc_assert (NULL_RTX != clobber_reg);
10537 xop[0] = xdest[n];
10538 xop[1] = xval;
10539 xop[2] = clobber_reg;
10540 clobber_val = ival[n];
10542 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10543 "mov %0,%2", xop, len, 2);
10546 /* If we cooked up a clobber reg above, restore it. */
10548 if (cooked_clobber_p)
10550 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
10555 /* Reload the constant OP[1] into the HI register OP[0].
10556 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10557 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10558 need a clobber reg or have to cook one up.
10560 PLEN == NULL: Output instructions.
10561 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10562 by the insns printed.
10564 Return "". */
10566 const char*
10567 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10569 output_reload_in_const (op, clobber_reg, plen, false);
10570 return "";
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2  CLR R3  MOVW R4,R2  INC R2

         is shorter than

             CLR R2  INC R2  CLR  R3  CLR R4  CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      /* Pre-clearing pays off exactly when it saves the 4 CLRs that the
         default method would need (3 insns clear all 4 bytes via MOVW).  */

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
10635 const char*
10636 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10638 output_reload_in_const (op, clobber_reg, len, false);
10639 return "";
10643 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10645 void
10646 avr_output_addr_vec_elt (FILE *stream, int value)
10648 if (AVR_HAVE_JMP_CALL)
10649 fprintf (stream, "\t.word gs(.L%d)\n", value);
10650 else
10651 fprintf (stream, "\trjmp .L%d\n", value);
10655 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10656 /* Returns true if SCRATCH are safe to be allocated as a scratch
10657 registers (for a define_peephole2) in the current function. */
10659 static bool
10660 avr_hard_regno_scratch_ok (unsigned int regno)
10662 /* Interrupt functions can only use registers that have already been saved
10663 by the prologue, even if they would normally be call-clobbered. */
10665 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10666 && !df_regs_ever_live_p (regno))
10667 return false;
10669 /* Don't allow hard registers that might be part of the frame pointer.
10670 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10671 and don't care for a frame pointer that spans more than one register. */
10673 if ((!reload_completed || frame_pointer_needed)
10674 && (regno == REG_Y || regno == REG_Y + 1))
10676 return false;
10679 return true;
10683 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10684 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10687 avr_hard_regno_rename_ok (unsigned int old_reg,
10688 unsigned int new_reg)
10690 /* Interrupt functions can only use registers that have already been
10691 saved by the prologue, even if they would normally be
10692 call-clobbered. */
10694 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10695 && !df_regs_ever_live_p (new_reg))
10696 return 0;
10698 /* Don't allow hard registers that might be part of the frame pointer.
10699 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10700 and don't care for a frame pointer that spans more than one register. */
10702 if ((!reload_completed || frame_pointer_needed)
10703 && (old_reg == REG_Y || old_reg == REG_Y + 1
10704 || new_reg == REG_Y || new_reg == REG_Y + 1))
10706 return 0;
10709 return 1;
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* Length >= 4 words means the label is out of RJMP range: use JMP.  */
  bool long_jump = get_attr_length (insn) >= 4;
  /* Skip-over-one-insn form needs the inverted condition.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit degenerate to EQ/NE on that bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Low I/O addresses support the 1-insn SBIS/SBIC bit-skip.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Higher I/O addresses: read into __tmp_reg__ first, then
             skip on the register bit with SBRS/SBRC.  */

          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */
/* Emit a reference to __do_global_ctors (so the linker pulls in the
   startup code that runs constructors), then the default table entry.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */
/* Emit a reference to __do_global_dtors (so the linker pulls in the
   exit code that runs destructors), then the default table entry.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
10800 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10802 static bool
10803 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10805 if (TYPE_MODE (type) == BLKmode)
10807 HOST_WIDE_INT size = int_size_in_bytes (type);
10808 return (size == -1 || size > 8);
10810 else
10811 return false;
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  const unsigned int avr_default_case_values_threshold = 7;

  return avr_default_case_values_threshold;
}
10831 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10833 static enum machine_mode
10834 avr_addr_space_address_mode (addr_space_t as)
10836 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10840 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10842 static enum machine_mode
10843 avr_addr_space_pointer_mode (addr_space_t as)
10845 return avr_addr_space_address_mode (as);
10849 /* Helper for following function. */
10851 static bool
10852 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10854 gcc_assert (REG_P (reg));
10856 if (strict)
10858 return REGNO (reg) == REG_Z;
10861 /* Avoid combine to propagate hard regs. */
10863 if (can_create_pseudo_p()
10864 && REGNO (reg) < REG_Z)
10866 return false;
10869 return true;
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* Return true if X is a legitimate address for MODE in address space AS,
   applying STRICT register checking.  Generic space defers to the normal
   legitimacy test; flash spaces accept (REG Z) and (POST_INC Z); MEMX
   additionally accepts LO_SUM hi/lo pairs with Z as the low part.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* A plain register is only acceptable before register allocation.  */

      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional -mlog= debug dump of the decision.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
10955 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10957 static rtx
10958 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10959 enum machine_mode mode, addr_space_t as)
10961 if (ADDR_SPACE_GENERIC_P (as))
10962 return avr_legitimize_address (x, old_x, mode);
10964 if (avr_log.legitimize_address)
10966 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10969 return old_x;
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* Convert pointer SRC from the address space of TYPE_FROM to that of
   TYPE_TO.  Up-casts to the 24-bit MEMX space prepend the linearizing
   segment byte; down-casts simply drop the high byte.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST/PLUS wrappers to get at the underlying symbol.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* A zero segment byte allows plain zero-extension; otherwise the
         constant MSB is glued in front of the 16-bit address.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */

  return src;
}
11042 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11044 static bool
11045 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
11046 addr_space_t superset ATTRIBUTE_UNUSED)
11048 /* Allow any kind of pointer mess. */
11050 return true;
/* Implement `TARGET_CONVERT_TO_TYPE'.  */
/* Warn (under -Waddr-space-convert) about pointer conversions that move
   the pointee to a different, non-enclosing address space.  Return the
   converted expression for the warned case, NULL_TREE otherwise so the
   middle-end performs the default conversion.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
            be located in the right memory, like in

                (const __flash*) PSTR ("text")

            Also try to distinguish between explicit casts requested by
            the user and implicit casts like

                void f (const __flash char*);

                void g (const char *p)
                {
                    f ((const __flash*) p);
                }

            under the assumption that an explicit casts means that the user
            knows what he is doing, e.g. interface with PSTR or old style
            code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* MEMX encloses all other spaces, so casting into it is safe.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Writing to flash is not supported here.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only constant, positive byte counts are expanded inline.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: must be the linearized MEMX space.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and segment byte.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_current_device->n_flash > 1)
        {
          /* Multi-segment flash: load the segment number into RAMPZ.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single flash segment: plain LPM addressing suffices.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      /* Segment byte goes to R23 for the runtime dispatch.  */

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: output the copy-loop instructions.
   PLEN != NULL: only count them into *PLEN.  Returns "".  */

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW only works on the upper register pairs.  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* Plain LPM has no post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
11309 /* Helper for __builtin_avr_delay_cycles */
11311 static rtx
11312 avr_mem_clobber (void)
11314 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11315 MEM_VOLATILE_P (mem) = 1;
11316 return mem;
/* Expand __builtin_avr_delay_cycles: emit busy-wait code that consumes
   exactly OPERANDS0 (a 32-bit cycle count) CPU cycles.  Successively
   larger loop insns (delay_cycles_4 .. _1) burn most of the budget; the
   remainder is padded with 2- and 1-cycle NOPs.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: 6 cycles per iteration plus 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop: 5 cycles per iteration plus 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop: 4 cycles per iteration plus 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop: 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Pad the remainder (at most 5 cycles) with RJMP/NOP.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
/* Compute the image of x under f, i.e. perform   x --> f(x)   */
/* F packs eight 4-bit values; nibble X (counted from the least
   significant end) is the image of X.  X >= 8 maps to 0.  */

static int
avr_map (unsigned int f, int x)
{
  if (x >= 8)
    return 0;

  return 0xf & (f >> (4 * x));
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

/* Compute the metric selected by MODE (one of the enumerators above)
   for map A by scanning all eight nibble positions.  */

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      switch (mode)
        {
        case MAP_FIXED_0_7:
          metric += ai == i;
          break;

        case MAP_NONFIXED_0_7:
          metric += ai < 8 && ai != i;
          break;

        case MAP_MASK_FIXED_0_7:
          metric |= ((unsigned) (ai == i)) << i;
          break;

        case MAP_PREIMAGE_0_7:
          metric += ai < 8;
          break;

        case MAP_MASK_PREIMAGE_F:
          metric |= ((unsigned) (ai == 0xf)) << i;
          break;

        default:
          gcc_unreachable();
        }
    }

  return metric;
}
11438 /* Return true if IVAL has a 0xf in its hexadecimal representation
11439 and false, otherwise. Only nibbles 0..7 are taken into account.
11440 Used as constraint helper for C0f and Cxf. */
11442 bool
11443 avr_has_nibble_0xf (rtx ival)
11445 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
11446 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
/* Candidate operations G (rotates and shifts with their inverse maps
   and costs) tried by avr_map_decompose.  The MAP field is filled in
   at decomposition time and therefore 0 here.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   VAL_CONST_P tells whether the value operand of the insertion is a
   compile-time constant, which influences the cost computation.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* Negative cost marks "no decomposition found" until proven otherwise.  */

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3. */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.
   PLEN works as for avr_asm_len: NULL prints, non-NULL only counts.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     This keeps each source bit in T for all of its destinations so the
     BST is emitted only once per source bit.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles.  If nibble no. N is
           0:   Bit N of result is copied from bit OP[2].0
           ...  ...
           7:   Bit N of result is copied from bit OP[2].7
           0xf: Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value

   Returns "" because the output is produced via avr_asm_len.  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder operands for avr_move_bits: 0 = dest, 1 = source bits,
     2 = target value; xop[3] is scratch for bit numbers.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* Only worth the XOR/AND/XOR merge when skipping the fixed points
         saves more than the 3 instructions the merge itself costs.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
/* IDs for all the AVR builtins.  One enumerator per DEF_BUILTIN entry
   in builtins.def, plus AVR_BUILTIN_COUNT as the number of builtins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
/* Descriptor for one built-in function: the insn that implements it,
   its arity, and the FUNCTION_DECL created at init time.  GTY(()) so
   the garbage collector scans the fndecl.  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID] */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
    /* fndecl is filled in later by avr_init_builtins.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
11748 /* Implement `TARGET_BUILTIN_DECL'. */
11750 static tree
11751 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11753 if (id < AVR_BUILTIN_COUNT)
11754 return avr_bdesc[id].fndecl;
11756 return error_mark_node;
11760 static void
11761 avr_init_builtin_int24 (void)
11763 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11764 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11766 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11767 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  The local *_ftype_*
   trees below look unused, but they are referenced by name through the
   TYPE argument of the DEF_BUILTIN entries expanded from builtins.def
   at the end of this function.  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Pointer type for the __memx (24-bit) address space, used by the
     __builtin_avr_flash_segment builtin.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* ITYP(T): the integer type with the same precision and signedness
     as fixed-point type T; used by the bitsfx/fxbits builtins.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);

  /* Register every builtin and record its decl in avr_bdesc so that
     avr_builtin_decl can hand it back by ID.  The builtin's user-visible
     name is the lower-cased "__builtin_avr_<NAME>".  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  };
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
11953 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11954 with non-void result and 1 ... 3 arguments. */
11956 static rtx
11957 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
11959 rtx pat, xop[3];
11960 int n, n_args = call_expr_nargs (exp);
11961 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11963 gcc_assert (n_args >= 1 && n_args <= 3);
11965 if (target == NULL_RTX
11966 || GET_MODE (target) != tmode
11967 || !insn_data[icode].operand[0].predicate (target, tmode))
11969 target = gen_reg_rtx (tmode);
11972 for (n = 0; n < n_args; n++)
11974 tree arg = CALL_EXPR_ARG (exp, n);
11975 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11976 enum machine_mode opmode = GET_MODE (op);
11977 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
11979 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
11981 opmode = HImode;
11982 op = gen_lowpart (HImode, op);
11985 /* In case the insn wants input operands in modes different from
11986 the result, abort. */
11988 gcc_assert (opmode == mode || opmode == VOIDmode);
11990 if (!insn_data[icode].operand[n+1].predicate (op, mode))
11991 op = copy_to_mode_reg (mode, op);
11993 xop[n] = op;
11996 switch (n_args)
11998 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
11999 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
12000 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
12002 default:
12003 gcc_unreachable();
12006 if (pat == NULL_RTX)
12007 return NULL_RTX;
12009 emit_insn (pat);
12011 return target;
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Builtins that need special treatment; all others fall through to
     the vanilla expansion at the end.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:
      {
        /* Warn about odd rounding.  Rounding points >= FBIT will have
           no effect.  */

        if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
          break;

        int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

        if (rbit >= (int) GET_MODE_FBIT (mode))
          {
            warning (OPT_Wextra, "rounding to %d bits has no effect for "
                     "fixed-point value with %d fractional bits",
                     rbit, GET_MODE_FBIT (mode));

            return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                                EXPAND_NORMAL);
          }
        else if (rbit <= - (int) GET_MODE_IBIT (mode))
          {
            warning (0, "rounding result will always be 0");
            return CONST0_RTX (mode);
          }

        /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

           TR 18037 only specifies results for RP > 0.  However, the
           remaining cases of  -IBIT < RP <= 0  can easily be supported
           without any additional overhead.  */

        break; /* round */
      }
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
12136 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12138 static tree
12139 avr_fold_absfx (tree tval)
12141 if (FIXED_CST != TREE_CODE (tval))
12142 return NULL_TREE;
12144 /* Our fixed-points have no padding: Use double_int payload directly. */
12146 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
12147 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
12148 double_int ival = fval.data.sext (bits);
12150 if (!ival.is_negative())
12151 return tval;
12153 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12155 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
12156 ? double_int::max_value (bits, false)
12157 : -ival;
12159 return build_fixed (TREE_TYPE (tval), fval);
/* Implement `TARGET_FOLD_BUILTIN'.
   Fold calls to AVR builtins at tree level where possible; return
   NULL_TREE to leave the call alone.  Only active when optimizing.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* bitsfx/fxbits are pure reinterpretations between fixed-point
         and integer of the same width.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = double_int_to_tree (map_type, tree_to_double_int (arg[0]));
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type,
                                   double_int::from_uhwi (best_g.map));

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
/* Initialize the GCC target structure.  Each #undef/#define pair below
   replaces a hook's default with the AVR implementation defined earlier
   in this file; TARGET_INITIALIZER then collects them into targetm.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Hooks for the named address spaces __flash, __flash1 ... __memx.  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"