1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "print-tree.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
39 #include "output.h"
40 #include "expr.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
43 #include "obstack.h"
44 #include "function.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "tm_p.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "params.h"
53 #include "df.h"
55 /* Maximum allowed offset for an address in the LD instruction */
56 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
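/* Editor's note (illustration, not part of the original file): LDD's
   displacement field holds 0..63, and the offset of the *last* byte of the
   access must still fit. E.g. for an SImode (4-byte) access the maximum
   start offset is MAX_LD_OFFSET (SImode) = 64 - 4 = 60, so the four bytes
   land at offsets 60..63. */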
58 /* Return true if STR starts with PREFIX and false, otherwise. */
59 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
61 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
62 address space where data is to be located.
63 As the only non-generic address spaces are all located in flash,
64 this can be used to test if data shall go into some .progmem* section.
65 This must be the rightmost field of machine dependent section flags. */
66 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
68 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
69 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
71 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
72 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
73 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
74 do { \
75 SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
76 SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
77 } while (0)
79 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
80 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
81 ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM) \
82 / SYMBOL_FLAG_MACH_DEP)
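/* A minimal stand-alone sketch (editor's illustration, not part of this
   file) of how the 4-bit address-space field above packs and unpacks.
   MACH_DEP stands in for SYMBOL_FLAG_MACH_DEP, i.e. the lowest
   machine-dependent flag bit; the field mask is 0xf * MACH_DEP, and a SET
   followed by a GET round-trips the value:

     #define MACH_DEP   (1u << 8)           // hypothetical bit position
     #define FIELD_MASK (0xfu * MACH_DEP)   // 4 bits starting there

     static unsigned set_as (unsigned flags, unsigned as)
     {
       flags &= ~FIELD_MASK;           // clear the old field
       return flags | as * MACH_DEP;   // shift AS into place
     }

     static unsigned get_as (unsigned flags)
     {
       return (flags & FIELD_MASK) / MACH_DEP;
     }

     // get_as (set_as (0, 5)) == 5
*/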
84 /* Known address spaces. The order must be the same as in the respective
85 enum from avr.h (or designated initializers must be used). */
86 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
88 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
89 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
90 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
91 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
92 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
93 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
94 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
95 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
99 /* Holding RAM addresses of some SFRs used by the compiler and that
100 are unique over all devices in an architecture like 'avr4'. */
102 typedef struct
104 /* SREG: The processor status */
105 int sreg;
107 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
108 int ccp;
109 int rampd;
110 int rampx;
111 int rampy;
113 /* RAMPZ: The high byte of 24-bit address used with ELPM */
114 int rampz;
116 /* SP: The stack pointer and its low and high byte */
117 int sp_l;
118 int sp_h;
119 } avr_addr_t;
121 static avr_addr_t avr_addr;
124 /* Prototypes for local helper functions. */
126 static const char* out_movqi_r_mr (rtx, rtx[], int*);
127 static const char* out_movhi_r_mr (rtx, rtx[], int*);
128 static const char* out_movsi_r_mr (rtx, rtx[], int*);
129 static const char* out_movqi_mr_r (rtx, rtx[], int*);
130 static const char* out_movhi_mr_r (rtx, rtx[], int*);
131 static const char* out_movsi_mr_r (rtx, rtx[], int*);
133 static int get_sequence_length (rtx insns);
134 static int sequent_regs_live (void);
135 static const char *ptrreg_to_str (int);
136 static const char *cond_string (enum rtx_code);
137 static int avr_num_arg_regs (enum machine_mode, const_tree);
138 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
139 int, bool);
140 static void output_reload_in_const (rtx*, rtx, int*, bool);
141 static struct machine_function * avr_init_machine_status (void);
144 /* Prototypes for hook implementors if needed before their implementation. */
146 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
149 /* Allocate registers from r25 to r8 for parameters for function calls. */
150 #define FIRST_CUM_REG 26
152 /* Implicit target register of LPM instruction (R0) */
153 extern GTY(()) rtx lpm_reg_rtx;
154 rtx lpm_reg_rtx;
156 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
157 extern GTY(()) rtx lpm_addr_reg_rtx;
158 rtx lpm_addr_reg_rtx;
160 /* Temporary register RTX (reg:QI TMP_REGNO) */
161 extern GTY(()) rtx tmp_reg_rtx;
162 rtx tmp_reg_rtx;
164 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
165 extern GTY(()) rtx zero_reg_rtx;
166 rtx zero_reg_rtx;
168 /* RTXs for all general purpose registers as QImode */
169 extern GTY(()) rtx all_regs_rtx[32];
170 rtx all_regs_rtx[32];
172 /* SREG, the processor status */
173 extern GTY(()) rtx sreg_rtx;
174 rtx sreg_rtx;
176 /* RAMP* special function registers */
177 extern GTY(()) rtx rampd_rtx;
178 extern GTY(()) rtx rampx_rtx;
179 extern GTY(()) rtx rampy_rtx;
180 extern GTY(()) rtx rampz_rtx;
181 rtx rampd_rtx;
182 rtx rampx_rtx;
183 rtx rampy_rtx;
184 rtx rampz_rtx;
186 /* RTX containing the strings "" and "e", respectively */
187 static GTY(()) rtx xstring_empty;
188 static GTY(()) rtx xstring_e;
190 /* Current architecture. */
191 const avr_arch_t *avr_current_arch;
193 /* Current device. */
194 const avr_mcu_t *avr_current_device;
196 /* Section to put switch tables in. */
197 static GTY(()) section *progmem_swtable_section;
199 /* Unnamed sections associated with __attribute__((progmem)) aka PROGMEM
200 or to address space __flash* or __memx. Only used as singletons inside
201 avr_asm_select_section, but it must not be local there because of GTY. */
202 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode = true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p = false;
209 bool avr_need_copy_data_p = false;
212 /* Transform UP into lowercase and write the result to LO.
213 You must provide enough space for LO. Return LO. */
215 static char*
216 avr_tolower (char *lo, const char *up)
218 char *lo0 = lo;
220 for (; *up; up++, lo++)
221 *lo = TOLOWER (*up);
223 *lo = '\0';
225 return lo0;
229 /* Custom function to count number of set bits. */
231 static inline int
232 avr_popcount (unsigned int val)
234 int pop = 0;
236 while (val)
238 val &= val-1;
239 pop++;
242 return pop;
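/* Editor's note: the loop above relies on the val &= val - 1 trick, which
   clears the lowest set bit on each iteration. E.g. for val = 0b1100:
     0b1100 & 0b1011 = 0b1000   (pop = 1)
     0b1000 & 0b0111 = 0b0000   (pop = 2)
   so the loop runs exactly once per set bit. */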
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
251 bool
252 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
254 int i;
256 enum machine_mode mode = GET_MODE (xval);
258 if (VOIDmode == mode)
259 mode = SImode;
261 for (i = 0; i < n_bytes; i++)
263 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
264 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
266 if (0 == (pop_mask & (1 << avr_popcount (val8))))
267 return false;
270 return true;
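/* Usage illustration (editor's sketch): with pop_mask = (1 << 0) | (1 << 8)
   only bytes whose popcount is 0 or 8 pass, i.e. every byte must be 0x00
   or 0xff. For instance

     avr_popcount_each_byte (GEN_INT (0x00ff00ff), 4, (1 << 0) | (1 << 8))

   returns true; this is the shape of test used by constraints that accept
   only all-zeros/all-ones bytes. */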
274 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
275 the bit representation of X by "casting" it to CONST_INT. */
278 avr_to_int_mode (rtx x)
280 enum machine_mode mode = GET_MODE (x);
282 return VOIDmode == mode
283 ? x
284 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
288 /* Implement `TARGET_OPTION_OVERRIDE'. */
290 static void
291 avr_option_override (void)
293 flag_delete_null_pointer_checks = 0;
295 /* caller-save.c looks for call-clobbered hard registers that are assigned
296 to pseudos that cross calls and tries to save/restore them around calls
297 in order to reduce the number of stack slots needed.
299 This might lead to situations where reload is no longer able to cope
300 with the challenge of AVR's very few address registers and fails to
301 perform the requested spills. */
303 if (avr_strict_X)
304 flag_caller_saves = 0;
306 /* Unwind tables currently require a frame pointer for correctness,
307 see toplev.c:process_options(). */
309 if ((flag_unwind_tables
310 || flag_non_call_exceptions
311 || flag_asynchronous_unwind_tables)
312 && !ACCUMULATE_OUTGOING_ARGS)
314 flag_omit_frame_pointer = 0;
317 if (flag_pic == 1)
318 warning (OPT_fpic, "-fpic is not supported");
319 if (flag_pic == 2)
320 warning (OPT_fPIC, "-fPIC is not supported");
321 if (flag_pie == 1)
322 warning (OPT_fpie, "-fpie is not supported");
323 if (flag_pie == 2)
324 warning (OPT_fPIE, "-fPIE is not supported");
326 avr_current_device = &avr_mcu_types[avr_mcu_index];
327 avr_current_arch = &avr_arch_types[avr_current_device->arch];
329 /* RAM addresses of some SFRs common to all devices in respective arch. */
331 /* SREG: Status Register containing flags like I (global IRQ) */
332 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
334 /* RAMPZ: The high part of the address when loading via ELPM */
335 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
337 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
338 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
339 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
340 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
342 /* SP: Stack Pointer (SP_H:SP_L) */
343 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
344 avr_addr.sp_h = avr_addr.sp_l + 1;
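/* Editor's note: on classic AVR cores the RAM address of an SFR is its I/O
   address plus the 0x20 sfr_offset, e.g. SREG at I/O 0x3F lives at RAM
   0x3F + 0x20 = 0x5F; on XMEGA sfr_offset is 0 and both address views
   coincide. */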
346 init_machine_status = avr_init_machine_status;
348 avr_log_set_avr_log();
351 /* Function to set up the backend function structure. */
353 static struct machine_function *
354 avr_init_machine_status (void)
356 return ggc_alloc_cleared_machine_function ();
360 /* Implement `INIT_EXPANDERS'. */
361 /* The function works like a singleton. */
363 void
364 avr_init_expanders (void)
366 int regno;
368 for (regno = 0; regno < 32; regno ++)
369 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
371 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
372 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
373 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
375 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
377 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
378 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
379 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
380 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
381 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
383 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
384 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
388 /* Implement `REGNO_REG_CLASS'. */
389 /* Return register class for register R. */
391 enum reg_class
392 avr_regno_reg_class (int r)
394 static const enum reg_class reg_class_tab[] =
396 R0_REG,
397 /* r1 - r15 */
398 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
399 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
400 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
401 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
402 /* r16 - r23 */
403 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
404 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
405 /* r24, r25 */
406 ADDW_REGS, ADDW_REGS,
407 /* X: r26, 27 */
408 POINTER_X_REGS, POINTER_X_REGS,
409 /* Y: r28, r29 */
410 POINTER_Y_REGS, POINTER_Y_REGS,
411 /* Z: r30, r31 */
412 POINTER_Z_REGS, POINTER_Z_REGS,
413 /* SP: SPL, SPH */
414 STACK_REG, STACK_REG
417 if (r <= 33)
418 return reg_class_tab[r];
420 return ALL_REGS;
424 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
426 static bool
427 avr_scalar_mode_supported_p (enum machine_mode mode)
429 if (ALL_FIXED_POINT_MODE_P (mode))
430 return true;
432 if (PSImode == mode)
433 return true;
435 return default_scalar_mode_supported_p (mode);
439 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
441 static bool
442 avr_decl_flash_p (tree decl)
444 if (TREE_CODE (decl) != VAR_DECL
445 || TREE_TYPE (decl) == error_mark_node)
447 return false;
450 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
454 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
455 address space and FALSE, otherwise. */
457 static bool
458 avr_decl_memx_p (tree decl)
460 if (TREE_CODE (decl) != VAR_DECL
461 || TREE_TYPE (decl) == error_mark_node)
463 return false;
466 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
470 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
472 bool
473 avr_mem_flash_p (rtx x)
475 return (MEM_P (x)
476 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
480 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
481 address space and FALSE, otherwise. */
483 bool
484 avr_mem_memx_p (rtx x)
486 return (MEM_P (x)
487 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
491 /* A helper for the function attribute predicates below: dig for
492 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE. */
494 static inline int
495 avr_lookup_function_attribute1 (const_tree func, const char *name)
497 if (FUNCTION_DECL == TREE_CODE (func))
499 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
501 return true;
504 func = TREE_TYPE (func);
507 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
508 || TREE_CODE (func) == METHOD_TYPE);
510 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
513 /* Return nonzero if FUNC is a naked function. */
515 static int
516 avr_naked_function_p (tree func)
518 return avr_lookup_function_attribute1 (func, "naked");
521 /* Return nonzero if FUNC is an interrupt function as specified
522 by the "interrupt" attribute. */
524 static int
525 avr_interrupt_function_p (tree func)
527 return avr_lookup_function_attribute1 (func, "interrupt");
530 /* Return nonzero if FUNC is a signal function as specified
531 by the "signal" attribute. */
533 static int
534 avr_signal_function_p (tree func)
536 return avr_lookup_function_attribute1 (func, "signal");
539 /* Return nonzero if FUNC is an OS_task function. */
541 static int
542 avr_OS_task_function_p (tree func)
544 return avr_lookup_function_attribute1 (func, "OS_task");
547 /* Return nonzero if FUNC is an OS_main function. */
549 static int
550 avr_OS_main_function_p (tree func)
552 return avr_lookup_function_attribute1 (func, "OS_main");
556 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
557 /* Sanity checking for the above function attributes. */
559 static void
560 avr_set_current_function (tree decl)
562 location_t loc;
563 const char *isr;
565 if (decl == NULL_TREE
566 || current_function_decl == NULL_TREE
567 || current_function_decl == error_mark_node
568 || ! cfun->machine
569 || cfun->machine->attributes_checked_p)
570 return;
572 loc = DECL_SOURCE_LOCATION (decl);
574 cfun->machine->is_naked = avr_naked_function_p (decl);
575 cfun->machine->is_signal = avr_signal_function_p (decl);
576 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
577 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
578 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
580 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
582 /* Too many attributes make no sense as they request conflicting features. */
584 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
585 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
586 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
587 " exclusive", "OS_task", "OS_main", isr);
589 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
591 if (cfun->machine->is_naked
592 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
593 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
594 " no effect on %qs function", "OS_task", "OS_main", "naked");
596 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
598 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
599 tree ret = TREE_TYPE (TREE_TYPE (decl));
600 const char *name;
602 name = DECL_ASSEMBLER_NAME_SET_P (decl)
603 /* Remove the leading '*' added in set_user_assembler_name. */
604 ? 1 + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
605 : IDENTIFIER_POINTER (DECL_NAME (decl));
607 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC started
608 using this when it switched from SIGNAL and INTERRUPT to ISR. */
610 if (cfun->machine->is_interrupt)
611 cfun->machine->is_signal = 0;
613 /* Interrupt handlers must be void __vector (void) functions. */
615 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
616 error_at (loc, "%qs function cannot have arguments", isr);
618 if (TREE_CODE (ret) != VOID_TYPE)
619 error_at (loc, "%qs function cannot return a value", isr);
621 /* If the function has the 'signal' or 'interrupt' attribute, ensure
622 that the name of the function is "__vector_NN" so as to catch
623 when the user misspells the vector name. */
625 if (!STR_PREFIX_P (name, "__vector"))
626 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
627 name, isr);
630 /* Don't print the above diagnostics more than once. */
632 cfun->machine->attributes_checked_p = 1;
636 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
639 avr_accumulate_outgoing_args (void)
641 if (!cfun)
642 return TARGET_ACCUMULATE_OUTGOING_ARGS;
644 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
645 what offset is correct. In some cases it is relative to
646 virtual_outgoing_args_rtx and in others it is relative to
647 virtual_stack_vars_rtx. For example code see
648 gcc.c-torture/execute/built-in-setjmp.c
649 gcc.c-torture/execute/builtins/sprintf-chk.c */
651 return (TARGET_ACCUMULATE_OUTGOING_ARGS
652 && !(cfun->calls_setjmp
653 || cfun->has_nonlocal_label));
657 /* Report contribution of accumulated outgoing arguments to stack size. */
659 static inline int
660 avr_outgoing_args_size (void)
662 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
666 /* Implement `STARTING_FRAME_OFFSET'. */
667 /* This is the offset from the frame pointer register to the first stack slot
668 that contains a variable living in the frame. */
671 avr_starting_frame_offset (void)
673 return 1 + avr_outgoing_args_size ();
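/* Editor's note (interpretation): SP on AVR points one byte below the last
   occupied stack location (PUSH stores, then decrements), so the first
   usable frame slot lies at frame_pointer + 1, plus the area reserved for
   accumulated outgoing arguments. */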
677 /* Return the number of hard registers to push/pop in the prologue/epilogue
678 of the current function, and optionally store these registers in SET. */
680 static int
681 avr_regs_to_save (HARD_REG_SET *set)
683 int reg, count;
684 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
686 if (set)
687 CLEAR_HARD_REG_SET (*set);
688 count = 0;
690 /* No need to save any registers if the function never returns or
691 has the "OS_task" or "OS_main" attribute. */
693 if (TREE_THIS_VOLATILE (current_function_decl)
694 || cfun->machine->is_OS_task
695 || cfun->machine->is_OS_main)
696 return 0;
698 for (reg = 0; reg < 32; reg++)
700 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
701 any global register variables. */
703 if (fixed_regs[reg])
704 continue;
706 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
707 || (df_regs_ever_live_p (reg)
708 && (int_or_sig_p || !call_used_regs[reg])
709 /* Don't record frame pointer registers here. They are treated
710 individually in the prologue. */
711 && !(frame_pointer_needed
712 && (reg == REG_Y || reg == (REG_Y+1)))))
714 if (set)
715 SET_HARD_REG_BIT (*set, reg);
716 count++;
719 return count;
723 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
725 static bool
726 avr_allocate_stack_slots_for_args (void)
728 return !cfun->machine->is_naked;
732 /* Return true if register FROM can be eliminated via register TO. */
734 static bool
735 avr_can_eliminate (const int from, const int to)
737 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
738 || !frame_pointer_needed);
742 /* Implement `TARGET_WARN_FUNC_RETURN'. */
744 static bool
745 avr_warn_func_return (tree decl)
747 /* Naked functions are implemented entirely in assembly, including the
748 return sequence, so suppress warnings about this. */
750 return !avr_naked_function_p (decl);
753 /* Compute offset between arg_pointer and frame_pointer. */
756 avr_initial_elimination_offset (int from, int to)
758 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
759 return 0;
760 else
762 int offset = frame_pointer_needed ? 2 : 0;
763 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
765 offset += avr_regs_to_save (NULL);
766 return (get_frame_size () + avr_outgoing_args_size()
767 + avr_pc_size + 1 + offset);
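/* Worked example (editor's illustration): a function with a 4-byte frame,
   three call-saved registers pushed, a 2-byte PC and a saved frame pointer
   gives 4 + 0 (outgoing args) + 2 (PC) + 1 + (2 + 3) = 12 bytes between
   the arg pointer and the frame pointer. */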
772 /* Helper for the function below. */
774 static void
775 avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
777 *node = make_node (FIXED_POINT_TYPE);
778 TYPE_SATURATING (*node) = sat_p;
779 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
780 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
781 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
782 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
783 TYPE_ALIGN (*node) = 8;
784 SET_TYPE_MODE (*node, mode);
786 layout_type (*node);
790 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
792 static tree
793 avr_build_builtin_va_list (void)
795 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
796 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
797 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
798 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
799 to the long long accum modes instead of the desired [U]TAmode.
801 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
802 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
803 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
804 libgcc to detect IBIT and FBIT. */
806 avr_adjust_type_node (&ta_type_node, TAmode, 0);
807 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
808 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
809 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
811 unsigned_long_long_accum_type_node = uta_type_node;
812 long_long_accum_type_node = ta_type_node;
813 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
814 sat_long_long_accum_type_node = sat_ta_type_node;
816 /* Dispatch to the default handler. */
818 return std_build_builtin_va_list ();
822 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
823 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
824 from the frame pointer by +STARTING_FRAME_OFFSET.
825 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
826 avoids creating add/sub of offset in nonlocal goto and setjmp. */
828 static rtx
829 avr_builtin_setjmp_frame_value (void)
831 rtx xval = gen_reg_rtx (Pmode);
832 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
833 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
834 return xval;
838 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
839 This is the return address of the function. */
842 avr_return_addr_rtx (int count, rtx tem)
844 rtx r;
846 /* Can only return this function's return address. Others not supported. */
847 if (count)
848 return NULL;
850 if (AVR_3_BYTE_PC)
852 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
853 warning (0, "%<builtin_return_address%> contains only 2 bytes"
854 " of address");
856 else
857 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
859 r = gen_rtx_PLUS (Pmode, tem, r);
860 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
861 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
862 return r;
865 /* Return 1 if the function epilogue is just a single "ret". */
868 avr_simple_epilogue (void)
870 return (! frame_pointer_needed
871 && get_frame_size () == 0
872 && avr_outgoing_args_size() == 0
873 && avr_regs_to_save (NULL) == 0
874 && ! cfun->machine->is_interrupt
875 && ! cfun->machine->is_signal
876 && ! cfun->machine->is_naked
877 && ! TREE_THIS_VOLATILE (current_function_decl));
880 /* This function checks the sequence of live registers. */
882 static int
883 sequent_regs_live (void)
885 int reg;
886 int live_seq = 0;
887 int cur_seq = 0;
889 for (reg = 0; reg < 18; ++reg)
891 if (fixed_regs[reg])
893 /* Don't recognize sequences that contain global register
894 variables. */
896 if (live_seq != 0)
897 return 0;
898 else
899 continue;
902 if (!call_used_regs[reg])
904 if (df_regs_ever_live_p (reg))
906 ++live_seq;
907 ++cur_seq;
909 else
910 cur_seq = 0;
914 if (!frame_pointer_needed)
916 if (df_regs_ever_live_p (REG_Y))
918 ++live_seq;
919 ++cur_seq;
921 else
922 cur_seq = 0;
924 if (df_regs_ever_live_p (REG_Y+1))
926 ++live_seq;
927 ++cur_seq;
929 else
930 cur_seq = 0;
932 else
934 cur_seq += 2;
935 live_seq += 2;
937 return (cur_seq == live_seq) ? live_seq : 0;
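/* Editor's note: the result is nonzero only when the live call-saved
   registers form a single contiguous run ending at r17 (plus the Y pair),
   which is exactly the shape the -mcall-prologues library sequences can
   save and restore; compare first_reg = 18 - (live_seq - 2) in
   avr_prologue_setup_frame below. */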
940 /* Obtain the total instruction length of the insn sequence INSNS. */
943 get_sequence_length (rtx insns)
945 rtx insn;
946 int length;
948 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
949 length += get_attr_length (insn);
951 return length;
955 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
958 avr_incoming_return_addr_rtx (void)
960 /* The return address is at the top of the stack. Note that the push
961 was via post-decrement, which means the actual address is off by one. */
962 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
965 /* Helper for expand_prologue. Emit a push of a byte register. */
967 static void
968 emit_push_byte (unsigned regno, bool frame_related_p)
970 rtx mem, reg, insn;
972 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
973 mem = gen_frame_mem (QImode, mem);
974 reg = gen_rtx_REG (QImode, regno);
976 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
977 if (frame_related_p)
978 RTX_FRAME_RELATED_P (insn) = 1;
980 cfun->machine->stack_usage++;
984 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
985 SFR is a MEM representing the memory location of the SFR.
986 If CLR_P then clear the SFR after the push using zero_reg. */
988 static void
989 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
991 rtx insn;
993 gcc_assert (MEM_P (sfr));
995 /* IN __tmp_reg__, IO(SFR) */
996 insn = emit_move_insn (tmp_reg_rtx, sfr);
997 if (frame_related_p)
998 RTX_FRAME_RELATED_P (insn) = 1;
1000 /* PUSH __tmp_reg__ */
1001 emit_push_byte (TMP_REGNO, frame_related_p);
1003 if (clr_p)
1005 /* OUT IO(SFR), __zero_reg__ */
1006 insn = emit_move_insn (sfr, const0_rtx);
1007 if (frame_related_p)
1008 RTX_FRAME_RELATED_P (insn) = 1;
1012 static void
1013 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1015 rtx insn;
1016 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1017 int live_seq = sequent_regs_live ();
1019 HOST_WIDE_INT size_max
1020 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1022 bool minimize = (TARGET_CALL_PROLOGUES
1023 && size < size_max
1024 && live_seq
1025 && !isr_p
1026 && !cfun->machine->is_OS_task
1027 && !cfun->machine->is_OS_main);
1029 if (minimize
1030 && (frame_pointer_needed
1031 || avr_outgoing_args_size() > 8
1032 || (AVR_2_BYTE_PC && live_seq > 6)
1033 || live_seq > 7))
1035 rtx pattern;
1036 int first_reg, reg, offset;
1038 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1039 gen_int_mode (size, HImode));
1041 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1042 gen_int_mode (live_seq+size, HImode));
1043 insn = emit_insn (pattern);
1044 RTX_FRAME_RELATED_P (insn) = 1;
1046 /* Describe the effect of the unspec_volatile call to prologue_saves.
1047 Note that this formulation assumes that add_reg_note pushes the
1048 notes to the front. Thus we build them in the reverse order of
1049 how we want dwarf2out to process them. */
1051 /* The function does always set frame_pointer_rtx, but whether that
1052 is going to be permanent in the function is frame_pointer_needed. */
1054 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1055 gen_rtx_SET (VOIDmode, (frame_pointer_needed
1056 ? frame_pointer_rtx
1057 : stack_pointer_rtx),
1058 plus_constant (Pmode, stack_pointer_rtx,
1059 -(size + live_seq))));
1061 /* Note that live_seq always contains r28+r29, but the other
1062 registers to be saved are all below 18. */
1064 first_reg = 18 - (live_seq - 2);
1066 for (reg = 29, offset = -live_seq + 1;
1067 reg >= first_reg;
1068 reg = (reg == 28 ? 17 : reg - 1), ++offset)
1070 rtx m, r;
1072 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1073 offset));
1074 r = gen_rtx_REG (QImode, reg);
1075 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
1078 cfun->machine->stack_usage += size + live_seq;
1080 else /* !minimize */
1082 int reg;
1084 for (reg = 0; reg < 32; ++reg)
1085 if (TEST_HARD_REG_BIT (set, reg))
1086 emit_push_byte (reg, true);
1088 if (frame_pointer_needed
1089 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1091 /* Push frame pointer. Always be consistent about the
1092 ordering of pushes -- epilogue_restores expects the
1093 register pair to be pushed low byte first. */
1095 emit_push_byte (REG_Y, true);
1096 emit_push_byte (REG_Y + 1, true);
1099 if (frame_pointer_needed
1100 && size == 0)
1102 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1103 RTX_FRAME_RELATED_P (insn) = 1;
1106 if (size != 0)
1108 /* Creating a frame can be done by direct manipulation of the
1109 stack or via the frame pointer. These two methods are:
1110 fp = sp
1111 fp -= size
1112 sp = fp
1114 sp -= size
1115 fp = sp (*)
1116 the optimum method depends on function type, stack and
1117 frame size. To avoid complex logic, both methods are
1118 tested and the shorter sequence is selected.
1120 There is also the case where SIZE != 0 and no frame pointer is
1121 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1122 In that case, insn (*) is not needed.
1123 We use the X register as scratch. This is safe because X
1124 is call-clobbered.
1125 In an interrupt routine, the case of SIZE != 0 together with
1126 !frame_pointer_needed can only occur if the function is not a
1127 leaf function and thus X has already been saved. */
1129 int irq_state = -1;
1130 HOST_WIDE_INT size_cfa = size, neg_size;
1131 rtx fp_plus_insns, fp, my_fp;
1133 gcc_assert (frame_pointer_needed
1134 || !isr_p
1135 || !crtl->is_leaf);
1137 fp = my_fp = (frame_pointer_needed
1138 ? frame_pointer_rtx
1139 : gen_rtx_REG (Pmode, REG_X));
1141 if (AVR_HAVE_8BIT_SP)
1143 /* The high byte (r29) does not change:
1144 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1146 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1149 /* Cut down size and avoid size = 0 so that we don't run
1150 into an ICE like PR52488 in the remainder. */
1152 if (size > size_max)
1154 /* Don't error so that insane code from newlib still compiles
1155 and does not break building newlib. As PR51345 is implemented
1156 now, there are multilib variants with -msp8.
1158 If the user wants sanity checks, -Wstack-usage= or
1159 similar options can be used.
1161 For CFA we emit the original, non-saturated size so that
1162 the generic machinery is aware of the real stack usage and
1163 will print the above diagnostic as expected. */
1165 size = size_max;
1168 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1169 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1171 /************ Method 1: Adjust frame pointer ************/
1173 start_sequence ();
1175 /* Normally, the dwarf2out frame-related-expr interpreter does
1176 not expect to have the CFA change once the frame pointer is
1177 set up. Thus, we avoid marking the move insn below and
1178 instead indicate that the entire operation is complete after
1179 the frame pointer subtraction is done. */
1181 insn = emit_move_insn (fp, stack_pointer_rtx);
1182 if (frame_pointer_needed)
1184 RTX_FRAME_RELATED_P (insn) = 1;
1185 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1186 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1189 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1190 my_fp, neg_size));
1192 if (frame_pointer_needed)
1194 RTX_FRAME_RELATED_P (insn) = 1;
1195 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1196 gen_rtx_SET (VOIDmode, fp,
1197 plus_constant (Pmode, fp,
1198 -size_cfa)));
1201 /* Copy to stack pointer. Note that since we've already
1202 changed the CFA to the frame pointer this operation
1203 need not be annotated if frame pointer is needed.
1204 Always move through unspec, see PR50063.
1205 For meaning of irq_state see movhi_sp_r insn. */
1207 if (cfun->machine->is_interrupt)
1208 irq_state = 1;
1210 if (TARGET_NO_INTERRUPTS
1211 || cfun->machine->is_signal
1212 || cfun->machine->is_OS_main)
1213 irq_state = 0;
1215 if (AVR_HAVE_8BIT_SP)
1216 irq_state = 2;
1218 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1219 fp, GEN_INT (irq_state)));
1220 if (!frame_pointer_needed)
1222 RTX_FRAME_RELATED_P (insn) = 1;
1223 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1224 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1225 plus_constant (Pmode,
1226 stack_pointer_rtx,
1227 -size_cfa)));
1230 fp_plus_insns = get_insns ();
1231 end_sequence ();
1233 /************ Method 2: Adjust Stack pointer ************/
1235 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1236 can only handle specific offsets. */
1238 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1240 rtx sp_plus_insns;
1242 start_sequence ();
1244 insn = emit_move_insn (stack_pointer_rtx,
1245 plus_constant (Pmode, stack_pointer_rtx,
1246 -size));
1247 RTX_FRAME_RELATED_P (insn) = 1;
1248 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1249 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1250 plus_constant (Pmode,
1251 stack_pointer_rtx,
1252 -size_cfa)));
1253 if (frame_pointer_needed)
1255 insn = emit_move_insn (fp, stack_pointer_rtx);
1256 RTX_FRAME_RELATED_P (insn) = 1;
1259 sp_plus_insns = get_insns ();
1260 end_sequence ();
1262 /************ Use shortest method ************/
1264 emit_insn (get_sequence_length (sp_plus_insns)
1265 < get_sequence_length (fp_plus_insns)
1266 ? sp_plus_insns
1267 : fp_plus_insns);
1269 else
1271 emit_insn (fp_plus_insns);
1274 cfun->machine->stack_usage += size_cfa;
1275 } /* !minimize && size != 0 */
1276 } /* !minimize */
1280 /* Output function prologue. */
1282 void
1283 avr_expand_prologue (void)
1285 HARD_REG_SET set;
1286 HOST_WIDE_INT size;
1288 size = get_frame_size() + avr_outgoing_args_size();
1290 cfun->machine->stack_usage = 0;
1292 /* Prologue: naked. */
1293 if (cfun->machine->is_naked)
1295 return;
1298 avr_regs_to_save (&set);
1300 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1302 /* Enable interrupts. */
1303 if (cfun->machine->is_interrupt)
1304 emit_insn (gen_enable_interrupt ());
1306 /* Push zero reg. */
1307 emit_push_byte (ZERO_REGNO, true);
1309 /* Push tmp reg. */
1310 emit_push_byte (TMP_REGNO, true);
1312 /* Push SREG. */
1313 /* ??? There's no dwarf2 column reserved for SREG. */
1314 emit_push_sfr (sreg_rtx, false, false /* clr */);
1316 /* Clear zero reg. */
1317 emit_move_insn (zero_reg_rtx, const0_rtx);
1319 /* Prevent any attempt to delete the setting of ZERO_REG! */
1320 emit_use (zero_reg_rtx);
1322 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1323 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1325 if (AVR_HAVE_RAMPD)
1326 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1328 if (AVR_HAVE_RAMPX
1329 && TEST_HARD_REG_BIT (set, REG_X)
1330 && TEST_HARD_REG_BIT (set, REG_X + 1))
1332 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1335 if (AVR_HAVE_RAMPY
1336 && (frame_pointer_needed
1337 || (TEST_HARD_REG_BIT (set, REG_Y)
1338 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1340 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1343 if (AVR_HAVE_RAMPZ
1344 && TEST_HARD_REG_BIT (set, REG_Z)
1345 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1347 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1349 } /* is_interrupt is_signal */
1351 avr_prologue_setup_frame (size, set);
1353 if (flag_stack_usage_info)
1354 current_function_static_stack_size = cfun->machine->stack_usage;
1358 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1359 /* Output summary at end of function prologue. */
1361 static void
1362 avr_asm_function_end_prologue (FILE *file)
1364 if (cfun->machine->is_naked)
1366 fputs ("/* prologue: naked */\n", file);
1368 else
1370 if (cfun->machine->is_interrupt)
1372 fputs ("/* prologue: Interrupt */\n", file);
1374 else if (cfun->machine->is_signal)
1376 fputs ("/* prologue: Signal */\n", file);
1378 else
1379 fputs ("/* prologue: function */\n", file);
1382 if (ACCUMULATE_OUTGOING_ARGS)
1383 fprintf (file, "/* outgoing args size = %d */\n",
1384 avr_outgoing_args_size());
1386 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1387 get_frame_size());
1388 fprintf (file, "/* stack size = %d */\n",
1389 cfun->machine->stack_usage);
1390 /* Create the stack-usage symbol here so all functions have it. Add 1 when
1391 using it as an offset so that SP + .L__stack_usage = return address. */
1392 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1396 /* Implement `EPILOGUE_USES'. */
1399 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1401 if (reload_completed
1402 && cfun->machine
1403 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1404 return 1;
1405 return 0;
1408 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1410 static void
1411 emit_pop_byte (unsigned regno)
1413 rtx mem, reg;
1415 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1416 mem = gen_frame_mem (QImode, mem);
1417 reg = gen_rtx_REG (QImode, regno);
1419 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1422 /* Output RTL epilogue. */
1424 void
1425 avr_expand_epilogue (bool sibcall_p)
1427 int reg;
1428 int live_seq;
1429 HARD_REG_SET set;
1430 int minimize;
1431 HOST_WIDE_INT size;
1432 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1434 size = get_frame_size() + avr_outgoing_args_size();
1436 /* epilogue: naked */
1437 if (cfun->machine->is_naked)
1439 gcc_assert (!sibcall_p);
1441 emit_jump_insn (gen_return ());
1442 return;
1445 avr_regs_to_save (&set);
1446 live_seq = sequent_regs_live ();
1448 minimize = (TARGET_CALL_PROLOGUES
1449 && live_seq
1450 && !isr_p
1451 && !cfun->machine->is_OS_task
1452 && !cfun->machine->is_OS_main);
1454 if (minimize
1455 && (live_seq > 4
1456 || frame_pointer_needed
1457 || size))
1459 /* Get rid of frame. */
1461 if (!frame_pointer_needed)
1463 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1466 if (size)
1468 emit_move_insn (frame_pointer_rtx,
1469 plus_constant (Pmode, frame_pointer_rtx, size));
1472 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1473 return;
1476 if (size)
1478 /* Try two methods to adjust stack and select shortest. */
1480 int irq_state = -1;
1481 rtx fp, my_fp;
1482 rtx fp_plus_insns;
1483 HOST_WIDE_INT size_max;
1485 gcc_assert (frame_pointer_needed
1486 || !isr_p
1487 || !crtl->is_leaf);
1489 fp = my_fp = (frame_pointer_needed
1490 ? frame_pointer_rtx
1491 : gen_rtx_REG (Pmode, REG_X));
1493 if (AVR_HAVE_8BIT_SP)
1495 /* The high byte (r29) does not change:
1496 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1498 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1501 /* For rationale see comment in prologue generation. */
1503 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1504 if (size > size_max)
1505 size = size_max;
1506 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1508 /********** Method 1: Adjust fp register **********/
1510 start_sequence ();
1512 if (!frame_pointer_needed)
1513 emit_move_insn (fp, stack_pointer_rtx);
1515 emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1517 /* Copy to stack pointer. */
1519 if (TARGET_NO_INTERRUPTS)
1520 irq_state = 0;
1522 if (AVR_HAVE_8BIT_SP)
1523 irq_state = 2;
1525 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1526 GEN_INT (irq_state)));
1528 fp_plus_insns = get_insns ();
1529 end_sequence ();
1531 /********** Method 2: Adjust Stack pointer **********/
1533 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1535 rtx sp_plus_insns;
1537 start_sequence ();
1539 emit_move_insn (stack_pointer_rtx,
1540 plus_constant (Pmode, stack_pointer_rtx, size));
1542 sp_plus_insns = get_insns ();
1543 end_sequence ();
1545 /************ Use shortest method ************/
1547 emit_insn (get_sequence_length (sp_plus_insns)
1548 < get_sequence_length (fp_plus_insns)
1549 ? sp_plus_insns
1550 : fp_plus_insns);
1552 else
1553 emit_insn (fp_plus_insns);
1554 } /* size != 0 */
1556 if (frame_pointer_needed
1557 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1559 /* Restore previous frame_pointer. See avr_expand_prologue for
1560 rationale for not using pophi. */
1562 emit_pop_byte (REG_Y + 1);
1563 emit_pop_byte (REG_Y);
1566 /* Restore used registers. */
1568 for (reg = 31; reg >= 0; --reg)
1569 if (TEST_HARD_REG_BIT (set, reg))
1570 emit_pop_byte (reg);
1572 if (isr_p)
1574 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1575 The conditions to restore them must be the same as in the prologue. */
1577 if (AVR_HAVE_RAMPZ
1578 && TEST_HARD_REG_BIT (set, REG_Z)
1579 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1581 emit_pop_byte (TMP_REGNO);
1582 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1585 if (AVR_HAVE_RAMPY
1586 && (frame_pointer_needed
1587 || (TEST_HARD_REG_BIT (set, REG_Y)
1588 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1590 emit_pop_byte (TMP_REGNO);
1591 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1594 if (AVR_HAVE_RAMPX
1595 && TEST_HARD_REG_BIT (set, REG_X)
1596 && TEST_HARD_REG_BIT (set, REG_X + 1))
1598 emit_pop_byte (TMP_REGNO);
1599 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1602 if (AVR_HAVE_RAMPD)
1604 emit_pop_byte (TMP_REGNO);
1605 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1608 /* Restore SREG using tmp_reg as scratch. */
1610 emit_pop_byte (TMP_REGNO);
1611 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1613 /* Restore tmp REG. */
1614 emit_pop_byte (TMP_REGNO);
1616 /* Restore zero REG. */
1617 emit_pop_byte (ZERO_REGNO);
1620 if (!sibcall_p)
1621 emit_jump_insn (gen_return ());
1625 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1627 static void
1628 avr_asm_function_begin_epilogue (FILE *file)
1630 fprintf (file, "/* epilogue start */\n");
1634 /* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'. */
1636 static bool
1637 avr_cannot_modify_jumps_p (void)
1640 /* Naked functions must not have any instructions after
1641 their epilogue, see PR42240. */
1643 if (reload_completed
1644 && cfun->machine
1645 && cfun->machine->is_naked)
1647 return true;
1650 return false;
1654 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1656 static bool
1657 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1659 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1660 This hook just serves to hack around PR rtl-optimization/52543 by
1661 claiming that non-generic addresses were mode-dependent so that
1662 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1663 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1664 generic address space which is not true. */
1666 return !ADDR_SPACE_GENERIC_P (as);
1670 /* Helper function for `avr_legitimate_address_p'. */
1672 static inline bool
1673 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1674 RTX_CODE outer_code, bool strict)
1676 return (REG_P (reg)
1677 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1678 as, outer_code, UNKNOWN)
1679 || (!strict
1680 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1684 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1685 machine for a memory operand of mode MODE. */
1687 static bool
1688 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1690 bool ok = CONSTANT_ADDRESS_P (x);
1692 switch (GET_CODE (x))
1694 case REG:
1695 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1696 MEM, strict);
1698 if (strict
1699 && GET_MODE_SIZE (mode) > 4
1700 && REG_X == REGNO (x))
1702 ok = false;
1704 break;
1706 case POST_INC:
1707 case PRE_DEC:
1708 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1709 GET_CODE (x), strict);
1710 break;
1712 case PLUS:
1714 rtx reg = XEXP (x, 0);
1715 rtx op1 = XEXP (x, 1);
1717 if (REG_P (reg)
1718 && CONST_INT_P (op1)
1719 && INTVAL (op1) >= 0)
1721 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1723 if (fit)
1725 ok = (! strict
1726 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1727 PLUS, strict));
1729 if (reg == frame_pointer_rtx
1730 || reg == arg_pointer_rtx)
1732 ok = true;
1735 else if (frame_pointer_needed
1736 && reg == frame_pointer_rtx)
1738 ok = true;
1742 break;
1744 default:
1745 break;
1748 if (avr_log.legitimate_address_p)
1750 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1751 "reload_completed=%d reload_in_progress=%d %s:",
1752 ok, mode, strict, reload_completed, reload_in_progress,
1753 reg_renumber ? "(reg_renumber)" : "");
1755 if (GET_CODE (x) == PLUS
1756 && REG_P (XEXP (x, 0))
1757 && CONST_INT_P (XEXP (x, 1))
1758 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1759 && reg_renumber)
1761 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1762 true_regnum (XEXP (x, 0)));
1765 avr_edump ("\n%r\n", x);
1768 return ok;
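/* Example (editor's note): for HImode, MAX_LD_OFFSET is 64 - 2 = 62, so
   (plus (reg Y) (const_int 62)) is still a legitimate address -- the two
   bytes are reachable as LDD ...,Y+62 and LDD ...,Y+63 -- while an offset
   of 63 would push the high byte out of LDD's 6-bit displacement range. */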
1772 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1773 now only a helper for avr_addr_space_legitimize_address. */
1774 /* Attempts to replace X with a valid
1775 memory address for an operand of mode MODE */
1777 static rtx
1778 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1780 bool big_offset_p = false;
1782 x = oldx;
1784 if (GET_CODE (oldx) == PLUS
1785 && REG_P (XEXP (oldx, 0)))
1787 if (REG_P (XEXP (oldx, 1)))
1788 x = force_reg (GET_MODE (oldx), oldx);
1789 else if (CONST_INT_P (XEXP (oldx, 1)))
1791 int offs = INTVAL (XEXP (oldx, 1));
1792 if (frame_pointer_rtx != XEXP (oldx, 0)
1793 && offs > MAX_LD_OFFSET (mode))
1795 big_offset_p = true;
1796 x = force_reg (GET_MODE (oldx), oldx);
1801 if (avr_log.legitimize_address)
1803 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1805 if (x != oldx)
1806 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1809 return x;
1813 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1814 /* This will allow register R26/27 to be used where it is no worse than normal
1815 base pointers R28/29 or R30/31. For example, if base offset is greater
1816 than 63 bytes or for R++ or --R addressing. */
1819 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1820 int opnum, int type, int addr_type,
1821 int ind_levels ATTRIBUTE_UNUSED,
1822 rtx (*mk_memloc)(rtx,int))
1824 rtx x = *px;
1826 if (avr_log.legitimize_reload_address)
1827 avr_edump ("\n%?:%m %r\n", mode, x);
1829 if (GET_CODE (x) == POST_INC
1830 || GET_CODE (x) == PRE_DEC)
1832 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1833 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1834 opnum, RELOAD_OTHER);
1836 if (avr_log.legitimize_reload_address)
1837 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1838 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1840 return x;
1843 if (GET_CODE (x) == PLUS
1844 && REG_P (XEXP (x, 0))
1845 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1846 && CONST_INT_P (XEXP (x, 1))
1847 && INTVAL (XEXP (x, 1)) >= 1)
1849 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1851 if (fit)
1853 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1855 int regno = REGNO (XEXP (x, 0));
1856 rtx mem = mk_memloc (x, regno);
1858 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1859 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1860 1, (enum reload_type) addr_type);
1862 if (avr_log.legitimize_reload_address)
1863 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1864 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1866 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1867 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1868 opnum, (enum reload_type) type);
1870 if (avr_log.legitimize_reload_address)
1871 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1872 BASE_POINTER_REGS, mem, NULL_RTX);
1874 return x;
1877 else if (! (frame_pointer_needed
1878 && XEXP (x, 0) == frame_pointer_rtx))
1880 push_reload (x, NULL_RTX, px, NULL,
1881 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1882 opnum, (enum reload_type) type);
1884 if (avr_log.legitimize_reload_address)
1885 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1886 POINTER_REGS, x, NULL_RTX);
1888 return x;
1892 return NULL_RTX;
1896 /* Implement `TARGET_SECONDARY_RELOAD' */
1898 static reg_class_t
1899 avr_secondary_reload (bool in_p, rtx x,
1900 reg_class_t reload_class ATTRIBUTE_UNUSED,
1901 enum machine_mode mode, secondary_reload_info *sri)
1903 if (in_p
1904 && MEM_P (x)
1905 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1906 && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
1908 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1910 switch (mode)
1912 default:
1913 gcc_unreachable();
1915 case QImode: sri->icode = CODE_FOR_reload_inqi; break;
1916 case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
1917 case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
1919 case HImode: sri->icode = CODE_FOR_reload_inhi; break;
1920 case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
1921 case HAmode: sri->icode = CODE_FOR_reload_inha; break;
1922 case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
1923 case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
1925 case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
1927 case SImode: sri->icode = CODE_FOR_reload_insi; break;
1928 case SFmode: sri->icode = CODE_FOR_reload_insf; break;
1929 case SQmode: sri->icode = CODE_FOR_reload_insq; break;
1930 case SAmode: sri->icode = CODE_FOR_reload_insa; break;
1931 case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
1932 case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
1936 return NO_REGS;
1940 /* Helper function to print assembler code or to track instruction
1941 sequence lengths. Always returns "".
1943 If PLEN == NULL:
1944 Output assembler code from template TPL with operands supplied
1945 by OPERANDS. This is just forwarding to output_asm_insn.
1947 If PLEN != NULL:
1948 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1949 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1950 Don't output anything.
1953 static const char*
1954 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1956 if (NULL == plen)
1958 output_asm_insn (tpl, operands);
1960 else
1962 if (n_words < 0)
1963 *plen = -n_words;
1964 else
1965 *plen += n_words;
1968 return "";
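/* Typical use (editor's sketch; the operand strings are illustrative):
   output helpers thread PLEN through every instruction they emit, so one
   code path both prints and measures:

     avr_asm_len ("mov %A0,%A1", operands, plen, -1);  // set *plen = 1
     avr_asm_len ("mov %B0,%B1", operands, plen, 1);   // *plen += 1

   With plen == NULL the same calls print the assembler code instead. */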
1972 /* Return a pointer register name as a string. */
1974 static const char*
1975 ptrreg_to_str (int regno)
1977 switch (regno)
1979 case REG_X: return "X";
1980 case REG_Y: return "Y";
1981 case REG_Z: return "Z";
1982 default:
1983 output_operand_lossage ("address operand requires constraint for"
1984 " X, Y, or Z register");
1986 return NULL;
1989 /* Return the condition name as a string.
1990 Used when constructing conditional jumps. */
1992 static const char*
1993 cond_string (enum rtx_code code)
1995 switch (code)
1997 case NE:
1998 return "ne";
1999 case EQ:
2000 return "eq";
2001 case GE:
2002 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2003 return "pl";
2004 else
2005 return "ge";
2006 case LT:
2007 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2008 return "mi";
2009 else
2010 return "lt";
2011 case GEU:
2012 return "sh";
2013 case LTU:
2014 return "lo";
2015 default:
2016 gcc_unreachable ();
2019 return "";
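/* Editor's note: "sh" and "lo" name the unsigned AVR branches BRSH (same
   or higher, i.e. carry clear) and BRLO (lower, i.e. carry set); when the
   overflow flag is unusable, GE/LT fall back to BRPL/BRMI, which test only
   the sign bit. */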
2023 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2024 /* Output ADDR to FILE as address. */
2026 static void
2027 avr_print_operand_address (FILE *file, rtx addr)
2029 switch (GET_CODE (addr))
2031 case REG:
2032 fputs (ptrreg_to_str (REGNO (addr)), file);
2033 break;
2035 case PRE_DEC:
2036 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2037 break;
2039 case POST_INC:
2040 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2041 break;
2043 default:
2044 if (CONSTANT_ADDRESS_P (addr)
2045 && text_segment_operand (addr, VOIDmode))
2047 rtx x = addr;
2048 if (GET_CODE (x) == CONST)
2049 x = XEXP (x, 0);
2050 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2052 /* Assembler gs() will implant word address. Make offset
2053 a byte offset inside gs() for assembler. This is
2054 needed because the more logical (constant+gs(sym)) is not
2055 accepted by gas. For 128K and smaller devices this is ok.
2056 For large devices it will create a trampoline to offset
2057 from symbol which may not be what the user really wanted. */
2059 fprintf (file, "gs(");
2060 output_addr_const (file, XEXP (x,0));
2061 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2062 2 * INTVAL (XEXP (x, 1)));
2063 if (AVR_3_BYTE_PC)
2064 if (warning (0, "pointer offset from symbol may be incorrect"))
2066 output_addr_const (stderr, addr);
2067 fprintf(stderr,"\n");
2070 else
2072 fprintf (file, "gs(");
2073 output_addr_const (file, addr);
2074 fprintf (file, ")");
2077 else
2078 output_addr_const (file, addr);
2083 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2085 static bool
2086 avr_print_operand_punct_valid_p (unsigned char code)
2088 return code == '~' || code == '!';
2092 /* Implement `TARGET_PRINT_OPERAND'. */
2093 /* Output X as assembler operand to file FILE.
2094 For a description of supported %-codes, see top of avr.md. */
2096 static void
2097 avr_print_operand (FILE *file, rtx x, int code)
2099 int abcd = 0;
2101 if (code >= 'A' && code <= 'D')
2102 abcd = code - 'A';
2104 if (code == '~')
2106 if (!AVR_HAVE_JMP_CALL)
2107 fputc ('r', file);
2109 else if (code == '!')
2111 if (AVR_HAVE_EIJMP_EICALL)
2112 fputc ('e', file);
2114 else if (code == 't'
2115 || code == 'T')
2117 static int t_regno = -1;
2118 static int t_nbits = -1;
2120 if (REG_P (x) && t_regno < 0 && code == 'T')
2122 t_regno = REGNO (x);
2123 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2125 else if (CONST_INT_P (x) && t_regno >= 0
2126 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2128 int bpos = INTVAL (x);
2130 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2131 if (code == 'T')
2132 fprintf (file, ",%d", bpos % 8);
2134 t_regno = -1;
2136 else
2137 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2139 else if (REG_P (x))
2141 if (x == zero_reg_rtx)
2142 fprintf (file, "__zero_reg__");
2143 else if (code == 'r' && REGNO (x) < 32)
2144 fprintf (file, "%d", (int) REGNO (x));
2145 else
2146 fputs (reg_names[REGNO (x) + abcd], file);
2148 else if (CONST_INT_P (x))
2150 HOST_WIDE_INT ival = INTVAL (x);
2152 if ('i' != code)
2153 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2154 else if (low_io_address_operand (x, VOIDmode)
2155 || high_io_address_operand (x, VOIDmode))
2157 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2158 fprintf (file, "__RAMPZ__");
2159 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2160 fprintf (file, "__RAMPY__");
2161 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2162 fprintf (file, "__RAMPX__");
2163 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2164 fprintf (file, "__RAMPD__");
2165 else if (AVR_XMEGA && ival == avr_addr.ccp)
2166 fprintf (file, "__CCP__");
2167 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2168 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2169 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2170 else
2172 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2173 ival - avr_current_arch->sfr_offset);
2176 else
2177 fatal_insn ("bad address, not an I/O address:", x);
2179 else if (MEM_P (x))
2181 rtx addr = XEXP (x, 0);
2183 if (code == 'm')
2185 if (!CONSTANT_P (addr))
2186 fatal_insn ("bad address, not a constant:", addr);
2187 /* Assembler template with m-code is data - not progmem section */
2188 if (text_segment_operand (addr, VOIDmode))
2189 if (warning (0, "accessing data memory with"
2190 " program memory address"))
2192 output_addr_const (stderr, addr);
2193 fprintf (stderr, "\n");
2195 output_addr_const (file, addr);
2197 else if (code == 'i')
2199 avr_print_operand (file, addr, 'i');
2201 else if (code == 'o')
2203 if (GET_CODE (addr) != PLUS)
2204 fatal_insn ("bad address, not (reg+disp):", addr);
2206 avr_print_operand (file, XEXP (addr, 1), 0);
2208 else if (code == 'p' || code == 'r')
2210 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2211 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2213 if (code == 'p')
2214 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2215 else
2216 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2218 else if (GET_CODE (addr) == PLUS)
2220 avr_print_operand_address (file, XEXP (addr,0));
2221 if (REGNO (XEXP (addr, 0)) == REG_X)
2222 fatal_insn ("internal compiler error. Bad address:"
2223 ,addr);
2224 fputc ('+', file);
2225 avr_print_operand (file, XEXP (addr,1), code);
2227 else
2228 avr_print_operand_address (file, addr);
2230 else if (code == 'i')
2232 fatal_insn ("bad address, not an I/O address:", x);
2234 else if (code == 'x')
2236 /* Constant progmem address - as used in jmp or call. */
2237 if (0 == text_segment_operand (x, VOIDmode))
2238 if (warning (0, "accessing program memory"
2239 " with data memory address"))
2241 output_addr_const (stderr, x);
2242 fprintf (stderr, "\n");
2244 /* Use a normal symbol for a direct address; no linker trampoline needed. */
2245 output_addr_const (file, x);
2247 else if (CONST_FIXED_P (x))
2249 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2250 if (code != 0)
2251 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2252 code);
2253 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2255 else if (GET_CODE (x) == CONST_DOUBLE)
2257 long val;
2258 REAL_VALUE_TYPE rv;
2259 if (GET_MODE (x) != SFmode)
2260 fatal_insn ("internal compiler error. Unknown mode:", x);
2261 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2262 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2263 fprintf (file, "0x%lx", val);
2265 else if (GET_CODE (x) == CONST_STRING)
2266 fputs (XSTR (x, 0), file);
2267 else if (code == 'j')
2268 fputs (cond_string (GET_CODE (x)), file);
2269 else if (code == 'k')
2270 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2271 else
2272 avr_print_operand_address (file, x);
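/* Worked example for the %T/%t handling above (a sketch): with operands
   (reg:HI 24) and (const_int 10), bit 10 lives in byte 10/8 = 1 of the
   register pair, i.e. in r25, at bit position 10%8 = 2.  Hence "%T"
   prints "r25,2" (e.g. for BST/BLD) and "%t" prints just "r25".  */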
2276 /* Worker function for `NOTICE_UPDATE_CC'. */
2277 /* Update the condition code in the INSN. */
2279 void
2280 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2282 rtx set;
2283 enum attr_cc cc = get_attr_cc (insn);
2285 switch (cc)
2287 default:
2288 break;
2290 case CC_PLUS:
2291 case CC_LDI:
2293 rtx *op = recog_data.operand;
2294 int len_dummy, icc;
2296 /* Extract insn's operands. */
2297 extract_constrain_insn_cached (insn);
2299 switch (cc)
2301 default:
2302 gcc_unreachable();
2304 case CC_PLUS:
2305 avr_out_plus (insn, op, &len_dummy, &icc);
2306 cc = (enum attr_cc) icc;
2307 break;
2309 case CC_LDI:
2311 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2312 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2313 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2314 ? CC_CLOBBER
2315 /* Any other "r,rL" combination does not alter cc0. */
2316 : CC_NONE;
2318 break;
2319 } /* inner switch */
2321 break;
2323 } /* outer switch */
2325 switch (cc)
2327 default:
2328 /* Special values like CC_OUT_PLUS from above have been
2329 mapped to "standard" CC_* values so we never come here. */
2331 gcc_unreachable();
2332 break;
2334 case CC_NONE:
2335 /* Insn does not affect CC at all. */
2336 break;
2338 case CC_SET_N:
2339 CC_STATUS_INIT;
2340 break;
2342 case CC_SET_ZN:
2343 set = single_set (insn);
2344 CC_STATUS_INIT;
2345 if (set)
2347 cc_status.flags |= CC_NO_OVERFLOW;
2348 cc_status.value1 = SET_DEST (set);
2350 break;
2352 case CC_SET_CZN:
2353 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2354 The V flag may or may not be known but that's ok because
2355 alter_cond will change tests to use EQ/NE. */
2356 set = single_set (insn);
2357 CC_STATUS_INIT;
2358 if (set)
2360 cc_status.value1 = SET_DEST (set);
2361 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2363 break;
2365 case CC_COMPARE:
2366 set = single_set (insn);
2367 CC_STATUS_INIT;
2368 if (set)
2369 cc_status.value1 = SET_SRC (set);
2370 break;
2372 case CC_CLOBBER:
2373 /* Insn doesn't leave CC in a usable state. */
2374 CC_STATUS_INIT;
2375 break;
2379 /* Choose mode for jump insn:
2380 1 - relative jump in range -63 <= x <= 62 ;
2381 2 - relative jump in range -2046 <= x <= 2045 ;
2382 3 - absolute jump (only for ATmega[16]03). */
2384 int
2385 avr_jump_mode (rtx x, rtx insn)
2387 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2388 ? XEXP (x, 0) : x));
2389 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2390 int jump_distance = cur_addr - dest_addr;
2392 if (-63 <= jump_distance && jump_distance <= 62)
2393 return 1;
2394 else if (-2046 <= jump_distance && jump_distance <= 2045)
2395 return 2;
2396 else if (AVR_HAVE_JMP_CALL)
2397 return 3;
2399 return 2;
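/* Example (a sketch, assuming insn addresses are counted in words as
   elsewhere in this port): a branch 40 words away yields mode 1 (a single
   BRxx reaches it), 1000 words yields mode 2 (RJMP), and anything farther
   yields mode 3 (JMP) on devices that have JMP/CALL.  Devices without
   JMP/CALL fall back to mode 2: their flash is small enough for RJMP to
   reach any address, possibly by wrap-around.  */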
2402 /* Return an AVR condition jump commands.
2403 X is a comparison RTX.
2404 LEN is a number returned by avr_jump_mode function.
2405 If REVERSE nonzero then condition code in X must be reversed. */
2407 const char*
2408 ret_cond_branch (rtx x, int len, int reverse)
2410 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2412 switch (cond)
2414 case GT:
2415 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2416 return (len == 1 ? ("breq .+2" CR_TAB
2417 "brpl %0") :
2418 len == 2 ? ("breq .+4" CR_TAB
2419 "brmi .+2" CR_TAB
2420 "rjmp %0") :
2421 ("breq .+6" CR_TAB
2422 "brmi .+4" CR_TAB
2423 "jmp %0"));
2425 else
2426 return (len == 1 ? ("breq .+2" CR_TAB
2427 "brge %0") :
2428 len == 2 ? ("breq .+4" CR_TAB
2429 "brlt .+2" CR_TAB
2430 "rjmp %0") :
2431 ("breq .+6" CR_TAB
2432 "brlt .+4" CR_TAB
2433 "jmp %0"));
2434 case GTU:
2435 return (len == 1 ? ("breq .+2" CR_TAB
2436 "brsh %0") :
2437 len == 2 ? ("breq .+4" CR_TAB
2438 "brlo .+2" CR_TAB
2439 "rjmp %0") :
2440 ("breq .+6" CR_TAB
2441 "brlo .+4" CR_TAB
2442 "jmp %0"));
2443 case LE:
2444 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2445 return (len == 1 ? ("breq %0" CR_TAB
2446 "brmi %0") :
2447 len == 2 ? ("breq .+2" CR_TAB
2448 "brpl .+2" CR_TAB
2449 "rjmp %0") :
2450 ("breq .+2" CR_TAB
2451 "brpl .+4" CR_TAB
2452 "jmp %0"));
2453 else
2454 return (len == 1 ? ("breq %0" CR_TAB
2455 "brlt %0") :
2456 len == 2 ? ("breq .+2" CR_TAB
2457 "brge .+2" CR_TAB
2458 "rjmp %0") :
2459 ("breq .+2" CR_TAB
2460 "brge .+4" CR_TAB
2461 "jmp %0"));
2462 case LEU:
2463 return (len == 1 ? ("breq %0" CR_TAB
2464 "brlo %0") :
2465 len == 2 ? ("breq .+2" CR_TAB
2466 "brsh .+2" CR_TAB
2467 "rjmp %0") :
2468 ("breq .+2" CR_TAB
2469 "brsh .+4" CR_TAB
2470 "jmp %0"));
2471 default:
2472 if (reverse)
2474 switch (len)
2476 case 1:
2477 return "br%k1 %0";
2478 case 2:
2479 return ("br%j1 .+2" CR_TAB
2480 "rjmp %0");
2481 default:
2482 return ("br%j1 .+4" CR_TAB
2483 "jmp %0");
2486 else
2488 switch (len)
2490 case 1:
2491 return "br%j1 %0";
2492 case 2:
2493 return ("br%k1 .+2" CR_TAB
2494 "rjmp %0");
2495 default:
2496 return ("br%k1 .+4" CR_TAB
2497 "jmp %0");
2501 return "";
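/* Sketch of the GT case above: signed ">" is decomposed into "not equal,
   and not less".  The len == 1 variant with a usable V flag,

       breq .+2    ; fall through if the operands were equal
       brge %0     ; taken iff greater-or-equal, hence iff greater

   branches to %0 exactly when the comparison was "greater than".  When
   the V flag is unusable (CC_OVERFLOW_UNUSABLE), BRPL/BRMI are used in
   place of BRGE/BRLT.  */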
2505 /* Worker function for `FINAL_PRESCAN_INSN'. */
2506 /* Output insn cost for next insn. */
2508 void
2509 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2510 int num_operands ATTRIBUTE_UNUSED)
2512 if (avr_log.rtx_costs)
2514 rtx set = single_set (insn);
2516 if (set)
2517 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2518 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2519 else
2520 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2521 rtx_cost (PATTERN (insn), INSN, 0,
2522 optimize_insn_for_speed_p()));
2526 /* Return 0 if undefined, 1 if always true or always false. */
2528 int
2529 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2531 unsigned int max = (mode == QImode ? 0xff :
2532 mode == HImode ? 0xffff :
2533 mode == PSImode ? 0xffffff :
2534 mode == SImode ? 0xffffffff : 0);
2535 if (max && op && CONST_INT_P (x))
2537 if (unsigned_condition (op) != op)
2538 max >>= 1;
2540 if (max != (INTVAL (x) & max)
2541 && INTVAL (x) != 0xff)
2542 return 1;
2544 return 0;
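/* Example (a sketch): for QImode, MAX is 0xff.  An unsigned comparison
   such as (gtu:QI reg (const_int 0x100)) involves constant bits outside
   the mode, so its outcome is known at compile time and 1 is returned;
   comparisons against representable values yield 0 and must be evaluated
   at run time.  */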
2548 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2549 /* Returns nonzero if REGNO is the number of a hard
2550 register in which function arguments are sometimes passed. */
2552 int
2553 avr_function_arg_regno_p (int r)
2555 return (r >= 8 && r <= 25);
2559 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2560 /* Initializing the variable cum for the state at the beginning
2561 of the argument list. */
2563 void
2564 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2565 tree fndecl ATTRIBUTE_UNUSED)
2567 cum->nregs = 18;
2568 cum->regno = FIRST_CUM_REG;
2569 if (!libname && stdarg_p (fntype))
2570 cum->nregs = 0;
2572 /* Assume the callee may be tail-called. */
2574 cfun->machine->sibcall_fails = 0;
2577 /* Returns the number of registers to allocate for a function argument. */
2579 static int
2580 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2582 int size;
2584 if (mode == BLKmode)
2585 size = int_size_in_bytes (type);
2586 else
2587 size = GET_MODE_SIZE (mode);
2589 /* Align all function arguments to start in even-numbered registers.
2590 Odd-sized arguments leave holes above them. */
2592 return (size + 1) & ~1;
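/* Example (sketch): a 1-byte char is charged 2 registers and a 3-byte
   __int24 is charged 4, so that the next argument always starts in an
   even-numbered register as stated above.  */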
2596 /* Implement `TARGET_FUNCTION_ARG'. */
2597 /* Controls whether a function argument is passed
2598 in a register, and which register. */
2600 static rtx
2601 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2602 const_tree type, bool named ATTRIBUTE_UNUSED)
2604 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2605 int bytes = avr_num_arg_regs (mode, type);
2607 if (cum->nregs && bytes <= cum->nregs)
2608 return gen_rtx_REG (mode, cum->regno - bytes);
2610 return NULL_RTX;
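/* Illustration (a sketch assuming FIRST_CUM_REG == 26 as set up in
   avr_init_cumulative_args): for "int f (int a, long b)", A takes 2 bytes
   and lands in R24/R25 (26 - 2 = 24), then B takes 4 bytes and lands in
   R20..R23.  An argument that no longer fits in the remaining registers
   is passed on the stack (NULL_RTX above).  */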
2614 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2615 /* Update the summarizer variable CUM to advance past an argument
2616 in the argument list. */
2618 static void
2619 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2620 const_tree type, bool named ATTRIBUTE_UNUSED)
2622 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2623 int bytes = avr_num_arg_regs (mode, type);
2625 cum->nregs -= bytes;
2626 cum->regno -= bytes;
2628 /* A parameter is being passed in a call-saved register. As the original
2629 contents of these regs have to be restored before leaving the function,
2630 a function must not pass arguments in call-saved regs in order to get
2631 tail-called. */
2633 if (cum->regno >= 8
2634 && cum->nregs >= 0
2635 && !call_used_regs[cum->regno])
2637 /* FIXME: We ship info on failing tail-call in struct machine_function.
2638 This uses internals of calls.c:expand_call() and the way args_so_far
2639 is used. targetm.function_ok_for_sibcall() needs to be extended to
2640 pass &args_so_far, too. At present, CUMULATIVE_ARGS is
2641 target-dependent, so such an extension is not wanted. */
2643 cfun->machine->sibcall_fails = 1;
2646 /* Test if all registers needed by the ABI are actually available. If the
2647 user has fixed a GPR needed to pass an argument, an (implicit) function
2648 call will clobber that fixed register. See PR45099 for an example. */
2650 if (cum->regno >= 8
2651 && cum->nregs >= 0)
2653 int regno;
2655 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2656 if (fixed_regs[regno])
2657 warning (0, "fixed register %s used to pass parameter to function",
2658 reg_names[regno]);
2661 if (cum->nregs <= 0)
2663 cum->nregs = 0;
2664 cum->regno = FIRST_CUM_REG;
2668 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2669 /* Decide whether we can make a sibling call to a function. DECL is the
2670 declaration of the function being targeted by the call and EXP is the
2671 CALL_EXPR representing the call. */
2673 static bool
2674 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2676 tree fntype_callee;
2678 /* Tail-calling must fail if callee-saved regs are used to pass
2679 function args. We must not tail-call when `epilogue_restores'
2680 is used. Unfortunately, we cannot tell at this point if that
2681 actually will happen or not, and we cannot step back from
2682 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2684 if (cfun->machine->sibcall_fails
2685 || TARGET_CALL_PROLOGUES)
2687 return false;
2690 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2692 if (decl_callee)
2694 decl_callee = TREE_TYPE (decl_callee);
2696 else
2698 decl_callee = fntype_callee;
2700 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2701 && METHOD_TYPE != TREE_CODE (decl_callee))
2703 decl_callee = TREE_TYPE (decl_callee);
2707 /* Ensure that caller and callee have compatible epilogues */
2709 if (cfun->machine->is_interrupt
2710 || cfun->machine->is_signal
2711 || cfun->machine->is_naked
2712 || avr_naked_function_p (decl_callee)
2713 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2714 || (avr_OS_task_function_p (decl_callee)
2715 != cfun->machine->is_OS_task)
2716 || (avr_OS_main_function_p (decl_callee)
2717 != cfun->machine->is_OS_main))
2719 return false;
2722 return true;
2725 /***********************************************************************
2726 Functions for outputting various mov's for various modes
2727 ************************************************************************/
2729 /* Return true if a value of mode MODE is read from flash by
2730 a __load_* function from libgcc. */
2732 bool
2733 avr_load_libgcc_p (rtx op)
2735 enum machine_mode mode = GET_MODE (op);
2736 int n_bytes = GET_MODE_SIZE (mode);
2738 return (n_bytes > 2
2739 && !AVR_HAVE_LPMX
2740 && avr_mem_flash_p (op));
2743 /* Return true if a value of mode MODE is read by a __xload_* function. */
2745 bool
2746 avr_xload_libgcc_p (enum machine_mode mode)
2748 int n_bytes = GET_MODE_SIZE (mode);
2750 return (n_bytes > 1
2751 || avr_current_device->n_flash > 1);
2755 /* FIXME: This is a hack because secondary reloads don't work as expected.
2757 Find an unused d-register to be used as scratch in INSN.
2758 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2759 is a register, skip all possible return values that overlap EXCLUDE.
2760 The policy for the returned register is similar to that of
2761 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2762 of INSN.
2764 Return a QImode d-register or NULL_RTX if nothing found. */
2766 static rtx
2767 avr_find_unused_d_reg (rtx insn, rtx exclude)
2769 int regno;
2770 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2771 || avr_signal_function_p (current_function_decl));
2773 for (regno = 16; regno < 32; regno++)
2775 rtx reg = all_regs_rtx[regno];
2777 if ((exclude
2778 && reg_overlap_mentioned_p (exclude, reg))
2779 || fixed_regs[regno])
2781 continue;
2784 /* Try non-live register */
2786 if (!df_regs_ever_live_p (regno)
2787 && (TREE_THIS_VOLATILE (current_function_decl)
2788 || cfun->machine->is_OS_task
2789 || cfun->machine->is_OS_main
2790 || (!isr_p && call_used_regs[regno])))
2792 return reg;
2795 /* Any live register can be used if it is unused after.
2796 Prologue/epilogue will care for it as needed. */
2798 if (df_regs_ever_live_p (regno)
2799 && reg_unused_after (insn, reg))
2801 return reg;
2805 return NULL_RTX;
2809 /* Helper function for the next function in the case where only the
2810 restricted version of the LPM instruction is available. */
2812 static const char*
2813 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2815 rtx dest = xop[0];
2816 rtx addr = xop[1];
2817 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2818 int regno_dest;
2820 regno_dest = REGNO (dest);
2822 /* The implicit target register of LPM. */
2823 xop[3] = lpm_reg_rtx;
2825 switch (GET_CODE (addr))
2827 default:
2828 gcc_unreachable();
2830 case REG:
2832 gcc_assert (REG_Z == REGNO (addr));
2834 switch (n_bytes)
2836 default:
2837 gcc_unreachable();
2839 case 1:
2840 avr_asm_len ("%4lpm", xop, plen, 1);
2842 if (regno_dest != LPM_REGNO)
2843 avr_asm_len ("mov %0,%3", xop, plen, 1);
2845 return "";
2847 case 2:
2848 if (REGNO (dest) == REG_Z)
2849 return avr_asm_len ("%4lpm" CR_TAB
2850 "push %3" CR_TAB
2851 "adiw %2,1" CR_TAB
2852 "%4lpm" CR_TAB
2853 "mov %B0,%3" CR_TAB
2854 "pop %A0", xop, plen, 6);
2856 avr_asm_len ("%4lpm" CR_TAB
2857 "mov %A0,%3" CR_TAB
2858 "adiw %2,1" CR_TAB
2859 "%4lpm" CR_TAB
2860 "mov %B0,%3", xop, plen, 5);
2862 if (!reg_unused_after (insn, addr))
2863 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2865 break; /* 2 */
2868 break; /* REG */
2870 case POST_INC:
2872 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2873 && n_bytes <= 4);
2875 if (regno_dest == LPM_REGNO)
2876 avr_asm_len ("%4lpm" CR_TAB
2877 "adiw %2,1", xop, plen, 2);
2878 else
2879 avr_asm_len ("%4lpm" CR_TAB
2880 "mov %A0,%3" CR_TAB
2881 "adiw %2,1", xop, plen, 3);
2883 if (n_bytes >= 2)
2884 avr_asm_len ("%4lpm" CR_TAB
2885 "mov %B0,%3" CR_TAB
2886 "adiw %2,1", xop, plen, 3);
2888 if (n_bytes >= 3)
2889 avr_asm_len ("%4lpm" CR_TAB
2890 "mov %C0,%3" CR_TAB
2891 "adiw %2,1", xop, plen, 3);
2893 if (n_bytes >= 4)
2894 avr_asm_len ("%4lpm" CR_TAB
2895 "mov %D0,%3" CR_TAB
2896 "adiw %2,1", xop, plen, 3);
2898 break; /* POST_INC */
2900 } /* switch CODE (addr) */
2902 return "";
2906 /* If PLEN == NULL: Output instructions to load a value from a memory location
2907 OP[1] in AS1 to register OP[0].
2908 If PLEN != NULL: Set *PLEN to the length in words of the instruction sequence.
2909 Return "". */
2911 const char*
2912 avr_out_lpm (rtx insn, rtx *op, int *plen)
2914 rtx xop[7];
2915 rtx dest = op[0];
2916 rtx src = SET_SRC (single_set (insn));
2917 rtx addr;
2918 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2919 int segment;
2920 RTX_CODE code;
2921 addr_space_t as = MEM_ADDR_SPACE (src);
2923 if (plen)
2924 *plen = 0;
2926 if (MEM_P (dest))
2928 warning (0, "writing to address space %qs not supported",
2929 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2931 return "";
2934 addr = XEXP (src, 0);
2935 code = GET_CODE (addr);
2937 gcc_assert (REG_P (dest));
2938 gcc_assert (REG == code || POST_INC == code);
2940 xop[0] = dest;
2941 xop[1] = addr;
2942 xop[2] = lpm_addr_reg_rtx;
2943 xop[4] = xstring_empty;
2944 xop[5] = tmp_reg_rtx;
2945 xop[6] = XEXP (rampz_rtx, 0);
2947 segment = avr_addrspace[as].segment;
2949 /* Set RAMPZ as needed. */
2951 if (segment)
2953 xop[4] = GEN_INT (segment);
2954 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
2956 if (xop[3] != NULL_RTX)
2958 avr_asm_len ("ldi %3,%4" CR_TAB
2959 "out %i6,%3", xop, plen, 2);
2961 else if (segment == 1)
2963 avr_asm_len ("clr %5" CR_TAB
2964 "inc %5" CR_TAB
2965 "out %i6,%5", xop, plen, 3);
2967 else
2969 avr_asm_len ("mov %5,%2" CR_TAB
2970 "ldi %2,%4" CR_TAB
2971 "out %i6,%2" CR_TAB
2972 "mov %2,%5", xop, plen, 4);
2975 xop[4] = xstring_e;
2977 if (!AVR_HAVE_ELPMX)
2978 return avr_out_lpm_no_lpmx (insn, xop, plen);
2980 else if (!AVR_HAVE_LPMX)
2982 return avr_out_lpm_no_lpmx (insn, xop, plen);
2985 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2987 switch (GET_CODE (addr))
2989 default:
2990 gcc_unreachable();
2992 case REG:
2994 gcc_assert (REG_Z == REGNO (addr));
2996 switch (n_bytes)
2998 default:
2999 gcc_unreachable();
3001 case 1:
3002 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3004 case 2:
3005 if (REGNO (dest) == REG_Z)
3006 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3007 "%4lpm %B0,%a2" CR_TAB
3008 "mov %A0,%5", xop, plen, 3);
3009 else
3011 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3012 "%4lpm %B0,%a2", xop, plen, 2);
3014 if (!reg_unused_after (insn, addr))
3015 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3018 break; /* 2 */
3020 case 3:
3022 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3023 "%4lpm %B0,%a2+" CR_TAB
3024 "%4lpm %C0,%a2", xop, plen, 3);
3026 if (!reg_unused_after (insn, addr))
3027 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3029 break; /* 3 */
3031 case 4:
3033 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3034 "%4lpm %B0,%a2+", xop, plen, 2);
3036 if (REGNO (dest) == REG_Z - 2)
3037 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3038 "%4lpm %C0,%a2" CR_TAB
3039 "mov %D0,%5", xop, plen, 3);
3040 else
3042 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3043 "%4lpm %D0,%a2", xop, plen, 2);
3045 if (!reg_unused_after (insn, addr))
3046 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3049 break; /* 4 */
3050 } /* n_bytes */
3052 break; /* REG */
3054 case POST_INC:
3056 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3057 && n_bytes <= 4);
3059 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3060 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3061 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3062 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3064 break; /* POST_INC */
3066 } /* switch CODE (addr) */
3068 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3070 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3072 xop[0] = zero_reg_rtx;
3073 avr_asm_len ("out %i6,%0", xop, plen, 1);
3076 return "";
3080 /* Worker function for xload_8 insn. */
3082 const char*
3083 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3085 rtx xop[4];
3087 xop[0] = op[0];
3088 xop[1] = op[1];
3089 xop[2] = lpm_addr_reg_rtx;
3090 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3092 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3094 avr_asm_len ("sbrc %1,7" CR_TAB
3095 "ld %3,%a2", xop, plen, 2);
3097 if (REGNO (xop[0]) != REGNO (xop[3]))
3098 avr_asm_len ("mov %0,%3", xop, plen, 1);
3100 return "";
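/* Sketch of the dispatch above: in the __memx address space the most
   significant bit of the 24-bit address selects RAM.  The LPM result is
   computed unconditionally; SBRC then skips the LD unless bit 7 of the
   high byte %1 is set, in which case the value is re-read through Z from
   RAM.  */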
3104 const char*
3105 output_movqi (rtx insn, rtx operands[], int *plen)
3107 rtx dest = operands[0];
3108 rtx src = operands[1];
3110 if (avr_mem_flash_p (src)
3111 || avr_mem_flash_p (dest))
3113 return avr_out_lpm (insn, operands, plen);
3116 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3118 if (REG_P (dest))
3120 if (REG_P (src)) /* mov r,r */
3122 if (test_hard_reg_class (STACK_REG, dest))
3123 return avr_asm_len ("out %0,%1", operands, plen, -1);
3124 else if (test_hard_reg_class (STACK_REG, src))
3125 return avr_asm_len ("in %0,%1", operands, plen, -1);
3127 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3129 else if (CONSTANT_P (src))
3131 output_reload_in_const (operands, NULL_RTX, plen, false);
3132 return "";
3134 else if (MEM_P (src))
3135 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3137 else if (MEM_P (dest))
3139 rtx xop[2];
3141 xop[0] = dest;
3142 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3144 return out_movqi_mr_r (insn, xop, plen);
3147 return "";
3151 const char *
3152 output_movhi (rtx insn, rtx xop[], int *plen)
3154 rtx dest = xop[0];
3155 rtx src = xop[1];
3157 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3159 if (avr_mem_flash_p (src)
3160 || avr_mem_flash_p (dest))
3162 return avr_out_lpm (insn, xop, plen);
3165 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3167 if (REG_P (dest))
3169 if (REG_P (src)) /* mov r,r */
3171 if (test_hard_reg_class (STACK_REG, dest))
3173 if (AVR_HAVE_8BIT_SP)
3174 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3176 if (AVR_XMEGA)
3177 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3178 "out __SP_H__,%B1", xop, plen, -2);
3180 /* Use simple load of SP if no interrupts are used. */
3182 return TARGET_NO_INTERRUPTS
3183 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3184 "out __SP_L__,%A1", xop, plen, -2)
3185 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3186 "cli" CR_TAB
3187 "out __SP_H__,%B1" CR_TAB
3188 "out __SREG__,__tmp_reg__" CR_TAB
3189 "out __SP_L__,%A1", xop, plen, -5);
3191 else if (test_hard_reg_class (STACK_REG, src))
3193 return !AVR_HAVE_SPH
3194 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3195 "clr %B0", xop, plen, -2)
3197 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3198 "in %B0,__SP_H__", xop, plen, -2);
3201 return AVR_HAVE_MOVW
3202 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3204 : avr_asm_len ("mov %A0,%A1" CR_TAB
3205 "mov %B0,%B1", xop, plen, -2);
3206 } /* REG_P (src) */
3207 else if (CONSTANT_P (src))
3209 return output_reload_inhi (xop, NULL, plen);
3211 else if (MEM_P (src))
3213 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3216 else if (MEM_P (dest))
3218 rtx xop[2];
3220 xop[0] = dest;
3221 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3223 return out_movhi_mr_r (insn, xop, plen);
3226 fatal_insn ("invalid insn:", insn);
3228 return "";
3231 static const char*
3232 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
3234 rtx dest = op[0];
3235 rtx src = op[1];
3236 rtx x = XEXP (src, 0);
3238 if (CONSTANT_ADDRESS_P (x))
3240 return optimize > 0 && io_address_operand (x, QImode)
3241 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3242 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3244 else if (GET_CODE (x) == PLUS
3245 && REG_P (XEXP (x, 0))
3246 && CONST_INT_P (XEXP (x, 1)))
3248 /* memory access by reg+disp */
3250 int disp = INTVAL (XEXP (x, 1));
3252 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3254 if (REGNO (XEXP (x, 0)) != REG_Y)
3255 fatal_insn ("incorrect insn:",insn);
3257 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3258 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3259 "ldd %0,Y+63" CR_TAB
3260 "sbiw r28,%o1-63", op, plen, -3);
3262 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3263 "sbci r29,hi8(-%o1)" CR_TAB
3264 "ld %0,Y" CR_TAB
3265 "subi r28,lo8(%o1)" CR_TAB
3266 "sbci r29,hi8(%o1)", op, plen, -5);
3268 else if (REGNO (XEXP (x, 0)) == REG_X)
3270 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3271 it, but I have seen this situation with extreme optimizing options. */
3273 avr_asm_len ("adiw r26,%o1" CR_TAB
3274 "ld %0,X", op, plen, -2);
3276 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3277 && !reg_unused_after (insn, XEXP (x,0)))
3279 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3282 return "";
3285 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3288 return avr_asm_len ("ld %0,%1", op, plen, -1);
3291 static const char*
3292 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3294 rtx dest = op[0];
3295 rtx src = op[1];
3296 rtx base = XEXP (src, 0);
3297 int reg_dest = true_regnum (dest);
3298 int reg_base = true_regnum (base);
3299 /* "volatile" forces reading low byte first, even if less efficient,
3300 for correct operation with 16-bit I/O registers. */
3301 int mem_volatile_p = MEM_VOLATILE_P (src);
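/* On classic AVR, reading the low byte of a 16-bit I/O register such as
   TCNT1 latches the high byte into the hardware TEMP register; reading
   the low byte first therefore yields a consistent 16-bit snapshot.  */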
3303 if (reg_base > 0)
3305 if (reg_dest == reg_base) /* R = (R) */
3306 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3307 "ld %B0,%1" CR_TAB
3308 "mov %A0,__tmp_reg__", op, plen, -3);
3310 if (reg_base != REG_X)
3311 return avr_asm_len ("ld %A0,%1" CR_TAB
3312 "ldd %B0,%1+1", op, plen, -2);
3314 avr_asm_len ("ld %A0,X+" CR_TAB
3315 "ld %B0,X", op, plen, -2);
3317 if (!reg_unused_after (insn, base))
3318 avr_asm_len ("sbiw r26,1", op, plen, 1);
3320 return "";
3322 else if (GET_CODE (base) == PLUS) /* (R + i) */
3324 int disp = INTVAL (XEXP (base, 1));
3325 int reg_base = true_regnum (XEXP (base, 0));
3327 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3329 if (REGNO (XEXP (base, 0)) != REG_Y)
3330 fatal_insn ("incorrect insn:",insn);
3332 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3333 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3334 "ldd %A0,Y+62" CR_TAB
3335 "ldd %B0,Y+63" CR_TAB
3336 "sbiw r28,%o1-62", op, plen, -4)
3338 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3339 "sbci r29,hi8(-%o1)" CR_TAB
3340 "ld %A0,Y" CR_TAB
3341 "ldd %B0,Y+1" CR_TAB
3342 "subi r28,lo8(%o1)" CR_TAB
3343 "sbci r29,hi8(%o1)", op, plen, -6);
3346 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3347 it, but I have seen this situation with extreme
3348 optimization options. */
3350 if (reg_base == REG_X)
3351 return reg_base == reg_dest
3352 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3353 "ld __tmp_reg__,X+" CR_TAB
3354 "ld %B0,X" CR_TAB
3355 "mov %A0,__tmp_reg__", op, plen, -4)
3357 : avr_asm_len ("adiw r26,%o1" CR_TAB
3358 "ld %A0,X+" CR_TAB
3359 "ld %B0,X" CR_TAB
3360 "sbiw r26,%o1+1", op, plen, -4);
3362 return reg_base == reg_dest
3363 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3364 "ldd %B0,%B1" CR_TAB
3365 "mov %A0,__tmp_reg__", op, plen, -3)
3367 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3368 "ldd %B0,%B1", op, plen, -2);
3370 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3372 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3373 fatal_insn ("incorrect insn:", insn);
3375 if (!mem_volatile_p)
3376 return avr_asm_len ("ld %B0,%1" CR_TAB
3377 "ld %A0,%1", op, plen, -2);
3379 return REGNO (XEXP (base, 0)) == REG_X
3380 ? avr_asm_len ("sbiw r26,2" CR_TAB
3381 "ld %A0,X+" CR_TAB
3382 "ld %B0,X" CR_TAB
3383 "sbiw r26,1", op, plen, -4)
3385 : avr_asm_len ("sbiw %r1,2" CR_TAB
3386 "ld %A0,%p1" CR_TAB
3387 "ldd %B0,%p1+1", op, plen, -3);
3389 else if (GET_CODE (base) == POST_INC) /* (R++) */
3391 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3392 fatal_insn ("incorrect insn:", insn);
3394 return avr_asm_len ("ld %A0,%1" CR_TAB
3395 "ld %B0,%1", op, plen, -2);
3397 else if (CONSTANT_ADDRESS_P (base))
3399 return optimize > 0 && io_address_operand (base, HImode)
3400 ? avr_asm_len ("in %A0,%i1" CR_TAB
3401 "in %B0,%i1+1", op, plen, -2)
3403 : avr_asm_len ("lds %A0,%m1" CR_TAB
3404 "lds %B0,%m1+1", op, plen, -4);
3407 fatal_insn ("unknown move insn:",insn);
3408 return "";
3411 static const char*
3412 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3414 rtx dest = op[0];
3415 rtx src = op[1];
3416 rtx base = XEXP (src, 0);
3417 int reg_dest = true_regnum (dest);
3418 int reg_base = true_regnum (base);
3419 int tmp;
3421 if (!l)
3422 l = &tmp;
3424 if (reg_base > 0)
3426 if (reg_base == REG_X) /* (R26) */
3428 if (reg_dest == REG_X)
3429 /* "ld r26,-X" is undefined */
3430 return *l=7, ("adiw r26,3" CR_TAB
3431 "ld r29,X" CR_TAB
3432 "ld r28,-X" CR_TAB
3433 "ld __tmp_reg__,-X" CR_TAB
3434 "sbiw r26,1" CR_TAB
3435 "ld r26,X" CR_TAB
3436 "mov r27,__tmp_reg__");
3437 else if (reg_dest == REG_X - 2)
3438 return *l=5, ("ld %A0,X+" CR_TAB
3439 "ld %B0,X+" CR_TAB
3440 "ld __tmp_reg__,X+" CR_TAB
3441 "ld %D0,X" CR_TAB
3442 "mov %C0,__tmp_reg__");
3443 else if (reg_unused_after (insn, base))
3444 return *l=4, ("ld %A0,X+" CR_TAB
3445 "ld %B0,X+" CR_TAB
3446 "ld %C0,X+" CR_TAB
3447 "ld %D0,X");
3448 else
3449 return *l=5, ("ld %A0,X+" CR_TAB
3450 "ld %B0,X+" CR_TAB
3451 "ld %C0,X+" CR_TAB
3452 "ld %D0,X" CR_TAB
3453 "sbiw r26,3");
3455 else
3457 if (reg_dest == reg_base)
3458 return *l=5, ("ldd %D0,%1+3" CR_TAB
3459 "ldd %C0,%1+2" CR_TAB
3460 "ldd __tmp_reg__,%1+1" CR_TAB
3461 "ld %A0,%1" CR_TAB
3462 "mov %B0,__tmp_reg__");
3463 else if (reg_base == reg_dest + 2)
3464 return *l=5, ("ld %A0,%1" CR_TAB
3465 "ldd %B0,%1+1" CR_TAB
3466 "ldd __tmp_reg__,%1+2" CR_TAB
3467 "ldd %D0,%1+3" CR_TAB
3468 "mov %C0,__tmp_reg__");
3469 else
3470 return *l=4, ("ld %A0,%1" CR_TAB
3471 "ldd %B0,%1+1" CR_TAB
3472 "ldd %C0,%1+2" CR_TAB
3473 "ldd %D0,%1+3");
3476 else if (GET_CODE (base) == PLUS) /* (R + i) */
3478 int disp = INTVAL (XEXP (base, 1));
3480 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3482 if (REGNO (XEXP (base, 0)) != REG_Y)
3483 fatal_insn ("incorrect insn:",insn);
3485 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3486 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3487 "ldd %A0,Y+60" CR_TAB
3488 "ldd %B0,Y+61" CR_TAB
3489 "ldd %C0,Y+62" CR_TAB
3490 "ldd %D0,Y+63" CR_TAB
3491 "sbiw r28,%o1-60");
3493 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3494 "sbci r29,hi8(-%o1)" CR_TAB
3495 "ld %A0,Y" CR_TAB
3496 "ldd %B0,Y+1" CR_TAB
3497 "ldd %C0,Y+2" CR_TAB
3498 "ldd %D0,Y+3" CR_TAB
3499 "subi r28,lo8(%o1)" CR_TAB
3500 "sbci r29,hi8(%o1)");
3503 reg_base = true_regnum (XEXP (base, 0));
3504 if (reg_base == REG_X)
3506 /* R = (X + d) */
3507 if (reg_dest == REG_X)
3509 *l = 7;
3510 /* "ld r26,-X" is undefined */
3511 return ("adiw r26,%o1+3" CR_TAB
3512 "ld r29,X" CR_TAB
3513 "ld r28,-X" CR_TAB
3514 "ld __tmp_reg__,-X" CR_TAB
3515 "sbiw r26,1" CR_TAB
3516 "ld r26,X" CR_TAB
3517 "mov r27,__tmp_reg__");
3519 *l = 6;
3520 if (reg_dest == REG_X - 2)
3521 return ("adiw r26,%o1" CR_TAB
3522 "ld r24,X+" CR_TAB
3523 "ld r25,X+" CR_TAB
3524 "ld __tmp_reg__,X+" CR_TAB
3525 "ld r27,X" CR_TAB
3526 "mov r26,__tmp_reg__");
3528 return ("adiw r26,%o1" CR_TAB
3529 "ld %A0,X+" CR_TAB
3530 "ld %B0,X+" CR_TAB
3531 "ld %C0,X+" CR_TAB
3532 "ld %D0,X" CR_TAB
3533 "sbiw r26,%o1+3");
3535 if (reg_dest == reg_base)
3536 return *l=5, ("ldd %D0,%D1" CR_TAB
3537 "ldd %C0,%C1" CR_TAB
3538 "ldd __tmp_reg__,%B1" CR_TAB
3539 "ldd %A0,%A1" CR_TAB
3540 "mov %B0,__tmp_reg__");
3541 else if (reg_dest == reg_base - 2)
3542 return *l=5, ("ldd %A0,%A1" CR_TAB
3543 "ldd %B0,%B1" CR_TAB
3544 "ldd __tmp_reg__,%C1" CR_TAB
3545 "ldd %D0,%D1" CR_TAB
3546 "mov %C0,__tmp_reg__");
3547 return *l=4, ("ldd %A0,%A1" CR_TAB
3548 "ldd %B0,%B1" CR_TAB
3549 "ldd %C0,%C1" CR_TAB
3550 "ldd %D0,%D1");
3552 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3553 return *l=4, ("ld %D0,%1" CR_TAB
3554 "ld %C0,%1" CR_TAB
3555 "ld %B0,%1" CR_TAB
3556 "ld %A0,%1");
3557 else if (GET_CODE (base) == POST_INC) /* (R++) */
3558 return *l=4, ("ld %A0,%1" CR_TAB
3559 "ld %B0,%1" CR_TAB
3560 "ld %C0,%1" CR_TAB
3561 "ld %D0,%1");
3562 else if (CONSTANT_ADDRESS_P (base))
3563 return *l=8, ("lds %A0,%m1" CR_TAB
3564 "lds %B0,%m1+1" CR_TAB
3565 "lds %C0,%m1+2" CR_TAB
3566 "lds %D0,%m1+3");
3568 fatal_insn ("unknown move insn:",insn);
3569 return "";
3572 static const char*
3573 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3575 rtx dest = op[0];
3576 rtx src = op[1];
3577 rtx base = XEXP (dest, 0);
3578 int reg_base = true_regnum (base);
3579 int reg_src = true_regnum (src);
3580 int tmp;
3582 if (!l)
3583 l = &tmp;
3585 if (CONSTANT_ADDRESS_P (base))
3586 return *l=8,("sts %m0,%A1" CR_TAB
3587 "sts %m0+1,%B1" CR_TAB
3588 "sts %m0+2,%C1" CR_TAB
3589 "sts %m0+3,%D1");
3590 if (reg_base > 0) /* (r) */
3592 if (reg_base == REG_X) /* (R26) */
3594 if (reg_src == REG_X)
3596 /* "st X+,r26" is undefined */
3597 if (reg_unused_after (insn, base))
3598 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3599 "st X,r26" CR_TAB
3600 "adiw r26,1" CR_TAB
3601 "st X+,__tmp_reg__" CR_TAB
3602 "st X+,r28" CR_TAB
3603 "st X,r29");
3604 else
3605 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3606 "st X,r26" CR_TAB
3607 "adiw r26,1" CR_TAB
3608 "st X+,__tmp_reg__" CR_TAB
3609 "st X+,r28" CR_TAB
3610 "st X,r29" CR_TAB
3611 "sbiw r26,3");
3613 else if (reg_base == reg_src + 2)
3615 if (reg_unused_after (insn, base))
3616 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3617 "mov __tmp_reg__,%D1" CR_TAB
3618 "st %0+,%A1" CR_TAB
3619 "st %0+,%B1" CR_TAB
3620 "st %0+,__zero_reg__" CR_TAB
3621 "st %0,__tmp_reg__" CR_TAB
3622 "clr __zero_reg__");
3623 else
3624 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3625 "mov __tmp_reg__,%D1" CR_TAB
3626 "st %0+,%A1" CR_TAB
3627 "st %0+,%B1" CR_TAB
3628 "st %0+,__zero_reg__" CR_TAB
3629 "st %0,__tmp_reg__" CR_TAB
3630 "clr __zero_reg__" CR_TAB
3631 "sbiw r26,3");
3633 return *l=5, ("st %0+,%A1" CR_TAB
3634 "st %0+,%B1" CR_TAB
3635 "st %0+,%C1" CR_TAB
3636 "st %0,%D1" CR_TAB
3637 "sbiw r26,3");
3639 else
3640 return *l=4, ("st %0,%A1" CR_TAB
3641 "std %0+1,%B1" CR_TAB
3642 "std %0+2,%C1" CR_TAB
3643 "std %0+3,%D1");
3645 else if (GET_CODE (base) == PLUS) /* (R + i) */
3647 int disp = INTVAL (XEXP (base, 1));
3648 reg_base = REGNO (XEXP (base, 0));
3649 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3651 if (reg_base != REG_Y)
3652 fatal_insn ("incorrect insn:",insn);
3654 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3655 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3656 "std Y+60,%A1" CR_TAB
3657 "std Y+61,%B1" CR_TAB
3658 "std Y+62,%C1" CR_TAB
3659 "std Y+63,%D1" CR_TAB
3660 "sbiw r28,%o0-60");
3662 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3663 "sbci r29,hi8(-%o0)" CR_TAB
3664 "st Y,%A1" CR_TAB
3665 "std Y+1,%B1" CR_TAB
3666 "std Y+2,%C1" CR_TAB
3667 "std Y+3,%D1" CR_TAB
3668 "subi r28,lo8(%o0)" CR_TAB
3669 "sbci r29,hi8(%o0)");
3671 if (reg_base == REG_X)
3673 /* (X + d) = R */
3674 if (reg_src == REG_X)
3676 *l = 9;
3677 return ("mov __tmp_reg__,r26" CR_TAB
3678 "mov __zero_reg__,r27" CR_TAB
3679 "adiw r26,%o0" CR_TAB
3680 "st X+,__tmp_reg__" CR_TAB
3681 "st X+,__zero_reg__" CR_TAB
3682 "st X+,r28" CR_TAB
3683 "st X,r29" CR_TAB
3684 "clr __zero_reg__" CR_TAB
3685 "sbiw r26,%o0+3");
3687 else if (reg_src == REG_X - 2)
3689 *l = 9;
3690 return ("mov __tmp_reg__,r26" CR_TAB
3691 "mov __zero_reg__,r27" CR_TAB
3692 "adiw r26,%o0" CR_TAB
3693 "st X+,r24" CR_TAB
3694 "st X+,r25" CR_TAB
3695 "st X+,__tmp_reg__" CR_TAB
3696 "st X,__zero_reg__" CR_TAB
3697 "clr __zero_reg__" CR_TAB
3698 "sbiw r26,%o0+3");
3700 *l = 6;
3701 return ("adiw r26,%o0" CR_TAB
3702 "st X+,%A1" CR_TAB
3703 "st X+,%B1" CR_TAB
3704 "st X+,%C1" CR_TAB
3705 "st X,%D1" CR_TAB
3706 "sbiw r26,%o0+3");
3708 return *l=4, ("std %A0,%A1" CR_TAB
3709 "std %B0,%B1" CR_TAB
3710 "std %C0,%C1" CR_TAB
3711 "std %D0,%D1");
3713 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3714 return *l=4, ("st %0,%D1" CR_TAB
3715 "st %0,%C1" CR_TAB
3716 "st %0,%B1" CR_TAB
3717 "st %0,%A1");
3718 else if (GET_CODE (base) == POST_INC) /* (R++) */
3719 return *l=4, ("st %0,%A1" CR_TAB
3720 "st %0,%B1" CR_TAB
3721 "st %0,%C1" CR_TAB
3722 "st %0,%D1");
3723 fatal_insn ("unknown move insn:",insn);
3724 return "";
3727 const char *
3728 output_movsisf (rtx insn, rtx operands[], int *l)
3730 int dummy;
3731 rtx dest = operands[0];
3732 rtx src = operands[1];
3733 int *real_l = l;
3735 if (avr_mem_flash_p (src)
3736 || avr_mem_flash_p (dest))
3738 return avr_out_lpm (insn, operands, real_l);
3741 if (!l)
3742 l = &dummy;
3744 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
3745 if (REG_P (dest))
3747 if (REG_P (src)) /* mov r,r */
3749 if (true_regnum (dest) > true_regnum (src))
3751 if (AVR_HAVE_MOVW)
3753 *l = 2;
3754 return ("movw %C0,%C1" CR_TAB
3755 "movw %A0,%A1");
3757 *l = 4;
3758 return ("mov %D0,%D1" CR_TAB
3759 "mov %C0,%C1" CR_TAB
3760 "mov %B0,%B1" CR_TAB
3761 "mov %A0,%A1");
3763 else
3765 if (AVR_HAVE_MOVW)
3767 *l = 2;
3768 return ("movw %A0,%A1" CR_TAB
3769 "movw %C0,%C1");
3771 *l = 4;
3772 return ("mov %A0,%A1" CR_TAB
3773 "mov %B0,%B1" CR_TAB
3774 "mov %C0,%C1" CR_TAB
3775 "mov %D0,%D1");
3778 else if (CONSTANT_P (src))
3780 return output_reload_insisf (operands, NULL_RTX, real_l);
3782 else if (MEM_P (src))
3783 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3785 else if (MEM_P (dest))
3787 const char *templ;
3789 if (src == CONST0_RTX (GET_MODE (dest)))
3790 operands[1] = zero_reg_rtx;
3792 templ = out_movsi_mr_r (insn, operands, real_l);
3794 if (!real_l)
3795 output_asm_insn (templ, operands);
3797 operands[1] = src;
3798 return "";
3800 fatal_insn ("invalid insn:", insn);
3801 return "";
3805 /* Handle loads of 24-bit types from memory to register. */
3807 static const char*
3808 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3810 rtx dest = op[0];
3811 rtx src = op[1];
3812 rtx base = XEXP (src, 0);
3813 int reg_dest = true_regnum (dest);
3814 int reg_base = true_regnum (base);
3816 if (reg_base > 0)
3818 if (reg_base == REG_X) /* (R26) */
3820 if (reg_dest == REG_X)
3821 /* "ld r26,-X" is undefined */
3822 return avr_asm_len ("adiw r26,2" CR_TAB
3823 "ld r28,X" CR_TAB
3824 "ld __tmp_reg__,-X" CR_TAB
3825 "sbiw r26,1" CR_TAB
3826 "ld r26,X" CR_TAB
3827 "mov r27,__tmp_reg__", op, plen, -6);
3828 else
3830 avr_asm_len ("ld %A0,X+" CR_TAB
3831 "ld %B0,X+" CR_TAB
3832 "ld %C0,X", op, plen, -3);
3834 if (reg_dest != REG_X - 2
3835 && !reg_unused_after (insn, base))
3837 avr_asm_len ("sbiw r26,2", op, plen, 1);
3840 return "";
3843 else /* reg_base != REG_X */
3845 if (reg_dest == reg_base)
3846 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3847 "ldd __tmp_reg__,%1+1" CR_TAB
3848 "ld %A0,%1" CR_TAB
3849 "mov %B0,__tmp_reg__", op, plen, -4);
3850 else
3851 return avr_asm_len ("ld %A0,%1" CR_TAB
3852 "ldd %B0,%1+1" CR_TAB
3853 "ldd %C0,%1+2", op, plen, -3);
3856 else if (GET_CODE (base) == PLUS) /* (R + i) */
3858 int disp = INTVAL (XEXP (base, 1));
3860 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3862 if (REGNO (XEXP (base, 0)) != REG_Y)
3863 fatal_insn ("incorrect insn:",insn);
3865 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3866 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3867 "ldd %A0,Y+61" CR_TAB
3868 "ldd %B0,Y+62" CR_TAB
3869 "ldd %C0,Y+63" CR_TAB
3870 "sbiw r28,%o1-61", op, plen, -5);
3872 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3873 "sbci r29,hi8(-%o1)" CR_TAB
3874 "ld %A0,Y" CR_TAB
3875 "ldd %B0,Y+1" CR_TAB
3876 "ldd %C0,Y+2" CR_TAB
3877 "subi r28,lo8(%o1)" CR_TAB
3878 "sbci r29,hi8(%o1)", op, plen, -7);
3881 reg_base = true_regnum (XEXP (base, 0));
3882 if (reg_base == REG_X)
3884 /* R = (X + d) */
3885 if (reg_dest == REG_X)
3887 /* "ld r26,-X" is undefined */
3888 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3889 "ld r28,X" CR_TAB
3890 "ld __tmp_reg__,-X" CR_TAB
3891 "sbiw r26,1" CR_TAB
3892 "ld r26,X" CR_TAB
3893 "mov r27,__tmp_reg__", op, plen, -6);
3896 avr_asm_len ("adiw r26,%o1" CR_TAB
3897 "ld %A0,X+" CR_TAB
3898 "ld %B0,X+" CR_TAB
3899 "ld %C0,X", op, plen, -4);
3901 if (reg_dest != REG_W
3902 && !reg_unused_after (insn, XEXP (base, 0)))
3903 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3905 return "";
3908 if (reg_dest == reg_base)
3909 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3910 "ldd __tmp_reg__,%B1" CR_TAB
3911 "ldd %A0,%A1" CR_TAB
3912 "mov %B0,__tmp_reg__", op, plen, -4);
3914 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3915 "ldd %B0,%B1" CR_TAB
3916 "ldd %C0,%C1", op, plen, -3);
3918 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3919 return avr_asm_len ("ld %C0,%1" CR_TAB
3920 "ld %B0,%1" CR_TAB
3921 "ld %A0,%1", op, plen, -3);
3922 else if (GET_CODE (base) == POST_INC) /* (R++) */
3923 return avr_asm_len ("ld %A0,%1" CR_TAB
3924 "ld %B0,%1" CR_TAB
3925 "ld %C0,%1", op, plen, -3);
3927 else if (CONSTANT_ADDRESS_P (base))
3928 return avr_asm_len ("lds %A0,%m1" CR_TAB
3929 "lds %B0,%m1+1" CR_TAB
3930 "lds %C0,%m1+2", op, plen , -6);
3932 fatal_insn ("unknown move insn:",insn);
3933 return "";
3936 /* Handle store of 24-bit type from register or zero to memory. */
3938 static const char*
3939 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3941 rtx dest = op[0];
3942 rtx src = op[1];
3943 rtx base = XEXP (dest, 0);
3944 int reg_base = true_regnum (base);
3946 if (CONSTANT_ADDRESS_P (base))
3947 return avr_asm_len ("sts %m0,%A1" CR_TAB
3948 "sts %m0+1,%B1" CR_TAB
3949 "sts %m0+2,%C1", op, plen, -6);
3951 if (reg_base > 0) /* (r) */
3953 if (reg_base == REG_X) /* (R26) */
3955 gcc_assert (!reg_overlap_mentioned_p (base, src));
3957 avr_asm_len ("st %0+,%A1" CR_TAB
3958 "st %0+,%B1" CR_TAB
3959 "st %0,%C1", op, plen, -3);
3961 if (!reg_unused_after (insn, base))
3962 avr_asm_len ("sbiw r26,2", op, plen, 1);
3964 return "";
3966 else
3967 return avr_asm_len ("st %0,%A1" CR_TAB
3968 "std %0+1,%B1" CR_TAB
3969 "std %0+2,%C1", op, plen, -3);
3971 else if (GET_CODE (base) == PLUS) /* (R + i) */
3973 int disp = INTVAL (XEXP (base, 1));
3974 reg_base = REGNO (XEXP (base, 0));
3976 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3978 if (reg_base != REG_Y)
3979 fatal_insn ("incorrect insn:",insn);
3981 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3982 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3983 "std Y+61,%A1" CR_TAB
3984 "std Y+62,%B1" CR_TAB
3985 "std Y+63,%C1" CR_TAB
3986 "sbiw r28,%o0-60", op, plen, -5);
3988 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3989 "sbci r29,hi8(-%o0)" CR_TAB
3990 "st Y,%A1" CR_TAB
3991 "std Y+1,%B1" CR_TAB
3992 "std Y+2,%C1" CR_TAB
3993 "subi r28,lo8(%o0)" CR_TAB
3994 "sbci r29,hi8(%o0)", op, plen, -7);
3996 if (reg_base == REG_X)
3998 /* (X + d) = R */
3999 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4001 avr_asm_len ("adiw r26,%o0" CR_TAB
4002 "st X+,%A1" CR_TAB
4003 "st X+,%B1" CR_TAB
4004 "st X,%C1", op, plen, -4);
4006 if (!reg_unused_after (insn, XEXP (base, 0)))
4007 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4009 return "";
4012 return avr_asm_len ("std %A0,%A1" CR_TAB
4013 "std %B0,%B1" CR_TAB
4014 "std %C0,%C1", op, plen, -3);
4016 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4017 return avr_asm_len ("st %0,%C1" CR_TAB
4018 "st %0,%B1" CR_TAB
4019 "st %0,%A1", op, plen, -3);
4020 else if (GET_CODE (base) == POST_INC) /* (R++) */
4021 return avr_asm_len ("st %0,%A1" CR_TAB
4022 "st %0,%B1" CR_TAB
4023 "st %0,%C1", op, plen, -3);
4025 fatal_insn ("unknown move insn:",insn);
4026 return "";
4030 /* Move around 24-bit stuff. */
4032 const char *
4033 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4035 rtx dest = op[0];
4036 rtx src = op[1];
4038 if (avr_mem_flash_p (src)
4039 || avr_mem_flash_p (dest))
4041 return avr_out_lpm (insn, op, plen);
4044 if (register_operand (dest, VOIDmode))
4046 if (register_operand (src, VOIDmode)) /* mov r,r */
4048 if (true_regnum (dest) > true_regnum (src))
4050 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4052 if (AVR_HAVE_MOVW)
4053 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4054 else
4055 return avr_asm_len ("mov %B0,%B1" CR_TAB
4056 "mov %A0,%A1", op, plen, 2);
4058 else
4060 if (AVR_HAVE_MOVW)
4061 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4062 else
4063 avr_asm_len ("mov %A0,%A1" CR_TAB
4064 "mov %B0,%B1", op, plen, -2);
4066 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4069 else if (CONSTANT_P (src))
4071 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4073 else if (MEM_P (src))
4074 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4076 else if (MEM_P (dest))
4078 rtx xop[2];
4080 xop[0] = dest;
4081 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4083 return avr_out_store_psi (insn, xop, plen);
4086 fatal_insn ("invalid insn:", insn);
4087 return "";
4091 static const char*
4092 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
4094 rtx dest = op[0];
4095 rtx src = op[1];
4096 rtx x = XEXP (dest, 0);
4098 if (CONSTANT_ADDRESS_P (x))
4100 return optimize > 0 && io_address_operand (x, QImode)
4101 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4102 : avr_asm_len ("sts %m0,%1", op, plen, -2);
4104 else if (GET_CODE (x) == PLUS
4105 && REG_P (XEXP (x, 0))
4106 && CONST_INT_P (XEXP (x, 1)))
4108 /* memory access by reg+disp */
4110 int disp = INTVAL (XEXP (x, 1));
4112 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4114 if (REGNO (XEXP (x, 0)) != REG_Y)
4115 fatal_insn ("incorrect insn:",insn);
4117 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4118 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4119 "std Y+63,%1" CR_TAB
4120 "sbiw r28,%o0-63", op, plen, -3);
4122 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4123 "sbci r29,hi8(-%o0)" CR_TAB
4124 "st Y,%1" CR_TAB
4125 "subi r28,lo8(%o0)" CR_TAB
4126 "sbci r29,hi8(%o0)", op, plen, -5);
4128 else if (REGNO (XEXP (x,0)) == REG_X)
4130 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4132 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4133 "adiw r26,%o0" CR_TAB
4134 "st X,__tmp_reg__", op, plen, -3);
4136 else
4138 avr_asm_len ("adiw r26,%o0" CR_TAB
4139 "st X,%1", op, plen, -2);
4142 if (!reg_unused_after (insn, XEXP (x,0)))
4143 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4145 return "";
4148 return avr_asm_len ("std %0,%1", op, plen, -1);
4151 return avr_asm_len ("st %0,%1", op, plen, -1);
4155 /* Helper for the next function for XMEGA. It does the same
4156 but with low byte first. */
4158 static const char*
4159 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
4161 rtx dest = op[0];
4162 rtx src = op[1];
4163 rtx base = XEXP (dest, 0);
4164 int reg_base = true_regnum (base);
4165 int reg_src = true_regnum (src);
4167 /* "volatile" forces writing low byte first, even if less efficient,
4168 for correct operation with 16-bit I/O registers like SP. */
4169 int mem_volatile_p = MEM_VOLATILE_P (dest);
4171 if (CONSTANT_ADDRESS_P (base))
4172 return optimize > 0 && io_address_operand (base, HImode)
4173 ? avr_asm_len ("out %i0,%A1" CR_TAB
4174 "out %i0+1,%B1", op, plen, -2)
4176 : avr_asm_len ("sts %m0,%A1" CR_TAB
4177 "sts %m0+1,%B1", op, plen, -4);
4179 if (reg_base > 0)
4181 if (reg_base != REG_X)
4182 return avr_asm_len ("st %0,%A1" CR_TAB
4183 "std %0+1,%B1", op, plen, -2);
4185 if (reg_src == REG_X)
4186 /* "st X+,r26" and "st -X,r26" are undefined. */
4187 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4188 "st X,r26" CR_TAB
4189 "adiw r26,1" CR_TAB
4190 "st X,__tmp_reg__", op, plen, -4);
4191 else
4192 avr_asm_len ("st X+,%A1" CR_TAB
4193 "st X,%B1", op, plen, -2);
4195 return reg_unused_after (insn, base)
4196 ? ""
4197 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4199 else if (GET_CODE (base) == PLUS)
4201 int disp = INTVAL (XEXP (base, 1));
4202 reg_base = REGNO (XEXP (base, 0));
4203 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4205 if (reg_base != REG_Y)
4206 fatal_insn ("incorrect insn:",insn);
4208 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4209 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4210 "std Y+62,%A1" CR_TAB
4211 "std Y+63,%B1" CR_TAB
4212 "sbiw r28,%o0-62", op, plen, -4)
4214 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4215 "sbci r29,hi8(-%o0)" CR_TAB
4216 "st Y,%A1" CR_TAB
4217 "std Y+1,%B1" CR_TAB
4218 "subi r28,lo8(%o0)" CR_TAB
4219 "sbci r29,hi8(%o0)", op, plen, -6);
4222 if (reg_base != REG_X)
4223 return avr_asm_len ("std %A0,%A1" CR_TAB
4224 "std %B0,%B1", op, plen, -2);
4225 /* (X + d) = R */
4226 return reg_src == REG_X
4227 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4228 "mov __zero_reg__,r27" CR_TAB
4229 "adiw r26,%o0" CR_TAB
4230 "st X+,__tmp_reg__" CR_TAB
4231 "st X,__zero_reg__" CR_TAB
4232 "clr __zero_reg__" CR_TAB
4233 "sbiw r26,%o0+1", op, plen, -7)
4235 : avr_asm_len ("adiw r26,%o0" CR_TAB
4236 "st X+,%A1" CR_TAB
4237 "st X,%B1" CR_TAB
4238 "sbiw r26,%o0+1", op, plen, -4);
4240 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4242 if (!mem_volatile_p)
4243 return avr_asm_len ("st %0,%B1" CR_TAB
4244 "st %0,%A1", op, plen, -2);
4246 return REGNO (XEXP (base, 0)) == REG_X
4247 ? avr_asm_len ("sbiw r26,2" CR_TAB
4248 "st X+,%A1" CR_TAB
4249 "st X,%B1" CR_TAB
4250 "sbiw r26,1", op, plen, -4)
4252 : avr_asm_len ("sbiw %r0,2" CR_TAB
4253 "st %p0,%A1" CR_TAB
4254 "std %p0+1,%B1", op, plen, -3);
4256 else if (GET_CODE (base) == POST_INC) /* (R++) */
4258 return avr_asm_len ("st %0,%A1" CR_TAB
4259 "st %0,%B1", op, plen, -2);
4262 fatal_insn ("unknown move insn:",insn);
4263 return "";
4267 static const char*
4268 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
4270 rtx dest = op[0];
4271 rtx src = op[1];
4272 rtx base = XEXP (dest, 0);
4273 int reg_base = true_regnum (base);
4274 int reg_src = true_regnum (src);
4275 int mem_volatile_p;
4277 /* "volatile" forces writing high-byte first (no-xmega) resp.
4278 low-byte first (xmega) even if less efficient, for correct
4279 operation with 16-bit I/O registers like. */
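/* On classic AVR, writing the high byte of a 16-bit I/O register only
   loads the hardware TEMP latch; the 16-bit value is committed when the
   low byte is written.  XMEGA expects the low byte first instead, hence
   the dispatch below.  */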
4281 if (AVR_XMEGA)
4282 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4284 mem_volatile_p = MEM_VOLATILE_P (dest);
4286 if (CONSTANT_ADDRESS_P (base))
4287 return optimize > 0 && io_address_operand (base, HImode)
4288 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4289 "out %i0,%A1", op, plen, -2)
4291 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4292 "sts %m0,%A1", op, plen, -4);
4294 if (reg_base > 0)
4296 if (reg_base != REG_X)
4297 return avr_asm_len ("std %0+1,%B1" CR_TAB
4298 "st %0,%A1", op, plen, -2);
4300 if (reg_src == REG_X)
4301 /* "st X+,r26" and "st -X,r26" are undefined. */
4302 return !mem_volatile_p && reg_unused_after (insn, src)
4303 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4304 "st X,r26" CR_TAB
4305 "adiw r26,1" CR_TAB
4306 "st X,__tmp_reg__", op, plen, -4)
4308 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4309 "adiw r26,1" CR_TAB
4310 "st X,__tmp_reg__" CR_TAB
4311 "sbiw r26,1" CR_TAB
4312 "st X,r26", op, plen, -5);
4314 return !mem_volatile_p && reg_unused_after (insn, base)
4315 ? avr_asm_len ("st X+,%A1" CR_TAB
4316 "st X,%B1", op, plen, -2)
4317 : avr_asm_len ("adiw r26,1" CR_TAB
4318 "st X,%B1" CR_TAB
4319 "st -X,%A1", op, plen, -3);
4321 else if (GET_CODE (base) == PLUS)
4323 int disp = INTVAL (XEXP (base, 1));
4324 reg_base = REGNO (XEXP (base, 0));
4325 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4327 if (reg_base != REG_Y)
4328 fatal_insn ("incorrect insn:",insn);
4330 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4331 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4332 "std Y+63,%B1" CR_TAB
4333 "std Y+62,%A1" CR_TAB
4334 "sbiw r28,%o0-62", op, plen, -4)
4336 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4337 "sbci r29,hi8(-%o0)" CR_TAB
4338 "std Y+1,%B1" CR_TAB
4339 "st Y,%A1" CR_TAB
4340 "subi r28,lo8(%o0)" CR_TAB
4341 "sbci r29,hi8(%o0)", op, plen, -6);
4344 if (reg_base != REG_X)
4345 return avr_asm_len ("std %B0,%B1" CR_TAB
4346 "std %A0,%A1", op, plen, -2);
4347 /* (X + d) = R */
4348 return reg_src == REG_X
4349 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4350 "mov __zero_reg__,r27" CR_TAB
4351 "adiw r26,%o0+1" CR_TAB
4352 "st X,__zero_reg__" CR_TAB
4353 "st -X,__tmp_reg__" CR_TAB
4354 "clr __zero_reg__" CR_TAB
4355 "sbiw r26,%o0", op, plen, -7)
4357 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4358 "st X,%B1" CR_TAB
4359 "st -X,%A1" CR_TAB
4360 "sbiw r26,%o0", op, plen, -4);
4362 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4364 return avr_asm_len ("st %0,%B1" CR_TAB
4365 "st %0,%A1", op, plen, -2);
4367 else if (GET_CODE (base) == POST_INC) /* (R++) */
4369 if (!mem_volatile_p)
4370 return avr_asm_len ("st %0,%A1" CR_TAB
4371 "st %0,%B1", op, plen, -2);
4373 return REGNO (XEXP (base, 0)) == REG_X
4374 ? avr_asm_len ("adiw r26,1" CR_TAB
4375 "st X,%B1" CR_TAB
4376 "st -X,%A1" CR_TAB
4377 "adiw r26,2", op, plen, -4)
4379 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4380 "st %p0,%A1" CR_TAB
4381 "adiw %r0,2", op, plen, -3);
4383 fatal_insn ("unknown move insn:",insn);
4384 return "";
4387 /* Return true if the frame pointer is required for the current function. */
4389 static bool
4390 avr_frame_pointer_required_p (void)
4392 return (cfun->calls_alloca
4393 || cfun->calls_setjmp
4394 || cfun->has_nonlocal_label
4395 || crtl->args.info.nregs == 0
4396 || get_frame_size () > 0);
4399 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4401 static RTX_CODE
4402 compare_condition (rtx insn)
4404 rtx next = next_real_insn (insn);
4406 if (next && JUMP_P (next))
4408 rtx pat = PATTERN (next);
4409 rtx src = SET_SRC (pat);
4411 if (IF_THEN_ELSE == GET_CODE (src))
4412 return GET_CODE (XEXP (src, 0));
4415 return UNKNOWN;
4419 /* Returns true iff INSN is a tst insn that only tests the sign. */
4421 static bool
4422 compare_sign_p (rtx insn)
4424 RTX_CODE cond = compare_condition (insn);
4425 return (cond == GE || cond == LT);
4429 /* Returns true iff the next insn is a JUMP_INSN with a condition
4430 that needs to be swapped (GT, GTU, LE, LEU). */
4432 static bool
4433 compare_diff_p (rtx insn)
4435 RTX_CODE cond = compare_condition (insn);
4436 return (cond == GT || cond == GTU || cond == LE || cond == LEU);
4439 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4441 static bool
4442 compare_eq_p (rtx insn)
4444 RTX_CODE cond = compare_condition (insn);
4445 return (cond == EQ || cond == NE);
4449 /* Output compare instruction
4451 compare (XOP[0], XOP[1])
4453 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4454 XOP[2] is an 8-bit scratch register as needed.
4456 PLEN == NULL: Output instructions.
4457 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4458 Don't output anything. */
4460 const char*
4461 avr_out_compare (rtx insn, rtx *xop, int *plen)
4463 /* Register to compare and value to compare against. */
4464 rtx xreg = xop[0];
4465 rtx xval = xop[1];
4467 /* MODE of the comparison. */
4468 enum machine_mode mode;
4470 /* Number of bytes to operate on. */
4471 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4473 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4474 int clobber_val = -1;
4476 /* Map fixed mode operands to integer operands with the same binary
4477 representation. They are easier to handle in the remainder. */
4479 if (CONST_FIXED_P (xval))
4481 xreg = avr_to_int_mode (xop[0]);
4482 xval = avr_to_int_mode (xop[1]);
4485 mode = GET_MODE (xreg);
4487 gcc_assert (REG_P (xreg));
4488 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4489 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4491 if (plen)
4492 *plen = 0;
4494 /* Comparisons == +/-1 and != +/-1 can be done similarly to comparing
4495 against 0 by ORing the bytes. This is one instruction shorter.
4496 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4497 and therefore don't use this. */
4499 if (!test_hard_reg_class (LD_REGS, xreg)
4500 && compare_eq_p (insn)
4501 && reg_unused_after (insn, xreg))
4503 if (xval == const1_rtx)
4505 avr_asm_len ("dec %A0" CR_TAB
4506 "or %A0,%B0", xop, plen, 2);
4508 if (n_bytes >= 3)
4509 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4511 if (n_bytes >= 4)
4512 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4514 return "";
4516 else if (xval == constm1_rtx)
4518 if (n_bytes >= 4)
4519 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4521 if (n_bytes >= 3)
4522 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4524 return avr_asm_len ("and %A0,%B0" CR_TAB
4525 "com %A0", xop, plen, 2);
4529 for (i = 0; i < n_bytes; i++)
4531 /* We compare byte-wise. */
4532 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4533 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4535 /* 8-bit value to compare with this byte. */
4536 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4538 /* Registers R16..R31 can operate with immediate. */
4539 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4541 xop[0] = reg8;
4542 xop[1] = gen_int_mode (val8, QImode);
4544 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4546 if (i == 0
4547 && test_hard_reg_class (ADDW_REGS, reg8))
4549 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4551 if (IN_RANGE (val16, 0, 63)
4552 && (val8 == 0
4553 || reg_unused_after (insn, xreg)))
4555 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4556 i++;
4557 continue;
4560 if (n_bytes == 2
4561 && IN_RANGE (val16, -63, -1)
4562 && compare_eq_p (insn)
4563 && reg_unused_after (insn, xreg))
4565 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4569 /* Comparing against 0 is easy. */
4571 if (val8 == 0)
4573 avr_asm_len (i == 0
4574 ? "cp %0,__zero_reg__"
4575 : "cpc %0,__zero_reg__", xop, plen, 1);
4576 continue;
4579 /* Upper registers can compare against and subtract-with-carry immediates.
4580 Notice that the compare instructions do the same as the respective subtract
4581 instructions; the only difference is that comparisons don't write
4582 the result back to the target register. */
4584 if (ld_reg_p)
4586 if (i == 0)
4588 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4589 continue;
4591 else if (reg_unused_after (insn, xreg))
4593 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4594 continue;
4598 /* Must load the value into the scratch register. */
4600 gcc_assert (REG_P (xop[2]));
4602 if (clobber_val != (int) val8)
4603 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4604 clobber_val = (int) val8;
4606 avr_asm_len (i == 0
4607 ? "cp %0,%2"
4608 : "cpc %0,%2", xop, plen, 1);
4611 return "";
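/* Worked examples for avr_out_compare (hypothetical operands,
   illustration only):

   - reg:HI 26 (X) against 47, with X unused afterwards: R26 is in
     ADDW_REGS and 47 fits the 0..63 range, so the whole test is the
     single word

         sbiw r26,47

     SBIW really performs the subtraction, which is why the register
     must be dead (for a zero low byte, val16 is 0 and "sbiw %0,0"
     leaves the register unchanged and just sets the flags).

   - reg:HI 20 against 47: no ADIW/SBIW, but R20/R21 are LD_REGS, so
     the byte-wise path emits

         cpi r20,47
         cpc r21,__zero_reg__
*/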
4615 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4617 const char*
4618 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4620 rtx xop[3];
4622 xop[0] = gen_rtx_REG (DImode, 18);
4623 xop[1] = op[0];
4624 xop[2] = op[1];
4626 return avr_out_compare (insn, xop, plen);
4629 /* Output test instruction for HImode. */
4631 const char*
4632 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4634 if (compare_sign_p (insn))
4636 avr_asm_len ("tst %B0", op, plen, -1);
4638 else if (reg_unused_after (insn, op[0])
4639 && compare_eq_p (insn))
4641 /* Faster than sbiw if we can clobber the operand. */
4642 avr_asm_len ("or %A0,%B0", op, plen, -1);
4644 else
4646 avr_out_compare (insn, op, plen);
4649 return "";
4653 /* Output test instruction for PSImode. */
4655 const char*
4656 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4658 if (compare_sign_p (insn))
4660 avr_asm_len ("tst %C0", op, plen, -1);
4662 else if (reg_unused_after (insn, op[0])
4663 && compare_eq_p (insn))
4665 /* Faster than sbiw if we can clobber the operand. */
4666 avr_asm_len ("or %A0,%B0" CR_TAB
4667 "or %A0,%C0", op, plen, -2);
4669 else
4671 avr_out_compare (insn, op, plen);
4674 return "";
4678 /* Output test instruction for SImode. */
4680 const char*
4681 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4683 if (compare_sign_p (insn))
4685 avr_asm_len ("tst %D0", op, plen, -1);
4687 else if (reg_unused_after (insn, op[0])
4688 && compare_eq_p (insn))
4690 /* Faster than sbiw if we can clobber the operand. */
4691 avr_asm_len ("or %A0,%B0" CR_TAB
4692 "or %A0,%C0" CR_TAB
4693 "or %A0,%D0", op, plen, -3);
4695 else
4697 avr_out_compare (insn, op, plen);
4700 return "";
4704 /* Generate asm equivalent for various shifts. This only handles cases
4705 that are not already carefully hand-optimized in ?sh??i3_out.
4707 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4708 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4709 OPERANDS[3] is a QImode scratch register from LD regs if
4710 available, and SCRATCH otherwise (no scratch register available).
4712 TEMPL is an assembler template that shifts by one position.
4713 T_LEN is the length of this template. */
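/* For illustration (not emitted verbatim; operand numbers depend on the
   insn): with the shift count in a register copied to counter %3, the
   loop generated below has the shape

           rjmp 2f        ; the count may be zero
       1:  lsl %0         ; TEMPL, shifting one position
       2:  dec %3
           brpl 1b

   which executes TEMPL exactly %3 times: DEC of 0 wraps to 0xff and
   sets the N flag, terminating the loop. */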
4715 void
4716 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4717 int *plen, int t_len)
4719 bool second_label = true;
4720 bool saved_in_tmp = false;
4721 bool use_zero_reg = false;
4722 rtx op[5];
4724 op[0] = operands[0];
4725 op[1] = operands[1];
4726 op[2] = operands[2];
4727 op[3] = operands[3];
4729 if (plen)
4730 *plen = 0;
4732 if (CONST_INT_P (operands[2]))
4734 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4735 && REG_P (operands[3]));
4736 int count = INTVAL (operands[2]);
4737 int max_len = 10; /* If larger than this, always use a loop. */
4739 if (count <= 0)
4740 return;
4742 if (count < 8 && !scratch)
4743 use_zero_reg = true;
4745 if (optimize_size)
4746 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4748 if (t_len * count <= max_len)
4750 /* Output shifts inline with no loop - faster. */
4752 while (count-- > 0)
4753 avr_asm_len (templ, op, plen, t_len);
4755 return;
4758 if (scratch)
4760 avr_asm_len ("ldi %3,%2", op, plen, 1);
4762 else if (use_zero_reg)
4764 /* Hack to save one word: use __zero_reg__ as loop counter.
4765 Set one bit, then shift in a loop until it is 0 again. */
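/* E.g. for a shift count of 6 the SET/BLD below leave __zero_reg__
   holding 0x20 (bit 5 set); the shift loop then runs until that bit
   has been shifted out, i.e. exactly 6 times, after which
   __zero_reg__ is 0 again so no restore is needed. */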
4767 op[3] = zero_reg_rtx;
4769 avr_asm_len ("set" CR_TAB
4770 "bld %3,%2-1", op, plen, 2);
4772 else
4774 /* No scratch register available, use one from LD_REGS (saved in
4775 __tmp_reg__) that doesn't overlap with registers to shift. */
4777 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4778 op[4] = tmp_reg_rtx;
4779 saved_in_tmp = true;
4781 avr_asm_len ("mov %4,%3" CR_TAB
4782 "ldi %3,%2", op, plen, 2);
4785 second_label = false;
4787 else if (MEM_P (op[2]))
4789 rtx op_mov[2];
4791 op_mov[0] = op[3] = tmp_reg_rtx;
4792 op_mov[1] = op[2];
4794 out_movqi_r_mr (insn, op_mov, plen);
4796 else if (register_operand (op[2], QImode))
4798 op[3] = op[2];
4800 if (!reg_unused_after (insn, op[2])
4801 || reg_overlap_mentioned_p (op[0], op[2]))
4803 op[3] = tmp_reg_rtx;
4804 avr_asm_len ("mov %3,%2", op, plen, 1);
4807 else
4808 fatal_insn ("bad shift insn:", insn);
4810 if (second_label)
4811 avr_asm_len ("rjmp 2f", op, plen, 1);
4813 avr_asm_len ("1:", op, plen, 0);
4814 avr_asm_len (templ, op, plen, t_len);
4816 if (second_label)
4817 avr_asm_len ("2:", op, plen, 0);
4819 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4820 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4822 if (saved_in_tmp)
4823 avr_asm_len ("mov %3,%4", op, plen, 1);
4827 /* 8-bit shift left ((char)x << i) */
4829 const char *
4830 ashlqi3_out (rtx insn, rtx operands[], int *len)
4832 if (GET_CODE (operands[2]) == CONST_INT)
4834 int k;
4836 if (!len)
4837 len = &k;
4839 switch (INTVAL (operands[2]))
4841 default:
4842 if (INTVAL (operands[2]) < 8)
4843 break;
4845 *len = 1;
4846 return "clr %0";
4848 case 1:
4849 *len = 1;
4850 return "lsl %0";
4852 case 2:
4853 *len = 2;
4854 return ("lsl %0" CR_TAB
4855 "lsl %0");
4857 case 3:
4858 *len = 3;
4859 return ("lsl %0" CR_TAB
4860 "lsl %0" CR_TAB
4861 "lsl %0");
4863 case 4:
4864 if (test_hard_reg_class (LD_REGS, operands[0]))
4866 *len = 2;
4867 return ("swap %0" CR_TAB
4868 "andi %0,0xf0");
4870 *len = 4;
4871 return ("lsl %0" CR_TAB
4872 "lsl %0" CR_TAB
4873 "lsl %0" CR_TAB
4874 "lsl %0");
4876 case 5:
4877 if (test_hard_reg_class (LD_REGS, operands[0]))
4879 *len = 3;
4880 return ("swap %0" CR_TAB
4881 "lsl %0" CR_TAB
4882 "andi %0,0xe0");
4884 *len = 5;
4885 return ("lsl %0" CR_TAB
4886 "lsl %0" CR_TAB
4887 "lsl %0" CR_TAB
4888 "lsl %0" CR_TAB
4889 "lsl %0");
4891 case 6:
4892 if (test_hard_reg_class (LD_REGS, operands[0]))
4894 *len = 4;
4895 return ("swap %0" CR_TAB
4896 "lsl %0" CR_TAB
4897 "lsl %0" CR_TAB
4898 "andi %0,0xc0");
4900 *len = 6;
4901 return ("lsl %0" CR_TAB
4902 "lsl %0" CR_TAB
4903 "lsl %0" CR_TAB
4904 "lsl %0" CR_TAB
4905 "lsl %0" CR_TAB
4906 "lsl %0");
4908 case 7:
4909 *len = 3;
4910 return ("ror %0" CR_TAB
4911 "clr %0" CR_TAB
4912 "ror %0");
4915 else if (CONSTANT_P (operands[2]))
4916 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4918 out_shift_with_cnt ("lsl %0",
4919 insn, operands, len, 1);
4920 return "";
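/* A C-level sketch (illustrative only, not used by the compiler) of the
   SWAP/ANDI idiom in case 4 above:

     static unsigned char ashl4 (unsigned char x)
     {
       x = (x << 4) | (x >> 4);   // swap %0: exchange the nibbles
       return x & 0xf0;           // andi %0,0xf0: clear the nibble
     }                            //   rotated in from the top

   which equals (unsigned char) (x << 4). */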
4924 /* 16-bit shift left ((short)x << i) */
4926 const char *
4927 ashlhi3_out (rtx insn, rtx operands[], int *len)
4929 if (GET_CODE (operands[2]) == CONST_INT)
4931 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4932 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4933 int k;
4934 int *t = len;
4936 if (!len)
4937 len = &k;
4939 switch (INTVAL (operands[2]))
4941 default:
4942 if (INTVAL (operands[2]) < 16)
4943 break;
4945 *len = 2;
4946 return ("clr %B0" CR_TAB
4947 "clr %A0");
4949 case 4:
4950 if (optimize_size && scratch)
4951 break; /* 5 */
4952 if (ldi_ok)
4954 *len = 6;
4955 return ("swap %A0" CR_TAB
4956 "swap %B0" CR_TAB
4957 "andi %B0,0xf0" CR_TAB
4958 "eor %B0,%A0" CR_TAB
4959 "andi %A0,0xf0" CR_TAB
4960 "eor %B0,%A0");
4962 if (scratch)
4964 *len = 7;
4965 return ("swap %A0" CR_TAB
4966 "swap %B0" CR_TAB
4967 "ldi %3,0xf0" CR_TAB
4968 "and %B0,%3" CR_TAB
4969 "eor %B0,%A0" CR_TAB
4970 "and %A0,%3" CR_TAB
4971 "eor %B0,%A0");
4973 break; /* optimize_size ? 6 : 8 */
4975 case 5:
4976 if (optimize_size)
4977 break; /* scratch ? 5 : 6 */
4978 if (ldi_ok)
4980 *len = 8;
4981 return ("lsl %A0" CR_TAB
4982 "rol %B0" CR_TAB
4983 "swap %A0" CR_TAB
4984 "swap %B0" CR_TAB
4985 "andi %B0,0xf0" CR_TAB
4986 "eor %B0,%A0" CR_TAB
4987 "andi %A0,0xf0" CR_TAB
4988 "eor %B0,%A0");
4990 if (scratch)
4992 *len = 9;
4993 return ("lsl %A0" CR_TAB
4994 "rol %B0" CR_TAB
4995 "swap %A0" CR_TAB
4996 "swap %B0" CR_TAB
4997 "ldi %3,0xf0" CR_TAB
4998 "and %B0,%3" CR_TAB
4999 "eor %B0,%A0" CR_TAB
5000 "and %A0,%3" CR_TAB
5001 "eor %B0,%A0");
5003 break; /* 10 */
5005 case 6:
5006 if (optimize_size)
5007 break; /* scratch ? 5 : 6 */
5008 *len = 9;
5009 return ("clr __tmp_reg__" CR_TAB
5010 "lsr %B0" CR_TAB
5011 "ror %A0" CR_TAB
5012 "ror __tmp_reg__" CR_TAB
5013 "lsr %B0" CR_TAB
5014 "ror %A0" CR_TAB
5015 "ror __tmp_reg__" CR_TAB
5016 "mov %B0,%A0" CR_TAB
5017 "mov %A0,__tmp_reg__");
5019 case 7:
5020 *len = 5;
5021 return ("lsr %B0" CR_TAB
5022 "mov %B0,%A0" CR_TAB
5023 "clr %A0" CR_TAB
5024 "ror %B0" CR_TAB
5025 "ror %A0");
5027 case 8:
5028 return *len = 2, ("mov %B0,%A1" CR_TAB
5029 "clr %A0");
5031 case 9:
5032 *len = 3;
5033 return ("mov %B0,%A0" CR_TAB
5034 "clr %A0" CR_TAB
5035 "lsl %B0");
5037 case 10:
5038 *len = 4;
5039 return ("mov %B0,%A0" CR_TAB
5040 "clr %A0" CR_TAB
5041 "lsl %B0" CR_TAB
5042 "lsl %B0");
5044 case 11:
5045 *len = 5;
5046 return ("mov %B0,%A0" CR_TAB
5047 "clr %A0" CR_TAB
5048 "lsl %B0" CR_TAB
5049 "lsl %B0" CR_TAB
5050 "lsl %B0");
5052 case 12:
5053 if (ldi_ok)
5055 *len = 4;
5056 return ("mov %B0,%A0" CR_TAB
5057 "clr %A0" CR_TAB
5058 "swap %B0" CR_TAB
5059 "andi %B0,0xf0");
5061 if (scratch)
5063 *len = 5;
5064 return ("mov %B0,%A0" CR_TAB
5065 "clr %A0" CR_TAB
5066 "swap %B0" CR_TAB
5067 "ldi %3,0xf0" CR_TAB
5068 "and %B0,%3");
5070 *len = 6;
5071 return ("mov %B0,%A0" CR_TAB
5072 "clr %A0" CR_TAB
5073 "lsl %B0" CR_TAB
5074 "lsl %B0" CR_TAB
5075 "lsl %B0" CR_TAB
5076 "lsl %B0");
5078 case 13:
5079 if (ldi_ok)
5081 *len = 5;
5082 return ("mov %B0,%A0" CR_TAB
5083 "clr %A0" CR_TAB
5084 "swap %B0" CR_TAB
5085 "lsl %B0" CR_TAB
5086 "andi %B0,0xe0");
5088 if (AVR_HAVE_MUL && scratch)
5090 *len = 5;
5091 return ("ldi %3,0x20" CR_TAB
5092 "mul %A0,%3" CR_TAB
5093 "mov %B0,r0" CR_TAB
5094 "clr %A0" CR_TAB
5095 "clr __zero_reg__");
5097 if (optimize_size && scratch)
5098 break; /* 5 */
5099 if (scratch)
5101 *len = 6;
5102 return ("mov %B0,%A0" CR_TAB
5103 "clr %A0" CR_TAB
5104 "swap %B0" CR_TAB
5105 "lsl %B0" CR_TAB
5106 "ldi %3,0xe0" CR_TAB
5107 "and %B0,%3");
5109 if (AVR_HAVE_MUL)
5111 *len = 6;
5112 return ("set" CR_TAB
5113 "bld r1,5" CR_TAB
5114 "mul %A0,r1" CR_TAB
5115 "mov %B0,r0" CR_TAB
5116 "clr %A0" CR_TAB
5117 "clr __zero_reg__");
5119 *len = 7;
5120 return ("mov %B0,%A0" CR_TAB
5121 "clr %A0" CR_TAB
5122 "lsl %B0" CR_TAB
5123 "lsl %B0" CR_TAB
5124 "lsl %B0" CR_TAB
5125 "lsl %B0" CR_TAB
5126 "lsl %B0");
5128 case 14:
5129 if (AVR_HAVE_MUL && ldi_ok)
5131 *len = 5;
5132 return ("ldi %B0,0x40" CR_TAB
5133 "mul %A0,%B0" CR_TAB
5134 "mov %B0,r0" CR_TAB
5135 "clr %A0" CR_TAB
5136 "clr __zero_reg__");
5138 if (AVR_HAVE_MUL && scratch)
5140 *len = 5;
5141 return ("ldi %3,0x40" CR_TAB
5142 "mul %A0,%3" CR_TAB
5143 "mov %B0,r0" CR_TAB
5144 "clr %A0" CR_TAB
5145 "clr __zero_reg__");
5147 if (optimize_size && ldi_ok)
5149 *len = 5;
5150 return ("mov %B0,%A0" CR_TAB
5151 "ldi %A0,6" "\n1:\t"
5152 "lsl %B0" CR_TAB
5153 "dec %A0" CR_TAB
5154 "brne 1b");
5156 if (optimize_size && scratch)
5157 break; /* 5 */
5158 *len = 6;
5159 return ("clr %B0" CR_TAB
5160 "lsr %A0" CR_TAB
5161 "ror %B0" CR_TAB
5162 "lsr %A0" CR_TAB
5163 "ror %B0" CR_TAB
5164 "clr %A0");
5166 case 15:
5167 *len = 4;
5168 return ("clr %B0" CR_TAB
5169 "lsr %A0" CR_TAB
5170 "ror %B0" CR_TAB
5171 "clr %A0");
5173 len = t;
5175 out_shift_with_cnt ("lsl %A0" CR_TAB
5176 "rol %B0", insn, operands, len, 2);
5177 return "";
5181 /* 24-bit shift left */
5183 const char*
5184 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5186 if (plen)
5187 *plen = 0;
5189 if (CONST_INT_P (op[2]))
5191 switch (INTVAL (op[2]))
5193 default:
5194 if (INTVAL (op[2]) < 24)
5195 break;
5197 return avr_asm_len ("clr %A0" CR_TAB
5198 "clr %B0" CR_TAB
5199 "clr %C0", op, plen, 3);
5201 case 8:
5203 int reg0 = REGNO (op[0]);
5204 int reg1 = REGNO (op[1]);
5206 if (reg0 >= reg1)
5207 return avr_asm_len ("mov %C0,%B1" CR_TAB
5208 "mov %B0,%A1" CR_TAB
5209 "clr %A0", op, plen, 3);
5210 else
5211 return avr_asm_len ("clr %A0" CR_TAB
5212 "mov %B0,%A1" CR_TAB
5213 "mov %C0,%B1", op, plen, 3);
5216 case 16:
5218 int reg0 = REGNO (op[0]);
5219 int reg1 = REGNO (op[1]);
5221 if (reg0 + 2 != reg1)
5222 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5224 return avr_asm_len ("clr %B0" CR_TAB
5225 "clr %A0", op, plen, 2);
5228 case 23:
5229 return avr_asm_len ("clr %C0" CR_TAB
5230 "lsr %A0" CR_TAB
5231 "ror %C0" CR_TAB
5232 "clr %B0" CR_TAB
5233 "clr %A0", op, plen, 5);
5237 out_shift_with_cnt ("lsl %A0" CR_TAB
5238 "rol %B0" CR_TAB
5239 "rol %C0", insn, op, plen, 3);
5240 return "";
5244 /* 32-bit shift left ((long)x << i) */
5246 const char *
5247 ashlsi3_out (rtx insn, rtx operands[], int *len)
5249 if (GET_CODE (operands[2]) == CONST_INT)
5251 int k;
5252 int *t = len;
5254 if (!len)
5255 len = &k;
5257 switch (INTVAL (operands[2]))
5259 default:
5260 if (INTVAL (operands[2]) < 32)
5261 break;
5263 if (AVR_HAVE_MOVW)
5264 return *len = 3, ("clr %D0" CR_TAB
5265 "clr %C0" CR_TAB
5266 "movw %A0,%C0");
5267 *len = 4;
5268 return ("clr %D0" CR_TAB
5269 "clr %C0" CR_TAB
5270 "clr %B0" CR_TAB
5271 "clr %A0");
5273 case 8:
5275 int reg0 = true_regnum (operands[0]);
5276 int reg1 = true_regnum (operands[1]);
5277 *len = 4;
5278 if (reg0 >= reg1)
5279 return ("mov %D0,%C1" CR_TAB
5280 "mov %C0,%B1" CR_TAB
5281 "mov %B0,%A1" CR_TAB
5282 "clr %A0");
5283 else
5284 return ("clr %A0" CR_TAB
5285 "mov %B0,%A1" CR_TAB
5286 "mov %C0,%B1" CR_TAB
5287 "mov %D0,%C1");
5290 case 16:
5292 int reg0 = true_regnum (operands[0]);
5293 int reg1 = true_regnum (operands[1]);
5294 if (reg0 + 2 == reg1)
5295 return *len = 2, ("clr %B0" CR_TAB
5296 "clr %A0");
5297 if (AVR_HAVE_MOVW)
5298 return *len = 3, ("movw %C0,%A1" CR_TAB
5299 "clr %B0" CR_TAB
5300 "clr %A0");
5301 else
5302 return *len = 4, ("mov %C0,%A1" CR_TAB
5303 "mov %D0,%B1" CR_TAB
5304 "clr %B0" CR_TAB
5305 "clr %A0");
5308 case 24:
5309 *len = 4;
5310 return ("mov %D0,%A1" CR_TAB
5311 "clr %C0" CR_TAB
5312 "clr %B0" CR_TAB
5313 "clr %A0");
5315 case 31:
5316 *len = 6;
5317 return ("clr %D0" CR_TAB
5318 "lsr %A0" CR_TAB
5319 "ror %D0" CR_TAB
5320 "clr %C0" CR_TAB
5321 "clr %B0" CR_TAB
5322 "clr %A0");
5324 len = t;
5326 out_shift_with_cnt ("lsl %A0" CR_TAB
5327 "rol %B0" CR_TAB
5328 "rol %C0" CR_TAB
5329 "rol %D0", insn, operands, len, 4);
5330 return "";
5333 /* 8-bit arithmetic shift right ((signed char)x >> i) */
5335 const char *
5336 ashrqi3_out (rtx insn, rtx operands[], int *len)
5338 if (GET_CODE (operands[2]) == CONST_INT)
5340 int k;
5342 if (!len)
5343 len = &k;
5345 switch (INTVAL (operands[2]))
5347 case 1:
5348 *len = 1;
5349 return "asr %0";
5351 case 2:
5352 *len = 2;
5353 return ("asr %0" CR_TAB
5354 "asr %0");
5356 case 3:
5357 *len = 3;
5358 return ("asr %0" CR_TAB
5359 "asr %0" CR_TAB
5360 "asr %0");
5362 case 4:
5363 *len = 4;
5364 return ("asr %0" CR_TAB
5365 "asr %0" CR_TAB
5366 "asr %0" CR_TAB
5367 "asr %0");
5369 case 5:
5370 *len = 5;
5371 return ("asr %0" CR_TAB
5372 "asr %0" CR_TAB
5373 "asr %0" CR_TAB
5374 "asr %0" CR_TAB
5375 "asr %0");
5377 case 6:
5378 *len = 4;
5379 return ("bst %0,6" CR_TAB
5380 "lsl %0" CR_TAB
5381 "sbc %0,%0" CR_TAB
5382 "bld %0,0");
5384 default:
5385 if (INTVAL (operands[2]) < 8)
5386 break;
5388 /* fall through */
5390 case 7:
5391 *len = 2;
5392 return ("lsl %0" CR_TAB
5393 "sbc %0,%0");
5396 else if (CONSTANT_P (operands[2]))
5397 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5399 out_shift_with_cnt ("asr %0",
5400 insn, operands, len, 1);
5401 return "";
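/* Note on case 7 above (the LSL/SBC idiom): LSL moves the sign bit into
   the carry, and SBC of a register with itself computes 0 - 0 - C, i.e.
   0x00 or 0xff.  A C-level sketch (illustrative only):

     static signed char ashr7 (signed char x)
     {
       unsigned char c = (unsigned char) x >> 7;  // carry := old bit 7
       return c ? -1 : 0;                         // sbc %0,%0
     }
*/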
5405 /* 16-bit arithmetic shift right ((signed short)x >> i) */
5407 const char *
5408 ashrhi3_out (rtx insn, rtx operands[], int *len)
5410 if (GET_CODE (operands[2]) == CONST_INT)
5412 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5413 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5414 int k;
5415 int *t = len;
5417 if (!len)
5418 len = &k;
5420 switch (INTVAL (operands[2]))
5422 case 4:
5423 case 5:
5424 /* XXX try to optimize this too? */
5425 break;
5427 case 6:
5428 if (optimize_size)
5429 break; /* scratch ? 5 : 6 */
5430 *len = 8;
5431 return ("mov __tmp_reg__,%A0" CR_TAB
5432 "mov %A0,%B0" CR_TAB
5433 "lsl __tmp_reg__" CR_TAB
5434 "rol %A0" CR_TAB
5435 "sbc %B0,%B0" CR_TAB
5436 "lsl __tmp_reg__" CR_TAB
5437 "rol %A0" CR_TAB
5438 "rol %B0");
5440 case 7:
5441 *len = 4;
5442 return ("lsl %A0" CR_TAB
5443 "mov %A0,%B0" CR_TAB
5444 "rol %A0" CR_TAB
5445 "sbc %B0,%B0");
5447 case 8:
5449 int reg0 = true_regnum (operands[0]);
5450 int reg1 = true_regnum (operands[1]);
5452 if (reg0 == reg1)
5453 return *len = 3, ("mov %A0,%B0" CR_TAB
5454 "lsl %B0" CR_TAB
5455 "sbc %B0,%B0");
5456 else
5457 return *len = 4, ("mov %A0,%B1" CR_TAB
5458 "clr %B0" CR_TAB
5459 "sbrc %A0,7" CR_TAB
5460 "dec %B0");
5463 case 9:
5464 *len = 4;
5465 return ("mov %A0,%B0" CR_TAB
5466 "lsl %B0" CR_TAB
5467 "sbc %B0,%B0" CR_TAB
5468 "asr %A0");
5470 case 10:
5471 *len = 5;
5472 return ("mov %A0,%B0" CR_TAB
5473 "lsl %B0" CR_TAB
5474 "sbc %B0,%B0" CR_TAB
5475 "asr %A0" CR_TAB
5476 "asr %A0");
5478 case 11:
5479 if (AVR_HAVE_MUL && ldi_ok)
5481 *len = 5;
5482 return ("ldi %A0,0x20" CR_TAB
5483 "muls %B0,%A0" CR_TAB
5484 "mov %A0,r1" CR_TAB
5485 "sbc %B0,%B0" CR_TAB
5486 "clr __zero_reg__");
5488 if (optimize_size && scratch)
5489 break; /* 5 */
5490 *len = 6;
5491 return ("mov %A0,%B0" CR_TAB
5492 "lsl %B0" CR_TAB
5493 "sbc %B0,%B0" CR_TAB
5494 "asr %A0" CR_TAB
5495 "asr %A0" CR_TAB
5496 "asr %A0");
5498 case 12:
5499 if (AVR_HAVE_MUL && ldi_ok)
5501 *len = 5;
5502 return ("ldi %A0,0x10" CR_TAB
5503 "muls %B0,%A0" CR_TAB
5504 "mov %A0,r1" CR_TAB
5505 "sbc %B0,%B0" CR_TAB
5506 "clr __zero_reg__");
5508 if (optimize_size && scratch)
5509 break; /* 5 */
5510 *len = 7;
5511 return ("mov %A0,%B0" CR_TAB
5512 "lsl %B0" CR_TAB
5513 "sbc %B0,%B0" CR_TAB
5514 "asr %A0" CR_TAB
5515 "asr %A0" CR_TAB
5516 "asr %A0" CR_TAB
5517 "asr %A0");
5519 case 13:
5520 if (AVR_HAVE_MUL && ldi_ok)
5522 *len = 5;
5523 return ("ldi %A0,0x08" CR_TAB
5524 "muls %B0,%A0" CR_TAB
5525 "mov %A0,r1" CR_TAB
5526 "sbc %B0,%B0" CR_TAB
5527 "clr __zero_reg__");
5529 if (optimize_size)
5530 break; /* scratch ? 5 : 7 */
5531 *len = 8;
5532 return ("mov %A0,%B0" CR_TAB
5533 "lsl %B0" CR_TAB
5534 "sbc %B0,%B0" CR_TAB
5535 "asr %A0" CR_TAB
5536 "asr %A0" CR_TAB
5537 "asr %A0" CR_TAB
5538 "asr %A0" CR_TAB
5539 "asr %A0");
5541 case 14:
5542 *len = 5;
5543 return ("lsl %B0" CR_TAB
5544 "sbc %A0,%A0" CR_TAB
5545 "lsl %B0" CR_TAB
5546 "mov %B0,%A0" CR_TAB
5547 "rol %A0");
5549 default:
5550 if (INTVAL (operands[2]) < 16)
5551 break;
5553 /* fall through */
5555 case 15:
5556 return *len = 3, ("lsl %B0" CR_TAB
5557 "sbc %A0,%A0" CR_TAB
5558 "mov %B0,%A0");
5560 len = t;
5562 out_shift_with_cnt ("asr %B0" CR_TAB
5563 "ror %A0", insn, operands, len, 2);
5564 return "";
5568 /* 24-bit arithmetic shift right */
5570 const char*
5571 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5573 int dest = REGNO (op[0]);
5574 int src = REGNO (op[1]);
5576 if (CONST_INT_P (op[2]))
5578 if (plen)
5579 *plen = 0;
5581 switch (INTVAL (op[2]))
5583 case 8:
5584 if (dest <= src)
5585 return avr_asm_len ("mov %A0,%B1" CR_TAB
5586 "mov %B0,%C1" CR_TAB
5587 "clr %C0" CR_TAB
5588 "sbrc %B0,7" CR_TAB
5589 "dec %C0", op, plen, 5);
5590 else
5591 return avr_asm_len ("clr %C0" CR_TAB
5592 "sbrc %C1,7" CR_TAB
5593 "dec %C0" CR_TAB
5594 "mov %B0,%C1" CR_TAB
5595 "mov %A0,%B1", op, plen, 5);
5597 case 16:
5598 if (dest != src + 2)
5599 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5601 return avr_asm_len ("clr %B0" CR_TAB
5602 "sbrc %A0,7" CR_TAB
5603 "com %B0" CR_TAB
5604 "mov %C0,%B0", op, plen, 4);
5606 default:
5607 if (INTVAL (op[2]) < 24)
5608 break;
5610 /* fall through */
5612 case 23:
5613 return avr_asm_len ("lsl %C0" CR_TAB
5614 "sbc %A0,%A0" CR_TAB
5615 "mov %B0,%A0" CR_TAB
5616 "mov %C0,%A0", op, plen, 4);
5617 } /* switch */
5620 out_shift_with_cnt ("asr %C0" CR_TAB
5621 "ror %B0" CR_TAB
5622 "ror %A0", insn, op, plen, 3);
5623 return "";
5627 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5629 const char *
5630 ashrsi3_out (rtx insn, rtx operands[], int *len)
5632 if (GET_CODE (operands[2]) == CONST_INT)
5634 int k;
5635 int *t = len;
5637 if (!len)
5638 len = &k;
5640 switch (INTVAL (operands[2]))
5642 case 8:
5644 int reg0 = true_regnum (operands[0]);
5645 int reg1 = true_regnum (operands[1]);
5646 *len = 6;
5647 if (reg0 <= reg1)
5648 return ("mov %A0,%B1" CR_TAB
5649 "mov %B0,%C1" CR_TAB
5650 "mov %C0,%D1" CR_TAB
5651 "clr %D0" CR_TAB
5652 "sbrc %C0,7" CR_TAB
5653 "dec %D0");
5654 else
5655 return ("clr %D0" CR_TAB
5656 "sbrc %D1,7" CR_TAB
5657 "dec %D0" CR_TAB
5658 "mov %C0,%D1" CR_TAB
5659 "mov %B0,%C1" CR_TAB
5660 "mov %A0,%B1");
5663 case 16:
5665 int reg0 = true_regnum (operands[0]);
5666 int reg1 = true_regnum (operands[1]);
5668 if (reg0 == reg1 + 2)
5669 return *len = 4, ("clr %D0" CR_TAB
5670 "sbrc %B0,7" CR_TAB
5671 "com %D0" CR_TAB
5672 "mov %C0,%D0");
5673 if (AVR_HAVE_MOVW)
5674 return *len = 5, ("movw %A0,%C1" CR_TAB
5675 "clr %D0" CR_TAB
5676 "sbrc %B0,7" CR_TAB
5677 "com %D0" CR_TAB
5678 "mov %C0,%D0");
5679 else
5680 return *len = 6, ("mov %B0,%D1" CR_TAB
5681 "mov %A0,%C1" CR_TAB
5682 "clr %D0" CR_TAB
5683 "sbrc %B0,7" CR_TAB
5684 "com %D0" CR_TAB
5685 "mov %C0,%D0");
5688 case 24:
5689 return *len = 6, ("mov %A0,%D1" CR_TAB
5690 "clr %D0" CR_TAB
5691 "sbrc %A0,7" CR_TAB
5692 "com %D0" CR_TAB
5693 "mov %B0,%D0" CR_TAB
5694 "mov %C0,%D0");
5696 default:
5697 if (INTVAL (operands[2]) < 32)
5698 break;
5700 /* fall through */
5702 case 31:
5703 if (AVR_HAVE_MOVW)
5704 return *len = 4, ("lsl %D0" CR_TAB
5705 "sbc %A0,%A0" CR_TAB
5706 "mov %B0,%A0" CR_TAB
5707 "movw %C0,%A0");
5708 else
5709 return *len = 5, ("lsl %D0" CR_TAB
5710 "sbc %A0,%A0" CR_TAB
5711 "mov %B0,%A0" CR_TAB
5712 "mov %C0,%A0" CR_TAB
5713 "mov %D0,%A0");
5715 len = t;
5717 out_shift_with_cnt ("asr %D0" CR_TAB
5718 "ror %C0" CR_TAB
5719 "ror %B0" CR_TAB
5720 "ror %A0", insn, operands, len, 4);
5721 return "";
5724 /* 8-bit logical shift right ((unsigned char)x >> i) */
5726 const char *
5727 lshrqi3_out (rtx insn, rtx operands[], int *len)
5729 if (GET_CODE (operands[2]) == CONST_INT)
5731 int k;
5733 if (!len)
5734 len = &k;
5736 switch (INTVAL (operands[2]))
5738 default:
5739 if (INTVAL (operands[2]) < 8)
5740 break;
5742 *len = 1;
5743 return "clr %0";
5745 case 1:
5746 *len = 1;
5747 return "lsr %0";
5749 case 2:
5750 *len = 2;
5751 return ("lsr %0" CR_TAB
5752 "lsr %0");
5753 case 3:
5754 *len = 3;
5755 return ("lsr %0" CR_TAB
5756 "lsr %0" CR_TAB
5757 "lsr %0");
5759 case 4:
5760 if (test_hard_reg_class (LD_REGS, operands[0]))
5762 *len = 2;
5763 return ("swap %0" CR_TAB
5764 "andi %0,0x0f");
5766 *len = 4;
5767 return ("lsr %0" CR_TAB
5768 "lsr %0" CR_TAB
5769 "lsr %0" CR_TAB
5770 "lsr %0");
5772 case 5:
5773 if (test_hard_reg_class (LD_REGS, operands[0]))
5775 *len = 3;
5776 return ("swap %0" CR_TAB
5777 "lsr %0" CR_TAB
5778 "andi %0,0x7");
5780 *len = 5;
5781 return ("lsr %0" CR_TAB
5782 "lsr %0" CR_TAB
5783 "lsr %0" CR_TAB
5784 "lsr %0" CR_TAB
5785 "lsr %0");
5787 case 6:
5788 if (test_hard_reg_class (LD_REGS, operands[0]))
5790 *len = 4;
5791 return ("swap %0" CR_TAB
5792 "lsr %0" CR_TAB
5793 "lsr %0" CR_TAB
5794 "andi %0,0x3");
5796 *len = 6;
5797 return ("lsr %0" CR_TAB
5798 "lsr %0" CR_TAB
5799 "lsr %0" CR_TAB
5800 "lsr %0" CR_TAB
5801 "lsr %0" CR_TAB
5802 "lsr %0");
5804 case 7:
5805 *len = 3;
5806 return ("rol %0" CR_TAB
5807 "clr %0" CR_TAB
5808 "rol %0");
5811 else if (CONSTANT_P (operands[2]))
5812 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5814 out_shift_with_cnt ("lsr %0",
5815 insn, operands, len, 1);
5816 return "";
5819 /* 16-bit logical shift right ((unsigned short)x >> i) */
5821 const char *
5822 lshrhi3_out (rtx insn, rtx operands[], int *len)
5824 if (GET_CODE (operands[2]) == CONST_INT)
5826 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5827 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5828 int k;
5829 int *t = len;
5831 if (!len)
5832 len = &k;
5834 switch (INTVAL (operands[2]))
5836 default:
5837 if (INTVAL (operands[2]) < 16)
5838 break;
5840 *len = 2;
5841 return ("clr %B0" CR_TAB
5842 "clr %A0");
5844 case 4:
5845 if (optimize_size && scratch)
5846 break; /* 5 */
5847 if (ldi_ok)
5849 *len = 6;
5850 return ("swap %B0" CR_TAB
5851 "swap %A0" CR_TAB
5852 "andi %A0,0x0f" CR_TAB
5853 "eor %A0,%B0" CR_TAB
5854 "andi %B0,0x0f" CR_TAB
5855 "eor %A0,%B0");
5857 if (scratch)
5859 *len = 7;
5860 return ("swap %B0" CR_TAB
5861 "swap %A0" CR_TAB
5862 "ldi %3,0x0f" CR_TAB
5863 "and %A0,%3" CR_TAB
5864 "eor %A0,%B0" CR_TAB
5865 "and %B0,%3" CR_TAB
5866 "eor %A0,%B0");
5868 break; /* optimize_size ? 6 : 8 */
5870 case 5:
5871 if (optimize_size)
5872 break; /* scratch ? 5 : 6 */
5873 if (ldi_ok)
5875 *len = 8;
5876 return ("lsr %B0" CR_TAB
5877 "ror %A0" CR_TAB
5878 "swap %B0" CR_TAB
5879 "swap %A0" CR_TAB
5880 "andi %A0,0x0f" CR_TAB
5881 "eor %A0,%B0" CR_TAB
5882 "andi %B0,0x0f" CR_TAB
5883 "eor %A0,%B0");
5885 if (scratch)
5887 *len = 9;
5888 return ("lsr %B0" CR_TAB
5889 "ror %A0" CR_TAB
5890 "swap %B0" CR_TAB
5891 "swap %A0" CR_TAB
5892 "ldi %3,0x0f" CR_TAB
5893 "and %A0,%3" CR_TAB
5894 "eor %A0,%B0" CR_TAB
5895 "and %B0,%3" CR_TAB
5896 "eor %A0,%B0");
5898 break; /* 10 */
5900 case 6:
5901 if (optimize_size)
5902 break; /* scratch ? 5 : 6 */
5903 *len = 9;
5904 return ("clr __tmp_reg__" CR_TAB
5905 "lsl %A0" CR_TAB
5906 "rol %B0" CR_TAB
5907 "rol __tmp_reg__" CR_TAB
5908 "lsl %A0" CR_TAB
5909 "rol %B0" CR_TAB
5910 "rol __tmp_reg__" CR_TAB
5911 "mov %A0,%B0" CR_TAB
5912 "mov %B0,__tmp_reg__");
5914 case 7:
5915 *len = 5;
5916 return ("lsl %A0" CR_TAB
5917 "mov %A0,%B0" CR_TAB
5918 "rol %A0" CR_TAB
5919 "sbc %B0,%B0" CR_TAB
5920 "neg %B0");
5922 case 8:
5923 return *len = 2, ("mov %A0,%B1" CR_TAB
5924 "clr %B0");
5926 case 9:
5927 *len = 3;
5928 return ("mov %A0,%B0" CR_TAB
5929 "clr %B0" CR_TAB
5930 "lsr %A0");
5932 case 10:
5933 *len = 4;
5934 return ("mov %A0,%B0" CR_TAB
5935 "clr %B0" CR_TAB
5936 "lsr %A0" CR_TAB
5937 "lsr %A0");
5939 case 11:
5940 *len = 5;
5941 return ("mov %A0,%B0" CR_TAB
5942 "clr %B0" CR_TAB
5943 "lsr %A0" CR_TAB
5944 "lsr %A0" CR_TAB
5945 "lsr %A0");
5947 case 12:
5948 if (ldi_ok)
5950 *len = 4;
5951 return ("mov %A0,%B0" CR_TAB
5952 "clr %B0" CR_TAB
5953 "swap %A0" CR_TAB
5954 "andi %A0,0x0f");
5956 if (scratch)
5958 *len = 5;
5959 return ("mov %A0,%B0" CR_TAB
5960 "clr %B0" CR_TAB
5961 "swap %A0" CR_TAB
5962 "ldi %3,0x0f" CR_TAB
5963 "and %A0,%3");
5965 *len = 6;
5966 return ("mov %A0,%B0" CR_TAB
5967 "clr %B0" CR_TAB
5968 "lsr %A0" CR_TAB
5969 "lsr %A0" CR_TAB
5970 "lsr %A0" CR_TAB
5971 "lsr %A0");
5973 case 13:
5974 if (ldi_ok)
5976 *len = 5;
5977 return ("mov %A0,%B0" CR_TAB
5978 "clr %B0" CR_TAB
5979 "swap %A0" CR_TAB
5980 "lsr %A0" CR_TAB
5981 "andi %A0,0x07");
5983 if (AVR_HAVE_MUL && scratch)
5985 *len = 5;
5986 return ("ldi %3,0x08" CR_TAB
5987 "mul %B0,%3" CR_TAB
5988 "mov %A0,r1" CR_TAB
5989 "clr %B0" CR_TAB
5990 "clr __zero_reg__");
5992 if (optimize_size && scratch)
5993 break; /* 5 */
5994 if (scratch)
5996 *len = 6;
5997 return ("mov %A0,%B0" CR_TAB
5998 "clr %B0" CR_TAB
5999 "swap %A0" CR_TAB
6000 "lsr %A0" CR_TAB
6001 "ldi %3,0x07" CR_TAB
6002 "and %A0,%3");
6004 if (AVR_HAVE_MUL)
6006 *len = 6;
6007 return ("set" CR_TAB
6008 "bld r1,3" CR_TAB
6009 "mul %B0,r1" CR_TAB
6010 "mov %A0,r1" CR_TAB
6011 "clr %B0" CR_TAB
6012 "clr __zero_reg__");
6014 *len = 7;
6015 return ("mov %A0,%B0" CR_TAB
6016 "clr %B0" CR_TAB
6017 "lsr %A0" CR_TAB
6018 "lsr %A0" CR_TAB
6019 "lsr %A0" CR_TAB
6020 "lsr %A0" CR_TAB
6021 "lsr %A0");
6023 case 14:
6024 if (AVR_HAVE_MUL && ldi_ok)
6026 *len = 5;
6027 return ("ldi %A0,0x04" CR_TAB
6028 "mul %B0,%A0" CR_TAB
6029 "mov %A0,r1" CR_TAB
6030 "clr %B0" CR_TAB
6031 "clr __zero_reg__");
6033 if (AVR_HAVE_MUL && scratch)
6035 *len = 5;
6036 return ("ldi %3,0x04" CR_TAB
6037 "mul %B0,%3" CR_TAB
6038 "mov %A0,r1" CR_TAB
6039 "clr %B0" CR_TAB
6040 "clr __zero_reg__");
6042 if (optimize_size && ldi_ok)
6044 *len = 5;
6045 return ("mov %A0,%B0" CR_TAB
6046 "ldi %B0,6" "\n1:\t"
6047 "lsr %A0" CR_TAB
6048 "dec %B0" CR_TAB
6049 "brne 1b");
6051 if (optimize_size && scratch)
6052 break; /* 5 */
6053 *len = 6;
6054 return ("clr %A0" CR_TAB
6055 "lsl %B0" CR_TAB
6056 "rol %A0" CR_TAB
6057 "lsl %B0" CR_TAB
6058 "rol %A0" CR_TAB
6059 "clr %B0");
6061 case 15:
6062 *len = 4;
6063 return ("clr %A0" CR_TAB
6064 "lsl %B0" CR_TAB
6065 "rol %A0" CR_TAB
6066 "clr %B0");
6068 len = t;
6070 out_shift_with_cnt ("lsr %B0" CR_TAB
6071 "ror %A0", insn, operands, len, 2);
6072 return "";
6076 /* 24-bit logical shift right */
6078 const char*
6079 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
6081 int dest = REGNO (op[0]);
6082 int src = REGNO (op[1]);
6084 if (CONST_INT_P (op[2]))
6086 if (plen)
6087 *plen = 0;
6089 switch (INTVAL (op[2]))
6091 case 8:
6092 if (dest <= src)
6093 return avr_asm_len ("mov %A0,%B1" CR_TAB
6094 "mov %B0,%C1" CR_TAB
6095 "clr %C0", op, plen, 3);
6096 else
6097 return avr_asm_len ("clr %C0" CR_TAB
6098 "mov %B0,%C1" CR_TAB
6099 "mov %A0,%B1", op, plen, 3);
6101 case 16:
6102 if (dest != src + 2)
6103 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6105 return avr_asm_len ("clr %B0" CR_TAB
6106 "clr %C0", op, plen, 2);
6108 default:
6109 if (INTVAL (op[2]) < 24)
6110 break;
6112 /* fall through */
6114 case 23:
6115 return avr_asm_len ("clr %A0" CR_TAB
6116 "sbrc %C0,7" CR_TAB
6117 "inc %A0" CR_TAB
6118 "clr %B0" CR_TAB
6119 "clr %C0", op, plen, 5);
6120 } /* switch */
6123 out_shift_with_cnt ("lsr %C0" CR_TAB
6124 "ror %B0" CR_TAB
6125 "ror %A0", insn, op, plen, 3);
6126 return "";
6130 /* 32-bit logical shift right ((unsigned long)x >> i) */
6132 const char *
6133 lshrsi3_out (rtx insn, rtx operands[], int *len)
6135 if (GET_CODE (operands[2]) == CONST_INT)
6137 int k;
6138 int *t = len;
6140 if (!len)
6141 len = &k;
6143 switch (INTVAL (operands[2]))
6145 default:
6146 if (INTVAL (operands[2]) < 32)
6147 break;
6149 if (AVR_HAVE_MOVW)
6150 return *len = 3, ("clr %D0" CR_TAB
6151 "clr %C0" CR_TAB
6152 "movw %A0,%C0");
6153 *len = 4;
6154 return ("clr %D0" CR_TAB
6155 "clr %C0" CR_TAB
6156 "clr %B0" CR_TAB
6157 "clr %A0");
6159 case 8:
6161 int reg0 = true_regnum (operands[0]);
6162 int reg1 = true_regnum (operands[1]);
6163 *len = 4;
6164 if (reg0 <= reg1)
6165 return ("mov %A0,%B1" CR_TAB
6166 "mov %B0,%C1" CR_TAB
6167 "mov %C0,%D1" CR_TAB
6168 "clr %D0");
6169 else
6170 return ("clr %D0" CR_TAB
6171 "mov %C0,%D1" CR_TAB
6172 "mov %B0,%C1" CR_TAB
6173 "mov %A0,%B1");
6176 case 16:
6178 int reg0 = true_regnum (operands[0]);
6179 int reg1 = true_regnum (operands[1]);
6181 if (reg0 == reg1 + 2)
6182 return *len = 2, ("clr %C0" CR_TAB
6183 "clr %D0");
6184 if (AVR_HAVE_MOVW)
6185 return *len = 3, ("movw %A0,%C1" CR_TAB
6186 "clr %C0" CR_TAB
6187 "clr %D0");
6188 else
6189 return *len = 4, ("mov %B0,%D1" CR_TAB
6190 "mov %A0,%C1" CR_TAB
6191 "clr %C0" CR_TAB
6192 "clr %D0");
6195 case 24:
6196 return *len = 4, ("mov %A0,%D1" CR_TAB
6197 "clr %B0" CR_TAB
6198 "clr %C0" CR_TAB
6199 "clr %D0");
6201 case 31:
6202 *len = 6;
6203 return ("clr %A0" CR_TAB
6204 "sbrc %D0,7" CR_TAB
6205 "inc %A0" CR_TAB
6206 "clr %B0" CR_TAB
6207 "clr %C0" CR_TAB
6208 "clr %D0");
6210 len = t;
6212 out_shift_with_cnt ("lsr %D0" CR_TAB
6213 "ror %C0" CR_TAB
6214 "ror %B0" CR_TAB
6215 "ror %A0", insn, operands, len, 4);
6216 return "";
6220 /* Output addition of register XOP[0] and compile time constant XOP[2].
6221 CODE == PLUS: perform the addition by using ADD instructions, or
6222 CODE == MINUS: perform the same addition by subtracting the negated constant with SUB instructions:
6224 XOP[0] = XOP[0] + XOP[2]
6226 Or perform addition/subtraction with register XOP[2] depending on CODE:
6228 XOP[0] = XOP[0] +/- XOP[2]
6230 If PLEN == NULL, print assembler instructions to perform the operation;
6231 otherwise, set *PLEN to the length of the instruction sequence (in words)
6232 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6233 Set *PCC to the effect on cc0 according to the respective CC_* insn attribute.
6235 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6236 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6237 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6238 the subtrahend in the original insn, provided it is a compile time constant.
6239 In all other cases, SIGN is 0.
6241 If OUT_LABEL is true, print the final 0: label which is needed for
6242 saturated addition / subtraction. The only case where OUT_LABEL = false
6243 is useful is for saturated addition / subtraction performed during
6244 fixed-point rounding, cf. `avr_out_round'. */
6246 static void
6247 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
6248 enum rtx_code code_sat, int sign, bool out_label)
6250 /* MODE of the operation. */
6251 enum machine_mode mode = GET_MODE (xop[0]);
6253 /* INT_MODE of the same size. */
6254 enum machine_mode imode = int_mode_for_mode (mode);
6256 /* Number of bytes to operate on. */
6257 int i, n_bytes = GET_MODE_SIZE (mode);
6259 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6260 int clobber_val = -1;
6262 /* op[0]: 8-bit destination register
6263 op[1]: 8-bit const int
6264 op[2]: 8-bit scratch register */
6265 rtx op[3];
6267 /* Started the operation? Before the operation has started we may skip
6268 adding 0. This is no longer true once the operation has started because
6269 the carry must be taken into account. */
6270 bool started = false;
6272 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6273 rtx xval = xop[2];
6275 /* Output a BRVC instruction. Only needed with saturation. */
6276 bool out_brvc = true;
6278 if (plen)
6279 *plen = 0;
6281 if (REG_P (xop[2]))
6283 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6285 for (i = 0; i < n_bytes; i++)
6287 /* We operate byte-wise on the destination. */
6288 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
6289 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
6291 if (i == 0)
6292 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
6293 op, plen, 1);
6294 else
6295 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
6296 op, plen, 1);
6299 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6301 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
6303 if (MINUS == code)
6304 return;
6307 goto saturate;
6310 /* Except in the case of ADIW with 16-bit register (see below)
6311 addition does not set cc0 in a usable way. */
6313 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
6315 if (CONST_FIXED_P (xval))
6316 xval = avr_to_int_mode (xval);
6318 /* Adding/Subtracting zero is a no-op. */
6320 if (xval == const0_rtx)
6322 *pcc = CC_NONE;
6323 return;
6326 if (MINUS == code)
6327 xval = simplify_unary_operation (NEG, imode, xval, imode);
6329 op[2] = xop[3];
6331 if (SS_PLUS == code_sat && MINUS == code
6332 && sign < 0
6333 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
6334 & GET_MODE_MASK (QImode)))
6336 /* We compute x + 0x80 by means of SUB instructions. We negated the
6337 constant subtrahend above and are left with x - (-128) so that we
6338 need something like SUBI r,128 which does not exist because SUBI sets
6339 V according to the sign of the subtrahend. Notice the only case
6340 where this must be done is when NEG overflowed in case [2s] because
6341 the V computation needs the right sign of the subtrahend. */
6343 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6345 avr_asm_len ("subi %0,128" CR_TAB
6346 "brmi 0f", &msb, plen, 2);
6347 out_brvc = false;
6349 goto saturate;
6352 for (i = 0; i < n_bytes; i++)
6354 /* We operate byte-wise on the destination. */
6355 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6356 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
6358 /* 8-bit value to operate with this byte. */
6359 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6361 /* Registers R16..R31 can operate with immediate. */
6362 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6364 op[0] = reg8;
6365 op[1] = gen_int_mode (val8, QImode);
6367 /* To get a usable cc0, no low bytes may have been skipped. */
6369 if (i && !started)
6370 *pcc = CC_CLOBBER;
6372 if (!started
6373 && i % 2 == 0
6374 && i + 2 <= n_bytes
6375 && test_hard_reg_class (ADDW_REGS, reg8))
6377 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
6378 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
6380 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6381 i.e. operate word-wise. */
6383 if (val16 < 64)
6385 if (val16 != 0)
6387 started = true;
6388 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
6389 op, plen, 1);
6391 if (n_bytes == 2 && PLUS == code)
6392 *pcc = CC_SET_ZN;
6395 i++;
6396 continue;
6400 if (val8 == 0)
6402 if (started)
6403 avr_asm_len (code == PLUS
6404 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6405 op, plen, 1);
6406 continue;
6408 else if ((val8 == 1 || val8 == 0xff)
6409 && UNKNOWN == code_sat
6410 && !started
6411 && i == n_bytes - 1)
6413 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
6414 op, plen, 1);
6415 break;
6418 switch (code)
6420 case PLUS:
6422 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
6424 if (plen != NULL && UNKNOWN != code_sat)
6426 /* This belongs to the x + 0x80 corner case. The code with
6427 ADD instructions is not smaller, thus make this case
6428 expensive so that the caller won't pick it. */
6430 *plen += 10;
6431 break;
6434 if (clobber_val != (int) val8)
6435 avr_asm_len ("ldi %2,%1", op, plen, 1);
6436 clobber_val = (int) val8;
6438 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6440 break; /* PLUS */
6442 case MINUS:
6444 if (ld_reg_p)
6445 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6446 else
6448 gcc_assert (plen != NULL || REG_P (op[2]));
6450 if (clobber_val != (int) val8)
6451 avr_asm_len ("ldi %2,%1", op, plen, 1);
6452 clobber_val = (int) val8;
6454 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6457 break; /* MINUS */
6459 default:
6460 /* Unknown code */
6461 gcc_unreachable();
6464 started = true;
6466 } /* for all sub-bytes */
6468 saturate:
6470 if (UNKNOWN == code_sat)
6471 return;
6473 *pcc = (int) CC_CLOBBER;
6475 /* Vanilla addition/subtraction is done. We are left with saturation.
6477 We have to compute A = A <op> B where A is a register and
6478 B is a register or a non-zero compile time constant CONST.
6479 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6480 B stands for the original operand $2 in INSN. In the case of B = CONST,
6481 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6483 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6486 unsigned
6487 operation | code | sat if | b is | sat value | case
6488 -----------------+-------+----------+--------------+-----------+-------
6489 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6490 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6491 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6492 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6495 signed
6496 operation | code | sat if | b is | sat value | case
6497 -----------------+-------+----------+--------------+-----------+-------
6498 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6499 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6500 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6501 - as a + (-b) | add | V == 1 | const | s- | [4s]
6503 s+ = b < 0 ? -0x80 : 0x7f
6504 s- = b < 0 ? 0x7f : -0x80
6506 The cases a - b actually perform a - (-(-b)) if B is CONST.
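
   A worked example (hypothetical operands): an 8-bit SS_PLUS of the
   constant +5 to a d-register is emitted via the SUB flavor, case [2s]:

       subi %0,-5     ; %0 += 5
       brvc 0f        ; no signed overflow -> done
       ldi  %0,0x7f   ; b > 0, so saturate towards +0x7f
   0: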
6509 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6510 op[1] = n_bytes > 1
6511 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
6512 : NULL_RTX;
6514 bool need_copy = true;
6515 int len_call = 1 + AVR_HAVE_JMP_CALL;
6517 switch (code_sat)
6519 default:
6520 gcc_unreachable();
6522 case SS_PLUS:
6523 case SS_MINUS:
6525 if (out_brvc)
6526 avr_asm_len ("brvc 0f", op, plen, 1);
6528 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6530 /* [1s,reg] */
6532 if (n_bytes == 1)
6533 avr_asm_len ("ldi %0,0x7f" CR_TAB
6534 "adc %0,__zero_reg__", op, plen, 2);
6535 else
6536 avr_asm_len ("ldi %0,0x7f" CR_TAB
6537 "ldi %1,0xff" CR_TAB
6538 "adc %1,__zero_reg__" CR_TAB
6539 "adc %0,__zero_reg__", op, plen, 4);
6541 else if (sign == 0 && PLUS == code)
6543 /* [1s,reg] */
6545 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6547 if (n_bytes == 1)
6548 avr_asm_len ("ldi %0,0x80" CR_TAB
6549 "sbrs %2,7" CR_TAB
6550 "dec %0", op, plen, 3);
6551 else
6552 avr_asm_len ("ldi %0,0x80" CR_TAB
6553 "cp %2,%0" CR_TAB
6554 "sbc %1,%1" CR_TAB
6555 "sbci %0,0", op, plen, 4);
6557 else if (sign == 0 && MINUS == code)
6559 /* [3s,reg] */
6561 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6563 if (n_bytes == 1)
6564 avr_asm_len ("ldi %0,0x7f" CR_TAB
6565 "sbrs %2,7" CR_TAB
6566 "inc %0", op, plen, 3);
6567 else
6568 avr_asm_len ("ldi %0,0x7f" CR_TAB
6569 "cp %0,%2" CR_TAB
6570 "sbc %1,%1" CR_TAB
6571 "sbci %0,-1", op, plen, 4);
6573 else if ((sign < 0) ^ (SS_MINUS == code_sat))
6575 /* [1s,const,B < 0] [2s,B < 0] */
6576 /* [3s,const,B > 0] [4s,B > 0] */
6578 if (n_bytes == 8)
6580 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6581 need_copy = false;
6584 avr_asm_len ("ldi %0,0x80", op, plen, 1);
6585 if (n_bytes > 1 && need_copy)
6586 avr_asm_len ("clr %1", op, plen, 1);
6588 else if ((sign > 0) ^ (SS_MINUS == code_sat))
6590 /* [1s,const,B > 0] [2s,B > 0] */
6591 /* [3s,const,B < 0] [4s,B < 0] */
6593 if (n_bytes == 8)
6595 avr_asm_len ("sec" CR_TAB
6596 "%~call __sbc_8", op, plen, 1 + len_call);
6597 need_copy = false;
6600 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
6601 if (n_bytes > 1 && need_copy)
6602 avr_asm_len ("ldi %1,0xff", op, plen, 1);
6604 else
6605 gcc_unreachable();
6607 break;
6609 case US_PLUS:
6610 /* [1u] : [2u] */
6612 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
6614 if (n_bytes == 8)
6616 if (MINUS == code)
6617 avr_asm_len ("sec", op, plen, 1);
6618 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
6620 need_copy = false;
6622 else
6624 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
6625 avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
6626 else
6627 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
6628 op, plen, 1);
6630 break; /* US_PLUS */
6632 case US_MINUS:
6633 /* [4u] : [3u] */
6635 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
6637 if (n_bytes == 8)
6639 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6640 need_copy = false;
6642 else
6643 avr_asm_len ("clr %0", op, plen, 1);
6645 break;
6648 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6649 Now copy the right value to the LSBs. */
6651 if (need_copy && n_bytes > 1)
6653 if (US_MINUS == code_sat || US_PLUS == code_sat)
6655 avr_asm_len ("mov %1,%0", op, plen, 1);
6657 if (n_bytes > 2)
6659 op[0] = xop[0];
6660 if (AVR_HAVE_MOVW)
6661 avr_asm_len ("movw %0,%1", op, plen, 1);
6662 else
6663 avr_asm_len ("mov %A0,%1" CR_TAB
6664 "mov %B0,%1", op, plen, 2);
6667 else if (n_bytes > 2)
6669 op[0] = xop[0];
6670 avr_asm_len ("mov %A0,%1" CR_TAB
6671 "mov %B0,%1", op, plen, 2);
6675 if (need_copy && n_bytes == 8)
6677 if (AVR_HAVE_MOVW)
6678 avr_asm_len ("movw %r0+2,%0" CR_TAB
6679 "movw %r0+4,%0", xop, plen, 2);
6680 else
6681 avr_asm_len ("mov %r0+2,%0" CR_TAB
6682 "mov %r0+3,%0" CR_TAB
6683 "mov %r0+4,%0" CR_TAB
6684 "mov %r0+5,%0", xop, plen, 4);
6687 if (out_label)
6688 avr_asm_len ("0:", op, plen, 0);
6692 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6693 is not a compile-time constant, i.e. a symbolic operand:
6695 XOP[0] = XOP[0] +/- XOP[2]
6697 This is a helper for the function below. The only insns that need this
6698 are additions/subtractions for pointer modes, i.e. HImode and PSImode. */
6700 static const char*
6701 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6703 enum machine_mode mode = GET_MODE (xop[0]);
6705 /* Only pointer modes want to add symbols. */
6707 gcc_assert (mode == HImode || mode == PSImode);
6709 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6711 avr_asm_len (PLUS == code
6712 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6713 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
6714 xop, plen, -2);
6716 if (PSImode == mode)
6717 avr_asm_len (PLUS == code
6718 ? "sbci %C0,hlo8(-(%2))"
6719 : "sbci %C0,hlo8(%2)", xop, plen, 1);
6720 return "";
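/* Example (hypothetical symbol): adding the address of `table' to the Z
   pointer (reg:HI 30) emits

       subi r30,lo8(-(table))
       sbci r31,hi8(-(table))

   AVR has no add-immediate instruction, so the addition is performed by
   subtracting the negated value with SUBI/SBCI. */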
6724 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6726 INSN is a single_set insn or an insn pattern with a binary operation as
6727 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6729 XOP are the operands of INSN. In the case of 64-bit operations with
6730 a constant, XOP[] has just one element: the summand/subtrahend in XOP[0].
6731 The non-saturating insns up to 32 bits may or may not supply a "d" class
6732 scratch as XOP[3].
6734 If PLEN == NULL output the instructions.
6735 If PLEN != NULL set *PLEN to the length of the sequence in words.
6737 PCC is a pointer to store the instructions' effect on cc0.
6738 PCC may be NULL.
6740 PLEN and PCC default to NULL.
6742 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6744 Return "" */
6746 const char*
6747 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
6749 int cc_plus, cc_minus, cc_dummy;
6750 int len_plus, len_minus;
6751 rtx op[4];
6752 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6753 rtx xdest = SET_DEST (xpattern);
6754 enum machine_mode mode = GET_MODE (xdest);
6755 enum machine_mode imode = int_mode_for_mode (mode);
6756 int n_bytes = GET_MODE_SIZE (mode);
6757 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
6758 enum rtx_code code
6759 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6760 ? PLUS : MINUS);
6762 if (!pcc)
6763 pcc = &cc_dummy;
6765 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6767 if (PLUS == code_sat || MINUS == code_sat)
6768 code_sat = UNKNOWN;
6770 if (n_bytes <= 4 && REG_P (xop[2]))
6772 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
6773 return "";
6776 if (8 == n_bytes)
6778 op[0] = gen_rtx_REG (DImode, ACC_A);
6779 op[1] = gen_rtx_REG (DImode, ACC_A);
6780 op[2] = avr_to_int_mode (xop[0]);
6782 else
6784 if (!REG_P (xop[2])
6785 && !CONST_INT_P (xop[2])
6786 && !CONST_FIXED_P (xop[2]))
6788 return avr_out_plus_symbol (xop, code, plen, pcc);
6791 op[0] = avr_to_int_mode (xop[0]);
6792 op[1] = avr_to_int_mode (xop[1]);
6793 op[2] = avr_to_int_mode (xop[2]);
6796 /* Saturations and 64-bit operations don't have a clobber operand.
6797 For the other cases, the caller will provide a proper XOP[3]. */
6799 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
6800 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
6802 /* Saturation will need the sign of the original operand. */
6804 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6805 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
6807 /* If we subtract and the subtrahend is a constant, then negate it
6808 so that avr_out_plus_1 can be used. */
6810 if (MINUS == code)
6811 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
6813 /* Work out the shortest sequence. */
6815 avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
6816 avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
6818 if (plen)
6820 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6821 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6823 else if (len_minus <= len_plus)
6824 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
6825 else
6826 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
6828 return "";
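/* Example (hypothetical operands): for reg:HI 24 += -1, the helper is
   run for both flavors; the MINUS flavor negates the constant to 1 and
   wins with the single word

       sbiw r24,1

   while the PLUS flavor would need LDI + ADD + ADC on a scratch. */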
6832 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6833 time constant XOP[2]:
6835 XOP[0] = XOP[0] <op> XOP[2]
6837 and return "". If PLEN == NULL, print assembler instructions to perform the
6838 operation; otherwise, set *PLEN to the length of the instruction sequence
6839 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6840 register or SCRATCH if no clobber register is needed for the operation.
6841 INSN is an INSN_P or a pattern of an insn. */
6843 const char*
6844 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6846 /* CODE and MODE of the operation. */
6847 rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
6848 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
6849 enum machine_mode mode = GET_MODE (xop[0]);
6851 /* Number of bytes to operate on. */
6852 int i, n_bytes = GET_MODE_SIZE (mode);
6854 /* Value of T-flag (0 or 1) or -1 if unknown. */
6855 int set_t = -1;
6857 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6858 int clobber_val = -1;
6860 /* op[0]: 8-bit destination register
6861 op[1]: 8-bit const int
6862 op[2]: 8-bit clobber register or SCRATCH
6863 op[3]: 8-bit register containing 0xff or NULL_RTX */
6864 rtx op[4];
6866 op[2] = xop[3];
6867 op[3] = NULL_RTX;
6869 if (plen)
6870 *plen = 0;
6872 for (i = 0; i < n_bytes; i++)
6874 /* We operate byte-wise on the destination. */
6875 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6876 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6878 /* 8-bit value to operate with this byte. */
6879 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6881 /* Number of bits set in the current byte of the constant. */
6882 int pop8 = avr_popcount (val8);
6884 /* Registers R16..R31 can operate with immediate. */
6885 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6887 op[0] = reg8;
6888 op[1] = GEN_INT (val8);
6890 switch (code)
6892 case IOR:
6894 if (0 == pop8)
6895 continue;
6896 else if (ld_reg_p)
6897 avr_asm_len ("ori %0,%1", op, plen, 1);
6898 else if (1 == pop8)
6900 if (set_t != 1)
6901 avr_asm_len ("set", op, plen, 1);
6902 set_t = 1;
6904 op[1] = GEN_INT (exact_log2 (val8));
6905 avr_asm_len ("bld %0,%1", op, plen, 1);
6907 else if (8 == pop8)
6909 if (op[3] != NULL_RTX)
6910 avr_asm_len ("mov %0,%3", op, plen, 1);
6911 else
6912 avr_asm_len ("clr %0" CR_TAB
6913 "dec %0", op, plen, 2);
6915 op[3] = op[0];
6917 else
6919 if (clobber_val != (int) val8)
6920 avr_asm_len ("ldi %2,%1", op, plen, 1);
6921 clobber_val = (int) val8;
6923 avr_asm_len ("or %0,%2", op, plen, 1);
6926 continue; /* IOR */
6928 case AND:
6930 if (8 == pop8)
6931 continue;
6932 else if (0 == pop8)
6933 avr_asm_len ("clr %0", op, plen, 1);
6934 else if (ld_reg_p)
6935 avr_asm_len ("andi %0,%1", op, plen, 1);
6936 else if (7 == pop8)
6938 if (set_t != 0)
6939 avr_asm_len ("clt", op, plen, 1);
6940 set_t = 0;
6942 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6943 avr_asm_len ("bld %0,%1", op, plen, 1);
6945 else
6947 if (clobber_val != (int) val8)
6948 avr_asm_len ("ldi %2,%1", op, plen, 1);
6949 clobber_val = (int) val8;
6951 avr_asm_len ("and %0,%2", op, plen, 1);
6954 continue; /* AND */
6956 case XOR:
6958 if (0 == pop8)
6959 continue;
6960 else if (8 == pop8)
6961 avr_asm_len ("com %0", op, plen, 1);
6962 else if (ld_reg_p && val8 == (1 << 7))
6963 avr_asm_len ("subi %0,%1", op, plen, 1);
6964 else
6966 if (clobber_val != (int) val8)
6967 avr_asm_len ("ldi %2,%1", op, plen, 1);
6968 clobber_val = (int) val8;
6970 avr_asm_len ("eor %0,%2", op, plen, 1);
6973 continue; /* XOR */
6975 default:
6976 /* Unknown rtx_code */
6977 gcc_unreachable();
6979 } /* for all sub-bytes */
6981 return "";
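/* Examples (hypothetical operands): reg:QI 2 is below R16, so ORI/ANDI
   are unavailable and single-bit masks go through the T flag:

       r2 |= 0x20   ->   set
                         bld r2,5

       r2 &= 0x7f   ->   clt
                         bld r2,7

   Consecutive one-bit bytes share a single SET/CLT; that is what set_t
   keeps track of. */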
6985 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6986 PLEN != NULL: Set *PLEN to the length of that sequence.
6987 Return "". */
6989 const char*
6990 avr_out_addto_sp (rtx *op, int *plen)
6992 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6993 int addend = INTVAL (op[0]);
6995 if (plen)
6996 *plen = 0;
6998 if (addend < 0)
7000 if (flag_verbose_asm || flag_print_asm_name)
7001 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7003 while (addend <= -pc_len)
7005 addend += pc_len;
7006 avr_asm_len ("rcall .", op, plen, 1);
7009 while (addend++ < 0)
7010 avr_asm_len ("push __zero_reg__", op, plen, 1);
7012 else if (addend > 0)
7014 if (flag_verbose_asm || flag_print_asm_name)
7015 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7017 while (addend-- > 0)
7018 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7021 return "";
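/* Example: with a 2-byte program counter, "SP -= 5" is emitted as

       rcall .              ; push 2-byte return address: SP -= 2
       rcall .              ; SP -= 2
       push __zero_reg__    ; SP -= 1

   Each RCALL just pushes its return address and continues with the next
   instruction: a 1-word way of allocating PC-size bytes on the stack. */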
7025 /* Outputs instructions needed for fixed point type conversion.
7026 This includes converting between any fixed point type, as well
7027 as converting to any integer type. Conversion between integer
7028 types is not supported.
7030 Converting signed fractional types requires a bit shift if converting
7031 to or from any unsigned fractional type because the binary point is
7032 shifted by 1 bit. When the destination is a signed fractional, the sign
7033 is stored in either the carry or T bit. */
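/* For example, converting the signed fraction QQmode (1 sign + 7
   fraction bits) to the unsigned fraction UQQmode (8 fraction bits)
   moves the binary point by one position, so the bytes are shifted
   left once (and right once in the opposite direction); that is the
   ASHIFT/ASHIFTRT decision made from fbit % 8 below. */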
7035 const char*
7036 avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
7038 size_t i;
7039 rtx xop[6];
7040 RTX_CODE shift = UNKNOWN;
7041 bool sign_in_carry = false;
7042 bool msb_in_carry = false;
7043 bool lsb_in_tmp_reg = false;
7044 bool lsb_in_carry = false;
7045 bool frac_rounded = false;
7046 const char *code_ashift = "lsl %0";
7049 #define MAY_CLOBBER(RR) \
7050 /* Shorthand used below. */ \
7051 ((sign_bytes \
7052 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7053 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7054 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7055 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7057 struct
7059 /* bytes : Length of operand in bytes.
7060 ibyte : Length of integral part in bytes.
7061 fbyte, fbit : Length of fractional part in bytes, bits. */
7063 bool sbit;
7064 unsigned fbit, bytes, ibyte, fbyte;
7065 unsigned regno, regno_msb;
7066 } dest, src, *val[2] = { &dest, &src };
7068 if (plen)
7069 *plen = 0;
7071 /* Step 0: Determine information on source and destination operand we
7072 ====== will need in the remainder. */
7074 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7076 enum machine_mode mode;
7078 xop[i] = operands[i];
7080 mode = GET_MODE (xop[i]);
7082 val[i]->bytes = GET_MODE_SIZE (mode);
7083 val[i]->regno = REGNO (xop[i]);
7084 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7086 if (SCALAR_INT_MODE_P (mode))
7088 val[i]->sbit = intsigned;
7089 val[i]->fbit = 0;
7091 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7093 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7094 val[i]->fbit = GET_MODE_FBIT (mode);
7096 else
7097 fatal_insn ("unsupported fixed-point conversion", insn);
7099 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7100 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7103 // Byte offset of the decimal point, taking into account the different
7104 // places of the decimal point in input and output and the different
7105 // register numbers of input and output.
7106 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7108 // Number of destination bytes that will come from sign / zero extension.
7109 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7111 // Number of bytes at the low end to be filled with zeros.
7112 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7114 // Do we have a 16-bit register that is cleared?
7115 rtx clrw = NULL_RTX;
7117 bool sign_extend = src.sbit && sign_bytes;
7119 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7120 shift = ASHIFT;
7121 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7122 shift = ASHIFTRT;
7123 else if (dest.fbit % 8 == src.fbit % 8)
7124 shift = UNKNOWN;
7125 else
7126 gcc_unreachable();
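/* An illustration of the shift selection above, assuming the standard
   fixed-point layouts: signed fract modes like QQ have fbit % 8 == 7,
   unsigned ones like UQQ have fbit % 8 == 0.  Hence converting QQ
   (fbit = 7) to UQQ (fbit = 8) selects ASHIFT, the reverse direction
   selects ASHIFTRT, and conversions that preserve fbit % 8 need no
   shift at all.  */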
7128 /* If we need to round the fractional part, we might need to save/round it
7129 before clobbering any of it in Step 1. Also, we might want to do
7130 the rounding now to make use of LD_REGS. */
7131 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7132 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7133 && !TARGET_FRACT_CONV_TRUNC)
7135 bool overlap
7136 = (src.regno <=
7137 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
7138 && dest.regno - offset -1 >= dest.regno);
7139 unsigned s0 = dest.regno - offset -1;
7140 bool use_src = true;
7141 unsigned sn;
7142 unsigned copied_msb = src.regno_msb;
7143 bool have_carry = false;
7145 if (src.ibyte > dest.ibyte)
7146 copied_msb -= src.ibyte - dest.ibyte;
7148 for (sn = s0; sn <= copied_msb; sn++)
7149 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
7150 && !reg_unused_after (insn, all_regs_rtx[sn]))
7151 use_src = false;
7152 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
7154 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7155 &all_regs_rtx[src.regno_msb], plen, 2);
7156 sn = src.regno;
7157 if (sn < s0)
7159 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
7160 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
7161 else
7162 avr_asm_len ("sec" CR_TAB "cpc %0,__zero_reg__",
7163 &all_regs_rtx[sn], plen, 2);
7164 have_carry = true;
7166 while (++sn < s0)
7167 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7168 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
7169 &all_regs_rtx[s0], plen, 1);
7170 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7171 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
7172 avr_asm_len ("\n0:", NULL, plen, 0);
7173 frac_rounded = true;
7175 else if (use_src && overlap)
7177 avr_asm_len ("clr __tmp_reg__" CR_TAB
7178 "sbrc %1,0" CR_TAB "dec __tmp_reg__", xop, plen, 1);
7179 sn = src.regno;
7180 if (sn < s0)
7182 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7183 have_carry = true;
7185 while (++sn < s0)
7186 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7187 if (have_carry)
7188 avr_asm_len ("clt" CR_TAB "bld __tmp_reg__,7" CR_TAB
7189 "adc %0,__tmp_reg__",
7190 &all_regs_rtx[s0], plen, 1);
7191 else
7192 avr_asm_len ("lsr __tmp_reg" CR_TAB "add %0,__tmp_reg__",
7193 &all_regs_rtx[s0], plen, 2);
7194 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7195 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7196 frac_rounded = true;
7198 else if (overlap)
7200 bool use_src
7201 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
7202 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
7203 || reg_unused_after (insn, all_regs_rtx[s0])));
7204 xop[2] = all_regs_rtx[s0];
7205 unsigned sn = src.regno;
7206 if (!use_src || sn == s0)
7207 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7208 /* We need to consider to-be-discarded bits
7209 if the value is negative. */
7210 if (sn < s0)
7212 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7213 &all_regs_rtx[src.regno_msb], plen, 2);
7214 /* Test to-be-discarded bytes for any nonzero bits.
7215 ??? Could use OR or SBIW to test two registers at once. */
7216 if (sn < s0)
7217 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7218 while (++sn < s0)
7219 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7220 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7221 if (use_src)
7222 avr_asm_len ("breq 0f" CR_TAB
7223 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7224 xop, plen, 3);
7225 else
7226 avr_asm_len ("breq 0f" CR_TAB
7227 "set" CR_TAB "bld __tmp_reg__,0\n0:",
7228 xop, plen, 3);
7230 lsb_in_tmp_reg = true;
7234 /* Step 1: Clear bytes at the low end and copy payload bits from source
7235 ====== to destination. */
7237 int step = offset < 0 ? 1 : -1;
7238 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7240 // We cleared at least that number of registers.
7241 int clr_n = 0;
7243 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7245 // Next regno of destination is needed for MOVW
7246 unsigned d1 = d0 + step;
7248 // Current and next regno of source
7249 signed s0 = d0 - offset;
7250 signed s1 = s0 + step;
7252 // Must current resp. next regno be CLRed? This applies to the low
7253 // bytes of the destination that have no associated source bytes.
7254 bool clr0 = s0 < (signed) src.regno;
7255 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7257 // First gather what code to emit (if any) and additional step to
7258 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7259 // is the source rtx for the current loop iteration.
7260 const char *code = NULL;
7261 int stepw = 0;
7263 if (clr0)
7265 if (AVR_HAVE_MOVW && clr1 && clrw)
7267 xop[2] = all_regs_rtx[d0 & ~1];
7268 xop[3] = clrw;
7269 code = "movw %2,%3";
7270 stepw = step;
7272 else
7274 xop[2] = all_regs_rtx[d0];
7275 code = "clr %2";
7277 if (++clr_n >= 2
7278 && !clrw
7279 && d0 % 2 == (step > 0))
7281 clrw = all_regs_rtx[d0 & ~1];
7285 else if (offset && s0 <= (signed) src.regno_msb)
7287 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7288 && d0 % 2 == (offset > 0)
7289 && d1 <= dest.regno_msb && d1 >= dest.regno
7290 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7292 xop[2] = all_regs_rtx[d0 & ~movw];
7293 xop[3] = all_regs_rtx[s0 & ~movw];
7294 code = movw ? "movw %2,%3" : "mov %2,%3";
7295 stepw = step * movw;
7298 if (code)
7300 if (sign_extend && shift != ASHIFT && !sign_in_carry
7301 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7303 /* We are going to overwrite the sign bit. If we sign-extend,
7304 store the sign in the Carry flag. This is not needed if
7305 the rest of the destination will be produced by ASHIFT, because
7306 the ASHIFT will set Carry without an extra instruction. */
7308 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7309 sign_in_carry = true;
7312 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7314 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7315 && src.ibyte > dest.ibyte
7316 && (d0 == src_msb || d0 + stepw == src_msb))
7318 /* We are going to overwrite the MSB. If we shift right,
7319 store the MSB in the Carry flag. This is only needed if
7320 we don't sign-extend, because with sign-extension the MSB
7321 (the sign) will be produced by the sign extension. */
7323 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7324 msb_in_carry = true;
7327 unsigned src_lsb = dest.regno - offset -1;
7329 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7330 && !lsb_in_tmp_reg
7331 && (d0 == src_lsb || d0 + stepw == src_lsb))
7333 /* We are going to overwrite the new LSB; store it into carry. */
7335 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7336 code_ashift = "rol %0";
7337 lsb_in_carry = true;
7340 avr_asm_len (code, xop, plen, 1);
7341 d0 += stepw;
7345 /* Step 2: Shift destination left by 1 bit position. This might be needed
7346 ====== for signed input and unsigned output. */
7348 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7350 unsigned s0 = dest.regno - offset -1;
7352 /* n1169 4.1.4 says:
7353 "Conversions from a fixed-point to an integer type round toward zero."
7354 Hence, converting a fract type to integer only gives a non-zero result
7355 for -1. */
7356 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7357 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
7358 && !TARGET_FRACT_CONV_TRUNC)
7360 gcc_assert (s0 == src.regno_msb);
7361 /* Check if the input is -1. We do that by checking if negating
7362 the input causes an integer overflow. */
7363 unsigned sn = src.regno;
7364 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7365 while (sn <= s0)
7366 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7368 /* Overflow goes with set carry. Clear carry otherwise. */
7369 avr_asm_len ("brvs 0f" CR_TAB "clc\n0:", NULL, plen, 2);
7371 /* Likewise, when converting from accumulator types to integer, we
7372 need to round up negative values. */
7373 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7374 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7375 && !TARGET_FRACT_CONV_TRUNC
7376 && !frac_rounded)
7378 bool have_carry = false;
7380 xop[2] = all_regs_rtx[s0];
7381 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
7382 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7383 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7384 &all_regs_rtx[src.regno_msb], plen, 2);
7385 if (!lsb_in_tmp_reg)
7387 unsigned sn = src.regno;
7388 if (sn < s0)
7390 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
7391 plen, 1);
7392 have_carry = true;
7394 while (++sn < s0)
7395 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
7396 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
7398 /* Add in C and the rounding value 127. */
7399 /* If the destination msb is a sign byte, and in LD_REGS,
7400 grab it as a temporary. */
7401 if (sign_bytes
7402 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
7403 dest.regno_msb))
7405 xop[3] = all_regs_rtx[dest.regno_msb];
7406 avr_asm_len ("ldi %3,127", xop, plen, 1);
7407 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
7408 : have_carry ? "adc %2,%3"
7409 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
7410 : "add %2,%3"),
7411 xop, plen, 1);
7413 else
7415 /* Fall back to use __zero_reg__ as a temporary. */
7416 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
7417 if (have_carry)
7418 avr_asm_len ("clt" CR_TAB "bld __zero_reg__,7", NULL, plen, 2);
7419 else
7420 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
7421 avr_asm_len ((have_carry && lsb_in_tmp_reg
7422 ? "adc __tmp_reg__,__zero_reg__"
7423 : have_carry ? "adc %2,__zero_reg__"
7424 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
7425 : "add %2,__zero_reg__"),
7426 xop, plen, 1);
7427 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
7429 for (d0 = dest.regno + zero_bytes;
7430 d0 <= dest.regno_msb - sign_bytes; d0++)
7431 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
7432 avr_asm_len (lsb_in_tmp_reg
7433 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7434 xop, plen, 1);
7436 else if (MAY_CLOBBER (s0))
7437 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7438 else
7439 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7440 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7442 code_ashift = "rol %0";
7443 lsb_in_carry = true;
7446 if (shift == ASHIFT)
7448 for (d0 = dest.regno + zero_bytes;
7449 d0 <= dest.regno_msb - sign_bytes; d0++)
7451 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7452 code_ashift = "rol %0";
7455 lsb_in_carry = false;
7456 sign_in_carry = true;
7459 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7460 ======= it in sign-extension below. */
7462 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7463 && src.ibyte > dest.ibyte)
7465 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7467 if (MAY_CLOBBER (s0))
7468 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7469 else
7470 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7471 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7473 msb_in_carry = true;
7476 /* Step 3: Sign-extend or zero-extend the destination as needed.
7477 ====== */
7479 if (sign_extend && !sign_in_carry)
7481 unsigned s0 = src.regno_msb;
7483 if (MAY_CLOBBER (s0))
7484 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7485 else
7486 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7487 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7489 sign_in_carry = true;
7492 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7494 unsigned copies = 0;
7495 rtx movw = sign_extend ? NULL_RTX : clrw;
7497 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7499 if (AVR_HAVE_MOVW && movw
7500 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7502 xop[2] = all_regs_rtx[d0];
7503 xop[3] = movw;
7504 avr_asm_len ("movw %2,%3", xop, plen, 1);
7505 d0++;
7507 else
7509 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7510 &all_regs_rtx[d0], plen, 1);
7512 if (++copies >= 2 && !movw && d0 % 2 == 1)
7513 movw = all_regs_rtx[d0-1];
7515 } /* for */
7518 /* Step 4: Right shift the destination. This might be needed for
7519 ====== conversions from unsigned to signed. */
7521 if (shift == ASHIFTRT)
7523 const char *code_ashiftrt = "lsr %0";
7525 if (sign_extend || msb_in_carry)
7526 code_ashiftrt = "ror %0";
7528 if (src.sbit && src.ibyte == dest.ibyte)
7529 code_ashiftrt = "asr %0";
7531 for (d0 = dest.regno_msb - sign_bytes;
7532 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7534 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7535 code_ashiftrt = "ror %0";
7539 #undef MAY_CLOBBER
7541 return "";
7545 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7546 XOP[2] is the rounding point, a CONST_INT. The function prints the
7547 instruction sequence if PLEN = NULL and computes the length in words
7548 of the sequence if PLEN != NULL. Most of this function deals with
7549 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7551 const char*
7552 avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
7554 enum machine_mode mode = GET_MODE (xop[0]);
7555 enum machine_mode imode = int_mode_for_mode (mode);
7556 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7557 int fbit = (int) GET_MODE_FBIT (mode);
7558 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
7559 // Lengths of PLUS and AND parts.
7560 int len_add = 0, *plen_add = plen ? &len_add : NULL;
7561 int len_and = 0, *plen_and = plen ? &len_and : NULL;
7563 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7564 // the saturated addition so that we can emit the "rjmp 1f" before the
7565 // "0:" below.
7567 rtx xadd = const_fixed_from_double_int (i_add, mode);
7568 rtx xpattern, xsrc, op[4];
7570 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
7571 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
7572 : gen_rtx_US_PLUS (mode, xop[1], xadd);
7573 xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);
7575 op[0] = xop[0];
7576 op[1] = xop[1];
7577 op[2] = xadd;
7578 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
7580 avr_asm_len ("rjmp 1f" CR_TAB
7581 "0:", NULL, plen_add, 1);
7583 // Keep all bits from RP and higher: ... 2^(-RP)
7584 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7585 // Rounding point ^^^^^^^
7586 // Added above ^^^^^^^^^
7587 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
7588 rtx xmask = immed_double_int_const (-i_add - i_add, imode);
7590 xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));
7592 op[0] = xreg;
7593 op[1] = xreg;
7594 op[2] = xmask;
7595 op[3] = gen_rtx_SCRATCH (QImode);
7596 avr_out_bitop (xpattern, op, plen_and);
7597 avr_asm_len ("1:", NULL, plen, 0);
7599 if (plen)
7600 *plen = len_add + len_and;
7602 return "";
7606 /* Create RTL split patterns for byte sized rotate expressions. This
7607 produces a series of move instructions and considers overlap situations.
7608 Overlapping non-HImode operands need a scratch register. */
7610 bool
7611 avr_rotate_bytes (rtx operands[])
7613 int i, j;
7614 enum machine_mode mode = GET_MODE (operands[0]);
7615 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
7616 bool same_reg = rtx_equal_p (operands[0], operands[1]);
7617 int num = INTVAL (operands[2]);
7618 rtx scratch = operands[3];
7619 /* Work out if a byte or word move is needed. Odd byte rotates need QImode.
7620 Use a word move if no scratch is needed; otherwise use the scratch's size. */
7621 enum machine_mode move_mode = QImode;
7622 int move_size, offset, size;
7624 if (num & 0xf)
7625 move_mode = QImode;
7626 else if ((mode == SImode && !same_reg) || !overlapped)
7627 move_mode = HImode;
7628 else
7629 move_mode = GET_MODE (scratch);
7631 /* Force DI rotate to use QI moves since other DI moves are currently split
7632 into QI moves so forward propagation works better. */
7633 if (mode == DImode)
7634 move_mode = QImode;
7635 /* Make scratch smaller if needed. */
7636 if (SCRATCH != GET_CODE (scratch)
7637 && HImode == GET_MODE (scratch)
7638 && QImode == move_mode)
7639 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
7641 move_size = GET_MODE_SIZE (move_mode);
7642 /* Number of bytes/words to rotate. */
7643 offset = (num >> 3) / move_size;
7644 /* Number of moves needed. */
7645 size = GET_MODE_SIZE (mode) / move_size;
7646 /* HImode byte swap is a special case that avoids a scratch register. */
7647 if (mode == HImode && same_reg)
7649 /* HImode byte swap, using xor. This is as quick as using scratch. */
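/* The three XOR moves below implement the classic in-place exchange,
   in C terms: dst ^= src; src ^= dst; dst ^= src; -- after which the
   two bytes are swapped without touching any other register.  */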
7650 rtx src, dst;
7651 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
7652 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
7653 if (!rtx_equal_p (dst, src))
7655 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7656 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
7657 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7660 else
7662 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7663 /* Create linked list of moves to determine move order. */
7664 struct {
7665 rtx src, dst;
7666 int links;
7667 } move[MAX_SIZE + 8];
7668 int blocked, moves;
7670 gcc_assert (size <= MAX_SIZE);
7671 /* Generate list of subreg moves. */
7672 for (i = 0; i < size; i++)
7674 int from = i;
7675 int to = (from + offset) % size;
7676 move[i].src = simplify_gen_subreg (move_mode, operands[1],
7677 mode, from * move_size);
7678 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
7679 mode, to * move_size);
7680 move[i].links = -1;
7682 /* Mark dependence where a dst of one move is the src of another move.
7683 The first move is a conflict, as it must wait until the second is
7684 performed. We ignore moves to self -- we catch this later. */
7685 if (overlapped)
7686 for (i = 0; i < size; i++)
7687 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
7688 for (j = 0; j < size; j++)
7689 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
7691 /* The dst of move i is the src of move j. */
7692 move[i].links = j;
7693 break;
7696 blocked = -1;
7697 moves = 0;
7698 /* Go through move list and perform non-conflicting moves. As each
7699 non-overlapping move is made, it may remove other conflicts
7700 so the process is repeated until no conflicts remain. */
7703 blocked = -1;
7704 moves = 0;
7705 /* Emit move where dst is not also a src or we have used that
7706 src already. */
7707 for (i = 0; i < size; i++)
7708 if (move[i].src != NULL_RTX)
7710 if (move[i].links == -1
7711 || move[move[i].links].src == NULL_RTX)
7713 moves++;
7714 /* Ignore NOP moves to self. */
7715 if (!rtx_equal_p (move[i].dst, move[i].src))
7716 emit_move_insn (move[i].dst, move[i].src);
7718 /* Remove conflict from list. */
7719 move[i].src = NULL_RTX;
7721 else
7722 blocked = i;
7725 /* Check for deadlock. This is when no moves occurred and we have
7726 at least one blocked move. */
7727 if (moves == 0 && blocked != -1)
7729 /* We need the scratch register to break the deadlock:
7730 add a move that copies the dst of the blocked move into scratch.
7731 When this move is performed, it breaks the chain deadlock.
7732 The scratch register is then substituted into the real move. */
7734 gcc_assert (SCRATCH != GET_CODE (scratch));
7736 move[size].src = move[blocked].dst;
7737 move[size].dst = scratch;
7738 /* Scratch move is never blocked. */
7739 move[size].links = -1;
7741 /* Make sure we have a valid link. */
7741 gcc_assert (move[blocked].links != -1);
7742 /* Replace src of blocking move with scratch reg. */
7743 move[move[blocked].links].src = scratch;
7744 /* Make dependent on scratch move occurring. */
7745 move[blocked].links = size;
7746 size++;
7749 while (blocked != -1);
7751 return true;
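/* An illustrative run of the algorithm above (a sketch, assuming a
   QImode scratch): rotating an SImode register in place by 16 bits
   gives size == 4, offset == 2 and the move list 0->2, 1->3, 2->0,
   3->1.  Every dst is also a src, so the first pass performs no move
   and we deadlock.  The scratch breaks one 2-cycle: byte 2, the dst
   of the blocked move 0->2, is first saved to scratch, 0->2 can then
   be performed, and 2->0 finally reads byte 2's old value from
   scratch.  */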
7755 /* Worker function for `ADJUST_INSN_LENGTH'. */
7756 /* Modifies the length assigned to instruction INSN.
7757 LEN is the initially computed length of the insn. */
7760 avr_adjust_insn_length (rtx insn, int len)
7762 rtx *op = recog_data.operand;
7763 enum attr_adjust_len adjust_len;
7765 /* Some complex insns don't need length adjustment and therefore
7766 the length need not/must not be adjusted for these insns.
7767 It is easier to state this in an insn attribute "adjust_len" than
7768 to clutter up code here... */
7770 if (-1 == recog_memoized (insn))
7772 return len;
7775 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7777 adjust_len = get_attr_adjust_len (insn);
7779 if (adjust_len == ADJUST_LEN_NO)
7781 /* Nothing to adjust: The length from attribute "length" is fine.
7782 This is the default. */
7784 return len;
7787 /* Extract insn's operands. */
7789 extract_constrain_insn_cached (insn);
7791 /* Dispatch to right function. */
7793 switch (adjust_len)
7795 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7796 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7797 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7799 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7801 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7802 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7804 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7805 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7806 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7807 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7808 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7809 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7810 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7812 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7813 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7814 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
7816 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7817 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7818 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7819 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7820 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7822 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7823 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7824 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7826 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7827 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7828 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7830 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7831 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7832 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7834 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7835 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7836 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7838 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7840 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7842 default:
7843 gcc_unreachable();
7846 return len;
7849 /* Return nonzero if register REG is dead after INSN. */
7852 reg_unused_after (rtx insn, rtx reg)
7854 return (dead_or_set_p (insn, reg)
7855 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7858 /* Return nonzero if REG is not used after INSN.
7859 We assume REG is a reload reg, and therefore does
7860 not live past labels. It may live past calls or jumps though. */
7863 _reg_unused_after (rtx insn, rtx reg)
7865 enum rtx_code code;
7866 rtx set;
7868 /* If the reg is set by this instruction, then it is safe for our
7869 case. Disregard the case where this is a store to memory, since
7870 we are checking a register used in the store address. */
7871 set = single_set (insn);
7872 if (set && GET_CODE (SET_DEST (set)) != MEM
7873 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7874 return 1;
7876 while ((insn = NEXT_INSN (insn)))
7878 rtx set;
7879 code = GET_CODE (insn);
7881 #if 0
7882 /* If this is a label that existed before reload, then the register
7883 is dead here. However, if this is a label added by reorg, then
7884 the register may still be live here. We can't tell the difference,
7885 so we just ignore labels completely. */
7886 if (code == CODE_LABEL)
7887 return 1;
7888 /* else */
7889 #endif
7891 if (!INSN_P (insn))
7892 continue;
7894 if (code == JUMP_INSN)
7895 return 0;
7897 /* If this is a sequence, we must handle the insns in it all at once.
7898 We could have for instance a call that sets the target register,
7899 and an insn in a delay slot that uses the register. In this case,
7900 we must return 0. */
7901 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7903 int i;
7904 int retval = 0;
7906 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7908 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7909 rtx set = single_set (this_insn);
7911 if (CALL_P (this_insn))
7912 code = CALL_INSN;
7913 else if (JUMP_P (this_insn))
7915 if (INSN_ANNULLED_BRANCH_P (this_insn))
7916 return 0;
7917 code = JUMP_INSN;
7920 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7921 return 0;
7922 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7924 if (GET_CODE (SET_DEST (set)) != MEM)
7925 retval = 1;
7926 else
7927 return 0;
7929 if (set == 0
7930 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7931 return 0;
7933 if (retval == 1)
7934 return 1;
7935 else if (code == JUMP_INSN)
7936 return 0;
7939 if (code == CALL_INSN)
7941 rtx tem;
7942 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7943 if (GET_CODE (XEXP (tem, 0)) == USE
7944 && REG_P (XEXP (XEXP (tem, 0), 0))
7945 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7946 return 0;
7947 if (call_used_regs[REGNO (reg)])
7948 return 1;
7951 set = single_set (insn);
7953 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7954 return 0;
7955 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7956 return GET_CODE (SET_DEST (set)) != MEM;
7957 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7958 return 0;
7960 return 1;
7964 /* Implement `TARGET_ASM_INTEGER'. */
7965 /* Target hook for assembling integer objects. The AVR version needs
7966 special handling for references to certain labels. */
7968 static bool
7969 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
7971 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
7972 && text_segment_operand (x, VOIDmode))
7974 fputs ("\t.word\tgs(", asm_out_file);
7975 output_addr_const (asm_out_file, x);
7976 fputs (")\n", asm_out_file);
7978 return true;
7980 else if (GET_MODE (x) == PSImode)
7982 /* This needs binutils 2.23+, see PR binutils/13503 */
7984 fputs ("\t.byte\tlo8(", asm_out_file);
7985 output_addr_const (asm_out_file, x);
7986 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7988 fputs ("\t.byte\thi8(", asm_out_file);
7989 output_addr_const (asm_out_file, x);
7990 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7992 fputs ("\t.byte\thh8(", asm_out_file);
7993 output_addr_const (asm_out_file, x);
7994 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
7996 return true;
7998 else if (CONST_FIXED_P (x))
8000 unsigned n;
8002 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8004 for (n = 0; n < size; n++)
8006 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8007 default_assemble_integer (xn, 1, aligned_p);
8010 return true;
8013 return default_assemble_integer (x, size, aligned_p);
8017 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8018 /* Return value is nonzero if pseudos that have been
8019 assigned to registers of class CLASS would likely be spilled
8020 because registers of CLASS are needed for spill registers. */
8022 static bool
8023 avr_class_likely_spilled_p (reg_class_t c)
8025 return (c != ALL_REGS && c != ADDW_REGS);
8029 /* Valid attributes:
8030 progmem - Put data into program memory.
8031 signal - Make a function a hardware interrupt handler.
8032 After the function prologue, interrupts remain disabled.
8033 interrupt - Make a function a hardware interrupt handler. Before the
8034 function prologue, interrupts are enabled by means of SEI.
8035 naked - Don't generate function prologue/epilogue and RET
8036 instruction. */
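/* Typical uses, for illustration only (hypothetical user code):

       const char table[] __attribute__((progmem)) = "kept in flash";
       void isr_1 (void) __attribute__((signal));      // ISR, no SEI
       void isr_2 (void) __attribute__((interrupt));   // ISR with SEI
       void boot (void) __attribute__((naked));        // bare function
*/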
8038 /* Handle a "progmem" attribute; arguments as in
8039 struct attribute_spec.handler. */
8041 static tree
8042 avr_handle_progmem_attribute (tree *node, tree name,
8043 tree args ATTRIBUTE_UNUSED,
8044 int flags ATTRIBUTE_UNUSED,
8045 bool *no_add_attrs)
8047 if (DECL_P (*node))
8049 if (TREE_CODE (*node) == TYPE_DECL)
8051 /* This is really a decl attribute, not a type attribute,
8052 but try to handle it for GCC 3.0 backwards compatibility. */
8054 tree type = TREE_TYPE (*node);
8055 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8056 tree newtype = build_type_attribute_variant (type, attr);
8058 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8059 TREE_TYPE (*node) = newtype;
8060 *no_add_attrs = true;
8062 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8064 *no_add_attrs = false;
8066 else
8068 warning (OPT_Wattributes, "%qE attribute ignored",
8069 name);
8070 *no_add_attrs = true;
8074 return NULL_TREE;
8077 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8078 struct attribute_spec.handler. */
8080 static tree
8081 avr_handle_fndecl_attribute (tree *node, tree name,
8082 tree args ATTRIBUTE_UNUSED,
8083 int flags ATTRIBUTE_UNUSED,
8084 bool *no_add_attrs)
8086 if (TREE_CODE (*node) != FUNCTION_DECL)
8088 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8089 name);
8090 *no_add_attrs = true;
8093 return NULL_TREE;
8096 static tree
8097 avr_handle_fntype_attribute (tree *node, tree name,
8098 tree args ATTRIBUTE_UNUSED,
8099 int flags ATTRIBUTE_UNUSED,
8100 bool *no_add_attrs)
8102 if (TREE_CODE (*node) != FUNCTION_TYPE)
8104 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8105 name);
8106 *no_add_attrs = true;
8109 return NULL_TREE;
8113 /* AVR attributes. */
8114 static const struct attribute_spec
8115 avr_attribute_table[] =
8117 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8118 affects_type_identity } */
8119 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
8120 false },
8121 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8122 false },
8123 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8124 false },
8125 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
8126 false },
8127 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
8128 false },
8129 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
8130 false },
8131 { NULL, 0, 0, false, false, false, NULL, false }
8135 /* Check whether DECL is to be placed in program memory space by
8136 means of attribute `progmem' or some address-space qualifier.
8137 Return non-zero if DECL is data that must end up in Flash and
8138 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8140 Return 2 if DECL is located in 24-bit flash address-space
8141 Return 1 if DECL is located in 16-bit flash address-space
8142 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8143 Return 0 otherwise */
8146 avr_progmem_p (tree decl, tree attributes)
8148 tree a;
8150 if (TREE_CODE (decl) != VAR_DECL)
8151 return 0;
8153 if (avr_decl_memx_p (decl))
8154 return 2;
8156 if (avr_decl_flash_p (decl))
8157 return 1;
8159 if (NULL_TREE
8160 != lookup_attribute ("progmem", attributes))
8161 return -1;
8163 a = decl;
8166 a = TREE_TYPE(a);
8167 while (TREE_CODE (a) == ARRAY_TYPE);
8169 if (a == error_mark_node)
8170 return 0;
8172 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
8173 return -1;
8175 return 0;
8179 /* Scan type TYP for pointer references to address space ASn.
8180 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8181 the AS are also declared to be CONST.
8182 Otherwise, return the respective address space, i.e. a value != 0. */
8184 static addr_space_t
8185 avr_nonconst_pointer_addrspace (tree typ)
8187 while (ARRAY_TYPE == TREE_CODE (typ))
8188 typ = TREE_TYPE (typ);
8190 if (POINTER_TYPE_P (typ))
8192 addr_space_t as;
8193 tree target = TREE_TYPE (typ);
8195 /* Pointer to function: Test the function's return type. */
8197 if (FUNCTION_TYPE == TREE_CODE (target))
8198 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
8200 /* "Ordinary" pointers... */
8202 while (TREE_CODE (target) == ARRAY_TYPE)
8203 target = TREE_TYPE (target);
8205 /* Pointers to non-generic address space must be const.
8206 Refuse address spaces outside the device's flash. */
8208 as = TYPE_ADDR_SPACE (target);
8210 if (!ADDR_SPACE_GENERIC_P (as)
8211 && (!TYPE_READONLY (target)
8212 || avr_addrspace[as].segment >= avr_current_device->n_flash))
8214 return as;
8217 /* Scan pointer's target type. */
8219 return avr_nonconst_pointer_addrspace (target);
8222 return ADDR_SPACE_GENERIC;
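/* Two illustrative declarations (hypothetical user code): for

       const __flash char *p;   // pointer in RAM to const flash data

   this function returns ADDR_SPACE_GENERIC and the declaration is
   accepted, whereas for

       __flash char *q;         // pointed-to type is not const

   it returns ADDR_SPACE_FLASH, which avr_pgm_check_var_decl below
   turns into an error.  */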
8226 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8227 go along with the CONST qualifier. Writing to these address spaces should
8228 be detected and complained about as early as possible. */
8230 static bool
8231 avr_pgm_check_var_decl (tree node)
8233 const char *reason = NULL;
8235 addr_space_t as = ADDR_SPACE_GENERIC;
8237 gcc_assert (as == 0);
8239 if (avr_log.progmem)
8240 avr_edump ("%?: %t\n", node);
8242 switch (TREE_CODE (node))
8244 default:
8245 break;
8247 case VAR_DECL:
8248 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8249 reason = "variable";
8250 break;
8252 case PARM_DECL:
8253 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8254 reason = "function parameter";
8255 break;
8257 case FIELD_DECL:
8258 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8259 reason = "structure field";
8260 break;
8262 case FUNCTION_DECL:
8263 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
8265 reason = "return type of function";
8266 break;
8268 case POINTER_TYPE:
8269 if (as = avr_nonconst_pointer_addrspace (node), as)
8270 reason = "pointer";
8271 break;
8274 if (reason)
8276 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8278 if (TYPE_P (node))
8279 error ("%qT uses address space %qs beyond flash of %qs",
8280 node, avr_addrspace[as].name, avr_current_device->name);
8281 else
8282 error ("%s %q+D uses address space %qs beyond flash of %qs",
8283 reason, node, avr_addrspace[as].name,
8284 avr_current_device->name);
8286 else
8288 if (TYPE_P (node))
8289 error ("pointer targeting address space %qs must be const in %qT",
8290 avr_addrspace[as].name, node);
8291 else
8292 error ("pointer targeting address space %qs must be const"
8293 " in %s %q+D",
8294 avr_addrspace[as].name, reason, node);
8298 return reason == NULL;
8302 /* Add the section attribute if the variable is in progmem. */
8304 static void
8305 avr_insert_attributes (tree node, tree *attributes)
8307 avr_pgm_check_var_decl (node);
8309 if (TREE_CODE (node) == VAR_DECL
8310 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8311 && avr_progmem_p (node, *attributes))
8313 addr_space_t as;
8314 tree node0 = node;
8316 /* For C++, we have to peel arrays in order to get correct
8317 determination of readonlyness. */
8320 node0 = TREE_TYPE (node0);
8321 while (TREE_CODE (node0) == ARRAY_TYPE);
8323 if (error_mark_node == node0)
8324 return;
8326 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8328 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
8330 error ("variable %q+D located in address space %qs"
8331 " beyond flash of %qs",
8332 node, avr_addrspace[as].name, avr_current_device->name);
8335 if (!TYPE_READONLY (node0)
8336 && !TREE_READONLY (node))
8338 const char *reason = "__attribute__((progmem))";
8340 if (!ADDR_SPACE_GENERIC_P (as))
8341 reason = avr_addrspace[as].name;
8343 if (avr_log.progmem)
8344 avr_edump ("\n%?: %t\n%t\n", node, node0);
8346 error ("variable %q+D must be const in order to be put into"
8347 " read-only section by means of %qs", node, reason);
8353 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8354 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8355 /* Track need of __do_clear_bss. */
8357 void
8358 avr_asm_output_aligned_decl_common (FILE * stream,
8359 const_tree decl ATTRIBUTE_UNUSED,
8360 const char *name,
8361 unsigned HOST_WIDE_INT size,
8362 unsigned int align, bool local_p)
8364 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8365 There is no need to trigger __do_clear_bss code for them. */
8367 if (!STR_PREFIX_P (name, "__gnu_lto"))
8368 avr_need_clear_bss_p = true;
8370 if (local_p)
8371 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8372 else
8373 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8377 /* Unnamed section callback for data_section
8378 to track need of __do_copy_data. */
8380 static void
8381 avr_output_data_section_asm_op (const void *data)
8383 avr_need_copy_data_p = true;
8385 /* Dispatch to default. */
8386 output_section_asm_op (data);
8390 /* Unnamed section callback for bss_section
8391 to track need of __do_clear_bss. */
8393 static void
8394 avr_output_bss_section_asm_op (const void *data)
8396 avr_need_clear_bss_p = true;
8398 /* Dispatch to default. */
8399 output_section_asm_op (data);
8403 /* Unnamed section callback for progmem*.data sections. */
8405 static void
8406 avr_output_progmem_section_asm_op (const void *data)
8408 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8409 (const char*) data);
8413 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8415 static void
8416 avr_asm_init_sections (void)
8418 /* Set up a section for jump tables. Alignment is handled by
8419 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8421 if (AVR_HAVE_JMP_CALL)
8423 progmem_swtable_section
8424 = get_unnamed_section (0, output_section_asm_op,
8425 "\t.section\t.progmem.gcc_sw_table"
8426 ",\"a\",@progbits");
8428 else
8430 progmem_swtable_section
8431 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8432 "\t.section\t.progmem.gcc_sw_table"
8433 ",\"ax\",@progbits");
8436 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8437 resp. `avr_need_copy_data_p'. */
8439 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8440 data_section->unnamed.callback = avr_output_data_section_asm_op;
8441 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8445 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8447 static section*
8448 avr_asm_function_rodata_section (tree decl)
8450 /* If a function is unused and optimized out by -ffunction-sections
8451 and --gc-sections, ensure that the same will happen for its jump
8452 tables by putting them into individual sections. */
8454 unsigned int flags;
8455 section * frodata;
8457 /* Get the frodata section from the default function in varasm.c
8458 but treat function-associated data like jump tables as code
8459 rather than as user-defined data. AVR has no constant pools. */
8461 int fdata = flag_data_sections;
8463 flag_data_sections = flag_function_sections;
8464 frodata = default_function_rodata_section (decl);
8465 flag_data_sections = fdata;
8466 flags = frodata->common.flags;
8469 if (frodata != readonly_data_section
8470 && flags & SECTION_NAMED)
8472 /* Adjust section flags and replace section name prefix. */
8474 unsigned int i;
8476 static const char* const prefix[] =
8478 ".rodata", ".progmem.gcc_sw_table",
8479 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8482 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8484 const char * old_prefix = prefix[i];
8485 const char * new_prefix = prefix[i+1];
8486 const char * name = frodata->named.name;
8488 if (STR_PREFIX_P (name, old_prefix))
8490 const char *rname = ACONCAT ((new_prefix,
8491 name + strlen (old_prefix), NULL));
8492 flags &= ~SECTION_CODE;
8493 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8495 return get_section (rname, flags, frodata->named.decl);
8500 return progmem_swtable_section;
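/* For example (illustrative): with -ffunction-sections, a jump table
   that default_function_rodata_section would place in ".rodata.foo"
   is redirected to ".progmem.gcc_sw_table.foo".  SECTION_CODE is set
   exactly when the device has no JMP/CALL, matching the flags of the
   unnamed progmem_swtable_section set up in avr_asm_init_sections.  */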
8504 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8505 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8507 static void
8508 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
8510 if (flags & AVR_SECTION_PROGMEM)
8512 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8513 const char *old_prefix = ".rodata";
8514 const char *new_prefix = avr_addrspace[as].section_name;
8516 if (STR_PREFIX_P (name, old_prefix))
8518 const char *sname = ACONCAT ((new_prefix,
8519 name + strlen (old_prefix), NULL));
8520 default_elf_asm_named_section (sname, flags, decl);
8521 return;
8524 default_elf_asm_named_section (new_prefix, flags, decl);
8525 return;
8528 if (!avr_need_copy_data_p)
8529 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8530 || STR_PREFIX_P (name, ".rodata")
8531 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
8533 if (!avr_need_clear_bss_p)
8534 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8536 default_elf_asm_named_section (name, flags, decl);
8540 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8542 static unsigned int
8543 avr_section_type_flags (tree decl, const char *name, int reloc)
8545 unsigned int flags = default_section_type_flags (decl, name, reloc);
8547 if (STR_PREFIX_P (name, ".noinit"))
8549 if (decl && TREE_CODE (decl) == VAR_DECL
8550 && DECL_INITIAL (decl) == NULL_TREE)
8551 flags |= SECTION_BSS; /* @nobits */
8552 else
8553 warning (0, "only uninitialized variables can be placed in the "
8554 ".noinit section");
8557 if (decl && DECL_P (decl)
8558 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8560 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8562 /* Attribute progmem puts data in generic address space.
8563 Set section flags as if it were in __flash to get the right
8564 section prefix in the remainder. */
8566 if (ADDR_SPACE_GENERIC_P (as))
8567 as = ADDR_SPACE_FLASH;
8569 flags |= as * SECTION_MACH_DEP;
8570 flags &= ~SECTION_WRITE;
8571 flags &= ~SECTION_BSS;
8574 return flags;
8578 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8580 static void
8581 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8583 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8584 readily available, see PR34734. So we postpone the warning
8585 about uninitialized data in program memory section until here. */
8587 if (new_decl_p
8588 && decl && DECL_P (decl)
8589 && NULL_TREE == DECL_INITIAL (decl)
8590 && !DECL_EXTERNAL (decl)
8591 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8593 warning (OPT_Wuninitialized,
8594 "uninitialized variable %q+D put into "
8595 "program memory area", decl);
8598 default_encode_section_info (decl, rtl, new_decl_p);
8600 if (decl && DECL_P (decl)
8601 && TREE_CODE (decl) != FUNCTION_DECL
8602 && MEM_P (rtl)
8603 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8605 rtx sym = XEXP (rtl, 0);
8606 tree type = TREE_TYPE (decl);
8607 if (type == error_mark_node)
8608 return;
8609 addr_space_t as = TYPE_ADDR_SPACE (type);
8611 /* PSTR strings are in generic space but located in flash:
8612 patch address space. */
8614 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8615 as = ADDR_SPACE_FLASH;
8617 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8622 /* Implement `TARGET_ASM_SELECT_SECTION' */
8624 static section *
8625 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8627 section * sect = default_elf_select_section (decl, reloc, align);
8629 if (decl && DECL_P (decl)
8630 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8632 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8634 /* __progmem__ goes in generic space but shall be allocated to
8635 .progmem.data */
8637 if (ADDR_SPACE_GENERIC_P (as))
8638 as = ADDR_SPACE_FLASH;
8640 if (sect->common.flags & SECTION_NAMED)
8642 const char * name = sect->named.name;
8643 const char * old_prefix = ".rodata";
8644 const char * new_prefix = avr_addrspace[as].section_name;
8646 if (STR_PREFIX_P (name, old_prefix))
8648 const char *sname = ACONCAT ((new_prefix,
8649 name + strlen (old_prefix), NULL));
8650 return get_section (sname, sect->common.flags, sect->named.decl);
8654 if (!progmem_section[as])
8656 progmem_section[as]
8657 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8658 avr_addrspace[as].section_name);
8661 return progmem_section[as];
8664 return sect;
8667 /* Implement `TARGET_ASM_FILE_START'. */
8668 /* Outputs some text at the start of each assembler file. */
8670 static void
8671 avr_file_start (void)
8673 int sfr_offset = avr_current_arch->sfr_offset;
8675 if (avr_current_arch->asm_only)
8676 error ("MCU %qs supported for assembler only", avr_current_device->name);
8678 default_file_start ();
8680 /* Print I/O addresses of some SFRs used with IN and OUT. */
8682 if (AVR_HAVE_SPH)
8683 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8685 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8686 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8687 if (AVR_HAVE_RAMPZ)
8688 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8689 if (AVR_HAVE_RAMPY)
8690 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8691 if (AVR_HAVE_RAMPX)
8692 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8693 if (AVR_HAVE_RAMPD)
8694 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8695 if (AVR_XMEGA)
8696 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
8697 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8698 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8702 /* Implement `TARGET_ASM_FILE_END'. */
8703 /* Outputs to the stdio stream FILE some
8704 appropriate text to go at the end of an assembler file. */
8706 static void
8707 avr_file_end (void)
8709 /* Output these only if there is anything in the
8710 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8711 input section(s) -- some code size can be saved by not
8712 linking in the initialization code from libgcc if the
8713 respective sections are empty, see PR18145. */
8715 if (avr_need_copy_data_p)
8716 fputs (".global __do_copy_data\n", asm_out_file);
8718 if (avr_need_clear_bss_p)
8719 fputs (".global __do_clear_bss\n", asm_out_file);
8723 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8724 /* Choose the order in which to allocate hard registers for
8725 pseudo-registers local to a basic block.
8727 Store the desired register order in the array `reg_alloc_order'.
8728 Element 0 should be the register to allocate first; element 1, the
8729 next register; and so on. */
8731 void
8732 avr_adjust_reg_alloc_order (void)
8734 unsigned int i;
8735 static const int order_0[] =
8737 24, 25,
8738 18, 19, 20, 21, 22, 23,
8739 30, 31,
8740 26, 27, 28, 29,
8741 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8742 0, 1,
8743 32, 33, 34, 35
8745 static const int order_1[] =
8747 18, 19, 20, 21, 22, 23, 24, 25,
8748 30, 31,
8749 26, 27, 28, 29,
8750 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8751 0, 1,
8752 32, 33, 34, 35
8754 static const int order_2[] =
8756 25, 24, 23, 22, 21, 20, 19, 18,
8757 30, 31,
8758 26, 27, 28, 29,
8759 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8760 1, 0,
8761 32, 33, 34, 35
8764 const int *order = (TARGET_ORDER_1 ? order_1 :
8765 TARGET_ORDER_2 ? order_2 :
8766 order_0);
8767 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8768 reg_alloc_order[i] = order[i];
8772 /* Implement `TARGET_REGISTER_MOVE_COST' */
8774 static int
8775 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8776 reg_class_t from, reg_class_t to)
8778 return (from == STACK_REG ? 6
8779 : to == STACK_REG ? 12
8780 : 2);
8784 /* Implement `TARGET_MEMORY_MOVE_COST' */
8786 static int
8787 avr_memory_move_cost (enum machine_mode mode,
8788 reg_class_t rclass ATTRIBUTE_UNUSED,
8789 bool in ATTRIBUTE_UNUSED)
8791 return (mode == QImode ? 2
8792 : mode == HImode ? 4
8793 : mode == SImode ? 8
8794 : mode == SFmode ? 8
8795 : 16);
8799 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8800 cost of an RTX operand given its context. X is the rtx of the
8801 operand, MODE is its mode, and OUTER is the rtx_code of this
8802 operand's parent operator. */
8804 static int
8805 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
8806 int opno, bool speed)
8808 enum rtx_code code = GET_CODE (x);
8809 int total;
8811 switch (code)
8813 case REG:
8814 case SUBREG:
8815 return 0;
8817 case CONST_INT:
8818 case CONST_FIXED:
8819 case CONST_DOUBLE:
8820 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
8822 default:
8823 break;
8826 total = 0;
8827 avr_rtx_costs (x, code, outer, opno, &total, speed);
8828 return total;
8831 /* Worker function for AVR backend's rtx_cost function.
8832 X is the rtx expression whose cost is to be calculated.
8833 Return true if the complete cost has been computed.
8834 Return false if subexpressions should be scanned.
8835 In either case, *TOTAL contains the cost result. */
8837 static bool
8838 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8839 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8841 enum rtx_code code = (enum rtx_code) codearg;
8842 enum machine_mode mode = GET_MODE (x);
8843 HOST_WIDE_INT val;
8845 switch (code)
8847 case CONST_INT:
8848 case CONST_FIXED:
8849 case CONST_DOUBLE:
8850 case SYMBOL_REF:
8851 case CONST:
8852 case LABEL_REF:
8853 /* Immediate constants are as cheap as registers. */
8854 *total = 0;
8855 return true;
8857 case MEM:
8858 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8859 return true;
8861 case NEG:
8862 switch (mode)
8864 case QImode:
8865 case SFmode:
8866 *total = COSTS_N_INSNS (1);
8867 break;
8869 case HImode:
8870 case PSImode:
8871 case SImode:
8872 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8873 break;
8875 default:
8876 return false;
8878 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8879 return true;
8881 case ABS:
8882 switch (mode)
8884 case QImode:
8885 case SFmode:
8886 *total = COSTS_N_INSNS (1);
8887 break;
8889 default:
8890 return false;
8892 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8893 return true;
8895 case NOT:
8896 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8897 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8898 return true;
8900 case ZERO_EXTEND:
8901 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8902 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8903 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8904 return true;
8906 case SIGN_EXTEND:
8907 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8908 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8909 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8910 return true;
8912 case PLUS:
8913 switch (mode)
8915 case QImode:
8916 if (AVR_HAVE_MUL
8917 && MULT == GET_CODE (XEXP (x, 0))
8918 && register_operand (XEXP (x, 1), QImode))
8920 /* multiply-add */
8921 *total = COSTS_N_INSNS (speed ? 4 : 3);
8922 /* multiply-add with constant: will be split and the constant loaded separately. */
8923 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8924 *total = COSTS_N_INSNS (1) + *total;
8925 return true;
8927 *total = COSTS_N_INSNS (1);
8928 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8929 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8930 break;
8932 case HImode:
8933 if (AVR_HAVE_MUL
8934 && (MULT == GET_CODE (XEXP (x, 0))
8935 || ASHIFT == GET_CODE (XEXP (x, 0)))
8936 && register_operand (XEXP (x, 1), HImode)
8937 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8938 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8940 /* multiply-add */
8941 *total = COSTS_N_INSNS (speed ? 5 : 4);
8942 /* multiply-add with constant: will be split and the constant loaded separately. */
8943 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8944 *total = COSTS_N_INSNS (1) + *total;
8945 return true;
8947 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8949 *total = COSTS_N_INSNS (2);
8950 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8951 speed);
8953 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8954 *total = COSTS_N_INSNS (1);
8955 else
8956 *total = COSTS_N_INSNS (2);
8957 break;
8959 case PSImode:
8960 if (!CONST_INT_P (XEXP (x, 1)))
8962 *total = COSTS_N_INSNS (3);
8963 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8964 speed);
8966 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8967 *total = COSTS_N_INSNS (2);
8968 else
8969 *total = COSTS_N_INSNS (3);
8970 break;
8972 case SImode:
8973 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8975 *total = COSTS_N_INSNS (4);
8976 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8977 speed);
8979 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8980 *total = COSTS_N_INSNS (1);
8981 else
8982 *total = COSTS_N_INSNS (4);
8983 break;
8985 default:
8986 return false;
8988 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8989 return true;
8991 case MINUS:
8992 if (AVR_HAVE_MUL
8993 && QImode == mode
8994 && register_operand (XEXP (x, 0), QImode)
8995 && MULT == GET_CODE (XEXP (x, 1)))
8997 /* multiply-sub */
8998 *total = COSTS_N_INSNS (speed ? 4 : 3);
8999 /* multiply-sub with constant: will be split and the constant loaded separately. */
9000 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9001 *total = COSTS_N_INSNS (1) + *total;
9002 return true;
9004 if (AVR_HAVE_MUL
9005 && HImode == mode
9006 && register_operand (XEXP (x, 0), HImode)
9007 && (MULT == GET_CODE (XEXP (x, 1))
9008 || ASHIFT == GET_CODE (XEXP (x, 1)))
9009 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
9010 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
9012 /* multiply-sub */
9013 *total = COSTS_N_INSNS (speed ? 5 : 4);
9014 /* multiply-sub with constant: will be split and the constant loaded separately. */
9015 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9016 *total = COSTS_N_INSNS (1) + *total;
9017 return true;
9019 /* FALLTHRU */
9020 case AND:
9021 case IOR:
9022 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9023 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9024 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9026 return true;
9028 case XOR:
9029 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9030 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9031 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9032 return true;
9034 case MULT:
9035 switch (mode)
9037 case QImode:
9038 if (AVR_HAVE_MUL)
9039 *total = COSTS_N_INSNS (!speed ? 3 : 4);
9040 else if (!speed)
9041 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9042 else
9043 return false;
9044 break;
9046 case HImode:
9047 if (AVR_HAVE_MUL)
9049 rtx op0 = XEXP (x, 0);
9050 rtx op1 = XEXP (x, 1);
9051 enum rtx_code code0 = GET_CODE (op0);
9052 enum rtx_code code1 = GET_CODE (op1);
9053 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
9054 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
9056 if (ex0
9057 && (u8_operand (op1, HImode)
9058 || s8_operand (op1, HImode)))
9060 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9061 return true;
9063 if (ex0
9064 && register_operand (op1, HImode))
9066 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9067 return true;
9069 else if (ex0 || ex1)
9071 *total = COSTS_N_INSNS (!speed ? 3 : 5);
9072 return true;
9074 else if (register_operand (op0, HImode)
9075 && (u8_operand (op1, HImode)
9076 || s8_operand (op1, HImode)))
9078 *total = COSTS_N_INSNS (!speed ? 6 : 9);
9079 return true;
9081 else
9082 *total = COSTS_N_INSNS (!speed ? 7 : 10);
9084 else if (!speed)
9085 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9086 else
9087 return false;
9088 break;
9090 case PSImode:
9091 if (!speed)
9092 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9093 else
9094 *total = 10;
9095 break;
9097 case SImode:
9098 if (AVR_HAVE_MUL)
9100 if (!speed)
9102 /* Add some additional costs besides the CALL, like moves etc. */
9104 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9106 else
9108 /* Just a rough estimate. Even with -O2 we don't want bulky
9109 code expanded inline. */
9111 *total = COSTS_N_INSNS (25);
9114 else
9116 if (speed)
9117 *total = COSTS_N_INSNS (300);
9118 else
9119 /* Add some additional costs besides the CALL, like moves etc. */
9120 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9123 return true;
9125 default:
9126 return false;
9128 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9129 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9130 return true;
9132 case DIV:
9133 case MOD:
9134 case UDIV:
9135 case UMOD:
9136 if (!speed)
9137 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9138 else
9139 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
9140 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9141 /* For div/mod with const-int divisor we have at least the cost of
9142 loading the divisor. */
9143 if (CONST_INT_P (XEXP (x, 1)))
9144 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
9145 /* Add some overall penalty for clobbering and moving around registers. */
9146 *total += COSTS_N_INSNS (2);
9147 return true;
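/* A worked example of the accounting above, assuming a speed-optimized
   HImode division: the libcall costs COSTS_N_INSNS (15 * 2), the
   register shuffling penalty adds COSTS_N_INSNS (2), and a const_int
   divisor adds another COSTS_N_INSNS (2) for loading it.  */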
9149 case ROTATE:
9150 switch (mode)
9152 case QImode:
9153 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
9154 *total = COSTS_N_INSNS (1);
9156 break;
9158 case HImode:
9159 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
9160 *total = COSTS_N_INSNS (3);
9162 break;
9164 case SImode:
9165 if (CONST_INT_P (XEXP (x, 1)))
9166 switch (INTVAL (XEXP (x, 1)))
9168 case 8:
9169 case 24:
9170 *total = COSTS_N_INSNS (5);
9171 break;
9172 case 16:
9173 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
9174 break;
9176 break;
9178 default:
9179 return false;
9181 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9182 return true;
9184 case ASHIFT:
9185 switch (mode)
9187 case QImode:
9188 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9190 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9191 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9192 speed);
9194 else
9196 val = INTVAL (XEXP (x, 1));
9197 if (val == 7)
9198 *total = COSTS_N_INSNS (3);
9199 else if (val >= 0 && val <= 7)
9200 *total = COSTS_N_INSNS (val);
9201 else
9202 *total = COSTS_N_INSNS (1);
9204 break;
9206 case HImode:
9207 if (AVR_HAVE_MUL)
9209 if (const_2_to_7_operand (XEXP (x, 1), HImode)
9210 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9211 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9213 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9214 return true;
9218 if (const1_rtx == (XEXP (x, 1))
9219 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9221 *total = COSTS_N_INSNS (2);
9222 return true;
9225 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9227 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9228 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9229 speed);
9231 else
9232 switch (INTVAL (XEXP (x, 1)))
9234 case 0:
9235 *total = 0;
9236 break;
9237 case 1:
9238 case 8:
9239 *total = COSTS_N_INSNS (2);
9240 break;
9241 case 9:
9242 *total = COSTS_N_INSNS (3);
9243 break;
9244 case 2:
9245 case 3:
9246 case 10:
9247 case 15:
9248 *total = COSTS_N_INSNS (4);
9249 break;
9250 case 7:
9251 case 11:
9252 case 12:
9253 *total = COSTS_N_INSNS (5);
9254 break;
9255 case 4:
9256 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9257 break;
9258 case 6:
9259 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9260 break;
9261 case 5:
9262 *total = COSTS_N_INSNS (!speed ? 5 : 10);
9263 break;
9264 default:
9265 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9266 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9267 speed);
9269 break;
9271 case PSImode:
9272 if (!CONST_INT_P (XEXP (x, 1)))
9274 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9276 else
9277 switch (INTVAL (XEXP (x, 1)))
9279 case 0:
9280 *total = 0;
9281 break;
9282 case 1:
9283 case 8:
9284 case 16:
9285 *total = COSTS_N_INSNS (3);
9286 break;
9287 case 23:
9288 *total = COSTS_N_INSNS (5);
9289 break;
9290 default:
9291 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9292 break;
9294 break;
9296 case SImode:
9297 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9299 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9300 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9301 speed);
9303 else
9304 switch (INTVAL (XEXP (x, 1)))
9306 case 0:
9307 *total = 0;
9308 break;
9309 case 24:
9310 *total = COSTS_N_INSNS (3);
9311 break;
9312 case 1:
9313 case 8:
9314 case 16:
9315 *total = COSTS_N_INSNS (4);
9316 break;
9317 case 31:
9318 *total = COSTS_N_INSNS (6);
9319 break;
9320 case 2:
9321 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9322 break;
9323 default:
9324 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9325 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9326 speed);
9328 break;
9330 default:
9331 return false;
9333 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9334 return true;
9336 case ASHIFTRT:
9337 switch (mode)
9339 case QImode:
9340 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9342 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9343 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9344 speed);
9346 else
9348 val = INTVAL (XEXP (x, 1));
9349 if (val == 6)
9350 *total = COSTS_N_INSNS (4);
9351 else if (val == 7)
9352 *total = COSTS_N_INSNS (2);
9353 else if (val >= 0 && val <= 7)
9354 *total = COSTS_N_INSNS (val);
9355 else
9356 *total = COSTS_N_INSNS (1);
9358 break;
9360 case HImode:
9361 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9363 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9364 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9365 speed);
9367 else
9368 switch (INTVAL (XEXP (x, 1)))
9370 case 0:
9371 *total = 0;
9372 break;
9373 case 1:
9374 *total = COSTS_N_INSNS (2);
9375 break;
9376 case 15:
9377 *total = COSTS_N_INSNS (3);
9378 break;
9379 case 2:
9380 case 7:
9381 case 8:
9382 case 9:
9383 *total = COSTS_N_INSNS (4);
9384 break;
9385 case 10:
9386 case 14:
9387 *total = COSTS_N_INSNS (5);
9388 break;
9389 case 11:
9390 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9391 break;
9392 case 12:
9393 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9394 break;
9395 case 6:
9396 case 13:
9397 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9398 break;
9399 default:
9400 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9401 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9402 speed);
9404 break;
9406 case PSImode:
9407 if (!CONST_INT_P (XEXP (x, 1)))
9409 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9411 else
9412 switch (INTVAL (XEXP (x, 1)))
9414 case 0:
9415 *total = 0;
9416 break;
9417 case 1:
9418 *total = COSTS_N_INSNS (3);
9419 break;
9420 case 16:
9421 case 8:
9422 *total = COSTS_N_INSNS (5);
9423 break;
9424 case 23:
9425 *total = COSTS_N_INSNS (4);
9426 break;
9427 default:
9428 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9429 break;
9431 break;
9433 case SImode:
9434 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9436 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9437 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9438 speed);
9440 else
9441 switch (INTVAL (XEXP (x, 1)))
9443 case 0:
9444 *total = 0;
9445 break;
9446 case 1:
9447 *total = COSTS_N_INSNS (4);
9448 break;
9449 case 8:
9450 case 16:
9451 case 24:
9452 *total = COSTS_N_INSNS (6);
9453 break;
9454 case 2:
9455 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9456 break;
9457 case 31:
9458 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9459 break;
9460 default:
9461 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9462 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9463 speed);
9465 break;
9467 default:
9468 return false;
9470 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9471 return true;
9473 case LSHIFTRT:
9474 switch (mode)
9476 case QImode:
9477 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9479 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9480 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9481 speed);
9483 else
9485 val = INTVAL (XEXP (x, 1));
9486 if (val == 7)
9487 *total = COSTS_N_INSNS (3);
9488 else if (val >= 0 && val <= 7)
9489 *total = COSTS_N_INSNS (val);
9490 else
9491 *total = COSTS_N_INSNS (1);
9493 break;
9495 case HImode:
9496 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9498 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9499 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9500 speed);
9502 else
9503 switch (INTVAL (XEXP (x, 1)))
9505 case 0:
9506 *total = 0;
9507 break;
9508 case 1:
9509 case 8:
9510 *total = COSTS_N_INSNS (2);
9511 break;
9512 case 9:
9513 *total = COSTS_N_INSNS (3);
9514 break;
9515 case 2:
9516 case 10:
9517 case 15:
9518 *total = COSTS_N_INSNS (4);
9519 break;
9520 case 7:
9521 case 11:
9522 *total = COSTS_N_INSNS (5);
9523 break;
9524 case 3:
9525 case 12:
9526 case 13:
9527 case 14:
9528 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9529 break;
9530 case 4:
9531 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9532 break;
9533 case 5:
9534 case 6:
9535 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9536 break;
9537 default:
9538 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9539 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9540 speed);
9542 break;
9544 case PSImode:
9545 if (!CONST_INT_P (XEXP (x, 1)))
9547 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9549 else
9550 switch (INTVAL (XEXP (x, 1)))
9552 case 0:
9553 *total = 0;
9554 break;
9555 case 1:
9556 case 8:
9557 case 16:
9558 *total = COSTS_N_INSNS (3);
9559 break;
9560 case 23:
9561 *total = COSTS_N_INSNS (5);
9562 break;
9563 default:
9564 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9565 break;
9567 break;
9569 case SImode:
9570 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9572 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9573 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9574 speed);
9576 else
9577 switch (INTVAL (XEXP (x, 1)))
9579 case 0:
9580 *total = 0;
9581 break;
9582 case 1:
9583 *total = COSTS_N_INSNS (4);
9584 break;
9585 case 2:
9586 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9587 break;
9588 case 8:
9589 case 16:
9590 case 24:
9591 *total = COSTS_N_INSNS (4);
9592 break;
9593 case 31:
9594 *total = COSTS_N_INSNS (6);
9595 break;
9596 default:
9597 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9598 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9599 speed);
9601 break;
9603 default:
9604 return false;
9606 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9607 return true;
9609 case COMPARE:
9610 switch (GET_MODE (XEXP (x, 0)))
9612 case QImode:
9613 *total = COSTS_N_INSNS (1);
9614 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9615 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9616 break;
9618 case HImode:
9619 *total = COSTS_N_INSNS (2);
9620 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9621 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9622 else if (INTVAL (XEXP (x, 1)) != 0)
9623 *total += COSTS_N_INSNS (1);
9624 break;
9626 case PSImode:
9627 *total = COSTS_N_INSNS (3);
9628 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9629 *total += COSTS_N_INSNS (2);
9630 break;
9632 case SImode:
9633 *total = COSTS_N_INSNS (4);
9634 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9635 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9636 else if (INTVAL (XEXP (x, 1)) != 0)
9637 *total += COSTS_N_INSNS (3);
9638 break;
9640 default:
9641 return false;
9643 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9644 return true;
9646 case TRUNCATE:
9647 if (AVR_HAVE_MUL
9648 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9649 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9650 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9652 if (QImode == mode || HImode == mode)
9654 *total = COSTS_N_INSNS (2);
9655 return true;
9658 break;
9660 default:
9661 break;
9663 return false;
9667 /* Implement `TARGET_RTX_COSTS'. */
9669 static bool
9670 avr_rtx_costs (rtx x, int codearg, int outer_code,
9671 int opno, int *total, bool speed)
9673 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9674 opno, total, speed);
9676 if (avr_log.rtx_costs)
9678 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9679 done, speed ? "speed" : "size", *total, outer_code, x);
9682 return done;
9686 /* Implement `TARGET_ADDRESS_COST'. */
9688 static int
9689 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9690 addr_space_t as ATTRIBUTE_UNUSED,
9691 bool speed ATTRIBUTE_UNUSED)
9693 int cost = 4;
9695 if (GET_CODE (x) == PLUS
9696 && CONST_INT_P (XEXP (x, 1))
9697 && (REG_P (XEXP (x, 0))
9698 || GET_CODE (XEXP (x, 0)) == SUBREG))
9700 if (INTVAL (XEXP (x, 1)) >= 61)
9701 cost = 18;
9703 else if (CONSTANT_ADDRESS_P (x))
9705 if (optimize > 0
9706 && io_address_operand (x, QImode))
9707 cost = 2;
9710 if (avr_log.address_cost)
9711 avr_edump ("\n%?: %d = %r\n", cost, x);
9713 return cost;
9716 /* Test for extra memory constraint 'Q'.
9717 It's a memory address based on the Y or Z pointer with a valid displacement. */
9719 int
9720 extra_constraint_Q (rtx x)
9722 int ok = 0;
9724 if (GET_CODE (XEXP (x,0)) == PLUS
9725 && REG_P (XEXP (XEXP (x,0), 0))
9726 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9727 && (INTVAL (XEXP (XEXP (x,0), 1))
9728 <= MAX_LD_OFFSET (GET_MODE (x))))
9730 rtx xx = XEXP (XEXP (x,0), 0);
9731 int regno = REGNO (xx);
9733 ok = (/* allocate pseudos */
9734 regno >= FIRST_PSEUDO_REGISTER
9735 /* strictly check */
9736 || regno == REG_Z || regno == REG_Y
9737 /* XXX frame & arg pointer checks */
9738 || xx == frame_pointer_rtx
9739 || xx == arg_pointer_rtx);
9741 if (avr_log.constraints)
9742 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9743 ok, reload_completed, reload_in_progress, x);
9746 return ok;
9749 /* Convert condition code CONDITION to the valid AVR condition code. */
9751 RTX_CODE
9752 avr_normalize_condition (RTX_CODE condition)
9754 switch (condition)
9756 case GT:
9757 return GE;
9758 case GTU:
9759 return GEU;
9760 case LE:
9761 return LT;
9762 case LEU:
9763 return LTU;
9764 default:
9765 gcc_unreachable ();
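/* A sketch of why this normalization pays off: AVR provides BRGE/BRLT
   (and BRSH/BRLO for unsigned) but no BRGT/BRLE branches, so GT, GTU,
   LE and LEU are "difficult" conditions.  Whenever the caller may
   adjust the compared constant (see `avr_simplify_comparison_p'),

       if (x > 41)    ; difficult, no direct branch

   can be rewritten as

       if (x >= 42)   ; easy, maps to BRGE

   which is the transformation this mapping enables.  */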
9769 /* Helper function for `avr_reorg'. */
9771 static rtx
9772 avr_compare_pattern (rtx insn)
9774 rtx pattern = single_set (insn);
9776 if (pattern
9777 && NONJUMP_INSN_P (insn)
9778 && SET_DEST (pattern) == cc0_rtx
9779 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9781 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9782 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9784 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9785 They must not be swapped, thus skip them. */
9787 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9788 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9789 return pattern;
9792 return NULL_RTX;
9795 /* Helper function for `avr_reorg'. */
9797 /* Expansion of switch/case decision trees leads to code like
9799 cc0 = compare (Reg, Num)
9800 if (cc0 == 0)
9801 goto L1
9803 cc0 = compare (Reg, Num)
9804 if (cc0 > 0)
9805 goto L2
9807 The second comparison is superfluous and can be deleted.
9808 The second jump condition can be transformed from a
9809 "difficult" one to a "simple" one because "cc0 > 0" and
9810 "cc0 >= 0" will have the same effect here.
9812 This function relies on the way switch/case is being expanded
9813 as a binary decision tree. For example code see PR 49903.
9815 Return TRUE if optimization performed.
9816 Return FALSE if nothing changed.
9818 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9820 We don't want to do this in text peephole because it is
9821 tedious to work out jump offsets there and the second comparison
9822 might have been transformed by `avr_reorg'.
9824 RTL peephole won't do because peephole2 does not scan across
9825 basic blocks. */
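/* As a C-level illustration (hypothetical source code, assuming the
   usual binary decision tree expansion), a fragment like

       if (x == 7)
         f1 ();
       else if (x > 7)
         f2 ();

   expands to the compare/branch/compare/branch shape handled here:
   the second compare repeats the first one, and because the EQ case
   was already taken, GT may be relaxed to GE.  */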
9827 static bool
9828 avr_reorg_remove_redundant_compare (rtx insn1)
9830 rtx comp1, ifelse1, xcond1, branch1;
9831 rtx comp2, ifelse2, xcond2, branch2, insn2;
9832 enum rtx_code code;
9833 rtx jump, target, cond;
9835 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9837 branch1 = next_nonnote_nondebug_insn (insn1);
9838 if (!branch1 || !JUMP_P (branch1))
9839 return false;
9841 insn2 = next_nonnote_nondebug_insn (branch1);
9842 if (!insn2 || !avr_compare_pattern (insn2))
9843 return false;
9845 branch2 = next_nonnote_nondebug_insn (insn2);
9846 if (!branch2 || !JUMP_P (branch2))
9847 return false;
9849 comp1 = avr_compare_pattern (insn1);
9850 comp2 = avr_compare_pattern (insn2);
9851 xcond1 = single_set (branch1);
9852 xcond2 = single_set (branch2);
9854 if (!comp1 || !comp2
9855 || !rtx_equal_p (comp1, comp2)
9856 || !xcond1 || SET_DEST (xcond1) != pc_rtx
9857 || !xcond2 || SET_DEST (xcond2) != pc_rtx
9858 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
9859 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
9861 return false;
9864 comp1 = SET_SRC (comp1);
9865 ifelse1 = SET_SRC (xcond1);
9866 ifelse2 = SET_SRC (xcond2);
9868 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9870 if (EQ != GET_CODE (XEXP (ifelse1, 0))
9871 || !REG_P (XEXP (comp1, 0))
9872 || !CONST_INT_P (XEXP (comp1, 1))
9873 || XEXP (ifelse1, 2) != pc_rtx
9874 || XEXP (ifelse2, 2) != pc_rtx
9875 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
9876 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
9877 || !COMPARISON_P (XEXP (ifelse2, 0))
9878 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
9879 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
9880 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
9881 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
9883 return false;
9886 /* We filtered the insn sequence to look like
9888 (set (cc0)
9889 (compare (reg:M N)
9890 (const_int VAL)))
9891 (set (pc)
9892 (if_then_else (eq (cc0)
9893 (const_int 0))
9894 (label_ref L1)
9895 (pc)))
9897 (set (cc0)
9898 (compare (reg:M N)
9899 (const_int VAL)))
9900 (set (pc)
9901 (if_then_else (CODE (cc0)
9902 (const_int 0))
9903 (label_ref L2)
9904 (pc)))
9907 code = GET_CODE (XEXP (ifelse2, 0));
9909 /* Map GT/GTU to GE/GEU which is easier for AVR.
9910 The first two instructions compare/branch on EQ
9911 so we may replace the difficult
9913 if (x == VAL) goto L1;
9914 if (x > VAL) goto L2;
9916 with easy
9918 if (x == VAL) goto L1;
9919 if (x >= VAL) goto L2;
9921 Similarly, replace LE/LEU by LT/LTU. */
9923 switch (code)
9925 case EQ:
9926 case LT: case LTU:
9927 case GE: case GEU:
9928 break;
9930 case LE: case LEU:
9931 case GT: case GTU:
9932 code = avr_normalize_condition (code);
9933 break;
9935 default:
9936 return false;
9939 /* Wrap the branches into UNSPECs so they won't be changed or
9940 optimized in the remainder. */
9942 target = XEXP (XEXP (ifelse1, 1), 0);
9943 cond = XEXP (ifelse1, 0);
9944 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
9946 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
9948 target = XEXP (XEXP (ifelse2, 1), 0);
9949 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9950 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
9952 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
9954 /* The comparisons in insn1 and insn2 are exactly the same;
9955 insn2 is superfluous so delete it. */
9957 delete_insn (insn2);
9958 delete_insn (branch1);
9959 delete_insn (branch2);
9961 return true;
9965 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9966 /* Optimize conditional jumps. */
9968 static void
9969 avr_reorg (void)
9971 rtx insn = get_insns();
9973 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
9975 rtx pattern = avr_compare_pattern (insn);
9977 if (!pattern)
9978 continue;
9980 if (optimize
9981 && avr_reorg_remove_redundant_compare (insn))
9983 continue;
9986 if (compare_diff_p (insn))
9988 /* Now we work on a compare insn followed by a difficult branch. */
9990 rtx next = next_real_insn (insn);
9991 rtx pat = PATTERN (next);
9993 pattern = SET_SRC (pattern);
9995 if (true_regnum (XEXP (pattern, 0)) >= 0
9996 && true_regnum (XEXP (pattern, 1)) >= 0)
9998 rtx x = XEXP (pattern, 0);
9999 rtx src = SET_SRC (pat);
10000 rtx t = XEXP (src,0);
10001 PUT_CODE (t, swap_condition (GET_CODE (t)));
10002 XEXP (pattern, 0) = XEXP (pattern, 1);
10003 XEXP (pattern, 1) = x;
10004 INSN_CODE (next) = -1;
10006 else if (true_regnum (XEXP (pattern, 0)) >= 0
10007 && XEXP (pattern, 1) == const0_rtx)
10009 /* This is a tst insn; we can reverse it. */
10010 rtx src = SET_SRC (pat);
10011 rtx t = XEXP (src,0);
10013 PUT_CODE (t, swap_condition (GET_CODE (t)));
10014 XEXP (pattern, 1) = XEXP (pattern, 0);
10015 XEXP (pattern, 0) = const0_rtx;
10016 INSN_CODE (next) = -1;
10017 INSN_CODE (insn) = -1;
10019 else if (true_regnum (XEXP (pattern, 0)) >= 0
10020 && CONST_INT_P (XEXP (pattern, 1)))
10022 rtx x = XEXP (pattern, 1);
10023 rtx src = SET_SRC (pat);
10024 rtx t = XEXP (src,0);
10025 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
10027 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
10029 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
10030 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
10031 INSN_CODE (next) = -1;
10032 INSN_CODE (insn) = -1;
10039 /* Return the register number used for the function return value. */
10041 static inline unsigned int
10042 avr_ret_register (void)
10044 return 24;
10048 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10050 static bool
10051 avr_function_value_regno_p (const unsigned int regno)
10053 return (regno == avr_ret_register ());
10057 /* Implement `TARGET_LIBCALL_VALUE'. */
10058 /* Create an RTX representing the place where a
10059 library function returns a value of mode MODE. */
10061 static rtx
10062 avr_libcall_value (enum machine_mode mode,
10063 const_rtx func ATTRIBUTE_UNUSED)
10065 int offs = GET_MODE_SIZE (mode);
10067 if (offs <= 4)
10068 offs = (offs + 1) & ~1;
10070 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
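/* A worked sketch of the register computation above: OFFS is rounded
   up to an even number for sizes <= 4 and the value starts at hard
   register 26 - OFFS:

       QImode: size 1 -> offs 2 -> R24
       HImode: size 2 -> offs 2 -> R25:R24
       SImode: size 4 -> offs 4 -> R25..R22
       DImode: size 8 -> offs 8 -> R25..R18  */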
10074 /* Implement `TARGET_FUNCTION_VALUE'. */
10075 /* Create an RTX representing the place where a
10076 function returns a value of data type VALTYPE. */
10078 static rtx
10079 avr_function_value (const_tree type,
10080 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
10081 bool outgoing ATTRIBUTE_UNUSED)
10083 unsigned int offs;
10085 if (TYPE_MODE (type) != BLKmode)
10086 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
10088 offs = int_size_in_bytes (type);
10089 if (offs < 2)
10090 offs = 2;
10091 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
10092 offs = GET_MODE_SIZE (SImode);
10093 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
10094 offs = GET_MODE_SIZE (DImode);
10096 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
10099 int
10100 test_hard_reg_class (enum reg_class rclass, rtx x)
10102 int regno = true_regnum (x);
10103 if (regno < 0)
10104 return 0;
10106 if (TEST_HARD_REG_CLASS (rclass, regno))
10107 return 1;
10109 return 0;
10113 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10114 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10116 static bool
10117 avr_2word_insn_p (rtx insn)
10119 if (avr_current_device->errata_skip
10120 || !insn
10121 || 2 != get_attr_length (insn))
10123 return false;
10126 switch (INSN_CODE (insn))
10128 default:
10129 return false;
10131 case CODE_FOR_movqi_insn:
10132 case CODE_FOR_movuqq_insn:
10133 case CODE_FOR_movqq_insn:
10135 rtx set = single_set (insn);
10136 rtx src = SET_SRC (set);
10137 rtx dest = SET_DEST (set);
10139 /* Factor out LDS and STS from movqi_insn. */
10141 if (MEM_P (dest)
10142 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
10144 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
10146 else if (REG_P (dest)
10147 && MEM_P (src))
10149 return CONSTANT_ADDRESS_P (XEXP (src, 0));
10152 return false;
10155 case CODE_FOR_call_insn:
10156 case CODE_FOR_call_value_insn:
10157 return true;
10162 int
10163 jump_over_one_insn_p (rtx insn, rtx dest)
10165 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
10166 ? XEXP (dest, 0)
10167 : dest);
10168 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
10169 int dest_addr = INSN_ADDRESSES (uid);
10170 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
10172 return (jump_offset == 1
10173 || (jump_offset == 2
10174 && avr_2word_insn_p (next_active_insn (insn))));
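/* Example of what this enables: a branch over exactly one insn can be
   replaced by letting a skip instruction (CPSE, SBRC, ...) skip that
   insn instead.  If the insn to be skipped is 2 words (JMP, CALL,
   LDS, STS), this is only safe on devices not affected by the skip
   erratum, which is what the errata_skip check above appears to
   guard.  */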
10178 /* Worker function for `HARD_REGNO_MODE_OK'. */
10179 /* Returns 1 if a value of mode MODE can be stored starting with hard
10180 register number REGNO. On the enhanced core, anything larger than
10181 1 byte must start in an even-numbered register for "movw" to work
10182 (this way we don't have to check for odd registers everywhere). */
10184 int
10185 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
10187 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10188 Disallowing QI et al. in these regs might lead to code like
10189 (set (subreg:QI (reg:HI 28) n) ...)
10190 which will result in wrong code because reload does not
10191 handle SUBREGs of hard registers like this.
10192 This could be fixed in reload. However, it appears
10193 that fixing reload is not wanted by reload people. */
10195 /* Any GENERAL_REGS register can hold 8-bit values. */
10197 if (GET_MODE_SIZE (mode) == 1)
10198 return 1;
10200 /* FIXME: Ideally, the following test is not needed.
10201 However, it turned out that it can reduce the number
10202 of spill fails. AVR, with its poor endowment of
10203 address registers, is an extreme stress test for reload. */
10205 if (GET_MODE_SIZE (mode) >= 4
10206 && regno >= REG_X)
10207 return 0;
10209 /* All modes larger than 8 bits should start in an even register. */
10211 return !(regno & 1);
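/* Examples: QImode is allowed in any register, including R28/R29;
   HImode may start at R24 but not at the odd R25; and SImode starting
   at R26 (REG_X) or above is rejected to reduce spill failures.  */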
10215 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10217 int
10218 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10220 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10221 represent valid hard registers like, e.g. HI:29. Returning TRUE
10222 for such registers can lead to performance degradation as mentioned
10223 in PR53595. Thus, report invalid hard registers as FALSE. */
10225 if (!avr_hard_regno_mode_ok (regno, mode))
10226 return 0;
10228 /* Return true if any of the following boundaries is crossed:
10229 17/18, 27/28 and 29/30. */
10231 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10232 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10233 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
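/* Example, assuming the standard avr-gcc ABI (R18..R27, R30, R31
   call-used; R2..R17, R28, R29 call-saved): SImode starting at R16
   covers R16..R19 and crosses the 17/18 boundary, so a call clobbers
   only part of it; SImode at R20 lies entirely in call-used registers
   and is not part-clobbered.  */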
10237 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10239 enum reg_class
10240 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10241 addr_space_t as, RTX_CODE outer_code,
10242 RTX_CODE index_code ATTRIBUTE_UNUSED)
10244 if (!ADDR_SPACE_GENERIC_P (as))
10246 return POINTER_Z_REGS;
10249 if (!avr_strict_X)
10250 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10252 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10256 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10258 bool
10259 avr_regno_mode_code_ok_for_base_p (int regno,
10260 enum machine_mode mode ATTRIBUTE_UNUSED,
10261 addr_space_t as ATTRIBUTE_UNUSED,
10262 RTX_CODE outer_code,
10263 RTX_CODE index_code ATTRIBUTE_UNUSED)
10265 bool ok = false;
10267 if (!ADDR_SPACE_GENERIC_P (as))
10269 if (regno < FIRST_PSEUDO_REGISTER
10270 && regno == REG_Z)
10272 return true;
10275 if (reg_renumber)
10277 regno = reg_renumber[regno];
10279 if (regno == REG_Z)
10281 return true;
10285 return false;
10288 if (regno < FIRST_PSEUDO_REGISTER
10289 && (regno == REG_X
10290 || regno == REG_Y
10291 || regno == REG_Z
10292 || regno == ARG_POINTER_REGNUM))
10294 ok = true;
10296 else if (reg_renumber)
10298 regno = reg_renumber[regno];
10300 if (regno == REG_X
10301 || regno == REG_Y
10302 || regno == REG_Z
10303 || regno == ARG_POINTER_REGNUM)
10305 ok = true;
10309 if (avr_strict_X
10310 && PLUS == outer_code
10311 && regno == REG_X)
10313 ok = false;
10316 return ok;
10320 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10321 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10322 CLOBBER_REG is a QI clobber register or NULL_RTX.
10323 LEN == NULL: output instructions.
10324 LEN != NULL: set *LEN to the length of the instruction sequence
10325 (in words) printed with LEN = NULL.
10326 If CLEAR_P is true, OP[0] has already been cleared to zero.
10327 If CLEAR_P is false, nothing is known about OP[0].
10329 The effect on cc0 is as follows:
10331 Load 0 to any register except ZERO_REG : NONE
10332 Load ld register with any value : NONE
10333 Anything else : CLOBBER */
10335 static void
10336 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
10338 rtx src = op[1];
10339 rtx dest = op[0];
10340 rtx xval, xdest[4];
10341 int ival[4];
10342 int clobber_val = 1234;
10343 bool cooked_clobber_p = false;
10344 bool set_p = false;
10345 enum machine_mode mode = GET_MODE (dest);
10346 int n, n_bytes = GET_MODE_SIZE (mode);
10348 gcc_assert (REG_P (dest)
10349 && CONSTANT_P (src));
10351 if (len)
10352 *len = 0;
10354 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10355 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10357 if (REGNO (dest) < 16
10358 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
10360 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
10363 /* We might need a clobber reg but don't have one. Look at the value to
10364 be loaded more closely. A clobber is only needed if it is a symbol
10365 or contains a byte that is neither 0, -1, nor a power of 2. */
10367 if (NULL_RTX == clobber_reg
10368 && !test_hard_reg_class (LD_REGS, dest)
10369 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
10370 || !avr_popcount_each_byte (src, n_bytes,
10371 (1 << 0) | (1 << 1) | (1 << 8))))
10373 /* We have no clobber register but need one. Cook one up.
10374 That's cheaper than loading from the constant pool. */
10376 cooked_clobber_p = true;
10377 clobber_reg = all_regs_rtx[REG_Z + 1];
10378 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
10381 /* Now start filling DEST from LSB to MSB. */
10383 for (n = 0; n < n_bytes; n++)
10385 int ldreg_p;
10386 bool done_byte = false;
10387 int j;
10388 rtx xop[3];
10390 /* Crop the n-th destination byte. */
10392 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
10393 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
10395 if (!CONST_INT_P (src)
10396 && !CONST_FIXED_P (src)
10397 && !CONST_DOUBLE_P (src))
10399 static const char* const asm_code[][2] =
10401 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
10402 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
10403 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
10404 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
10407 xop[0] = xdest[n];
10408 xop[1] = src;
10409 xop[2] = clobber_reg;
10411 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
10413 continue;
10416 /* Crop the n-th source byte. */
10418 xval = simplify_gen_subreg (QImode, src, mode, n);
10419 ival[n] = INTVAL (xval);
10421 /* Look if we can reuse the low word by means of MOVW. */
10423 if (n == 2
10424 && n_bytes >= 4
10425 && AVR_HAVE_MOVW)
10427 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
10428 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
10430 if (INTVAL (lo16) == INTVAL (hi16))
10432 if (0 != INTVAL (lo16)
10433 || !clear_p)
10435 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
10438 break;
10442 /* Don't use CLR so that cc0 is set as expected. */
10444 if (ival[n] == 0)
10446 if (!clear_p)
10447 avr_asm_len (ldreg_p ? "ldi %0,0"
10448 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
10449 : "mov %0,__zero_reg__",
10450 &xdest[n], len, 1);
10451 continue;
10454 if (clobber_val == ival[n]
10455 && REGNO (clobber_reg) == REGNO (xdest[n]))
10457 continue;
10460 /* LD_REGS can use LDI to move a constant value */
10462 if (ldreg_p)
10464 xop[0] = xdest[n];
10465 xop[1] = xval;
10466 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
10467 continue;
10470 /* Try to reuse value already loaded in some lower byte. */
10472 for (j = 0; j < n; j++)
10473 if (ival[j] == ival[n])
10475 xop[0] = xdest[n];
10476 xop[1] = xdest[j];
10478 avr_asm_len ("mov %0,%1", xop, len, 1);
10479 done_byte = true;
10480 break;
10483 if (done_byte)
10484 continue;
10486 /* Need no clobber reg for -1: Use CLR/DEC */
10488 if (-1 == ival[n])
10490 if (!clear_p)
10491 avr_asm_len ("clr %0", &xdest[n], len, 1);
10493 avr_asm_len ("dec %0", &xdest[n], len, 1);
10494 continue;
10496 else if (1 == ival[n])
10498 if (!clear_p)
10499 avr_asm_len ("clr %0", &xdest[n], len, 1);
10501 avr_asm_len ("inc %0", &xdest[n], len, 1);
10502 continue;
10505 /* Use T flag or INC to manage powers of 2 if we have
10506 no clobber reg. */
10508 if (NULL_RTX == clobber_reg
10509 && single_one_operand (xval, QImode))
10511 xop[0] = xdest[n];
10512 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
10514 gcc_assert (constm1_rtx != xop[1]);
10516 if (!set_p)
10518 set_p = true;
10519 avr_asm_len ("set", xop, len, 1);
10522 if (!clear_p)
10523 avr_asm_len ("clr %0", xop, len, 1);
10525 avr_asm_len ("bld %0,%1", xop, len, 1);
10526 continue;
10529 /* We actually need the LD_REGS clobber reg. */
10531 gcc_assert (NULL_RTX != clobber_reg);
10533 xop[0] = xdest[n];
10534 xop[1] = xval;
10535 xop[2] = clobber_reg;
10536 clobber_val = ival[n];
10538 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10539 "mov %0,%2", xop, len, 2);
10542 /* If we cooked up a clobber reg above, restore it. */
10544 if (cooked_clobber_p)
10546 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
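/* A sketch of the MOVW shortcut above: loading 0xab12ab12 into the
   LD_REGS quadruple R27..R24 takes

       ldi r24,0x12
       ldi r25,0xab
       movw r26,r24   ; high word equals low word: reuse it

   instead of four LDI instructions.  */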
10551 /* Reload the constant OP[1] into the HI register OP[0].
10552 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
10553 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10554 need a clobber reg or have to cook one up.
10556 PLEN == NULL: Output instructions.
10557 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10558 by the insns printed.
10560 Return "". */
10562 const char*
10563 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10565 output_reload_in_const (op, clobber_reg, plen, false);
10566 return "";
10570 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10571 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
10572 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10573 need a clobber reg or have to cook one up.
10575 LEN == NULL: Output instructions.
10577 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10578 by the insns printed.
10580 Return "". */
10582 const char *
10583 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
10585 if (AVR_HAVE_MOVW
10586 && !test_hard_reg_class (LD_REGS, op[0])
10587 && (CONST_INT_P (op[1])
10588 || CONST_FIXED_P (op[1])
10589 || CONST_DOUBLE_P (op[1])))
10591 int len_clr, len_noclr;
10593 /* In some cases it is better to clear the destination beforehand, e.g.
10595 CLR R2 CLR R3 MOVW R4,R2 INC R2
10597 is shorter than
10599 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10601 We find it too tedious to work that out in the print function.
10602 Instead, we call the print function twice to get the lengths of
10603 both methods and use the shortest one. */
10605 output_reload_in_const (op, clobber_reg, &len_clr, true);
10606 output_reload_in_const (op, clobber_reg, &len_noclr, false);
10608 if (len_noclr - len_clr == 4)
10610 /* Default needs 4 CLR instructions: clear register beforehand. */
10612 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10613 "mov %B0,__zero_reg__" CR_TAB
10614 "movw %C0,%A0", &op[0], len, 3);
10616 output_reload_in_const (op, clobber_reg, len, true);
10618 if (len)
10619 *len += 3;
10621 return "";
10625 /* Default: destination not pre-cleared. */
10627 output_reload_in_const (op, clobber_reg, len, false);
10628 return "";
10631 const char*
10632 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10634 output_reload_in_const (op, clobber_reg, len, false);
10635 return "";
10639 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10641 void
10642 avr_output_addr_vec_elt (FILE *stream, int value)
10644 if (AVR_HAVE_JMP_CALL)
10645 fprintf (stream, "\t.word gs(.L%d)\n", value);
10646 else
10647 fprintf (stream, "\trjmp .L%d\n", value);
10651 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10652 /* Return true if register REGNO is safe to be allocated as a scratch
10653 register (for a define_peephole2) in the current function. */
10655 static bool
10656 avr_hard_regno_scratch_ok (unsigned int regno)
10658 /* Interrupt functions can only use registers that have already been saved
10659 by the prologue, even if they would normally be call-clobbered. */
10661 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10662 && !df_regs_ever_live_p (regno))
10663 return false;
10665 /* Don't allow hard registers that might be part of the frame pointer.
10666 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10667 and don't care for a frame pointer that spans more than one register. */
10669 if ((!reload_completed || frame_pointer_needed)
10670 && (regno == REG_Y || regno == REG_Y + 1))
10672 return false;
10675 return true;
10679 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10680 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10682 int
10683 avr_hard_regno_rename_ok (unsigned int old_reg,
10684 unsigned int new_reg)
10686 /* Interrupt functions can only use registers that have already been
10687 saved by the prologue, even if they would normally be
10688 call-clobbered. */
10690 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10691 && !df_regs_ever_live_p (new_reg))
10692 return 0;
10694 /* Don't allow hard registers that might be part of the frame pointer.
10695 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10696 and don't care for a frame pointer that spans more than one register. */
10698 if ((!reload_completed || frame_pointer_needed)
10699 && (old_reg == REG_Y || old_reg == REG_Y + 1
10700 || new_reg == REG_Y || new_reg == REG_Y + 1))
10702 return 0;
10705 return 1;
10708 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10709 or memory location in the I/O space (QImode only).
10711 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10712 Operand 1: register operand to test, or CONST_INT memory address.
10713 Operand 2: bit number.
10714 Operand 3: label to jump to if the test is true. */
10716 const char*
10717 avr_out_sbxx_branch (rtx insn, rtx operands[])
10719 enum rtx_code comp = GET_CODE (operands[0]);
10720 bool long_jump = get_attr_length (insn) >= 4;
10721 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
10723 if (comp == GE)
10724 comp = EQ;
10725 else if (comp == LT)
10726 comp = NE;
10728 if (reverse)
10729 comp = reverse_condition (comp);
10731 switch (GET_CODE (operands[1]))
10733 default:
10734 gcc_unreachable();
10736 case CONST_INT:
10738 if (low_io_address_operand (operands[1], QImode))
10740 if (comp == EQ)
10741 output_asm_insn ("sbis %i1,%2", operands);
10742 else
10743 output_asm_insn ("sbic %i1,%2", operands);
10745 else
10747 output_asm_insn ("in __tmp_reg__,%i1", operands);
10748 if (comp == EQ)
10749 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
10750 else
10751 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
10754 break; /* CONST_INT */
10756 case REG:
10758 if (comp == EQ)
10759 output_asm_insn ("sbrs %T1%T2", operands);
10760 else
10761 output_asm_insn ("sbrc %T1%T2", operands);
10763 break; /* REG */
10764 } /* switch */
10766 if (long_jump)
10767 return ("rjmp .+4" CR_TAB
10768 "jmp %x3");
10770 if (!reverse)
10771 return "rjmp %x3";
10773 return "";
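/* Example output for a short branch testing a bit of a low I/O
   register against zero (EQ):

       sbis %i1,%2   ; skip next insn if the bit is set
       rjmp %x3      ; reached, and taken, when the bit is zero

   For a long jump, the trailing rjmp/jmp pair is emitted instead.  */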
10776 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10778 static void
10779 avr_asm_out_ctor (rtx symbol, int priority)
10781 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10782 default_ctor_section_asm_out_constructor (symbol, priority);
10786 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10788 static void
10789 avr_asm_out_dtor (rtx symbol, int priority)
10791 fputs ("\t.global __do_global_dtors\n", asm_out_file);
10792 default_dtor_section_asm_out_destructor (symbol, priority);
10796 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10798 static bool
10799 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10801 if (TYPE_MODE (type) == BLKmode)
10803 HOST_WIDE_INT size = int_size_in_bytes (type);
10804 return (size == -1 || size > 8);
10806 else
10807 return false;
10811 /* Implement `CASE_VALUES_THRESHOLD'. */
10812 /* Supply the default for --param case-values-threshold=0 */
10814 static unsigned int
10815 avr_case_values_threshold (void)
10817 /* The exact break-even point between a jump table and an if-else tree
10818 depends on several factors not available here, e.g. whether 8-bit
10819 comparisons can be used in the if-else tree, the range of the case
10820 values, whether the case value can be reused, the register
10821 allocation, etc. '7' appears to be a good choice. */
10823 return 7;
10827 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10829 static enum machine_mode
10830 avr_addr_space_address_mode (addr_space_t as)
10832 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10836 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10838 static enum machine_mode
10839 avr_addr_space_pointer_mode (addr_space_t as)
10841 return avr_addr_space_address_mode (as);
10845 /* Helper for the following function. */
10847 static bool
10848 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10850 gcc_assert (REG_P (reg));
10852 if (strict)
10854 return REGNO (reg) == REG_Z;
10857 /* Avoid letting combine propagate hard regs. */
10859 if (can_create_pseudo_p()
10860 && REGNO (reg) < REG_Z)
10862 return false;
10865 return true;
10869 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10871 static bool
10872 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
10873 bool strict, addr_space_t as)
10875 bool ok = false;
10877 switch (as)
10879 default:
10880 gcc_unreachable();
10882 case ADDR_SPACE_GENERIC:
10883 return avr_legitimate_address_p (mode, x, strict);
10885 case ADDR_SPACE_FLASH:
10886 case ADDR_SPACE_FLASH1:
10887 case ADDR_SPACE_FLASH2:
10888 case ADDR_SPACE_FLASH3:
10889 case ADDR_SPACE_FLASH4:
10890 case ADDR_SPACE_FLASH5:
10892 switch (GET_CODE (x))
10894 case REG:
10895 ok = avr_reg_ok_for_pgm_addr (x, strict);
10896 break;
10898 case POST_INC:
10899 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
10900 break;
10902 default:
10903 break;
10906 break; /* FLASH */
10908 case ADDR_SPACE_MEMX:
10909 if (REG_P (x))
10910 ok = (!strict
10911 && can_create_pseudo_p());
10913 if (LO_SUM == GET_CODE (x))
10915 rtx hi = XEXP (x, 0);
10916 rtx lo = XEXP (x, 1);
10918 ok = (REG_P (hi)
10919 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
10920 && REG_P (lo)
10921 && REGNO (lo) == REG_Z);
10924 break; /* MEMX */
10927 if (avr_log.legitimate_address_p)
10929 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10930 "reload_completed=%d reload_in_progress=%d %s:",
10931 ok, mode, strict, reload_completed, reload_in_progress,
10932 reg_renumber ? "(reg_renumber)" : "");
10934 if (GET_CODE (x) == PLUS
10935 && REG_P (XEXP (x, 0))
10936 && CONST_INT_P (XEXP (x, 1))
10937 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
10938 && reg_renumber)
10940 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
10941 true_regnum (XEXP (x, 0)));
10944 avr_edump ("\n%r\n", x);
10947 return ok;
10951 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10953 static rtx
10954 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10955 enum machine_mode mode, addr_space_t as)
10957 if (ADDR_SPACE_GENERIC_P (as))
10958 return avr_legitimize_address (x, old_x, mode);
10960 if (avr_log.legitimize_address)
10962 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10965 return old_x;
10969 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10971 static rtx
10972 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
10974 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
10975 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
10977 if (avr_log.progmem)
10978 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10979 src, type_from, type_to);
10981 /* Up-casting from 16-bit to 24-bit pointer. */
10983 if (as_from != ADDR_SPACE_MEMX
10984 && as_to == ADDR_SPACE_MEMX)
10986 int msb;
10987 rtx sym = src;
10988 rtx reg = gen_reg_rtx (PSImode);
10990 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
10991 sym = XEXP (sym, 0);
10993 /* Look at symbol flags: avr_encode_section_info sets the flags
10994 also if attribute progmem was seen so that we get the right
10995 promotion for, e.g. PSTR-like strings that reside in generic space
10996 but are located in flash. In that case we patch the incoming
10997 address space. */
10999 if (SYMBOL_REF == GET_CODE (sym)
11000 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
11002 as_from = ADDR_SPACE_FLASH;
11005 /* Linearize memory: RAM has bit 23 set. */
11007 msb = ADDR_SPACE_GENERIC_P (as_from)
11008 ? 0x80
11009 : avr_addrspace[as_from].segment;
11011 src = force_reg (Pmode, src);
11013 emit_insn (msb == 0
11014 ? gen_zero_extendhipsi2 (reg, src)
11015 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
11017 return reg;
11020 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11022 if (as_from == ADDR_SPACE_MEMX
11023 && as_to != ADDR_SPACE_MEMX)
11025 rtx new_src = gen_reg_rtx (Pmode);
11027 src = force_reg (PSImode, src);
11029 emit_move_insn (new_src,
11030 simplify_gen_subreg (Pmode, src, PSImode, 0));
11031 return new_src;
11034 return src;
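/* Illustration of the linearization above, with segment values as in
   avr_addrspace: a __flash1 pointer 0x1234 widens to the 24-bit
   __memx value 0x011234, whereas a RAM pointer 0x1234 widens to
   0x801234 because RAM has bit 23 set.  */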
11038 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11040 static bool
11041 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
11042 addr_space_t superset ATTRIBUTE_UNUSED)
11044 /* Allow any kind of pointer mess. */
11046 return true;
11050 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11052 static tree
11053 avr_convert_to_type (tree type, tree expr)
11055 /* Print a diagnostic for a pointer conversion that changes the address
11056 space of the pointer target to a non-enclosing address space,
11057 provided -Waddr-space-convert is on.
11059 FIXME: Filter out cases where the target object is known to
11060 be located in the right memory, like in
11062 (const __flash*) PSTR ("text")
11064 Also try to distinguish between explicit casts requested by
11065 the user and implicit casts like
11067 void f (const __flash char*);
11069 void g (const char *p)
11071 f ((const __flash*) p);
11074 under the assumption that an explicit cast means that the user
11075 knows what they are doing, e.g. interfacing with PSTR or old style
11076 code with progmem and pgm_read_xxx.
11079 if (avr_warn_addr_space_convert
11080 && expr != error_mark_node
11081 && POINTER_TYPE_P (type)
11082 && POINTER_TYPE_P (TREE_TYPE (expr)))
11084 addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
11085 addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));
11087 if (avr_log.progmem)
11088 avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);
11090 if (as_new != ADDR_SPACE_MEMX
11091 && as_new != as_old)
11093 location_t loc = EXPR_LOCATION (expr);
11094 const char *name_old = avr_addrspace[as_old].name;
11095 const char *name_new = avr_addrspace[as_new].name;
11097 warning (OPT_Waddr_space_convert,
11098 "conversion from address space %qs to address space %qs",
11099 ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
11100 ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);
11102 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
11106 return NULL_TREE;
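/* A hypothetical fragment that triggers the diagnostic above when
   -Waddr-space-convert is on:

       const char *p;
       const __flash char *q = (const __flash char*) p;
           // warning: conversion from address space "generic"
           // to address space "__flash"

   Casting to __memx instead is silent because __memx encloses all
   other address spaces.  */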
11110 /* Worker function for movmemhi expander.
11111 XOP[0] Destination as MEM:BLK
11112 XOP[1] Source " "
11113 XOP[2] # Bytes to copy
11115 Return TRUE if the expansion is accomplished.
11116 Return FALSE if the operand combination is not supported. */
11118 bool
11119 avr_emit_movmemhi (rtx *xop)
11121 HOST_WIDE_INT count;
11122 enum machine_mode loop_mode;
11123 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
11124 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
11125 rtx a_hi8 = NULL_RTX;
11127 if (avr_mem_flash_p (xop[0]))
11128 return false;
11130 if (!CONST_INT_P (xop[2]))
11131 return false;
11133 count = INTVAL (xop[2]);
11134 if (count <= 0)
11135 return false;
11137 a_src = XEXP (xop[1], 0);
11138 a_dest = XEXP (xop[0], 0);
11140 if (PSImode == GET_MODE (a_src))
11142 gcc_assert (as == ADDR_SPACE_MEMX);
11144 loop_mode = (count < 0x100) ? QImode : HImode;
11145 loop_reg = gen_rtx_REG (loop_mode, 24);
11146 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
11148 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
11149 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
11151 else
11153 int segment = avr_addrspace[as].segment;
11155 if (segment
11156 && avr_current_device->n_flash > 1)
11158 a_hi8 = GEN_INT (segment);
11159 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
11161 else if (!ADDR_SPACE_GENERIC_P (as))
11163 as = ADDR_SPACE_FLASH;
11166 addr1 = a_src;
11168 loop_mode = (count <= 0x100) ? QImode : HImode;
11169 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
11172 xas = GEN_INT (as);
11174 /* FIXME: Register allocator might come up with spill fails if it is left
11175 on its own. Thus, we allocate the pointer registers by hand:
11176 Z = source address
11177 X = destination address */
11179 emit_move_insn (lpm_addr_reg_rtx, addr1);
11180 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
11182 /* FIXME: Register allocator does a bad job and might spill address
11183 register(s) inside the loop, leading to additional move instructions
11184 to/from the stack which could clobber tmp_reg. Thus, do *not* emit
11185 load and store as separate insns. Instead, we perform the copy
11186 by means of one monolithic insn. */
11188 gcc_assert (TMP_REGNO == LPM_REGNO);
11190 if (as != ADDR_SPACE_MEMX)
11192 /* Load instruction ([E]LPM or LD) is known at compile time:
11193 Do the copy-loop inline. */
11195 rtx (*fun) (rtx, rtx, rtx)
11196 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
11198 insn = fun (xas, loop_reg, loop_reg);
11200 else
11202 rtx (*fun) (rtx, rtx)
11203 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
11205 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
11207 insn = fun (xas, GEN_INT (avr_addr.rampz));
11210 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
11211 emit_insn (insn);
11213 return true;
11217 /* Print assembler for movmem_qi, movmem_hi insns...
11218 $0 : Address Space
11219 $1, $2 : Loop register
11220 Z : Source address
11221 X : Destination address
11224 const char*
11225 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
11227 addr_space_t as = (addr_space_t) INTVAL (op[0]);
11228 enum machine_mode loop_mode = GET_MODE (op[1]);
11229 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
11230 rtx xop[3];
11232 if (plen)
11233 *plen = 0;
11235 xop[0] = op[0];
11236 xop[1] = op[1];
11237 xop[2] = tmp_reg_rtx;
11239 /* Loop label */
11241 avr_asm_len ("0:", xop, plen, 0);
11243 /* Load with post-increment */
11245 switch (as)
11247 default:
11248 gcc_unreachable();
11250 case ADDR_SPACE_GENERIC:
11252 avr_asm_len ("ld %2,Z+", xop, plen, 1);
11253 break;
11255 case ADDR_SPACE_FLASH:
11257 if (AVR_HAVE_LPMX)
11258 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
11259 else
11260 avr_asm_len ("lpm" CR_TAB
11261 "adiw r30,1", xop, plen, 2);
11262 break;
11264 case ADDR_SPACE_FLASH1:
11265 case ADDR_SPACE_FLASH2:
11266 case ADDR_SPACE_FLASH3:
11267 case ADDR_SPACE_FLASH4:
11268 case ADDR_SPACE_FLASH5:
11270 if (AVR_HAVE_ELPMX)
11271 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
11272 else
11273 avr_asm_len ("elpm" CR_TAB
11274 "adiw r30,1", xop, plen, 2);
11275 break;
11278 /* Store with post-increment */
11280 avr_asm_len ("st X+,%2", xop, plen, 1);
11282 /* Decrement loop-counter and set Z-flag */
11284 if (QImode == loop_mode)
11286 avr_asm_len ("dec %1", xop, plen, 1);
11288 else if (sbiw_p)
11290 avr_asm_len ("sbiw %1,1", xop, plen, 1);
11292 else
11294 avr_asm_len ("subi %A1,1" CR_TAB
11295 "sbci %B1,0", xop, plen, 2);
11298 /* Loop until zero */
11300 return avr_asm_len ("brne 0b", xop, plen, 1);
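/* For a QImode loop in the generic address space the emitted sequence
   reads (the loop register %1 shown as r24 for illustration):

       0:	ld	r0,Z+		; r0 is __tmp_reg__
       	st	X+,r0
       	dec	r24
       	brne	0b
*/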
11305 /* Helper for __builtin_avr_delay_cycles */
11307 static rtx
11308 avr_mem_clobber (void)
11310 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11311 MEM_VOLATILE_P (mem) = 1;
11312 return mem;
11315 static void
11316 avr_expand_delay_cycles (rtx operands0)
11318 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
11319 unsigned HOST_WIDE_INT cycles_used;
11320 unsigned HOST_WIDE_INT loop_count;
11322 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
11324 loop_count = ((cycles - 9) / 6) + 1;
11325 cycles_used = ((loop_count - 1) * 6) + 9;
11326 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
11327 avr_mem_clobber()));
11328 cycles -= cycles_used;
11331 if (IN_RANGE (cycles, 262145, 83886081))
11333 loop_count = ((cycles - 7) / 5) + 1;
11334 if (loop_count > 0xFFFFFF)
11335 loop_count = 0xFFFFFF;
11336 cycles_used = ((loop_count - 1) * 5) + 7;
11337 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
11338 avr_mem_clobber()));
11339 cycles -= cycles_used;
11342 if (IN_RANGE (cycles, 768, 262144))
11344 loop_count = ((cycles - 5) / 4) + 1;
11345 if (loop_count > 0xFFFF)
11346 loop_count = 0xFFFF;
11347 cycles_used = ((loop_count - 1) * 4) + 5;
11348 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
11349 avr_mem_clobber()));
11350 cycles -= cycles_used;
11353 if (IN_RANGE (cycles, 6, 767))
11355 loop_count = cycles / 3;
11356 if (loop_count > 255)
11357 loop_count = 255;
11358 cycles_used = loop_count * 3;
11359 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
11360 avr_mem_clobber()));
11361 cycles -= cycles_used;
11364 while (cycles >= 2)
11366 emit_insn (gen_nopv (GEN_INT(2)));
11367 cycles -= 2;
11370 if (cycles == 1)
11372 emit_insn (gen_nopv (GEN_INT(1)));
11373 cycles--;
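/* A worked example of the bookkeeping above: for
   __builtin_avr_delay_cycles (1000), the 768..262144 range applies:
   loop_count = (1000 - 5) / 4 + 1 = 249 and
   cycles_used = 248 * 4 + 5 = 997.  The remaining 3 cycles are
   emitted as one 2-cycle and one 1-cycle nop insn.  */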
11378 /* Compute the image of x under f, i.e. perform x --> f(x) */
11380 static int
11381 avr_map (unsigned int f, int x)
11383 return x < 8 ? (f >> (4 * x)) & 0xf : 0;
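/* Examples: with the identity map F = 0x76543210, avr_map (F, 3) == 3;
   with F = 0x3210ffff, avr_map (F, 0) == 0xf and avr_map (F, 4) == 0.  */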
11387 /* Return some metrics of map A. */
11389 enum
11391 /* Number of fixed points in { 0 ... 7 } */
11392 MAP_FIXED_0_7,
11394 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11395 MAP_NONFIXED_0_7,
11397 /* Mask representing the fixed points in { 0 ... 7 } */
11398 MAP_MASK_FIXED_0_7,
11400 /* Size of the preimage of { 0 ... 7 } */
11401 MAP_PREIMAGE_0_7,
11403 /* Mask that represents the preimage of { f } */
11404 MAP_MASK_PREIMAGE_F
11407 static unsigned
11408 avr_map_metric (unsigned int a, int mode)
11410 unsigned i, metric = 0;
11412 for (i = 0; i < 8; i++)
11414 unsigned ai = avr_map (a, i);
11416 if (mode == MAP_FIXED_0_7)
11417 metric += ai == i;
11418 else if (mode == MAP_NONFIXED_0_7)
11419 metric += ai < 8 && ai != i;
11420 else if (mode == MAP_MASK_FIXED_0_7)
11421 metric |= ((unsigned) (ai == i)) << i;
11422 else if (mode == MAP_PREIMAGE_0_7)
11423 metric += ai < 8;
11424 else if (mode == MAP_MASK_PREIMAGE_F)
11425 metric |= ((unsigned) (ai == 0xf)) << i;
11426 else
11427 gcc_unreachable();
11430 return metric;
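/* Example: for A = 0x3210ffff, nibbles 4..7 hold 0..3, so
   MAP_FIXED_0_7 is 0, MAP_NONFIXED_0_7 is 4 and MAP_MASK_PREIMAGE_F
   is 0x0f (nibbles 0..3 are 0xf).  */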
11434 /* Return true if IVAL has a 0xf in its hexadecimal representation
11435 and false, otherwise. Only nibbles 0..7 are taken into account.
11436 Used as constraint helper for C0f and Cxf. */
11438 bool
11439 avr_has_nibble_0xf (rtx ival)
11441 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
11442 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
11446 /* We have a set of bits that are mapped by a function F.
11447 Try to decompose F by means of a second function G so that
11449 F = F o G^-1 o G
11453 cost (F o G^-1) + cost (G) < cost (F)
11455 Example: Suppose builtin insert_bits supplies us with the map
11456 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11457 nibble of the result, we can just as well rotate the bits before inserting
11458 them and use the map 0x7654ffff which is cheaper than the original map.
11459 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11461 typedef struct
11463 /* tree code of binary function G */
11464 enum tree_code code;
11466 /* The constant second argument of G */
11467 int arg;
11469 /* G^-1, the inverse of G (*, arg) */
11470 unsigned ginv;
11472 /* The cost of applying G (*, arg) */
11473 int cost;
11475 /* The composition F o G^-1 (*, arg) for some function F */
11476 unsigned int map;
11478 /* For debug purpose only */
11479 const char *str;
11480 } avr_map_op_t;
11482 static const avr_map_op_t avr_map_op[] =
11484 { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
11485 { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
11486 { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
11487 { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
11488 { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
11489 { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
11490 { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
11491 { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
11492 { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
11493 { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
11494 { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
11495 { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
11496 { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
11497 { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
11498 { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
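/* Reading the table: { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" } stands
   for G = rotate-left-by-4, which moves bit B to bit (B + 4) % 8.  Its
   inverse moves bit B back to (B + 4) % 8 as well, which as a nibble map
   is GINV = 0x32107654.  The cost of 1 matches the single SWAP
   instruction needed for a nibble swap; the MAP field is left 0 here and
   is filled in by avr_map_decompose.  */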
11502 /* Try to decompose F as F = (F o G^-1) o G as described above.
11503 The result is a struct representing F o G^-1 and G.
11504 If result.cost < 0 then such a decomposition does not exist. */
11506 static avr_map_op_t
11507 avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
11509 int i;
11510 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
11511 avr_map_op_t f_ginv = *g;
11512 unsigned int ginv = g->ginv;
11514 f_ginv.cost = -1;
11516 /* Step 1: Computing F o G^-1 */
11518 for (i = 7; i >= 0; i--)
11520 int x = avr_map (f, i);
11522 if (x <= 7)
11524 x = avr_map (ginv, x);
11526 /* The bit is not an element of the image of G: decomposition fails (cost = -1). */
11528 if (x > 7)
11529 return f_ginv;
11532 f_ginv.map = (f_ginv.map << 4) + x;
11535 /* Step 2: Compute the cost of the operations.
11536 The overall cost of doing an operation prior to the insertion is
11537 the cost of the insertion plus the cost of the operation. */
11539 /* Step 2a: Compute cost of F o G^-1 */
11541 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
11543 /* The mapping consists only of fixed points and can be folded
11544 to AND/OR logic in the remainder. Reasonable cost is 3. */
11546 f_ginv.cost = 2 + (val_used_p && !val_const_p);
11548 else
11550 rtx xop[4];
11552 /* Get the cost of the insn by calling the output worker with some
11553 fake values. Mimic effect of reloading xop[3]: Unused operands
11554 are mapped to 0 and used operands are reloaded to xop[0]. */
11556 xop[0] = all_regs_rtx[24];
11557 xop[1] = gen_int_mode (f_ginv.map, SImode);
11558 xop[2] = all_regs_rtx[25];
11559 xop[3] = val_used_p ? xop[0] : const0_rtx;
11561 avr_out_insert_bits (xop, &f_ginv.cost);
11563 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
11566 /* Step 2b: Add cost of G */
11568 f_ginv.cost += g->cost;
11570 if (avr_log.builtin)
11571 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
11573 return f_ginv;
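/* Tracing the example from above: decomposing F = 0x3210ffff against
   the rotate-4 entry (GINV = 0x32107654) rewrites each nibble X <= 7
   of F to GINV(X) while the 0xf nibbles pass through unchanged, giving
   F o G^-1 = 0x7654ffff as claimed.  */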
11577 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11578 XOP[0] and XOP[1] don't overlap.
11579 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11580 If FIXP_P = false: Only move a bit if its position in the destination
11581 differs from its source position. */
11583 static void
11584 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
11586 int bit_dest, b;
11588 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11589 int t_bit_src = -1;
11591 /* We order the operations according to the requested source bit b. */
11593 for (b = 0; b < 8; b++)
11594 for (bit_dest = 0; bit_dest < 8; bit_dest++)
11596 int bit_src = avr_map (map, bit_dest);
11598 if (b != bit_src
11599 || bit_src >= 8
11600 /* Same position: No need to copy as requested by FIXP_P. */
11601 || (bit_dest == bit_src && !fixp_p))
11602 continue;
11604 if (t_bit_src != bit_src)
11606 /* Source bit is not yet in T: Store it to T. */
11608 t_bit_src = bit_src;
11610 xop[3] = GEN_INT (bit_src);
11611 avr_asm_len ("bst %T1%T3", xop, plen, 1);
11614 /* Load destination bit with T. */
11616 xop[3] = GEN_INT (bit_dest);
11617 avr_asm_len ("bld %T0%T3", xop, plen, 1);
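/* Example: MAP = 0x76543201 swaps bits 0 and 1 of the destination.
   With FIXP_P = false the loops above emit, schematically,

       bst  %1,0    bld  %0,1
       bst  %1,1    bld  %0,0

   i.e. source bits are visited in ascending order and each one is
   cached in the T flag before being deposited.  */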
11622 /* PLEN == 0: Print assembler code for `insert_bits'.
11623 PLEN != 0: Compute code length in bytes.
11625 OP[0]: Result
11626 OP[1]: The mapping composed of nibbles. If nibble no. N is
11627 0: Bit N of result is copied from bit OP[2].0
11628 ... ...
11629 7: Bit N of result is copied from bit OP[2].7
11630 0xf: Bit N of result is copied from bit OP[3].N
11631 OP[2]: Bits to be inserted
11632 OP[3]: Target value */
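/* Example: MAP = 0x3210ffff describes a result whose bits 0...3 are
   taken from OP[3] and whose bits 4...7 are bits 0...3 of OP[2].  */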
11634 const char*
11635 avr_out_insert_bits (rtx *op, int *plen)
11637 unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
11638 unsigned mask_fixed;
11639 bool fixp_p = true;
11640 rtx xop[4];
11642 xop[0] = op[0];
11643 xop[1] = op[2];
11644 xop[2] = op[3];
11646 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
11648 if (plen)
11649 *plen = 0;
11650 else if (flag_print_asm_name)
11651 fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);
11653 /* If MAP has fixed points it might be better to initialize the result
11654 with the bits to be inserted instead of moving all bits by hand. */
11656 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
11658 if (REGNO (xop[0]) == REGNO (xop[1]))
11660 /* Avoid early-clobber conflicts */
11662 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
11663 xop[1] = tmp_reg_rtx;
11664 fixp_p = false;
11667 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
11669 /* XOP[2] is used and reloaded to XOP[0] already */
11671 int n_fix = 0, n_nofix = 0;
11673 gcc_assert (REG_P (xop[2]));
11675 /* Get the code size of the bit insertions; once with all bits
11676 moved and once with fixed points omitted. */
11678 avr_move_bits (xop, map, true, &n_fix);
11679 avr_move_bits (xop, map, false, &n_nofix);
11681 if (fixp_p && n_fix - n_nofix > 3)
11683 xop[3] = gen_int_mode (~mask_fixed, QImode);
11685 avr_asm_len ("eor %0,%1" CR_TAB
11686 "andi %0,%3" CR_TAB
11687 "eor %0,%1", xop, plen, 3);
11688 fixp_p = false;
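      /* The three instructions above are the classic masked merge
         X ^ ((X ^ Y) & M):  with %3 = ~MASK_FIXED, %0 receives %1 on
         all fixed points and keeps its own bits elsewhere, so the
         fixed points need no BLD/BST traffic below.  */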
11691 else
11693 /* XOP[2] is unused */
11695 if (fixp_p && mask_fixed)
11697 avr_asm_len ("mov %0,%1", xop, plen, 1);
11698 fixp_p = false;
11702 /* Move/insert remaining bits. */
11704 avr_move_bits (xop, map, fixp_p, plen);
11706 return "";
11710 /* IDs for all the AVR builtins. */
11712 enum avr_builtin_id
11714 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11715 AVR_BUILTIN_ ## NAME,
11716 #include "builtins.def"
11717 #undef DEF_BUILTIN
11719 AVR_BUILTIN_COUNT
11722 struct GTY(()) avr_builtin_description
11724 enum insn_code icode;
11725 int n_args;
11726 tree fndecl;
11730 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11731 that a built-in's ID can be used to access the built-in by means of
11732 avr_bdesc[ID] */
11734 static GTY(()) struct avr_builtin_description
11735 avr_bdesc[AVR_BUILTIN_COUNT] =
11737 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11738 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11739 #include "builtins.def"
11740 #undef DEF_BUILTIN
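/* Both expansions depend on builtins.def being an X-macro list.
   Assuming it contained a (hypothetical, for illustration) line

       DEF_BUILTIN (NOP, -1, void_ftype_void, nothing, NULL)

   the enum above would gain AVR_BUILTIN_NOP and avr_bdesc the entry
   { CODE_FOR_nothing, -1, NULL_TREE } at the same index, which is what
   makes the ID-based lookup below work.  */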
11744 /* Implement `TARGET_BUILTIN_DECL'. */
11746 static tree
11747 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11749 if (id < AVR_BUILTIN_COUNT)
11750 return avr_bdesc[id].fndecl;
11752 return error_mark_node;
11756 static void
11757 avr_init_builtin_int24 (void)
11759 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11760 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11762 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11763 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
11767 /* Implement `TARGET_INIT_BUILTINS' */
11768 /* Set up all builtin functions for this target. */
11770 static void
11771 avr_init_builtins (void)
11773 tree void_ftype_void
11774 = build_function_type_list (void_type_node, NULL_TREE);
11775 tree uchar_ftype_uchar
11776 = build_function_type_list (unsigned_char_type_node,
11777 unsigned_char_type_node,
11778 NULL_TREE);
11779 tree uint_ftype_uchar_uchar
11780 = build_function_type_list (unsigned_type_node,
11781 unsigned_char_type_node,
11782 unsigned_char_type_node,
11783 NULL_TREE);
11784 tree int_ftype_char_char
11785 = build_function_type_list (integer_type_node,
11786 char_type_node,
11787 char_type_node,
11788 NULL_TREE);
11789 tree int_ftype_char_uchar
11790 = build_function_type_list (integer_type_node,
11791 char_type_node,
11792 unsigned_char_type_node,
11793 NULL_TREE);
11794 tree void_ftype_ulong
11795 = build_function_type_list (void_type_node,
11796 long_unsigned_type_node,
11797 NULL_TREE);
11799 tree uchar_ftype_ulong_uchar_uchar
11800 = build_function_type_list (unsigned_char_type_node,
11801 long_unsigned_type_node,
11802 unsigned_char_type_node,
11803 unsigned_char_type_node,
11804 NULL_TREE);
11806 tree const_memx_void_node
11807 = build_qualified_type (void_type_node,
11808 TYPE_QUAL_CONST
11809 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
11811 tree const_memx_ptr_type_node
11812 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
11814 tree char_ftype_const_memx_ptr
11815 = build_function_type_list (char_type_node,
11816 const_memx_ptr_type_node,
11817 NULL);
11819 #define ITYP(T) \
11820 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11822 #define FX_FTYPE_FX(fx) \
11823 tree fx##r_ftype_##fx##r \
11824 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11825 tree fx##k_ftype_##fx##k \
11826 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11828 #define FX_FTYPE_FX_INT(fx) \
11829 tree fx##r_ftype_##fx##r_int \
11830 = build_function_type_list (node_##fx##r, node_##fx##r, \
11831 integer_type_node, NULL); \
11832 tree fx##k_ftype_##fx##k_int \
11833 = build_function_type_list (node_##fx##k, node_##fx##k, \
11834 integer_type_node, NULL)
11836 #define INT_FTYPE_FX(fx) \
11837 tree int_ftype_##fx##r \
11838 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11839 tree int_ftype_##fx##k \
11840 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11842 #define INTX_FTYPE_FX(fx) \
11843 tree int##fx##r_ftype_##fx##r \
11844 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11845 tree int##fx##k_ftype_##fx##k \
11846 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11848 #define FX_FTYPE_INTX(fx) \
11849 tree fx##r_ftype_int##fx##r \
11850 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11851 tree fx##k_ftype_int##fx##k \
11852 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
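/* As an illustration, the invocation FX_FTYPE_FX (h) further down
   expands to two locals

       tree hr_ftype_hr
         = build_function_type_list (node_hr, node_hr, NULL);
       tree hk_ftype_hk
         = build_function_type_list (node_hk, node_hk, NULL);

   i.e. one signature for the fract and one for the accum flavour of
   the same width.  */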
11854 tree node_hr = short_fract_type_node;
11855 tree node_nr = fract_type_node;
11856 tree node_lr = long_fract_type_node;
11857 tree node_llr = long_long_fract_type_node;
11859 tree node_uhr = unsigned_short_fract_type_node;
11860 tree node_unr = unsigned_fract_type_node;
11861 tree node_ulr = unsigned_long_fract_type_node;
11862 tree node_ullr = unsigned_long_long_fract_type_node;
11864 tree node_hk = short_accum_type_node;
11865 tree node_nk = accum_type_node;
11866 tree node_lk = long_accum_type_node;
11867 tree node_llk = long_long_accum_type_node;
11869 tree node_uhk = unsigned_short_accum_type_node;
11870 tree node_unk = unsigned_accum_type_node;
11871 tree node_ulk = unsigned_long_accum_type_node;
11872 tree node_ullk = unsigned_long_long_accum_type_node;
11875 /* For absfx builtins. */
11877 FX_FTYPE_FX (h);
11878 FX_FTYPE_FX (n);
11879 FX_FTYPE_FX (l);
11880 FX_FTYPE_FX (ll);
11882 /* For roundfx builtins. */
11884 FX_FTYPE_FX_INT (h);
11885 FX_FTYPE_FX_INT (n);
11886 FX_FTYPE_FX_INT (l);
11887 FX_FTYPE_FX_INT (ll);
11889 FX_FTYPE_FX_INT (uh);
11890 FX_FTYPE_FX_INT (un);
11891 FX_FTYPE_FX_INT (ul);
11892 FX_FTYPE_FX_INT (ull);
11894 /* For countlsfx builtins. */
11896 INT_FTYPE_FX (h);
11897 INT_FTYPE_FX (n);
11898 INT_FTYPE_FX (l);
11899 INT_FTYPE_FX (ll);
11901 INT_FTYPE_FX (uh);
11902 INT_FTYPE_FX (un);
11903 INT_FTYPE_FX (ul);
11904 INT_FTYPE_FX (ull);
11906 /* For bitsfx builtins. */
11908 INTX_FTYPE_FX (h);
11909 INTX_FTYPE_FX (n);
11910 INTX_FTYPE_FX (l);
11911 INTX_FTYPE_FX (ll);
11913 INTX_FTYPE_FX (uh);
11914 INTX_FTYPE_FX (un);
11915 INTX_FTYPE_FX (ul);
11916 INTX_FTYPE_FX (ull);
11918 /* For fxbits builtins. */
11920 FX_FTYPE_INTX (h);
11921 FX_FTYPE_INTX (n);
11922 FX_FTYPE_INTX (l);
11923 FX_FTYPE_INTX (ll);
11925 FX_FTYPE_INTX (uh);
11926 FX_FTYPE_INTX (un);
11927 FX_FTYPE_INTX (ul);
11928 FX_FTYPE_INTX (ull);
11931 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11933 int id = AVR_BUILTIN_ ## NAME; \
11934 const char *Name = "__builtin_avr_" #NAME; \
11935 char *name = (char*) alloca (1 + strlen (Name)); \
11937 gcc_assert (id < AVR_BUILTIN_COUNT); \
11938 avr_bdesc[id].fndecl \
11939 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11940 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11942 #include "builtins.def"
11943 #undef DEF_BUILTIN
11945 avr_init_builtin_int24 ();
11949 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11950 with non-void result and 1 ... 3 arguments. */
11952 static rtx
11953 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
11955 rtx pat, xop[3];
11956 int n, n_args = call_expr_nargs (exp);
11957 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11959 gcc_assert (n_args >= 1 && n_args <= 3);
11961 if (target == NULL_RTX
11962 || GET_MODE (target) != tmode
11963 || !insn_data[icode].operand[0].predicate (target, tmode))
11965 target = gen_reg_rtx (tmode);
11968 for (n = 0; n < n_args; n++)
11970 tree arg = CALL_EXPR_ARG (exp, n);
11971 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11972 enum machine_mode opmode = GET_MODE (op);
11973 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
11975 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
11977 opmode = HImode;
11978 op = gen_lowpart (HImode, op);
11981 /* In case the insn wants input operands in modes different from
11982 the result, abort. */
11984 gcc_assert (opmode == mode || opmode == VOIDmode);
11986 if (!insn_data[icode].operand[n+1].predicate (op, mode))
11987 op = copy_to_mode_reg (mode, op);
11989 xop[n] = op;
11992 switch (n_args)
11994 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
11995 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
11996 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
11998 default:
11999 gcc_unreachable();
12002 if (pat == NULL_RTX)
12003 return NULL_RTX;
12005 emit_insn (pat);
12007 return target;
12011 /* Implement `TARGET_EXPAND_BUILTIN'. */
12012 /* Expand an expression EXP that calls a built-in function,
12013 with result going to TARGET if that's convenient
12014 (and in mode MODE if that's convenient).
12015 SUBTARGET may be used as the target for computing one of EXP's operands.
12016 IGNORE is nonzero if the value is to be ignored. */
12018 static rtx
12019 avr_expand_builtin (tree exp, rtx target,
12020 rtx subtarget ATTRIBUTE_UNUSED,
12021 enum machine_mode mode ATTRIBUTE_UNUSED,
12022 int ignore)
12024 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
12025 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
12026 unsigned int id = DECL_FUNCTION_CODE (fndecl);
12027 const struct avr_builtin_description *d = &avr_bdesc[id];
12028 tree arg0;
12029 rtx op0;
12031 gcc_assert (id < AVR_BUILTIN_COUNT);
12033 switch (id)
12035 case AVR_BUILTIN_NOP:
12036 emit_insn (gen_nopv (GEN_INT(1)));
12037 return 0;
12039 case AVR_BUILTIN_DELAY_CYCLES:
12041 arg0 = CALL_EXPR_ARG (exp, 0);
12042 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12044 if (!CONST_INT_P (op0))
12045 error ("%s expects a compile time integer constant", bname);
12046 else
12047 avr_expand_delay_cycles (op0);
12049 return NULL_RTX;
12052 case AVR_BUILTIN_INSERT_BITS:
12054 arg0 = CALL_EXPR_ARG (exp, 0);
12055 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12057 if (!CONST_INT_P (op0))
12059 error ("%s expects a compile time long integer constant"
12060 " as first argument", bname);
12061 return target;
12064 break;
12067 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
12068 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
12069 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
12070 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
12072 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
12073 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
12074 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
12075 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
12077 /* Warn about odd rounding. Rounding points >= FBIT will have
12078 no effect. */
12080 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
12081 break;
12083 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
12085 if (rbit >= (int) GET_MODE_FBIT (mode))
12087 warning (OPT_Wextra, "rounding to %d bits has no effect for "
12088 "fixed-point value with %d fractional bits",
12089 rbit, GET_MODE_FBIT (mode));
12091 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
12092 EXPAND_NORMAL);
12094 else if (rbit <= - (int) GET_MODE_IBIT (mode))
12096 warning (0, "rounding result will always be 0");
12097 return CONST0_RTX (mode);
12100 /* The rounding point RP now satisfies: -IBIT < RP < FBIT.
12102 TR 18037 only specifies results for RP > 0. However, the
12103 remaining cases of -IBIT < RP <= 0 can easily be supported
12104 without any additional overhead. */
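      /* Example: __builtin_avr_roundhr works on short _Fract which,
         assuming the usual 8-bit layout with FBIT = 7 and IBIT = 0,
         means that a rounding point >= 7 triggers the warning above
         and a rounding point <= 0 folds to the 0 constant.  */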
12106 break; /* round */
12109 /* No fold found and no insn: Call support function from libgcc. */
12111 if (d->icode == CODE_FOR_nothing
12112 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
12114 return expand_call (exp, target, ignore);
12117 /* No special treatment needed: vanilla expand. */
12119 gcc_assert (d->icode != CODE_FOR_nothing);
12120 gcc_assert (d->n_args == call_expr_nargs (exp));
12122 if (d->n_args == 0)
12124 emit_insn ((GEN_FCN (d->icode)) (target));
12125 return NULL_RTX;
12128 return avr_default_expand_builtin (d->icode, exp, target);
12132 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12134 static tree
12135 avr_fold_absfx (tree tval)
12137 if (FIXED_CST != TREE_CODE (tval))
12138 return NULL_TREE;
12140 /* Our fixed-points have no padding: Use double_int payload directly. */
12142 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
12143 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
12144 double_int ival = fval.data.sext (bits);
12146 if (!ival.is_negative())
12147 return tval;
12149 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12151 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
12152 ? double_int::max_value (bits, false)
12153 : -ival;
12155 return build_fixed (TREE_TYPE (tval), fval);
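/* E.g. in an 8-bit fract mode the pattern 0x80 (value -1.0) has no
   positive counterpart, so the fold above saturates it to 0x7f, the
   maximal value 127/128, as required by TR 18037.  */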
12159 /* Implement `TARGET_FOLD_BUILTIN'. */
12161 static tree
12162 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
12163 bool ignore ATTRIBUTE_UNUSED)
12165 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
12166 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
12168 if (!optimize)
12169 return NULL_TREE;
12171 switch (fcode)
12173 default:
12174 break;
12176 case AVR_BUILTIN_SWAP:
12178 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
12179 build_int_cst (val_type, 4));
12182 case AVR_BUILTIN_ABSHR:
12183 case AVR_BUILTIN_ABSR:
12184 case AVR_BUILTIN_ABSLR:
12185 case AVR_BUILTIN_ABSLLR:
12187 case AVR_BUILTIN_ABSHK:
12188 case AVR_BUILTIN_ABSK:
12189 case AVR_BUILTIN_ABSLK:
12190 case AVR_BUILTIN_ABSLLK:
12191 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12193 return avr_fold_absfx (arg[0]);
12195 case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
12196 case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
12197 case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
12198 case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:
12200 case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
12201 case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
12202 case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
12203 case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:
12205 case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
12206 case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
12207 case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
12208 case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:
12210 case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
12211 case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
12212 case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
12213 case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:
12215 gcc_assert (TYPE_PRECISION (val_type)
12216 == TYPE_PRECISION (TREE_TYPE (arg[0])));
12218 return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
12220 case AVR_BUILTIN_INSERT_BITS:
12222 tree tbits = arg[1];
12223 tree tval = arg[2];
12224 tree tmap;
12225 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
12226 unsigned int map;
12227 bool changed = false;
12228 unsigned i;
12229 avr_map_op_t best_g;
12231 if (TREE_CODE (arg[0]) != INTEGER_CST)
12233 /* No constant as first argument: Don't fold this; the error will
12234 be reported by avr_expand_builtin. */
12236 break;
12239 tmap = double_int_to_tree (map_type, tree_to_double_int (arg[0]));
12240 map = TREE_INT_CST_LOW (tmap);
12242 if (TREE_CODE (tval) != INTEGER_CST
12243 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
12245 /* There is no F in the map, i.e. the 3rd operand is unused.
12246 Replace that argument with some constant to render the
12247 respective input unused. */
12249 tval = build_int_cst (val_type, 0);
12250 changed = true;
12253 if (TREE_CODE (tbits) != INTEGER_CST
12254 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
12256 /* Similar for the bits to be inserted. If they are unused,
12257 we can just as well pass 0. */
12259 tbits = build_int_cst (val_type, 0);
12262 if (TREE_CODE (tbits) == INTEGER_CST)
12264 /* Inserting bits known at compile time is easy and can be
12265 performed by AND and OR with appropriate masks. */
12267 int bits = TREE_INT_CST_LOW (tbits);
12268 int mask_ior = 0, mask_and = 0xff;
12270 for (i = 0; i < 8; i++)
12272 int mi = avr_map (map, i);
12274 if (mi < 8)
12276 if (bits & (1 << mi)) mask_ior |= (1 << i);
12277 else mask_and &= ~(1 << i);
12281 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
12282 build_int_cst (val_type, mask_ior));
12283 return fold_build2 (BIT_AND_EXPR, val_type, tval,
12284 build_int_cst (val_type, mask_and));
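      /* Worked example: MAP = 0x3210ffff with constant BITS = 0x0a
         yields MASK_IOR = 0xa0 (result bits 5 and 7 receive 1-bits)
         and MASK_AND = 0xaf (result bits 4 and 6 receive 0-bits), so
         the whole insertion folds to (TVAL | 0xa0) & 0xaf.  */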
12287 if (changed)
12288 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12290 /* If bits don't change their position we can use vanilla logic
12291 to merge the two arguments. */
12293 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
12295 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
12296 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
12298 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
12299 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
12300 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
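      /* This is the same masked merge as used in avr_out_insert_bits,
         just on trees:  ((TBITS ^ TVAL) & ~MASK_F) ^ TVAL keeps TVAL
         on the 0xf positions and selects TBITS everywhere else.  */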
12303 /* Try to decompose MAP to reduce the overall cost. */
12305 if (avr_log.builtin)
12306 avr_edump ("\n%?: %x\n%?: ROL cost: ", map);
12308 best_g = avr_map_op[0];
12309 best_g.cost = 1000;
12311 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
12313 avr_map_op_t g
12314 = avr_map_decompose (map, avr_map_op + i,
12315 TREE_CODE (tval) == INTEGER_CST);
12317 if (g.cost >= 0 && g.cost < best_g.cost)
12318 best_g = g;
12321 if (avr_log.builtin)
12322 avr_edump ("\n");
12324 if (best_g.arg == 0)
12325 /* No optimization found */
12326 break;
12328 /* Apply operation G to the 2nd argument. */
12330 if (avr_log.builtin)
12331 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12332 best_g.str, best_g.arg, best_g.map, best_g.cost);
12334 /* Do right-shifts arithmetically: They copy the MSB instead of
12335 shifting in a non-usable value (0) as a logical right-shift would. */
12337 tbits = fold_convert (signed_char_type_node, tbits);
12338 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
12339 build_int_cst (val_type, best_g.arg));
12340 tbits = fold_convert (val_type, tbits);
12342 /* Use map o G^-1 instead of original map to undo the effect of G. */
12344 tmap = double_int_to_tree (map_type,
12345 double_int::from_uhwi (best_g.map));
12347 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12348 } /* AVR_BUILTIN_INSERT_BITS */
12351 return NULL_TREE;
12356 /* Initialize the GCC target structure. */
12358 #undef TARGET_ASM_ALIGNED_HI_OP
12359 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12360 #undef TARGET_ASM_ALIGNED_SI_OP
12361 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12362 #undef TARGET_ASM_UNALIGNED_HI_OP
12363 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12364 #undef TARGET_ASM_UNALIGNED_SI_OP
12365 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12366 #undef TARGET_ASM_INTEGER
12367 #define TARGET_ASM_INTEGER avr_assemble_integer
12368 #undef TARGET_ASM_FILE_START
12369 #define TARGET_ASM_FILE_START avr_file_start
12370 #undef TARGET_ASM_FILE_END
12371 #define TARGET_ASM_FILE_END avr_file_end
12373 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12374 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12375 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12376 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12378 #undef TARGET_FUNCTION_VALUE
12379 #define TARGET_FUNCTION_VALUE avr_function_value
12380 #undef TARGET_LIBCALL_VALUE
12381 #define TARGET_LIBCALL_VALUE avr_libcall_value
12382 #undef TARGET_FUNCTION_VALUE_REGNO_P
12383 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12385 #undef TARGET_ATTRIBUTE_TABLE
12386 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12387 #undef TARGET_INSERT_ATTRIBUTES
12388 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12389 #undef TARGET_SECTION_TYPE_FLAGS
12390 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12392 #undef TARGET_ASM_NAMED_SECTION
12393 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12394 #undef TARGET_ASM_INIT_SECTIONS
12395 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12396 #undef TARGET_ENCODE_SECTION_INFO
12397 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12398 #undef TARGET_ASM_SELECT_SECTION
12399 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12401 #undef TARGET_REGISTER_MOVE_COST
12402 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12403 #undef TARGET_MEMORY_MOVE_COST
12404 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12405 #undef TARGET_RTX_COSTS
12406 #define TARGET_RTX_COSTS avr_rtx_costs
12407 #undef TARGET_ADDRESS_COST
12408 #define TARGET_ADDRESS_COST avr_address_cost
12409 #undef TARGET_MACHINE_DEPENDENT_REORG
12410 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12411 #undef TARGET_FUNCTION_ARG
12412 #define TARGET_FUNCTION_ARG avr_function_arg
12413 #undef TARGET_FUNCTION_ARG_ADVANCE
12414 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12416 #undef TARGET_SET_CURRENT_FUNCTION
12417 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12419 #undef TARGET_RETURN_IN_MEMORY
12420 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12422 #undef TARGET_STRICT_ARGUMENT_NAMING
12423 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12425 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12426 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12428 #undef TARGET_HARD_REGNO_SCRATCH_OK
12429 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12430 #undef TARGET_CASE_VALUES_THRESHOLD
12431 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12433 #undef TARGET_FRAME_POINTER_REQUIRED
12434 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12435 #undef TARGET_CAN_ELIMINATE
12436 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12438 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12439 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12441 #undef TARGET_WARN_FUNC_RETURN
12442 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12444 #undef TARGET_CLASS_LIKELY_SPILLED_P
12445 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12447 #undef TARGET_OPTION_OVERRIDE
12448 #define TARGET_OPTION_OVERRIDE avr_option_override
12450 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12451 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12453 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12454 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12456 #undef TARGET_INIT_BUILTINS
12457 #define TARGET_INIT_BUILTINS avr_init_builtins
12459 #undef TARGET_BUILTIN_DECL
12460 #define TARGET_BUILTIN_DECL avr_builtin_decl
12462 #undef TARGET_EXPAND_BUILTIN
12463 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12465 #undef TARGET_FOLD_BUILTIN
12466 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12468 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12469 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12471 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12472 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12474 #undef TARGET_BUILD_BUILTIN_VA_LIST
12475 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12477 #undef TARGET_FIXED_POINT_SUPPORTED_P
12478 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12480 #undef TARGET_CONVERT_TO_TYPE
12481 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12483 #undef TARGET_ADDR_SPACE_SUBSET_P
12484 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12486 #undef TARGET_ADDR_SPACE_CONVERT
12487 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12489 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12490 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12492 #undef TARGET_ADDR_SPACE_POINTER_MODE
12493 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12495 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12496 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12497 avr_addr_space_legitimate_address_p
12499 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12500 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12502 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12503 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12505 #undef TARGET_SECONDARY_RELOAD
12506 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12508 #undef TARGET_PRINT_OPERAND
12509 #define TARGET_PRINT_OPERAND avr_print_operand
12510 #undef TARGET_PRINT_OPERAND_ADDRESS
12511 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12512 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12513 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12515 struct gcc_target targetm = TARGET_INITIALIZER;
12518 #include "gt-avr.h"