gcc/
[official-gcc.git] / gcc / config / avr / avr.c
blob7436b59a970720af63bbf64046823e04ad25eab1
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2015 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "hash-set.h"
35 #include "machmode.h"
36 #include "vec.h"
37 #include "double-int.h"
38 #include "input.h"
39 #include "alias.h"
40 #include "symtab.h"
41 #include "wide-int.h"
42 #include "inchash.h"
43 #include "tree.h"
44 #include "fold-const.h"
45 #include "varasm.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "stor-layout.h"
49 #include "stringpool.h"
50 #include "output.h"
51 #include "hashtab.h"
52 #include "function.h"
53 #include "statistics.h"
54 #include "real.h"
55 #include "fixed-value.h"
56 #include "expmed.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "emit-rtl.h"
60 #include "stmt.h"
61 #include "expr.h"
62 #include "c-family/c-common.h"
63 #include "diagnostic-core.h"
64 #include "obstack.h"
65 #include "recog.h"
66 #include "optabs.h"
67 #include "ggc.h"
68 #include "langhooks.h"
69 #include "tm_p.h"
70 #include "target.h"
71 #include "target-def.h"
72 #include "params.h"
73 #include "dominance.h"
74 #include "cfg.h"
75 #include "cfgrtl.h"
76 #include "cfganal.h"
77 #include "lcm.h"
78 #include "cfgbuild.h"
79 #include "cfgcleanup.h"
80 #include "predict.h"
81 #include "basic-block.h"
82 #include "df.h"
83 #include "builtins.h"
84 #include "context.h"
85 #include "tree-pass.h"
/* Maximal allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))

/* Return true iff STR starts with PREFIX, false otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fix: use the macro parameter (SYM) in the body.  The previous body
   referenced a hard-coded `sym', which only worked when the call site
   happened to have a variable of exactly that name in scope and silently
   ignored the actual argument otherwise.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
/* Assembler templates adding/subtracting a 16-bit immediate I to/from the
   register pair REG1 (low) / REG2 (high), written as SUBI/SBCI sequences
   (used where ADIW/SBIW are not applicable, e.g. on AVR_TINY cores).  */

#define TINY_ADIW(REG1, REG2, I)                \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB        \
  "sbci " #REG2 ",hi8(-(" #I "))"

#define TINY_SBIW(REG1, REG2, I)                \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB         \
  "sbci " #REG2 ",hi8((" #I "))"

/* Temporary and zero register numbers differ between AVR_TINY cores
   and the other cores.  */
#define AVR_TMP_REGNO  (AVR_TINY ? TMP_REGNO_TINY  : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
127 /* Known address spaces. The order must be the same as in the respective
128 enum from avr.h (or designated initialized must be used). */
129 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
131 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
132 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
133 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
134 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
135 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
136 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
137 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
138 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status.  */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA.  */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM.  */
  int rampz;

  /* SP: The stack pointer and its low and high byte.  */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;
167 /* Prototypes for local helper functions. */
169 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
170 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
171 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
172 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
173 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
174 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
176 static int get_sequence_length (rtx_insn *insns);
177 static int sequent_regs_live (void);
178 static const char *ptrreg_to_str (int);
179 static const char *cond_string (enum rtx_code);
180 static int avr_num_arg_regs (machine_mode, const_tree);
181 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
182 int, bool);
183 static void output_reload_in_const (rtx*, rtx, int*, bool);
184 static struct machine_function * avr_init_machine_status (void);
187 /* Prototypes for hook implementors if needed before their implementation. */
189 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
192 /* Allocate registers from r25 to r8 for parameters for function calls. */
193 #define FIRST_CUM_REG 26
195 /* Last call saved register */
196 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
198 /* Implicit target register of LPM instruction (R0) */
199 extern GTY(()) rtx lpm_reg_rtx;
200 rtx lpm_reg_rtx;
202 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
203 extern GTY(()) rtx lpm_addr_reg_rtx;
204 rtx lpm_addr_reg_rtx;
206 /* Temporary register RTX (reg:QI TMP_REGNO) */
207 extern GTY(()) rtx tmp_reg_rtx;
208 rtx tmp_reg_rtx;
210 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
211 extern GTY(()) rtx zero_reg_rtx;
212 rtx zero_reg_rtx;
214 /* RTXs for all general purpose registers as QImode */
215 extern GTY(()) rtx all_regs_rtx[32];
216 rtx all_regs_rtx[32];
218 /* SREG, the processor status */
219 extern GTY(()) rtx sreg_rtx;
220 rtx sreg_rtx;
222 /* RAMP* special function registers */
223 extern GTY(()) rtx rampd_rtx;
224 extern GTY(()) rtx rampx_rtx;
225 extern GTY(()) rtx rampy_rtx;
226 extern GTY(()) rtx rampz_rtx;
227 rtx rampd_rtx;
228 rtx rampx_rtx;
229 rtx rampy_rtx;
230 rtx rampz_rtx;
232 /* RTX containing the strings "" and "e", respectively */
233 static GTY(()) rtx xstring_empty;
234 static GTY(()) rtx xstring_e;
236 /* Current architecture. */
237 const avr_arch_t *avr_arch;
239 /* Section to put switch tables in. */
240 static GTY(()) section *progmem_swtable_section;
242 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
243 or to address space __flash* or __memx. Only used as singletons inside
244 avr_asm_select_section, but it must not be local there because of GTY. */
245 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
247 /* Condition for insns/expanders from avr-dimode.md. */
248 bool avr_have_dimode = true;
250 /* To track if code will use .bss and/or .data. */
251 bool avr_need_clear_bss_p = false;
252 bool avr_need_copy_data_p = false;
255 /* Transform UP into lowercase and write the result to LO.
256 You must provide enough space for LO. Return LO. */
258 static char*
259 avr_tolower (char *lo, const char *up)
261 char *lo0 = lo;
263 for (; *up; up++, lo++)
264 *lo = TOLOWER (*up);
266 *lo = '\0';
268 return lo0;
/* Custom function to count number of set bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int n_ones = 0;

  /* Inspect one bit per iteration; equivalent to clearing the lowest
     set bit until VAL is zero.  */
  for (; val != 0; val >>= 1)
    n_ones += val & 1;

  return n_ones;
}
289 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
290 Return true if the least significant N_BYTES bytes of XVAL all have a
291 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
292 of integers which contains an integer N iff bit N of POP_MASK is set. */
294 bool
295 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
297 int i;
299 machine_mode mode = GET_MODE (xval);
301 if (VOIDmode == mode)
302 mode = SImode;
304 for (i = 0; i < n_bytes; i++)
306 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
307 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
309 if (0 == (pop_mask & (1 << avr_popcount (val8))))
310 return false;
313 return true;
317 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
318 the bit representation of X by "casting" it to CONST_INT. */
321 avr_to_int_mode (rtx x)
323 machine_mode mode = GET_MODE (x);
325 return VOIDmode == mode
327 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
331 static const pass_data avr_pass_data_recompute_notes =
333 RTL_PASS, // type
334 "", // name (will be patched)
335 OPTGROUP_NONE, // optinfo_flags
336 TV_DF_SCAN, // tv_id
337 0, // properties_required
338 0, // properties_provided
339 0, // properties_destroyed
340 0, // todo_flags_start
341 TODO_df_finish | TODO_df_verify // todo_flags_finish
345 class avr_pass_recompute_notes : public rtl_opt_pass
347 public:
348 avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
349 : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
351 this->name = name;
354 virtual unsigned int execute (function*)
356 df_note_add_problem ();
357 df_analyze ();
359 return 0;
361 }; // avr_pass_recompute_notes
364 static void
365 avr_register_passes (void)
367 /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
368 notes which are used by `avr.c::reg_unused_after' and branch offset
369 computations. These notes must be correct, i.e. there must be no
370 dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.
372 DF needs (correct) CFG, hence right before free_cfg is the last
373 opportunity to rectify notes. */
375 register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
376 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
380 /* Set `avr_arch' as specified by `-mmcu='.
381 Return true on success. */
383 static bool
384 avr_set_core_architecture (void)
386 /* Search for mcu core architecture. */
388 if (!avr_mmcu)
389 avr_mmcu = AVR_MMCU_DEFAULT;
391 avr_arch = &avr_arch_types[0];
393 for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
395 if (NULL == mcu->name)
397 /* Reached the end of `avr_mcu_types'. This should actually never
398 happen as options are provided by device-specs. It could be a
399 typo in a device-specs or calling the compiler proper directly
400 with -mmcu=<device>. */
402 error ("unknown core architecture %qs specified with %qs",
403 avr_mmcu, "-mmcu=");
404 avr_inform_core_architectures ();
405 break;
407 else if (0 == strcmp (mcu->name, avr_mmcu)
408 // Is this a proper architecture ?
409 && NULL == mcu->macro)
411 avr_arch = &avr_arch_types[mcu->arch_id];
412 if (avr_n_flash < 0)
413 avr_n_flash = mcu->n_flash;
415 return true;
419 return false;
423 /* Implement `TARGET_OPTION_OVERRIDE'. */
425 static void
426 avr_option_override (void)
428 /* Disable -fdelete-null-pointer-checks option for AVR target.
429 This option compiler assumes that dereferencing of a null pointer
430 would halt the program. For AVR this assumption is not true and
431 programs can safely dereference null pointers. Changes made by this
432 option may not work properly for AVR. So disable this option. */
434 flag_delete_null_pointer_checks = 0;
436 /* caller-save.c looks for call-clobbered hard registers that are assigned
437 to pseudos that cross calls and tries so save-restore them around calls
438 in order to reduce the number of stack slots needed.
440 This might lead to situations where reload is no more able to cope
441 with the challenge of AVR's very few address registers and fails to
442 perform the requested spills. */
444 if (avr_strict_X)
445 flag_caller_saves = 0;
447 /* Unwind tables currently require a frame pointer for correctness,
448 see toplev.c:process_options(). */
450 if ((flag_unwind_tables
451 || flag_non_call_exceptions
452 || flag_asynchronous_unwind_tables)
453 && !ACCUMULATE_OUTGOING_ARGS)
455 flag_omit_frame_pointer = 0;
458 if (flag_pic == 1)
459 warning (OPT_fpic, "-fpic is not supported");
460 if (flag_pic == 2)
461 warning (OPT_fPIC, "-fPIC is not supported");
462 if (flag_pie == 1)
463 warning (OPT_fpie, "-fpie is not supported");
464 if (flag_pie == 2)
465 warning (OPT_fPIE, "-fPIE is not supported");
467 if (!avr_set_core_architecture())
468 return;
470 /* RAM addresses of some SFRs common to all devices in respective arch. */
472 /* SREG: Status Register containing flags like I (global IRQ) */
473 avr_addr.sreg = 0x3F + avr_arch->sfr_offset;
475 /* RAMPZ: Address' high part when loading via ELPM */
476 avr_addr.rampz = 0x3B + avr_arch->sfr_offset;
478 avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
479 avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
480 avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
481 avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;
483 /* SP: Stack Pointer (SP_H:SP_L) */
484 avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
485 avr_addr.sp_h = avr_addr.sp_l + 1;
487 init_machine_status = avr_init_machine_status;
489 avr_log_set_avr_log();
491 /* Register some avr-specific pass(es). There is no canonical place for
492 pass registration. This function is convenient. */
494 avr_register_passes ();
497 /* Function to set up the backend function structure. */
499 static struct machine_function *
500 avr_init_machine_status (void)
502 return ggc_cleared_alloc<machine_function> ();
506 /* Implement `INIT_EXPANDERS'. */
507 /* The function works like a singleton. */
509 void
510 avr_init_expanders (void)
512 int regno;
514 for (regno = 0; regno < 32; regno ++)
515 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
517 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
518 tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
519 zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
521 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
523 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
524 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
525 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
526 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
527 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
529 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
530 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
532 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
533 to be present */
534 if (AVR_TINY)
535 avr_have_dimode = false;
539 /* Implement `REGNO_REG_CLASS'. */
540 /* Return register class for register R. */
542 enum reg_class
543 avr_regno_reg_class (int r)
545 static const enum reg_class reg_class_tab[] =
547 R0_REG,
548 /* r1 - r15 */
549 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
550 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
551 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
552 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
553 /* r16 - r23 */
554 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
555 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
556 /* r24, r25 */
557 ADDW_REGS, ADDW_REGS,
558 /* X: r26, 27 */
559 POINTER_X_REGS, POINTER_X_REGS,
560 /* Y: r28, r29 */
561 POINTER_Y_REGS, POINTER_Y_REGS,
562 /* Z: r30, r31 */
563 POINTER_Z_REGS, POINTER_Z_REGS,
564 /* SP: SPL, SPH */
565 STACK_REG, STACK_REG
568 if (r <= 33)
569 return reg_class_tab[r];
571 return ALL_REGS;
575 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
577 static bool
578 avr_scalar_mode_supported_p (machine_mode mode)
580 if (ALL_FIXED_POINT_MODE_P (mode))
581 return true;
583 if (PSImode == mode)
584 return true;
586 return default_scalar_mode_supported_p (mode);
590 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
592 static bool
593 avr_decl_flash_p (tree decl)
595 if (TREE_CODE (decl) != VAR_DECL
596 || TREE_TYPE (decl) == error_mark_node)
598 return false;
601 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
605 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
606 address space and FALSE, otherwise. */
608 static bool
609 avr_decl_memx_p (tree decl)
611 if (TREE_CODE (decl) != VAR_DECL
612 || TREE_TYPE (decl) == error_mark_node)
614 return false;
617 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
621 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
623 bool
624 avr_mem_flash_p (rtx x)
626 return (MEM_P (x)
627 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
631 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
632 address space and FALSE, otherwise. */
634 bool
635 avr_mem_memx_p (rtx x)
637 return (MEM_P (x)
638 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
642 /* A helper for the subsequent function attribute used to dig for
643 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
645 static inline int
646 avr_lookup_function_attribute1 (const_tree func, const char *name)
648 if (FUNCTION_DECL == TREE_CODE (func))
650 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
652 return true;
655 func = TREE_TYPE (func);
658 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
659 || TREE_CODE (func) == METHOD_TYPE);
661 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
664 /* Return nonzero if FUNC is a naked function. */
666 static int
667 avr_naked_function_p (tree func)
669 return avr_lookup_function_attribute1 (func, "naked");
672 /* Return nonzero if FUNC is an interrupt function as specified
673 by the "interrupt" attribute. */
675 static int
676 avr_interrupt_function_p (tree func)
678 return avr_lookup_function_attribute1 (func, "interrupt");
681 /* Return nonzero if FUNC is a signal function as specified
682 by the "signal" attribute. */
684 static int
685 avr_signal_function_p (tree func)
687 return avr_lookup_function_attribute1 (func, "signal");
690 /* Return nonzero if FUNC is an OS_task function. */
692 static int
693 avr_OS_task_function_p (tree func)
695 return avr_lookup_function_attribute1 (func, "OS_task");
698 /* Return nonzero if FUNC is an OS_main function. */
700 static int
701 avr_OS_main_function_p (tree func)
703 return avr_lookup_function_attribute1 (func, "OS_main");
707 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
708 /* Sanity cheching for above function attributes. */
710 static void
711 avr_set_current_function (tree decl)
713 location_t loc;
714 const char *isr;
716 if (decl == NULL_TREE
717 || current_function_decl == NULL_TREE
718 || current_function_decl == error_mark_node
719 || ! cfun->machine
720 || cfun->machine->attributes_checked_p)
721 return;
723 loc = DECL_SOURCE_LOCATION (decl);
725 cfun->machine->is_naked = avr_naked_function_p (decl);
726 cfun->machine->is_signal = avr_signal_function_p (decl);
727 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
728 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
729 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
731 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
733 /* Too much attributes make no sense as they request conflicting features. */
735 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
736 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
737 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
738 " exclusive", "OS_task", "OS_main", isr);
740 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
742 if (cfun->machine->is_naked
743 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
744 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
745 " no effect on %qs function", "OS_task", "OS_main", "naked");
747 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
749 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
750 tree ret = TREE_TYPE (TREE_TYPE (decl));
751 const char *name;
753 name = DECL_ASSEMBLER_NAME_SET_P (decl)
754 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
755 : IDENTIFIER_POINTER (DECL_NAME (decl));
757 /* Skip a leading '*' that might still prefix the assembler name,
758 e.g. in non-LTO runs. */
760 name = default_strip_name_encoding (name);
762 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
763 using this when it switched from SIGNAL and INTERRUPT to ISR. */
765 if (cfun->machine->is_interrupt)
766 cfun->machine->is_signal = 0;
768 /* Interrupt handlers must be void __vector (void) functions. */
770 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
771 error_at (loc, "%qs function cannot have arguments", isr);
773 if (TREE_CODE (ret) != VOID_TYPE)
774 error_at (loc, "%qs function cannot return a value", isr);
776 /* If the function has the 'signal' or 'interrupt' attribute, ensure
777 that the name of the function is "__vector_NN" so as to catch
778 when the user misspells the vector name. */
780 if (!STR_PREFIX_P (name, "__vector"))
781 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
782 name, isr);
785 /* Don't print the above diagnostics more than once. */
787 cfun->machine->attributes_checked_p = 1;
791 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
794 avr_accumulate_outgoing_args (void)
796 if (!cfun)
797 return TARGET_ACCUMULATE_OUTGOING_ARGS;
799 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
800 what offset is correct. In some cases it is relative to
801 virtual_outgoing_args_rtx and in others it is relative to
802 virtual_stack_vars_rtx. For example code see
803 gcc.c-torture/execute/built-in-setjmp.c
804 gcc.c-torture/execute/builtins/sprintf-chk.c */
806 return (TARGET_ACCUMULATE_OUTGOING_ARGS
807 && !(cfun->calls_setjmp
808 || cfun->has_nonlocal_label));
812 /* Report contribution of accumulated outgoing arguments to stack size. */
814 static inline int
815 avr_outgoing_args_size (void)
817 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
821 /* Implement `STARTING_FRAME_OFFSET'. */
822 /* This is the offset from the frame pointer register to the first stack slot
823 that contains a variable living in the frame. */
826 avr_starting_frame_offset (void)
828 return 1 + avr_outgoing_args_size ();
832 /* Return the number of hard registers to push/pop in the prologue/epilogue
833 of the current function, and optionally store these registers in SET. */
835 static int
836 avr_regs_to_save (HARD_REG_SET *set)
838 int reg, count;
839 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
841 if (set)
842 CLEAR_HARD_REG_SET (*set);
843 count = 0;
845 /* No need to save any registers if the function never returns or
846 has the "OS_task" or "OS_main" attribute. */
848 if (TREE_THIS_VOLATILE (current_function_decl)
849 || cfun->machine->is_OS_task
850 || cfun->machine->is_OS_main)
851 return 0;
853 for (reg = 0; reg < 32; reg++)
855 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
856 any global register variables. */
858 if (fixed_regs[reg])
859 continue;
861 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
862 || (df_regs_ever_live_p (reg)
863 && (int_or_sig_p || !call_used_regs[reg])
864 /* Don't record frame pointer registers here. They are treated
865 indivitually in prologue. */
866 && !(frame_pointer_needed
867 && (reg == REG_Y || reg == (REG_Y+1)))))
869 if (set)
870 SET_HARD_REG_BIT (*set, reg);
871 count++;
874 return count;
878 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
880 static bool
881 avr_allocate_stack_slots_for_args (void)
883 return !cfun->machine->is_naked;
887 /* Return true if register FROM can be eliminated via register TO. */
889 static bool
890 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
892 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
893 || !frame_pointer_needed);
897 /* Implement `TARGET_WARN_FUNC_RETURN'. */
899 static bool
900 avr_warn_func_return (tree decl)
902 /* Naked functions are implemented entirely in assembly, including the
903 return sequence, so suppress warnings about this. */
905 return !avr_naked_function_p (decl);
908 /* Compute offset between arg_pointer and frame_pointer. */
911 avr_initial_elimination_offset (int from, int to)
913 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
914 return 0;
915 else
917 int offset = frame_pointer_needed ? 2 : 0;
918 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
920 offset += avr_regs_to_save (NULL);
921 return (get_frame_size () + avr_outgoing_args_size()
922 + avr_pc_size + 1 + offset);
927 /* Helper for the function below. */
929 static void
930 avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
932 *node = make_node (FIXED_POINT_TYPE);
933 TYPE_SATURATING (*node) = sat_p;
934 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
935 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
936 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
937 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
938 TYPE_ALIGN (*node) = 8;
939 SET_TYPE_MODE (*node, mode);
941 layout_type (*node);
945 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
947 static tree
948 avr_build_builtin_va_list (void)
950 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
951 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
952 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
953 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
954 to the long long accum modes instead of the desired [U]TAmode.
956 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
957 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
958 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
959 libgcc to detect IBIT and FBIT. */
961 avr_adjust_type_node (&ta_type_node, TAmode, 0);
962 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
963 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
964 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
966 unsigned_long_long_accum_type_node = uta_type_node;
967 long_long_accum_type_node = ta_type_node;
968 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
969 sat_long_long_accum_type_node = sat_ta_type_node;
971 /* Dispatch to the default handler. */
973 return std_build_builtin_va_list ();
977 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
978 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
979 frame pointer by +STARTING_FRAME_OFFSET.
980 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
981 avoids creating add/sub of offset in nonlocal goto and setjmp. */
983 static rtx
984 avr_builtin_setjmp_frame_value (void)
986 rtx xval = gen_reg_rtx (Pmode);
987 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
988 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
989 return xval;
993 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
994 This is return address of function. */
997 avr_return_addr_rtx (int count, rtx tem)
999 rtx r;
1001 /* Can only return this function's return address. Others not supported. */
1002 if (count)
1003 return NULL;
1005 if (AVR_3_BYTE_PC)
1007 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
1008 warning (0, "%<builtin_return_address%> contains only 2 bytes"
1009 " of address");
1011 else
1012 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
1014 r = gen_rtx_PLUS (Pmode, tem, r);
1015 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
1016 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
1017 return r;
1020 /* Return 1 if the function epilogue is just a single "ret". */
1023 avr_simple_epilogue (void)
1025 return (! frame_pointer_needed
1026 && get_frame_size () == 0
1027 && avr_outgoing_args_size() == 0
1028 && avr_regs_to_save (NULL) == 0
1029 && ! cfun->machine->is_interrupt
1030 && ! cfun->machine->is_signal
1031 && ! cfun->machine->is_naked
1032 && ! TREE_THIS_VOLATILE (current_function_decl));
1035 /* This function checks sequence of live registers. */
1037 static int
1038 sequent_regs_live (void)
1040 int reg;
1041 int live_seq = 0;
1042 int cur_seq = 0;
1044 for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
1046 if (fixed_regs[reg])
1048 /* Don't recognize sequences that contain global register
1049 variables. */
1051 if (live_seq != 0)
1052 return 0;
1053 else
1054 continue;
1057 if (!call_used_regs[reg])
1059 if (df_regs_ever_live_p (reg))
1061 ++live_seq;
1062 ++cur_seq;
1064 else
1065 cur_seq = 0;
1069 if (!frame_pointer_needed)
1071 if (df_regs_ever_live_p (REG_Y))
1073 ++live_seq;
1074 ++cur_seq;
1076 else
1077 cur_seq = 0;
1079 if (df_regs_ever_live_p (REG_Y+1))
1081 ++live_seq;
1082 ++cur_seq;
1084 else
1085 cur_seq = 0;
1087 else
1089 cur_seq += 2;
1090 live_seq += 2;
1092 return (cur_seq == live_seq) ? live_seq : 0;
1095 /* Obtain the length sequence of insns. */
1098 get_sequence_length (rtx_insn *insns)
1100 rtx_insn *insn;
1101 int length;
1103 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
1104 length += get_attr_length (insn);
1106 return length;
1110 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1113 avr_incoming_return_addr_rtx (void)
1115 /* The return address is at the top of the stack. Note that the push
1116 was via post-decrement, which means the actual address is off by one. */
1117 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1120 /* Helper for expand_prologue. Emit a push of a byte register. */
1122 static void
1123 emit_push_byte (unsigned regno, bool frame_related_p)
1125 rtx mem, reg;
1126 rtx_insn *insn;
1128 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1129 mem = gen_frame_mem (QImode, mem);
1130 reg = gen_rtx_REG (QImode, regno);
1132 insn = emit_insn (gen_rtx_SET (mem, reg));
1133 if (frame_related_p)
1134 RTX_FRAME_RELATED_P (insn) = 1;
1136 cfun->machine->stack_usage++;
1140 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1141 SFR is a MEM representing the memory location of the SFR.
1142 If CLR_P then clear the SFR after the push using zero_reg. */
1144 static void
1145 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1147 rtx_insn *insn;
1149 gcc_assert (MEM_P (sfr));
1151 /* IN __tmp_reg__, IO(SFR) */
1152 insn = emit_move_insn (tmp_reg_rtx, sfr);
1153 if (frame_related_p)
1154 RTX_FRAME_RELATED_P (insn) = 1;
1156 /* PUSH __tmp_reg__ */
1157 emit_push_byte (AVR_TMP_REGNO, frame_related_p);
1159 if (clr_p)
1161 /* OUT IO(SFR), __zero_reg__ */
1162 insn = emit_move_insn (sfr, const0_rtx);
1163 if (frame_related_p)
1164 RTX_FRAME_RELATED_P (insn) = 1;
/* Helper for avr_expand_prologue.  Save the registers in SET and set up
   a frame of SIZE bytes, either via the out-of-line prologue_saves helper
   (-mcall-prologues, when profitable) or via explicit pushes plus a stack
   adjustment.  Also emits the REG_CFA_* notes needed by dwarf2out.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* The out-of-line saves routine is only usable for plain functions
     whose frame fits the stack-pointer mode.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* The prologue_saves helper expects the frame size in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push all live registers individually.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer.  These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************  Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
/* Output function prologue.  */
/* Expand the RTL prologue: for ISRs, save SREG / tmp / zero registers
   and the RAMP* SFRs before handing off to avr_prologue_setup_frame
   to save the call-used registers and allocate the frame.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (AVR_ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (AVR_TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
/* Output summary at end of function prologue: function kind, outgoing
   args size, frame size, stack size, and the .L__stack_usage symbol.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it. Add 1 to stack
     usage for offset so that SP + .L__stack_offset = return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
/* Implement `EPILOGUE_USES'.  */
/* Return nonzero if REGNO must be considered live at the start of the
   epilogue.  After reload, ISR epilogues use all registers they restore,
   so report every register live for interrupt/signal handlers.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;
  return 0;
}
/* Helper for avr_expand_epilogue.  Emit a pop of a byte register.
   Counterpart of emit_push_byte: AVR POP uses pre-increment addressing
   on the stack pointer.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (reg, mem));
}
/* Output RTL epilogue.  */
/* Expand the RTL epilogue, mirroring avr_expand_prologue: deallocate the
   frame (via the shortest of two methods), pop saved registers, and for
   ISRs restore the RAMP* SFRs, SREG, tmp and zero registers.  If
   SIBCALL_P, omit the final return insn.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      /* The out-of-line epilogue_restores routine pops everything.  */
      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /**********  Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /**********  Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************  Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (AVR_TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (AVR_TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (AVR_ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Emit a marker comment into the assembly output at the epilogue start.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
/* Return true if jump insns may no longer be changed for this function.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked Functions must not have any instructions after
     their epilogue, see PR42240 */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}
/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */

static bool
avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
{
  /* FIXME: Non-generic addresses are not mode-dependent in themselves.
     This hook just serves to hack around PR rtl-optimization/52543 by
     claiming that non-generic addresses were mode-dependent so that
     lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
     RTXes to probe SET and MEM costs and assumes that MEM is always in the
     generic address space which is not true.  */

  return !ADDR_SPACE_GENERIC_P (as);
}
/* Helper function for `avr_legitimate_address_p'.  */
/* Return true if REG is a register usable as a base in address space AS
   under OUTER_CODE.  In non-strict mode any pseudo register is accepted.  */

static inline bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
                                                 as, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */
/* Accepted forms: constant addresses, a base register (with a strict-mode
   restriction on X for wide modes), POST_INC/PRE_DEC of a base register,
   and base + non-negative constant displacement within MAX_LD_OFFSET.
   On AVR_TINY constant addresses are further restricted to the range
   reachable by IN/OUT and LDS/STS.  */

static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* X (r26/27) has no displacement addressing; reject it for wide
         accesses in strict mode.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Large frame-pointer offsets are fixed up later.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = (CONST_INT_P (x)
            && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  Reg+reg sums and
   reg+big-offset sums (except off the frame pointer) are forced into
   a fresh register; everything else is returned unchanged.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.
   Returns X when a reload was pushed for it, NULL_RTX to let the generic
   reload machinery handle the address.  */

rtx
avr_legitimize_reload_address (rtx *px, machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  /* NOTE(review): the "1 &&" is a vestigial always-true conjunct.  */
  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}
/* Implement `TARGET_SECONDARY_RELOAD' */
/* Loads from the non-generic 16-bit address spaces need a d-class scratch
   register; select the mode-specific reload_in* pattern for them.  No
   secondary reload register class is ever required, hence NO_REGS.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
2108 /* Helper function to print assembler resp. track instruction
2109 sequence lengths. Always return "".
2111 If PLEN == NULL:
2112 Output assembler code from template TPL with operands supplied
2113 by OPERANDS. This is just forwarding to output_asm_insn.
2115 If PLEN != NULL:
2116 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2117 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2118 Don't output anything.
2121 static const char*
2122 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2124 if (NULL == plen)
2126 output_asm_insn (tpl, operands);
2128 else
2130 if (n_words < 0)
2131 *plen = -n_words;
2132 else
2133 *plen += n_words;
2136 return "";
2140 /* Return a pointer register name as a string. */
2142 static const char*
2143 ptrreg_to_str (int regno)
2145 switch (regno)
2147 case REG_X: return "X";
2148 case REG_Y: return "Y";
2149 case REG_Z: return "Z";
2150 default:
2151 output_operand_lossage ("address operand requires constraint for"
2152 " X, Y, or Z register");
2154 return NULL;
2157 /* Return the condition name as a string.
2158 Used in conditional jump constructing */
2160 static const char*
2161 cond_string (enum rtx_code code)
2163 switch (code)
2165 case NE:
2166 return "ne";
2167 case EQ:
2168 return "eq";
2169 case GE:
2170 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2171 return "pl";
2172 else
2173 return "ge";
2174 case LT:
2175 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2176 return "mi";
2177 else
2178 return "lt";
2179 case GEU:
2180 return "sh";
2181 case LTU:
2182 return "lo";
2183 default:
2184 gcc_unreachable ();
2187 return "";
2191 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2192 /* Output ADDR to FILE as address. */
2194 static void
2195 avr_print_operand_address (FILE *file, rtx addr)
2197 switch (GET_CODE (addr))
2199 case REG:
2200 fprintf (file, ptrreg_to_str (REGNO (addr)));
2201 break;
2203 case PRE_DEC:
2204 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2205 break;
2207 case POST_INC:
2208 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2209 break;
2211 default:
2212 if (CONSTANT_ADDRESS_P (addr)
2213 && text_segment_operand (addr, VOIDmode))
2215 rtx x = addr;
2216 if (GET_CODE (x) == CONST)
2217 x = XEXP (x, 0);
2218 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2220 /* Assembler gs() will implant word address. Make offset
2221 a byte offset inside gs() for assembler. This is
2222 needed because the more logical (constant+gs(sym)) is not
2223 accepted by gas. For 128K and smaller devices this is ok.
2224 For large devices it will create a trampoline to offset
2225 from symbol which may not be what the user really wanted. */
2227 fprintf (file, "gs(");
2228 output_addr_const (file, XEXP (x,0));
2229 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2230 2 * INTVAL (XEXP (x, 1)));
2231 if (AVR_3_BYTE_PC)
2232 if (warning (0, "pointer offset from symbol maybe incorrect"))
2234 output_addr_const (stderr, addr);
2235 fprintf(stderr,"\n");
2238 else
2240 fprintf (file, "gs(");
2241 output_addr_const (file, addr);
2242 fprintf (file, ")");
2245 else
2246 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Only the punctuation codes '~' and '!' are valid for this target.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.
   CODE selects how X is rendered: register names (optionally offset by
   A..D / E,F / I,J byte selectors), I/O addresses, bit positions for
   %t/%T, memory operand pieces, fixed-point / float constants, and
   condition names for %j/%k.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Byte offsets selected by the operand code families A-D, E/F, I/J.  */
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      /* Devices without JMP/CALL use the relative RJMP/RCALL forms.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T records a register operand, the following %t/%T prints the
         register holding the given bit plus (for %T) the bit position.
         State is kept across the two calls in function-local statics.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      rtx op = XEXP(x, 0);
      /* NOTE(review): reg_names[] entry used directly as printf format;
         register names contain no '%' so this is benign, but an explicit
         "%s" format would be safer -- consider fixing.  */
      fprintf (file, reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      rtx op = XEXP(XEXP(x, 0), 0);
      /* NOTE(review): same non-literal format pattern as above.  */
      fprintf (file, reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        fprintf (file, "%d", (int) REGNO (x));
      else
        /* NOTE(review): same non-literal format pattern as above.  */
        fprintf (file, reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* Well-known SFRs are printed by their symbolic names.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Generic I/O address: print relative to the SFR offset.  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          /* Recurse to print the address as an I/O address.  */
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error. Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
        avr_print_operand_address
          (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error. Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
2469 /* Worker function for `NOTICE_UPDATE_CC'. */
2470 /* Update the condition code in the INSN. */
/* Two stages: first, map insn-specific attribute values (CC_PLUS, CC_LDI)
   that depend on the insn's operands onto generic CC_* values; second,
   update the global cc_status accordingly.  */
2472 void
2473 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
2475 rtx set;
2476 enum attr_cc cc = get_attr_cc (insn);
2478 switch (cc)
2480 default:
2481 break;
/* These two CC attribute values need the insn's operands to decide
   the actual effect on the condition code.  */
2483 case CC_PLUS:
2484 case CC_LDI:
2486 rtx *op = recog_data.operand;
2487 int len_dummy, icc;
2489 /* Extract insn's operands. */
2490 extract_constrain_insn_cached (insn);
2492 switch (cc)
2494 default:
2495 gcc_unreachable();
2497 case CC_PLUS:
/* Let the addition output worker compute the real CC effect;
   the generated length is not needed here.  */
2498 avr_out_plus (insn, op, &len_dummy, &icc);
2499 cc = (enum attr_cc) icc;
2500 break;
2502 case CC_LDI:
2504 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2505 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2506 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2507 ? CC_CLOBBER
2508 /* Any other "r,rL" combination does not alter cc0. */
2509 : CC_NONE;
2511 break;
2512 } /* inner switch */
2514 break;
2516 } /* outer switch */
/* At this point CC is one of the generic values; update cc_status.  */
2518 switch (cc)
2520 default:
2521 /* Special values like CC_OUT_PLUS from above have been
2522 mapped to "standard" CC_* values so we never come here. */
2524 gcc_unreachable();
2525 break;
2527 case CC_NONE:
2528 /* Insn does not affect CC at all. */
2529 break;
2531 case CC_SET_N:
2532 CC_STATUS_INIT;
2533 break;
2535 case CC_SET_ZN:
2536 set = single_set (insn);
2537 CC_STATUS_INIT;
2538 if (set)
2540 cc_status.flags |= CC_NO_OVERFLOW;
2541 cc_status.value1 = SET_DEST (set);
2543 break;
2545 case CC_SET_VZN:
2546 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2547 of this combination, cf. also PR61055. */
2548 CC_STATUS_INIT;
2549 break;
2551 case CC_SET_CZN:
2552 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2553 The V flag may or may not be known but that's ok because
2554 alter_cond will change tests to use EQ/NE. */
2555 set = single_set (insn);
2556 CC_STATUS_INIT;
2557 if (set)
2559 cc_status.value1 = SET_DEST (set);
2560 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2562 break;
2564 case CC_COMPARE:
2565 set = single_set (insn);
2566 CC_STATUS_INIT;
2567 if (set)
2568 cc_status.value1 = SET_SRC (set);
2569 break;
2571 case CC_CLOBBER:
2572 /* Insn doesn't leave CC in a usable state. */
2573 CC_STATUS_INIT;
2574 break;
2578 /* Choose mode for jump insn:
2579 1 - relative jump in range -63 <= x <= 62 ;
2580 2 - relative jump in range -2046 <= x <= 2045 ;
2581 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly wrapped in a LABEL_REF); distances are
   computed from the shorten-branches pass via INSN_ADDRESSES.  */
2584 avr_jump_mode (rtx x, rtx_insn *insn)
2586 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2587 ? XEXP (x, 0) : x));
2588 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2589 int jump_distance = cur_addr - dest_addr;
2591 if (-63 <= jump_distance && jump_distance <= 62)
2592 return 1;
2593 else if (-2046 <= jump_distance && jump_distance <= 2045)
2594 return 2;
2595 else if (AVR_HAVE_JMP_CALL)
2596 return 3;
/* No JMP/CALL on this device: fall back to RJMP and rely on
   address-space wrap-around.  */
2598 return 2;
2601 /* Return an AVR condition jump commands.
2602 X is a comparison RTX.
2603 LEN is a number returned by avr_jump_mode function.
2604 If REVERSE nonzero then condition code in X must be reversed. */
/* GT/GTU/LE/LEU have no single AVR branch mnemonic, so they are built
   from BREQ plus a signed/unsigned relational branch.  When the previous
   insn left the V flag unusable (CC_OVERFLOW_UNUSABLE), BRPL/BRMI (plain
   sign tests) are used instead of BRGE/BRLT, which rely on S = N^V.  */
2606 const char*
2607 ret_cond_branch (rtx x, int len, int reverse)
2609 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2611 switch (cond)
2613 case GT:
2614 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2615 return (len == 1 ? ("breq .+2" CR_TAB
2616 "brpl %0") :
2617 len == 2 ? ("breq .+4" CR_TAB
2618 "brmi .+2" CR_TAB
2619 "rjmp %0") :
2620 ("breq .+6" CR_TAB
2621 "brmi .+4" CR_TAB
2622 "jmp %0"));
2624 else
2625 return (len == 1 ? ("breq .+2" CR_TAB
2626 "brge %0") :
2627 len == 2 ? ("breq .+4" CR_TAB
2628 "brlt .+2" CR_TAB
2629 "rjmp %0") :
2630 ("breq .+6" CR_TAB
2631 "brlt .+4" CR_TAB
2632 "jmp %0"));
2633 case GTU:
2634 return (len == 1 ? ("breq .+2" CR_TAB
2635 "brsh %0") :
2636 len == 2 ? ("breq .+4" CR_TAB
2637 "brlo .+2" CR_TAB
2638 "rjmp %0") :
2639 ("breq .+6" CR_TAB
2640 "brlo .+4" CR_TAB
2641 "jmp %0"));
2642 case LE:
2643 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2644 return (len == 1 ? ("breq %0" CR_TAB
2645 "brmi %0") :
2646 len == 2 ? ("breq .+2" CR_TAB
2647 "brpl .+2" CR_TAB
2648 "rjmp %0") :
2649 ("breq .+2" CR_TAB
2650 "brpl .+4" CR_TAB
2651 "jmp %0"));
2652 else
2653 return (len == 1 ? ("breq %0" CR_TAB
2654 "brlt %0") :
2655 len == 2 ? ("breq .+2" CR_TAB
2656 "brge .+2" CR_TAB
2657 "rjmp %0") :
2658 ("breq .+2" CR_TAB
2659 "brge .+4" CR_TAB
2660 "jmp %0"));
2661 case LEU:
2662 return (len == 1 ? ("breq %0" CR_TAB
2663 "brlo %0") :
2664 len == 2 ? ("breq .+2" CR_TAB
2665 "brsh .+2" CR_TAB
2666 "rjmp %0") :
2667 ("breq .+2" CR_TAB
2668 "brsh .+4" CR_TAB
2669 "jmp %0"));
2670 default:
/* All other conditions map directly to one branch mnemonic; %j emits
   the branch for operand 1's condition, %k its reverse.  */
2671 if (reverse)
2673 switch (len)
2675 case 1:
2676 return "br%k1 %0";
2677 case 2:
2678 return ("br%j1 .+2" CR_TAB
2679 "rjmp %0");
2680 default:
2681 return ("br%j1 .+4" CR_TAB
2682 "jmp %0");
2685 else
2687 switch (len)
2689 case 1:
2690 return "br%j1 %0";
2691 case 2:
2692 return ("br%k1 .+2" CR_TAB
2693 "rjmp %0");
2694 default:
2695 return ("br%k1 .+4" CR_TAB
2696 "jmp %0");
2700 return "";
2704 /* Worker function for `FINAL_PRESCAN_INSN'. */
2705 /* Output insn cost for next insn. */
2707 void
2708 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
2709 int num_operands ATTRIBUTE_UNUSED)
2711 if (avr_log.rtx_costs)
2713 rtx set = single_set (insn);
2715 if (set)
2716 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2717 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2718 else
2719 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2720 rtx_cost (PATTERN (insn), INSN, 0,
2721 optimize_insn_for_speed_p()));
2725 /* Return 0 if undefined, 1 if always true or always false. */
2728 avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
2730 unsigned int max = (mode == QImode ? 0xff :
2731 mode == HImode ? 0xffff :
2732 mode == PSImode ? 0xffffff :
2733 mode == SImode ? 0xffffffff : 0);
2734 if (max && op && CONST_INT_P (x))
2736 if (unsigned_condition (op) != op)
2737 max >>= 1;
2739 if (max != (INTVAL (x) & max)
2740 && INTVAL (x) != 0xff)
2741 return 1;
2743 return 0;
2747 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2748 /* Returns nonzero if REGNO is the number of a hard
2749 register in which function arguments are sometimes passed. */
2752 avr_function_arg_regno_p(int r)
2754 return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
2758 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2759 /* Initializing the variable cum for the state at the beginning
2760 of the argument list. */
2762 void
2763 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2764 tree fndecl ATTRIBUTE_UNUSED)
2766 cum->nregs = AVR_TINY ? 6 : 18;
2767 cum->regno = FIRST_CUM_REG;
2768 if (!libname && stdarg_p (fntype))
2769 cum->nregs = 0;
2771 /* Assume the calle may be tail called */
2773 cfun->machine->sibcall_fails = 0;
2776 /* Returns the number of registers to allocate for a function argument. */
2778 static int
2779 avr_num_arg_regs (machine_mode mode, const_tree type)
2781 int size;
2783 if (mode == BLKmode)
2784 size = int_size_in_bytes (type);
2785 else
2786 size = GET_MODE_SIZE (mode);
2788 /* Align all function arguments to start in even-numbered registers.
2789 Odd-sized arguments leave holes above them. */
2791 return (size + 1) & ~1;
2795 /* Implement `TARGET_FUNCTION_ARG'. */
2796 /* Controls whether a function argument is passed
2797 in a register, and which register. */
2799 static rtx
2800 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
2801 const_tree type, bool named ATTRIBUTE_UNUSED)
2803 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2804 int bytes = avr_num_arg_regs (mode, type);
2806 if (cum->nregs && bytes <= cum->nregs)
2807 return gen_rtx_REG (mode, cum->regno - bytes);
2809 return NULL_RTX;
2813 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2814 /* Update the summarizer variable CUM to advance past an argument
2815 in the argument list. */
2817 static void
2818 avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2819 const_tree type, bool named ATTRIBUTE_UNUSED)
2821 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2822 int bytes = avr_num_arg_regs (mode, type);
2824 cum->nregs -= bytes;
2825 cum->regno -= bytes;
2827 /* A parameter is being passed in a call-saved register. As the original
2828 contents of these regs has to be restored before leaving the function,
2829 a function must not pass arguments in call-saved regs in order to get
2830 tail-called. */
2832 if (cum->regno >= 8
2833 && cum->nregs >= 0
2834 && !call_used_regs[cum->regno])
2836 /* FIXME: We ship info on failing tail-call in struct machine_function.
2837 This uses internals of calls.c:expand_call() and the way args_so_far
2838 is used. targetm.function_ok_for_sibcall() needs to be extended to
2839 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2840 dependent so that such an extension is not wanted. */
2842 cfun->machine->sibcall_fails = 1;
2845 /* Test if all registers needed by the ABI are actually available. If the
2846 user has fixed a GPR needed to pass an argument, an (implicit) function
2847 call will clobber that fixed register. See PR45099 for an example. */
2849 if (cum->regno >= 8
2850 && cum->nregs >= 0)
2852 int regno;
2854 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2855 if (fixed_regs[regno])
2856 warning (0, "fixed register %s used to pass parameter to function",
2857 reg_names[regno]);
2860 if (cum->nregs <= 0)
2862 cum->nregs = 0;
2863 cum->regno = FIRST_CUM_REG;
2867 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2868 /* Decide whether we can make a sibling call to a function. DECL is the
2869 declaration of the function being targeted by the call and EXP is the
2870 CALL_EXPR representing the call. */
2872 static bool
2873 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2875 tree fntype_callee;
2877 /* Tail-calling must fail if callee-saved regs are used to pass
2878 function args. We must not tail-call when `epilogue_restores'
2879 is used. Unfortunately, we cannot tell at this point if that
2880 actually will happen or not, and we cannot step back from
2881 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2883 if (cfun->machine->sibcall_fails
2884 || TARGET_CALL_PROLOGUES)
2886 return false;
2889 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2891 if (decl_callee)
2893 decl_callee = TREE_TYPE (decl_callee);
2895 else
2897 decl_callee = fntype_callee;
/* No declaration available: strip pointer/reference levels until the
   FUNCTION_TYPE or METHOD_TYPE itself is reached.  */
2899 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2900 && METHOD_TYPE != TREE_CODE (decl_callee))
2902 decl_callee = TREE_TYPE (decl_callee);
2906 /* Ensure that caller and callee have compatible epilogues */
2908 if (cfun->machine->is_interrupt
2909 || cfun->machine->is_signal
2910 || cfun->machine->is_naked
2911 || avr_naked_function_p (decl_callee)
2912 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2913 || (avr_OS_task_function_p (decl_callee)
2914 != cfun->machine->is_OS_task)
2915 || (avr_OS_main_function_p (decl_callee)
2916 != cfun->machine->is_OS_main))
2918 return false;
2921 return true;
2924 /***********************************************************************
2925 Functions for outputting various mov's for a various modes
2926 ************************************************************************/
2928 /* Return true if a value of mode MODE is read from flash by
2929 __load_* function from libgcc. */
2931 bool
2932 avr_load_libgcc_p (rtx op)
2934 machine_mode mode = GET_MODE (op);
2935 int n_bytes = GET_MODE_SIZE (mode);
2937 return (n_bytes > 2
2938 && !AVR_HAVE_LPMX
2939 && avr_mem_flash_p (op));
2942 /* Return true if a value of mode MODE is read by __xload_* function. */
2944 bool
2945 avr_xload_libgcc_p (machine_mode mode)
2947 int n_bytes = GET_MODE_SIZE (mode);
2949 return (n_bytes > 1
2950 || avr_n_flash > 1);
2954 /* Fixme: This is a hack because secondary reloads don't work as expected.
2956 Find an unused d-register to be used as scratch in INSN.
2957 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2958 is a register, skip all possible return values that overlap EXCLUDE.
2959 The policy for the returned register is similar to that of
2960 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2961 of INSN.
2963 Return a QImode d-register or NULL_RTX if nothing found. */
2965 static rtx
2966 avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
2968 int regno;
2969 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2970 || avr_signal_function_p (current_function_decl))
/* Candidates are the d-registers R16..R31.  */
2972 for (regno = 16; regno < 32; regno++)
2974 rtx reg = all_regs_rtx[regno];
/* Skip registers overlapping EXCLUDE and user-fixed registers.  */
2976 if ((exclude
2977 && reg_overlap_mentioned_p (exclude, reg))
2978 || fixed_regs[regno])
2980 continue;
2983 /* Try non-live register */
/* Call-used regs are free in ordinary functions; in ISRs they are not,
   unless the function never returns or is OS_task / OS_main.  */
2985 if (!df_regs_ever_live_p (regno)
2986 && (TREE_THIS_VOLATILE (current_function_decl)
2987 || cfun->machine->is_OS_task
2988 || cfun->machine->is_OS_main
2989 || (!isr_p && call_used_regs[regno])))
2991 return reg;
2994 /* Any live register can be used if it is unused after.
2995 Prologue/epilogue will care for it as needed. */
2997 if (df_regs_ever_live_p (regno)
2998 && reg_unused_after (insn, reg))
3000 return reg;
3004 return NULL_RTX;
3008 /* Helper function for the next function in the case where only restricted
3009 version of LPM instruction is available. */
/* Plain LPM always reads into R0 (xop[3]); each byte is read via LPM and
   then moved to its destination register, advancing Z in between.  */
3011 static const char*
3012 avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
3014 rtx dest = xop[0];
3015 rtx addr = xop[1];
3016 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3017 int regno_dest;
3019 regno_dest = REGNO (dest);
3021 /* The implicit target register of LPM. */
3022 xop[3] = lpm_reg_rtx;
3024 switch (GET_CODE (addr))
3026 default:
3027 gcc_unreachable();
3029 case REG:
3031 gcc_assert (REG_Z == REGNO (addr));
3033 switch (n_bytes)
3035 default:
3036 gcc_unreachable();
3038 case 1:
3039 avr_asm_len ("%4lpm", xop, plen, 1);
3041 if (regno_dest != LPM_REGNO)
3042 avr_asm_len ("mov %0,%3", xop, plen, 1);
3044 return "";
3046 case 2:
/* Destination overlaps Z: buffer the first byte on the stack so the
   address is not clobbered before the second read.  */
3047 if (REGNO (dest) == REG_Z)
3048 return avr_asm_len ("%4lpm" CR_TAB
3049 "push %3" CR_TAB
3050 "adiw %2,1" CR_TAB
3051 "%4lpm" CR_TAB
3052 "mov %B0,%3" CR_TAB
3053 "pop %A0", xop, plen, 6);
3055 avr_asm_len ("%4lpm" CR_TAB
3056 "mov %A0,%3" CR_TAB
3057 "adiw %2,1" CR_TAB
3058 "%4lpm" CR_TAB
3059 "mov %B0,%3", xop, plen, 5);
/* Restore Z only if a later insn still needs its original value.  */
3061 if (!reg_unused_after (insn, addr))
3062 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3064 break; /* 2 */
3067 break; /* REG */
3069 case POST_INC:
3071 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3072 && n_bytes <= 4);
3074 if (regno_dest == LPM_REGNO)
3075 avr_asm_len ("%4lpm" CR_TAB
3076 "adiw %2,1", xop, plen, 2);
3077 else
3078 avr_asm_len ("%4lpm" CR_TAB
3079 "mov %A0,%3" CR_TAB
3080 "adiw %2,1", xop, plen, 3);
3082 if (n_bytes >= 2)
3083 avr_asm_len ("%4lpm" CR_TAB
3084 "mov %B0,%3" CR_TAB
3085 "adiw %2,1", xop, plen, 3);
3087 if (n_bytes >= 3)
3088 avr_asm_len ("%4lpm" CR_TAB
3089 "mov %C0,%3" CR_TAB
3090 "adiw %2,1", xop, plen, 3);
3092 if (n_bytes >= 4)
3093 avr_asm_len ("%4lpm" CR_TAB
3094 "mov %D0,%3" CR_TAB
3095 "adiw %2,1", xop, plen, 3);
3097 break; /* POST_INC */
3099 } /* switch CODE (addr) */
3101 return "";
3105 /* If PLEN == NULL: Output instructions to load a value from a memory location
3106 OP[1] in AS1 to register OP[0].
3107 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3108 Return "". */
/* Operand plan: xop[0] = dest reg, xop[1] = source address, xop[2] = Z,
   xop[3] = scratch d-reg (if found), xop[4] = "" or "e" ([E]LPM prefix),
   xop[5] = tmp reg R0, xop[6] = RAMPZ address.  */
3110 const char*
3111 avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
3113 rtx xop[7];
3114 rtx dest = op[0];
3115 rtx src = SET_SRC (single_set (insn));
3116 rtx addr;
3117 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3118 int segment;
3119 RTX_CODE code;
3120 addr_space_t as = MEM_ADDR_SPACE (src);
3122 if (plen)
3123 *plen = 0;
/* Flash address spaces are read-only; stores are diagnosed, not emitted.  */
3125 if (MEM_P (dest))
3127 warning (0, "writing to address space %qs not supported",
3128 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
3130 return "";
3133 addr = XEXP (src, 0);
3134 code = GET_CODE (addr);
3136 gcc_assert (REG_P (dest));
3137 gcc_assert (REG == code || POST_INC == code);
3139 xop[0] = dest;
3140 xop[1] = addr;
3141 xop[2] = lpm_addr_reg_rtx;
3142 xop[4] = xstring_empty;
3143 xop[5] = tmp_reg_rtx;
3144 xop[6] = XEXP (rampz_rtx, 0);
3146 segment = avr_addrspace[as].segment;
3148 /* Set RAMPZ as needed. */
/* A non-zero flash segment requires ELPM with RAMPZ holding the
   segment number.  Three ways to load RAMPZ, cheapest first:
   a free d-reg for LDI, CLR/INC for segment 1, or save/restore ZL.  */
3150 if (segment)
3152 xop[4] = GEN_INT (segment);
3153 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
3155 if (xop[3] != NULL_RTX)
3157 avr_asm_len ("ldi %3,%4" CR_TAB
3158 "out %i6,%3", xop, plen, 2);
3160 else if (segment == 1)
3162 avr_asm_len ("clr %5" CR_TAB
3163 "inc %5" CR_TAB
3164 "out %i6,%5", xop, plen, 3);
3166 else
3168 avr_asm_len ("mov %5,%2" CR_TAB
3169 "ldi %2,%4" CR_TAB
3170 "out %i6,%2" CR_TAB
3171 "mov %2,%5", xop, plen, 4);
3174 xop[4] = xstring_e;
/* NOTE: this branch is inside the `if (segment)' block above (the brace
   was lost in this listing); the `else if' below pairs with `if (segment)'.
   Fall back to the restricted-LPM helper when the needed [E]LPMX form is
   missing.  */
3176 if (!AVR_HAVE_ELPMX)
3177 return avr_out_lpm_no_lpmx (insn, xop, plen);
3179 else if (!AVR_HAVE_LPMX)
3181 return avr_out_lpm_no_lpmx (insn, xop, plen);
3184 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3186 switch (GET_CODE (addr))
3188 default:
3189 gcc_unreachable();
3191 case REG:
3193 gcc_assert (REG_Z == REGNO (addr));
3195 switch (n_bytes)
3197 default:
3198 gcc_unreachable();
3200 case 1:
3201 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3203 case 2:
/* Destination overlaps Z: read through R0 to avoid clobbering the
   address mid-sequence.  */
3204 if (REGNO (dest) == REG_Z)
3205 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3206 "%4lpm %B0,%a2" CR_TAB
3207 "mov %A0,%5", xop, plen, 3);
3208 else
3210 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3211 "%4lpm %B0,%a2", xop, plen, 2);
/* Restore Z only if a later insn still needs its original value.  */
3213 if (!reg_unused_after (insn, addr))
3214 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3217 break; /* 2 */
3219 case 3:
3221 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3222 "%4lpm %B0,%a2+" CR_TAB
3223 "%4lpm %C0,%a2", xop, plen, 3);
3225 if (!reg_unused_after (insn, addr))
3226 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3228 break; /* 3 */
3230 case 4:
3232 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3233 "%4lpm %B0,%a2+", xop, plen, 2);
/* Upper word would land in Z itself: route byte C through R0.  */
3235 if (REGNO (dest) == REG_Z - 2)
3236 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3237 "%4lpm %C0,%a2" CR_TAB
3238 "mov %D0,%5", xop, plen, 3);
3239 else
3241 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3242 "%4lpm %D0,%a2", xop, plen, 2);
3244 if (!reg_unused_after (insn, addr))
3245 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3248 break; /* 4 */
3249 } /* n_bytes */
3251 break; /* REG */
3253 case POST_INC:
3255 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3256 && n_bytes <= 4);
3258 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3259 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3260 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3261 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3263 break; /* POST_INC */
3265 } /* switch CODE (addr) */
3267 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3269 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3271 xop[0] = zero_reg_rtx;
3272 avr_asm_len ("out %i6,%0", xop, plen, 1);
3275 return "";
3279 /* Worker function for xload_8 insn. */
/* Read one byte from a 24-bit generic address: first via LPM (flash),
   then conditionally overwrite with an `ld' from RAM.  NOTE(review):
   the `sbrc %1,7' test presumably uses bit 7 of the high address byte
   to select RAM vs. flash -- confirm against the xload_8 insn pattern.  */
3281 const char*
3282 avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3284 rtx xop[4];
3286 xop[0] = op[0];
3287 xop[1] = op[1];
3288 xop[2] = lpm_addr_reg_rtx;
/* Without LPMX the read goes through R0 and is moved afterwards.  */
3289 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3291 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3293 avr_asm_len ("sbrc %1,7" CR_TAB
3294 "ld %3,%a2", xop, plen, 2);
3296 if (REGNO (xop[0]) != REGNO (xop[3]))
3297 avr_asm_len ("mov %0,%3", xop, plen, 1);
3299 return "";
/* Output the assembler for a QImode (1-byte) move INSN.  Dispatches on
   the operand kinds: flash reads, reg-reg (incl. stack pointer), constant
   loads, and RAM loads/stores.  If PLEN is non-NULL, only count length.  */
3303 const char*
3304 output_movqi (rtx_insn *insn, rtx operands[], int *plen)
3306 rtx dest = operands[0];
3307 rtx src = operands[1];
/* Reads from program memory go through the LPM worker.  */
3309 if (avr_mem_flash_p (src)
3310 || avr_mem_flash_p (dest))
3312 return avr_out_lpm (insn, operands, plen);
3315 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3317 if (REG_P (dest))
3319 if (REG_P (src)) /* mov r,r */
/* Moves to/from the stack pointer use OUT/IN on the SP I/O register.  */
3321 if (test_hard_reg_class (STACK_REG, dest))
3322 return avr_asm_len ("out %0,%1", operands, plen, -1);
3323 else if (test_hard_reg_class (STACK_REG, src))
3324 return avr_asm_len ("in %0,%1", operands, plen, -1);
3326 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3328 else if (CONSTANT_P (src))
3330 output_reload_in_const (operands, NULL_RTX, plen, false);
3331 return "";
3333 else if (MEM_P (src))
3334 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3336 else if (MEM_P (dest))
3338 rtx xop[2];
3340 xop[0] = dest;
/* Storing 0 uses the fixed zero register instead of a constant.  */
3341 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3343 return out_movqi_mr_r (insn, xop, plen);
3346 return "";
/* Output the assembler for a HImode (2-byte) move INSN.  Same dispatch
   structure as output_movqi, with special care for the 16-bit stack
   pointer, which must be updated atomically w.r.t. interrupts.  */
3350 const char *
3351 output_movhi (rtx_insn *insn, rtx xop[], int *plen)
3353 rtx dest = xop[0];
3354 rtx src = xop[1];
3356 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3358 if (avr_mem_flash_p (src)
3359 || avr_mem_flash_p (dest))
3361 return avr_out_lpm (insn, xop, plen);
/* Redundant with the assert above; kept as-is from upstream.  */
3364 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3366 if (REG_P (dest))
3368 if (REG_P (src)) /* mov r,r */
3370 if (test_hard_reg_class (STACK_REG, dest))
3372 if (AVR_HAVE_8BIT_SP)
3373 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
/* XMEGA guarantees atomic SP update after writing SPL.  */
3375 if (AVR_XMEGA)
3376 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3377 "out __SP_H__,%B1", xop, plen, -2);
3379 /* Use simple load of SP if no interrupts are used. */
/* Otherwise disable interrupts around the two writes so an IRQ
   cannot observe a half-updated stack pointer.  */
3381 return TARGET_NO_INTERRUPTS
3382 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3383 "out __SP_L__,%A1", xop, plen, -2)
3384 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3385 "cli" CR_TAB
3386 "out __SP_H__,%B1" CR_TAB
3387 "out __SREG__,__tmp_reg__" CR_TAB
3388 "out __SP_L__,%A1", xop, plen, -5);
3390 else if (test_hard_reg_class (STACK_REG, src))
/* Devices without SPH read 0 into the high byte.  */
3392 return !AVR_HAVE_SPH
3393 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3394 "clr %B0", xop, plen, -2)
3396 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3397 "in %B0,__SP_H__", xop, plen, -2);
3400 return AVR_HAVE_MOVW
3401 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3403 : avr_asm_len ("mov %A0,%A1" CR_TAB
3404 "mov %B0,%B1", xop, plen, -2);
3405 } /* REG_P (src) */
3406 else if (CONSTANT_P (src))
3408 return output_reload_inhi (xop, NULL, plen);
3410 else if (MEM_P (src))
3412 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3415 else if (MEM_P (dest))
3417 rtx xop[2];
3419 xop[0] = dest;
/* Storing 0 uses the fixed zero register instead of a constant.  */
3420 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3422 return out_movhi_mr_r (insn, xop, plen);
3425 fatal_insn ("invalid insn:", insn);
3427 return "";
3431 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
/* Emit ADIW/SBIW replacement sequences (TINY_ADIW/TINY_SBIW macros) to
   add the displacement to the base, load, and undo the addition when the
   base register is still live afterwards.  */
3433 static const char*
3434 avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
3436 rtx dest = op[0];
3437 rtx src = op[1];
3438 rtx x = XEXP (src, 0);
3440 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3441 "ld %0,%b1" , op, plen, -3);
/* No restore needed if the load clobbered the base or it is dead.  */
3443 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3444 && !reg_unused_after (insn, XEXP (x,0)))
3445 avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);
3447 return "";
/* Output a QImode load from memory OP[1] into register OP[0].  Handles
   constant addresses (IN/LDS), reg+disp addressing (LDD, with Y-register
   adjustment for large displacements), and plain register-indirect LD.  */
3450 static const char*
3451 out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3453 rtx dest = op[0];
3454 rtx src = op[1];
3455 rtx x = XEXP (src, 0);
3457 if (CONSTANT_ADDRESS_P (x))
/* I/O addresses use the 1-word IN; otherwise LDS (1 word on TINY).  */
3459 int n_words = AVR_TINY ? 1 : 2;
3460 return optimize > 0 && io_address_operand (x, QImode)
3461 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3462 : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
3465 if (GET_CODE (x) == PLUS
3466 && REG_P (XEXP (x, 0))
3467 && CONST_INT_P (XEXP (x, 1)))
3469 /* memory access by reg+disp */
3471 int disp = INTVAL (XEXP (x, 1));
3473 if (AVR_TINY)
3474 return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);
/* Displacement beyond LDD's 6-bit range: temporarily adjust Y.  */
3476 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3478 if (REGNO (XEXP (x, 0)) != REG_Y)
3479 fatal_insn ("incorrect insn:",insn);
3481 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3482 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3483 "ldd %0,Y+63" CR_TAB
3484 "sbiw r28,%o1-63", op, plen, -3);
3486 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3487 "sbci r29,hi8(-%o1)" CR_TAB
3488 "ld %0,Y" CR_TAB
3489 "subi r28,lo8(%o1)" CR_TAB
3490 "sbci r29,hi8(%o1)", op, plen, -5);
3492 else if (REGNO (XEXP (x, 0)) == REG_X)
3494 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3495 it but I have this situation with extremal optimizing options. */
3497 avr_asm_len ("adiw r26,%o1" CR_TAB
3498 "ld %0,X", op, plen, -2);
3500 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3501 && !reg_unused_after (insn, XEXP (x,0)))
3503 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3506 return "";
3509 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3512 return avr_asm_len ("ld %0,%1", op, plen, -1);
3516 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
/* Register-indirect HImode load on TINY: two LDs with explicit base
   adjustment; go through __tmp_reg__ when the destination overlaps the
   base register.  */
3518 static const char*
3519 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
3521 rtx dest = op[0];
3522 rtx src = op[1];
3523 rtx base = XEXP (src, 0);
3525 int reg_dest = true_regnum (dest);
3526 int reg_base = true_regnum (base);
3528 if (reg_dest == reg_base) /* R = (R) */
3529 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3530 "ld %B0,%1" CR_TAB
3531 "mov %A0,__tmp_reg__", op, plen, -3);
3533 return avr_asm_len ("ld %A0,%1" CR_TAB
3534 TINY_ADIW (%E1, %F1, 1) CR_TAB
3535 "ld %B0,%1" CR_TAB
3536 TINY_SBIW (%E1, %F1, 1), op, plen, -6);
3540 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
/* Reg+disp HImode load on TINY: add the displacement, load both bytes
   (through __tmp_reg__ if destination overlaps the base), and undo the
   adjustment when the base register survives.  */
3542 static const char*
3543 avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
3545 rtx dest = op[0];
3546 rtx src = op[1];
3547 rtx base = XEXP (src, 0);
3549 int reg_dest = true_regnum (dest);
3550 int reg_base = true_regnum (XEXP (base, 0));
3552 if (reg_base == reg_dest)
3554 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3555 "ld __tmp_reg__,%b1+" CR_TAB
3556 "ld %B0,%b1" CR_TAB
3557 "mov %A0,__tmp_reg__", op, plen, -5);
3559 else
3561 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3562 "ld %A0,%b1+" CR_TAB
3563 "ld %B0,%b1" CR_TAB
3564 TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
3569 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
/* Pre-decrement HImode load on TINY.  For volatile accesses the low byte
   must be read first, which needs explicit base adjustment.  */
3571 static const char*
3572 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
3574 int mem_volatile_p = 0;
3575 rtx dest = op[0];
3576 rtx src = op[1];
3577 rtx base = XEXP (src, 0);
3579 /* "volatile" forces reading low byte first, even if less efficient,
3580 for correct operation with 16-bit I/O registers. */
3581 mem_volatile_p = MEM_VOLATILE_P (src);
3583 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3584 fatal_insn ("incorrect insn:", insn);
/* Non-volatile: two pre-decrement LDs read high byte, then low byte.  */
3586 if (!mem_volatile_p)
3587 return avr_asm_len ("ld %B0,%1" CR_TAB
3588 "ld %A0,%1", op, plen, -2);
3590 return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
3591 "ld %A0,%p1+" CR_TAB
3592 "ld %B0,%p1" CR_TAB
3593 TINY_SBIW (%I1, %J1, 1), op, plen, -6);
/* Output a HImode load from memory OP[1] into register OP[0].  Handles
   register-indirect, reg+disp, pre-decrement, post-increment and constant
   addresses, with X-register (no LDD) and overlap special cases.  */
3597 static const char*
3598 out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3600 rtx dest = op[0];
3601 rtx src = op[1];
3602 rtx base = XEXP (src, 0);
3603 int reg_dest = true_regnum (dest);
3604 int reg_base = true_regnum (base);
3605 /* "volatile" forces reading low byte first, even if less efficient,
3606 for correct operation with 16-bit I/O registers. */
3607 int mem_volatile_p = MEM_VOLATILE_P (src);
3609 if (reg_base > 0)
3611 if (AVR_TINY)
3612 return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);
/* Destination overlaps the base: buffer the low byte in __tmp_reg__.  */
3614 if (reg_dest == reg_base) /* R = (R) */
3615 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3616 "ld %B0,%1" CR_TAB
3617 "mov %A0,__tmp_reg__", op, plen, -3);
3619 if (reg_base != REG_X)
3620 return avr_asm_len ("ld %A0,%1" CR_TAB
3621 "ldd %B0,%1+1", op, plen, -2);
/* X has no LDD form: post-increment and restore if X is still live.  */
3623 avr_asm_len ("ld %A0,X+" CR_TAB
3624 "ld %B0,X", op, plen, -2);
3626 if (!reg_unused_after (insn, base))
3627 avr_asm_len ("sbiw r26,1", op, plen, 1);
3629 return "";
3631 else if (GET_CODE (base) == PLUS) /* (R + i) */
3633 int disp = INTVAL (XEXP (base, 1));
3634 int reg_base = true_regnum (XEXP (base, 0));
3636 if (AVR_TINY)
3637 return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);
/* Displacement beyond LDD's range: temporarily adjust Y.  */
3639 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3641 if (REGNO (XEXP (base, 0)) != REG_Y)
3642 fatal_insn ("incorrect insn:",insn);
3644 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3645 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3646 "ldd %A0,Y+62" CR_TAB
3647 "ldd %B0,Y+63" CR_TAB
3648 "sbiw r28,%o1-62", op, plen, -4)
3650 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3651 "sbci r29,hi8(-%o1)" CR_TAB
3652 "ld %A0,Y" CR_TAB
3653 "ldd %B0,Y+1" CR_TAB
3654 "subi r28,lo8(%o1)" CR_TAB
3655 "sbci r29,hi8(%o1)", op, plen, -6);
3658 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3659 it but I have this situation with extremal
3660 optimization options. */
3662 if (reg_base == REG_X)
3663 return reg_base == reg_dest
3664 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3665 "ld __tmp_reg__,X+" CR_TAB
3666 "ld %B0,X" CR_TAB
3667 "mov %A0,__tmp_reg__", op, plen, -4)
3669 : avr_asm_len ("adiw r26,%o1" CR_TAB
3670 "ld %A0,X+" CR_TAB
3671 "ld %B0,X" CR_TAB
3672 "sbiw r26,%o1+1", op, plen, -4);
3674 return reg_base == reg_dest
3675 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3676 "ldd %B0,%B1" CR_TAB
3677 "mov %A0,__tmp_reg__", op, plen, -3)
3679 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3680 "ldd %B0,%B1", op, plen, -2);
3682 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3684 if (AVR_TINY)
3685 return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);
3687 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3688 fatal_insn ("incorrect insn:", insn);
3690 if (!mem_volatile_p)
3691 return avr_asm_len ("ld %B0,%1" CR_TAB
3692 "ld %A0,%1", op, plen, -2);
/* Volatile: adjust the base first so the low byte is read first.  */
3694 return REGNO (XEXP (base, 0)) == REG_X
3695 ? avr_asm_len ("sbiw r26,2" CR_TAB
3696 "ld %A0,X+" CR_TAB
3697 "ld %B0,X" CR_TAB
3698 "sbiw r26,1", op, plen, -4)
3700 : avr_asm_len ("sbiw %r1,2" CR_TAB
3701 "ld %A0,%p1" CR_TAB
3702 "ldd %B0,%p1+1", op, plen, -3);
3704 else if (GET_CODE (base) == POST_INC) /* (R++) */
3706 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3707 fatal_insn ("incorrect insn:", insn);
3709 return avr_asm_len ("ld %A0,%1" CR_TAB
3710 "ld %B0,%1", op, plen, -2);
3712 else if (CONSTANT_ADDRESS_P (base))
/* I/O addresses use IN; otherwise LDS (2 words on TINY, 4 else).  */
3714 int n_words = AVR_TINY ? 2 : 4;
3715 return optimize > 0 && io_address_operand (base, HImode)
3716 ? avr_asm_len ("in %A0,%i1" CR_TAB
3717 "in %B0,%i1+1", op, plen, -2)
3719 : avr_asm_len ("lds %A0,%m1" CR_TAB
3720 "lds %B0,%m1+1", op, plen, -n_words);
3723 fatal_insn ("unknown move insn:",insn);
3724 return "";
/* SImode load, register-indirect, for TINY (no ADIW/SBIW/LDD).  *l
   receives the instruction count.  The sequence differs depending on
   how the 4-byte destination overlaps the 2-byte base register.  */
3727 static const char*
3728 avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
3730 rtx dest = op[0];
3731 rtx src = op[1];
3732 rtx base = XEXP (src, 0);
3733 int reg_dest = true_regnum (dest);
3734 int reg_base = true_regnum (base);
3736 if (reg_dest == reg_base)
3738 /* "ld r26,-X" is undefined */
/* Load top-down and fill the base register pair last, via tmp.  */
3739 return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
3740 "ld %D0,%1" CR_TAB
3741 "ld %C0,-%1" CR_TAB
3742 "ld __tmp_reg__,-%1" CR_TAB
3743 TINY_SBIW (%E1, %F1, 1) CR_TAB
3744 "ld %A0,%1" CR_TAB
3745 "mov %B0,__tmp_reg__");
/* Base coincides with the high word of dest: buffer byte C in tmp.  */
3747 else if (reg_dest == reg_base - 2)
3749 return *l = 5, ("ld %A0,%1+" CR_TAB
3750 "ld %B0,%1+" CR_TAB
3751 "ld __tmp_reg__,%1+" CR_TAB
3752 "ld %D0,%1" CR_TAB
3753 "mov %C0,__tmp_reg__");
3755 else if (reg_unused_after (insn, base))
3757 return *l = 4, ("ld %A0,%1+" CR_TAB
3758 "ld %B0,%1+" CR_TAB
3759 "ld %C0,%1+" CR_TAB
3760 "ld %D0,%1");
3762 else
3764 return *l = 6, ("ld %A0,%1+" CR_TAB
3765 "ld %B0,%1+" CR_TAB
3766 "ld %C0,%1+" CR_TAB
3767 "ld %D0,%1" CR_TAB
3768 TINY_SBIW (%E1, %F1, 3));
/* SImode load, reg+disp, for TINY (no ADIW/SBIW/LDD).  *l receives the
   instruction count.  Same overlap cases as the no-disp variant, with an
   extra displacement add/undo around the loads.  */
3773 static const char*
3774 avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
3776 rtx dest = op[0];
3777 rtx src = op[1];
3778 rtx base = XEXP (src, 0);
3779 int reg_dest = true_regnum (dest);
3780 int reg_base = true_regnum (XEXP (base, 0));
3782 if (reg_dest == reg_base)
3784 /* "ld r26,-X" is undefined */
/* Load top-down and fill the base register pair last, via tmp.  */
3785 return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
3786 "ld %D0,%b1" CR_TAB
3787 "ld %C0,-%b1" CR_TAB
3788 "ld __tmp_reg__,-%b1" CR_TAB
3789 TINY_SBIW (%I1, %J1, 1) CR_TAB
3790 "ld %A0,%b1" CR_TAB
3791 "mov %B0,__tmp_reg__");
/* Base coincides with the high word of dest: buffer byte C in tmp.  */
3793 else if (reg_dest == reg_base - 2)
3795 return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3796 "ld %A0,%b1+" CR_TAB
3797 "ld %B0,%b1+" CR_TAB
3798 "ld __tmp_reg__,%b1+" CR_TAB
3799 "ld %D0,%b1" CR_TAB
3800 "mov %C0,__tmp_reg__");
3802 else if (reg_unused_after (insn, XEXP (base, 0)))
3804 return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3805 "ld %A0,%b1+" CR_TAB
3806 "ld %B0,%b1+" CR_TAB
3807 "ld %C0,%b1+" CR_TAB
3808 "ld %D0,%b1");
3810 else
3812 return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3813 "ld %A0,%b1+" CR_TAB
3814 "ld %B0,%b1+" CR_TAB
3815 "ld %C0,%b1+" CR_TAB
3816 "ld %D0,%b1" CR_TAB
3817 TINY_SBIW (%I1, %J1, %o1+3));
/* Output a 4-byte (SImode) load from memory into registers for all AVR
   cores.  OP[0] = destination REG, OP[1] = source MEM.  Dispatches on the
   address form: plain register, register + displacement, pre-decrement,
   post-increment, or constant address.  The instruction count goes through
   L (may be NULL) and the assembler template is returned.  */
3821 static const char*
3822 out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
3824 rtx dest = op[0];
3825 rtx src = op[1];
3826 rtx base = XEXP (src, 0);
3827 int reg_dest = true_regnum (dest);
3828 int reg_base = true_regnum (base);
3829 int tmp;
3831 if (!l)
3832 l = &tmp;
3834 if (reg_base > 0)
3836 if (AVR_TINY)
3837 return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);
/* X has no LDD, so everything goes through post-inc/pre-dec on X.  */
3839 if (reg_base == REG_X) /* (R26) */
3841 if (reg_dest == REG_X)
3842 /* "ld r26,-X" is undefined */
3843 return *l=7, ("adiw r26,3" CR_TAB
3844 "ld r29,X" CR_TAB
3845 "ld r28,-X" CR_TAB
3846 "ld __tmp_reg__,-X" CR_TAB
3847 "sbiw r26,1" CR_TAB
3848 "ld r26,X" CR_TAB
3849 "mov r27,__tmp_reg__");
3850 else if (reg_dest == REG_X - 2)
3851 return *l=5, ("ld %A0,X+" CR_TAB
3852 "ld %B0,X+" CR_TAB
3853 "ld __tmp_reg__,X+" CR_TAB
3854 "ld %D0,X" CR_TAB
3855 "mov %C0,__tmp_reg__");
3856 else if (reg_unused_after (insn, base))
3857 return *l=4, ("ld %A0,X+" CR_TAB
3858 "ld %B0,X+" CR_TAB
3859 "ld %C0,X+" CR_TAB
3860 "ld %D0,X");
3861 else
3862 return *l=5, ("ld %A0,X+" CR_TAB
3863 "ld %B0,X+" CR_TAB
3864 "ld %C0,X+" CR_TAB
3865 "ld %D0,X" CR_TAB
3866 "sbiw r26,3");
/* Base is Y or Z: LDD with small offsets avoids pointer arithmetic;
   overlap with the destination again fixes the load order.  */
3868 else
3870 if (reg_dest == reg_base)
3871 return *l=5, ("ldd %D0,%1+3" CR_TAB
3872 "ldd %C0,%1+2" CR_TAB
3873 "ldd __tmp_reg__,%1+1" CR_TAB
3874 "ld %A0,%1" CR_TAB
3875 "mov %B0,__tmp_reg__");
3876 else if (reg_base == reg_dest + 2)
3877 return *l=5, ("ld %A0,%1" CR_TAB
3878 "ldd %B0,%1+1" CR_TAB
3879 "ldd __tmp_reg__,%1+2" CR_TAB
3880 "ldd %D0,%1+3" CR_TAB
3881 "mov %C0,__tmp_reg__");
3882 else
3883 return *l=4, ("ld %A0,%1" CR_TAB
3884 "ldd %B0,%1+1" CR_TAB
3885 "ldd %C0,%1+2" CR_TAB
3886 "ldd %D0,%1+3");
3889 else if (GET_CODE (base) == PLUS) /* (R + i) */
3891 int disp = INTVAL (XEXP (base, 1));
3893 if (AVR_TINY)
3894 return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);
/* Displacement beyond LDD's 0..63 range: temporarily move Y (the only
   legal base here) forward, load, then move it back.  */
3896 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3898 if (REGNO (XEXP (base, 0)) != REG_Y)
3899 fatal_insn ("incorrect insn:",insn);
3901 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3902 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3903 "ldd %A0,Y+60" CR_TAB
3904 "ldd %B0,Y+61" CR_TAB
3905 "ldd %C0,Y+62" CR_TAB
3906 "ldd %D0,Y+63" CR_TAB
3907 "sbiw r28,%o1-60");
3909 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3910 "sbci r29,hi8(-%o1)" CR_TAB
3911 "ld %A0,Y" CR_TAB
3912 "ldd %B0,Y+1" CR_TAB
3913 "ldd %C0,Y+2" CR_TAB
3914 "ldd %D0,Y+3" CR_TAB
3915 "subi r28,lo8(%o1)" CR_TAB
3916 "sbci r29,hi8(%o1)");
3919 reg_base = true_regnum (XEXP (base, 0));
/* X + displacement: X cannot do LDD, so add the offset, load with
   post-increment, and subtract the offset back if needed.  */
3920 if (reg_base == REG_X)
3922 /* R = (X + d) */
3923 if (reg_dest == REG_X)
3925 *l = 7;
3926 /* "ld r26,-X" is undefined */
3927 return ("adiw r26,%o1+3" CR_TAB
3928 "ld r29,X" CR_TAB
3929 "ld r28,-X" CR_TAB
3930 "ld __tmp_reg__,-X" CR_TAB
3931 "sbiw r26,1" CR_TAB
3932 "ld r26,X" CR_TAB
3933 "mov r27,__tmp_reg__");
3935 *l = 6;
3936 if (reg_dest == REG_X - 2)
3937 return ("adiw r26,%o1" CR_TAB
3938 "ld r24,X+" CR_TAB
3939 "ld r25,X+" CR_TAB
3940 "ld __tmp_reg__,X+" CR_TAB
3941 "ld r27,X" CR_TAB
3942 "mov r26,__tmp_reg__");
3944 return ("adiw r26,%o1" CR_TAB
3945 "ld %A0,X+" CR_TAB
3946 "ld %B0,X+" CR_TAB
3947 "ld %C0,X+" CR_TAB
3948 "ld %D0,X" CR_TAB
3949 "sbiw r26,%o1+3");
/* Y/Z + small displacement: straight LDDs, ordered for overlap.  */
3951 if (reg_dest == reg_base)
3952 return *l=5, ("ldd %D0,%D1" CR_TAB
3953 "ldd %C0,%C1" CR_TAB
3954 "ldd __tmp_reg__,%B1" CR_TAB
3955 "ldd %A0,%A1" CR_TAB
3956 "mov %B0,__tmp_reg__");
3957 else if (reg_dest == reg_base - 2)
3958 return *l=5, ("ldd %A0,%A1" CR_TAB
3959 "ldd %B0,%B1" CR_TAB
3960 "ldd __tmp_reg__,%C1" CR_TAB
3961 "ldd %D0,%D1" CR_TAB
3962 "mov %C0,__tmp_reg__");
3963 return *l=4, ("ldd %A0,%A1" CR_TAB
3964 "ldd %B0,%B1" CR_TAB
3965 "ldd %C0,%C1" CR_TAB
3966 "ldd %D0,%D1");
3968 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3969 return *l=4, ("ld %D0,%1" CR_TAB
3970 "ld %C0,%1" CR_TAB
3971 "ld %B0,%1" CR_TAB
3972 "ld %A0,%1");
3973 else if (GET_CODE (base) == POST_INC) /* (R++) */
3974 return *l=4, ("ld %A0,%1" CR_TAB
3975 "ld %B0,%1" CR_TAB
3976 "ld %C0,%1" CR_TAB
3977 "ld %D0,%1");
3978 else if (CONSTANT_ADDRESS_P (base))
/* Constant address: IN for I/O space when optimizing, LDS otherwise.
   On AVR_TINY, LDS is a 1-word instruction, hence length 4 not 8.  */
3980 if (io_address_operand (base, SImode))
3982 *l = 4;
3983 return ("in %A0,%i1" CR_TAB
3984 "in %B0,%i1+1" CR_TAB
3985 "in %C0,%i1+2" CR_TAB
3986 "in %D0,%i1+3");
3988 else
3990 *l = AVR_TINY ? 4 : 8;
3991 return ("lds %A0,%m1" CR_TAB
3992 "lds %B0,%m1+1" CR_TAB
3993 "lds %C0,%m1+2" CR_TAB
3994 "lds %D0,%m1+3");
3998 fatal_insn ("unknown move insn:",insn);
3999 return "";
/* AVR_TINY: output a 4-byte (SImode) store to memory addressed by a plain
   base register.  OP[0] = destination MEM, OP[1] = source register.
   Source bytes that overlap the pointer are parked in __tmp_reg__ /
   __zero_reg__ before the pointer is advanced; __zero_reg__ is cleared
   again afterwards.  Count goes through L; template is returned.  */
4002 static const char*
4003 avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
4005 rtx dest = op[0];
4006 rtx src = op[1];
4007 rtx base = XEXP (dest, 0);
4008 int reg_base = true_regnum (base);
4009 int reg_src = true_regnum (src);
/* Source and pointer are the same registers: store byte A while the
   pointer is intact, hold byte B in __tmp_reg__ across the ADIW.  */
4011 if (reg_base == reg_src)
4013 /* "ld r26,-X" is undefined */
4014 if (reg_unused_after (insn, base))
4016 return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
4017 "st %0,%A1" CR_TAB
4018 TINY_ADIW (%E0, %F0, 1) CR_TAB
4019 "st %0+,__tmp_reg__" CR_TAB
4020 "st %0+,%C1" CR_TAB
4021 "st %0+,%D1");
4023 else
4025 return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
4026 "st %0,%A1" CR_TAB
4027 TINY_ADIW (%E0, %F0, 1) CR_TAB
4028 "st %0+,__tmp_reg__" CR_TAB
4029 "st %0+,%C1" CR_TAB
4030 "st %0+,%D1" CR_TAB
4031 TINY_SBIW (%E0, %F0, 3));
/* Pointer overlaps the source's high word (C:D): save C and D first.  */
4034 else if (reg_base == reg_src + 2)
4036 if (reg_unused_after (insn, base))
4037 return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
4038 "mov __tmp_reg__,%D1" CR_TAB
4039 "st %0+,%A1" CR_TAB
4040 "st %0+,%B1" CR_TAB
4041 "st %0+,__zero_reg__" CR_TAB
4042 "st %0,__tmp_reg__" CR_TAB
4043 "clr __zero_reg__");
4044 else
4045 return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
4046 "mov __tmp_reg__,%D1" CR_TAB
4047 "st %0+,%A1" CR_TAB
4048 "st %0+,%B1" CR_TAB
4049 "st %0+,__zero_reg__" CR_TAB
4050 "st %0,__tmp_reg__" CR_TAB
4051 "clr __zero_reg__" CR_TAB
4052 TINY_SBIW (%E0, %F0, 3));
/* No overlap: store all four bytes, then restore the pointer.
   NOTE(review): unlike the branches above, this path emits the SBIW
   unconditionally, even when the pointer is unused afterwards.  */
4055 return *l = 6, ("st %0+,%A1" CR_TAB
4056 "st %0+,%B1" CR_TAB
4057 "st %0+,%C1" CR_TAB
4058 "st %0,%D1" CR_TAB
4059 TINY_SBIW (%E0, %F0, 3));
4062 static const char*
4063 avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
4065 rtx dest = op[0];
4066 rtx src = op[1];
4067 rtx base = XEXP (dest, 0);
4068 int reg_base = REGNO (XEXP (base, 0));
4069 int reg_src =true_regnum (src);
4071 if (reg_base == reg_src)
4073 *l = 11;
4074 return ("mov __tmp_reg__,%A2" CR_TAB
4075 "mov __zero_reg__,%B2" CR_TAB
4076 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4077 "st %b0+,__tmp_reg__" CR_TAB
4078 "st %b0+,__zero_reg__" CR_TAB
4079 "st %b0+,%C2" CR_TAB
4080 "st %b0,%D2" CR_TAB
4081 "clr __zero_reg__" CR_TAB
4082 TINY_SBIW (%I0, %J0, %o0+3));
4084 else if (reg_src == reg_base - 2)
4086 *l = 11;
4087 return ("mov __tmp_reg__,%C2" CR_TAB
4088 "mov __zero_reg__,%D2" CR_TAB
4089 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4090 "st %b0+,%A0" CR_TAB
4091 "st %b0+,%B0" CR_TAB
4092 "st %b0+,__tmp_reg__" CR_TAB
4093 "st %b0,__zero_reg__" CR_TAB
4094 "clr __zero_reg__" CR_TAB
4095 TINY_SBIW (%I0, %J0, %o0+3));
4097 *l = 8;
4098 return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4099 "st %b0+,%A1" CR_TAB
4100 "st %b0+,%B1" CR_TAB
4101 "st %b0+,%C1" CR_TAB
4102 "st %b0,%D1" CR_TAB
4103 TINY_SBIW (%I0, %J0, %o0+3));
/* Output a 4-byte (SImode) store from registers to memory for all AVR
   cores.  OP[0] = destination MEM, OP[1] = source REG.  Dispatches on the
   address form: constant address, plain register, register +
   displacement, pre-decrement, or post-increment.  The instruction count
   goes through L (may be NULL); the assembler template is returned.  */
4106 static const char*
4107 out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
4109 rtx dest = op[0];
4110 rtx src = op[1];
4111 rtx base = XEXP (dest, 0);
4112 int reg_base = true_regnum (base);
4113 int reg_src = true_regnum (src);
4114 int tmp;
4116 if (!l)
4117 l = &tmp;
/* Constant address: OUT for I/O space, STS otherwise (1-word STS on
   AVR_TINY, hence the shorter length there).  */
4119 if (CONSTANT_ADDRESS_P (base))
4121 if (io_address_operand (base, SImode))
4123 return *l=4,("out %i0, %A1" CR_TAB
4124 "out %i0+1,%B1" CR_TAB
4125 "out %i0+2,%C1" CR_TAB
4126 "out %i0+3,%D1");
4128 else
4130 *l = AVR_TINY ? 4 : 8;
4131 return ("sts %m0,%A1" CR_TAB
4132 "sts %m0+1,%B1" CR_TAB
4133 "sts %m0+2,%C1" CR_TAB
4134 "sts %m0+3,%D1");
4138 if (reg_base > 0) /* (r) */
4140 if (AVR_TINY)
4141 return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);
/* X pointer: no STD available; overlapping source bytes are parked in
   __tmp_reg__/__zero_reg__ before X is advanced.  */
4143 if (reg_base == REG_X) /* (R26) */
4145 if (reg_src == REG_X)
4147 /* "st X+,r26" is undefined */
4148 if (reg_unused_after (insn, base))
4149 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
4150 "st X,r26" CR_TAB
4151 "adiw r26,1" CR_TAB
4152 "st X+,__tmp_reg__" CR_TAB
4153 "st X+,r28" CR_TAB
4154 "st X,r29");
4155 else
4156 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
4157 "st X,r26" CR_TAB
4158 "adiw r26,1" CR_TAB
4159 "st X+,__tmp_reg__" CR_TAB
4160 "st X+,r28" CR_TAB
4161 "st X,r29" CR_TAB
4162 "sbiw r26,3");
4164 else if (reg_base == reg_src + 2)
4166 if (reg_unused_after (insn, base))
4167 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
4168 "mov __tmp_reg__,%D1" CR_TAB
4169 "st %0+,%A1" CR_TAB
4170 "st %0+,%B1" CR_TAB
4171 "st %0+,__zero_reg__" CR_TAB
4172 "st %0,__tmp_reg__" CR_TAB
4173 "clr __zero_reg__");
4174 else
4175 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
4176 "mov __tmp_reg__,%D1" CR_TAB
4177 "st %0+,%A1" CR_TAB
4178 "st %0+,%B1" CR_TAB
4179 "st %0+,__zero_reg__" CR_TAB
4180 "st %0,__tmp_reg__" CR_TAB
4181 "clr __zero_reg__" CR_TAB
4182 "sbiw r26,3");
4184 return *l=5, ("st %0+,%A1" CR_TAB
4185 "st %0+,%B1" CR_TAB
4186 "st %0+,%C1" CR_TAB
4187 "st %0,%D1" CR_TAB
4188 "sbiw r26,3");
/* Y/Z pointer: STD covers all four bytes without moving the pointer.  */
4190 else
4191 return *l=4, ("st %0,%A1" CR_TAB
4192 "std %0+1,%B1" CR_TAB
4193 "std %0+2,%C1" CR_TAB
4194 "std %0+3,%D1");
4196 else if (GET_CODE (base) == PLUS) /* (R + i) */
4198 int disp = INTVAL (XEXP (base, 1));
4200 if (AVR_TINY)
4201 return avr_out_movsi_mr_r_reg_disp_tiny (op, l);
4203 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD's range: temporarily advance Y, store, and
   move Y back; only Y is legal for such addresses.  */
4204 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4206 if (reg_base != REG_Y)
4207 fatal_insn ("incorrect insn:",insn);
4209 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4210 return *l = 6, ("adiw r28,%o0-60" CR_TAB
4211 "std Y+60,%A1" CR_TAB
4212 "std Y+61,%B1" CR_TAB
4213 "std Y+62,%C1" CR_TAB
4214 "std Y+63,%D1" CR_TAB
4215 "sbiw r28,%o0-60");
4217 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
4218 "sbci r29,hi8(-%o0)" CR_TAB
4219 "st Y,%A1" CR_TAB
4220 "std Y+1,%B1" CR_TAB
4221 "std Y+2,%C1" CR_TAB
4222 "std Y+3,%D1" CR_TAB
4223 "subi r28,lo8(%o0)" CR_TAB
4224 "sbci r29,hi8(%o0)");
/* X + displacement: save overlapping source bytes, walk X forward with
   post-increment stores, then subtract the net advance.  */
4226 if (reg_base == REG_X)
4228 /* (X + d) = R */
4229 if (reg_src == REG_X)
4231 *l = 9;
4232 return ("mov __tmp_reg__,r26" CR_TAB
4233 "mov __zero_reg__,r27" CR_TAB
4234 "adiw r26,%o0" CR_TAB
4235 "st X+,__tmp_reg__" CR_TAB
4236 "st X+,__zero_reg__" CR_TAB
4237 "st X+,r28" CR_TAB
4238 "st X,r29" CR_TAB
4239 "clr __zero_reg__" CR_TAB
4240 "sbiw r26,%o0+3");
4242 else if (reg_src == REG_X - 2)
4244 *l = 9;
4245 return ("mov __tmp_reg__,r26" CR_TAB
4246 "mov __zero_reg__,r27" CR_TAB
4247 "adiw r26,%o0" CR_TAB
4248 "st X+,r24" CR_TAB
4249 "st X+,r25" CR_TAB
4250 "st X+,__tmp_reg__" CR_TAB
4251 "st X,__zero_reg__" CR_TAB
4252 "clr __zero_reg__" CR_TAB
4253 "sbiw r26,%o0+3");
4255 *l = 6;
4256 return ("adiw r26,%o0" CR_TAB
4257 "st X+,%A1" CR_TAB
4258 "st X+,%B1" CR_TAB
4259 "st X+,%C1" CR_TAB
4260 "st X,%D1" CR_TAB
4261 "sbiw r26,%o0+3");
4263 return *l=4, ("std %A0,%A1" CR_TAB
4264 "std %B0,%B1" CR_TAB
4265 "std %C0,%C1" CR_TAB
4266 "std %D0,%D1");
4268 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4269 return *l=4, ("st %0,%D1" CR_TAB
4270 "st %0,%C1" CR_TAB
4271 "st %0,%B1" CR_TAB
4272 "st %0,%A1");
4273 else if (GET_CODE (base) == POST_INC) /* (R++) */
4274 return *l=4, ("st %0,%A1" CR_TAB
4275 "st %0,%B1" CR_TAB
4276 "st %0,%C1" CR_TAB
4277 "st %0,%D1");
4278 fatal_insn ("unknown move insn:",insn);
4279 return "";
/* Top-level output routine for 4-byte moves (SImode / SFmode).
   OPERANDS[0] = destination, OPERANDS[1] = source; L, if non-NULL,
   receives the instruction count.  Delegates to the LPM path for flash
   reads, to register-move / constant-reload / load / store helpers
   otherwise.  Returns the assembler template (possibly "").  */
4282 const char *
4283 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4285 int dummy;
4286 rtx dest = operands[0];
4287 rtx src = operands[1];
4288 int *real_l = l;
/* Reads from program memory must go through LPM/ELPM.  */
4290 if (avr_mem_flash_p (src)
4291 || avr_mem_flash_p (dest))
4293 return avr_out_lpm (insn, operands, real_l);
4296 if (!l)
4297 l = &dummy;
4299 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4300 if (REG_P (dest))
4302 if (REG_P (src)) /* mov r,r */
/* Copy direction follows register order so overlapping source and
   destination never clobber a byte before it is read.  */
4304 if (true_regnum (dest) > true_regnum (src))
4306 if (AVR_HAVE_MOVW)
4308 *l = 2;
4309 return ("movw %C0,%C1" CR_TAB
4310 "movw %A0,%A1");
4312 *l = 4;
4313 return ("mov %D0,%D1" CR_TAB
4314 "mov %C0,%C1" CR_TAB
4315 "mov %B0,%B1" CR_TAB
4316 "mov %A0,%A1");
4318 else
4320 if (AVR_HAVE_MOVW)
4322 *l = 2;
4323 return ("movw %A0,%A1" CR_TAB
4324 "movw %C0,%C1");
4326 *l = 4;
4327 return ("mov %A0,%A1" CR_TAB
4328 "mov %B0,%B1" CR_TAB
4329 "mov %C0,%C1" CR_TAB
4330 "mov %D0,%D1");
4333 else if (CONSTANT_P (src))
4335 return output_reload_insisf (operands, NULL_RTX, real_l);
4337 else if (MEM_P (src))
4338 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4340 else if (MEM_P (dest))
4342 const char *templ;
/* Storing constant zero reuses __zero_reg__ as the source; the original
   operand is restored before returning.  */
4344 if (src == CONST0_RTX (GET_MODE (dest)))
4345 operands[1] = zero_reg_rtx;
4347 templ = out_movsi_mr_r (insn, operands, real_l);
/* When called for real output (not length computation), emit now.  */
4349 if (!real_l)
4350 output_asm_insn (templ, operands);
4352 operands[1] = src;
4353 return "";
4355 fatal_insn ("invalid insn:", insn);
4356 return "";
4360 /* Handle loads of 24-bit types from memory to register. */
4362 static const char*
4363 avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4365 rtx dest = op[0];
4366 rtx src = op[1];
4367 rtx base = XEXP (src, 0);
4368 int reg_dest = true_regnum (dest);
4369 int reg_base = true_regnum (base);
4371 if (reg_base == reg_dest)
4373 return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
4374 "ld %C0,%1" CR_TAB
4375 "ld __tmp_reg__,-%1" CR_TAB
4376 TINY_SBIW (%E1, %F1, 1) CR_TAB
4377 "ld %A0,%1" CR_TAB
4378 "mov %B0,__tmp_reg__", op, plen, -8);
4380 else
4382 return avr_asm_len ("ld %A0,%1+" CR_TAB
4383 "ld %B0,%1+" CR_TAB
4384 "ld %C0,%1", op, plen, -3);
4386 if (reg_dest != reg_base - 2 &&
4387 !reg_unused_after (insn, base))
4389 avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
4391 return "";
/* AVR_TINY: output a 3-byte (PSImode) load from memory addressed by base
   register + displacement.  OP[0] = destination REG, OP[1] = source MEM
   whose address is a PLUS; %I1/%J1 are the pointer bytes, %o1 the
   displacement, %b1 the base.  Count via PLEN; returns the template.  */
4395 static const char*
4396 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4398 rtx dest = op[0];
4399 rtx src = op[1];
4400 rtx base = XEXP (src, 0);
4401 int reg_dest = true_regnum (dest);
/* NOTE(review): this initializer is dead — reg_base is immediately
   recomputed from the PLUS's base register on the next line.  */
4402 int reg_base = true_regnum (base);
4404 reg_base = true_regnum (XEXP (base, 0));
/* Destination overlaps the pointer: read high-to-low via __tmp_reg__.  */
4405 if (reg_base == reg_dest)
4407 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4408 "ld %C0,%b1" CR_TAB
4409 "ld __tmp_reg__,-%b1" CR_TAB
4410 TINY_SBIW (%I1, %J1, 1) CR_TAB
4411 "ld %A0,%b1" CR_TAB
4412 "mov %B0,__tmp_reg__", op, plen, -8);
4414 else
4416 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
4417 "ld %A0,%b1+" CR_TAB
4418 "ld %B0,%b1+" CR_TAB
4419 "ld %C0,%b1", op, plen, -5);
/* Restore the pointer unless it overlaps the destination's high part
   or is dead after this insn.  */
4421 if (reg_dest != (reg_base - 2)
4422 && !reg_unused_after (insn, XEXP (base, 0)))
4423 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
4425 return "";
/* Output a 3-byte (PSImode) load from memory for all AVR cores.
   OP[0] = destination REG, OP[1] = source MEM.  Dispatches on the address
   form (plain register, register + displacement, pre-decrement,
   post-increment, constant address).  Count via PLEN per the avr_asm_len
   convention; returns the assembler template.  */
4429 static const char*
4430 avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
4432 rtx dest = op[0];
4433 rtx src = op[1];
4434 rtx base = XEXP (src, 0);
4435 int reg_dest = true_regnum (dest);
4436 int reg_base = true_regnum (base);
4438 if (reg_base > 0)
4440 if (AVR_TINY)
4441 return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);
4443 if (reg_base == REG_X) /* (R26) */
4445 if (reg_dest == REG_X)
4446 /* "ld r26,-X" is undefined */
4447 return avr_asm_len ("adiw r26,2" CR_TAB
4448 "ld r28,X" CR_TAB
4449 "ld __tmp_reg__,-X" CR_TAB
4450 "sbiw r26,1" CR_TAB
4451 "ld r26,X" CR_TAB
4452 "mov r27,__tmp_reg__", op, plen, -6);
4453 else
4455 avr_asm_len ("ld %A0,X+" CR_TAB
4456 "ld %B0,X+" CR_TAB
4457 "ld %C0,X", op, plen, -3);
/* Restore X unless it overlaps the destination's high byte or dies.  */
4459 if (reg_dest != REG_X - 2
4460 && !reg_unused_after (insn, base))
4462 avr_asm_len ("sbiw r26,2", op, plen, 1);
4465 return "";
/* Y/Z base: LDD, ordered to survive overlap with the destination.  */
4468 else /* reg_base != REG_X */
4470 if (reg_dest == reg_base)
4471 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
4472 "ldd __tmp_reg__,%1+1" CR_TAB
4473 "ld %A0,%1" CR_TAB
4474 "mov %B0,__tmp_reg__", op, plen, -4);
4475 else
4476 return avr_asm_len ("ld %A0,%1" CR_TAB
4477 "ldd %B0,%1+1" CR_TAB
4478 "ldd %C0,%1+2", op, plen, -3);
4481 else if (GET_CODE (base) == PLUS) /* (R + i) */
4483 int disp = INTVAL (XEXP (base, 1));
4485 if (AVR_TINY)
4486 return avr_out_load_psi_reg_disp_tiny (insn, op, plen);
/* Displacement beyond LDD range: temporarily advance Y (only legal
   base here), load, then move Y back.  */
4488 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
4490 if (REGNO (XEXP (base, 0)) != REG_Y)
4491 fatal_insn ("incorrect insn:",insn);
4493 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
4494 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
4495 "ldd %A0,Y+61" CR_TAB
4496 "ldd %B0,Y+62" CR_TAB
4497 "ldd %C0,Y+63" CR_TAB
4498 "sbiw r28,%o1-61", op, plen, -5);
4500 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4501 "sbci r29,hi8(-%o1)" CR_TAB
4502 "ld %A0,Y" CR_TAB
4503 "ldd %B0,Y+1" CR_TAB
4504 "ldd %C0,Y+2" CR_TAB
4505 "subi r28,lo8(%o1)" CR_TAB
4506 "sbci r29,hi8(%o1)", op, plen, -7);
4509 reg_base = true_regnum (XEXP (base, 0));
4510 if (reg_base == REG_X)
4512 /* R = (X + d) */
4513 if (reg_dest == REG_X)
4515 /* "ld r26,-X" is undefined */
4516 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
4517 "ld r28,X" CR_TAB
4518 "ld __tmp_reg__,-X" CR_TAB
4519 "sbiw r26,1" CR_TAB
4520 "ld r26,X" CR_TAB
4521 "mov r27,__tmp_reg__", op, plen, -6);
4524 avr_asm_len ("adiw r26,%o1" CR_TAB
4525 "ld %A0,X+" CR_TAB
4526 "ld %B0,X+" CR_TAB
4527 "ld %C0,X", op, plen, -4);
/* REG_W is r24, i.e. the case where X overlaps the destination's
   high byte and must not be restored.  */
4529 if (reg_dest != REG_W
4530 && !reg_unused_after (insn, XEXP (base, 0)))
4531 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
4533 return "";
4536 if (reg_dest == reg_base)
4537 return avr_asm_len ("ldd %C0,%C1" CR_TAB
4538 "ldd __tmp_reg__,%B1" CR_TAB
4539 "ldd %A0,%A1" CR_TAB
4540 "mov %B0,__tmp_reg__", op, plen, -4);
4542 return avr_asm_len ("ldd %A0,%A1" CR_TAB
4543 "ldd %B0,%B1" CR_TAB
4544 "ldd %C0,%C1", op, plen, -3);
4546 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4547 return avr_asm_len ("ld %C0,%1" CR_TAB
4548 "ld %B0,%1" CR_TAB
4549 "ld %A0,%1", op, plen, -3);
4550 else if (GET_CODE (base) == POST_INC) /* (R++) */
4551 return avr_asm_len ("ld %A0,%1" CR_TAB
4552 "ld %B0,%1" CR_TAB
4553 "ld %C0,%1", op, plen, -3);
4555 else if (CONSTANT_ADDRESS_P (base))
/* LDS is 1 word on AVR_TINY, 2 words elsewhere.  */
4557 int n_words = AVR_TINY ? 3 : 6;
4558 return avr_asm_len ("lds %A0,%m1" CR_TAB
4559 "lds %B0,%m1+1" CR_TAB
4560 "lds %C0,%m1+2", op, plen , -n_words);
4563 fatal_insn ("unknown move insn:",insn);
4564 return "";
4568 static const char*
4569 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4571 rtx dest = op[0];
4572 rtx src = op[1];
4573 rtx base = XEXP (dest, 0);
4574 int reg_base = true_regnum (base);
4575 int reg_src = true_regnum (src);
4577 if (reg_base == reg_src)
4579 avr_asm_len ("st %0,%A1" CR_TAB
4580 "mov __tmp_reg__,%B1" CR_TAB
4581 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
4582 "st %0+,__tmp_reg__" CR_TAB
4583 "st %0,%C1", op, plen, -6);
4586 else if (reg_src == reg_base - 2)
4588 avr_asm_len ("st %0,%A1" CR_TAB
4589 "mov __tmp_reg__,%C1" CR_TAB
4590 TINY_ADIW (%E0, %F0, 1) CR_TAB
4591 "st %0+,%B1" CR_TAB
4592 "st %0,__tmp_reg__", op, plen, 6);
4594 else
4596 avr_asm_len ("st %0+,%A1" CR_TAB
4597 "st %0+,%B1" CR_TAB
4598 "st %0,%C1", op, plen, -3);
4601 if (!reg_unused_after (insn, base))
4602 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4604 return "";
/* AVR_TINY: output a 3-byte (PSImode) store to memory addressed by base
   register + displacement.  OP[0] = destination MEM (address is a PLUS),
   OP[1] = source REG.  %I0/%J0 are the pointer bytes, %o0 the
   displacement, %b0 the base.  Count via PLEN; returns the template.  */
4607 static const char*
4608 avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
4610 rtx dest = op[0];
4611 rtx src = op[1];
4612 rtx base = XEXP (dest, 0);
4613 int reg_base = REGNO (XEXP (base, 0));
4614 int reg_src = true_regnum (src);
/* Source overlaps the pointer: save its low word in __tmp_reg__ /
   __zero_reg__ before the ADIW, clear __zero_reg__ afterwards.  */
4616 if (reg_src == reg_base)
4618 return avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
4619 "mov __zero_reg__,%B1" CR_TAB
4620 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4621 "st %b0+,__tmp_reg__" CR_TAB
4622 "st %b0+,__zero_reg__" CR_TAB
4623 "st %b0,%C1" CR_TAB
4624 "clr __zero_reg__" CR_TAB
4625 TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
/* Pointer overlaps the source's high byte C: park it in __tmp_reg__.  */
4627 else if (reg_src == reg_base - 2)
4629 return avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
4630 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4631 "st %b0+,%A1" CR_TAB
4632 "st %b0+,%B1" CR_TAB
4633 "st %b0,__tmp_reg__" CR_TAB
4634 TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
/* No overlap: plain 3-byte store, then restore the pointer.  */
4637 return avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4638 "st %b0+,%A1" CR_TAB
4639 "st %b0+,%B1" CR_TAB
4640 "st %b0,%C1" CR_TAB
4641 TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
4644 /* Handle store of 24-bit type from register or zero to memory. */
/* Output a 3-byte (PSImode) store from register to memory for all AVR
   cores.  OP[0] = destination MEM, OP[1] = source REG (or __zero_reg__,
   substituted by the caller for constant 0).  Dispatches on the address
   form; count via PLEN; returns the assembler template.  */
4646 static const char*
4647 avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
4649 rtx dest = op[0];
4650 rtx src = op[1];
4651 rtx base = XEXP (dest, 0);
4652 int reg_base = true_regnum (base);
/* Constant address: STS (1 word on AVR_TINY, 2 words elsewhere).  */
4654 if (CONSTANT_ADDRESS_P (base))
4656 int n_words = AVR_TINY ? 3 : 6;
4657 return avr_asm_len ("sts %m0,%A1" CR_TAB
4658 "sts %m0+1,%B1" CR_TAB
4659 "sts %m0+2,%C1", op, plen, -n_words);
4662 if (reg_base > 0) /* (r) */
4664 if (AVR_TINY)
4665 return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);
4667 if (reg_base == REG_X) /* (R26) */
/* Non-TINY path never sees a source overlapping X here.  */
4669 gcc_assert (!reg_overlap_mentioned_p (base, src));
4671 avr_asm_len ("st %0+,%A1" CR_TAB
4672 "st %0+,%B1" CR_TAB
4673 "st %0,%C1", op, plen, -3);
4675 if (!reg_unused_after (insn, base))
4676 avr_asm_len ("sbiw r26,2", op, plen, 1);
4678 return "";
4680 else
4681 return avr_asm_len ("st %0,%A1" CR_TAB
4682 "std %0+1,%B1" CR_TAB
4683 "std %0+2,%C1", op, plen, -3);
4685 else if (GET_CODE (base) == PLUS) /* (R + i) */
4687 int disp = INTVAL (XEXP (base, 1));
4689 if (AVR_TINY)
4690 return avr_out_store_psi_reg_disp_tiny (op, plen);
4692 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD range: temporarily advance Y (the only legal
   base for such addresses), store, then move Y back.  */
4694 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4696 if (reg_base != REG_Y)
4697 fatal_insn ("incorrect insn:",insn);
4699 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4700 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4701 "std Y+61,%A1" CR_TAB
4702 "std Y+62,%B1" CR_TAB
4703 "std Y+63,%C1" CR_TAB
4704 "sbiw r28,%o0-61", op, plen, -5);
4706 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4707 "sbci r29,hi8(-%o0)" CR_TAB
4708 "st Y,%A1" CR_TAB
4709 "std Y+1,%B1" CR_TAB
4710 "std Y+2,%C1" CR_TAB
4711 "subi r28,lo8(%o0)" CR_TAB
4712 "sbci r29,hi8(%o0)", op, plen, -7);
4714 if (reg_base == REG_X)
4716 /* (X + d) = R */
4717 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4719 avr_asm_len ("adiw r26,%o0" CR_TAB
4720 "st X+,%A1" CR_TAB
4721 "st X+,%B1" CR_TAB
4722 "st X,%C1", op, plen, -4);
4724 if (!reg_unused_after (insn, XEXP (base, 0)))
4725 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4727 return "";
4730 return avr_asm_len ("std %A0,%A1" CR_TAB
4731 "std %B0,%B1" CR_TAB
4732 "std %C0,%C1", op, plen, -3);
4734 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4735 return avr_asm_len ("st %0,%C1" CR_TAB
4736 "st %0,%B1" CR_TAB
4737 "st %0,%A1", op, plen, -3);
4738 else if (GET_CODE (base) == POST_INC) /* (R++) */
4739 return avr_asm_len ("st %0,%A1" CR_TAB
4740 "st %0,%B1" CR_TAB
4741 "st %0,%C1", op, plen, -3);
4743 fatal_insn ("unknown move insn:",insn);
4744 return "";
4748 /* Move around 24-bit stuff. */
/* Top-level output routine for 3-byte (PSImode) moves.  OP[0] =
   destination, OP[1] = source; PLEN carries the instruction count.
   Delegates to LPM for flash reads, otherwise to register-move,
   constant-reload, load, or store helpers.  */
4750 const char *
4751 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
4753 rtx dest = op[0];
4754 rtx src = op[1];
4756 if (avr_mem_flash_p (src)
4757 || avr_mem_flash_p (dest))
4759 return avr_out_lpm (insn, op, plen);
4762 if (register_operand (dest, VOIDmode))
4764 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction follows register order so overlapping operands never
   clobber a byte before it is read.  */
4766 if (true_regnum (dest) > true_regnum (src))
4768 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4770 if (AVR_HAVE_MOVW)
4771 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4772 else
4773 return avr_asm_len ("mov %B0,%B1" CR_TAB
4774 "mov %A0,%A1", op, plen, 2);
4776 else
4778 if (AVR_HAVE_MOVW)
4779 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4780 else
4781 avr_asm_len ("mov %A0,%A1" CR_TAB
4782 "mov %B0,%B1", op, plen, -2);
4784 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4787 else if (CONSTANT_P (src))
4789 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4791 else if (MEM_P (src))
4792 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4794 else if (MEM_P (dest))
4796 rtx xop[2];
/* Storing constant 0 is done from __zero_reg__.  */
4798 xop[0] = dest;
4799 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4801 return avr_out_store_psi (insn, xop, plen);
4804 fatal_insn ("invalid insn:", insn);
4805 return "";
/* AVR_TINY: output a 1-byte (QImode) store to memory addressed by base
   register + displacement.  OP[0] = destination MEM, OP[1] = source.
   Count via PLEN; returns "".  */
4808 static const char*
4809 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
4811 rtx dest = op[0];
4812 rtx src = op[1];
4813 rtx x = XEXP (dest, 0);
/* If the source is (part of) the pointer, copy it to __tmp_reg__ before
   the ADIW clobbers it.  */
4815 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4817 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4818 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4819 "st %b0,__tmp_reg__", op, plen, -4);
4821 else
4823 avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4824 "st %b0,%1" , op, plen, -3);
/* Restore the pointer when it is still live after this insn.  */
4827 if (!reg_unused_after (insn, XEXP (x,0)))
4828 avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);
4830 return "";
/* Output a 1-byte (QImode) store from register to memory for all AVR
   cores.  OP[0] = destination MEM, OP[1] = source.  Handles constant
   addresses (OUT/STS), reg+disp (STD, or pointer adjustment for X and
   out-of-range Y offsets), and plain/auto-modified register addresses.
   Count via PLEN; returns the assembler template (possibly "").  */
4833 static const char*
4834 out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
4836 rtx dest = op[0];
4837 rtx src = op[1];
4838 rtx x = XEXP (dest, 0);
/* Constant address: prefer 1-word OUT for I/O space when optimizing.  */
4840 if (CONSTANT_ADDRESS_P (x))
4842 int n_words = AVR_TINY ? 1 : 2;
4843 return optimize > 0 && io_address_operand (x, QImode)
4844 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4845 : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
4847 else if (GET_CODE (x) == PLUS
4848 && REG_P (XEXP (x, 0))
4849 && CONST_INT_P (XEXP (x, 1)))
4851 /* memory access by reg+disp */
4853 int disp = INTVAL (XEXP (x, 1));
4855 if (AVR_TINY)
4856 return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);
/* Displacement out of STD range: only Y may appear here; move Y
   forward, store, then move it back.  */
4858 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4860 if (REGNO (XEXP (x, 0)) != REG_Y)
4861 fatal_insn ("incorrect insn:",insn);
4863 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4864 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4865 "std Y+63,%1" CR_TAB
4866 "sbiw r28,%o0-63", op, plen, -3);
4868 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4869 "sbci r29,hi8(-%o0)" CR_TAB
4870 "st Y,%1" CR_TAB
4871 "subi r28,lo8(%o0)" CR_TAB
4872 "sbci r29,hi8(%o0)", op, plen, -5);
/* X + displacement: X has no STD; adjust X around the store, saving
   the source first if it overlaps X.  */
4874 else if (REGNO (XEXP (x,0)) == REG_X)
4876 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4878 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4879 "adiw r26,%o0" CR_TAB
4880 "st X,__tmp_reg__", op, plen, -3);
4882 else
4884 avr_asm_len ("adiw r26,%o0" CR_TAB
4885 "st X,%1", op, plen, -2);
4888 if (!reg_unused_after (insn, XEXP (x,0)))
4889 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4891 return "";
4894 return avr_asm_len ("std %0,%1", op, plen, -1);
/* Plain register or pre-dec/post-inc address.  */
4897 return avr_asm_len ("st %0,%1", op, plen, -1);
4901 /* Helper for the next function for XMEGA. It does the same
4902 but with low byte first. */
/* XMEGA variant of out_movhi_mr_r: output a 2-byte (HImode) store with
   the LOW byte written first (XMEGA's 16-bit I/O registers latch on the
   low byte, the reverse of classic AVR).  OP[0] = destination MEM,
   OP[1] = source REG.  Count via PLEN; returns the template.  */
4904 static const char*
4905 avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
4907 rtx dest = op[0];
4908 rtx src = op[1];
4909 rtx base = XEXP (dest, 0);
4910 int reg_base = true_regnum (base);
4911 int reg_src = true_regnum (src);
4913 /* "volatile" forces writing low byte first, even if less efficient,
4914 for correct operation with 16-bit I/O registers like SP. */
4915 int mem_volatile_p = MEM_VOLATILE_P (dest);
4917 if (CONSTANT_ADDRESS_P (base))
4919 int n_words = AVR_TINY ? 2 : 4;
4920 return optimize > 0 && io_address_operand (base, HImode)
4921 ? avr_asm_len ("out %i0,%A1" CR_TAB
4922 "out %i0+1,%B1", op, plen, -2)
4924 : avr_asm_len ("sts %m0,%A1" CR_TAB
4925 "sts %m0+1,%B1", op, plen, -n_words);
4928 if (reg_base > 0)
4930 if (reg_base != REG_X)
4931 return avr_asm_len ("st %0,%A1" CR_TAB
4932 "std %0+1,%B1", op, plen, -2);
/* Source is X itself: store r26 before ADIW clobbers it, carry r27 in
   __tmp_reg__.  */
4934 if (reg_src == REG_X)
4935 /* "st X+,r26" and "st -X,r26" are undefined. */
4936 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4937 "st X,r26" CR_TAB
4938 "adiw r26,1" CR_TAB
4939 "st X,__tmp_reg__", op, plen, -4);
4940 else
4941 avr_asm_len ("st X+,%A1" CR_TAB
4942 "st X,%B1", op, plen, -2);
4944 return reg_unused_after (insn, base)
4945 ? ""
4946 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4948 else if (GET_CODE (base) == PLUS)
4950 int disp = INTVAL (XEXP (base, 1));
4951 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD range: only Y is legal; adjust Y around the
   stores.  */
4952 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4954 if (reg_base != REG_Y)
4955 fatal_insn ("incorrect insn:",insn);
4957 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4958 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4959 "std Y+62,%A1" CR_TAB
4960 "std Y+63,%B1" CR_TAB
4961 "sbiw r28,%o0-62", op, plen, -4)
4963 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4964 "sbci r29,hi8(-%o0)" CR_TAB
4965 "st Y,%A1" CR_TAB
4966 "std Y+1,%B1" CR_TAB
4967 "subi r28,lo8(%o0)" CR_TAB
4968 "sbci r29,hi8(%o0)", op, plen, -6);
4971 if (reg_base != REG_X)
4972 return avr_asm_len ("std %A0,%A1" CR_TAB
4973 "std %B0,%B1", op, plen, -2);
4974 /* (X + d) = R */
/* When the source is X itself, park both bytes in __tmp_reg__ /
   __zero_reg__ before adjusting X.  */
4975 return reg_src == REG_X
4976 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4977 "mov __zero_reg__,r27" CR_TAB
4978 "adiw r26,%o0" CR_TAB
4979 "st X+,__tmp_reg__" CR_TAB
4980 "st X,__zero_reg__" CR_TAB
4981 "clr __zero_reg__" CR_TAB
4982 "sbiw r26,%o0+1", op, plen, -7)
4984 : avr_asm_len ("adiw r26,%o0" CR_TAB
4985 "st X+,%A1" CR_TAB
4986 "st X,%B1" CR_TAB
4987 "sbiw r26,%o0+1", op, plen, -4);
/* Pre-decrement: volatile forces low-byte-first, so emulate --R with
   explicit pointer arithmetic.  */
4989 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4991 if (!mem_volatile_p)
4992 return avr_asm_len ("st %0,%B1" CR_TAB
4993 "st %0,%A1", op, plen, -2);
4995 return REGNO (XEXP (base, 0)) == REG_X
4996 ? avr_asm_len ("sbiw r26,2" CR_TAB
4997 "st X+,%A1" CR_TAB
4998 "st X,%B1" CR_TAB
4999 "sbiw r26,1", op, plen, -4)
5001 : avr_asm_len ("sbiw %r0,2" CR_TAB
5002 "st %p0,%A1" CR_TAB
5003 "std %p0+1,%B1", op, plen, -3);
5005 else if (GET_CODE (base) == POST_INC) /* (R++) */
5007 return avr_asm_len ("st %0,%A1" CR_TAB
5008 "st %0,%B1", op, plen, -2);
5011 fatal_insn ("unknown move insn:",insn);
5012 return "";
/* AVR_TINY: output a 2-byte (HImode) store to memory addressed by a plain
   base register.  OP[0] = destination MEM, OP[1] = source REG.  Volatile
   destinations force the high byte to be written first (classic-AVR
   ordering for 16-bit I/O registers).  Count via PLEN.  */
5015 static const char*
5016 avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
5018 rtx dest = op[0];
5019 rtx src = op[1];
5020 rtx base = XEXP (dest, 0);
5021 int reg_base = true_regnum (base);
5022 int reg_src = true_regnum (src);
5023 int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Source overlaps the pointer: save byte B in __tmp_reg__ before the
   pointer is advanced; the volatile variant stores B (high) first.  */
5025 if (reg_base == reg_src)
5027 return !mem_volatile_p && reg_unused_after (insn, src)
5028 ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5029 "st %0,%A1" CR_TAB
5030 TINY_ADIW (%E0, %F0, 1) CR_TAB
5031 "st %0,__tmp_reg__", op, plen, -5)
5032 : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5033 TINY_ADIW (%E0, %F0, 1) CR_TAB
5034 "st %0,__tmp_reg__" CR_TAB
5035 TINY_SBIW (%E0, %F0, 1) CR_TAB
5036 "st %0, %A1", op, plen, -7);
/* No overlap: post-increment store when order/liveness permit; else
   store high byte first and come back with "st -%0".  */
5039 return !mem_volatile_p && reg_unused_after (insn, base)
5040 ? avr_asm_len ("st %0+,%A1" CR_TAB
5041 "st %0,%B1", op, plen, -2)
5042 : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
5043 "st %0,%B1" CR_TAB
5044 "st -%0,%A1", op, plen, -4);
/* AVR_TINY: output a 2-byte (HImode) store to memory addressed by base
   register + displacement, high byte first.  OP[0] = destination MEM
   (address is a PLUS), OP[1] = source REG.  Count via PLEN.  */
5047 static const char*
5048 avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
5050 rtx dest = op[0];
5051 rtx src = op[1];
5052 rtx base = XEXP (dest, 0);
5053 int reg_base = REGNO (XEXP (base, 0));
5054 int reg_src = true_regnum (src);
/* If the source is the pointer itself, stash both bytes in __tmp_reg__ /
   __zero_reg__ first; either way store B at offset+1, then A via "st -". */
5056 return reg_src == reg_base
5057 ? avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5058 "mov __zero_reg__,%B1" CR_TAB
5059 TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
5060 "st %b0,__zero_reg__" CR_TAB
5061 "st -%b0,__tmp_reg__" CR_TAB
5062 "clr __zero_reg__" CR_TAB
5063 TINY_SBIW (%I0, %J0, %o0), op, plen, -9)
5065 : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
5066 "st %b0,%B1" CR_TAB
5067 "st -%b0,%A1" CR_TAB
5068 TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
/* AVR_TINY: output a 2-byte (HImode) store through a post-increment
   pointer, writing the high byte first: advance to base+1, store B,
   step back and store A, then add 2 to realize the post-increment.
   OP[0] = destination MEM, OP[1] = source REG; count via PLEN.  */
5071 static const char*
5072 avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
5074 return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
5075 "st %p0,%B1" CR_TAB
5076 "st -%p0,%A1" CR_TAB
5077 TINY_ADIW (%I0, %J0, 2), op, plen, -6);
/* Output code to store the 16-bit register OP[1] to memory OP[0].
   INSN is the store insn; PLEN follows the avr_asm_len convention:
   NULL means print the instructions, otherwise only accumulate their
   count in *PLEN.  Returns "".  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS takes 2 words; on AVR_TINY the %m print modifier emits a
         shorter form, hence 2 words total instead of 4.  */
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        /* Address is in I/O range: OUT is 1 word per byte.  */
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too large for STD: only the frame pointer Y
             may be adjusted and restored around the stores.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        /* Source is X itself: save it in __tmp_reg__/__zero_reg__
           before X is used for addressing.  */
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: high byte must go out first, then fix the pointer.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Return 1 if frame pointer for current function required.  */

static bool
avr_frame_pointer_required_p (void)
{
  return (cfun->calls_alloca          /* dynamic stack allocation */
          || cfun->calls_setjmp
          || cfun->has_nonlocal_label
          /* No argument registers left over.  NOTE(review): presumably
             some incoming arguments then live on the stack and are
             addressed via the frame pointer — confirm against the avr
             argument-passing code.  */
          || crtl->args.info.nregs == 0
          || get_frame_size () > 0);  /* local variables on the stack */
}
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

static RTX_CODE
compare_condition (rtx_insn *insn)
{
  rtx_insn *next = next_real_insn (insn);

  if (next && JUMP_P (next))
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);

      /* A conditional branch's SET_SRC is (if_then_else (cond ...) ...);
         the comparison code is the condition's RTX code.  */
      if (IF_THEN_ELSE == GET_CODE (src))
        return GET_CODE (XEXP (src, 0));
    }

  return UNKNOWN;
}
5245 /* Returns true iff INSN is a tst insn that only tests the sign. */
5247 static bool
5248 compare_sign_p (rtx_insn *insn)
5250 RTX_CODE cond = compare_condition (insn);
5251 return (cond == GE || cond == LT);
5255 /* Returns true iff the next insn is a JUMP_INSN with a condition
5256 that needs to be swapped (GT, GTU, LE, LEU). */
5258 static bool
5259 compare_diff_p (rtx_insn *insn)
5261 RTX_CODE cond = compare_condition (insn);
5262 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5265 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5267 static bool
5268 compare_eq_p (rtx_insn *insn)
5270 RTX_CODE cond = compare_condition (insn);
5271 return (cond == EQ || cond == NE);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
                  Don't output anything.  */

const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against.  */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  machine_mode mode;

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x - 1) == 0:  DEC the low byte, then OR all
             bytes together.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1:  AND all bytes; the result is 0xff iff every byte
             was 0xff, and COM maps 0xff to 0.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              /* SBIW handled two bytes at once.  */
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For EQ/NE, comparing against a small negative value is
                 equivalent to adding its absolute value.  */
              return AVR_TINY
                ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      /* Reuse the scratch if it already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
5447 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5449 const char*
5450 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
5452 rtx xop[3];
5454 xop[0] = gen_rtx_REG (DImode, 18);
5455 xop[1] = op[0];
5456 xop[2] = op[1];
5458 return avr_out_compare (insn, xop, plen);
5461 /* Output test instruction for HImode. */
5463 const char*
5464 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
5466 if (compare_sign_p (insn))
5468 avr_asm_len ("tst %B0", op, plen, -1);
5470 else if (reg_unused_after (insn, op[0])
5471 && compare_eq_p (insn))
5473 /* Faster than sbiw if we can clobber the operand. */
5474 avr_asm_len ("or %A0,%B0", op, plen, -1);
5476 else
5478 avr_out_compare (insn, op, plen);
5481 return "";
5485 /* Output test instruction for PSImode. */
5487 const char*
5488 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
5490 if (compare_sign_p (insn))
5492 avr_asm_len ("tst %C0", op, plen, -1);
5494 else if (reg_unused_after (insn, op[0])
5495 && compare_eq_p (insn))
5497 /* Faster than sbiw if we can clobber the operand. */
5498 avr_asm_len ("or %A0,%B0" CR_TAB
5499 "or %A0,%C0", op, plen, -2);
5501 else
5503 avr_out_compare (insn, op, plen);
5506 return "";
5510 /* Output test instruction for SImode. */
5512 const char*
5513 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
5515 if (compare_sign_p (insn))
5517 avr_asm_len ("tst %D0", op, plen, -1);
5519 else if (reg_unused_after (insn, op[0])
5520 && compare_eq_p (insn))
5522 /* Faster than sbiw if we can clobber the operand. */
5523 avr_asm_len ("or %A0,%B0" CR_TAB
5524 "or %A0,%C0" CR_TAB
5525 "or %A0,%D0", op, plen, -3);
5527 else
5529 avr_out_compare (insn, op, plen);
5532 return "";
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
       available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   PLEN == NULL: output the instructions; PLEN != NULL: only count.  */

void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A scratch is only usable when the insn is a PARALLEL that
         actually allocated a hard register for operand 3.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Constant count is known non-zero, so no need to test before
         the first iteration.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      /* Load the shift count from memory into __tmp_reg__.  */
      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if it is still needed afterwards or overlaps
         the registers being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* With a runtime count the loop is entered at the decrement so that
     a count of zero performs no shift at all.  */
  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
/* 8bit shift left ((char)x << i)

   Return the assembler template for OPERANDS[0] <<= OPERANDS[2].
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= width: the result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP exchanges nibbles; ANDI masks the garbage and needs an
             upper (LD) register.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Only bit 0 survives: rotate it through carry into bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)

   Return the assembler template for OPERANDS[0] <<= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  Cases not
   handled here fall through to the generic loop code.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= width: the result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left by 6 == shift right by 2 into shifted bytes.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          /* Whole-byte move.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20, product high byte lands in r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 factor in r1 (__zero_reg__) with SET/BLD,
                 then restore r1 to zero.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small local loop shifting the high byte 6 times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Shift right by 2 into the high byte instead.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* Restore the caller's LEN (possibly NULL) for the generic code.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
/* 24-bit shift left

   Output OP[0] = OP[1] << OP[2] for PSImode; PLEN follows the
   avr_asm_len convention.  Returns "".  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shift count >= width: the result is zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Move bytes up; copy direction depends on the overlap of
               source and destination registers.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, rotated into bit 23.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
/* 32bit shift left ((long)x << i)

   Return the assembler template for OPERANDS[0] <<= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= width: the result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy direction depends on register overlap.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* High word of dest already aliases low word of source.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, rotated into bit 31.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      /* Restore the caller's LEN (possibly NULL) for the generic code.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i)

   Return the assembler template for OPERANDS[0] >>= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6 in T, fill the byte with the sign via LSL+SBC,
             then put bit 6 back as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Result is all sign bits: LSL moves the sign into carry,
             SBC then yields 0x00 or 0xff.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right ((signed short)x >> i)

   Return the assembler template for OPERANDS[0] >>= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left by 2 into the adjacent byte pair instead.  */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 0x20; high product in r1.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Result is all sign bits.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      /* Restore the caller's LEN (possibly NULL) for the generic code.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit arithmetic shift right

   Output OP[0] = OP[1] >> OP[2] for PSImode; PLEN follows the
   avr_asm_len convention.  Returns "".  */

const char*
avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Copy direction depends on register overlap; the sign is
             expanded into the top byte with SBRC/DEC.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Result is all sign bits.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit arithmetic shift right ((signed long)x >> i)

   Return the assembler template for OPERANDS[0] >>= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Copy direction depends on register overlap.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Result is all sign bits.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      /* Restore the caller's LEN (possibly NULL) for the generic code.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* 8-bit logic shift right ((unsigned char)x >> i)

   Return the assembler template for OPERANDS[0] >>= OPERANDS[2];
   *LEN (if LEN is non-NULL) receives the length in words.  */

const char *
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= width: the result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* SWAP exchanges nibbles; ANDI masks the garbage and needs an
             upper (LD) register.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* Only bit 7 survives: rotate it through carry into bit 0.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
6651 /* 16-bit logic shift right ((unsigned short)x >> i) */
/* Output assembler for a 16-bit logical right shift of OPERANDS[0]
   by OPERANDS[2] for INSN and return the asm template ("" when the
   code went through out_shift_with_cnt).  If LEN is non-NULL, *LEN
   receives the sequence length in words.  A PARALLEL insn pattern
   means a scratch register is available as %3.  */
6653 const char *
6654 lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6656 if (GET_CODE (operands[2]) == CONST_INT)
/* SCRATCH: insn supplies a clobber reg %3 (pattern is a PARALLEL).
   LDI_OK: destination is an upper register, immediates allowed.
   T saves the caller's LEN so it can be restored before the
   generic fall-back at the bottom.  */
6658 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6659 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6660 int k;
6661 int *t = len;
6663 if (!len)
6664 len = &k;
6666 switch (INTVAL (operands[2]))
/* A count >= 16 clears both bytes; smaller unhandled counts fall
   through to the generic loop below.  */
6668 default:
6669 if (INTVAL (operands[2]) < 16)
6670 break;
6672 *len = 2;
6673 return ("clr %B0" CR_TAB
6674 "clr %A0");
/* Count 4: nibble-swap both bytes, then exchange the stray nibbles
   between bytes via the EOR/AND/EOR masking trick.  */
6676 case 4:
6677 if (optimize_size && scratch)
6678 break; /* 5 */
6679 if (ldi_ok)
6681 *len = 6;
6682 return ("swap %B0" CR_TAB
6683 "swap %A0" CR_TAB
6684 "andi %A0,0x0f" CR_TAB
6685 "eor %A0,%B0" CR_TAB
6686 "andi %B0,0x0f" CR_TAB
6687 "eor %A0,%B0");
6689 if (scratch)
6691 *len = 7;
6692 return ("swap %B0" CR_TAB
6693 "swap %A0" CR_TAB
6694 "ldi %3,0x0f" CR_TAB
6695 "and %A0,%3" CR_TAB
6696 "eor %A0,%B0" CR_TAB
6697 "and %B0,%3" CR_TAB
6698 "eor %A0,%B0");
6700 break; /* optimize_size ? 6 : 8 */
/* Count 5: one plain shift, then the count-4 sequence.  */
6702 case 5:
6703 if (optimize_size)
6704 break; /* scratch ? 5 : 6 */
6705 if (ldi_ok)
6707 *len = 8;
6708 return ("lsr %B0" CR_TAB
6709 "ror %A0" CR_TAB
6710 "swap %B0" CR_TAB
6711 "swap %A0" CR_TAB
6712 "andi %A0,0x0f" CR_TAB
6713 "eor %A0,%B0" CR_TAB
6714 "andi %B0,0x0f" CR_TAB
6715 "eor %A0,%B0");
6717 if (scratch)
6719 *len = 9;
6720 return ("lsr %B0" CR_TAB
6721 "ror %A0" CR_TAB
6722 "swap %B0" CR_TAB
6723 "swap %A0" CR_TAB
6724 "ldi %3,0x0f" CR_TAB
6725 "and %A0,%3" CR_TAB
6726 "eor %A0,%B0" CR_TAB
6727 "and %B0,%3" CR_TAB
6728 "eor %A0,%B0");
6730 break; /* 10 */
/* Count 6: implement >> 6 as << 2 into a third byte (__tmp_reg__)
   and then shuffle the bytes down.  */
6732 case 6:
6733 if (optimize_size)
6734 break; /* scratch ? 5 : 6 */
6735 *len = 9;
6736 return ("clr __tmp_reg__" CR_TAB
6737 "lsl %A0" CR_TAB
6738 "rol %B0" CR_TAB
6739 "rol __tmp_reg__" CR_TAB
6740 "lsl %A0" CR_TAB
6741 "rol %B0" CR_TAB
6742 "rol __tmp_reg__" CR_TAB
6743 "mov %A0,%B0" CR_TAB
6744 "mov %B0,__tmp_reg__");
/* Count 7: shift left by one so the wanted bits line up on a byte
   boundary, then move the high byte down.  */
6746 case 7:
6747 *len = 5;
6748 return ("lsl %A0" CR_TAB
6749 "mov %A0,%B0" CR_TAB
6750 "rol %A0" CR_TAB
6751 "sbc %B0,%B0" CR_TAB
6752 "neg %B0");
/* Counts 8..15: a whole-byte move plus a residual 0..7 bit shift.  */
6754 case 8:
6755 return *len = 2, ("mov %A0,%B1" CR_TAB
6756 "clr %B0");
6758 case 9:
6759 *len = 3;
6760 return ("mov %A0,%B0" CR_TAB
6761 "clr %B0" CR_TAB
6762 "lsr %A0");
6764 case 10:
6765 *len = 4;
6766 return ("mov %A0,%B0" CR_TAB
6767 "clr %B0" CR_TAB
6768 "lsr %A0" CR_TAB
6769 "lsr %A0");
6771 case 11:
6772 *len = 5;
6773 return ("mov %A0,%B0" CR_TAB
6774 "clr %B0" CR_TAB
6775 "lsr %A0" CR_TAB
6776 "lsr %A0" CR_TAB
6777 "lsr %A0");
6779 case 12:
6780 if (ldi_ok)
6782 *len = 4;
6783 return ("mov %A0,%B0" CR_TAB
6784 "clr %B0" CR_TAB
6785 "swap %A0" CR_TAB
6786 "andi %A0,0x0f");
6788 if (scratch)
6790 *len = 5;
6791 return ("mov %A0,%B0" CR_TAB
6792 "clr %B0" CR_TAB
6793 "swap %A0" CR_TAB
6794 "ldi %3,0x0f" CR_TAB
6795 "and %A0,%3");
6797 *len = 6;
6798 return ("mov %A0,%B0" CR_TAB
6799 "clr %B0" CR_TAB
6800 "lsr %A0" CR_TAB
6801 "lsr %A0" CR_TAB
6802 "lsr %A0" CR_TAB
6803 "lsr %A0");
6805 case 13:
6806 if (ldi_ok)
6808 *len = 5;
6809 return ("mov %A0,%B0" CR_TAB
6810 "clr %B0" CR_TAB
6811 "swap %A0" CR_TAB
6812 "lsr %A0" CR_TAB
6813 "andi %A0,0x07");
/* With a hardware multiplier, x >> 13 == high byte of x*8; the
   product's high byte lands in r1, which must be re-zeroed.  */
6815 if (AVR_HAVE_MUL && scratch)
6817 *len = 5;
6818 return ("ldi %3,0x08" CR_TAB
6819 "mul %B0,%3" CR_TAB
6820 "mov %A0,r1" CR_TAB
6821 "clr %B0" CR_TAB
6822 "clr __zero_reg__");
6824 if (optimize_size && scratch)
6825 break; /* 5 */
6826 if (scratch)
6828 *len = 6;
6829 return ("mov %A0,%B0" CR_TAB
6830 "clr %B0" CR_TAB
6831 "swap %A0" CR_TAB
6832 "lsr %A0" CR_TAB
6833 "ldi %3,0x07" CR_TAB
6834 "and %A0,%3");
/* No scratch: build the 0x08 multiplier in r1 itself via SET/BLD.  */
6836 if (AVR_HAVE_MUL)
6838 *len = 6;
6839 return ("set" CR_TAB
6840 "bld r1,3" CR_TAB
6841 "mul %B0,r1" CR_TAB
6842 "mov %A0,r1" CR_TAB
6843 "clr %B0" CR_TAB
6844 "clr __zero_reg__");
6846 *len = 7;
6847 return ("mov %A0,%B0" CR_TAB
6848 "clr %B0" CR_TAB
6849 "lsr %A0" CR_TAB
6850 "lsr %A0" CR_TAB
6851 "lsr %A0" CR_TAB
6852 "lsr %A0" CR_TAB
6853 "lsr %A0");
6855 case 14:
6856 if (AVR_HAVE_MUL && ldi_ok)
6858 *len = 5;
6859 return ("ldi %A0,0x04" CR_TAB
6860 "mul %B0,%A0" CR_TAB
6861 "mov %A0,r1" CR_TAB
6862 "clr %B0" CR_TAB
6863 "clr __zero_reg__");
6865 if (AVR_HAVE_MUL && scratch)
6867 *len = 5;
6868 return ("ldi %3,0x04" CR_TAB
6869 "mul %B0,%3" CR_TAB
6870 "mov %A0,r1" CR_TAB
6871 "clr %B0" CR_TAB
6872 "clr __zero_reg__");
/* Size optimization: an explicit 6-iteration shift loop, reusing
   %B0 as the loop counter since its value is dead afterwards.  */
6874 if (optimize_size && ldi_ok)
6876 *len = 5;
6877 return ("mov %A0,%B0" CR_TAB
6878 "ldi %B0,6" "\n1:\t"
6879 "lsr %A0" CR_TAB
6880 "dec %B0" CR_TAB
6881 "brne 1b");
6883 if (optimize_size && scratch)
6884 break; /* 5 */
/* Counts 14/15 as left shifts: rotate the top bits of %B0 into a
   cleared %A0.  */
6885 *len = 6;
6886 return ("clr %A0" CR_TAB
6887 "lsl %B0" CR_TAB
6888 "rol %A0" CR_TAB
6889 "lsl %B0" CR_TAB
6890 "rol %A0" CR_TAB
6891 "clr %B0");
6893 case 15:
6894 *len = 4;
6895 return ("clr %A0" CR_TAB
6896 "lsl %B0" CR_TAB
6897 "rol %A0" CR_TAB
6898 "clr %B0");
/* Restore the caller's LEN (possibly NULL) for the generic path.  */
6900 len = t;
6902 out_shift_with_cnt ("lsr %B0" CR_TAB
6903 "ror %A0", insn, operands, len, 2);
6904 return "";
6908 /* 24-bit logic shift right */
/* Output assembler for a 24-bit (PSImode) logical right shift
   OP[0] = OP[1] >> OP[2] for INSN and return "".  If PLEN is
   non-NULL, *PLEN accumulates the sequence length in words instead
   of printing; all emission goes through avr_asm_len.  */
6910 const char*
6911 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6913 int dest = REGNO (op[0]);
6914 int src = REGNO (op[1]);
6916 if (CONST_INT_P (op[2]))
6918 if (plen)
6919 *plen = 0;
6921 switch (INTVAL (op[2]))
/* Byte-wise moves: order the MOVs by register direction so that
   overlapping source/destination registers are not clobbered
   before they are read.  */
6923 case 8:
6924 if (dest <= src)
6925 return avr_asm_len ("mov %A0,%B1" CR_TAB
6926 "mov %B0,%C1" CR_TAB
6927 "clr %C0", op, plen, 3);
6928 else
6929 return avr_asm_len ("clr %C0" CR_TAB
6930 "mov %B0,%C1" CR_TAB
6931 "mov %A0,%B1", op, plen, 3);
6933 case 16:
6934 if (dest != src + 2)
6935 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6937 return avr_asm_len ("clr %B0" CR_TAB
6938 "clr %C0", op, plen, 2);
/* Counts >= 24 behave like count 23 here: only the sign bit of the
   source can survive.  Smaller unhandled counts use the loop.  */
6940 default:
6941 if (INTVAL (op[2]) < 24)
6942 break;
6944 /* fall through */
/* Count 23: result is 1 when bit 23 of the source is set, else 0.  */
6946 case 23:
6947 return avr_asm_len ("clr %A0" CR_TAB
6948 "sbrc %C0,7" CR_TAB
6949 "inc %A0" CR_TAB
6950 "clr %B0" CR_TAB
6951 "clr %C0", op, plen, 5);
6952 } /* switch */
/* Generic path: a counted loop shifting one bit per iteration.  */
6955 out_shift_with_cnt ("lsr %C0" CR_TAB
6956 "ror %B0" CR_TAB
6957 "ror %A0", insn, op, plen, 3);
6958 return "";
6962 /* 32-bit logic shift right ((unsigned int)x >> i) */
/* Output assembler for a 32-bit logical right shift of OPERANDS[0]
   by OPERANDS[2] for INSN and return the asm template ("" when
   emitted via out_shift_with_cnt).  If LEN is non-NULL, *LEN is set
   to the sequence length in words; T saves the caller's LEN for the
   generic fall-back path.  */
6964 const char *
6965 lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6967 if (GET_CODE (operands[2]) == CONST_INT)
6969 int k;
6970 int *t = len;
6972 if (!len)
6973 len = &k;
6975 switch (INTVAL (operands[2]))
/* Counts >= 32 clear all four bytes; MOVW copies the two cleared
   high bytes to the low word in one instruction.  */
6977 default:
6978 if (INTVAL (operands[2]) < 32)
6979 break;
6981 if (AVR_HAVE_MOVW)
6982 return *len = 3, ("clr %D0" CR_TAB
6983 "clr %C0" CR_TAB
6984 "movw %A0,%C0");
6985 *len = 4;
6986 return ("clr %D0" CR_TAB
6987 "clr %C0" CR_TAB
6988 "clr %B0" CR_TAB
6989 "clr %A0");
/* Whole-byte shifts: order the MOVs by register direction so
   overlapping source/destination bytes are read before being
   overwritten.  */
6991 case 8:
6993 int reg0 = true_regnum (operands[0]);
6994 int reg1 = true_regnum (operands[1]);
6995 *len = 4;
6996 if (reg0 <= reg1)
6997 return ("mov %A0,%B1" CR_TAB
6998 "mov %B0,%C1" CR_TAB
6999 "mov %C0,%D1" CR_TAB
7000 "clr %D0");
7001 else
7002 return ("clr %D0" CR_TAB
7003 "mov %C0,%D1" CR_TAB
7004 "mov %B0,%C1" CR_TAB
7005 "mov %A0,%B1");
7008 case 16:
7010 int reg0 = true_regnum (operands[0]);
7011 int reg1 = true_regnum (operands[1]);
/* If the destination already aliases the source's high word, only
   the clears are needed.  */
7013 if (reg0 == reg1 + 2)
7014 return *len = 2, ("clr %C0" CR_TAB
7015 "clr %D0");
7016 if (AVR_HAVE_MOVW)
7017 return *len = 3, ("movw %A0,%C1" CR_TAB
7018 "clr %C0" CR_TAB
7019 "clr %D0");
7020 else
7021 return *len = 4, ("mov %B0,%D1" CR_TAB
7022 "mov %A0,%C1" CR_TAB
7023 "clr %C0" CR_TAB
7024 "clr %D0");
7027 case 24:
7028 return *len = 4, ("mov %A0,%D1" CR_TAB
7029 "clr %B0" CR_TAB
7030 "clr %C0" CR_TAB
7031 "clr %D0");
/* Count 31: result is 1 iff the sign bit of the source is set.  */
7033 case 31:
7034 *len = 6;
7035 return ("clr %A0" CR_TAB
7036 "sbrc %D0,7" CR_TAB
7037 "inc %A0" CR_TAB
7038 "clr %B0" CR_TAB
7039 "clr %C0" CR_TAB
7040 "clr %D0");
/* Restore the caller's LEN (possibly NULL) for the generic loop.  */
7042 len = t;
7044 out_shift_with_cnt ("lsr %D0" CR_TAB
7045 "ror %C0" CR_TAB
7046 "ror %B0" CR_TAB
7047 "ror %A0", insn, operands, len, 4);
7048 return "";
7052 /* Output addition of register XOP[0] and compile time constant XOP[2].
7053 CODE == PLUS: perform addition by using ADD instructions or
7054 CODE == MINUS: perform addition by using SUB instructions:
7056 XOP[0] = XOP[0] + XOP[2]
7058 Or perform addition/subtraction with register XOP[2] depending on CODE:
7060 XOP[0] = XOP[0] +/- XOP[2]
7062 If PLEN == NULL, print assembler instructions to perform the operation;
7063 otherwise, set *PLEN to the length of the instruction sequence (in words)
7064 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7065 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7067 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7068 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7069 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7070 the subtrahend in the original insn, provided it is a compile time constant.
7071 In all other cases, SIGN is 0.
7073 If OUT_LABEL is true, print the final 0: label which is needed for
7074 saturated addition / subtraction. The only case where OUT_LABEL = false
7075 is useful is for saturated addition / subtraction performed during
7076 fixed-point rounding, cf. `avr_out_round'. */
7078 static void
7079 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
7080 enum rtx_code code_sat, int sign, bool out_label)
7082 /* MODE of the operation. */
7083 machine_mode mode = GET_MODE (xop[0]);
7085 /* INT_MODE of the same size. */
7086 machine_mode imode = int_mode_for_mode (mode);
7088 /* Number of bytes to operate on. */
7089 int i, n_bytes = GET_MODE_SIZE (mode);
7091 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7092 int clobber_val = -1;
7094 /* op[0]: 8-bit destination register
7095 op[1]: 8-bit const int
7096 op[2]: 8-bit scratch register */
7097 rtx op[3];
7099 /* Started the operation? Before starting the operation we may skip
7100 adding 0. This is no more true after the operation started because
7101 carry must be taken into account. */
7102 bool started = false;
7104 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
7105 rtx xval = xop[2];
7107 /* Output a BRVC instruction. Only needed with saturation. */
7108 bool out_brvc = true;
7110 if (plen)
7111 *plen = 0;
/* Register operand XOP[2]: straightforward byte-wise ADD/ADC
   resp. SUB/SBC chain over all N_BYTES.  */
7113 if (REG_P (xop[2]))
7115 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;
7117 for (i = 0; i < n_bytes; i++)
7119 /* We operate byte-wise on the destination. */
7120 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
7121 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
7123 if (i == 0)
7124 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
7125 op, plen, 1);
7126 else
7127 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
7128 op, plen, 1);
/* Operands overlap: x + x resp. x - x.  The latter is 0 and never
   saturates, so only the PLUS case proceeds to saturation.  */
7131 if (reg_overlap_mentioned_p (xop[0], xop[2]))
7133 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
7135 if (MINUS == code)
7136 return;
7139 goto saturate;
7142 /* Except in the case of ADIW with 16-bit register (see below)
7143 addition does not set cc0 in a usable way. */
7145 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
/* Fixed-point constants are handled through their integer image.  */
7147 if (CONST_FIXED_P (xval))
7148 xval = avr_to_int_mode (xval);
7150 /* Adding/Subtracting zero is a no-op. */
7152 if (xval == const0_rtx)
7154 *pcc = CC_NONE;
7155 return;
/* Canonicalize to an addition: R -= VAL becomes R += -VAL.  */
7158 if (MINUS == code)
7159 xval = simplify_unary_operation (NEG, imode, xval, imode);
7161 op[2] = xop[3];
7163 if (SS_PLUS == code_sat && MINUS == code
7164 && sign < 0
7165 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
7166 & GET_MODE_MASK (QImode)))
7168 /* We compute x + 0x80 by means of SUB instructions. We negated the
7169 constant subtrahend above and are left with x - (-128) so that we
7170 need something like SUBI r,128 which does not exist because SUBI sets
7171 V according to the sign of the subtrahend. Notice the only case
7172 where this must be done is when NEG overflowed in case [2s] because
7173 the V computation needs the right sign of the subtrahend. */
7175 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7177 avr_asm_len ("subi %0,128" CR_TAB
7178 "brmi 0f", &msb, plen, 2);
7179 out_brvc = false;
7181 goto saturate;
/* Main loop: add the constant byte by byte, skipping zero bytes
   while no carry is live yet.  */
7184 for (i = 0; i < n_bytes; i++)
7186 /* We operate byte-wise on the destination. */
7187 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7188 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
7190 /* 8-bit value to operate with this byte. */
7191 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7193 /* Registers R16..R31 can operate with immediate. */
7194 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7196 op[0] = reg8;
7197 op[1] = gen_int_mode (val8, QImode);
7199 /* To get usable cc0 no low-bytes must have been skipped. */
7201 if (i && !started)
7202 *pcc = CC_CLOBBER;
7204 if (!started
7205 && i % 2 == 0
7206 && i + 2 <= n_bytes
7207 && test_hard_reg_class (ADDW_REGS, reg8))
7209 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
7210 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
7212 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7213 i.e. operate word-wise. */
7215 if (val16 < 64)
7217 if (val16 != 0)
7219 started = true;
7220 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
7221 op, plen, 1);
7223 if (n_bytes == 2 && PLUS == code)
7224 *pcc = CC_SET_CZN;
/* ADIW/SBIW consumed two bytes; skip the partner byte.  */
7227 i++;
7228 continue;
/* Zero byte: only needed once a carry may be pending.  */
7232 if (val8 == 0)
7234 if (started)
7235 avr_asm_len (code == PLUS
7236 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7237 op, plen, 1);
7238 continue;
/* +/-1 in the most significant byte with no carry pending can be
   done with a single INC or DEC.  */
7240 else if ((val8 == 1 || val8 == 0xff)
7241 && UNKNOWN == code_sat
7242 && !started
7243 && i == n_bytes - 1)
7245 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
7246 op, plen, 1);
7247 *pcc = CC_CLOBBER;
7248 break;
7251 switch (code)
7253 case PLUS:
/* PLUS with immediate needs the scratch op[2]; there is no ADDI.  */
7255 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
7257 if (plen != NULL && UNKNOWN != code_sat)
7259 /* This belongs to the x + 0x80 corner case. The code with
7260 ADD instruction is not smaller, thus make this case
7261 expensive so that the caller won't pick it. */
7263 *plen += 10;
7264 break;
/* Reload the scratch only when its cached value changes.  */
7267 if (clobber_val != (int) val8)
7268 avr_asm_len ("ldi %2,%1", op, plen, 1);
7269 clobber_val = (int) val8;
7271 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
7273 break; /* PLUS */
7275 case MINUS:
/* SUBI/SBCI exist, so LD regs can subtract immediates directly.  */
7277 if (ld_reg_p)
7278 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
7279 else
7281 gcc_assert (plen != NULL || REG_P (op[2]));
7283 if (clobber_val != (int) val8)
7284 avr_asm_len ("ldi %2,%1", op, plen, 1);
7285 clobber_val = (int) val8;
7287 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
7290 break; /* MINUS */
7292 default:
7293 /* Unknown code */
7294 gcc_unreachable();
7297 started = true;
7299 } /* for all sub-bytes */
7301 saturate:
7303 if (UNKNOWN == code_sat)
7304 return;
7306 *pcc = (int) CC_CLOBBER;
7308 /* Vanilla addition/subtraction is done. We are left with saturation.
7310 We have to compute A = A <op> B where A is a register and
7311 B is a register or a non-zero compile time constant CONST.
7312 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
7313 B stands for the original operand $2 in INSN. In the case of B = CONST,
7314 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
7316 CODE is the instruction flavor we use in the asm sequence to perform <op>.
7319 unsigned
7320 operation | code | sat if | b is | sat value | case
7321 -----------------+-------+----------+--------------+-----------+-------
7322 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
7323 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
7324 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
7325 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
7328 signed
7329 operation | code | sat if | b is | sat value | case
7330 -----------------+-------+----------+--------------+-----------+-------
7331 + as a + b | add | V == 1 | const, reg | s+ | [1s]
7332 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
7333 - as a - b | sub | V == 1 | const, reg | s- | [3s]
7334 - as a + (-b) | add | V == 1 | const | s- | [4s]
7336 s+ = b < 0 ? -0x80 : 0x7f
7337 s- = b < 0 ? 0x7f : -0x80
7339 The cases a - b actually perform a - (-(-b)) if B is CONST.
/* op[0] = MSB of the result, op[1] = second-most-significant byte
   (NULL_RTX for 1-byte modes).  */
7342 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7343 op[1] = n_bytes > 1
7344 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
7345 : NULL_RTX;
7347 bool need_copy = true;
7348 int len_call = 1 + AVR_HAVE_JMP_CALL;
7350 switch (code_sat)
7352 default:
7353 gcc_unreachable();
7355 case SS_PLUS:
7356 case SS_MINUS:
/* Skip the saturation fix-up when the signed operation did not
   overflow (V flag clear).  */
7358 if (out_brvc)
7359 avr_asm_len ("brvc 0f", op, plen, 1);
7361 if (reg_overlap_mentioned_p (xop[0], xop[2]))
7363 /* [1s,reg] */
7365 if (n_bytes == 1)
7366 avr_asm_len ("ldi %0,0x7f" CR_TAB
7367 "adc %0,__zero_reg__", op, plen, 2);
7368 else
7369 avr_asm_len ("ldi %0,0x7f" CR_TAB
7370 "ldi %1,0xff" CR_TAB
7371 "adc %1,__zero_reg__" CR_TAB
7372 "adc %0,__zero_reg__", op, plen, 4);
7374 else if (sign == 0 && PLUS == code)
7376 /* [1s,reg] */
7378 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7380 if (n_bytes == 1)
7381 avr_asm_len ("ldi %0,0x80" CR_TAB
7382 "sbrs %2,7" CR_TAB
7383 "dec %0", op, plen, 3);
7384 else
7385 avr_asm_len ("ldi %0,0x80" CR_TAB
7386 "cp %2,%0" CR_TAB
7387 "sbc %1,%1" CR_TAB
7388 "sbci %0,0", op, plen, 4);
7390 else if (sign == 0 && MINUS == code)
7392 /* [3s,reg] */
7394 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7396 if (n_bytes == 1)
7397 avr_asm_len ("ldi %0,0x7f" CR_TAB
7398 "sbrs %2,7" CR_TAB
7399 "inc %0", op, plen, 3);
7400 else
7401 avr_asm_len ("ldi %0,0x7f" CR_TAB
7402 "cp %0,%2" CR_TAB
7403 "sbc %1,%1" CR_TAB
7404 "sbci %0,-1", op, plen, 4);
7406 else if ((sign < 0) ^ (SS_MINUS == code_sat))
7408 /* [1s,const,B < 0] [2s,B < 0] */
7409 /* [3s,const,B > 0] [4s,B > 0] */
/* 8-byte modes delegate clearing all bytes to a library call.  */
7411 if (n_bytes == 8)
7413 avr_asm_len ("%~call __clr_8", op, plen, len_call);
7414 need_copy = false;
7417 avr_asm_len ("ldi %0,0x80", op, plen, 1);
7418 if (n_bytes > 1 && need_copy)
7419 avr_asm_len ("clr %1", op, plen, 1);
7421 else if ((sign > 0) ^ (SS_MINUS == code_sat))
7423 /* [1s,const,B > 0] [2s,B > 0] */
7424 /* [3s,const,B < 0] [4s,B < 0] */
7426 if (n_bytes == 8)
7428 avr_asm_len ("sec" CR_TAB
7429 "%~call __sbc_8", op, plen, 1 + len_call);
7430 need_copy = false;
7433 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
7434 if (n_bytes > 1 && need_copy)
7435 avr_asm_len ("ldi %1,0xff", op, plen, 1);
7437 else
7438 gcc_unreachable();
7440 break;
7442 case US_PLUS:
7443 /* [1u] : [2u] */
7445 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
7447 if (n_bytes == 8)
7449 if (MINUS == code)
7450 avr_asm_len ("sec", op, plen, 1);
7451 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
7453 need_copy = false;
7455 else
7457 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
7458 avr_asm_len ("sec" CR_TAB
7459 "sbc %0,%0", op, plen, 2);
7460 else
/* Set the MSB to 0xff: SBC with carry set, or LDI for LD regs.  */
7461 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
7462 op, plen, 1);
7464 break; /* US_PLUS */
7466 case US_MINUS:
7467 /* [4u] : [3u] */
7469 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
7471 if (n_bytes == 8)
7473 avr_asm_len ("%~call __clr_8", op, plen, len_call);
7474 need_copy = false;
7476 else
7477 avr_asm_len ("clr %0", op, plen, 1);
7479 break;
7482 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7483 Now copy the right value to the LSBs. */
7485 if (need_copy && n_bytes > 1)
7487 if (US_MINUS == code_sat || US_PLUS == code_sat)
7489 avr_asm_len ("mov %1,%0", op, plen, 1);
7491 if (n_bytes > 2)
7493 op[0] = xop[0];
7494 if (AVR_HAVE_MOVW)
7495 avr_asm_len ("movw %0,%1", op, plen, 1);
7496 else
7497 avr_asm_len ("mov %A0,%1" CR_TAB
7498 "mov %B0,%1", op, plen, 2);
7501 else if (n_bytes > 2)
7503 op[0] = xop[0];
7504 avr_asm_len ("mov %A0,%1" CR_TAB
7505 "mov %B0,%1", op, plen, 2);
/* 8-byte modes: replicate the fill byte into the four middle bytes
   using the %rN+K register-offset notation.  */
7509 if (need_copy && n_bytes == 8)
7511 if (AVR_HAVE_MOVW)
7512 avr_asm_len ("movw %r0+2,%0" CR_TAB
7513 "movw %r0+4,%0", xop, plen, 2);
7514 else
7515 avr_asm_len ("mov %r0+2,%0" CR_TAB
7516 "mov %r0+3,%0" CR_TAB
7517 "mov %r0+4,%0" CR_TAB
7518 "mov %r0+5,%0", xop, plen, 4);
/* Target of the BRVC/BRCC/BRCS skips above.  */
7521 if (out_label)
7522 avr_asm_len ("0:", op, plen, 0);
7526 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7527 is not a compile-time constant:
7529 XOP[0] = XOP[0] +/- XOP[2]
7531 This is a helper for the function below. The only insns that need this
7532 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
/* If PLEN is non-NULL, *PLEN receives the length in words (reset by the
   negative length argument to avr_asm_len); *PCC is set to the cc0
   effect.  Always returns "".  */
7534 static const char*
7535 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
7537 machine_mode mode = GET_MODE (xop[0]);
7539 /* Only pointer modes want to add symbols. */
7541 gcc_assert (mode == HImode || mode == PSImode);
7543 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
/* There is no ADDI; addition is done as subtraction of the negated
   symbol value via the lo8/hi8 relocation operators.  */
7545 avr_asm_len (PLUS == code
7546 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7547 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
7548 xop, plen, -2);
/* PSImode needs a third byte handled with the hlo8 operator.  */
7550 if (PSImode == mode)
7551 avr_asm_len (PLUS == code
7552 ? "sbci %C0,hlo8(-(%2))"
7553 : "sbci %C0,hlo8(%2)", xop, plen, 1);
7554 return "";
7558 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7560 INSN is a single_set insn or an insn pattern with a binary operation as
7561 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7563 XOP are the operands of INSN. In the case of 64-bit operations with
7564 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7565 The non-saturating insns up to 32 bits may or may not supply a "d" class
7566 scratch as XOP[3].
7568 If PLEN == NULL output the instructions.
7569 If PLEN != NULL set *PLEN to the length of the sequence in words.
7571 PCC is a pointer to store the instructions' effect on cc0.
7572 PCC may be NULL.
7574 PLEN and PCC default to NULL.
7576 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
7578 Return "" */
7580 const char*
7581 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
7583 int cc_plus, cc_minus, cc_dummy;
7584 int len_plus, len_minus;
7585 rtx op[4];
7586 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7587 rtx xdest = SET_DEST (xpattern);
7588 machine_mode mode = GET_MODE (xdest);
7589 machine_mode imode = int_mode_for_mode (mode);
7590 int n_bytes = GET_MODE_SIZE (mode);
7591 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
/* Saturating codes select the underlying non-saturating flavor.  */
7592 enum rtx_code code
7593 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
7594 ? PLUS : MINUS);
7596 if (!pcc)
7597 pcc = &cc_dummy;
7599 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
7601 if (PLUS == code_sat || MINUS == code_sat)
7602 code_sat = UNKNOWN;
/* A register operand XOP[2] needs no constant preparation: hand the
   operands straight to the worker.  */
7604 if (n_bytes <= 4 && REG_P (xop[2]))
7606 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
7607 return "";
/* 64-bit case: the operation runs on the fixed accumulator ACC_A
   and XOP[0] holds the constant summand/subtrahend.  */
7610 if (8 == n_bytes)
7612 op[0] = gen_rtx_REG (DImode, ACC_A);
7613 op[1] = gen_rtx_REG (DImode, ACC_A);
7614 op[2] = avr_to_int_mode (xop[0]);
7616 else
/* A symbolic addend (neither register nor numeric constant) gets
   the relocation-based sequence.  */
7618 if (!REG_P (xop[2])
7619 && !CONST_INT_P (xop[2])
7620 && !CONST_FIXED_P (xop[2]))
7622 return avr_out_plus_symbol (xop, code, plen, pcc);
7625 op[0] = avr_to_int_mode (xop[0]);
7626 op[1] = avr_to_int_mode (xop[1]);
7627 op[2] = avr_to_int_mode (xop[2]);
7630 /* Saturations and 64-bit operations don't have a clobber operand.
7631 For the other cases, the caller will provide a proper XOP[3]. */
7633 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
7634 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
7636 /* Saturation will need the sign of the original operand. */
7638 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
7639 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
7641 /* If we subtract and the subtrahend is a constant, then negate it
7642 so that avr_out_plus_1 can be used. */
7644 if (MINUS == code)
7645 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
7647 /* Work out the shortest sequence. */
/* Compute both lengths first (avr_out_plus_1 with non-NULL plen only
   measures), then emit whichever is shorter, preferring MINUS on ties.  */
7649 avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
7650 avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
7652 if (plen)
7654 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
7655 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
7657 else if (len_minus <= len_plus)
7658 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
7659 else
7660 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
7662 return "";
7666 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7667 time constant XOP[2]:
7669 XOP[0] = XOP[0] <op> XOP[2]
7671 and return "". If PLEN == NULL, print assembler instructions to perform the
7672 operation; otherwise, set *PLEN to the length of the instruction sequence
7673 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
7674 register or SCRATCH if no clobber register is needed for the operation.
7675 INSN is an INSN_P or a pattern of an insn. */
7677 const char*
7678 avr_out_bitop (rtx insn, rtx *xop, int *plen)
7680 /* CODE and MODE of the operation. */
7681 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7682 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
7683 machine_mode mode = GET_MODE (xop[0]);
7685 /* Number of bytes to operate on. */
7686 int i, n_bytes = GET_MODE_SIZE (mode);
7688 /* Value of T-flag (0 or 1) or -1 if unknown. */
7689 int set_t = -1;
7691 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7692 int clobber_val = -1;
7694 /* op[0]: 8-bit destination register
7695 op[1]: 8-bit const int
7696 op[2]: 8-bit clobber register or SCRATCH
7697 op[3]: 8-bit register containing 0xff or NULL_RTX */
7698 rtx op[4];
7700 op[2] = xop[3];
7701 op[3] = NULL_RTX;
7703 if (plen)
7704 *plen = 0;
/* Process the constant byte-wise; each byte is classified by its
   popcount to pick the cheapest instruction.  */
7706 for (i = 0; i < n_bytes; i++)
7708 /* We operate byte-wise on the destination. */
7709 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7710 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
7712 /* 8-bit value to operate with this byte. */
7713 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7715 /* Number of bits set in the current byte of the constant. */
7716 int pop8 = avr_popcount (val8);
7718 /* Registers R16..R31 can operate with immediate. */
7719 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7721 op[0] = reg8;
7722 op[1] = GEN_INT (val8);
7724 switch (code)
7726 case IOR:
/* OR with 0 is a no-op; a single set bit can be inserted through
   the T flag (SET once, BLD per byte); OR with 0xff loads 0xff,
   caching it in op[3] for later bytes.  */
7728 if (0 == pop8)
7729 continue;
7730 else if (ld_reg_p)
7731 avr_asm_len ("ori %0,%1", op, plen, 1);
7732 else if (1 == pop8)
7734 if (set_t != 1)
7735 avr_asm_len ("set", op, plen, 1);
7736 set_t = 1;
7738 op[1] = GEN_INT (exact_log2 (val8));
7739 avr_asm_len ("bld %0,%1", op, plen, 1);
7741 else if (8 == pop8)
7743 if (op[3] != NULL_RTX)
7744 avr_asm_len ("mov %0,%3", op, plen, 1);
7745 else
7746 avr_asm_len ("clr %0" CR_TAB
7747 "dec %0", op, plen, 2);
7749 op[3] = op[0];
7751 else
7753 if (clobber_val != (int) val8)
7754 avr_asm_len ("ldi %2,%1", op, plen, 1);
7755 clobber_val = (int) val8;
7757 avr_asm_len ("or %0,%2", op, plen, 1);
7760 continue; /* IOR */
7762 case AND:
/* AND with 0xff is a no-op; with 0 it is a clear; a single zero
   bit is cleared through the T flag (CLT + BLD).  */
7764 if (8 == pop8)
7765 continue;
7766 else if (0 == pop8)
7767 avr_asm_len ("clr %0", op, plen, 1);
7768 else if (ld_reg_p)
7769 avr_asm_len ("andi %0,%1", op, plen, 1);
7770 else if (7 == pop8)
7772 if (set_t != 0)
7773 avr_asm_len ("clt", op, plen, 1);
7774 set_t = 0;
7776 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
7777 avr_asm_len ("bld %0,%1", op, plen, 1);
7779 else
7781 if (clobber_val != (int) val8)
7782 avr_asm_len ("ldi %2,%1", op, plen, 1);
7783 clobber_val = (int) val8;
7785 avr_asm_len ("and %0,%2", op, plen, 1);
7788 continue; /* AND */
7790 case XOR:
/* XOR with 0 is a no-op; with 0xff a one's complement (COM);
   flipping only the sign bit on an LD reg works as SUBI 0x80.  */
7792 if (0 == pop8)
7793 continue;
7794 else if (8 == pop8)
7795 avr_asm_len ("com %0", op, plen, 1);
7796 else if (ld_reg_p && val8 == (1 << 7))
7797 avr_asm_len ("subi %0,%1", op, plen, 1);
7798 else
7800 if (clobber_val != (int) val8)
7801 avr_asm_len ("ldi %2,%1", op, plen, 1);
7802 clobber_val = (int) val8;
7804 avr_asm_len ("eor %0,%2", op, plen, 1);
7807 continue; /* XOR */
7809 default:
7810 /* Unknown rtx_code */
7811 gcc_unreachable();
7813 } /* for all sub-bytes */
7815 return "";
7819 /* Output sign extension from XOP[1] to XOP[0] and return "".
7820 If PLEN == NULL, print assembler instructions to perform the operation;
7821 otherwise, set *PLEN to the length of the instruction sequence (in words)
7822 as printed with PLEN == NULL. */
7824 const char*
7825 avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
7827 // Size in bytes of source resp. destination operand.
7828 unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
7829 unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
// Most significant byte of the source: its bit 7 is the sign.
7830 rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];
7832 if (plen)
7833 *plen = 0;
7835 // Copy destination to source
7837 if (REGNO (xop[0]) != REGNO (xop[1]))
7839 gcc_assert (n_src <= 2);
7841 if (n_src == 2)
7842 avr_asm_len (AVR_HAVE_MOVW
7843 ? "movw %0,%1"
7844 : "mov %B0,%B1", xop, plen, 1);
7845 if (n_src == 1 || !AVR_HAVE_MOVW)
7846 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
7849 // Set Carry to the sign bit MSB.7...
// LSL destroys its operand, so shift a copy in __tmp_reg__ when the
// MSB must survive (in-place extension or MSB still live after INSN).
7851 if (REGNO (xop[0]) == REGNO (xop[1])
7852 || !reg_unused_after (insn, r_msb))
7854 avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
7855 r_msb = tmp_reg_rtx;
7858 avr_asm_len ("lsl %0", &r_msb, plen, 1);
7860 // ...and propagate it to all the new sign bits
// SBC r,r yields 0x00 or 0xff depending on the carry.
7862 for (unsigned n = n_src; n < n_dest; n++)
7863 avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);
7865 return "";
7869 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7870 PLEN != NULL: Set *PLEN to the length of that sequence.
7871 Return "". */
7873 const char*
7874 avr_out_addto_sp (rtx *op, int *plen)
/* "rcall ." pushes a return address and falls through, reserving
   PC_LEN (2 or 3) stack bytes in a single one-word instruction.  */
7876 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7877 int addend = INTVAL (op[0]);
7879 if (plen)
7880 *plen = 0;
/* Negative addend: grow the frame.  Use RCALL for chunks of PC_LEN
   bytes, then PUSH __zero_reg__ for the remainder.  */
7882 if (addend < 0)
7884 if (flag_verbose_asm || flag_print_asm_name)
7885 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7887 while (addend <= -pc_len)
7889 addend += pc_len;
7890 avr_asm_len ("rcall .", op, plen, 1);
7893 while (addend++ < 0)
7894 avr_asm_len ("push __zero_reg__", op, plen, 1);
/* Positive addend: shrink the frame one byte at a time with POPs
   into __tmp_reg__.  */
7896 else if (addend > 0)
7898 if (flag_verbose_asm || flag_print_asm_name)
7899 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7901 while (addend-- > 0)
7902 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7905 return "";
7909 /* Outputs instructions needed for fixed point type conversion.
7910 This includes converting between any fixed point type, as well
7911 as converting to any integer type. Conversion between integer
7912 types is not supported.
7914 Converting signed fractional types requires a bit shift if converting
7915 to or from any unsigned fractional type because the decimal place is
7916 shifted by 1 bit. When the destination is a signed fractional, the sign
7917 is stored in either the carry or T bit. */
7919 const char*
7920 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
7922 size_t i;
7923 rtx xop[6];
7924 RTX_CODE shift = UNKNOWN;
7925 bool sign_in_carry = false;
7926 bool msb_in_carry = false;
7927 bool lsb_in_tmp_reg = false;
7928 bool lsb_in_carry = false;
7929 bool frac_rounded = false;
7930 const char *code_ashift = "lsl %0";
7933 #define MAY_CLOBBER(RR) \
7934 /* Shorthand used below. */ \
7935 ((sign_bytes \
7936 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7937 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7938 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7939 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7941 struct
7943 /* bytes : Length of operand in bytes.
7944 ibyte : Length of integral part in bytes.
7945 fbyte, fbit : Length of fractional part in bytes, bits. */
7947 bool sbit;
7948 unsigned fbit, bytes, ibyte, fbyte;
7949 unsigned regno, regno_msb;
7950 } dest, src, *val[2] = { &dest, &src };
7952 if (plen)
7953 *plen = 0;
7955 /* Step 0: Determine information on source and destination operand we
7956 ====== will need in the remainder. */
7958 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7960 machine_mode mode;
7962 xop[i] = operands[i];
7964 mode = GET_MODE (xop[i]);
7966 val[i]->bytes = GET_MODE_SIZE (mode);
7967 val[i]->regno = REGNO (xop[i]);
7968 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7970 if (SCALAR_INT_MODE_P (mode))
7972 val[i]->sbit = intsigned;
7973 val[i]->fbit = 0;
7975 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7977 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7978 val[i]->fbit = GET_MODE_FBIT (mode);
7980 else
7981 fatal_insn ("unsupported fixed-point conversion", insn);
7983 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7984 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7987 // Byte offset of the decimal point taking into account different place
7988 // of the decimal point in input and output and different register numbers
7989 // of input and output.
7990 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7992 // Number of destination bytes that will come from sign / zero extension.
7993 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7995 // Number of bytes at the low end to be filled with zeros.
7996 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7998 // Do we have a 16-Bit register that is cleared?
7999 rtx clrw = NULL_RTX;
8001 bool sign_extend = src.sbit && sign_bytes;
8003 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8004 shift = ASHIFT;
8005 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8006 shift = ASHIFTRT;
8007 else if (dest.fbit % 8 == src.fbit % 8)
8008 shift = UNKNOWN;
8009 else
8010 gcc_unreachable();
8012 /* If we need to round the fraction part, we might need to save/round it
8013 before clobbering any of it in Step 1. Also, we might want to do
8014 the rounding now to make use of LD_REGS. */
8015 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8016 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8017 && !TARGET_FRACT_CONV_TRUNC)
8019 bool overlap
8020 = (src.regno <=
8021 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8022 && dest.regno - offset -1 >= dest.regno);
8023 unsigned s0 = dest.regno - offset -1;
8024 bool use_src = true;
8025 unsigned sn;
8026 unsigned copied_msb = src.regno_msb;
8027 bool have_carry = false;
8029 if (src.ibyte > dest.ibyte)
8030 copied_msb -= src.ibyte - dest.ibyte;
8032 for (sn = s0; sn <= copied_msb; sn++)
8033 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8034 && !reg_unused_after (insn, all_regs_rtx[sn]))
8035 use_src = false;
8036 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8038 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8039 &all_regs_rtx[src.regno_msb], plen, 2);
8040 sn = src.regno;
8041 if (sn < s0)
8043 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8044 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8045 else
8046 avr_asm_len ("sec" CR_TAB
8047 "cpc %0,__zero_reg__",
8048 &all_regs_rtx[sn], plen, 2);
8049 have_carry = true;
8051 while (++sn < s0)
8052 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8054 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8055 &all_regs_rtx[s0], plen, 1);
8056 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8057 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8058 avr_asm_len ("\n0:", NULL, plen, 0);
8059 frac_rounded = true;
8061 else if (use_src && overlap)
8063 avr_asm_len ("clr __tmp_reg__" CR_TAB
8064 "sbrc %1,0" CR_TAB
8065 "dec __tmp_reg__", xop, plen, 1);
8066 sn = src.regno;
8067 if (sn < s0)
8069 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8070 have_carry = true;
8073 while (++sn < s0)
8074 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8076 if (have_carry)
8077 avr_asm_len ("clt" CR_TAB
8078 "bld __tmp_reg__,7" CR_TAB
8079 "adc %0,__tmp_reg__",
8080 &all_regs_rtx[s0], plen, 1);
8081 else
8082 avr_asm_len ("lsr __tmp_reg" CR_TAB
8083 "add %0,__tmp_reg__",
8084 &all_regs_rtx[s0], plen, 2);
8085 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8086 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8087 frac_rounded = true;
8089 else if (overlap)
8091 bool use_src
8092 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8093 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8094 || reg_unused_after (insn, all_regs_rtx[s0])));
8095 xop[2] = all_regs_rtx[s0];
8096 unsigned sn = src.regno;
8097 if (!use_src || sn == s0)
8098 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8099 /* We need to consider to-be-discarded bits
8100 if the value is negative. */
8101 if (sn < s0)
8103 avr_asm_len ("tst %0" CR_TAB
8104 "brpl 0f",
8105 &all_regs_rtx[src.regno_msb], plen, 2);
8106 /* Test to-be-discarded bytes for any nozero bits.
8107 ??? Could use OR or SBIW to test two registers at once. */
8108 if (sn < s0)
8109 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8111 while (++sn < s0)
8112 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8113 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8114 if (use_src)
8115 avr_asm_len ("breq 0f" CR_TAB
8116 "ori %2,1"
8117 "\n0:\t" "mov __tmp_reg__,%2",
8118 xop, plen, 3);
8119 else
8120 avr_asm_len ("breq 0f" CR_TAB
8121 "set" CR_TAB
8122 "bld __tmp_reg__,0\n0:",
8123 xop, plen, 3);
8125 lsb_in_tmp_reg = true;
8129 /* Step 1: Clear bytes at the low end and copy payload bits from source
8130 ====== to destination. */
8132 int step = offset < 0 ? 1 : -1;
8133 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8135 // We cleared at least that number of registers.
8136 int clr_n = 0;
8138 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8140 // Next regno of destination is needed for MOVW
8141 unsigned d1 = d0 + step;
8143 // Current and next regno of source
8144 signed s0 = d0 - offset;
8145 signed s1 = s0 + step;
8147 // Must current resp. next regno be CLRed? This applies to the low
8148 // bytes of the destination that have no associated source bytes.
8149 bool clr0 = s0 < (signed) src.regno;
8150 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8152 // First gather what code to emit (if any) and additional step to
8153 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8154 // is the source rtx for the current loop iteration.
8155 const char *code = NULL;
8156 int stepw = 0;
8158 if (clr0)
8160 if (AVR_HAVE_MOVW && clr1 && clrw)
8162 xop[2] = all_regs_rtx[d0 & ~1];
8163 xop[3] = clrw;
8164 code = "movw %2,%3";
8165 stepw = step;
8167 else
8169 xop[2] = all_regs_rtx[d0];
8170 code = "clr %2";
8172 if (++clr_n >= 2
8173 && !clrw
8174 && d0 % 2 == (step > 0))
8176 clrw = all_regs_rtx[d0 & ~1];
8180 else if (offset && s0 <= (signed) src.regno_msb)
8182 int movw = AVR_HAVE_MOVW && offset % 2 == 0
8183 && d0 % 2 == (offset > 0)
8184 && d1 <= dest.regno_msb && d1 >= dest.regno
8185 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
8187 xop[2] = all_regs_rtx[d0 & ~movw];
8188 xop[3] = all_regs_rtx[s0 & ~movw];
8189 code = movw ? "movw %2,%3" : "mov %2,%3";
8190 stepw = step * movw;
8193 if (code)
8195 if (sign_extend && shift != ASHIFT && !sign_in_carry
8196 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8198 /* We are going to override the sign bit. If we sign-extend,
8199 store the sign in the Carry flag. This is not needed if
8200 the destination will be ASHIFT in the remainder because
8201 the ASHIFT will set Carry without extra instruction. */
8203 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8204 sign_in_carry = true;
8207 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8209 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8210 && src.ibyte > dest.ibyte
8211 && (d0 == src_msb || d0 + stepw == src_msb))
8213 /* We are going to override the MSB. If we shift right,
8214 store the MSB in the Carry flag. This is only needed if
8215 we don't sign-extend becaue with sign-extension the MSB
8216 (the sign) will be produced by the sign extension. */
8218 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8219 msb_in_carry = true;
8222 unsigned src_lsb = dest.regno - offset -1;
8224 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8225 && !lsb_in_tmp_reg
8226 && (d0 == src_lsb || d0 + stepw == src_lsb))
8228 /* We are going to override the new LSB; store it into carry. */
8230 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8231 code_ashift = "rol %0";
8232 lsb_in_carry = true;
8235 avr_asm_len (code, xop, plen, 1);
8236 d0 += stepw;
8240 /* Step 2: Shift destination left by 1 bit position. This might be needed
8241 ====== for signed input and unsigned output. */
8243 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8245 unsigned s0 = dest.regno - offset -1;
8247 /* n1169 4.1.4 says:
8248 "Conversions from a fixed-point to an integer type round toward zero."
8249 Hence, converting a fract type to integer only gives a non-zero result
8250 for -1. */
8251 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8252 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8253 && !TARGET_FRACT_CONV_TRUNC)
8255 gcc_assert (s0 == src.regno_msb);
8256 /* Check if the input is -1. We do that by checking if negating
8257 the input causes an integer overflow. */
8258 unsigned sn = src.regno;
8259 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8260 while (sn <= s0)
8261 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8263 /* Overflow goes with set carry. Clear carry otherwise. */
8264 avr_asm_len ("brvs 0f" CR_TAB
8265 "clc\n0:", NULL, plen, 2);
8267 /* Likewise, when converting from accumulator types to integer, we
8268 need to round up negative values. */
8269 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8270 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8271 && !TARGET_FRACT_CONV_TRUNC
8272 && !frac_rounded)
8274 bool have_carry = false;
8276 xop[2] = all_regs_rtx[s0];
8277 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8278 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8279 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8280 &all_regs_rtx[src.regno_msb], plen, 2);
8281 if (!lsb_in_tmp_reg)
8283 unsigned sn = src.regno;
8284 if (sn < s0)
8286 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8287 plen, 1);
8288 have_carry = true;
8290 while (++sn < s0)
8291 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8292 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8294 /* Add in C and the rounding value 127. */
8295 /* If the destination msb is a sign byte, and in LD_REGS,
8296 grab it as a temporary. */
8297 if (sign_bytes
8298 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8299 dest.regno_msb))
8301 xop[3] = all_regs_rtx[dest.regno_msb];
8302 avr_asm_len ("ldi %3,127", xop, plen, 1);
8303 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8304 : have_carry ? "adc %2,%3"
8305 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8306 : "add %2,%3"),
8307 xop, plen, 1);
8309 else
8311 /* Fall back to use __zero_reg__ as a temporary. */
8312 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8313 if (have_carry)
8314 avr_asm_len ("clt" CR_TAB
8315 "bld __zero_reg__,7", NULL, plen, 2);
8316 else
8317 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
8318 avr_asm_len (have_carry && lsb_in_tmp_reg
8319 ? "adc __tmp_reg__,__zero_reg__"
8320 : have_carry ? "adc %2,__zero_reg__"
8321 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8322 : "add %2,__zero_reg__",
8323 xop, plen, 1);
8324 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8327 for (d0 = dest.regno + zero_bytes;
8328 d0 <= dest.regno_msb - sign_bytes; d0++)
8329 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
8331 avr_asm_len (lsb_in_tmp_reg
8332 ? "\n0:\t" "lsl __tmp_reg__"
8333 : "\n0:\t" "lsl %2",
8334 xop, plen, 1);
8336 else if (MAY_CLOBBER (s0))
8337 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8338 else
8339 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8340 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8342 code_ashift = "rol %0";
8343 lsb_in_carry = true;
8346 if (shift == ASHIFT)
8348 for (d0 = dest.regno + zero_bytes;
8349 d0 <= dest.regno_msb - sign_bytes; d0++)
8351 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8352 code_ashift = "rol %0";
8355 lsb_in_carry = false;
8356 sign_in_carry = true;
8359 /* Step 4a: Store MSB in carry if we don't already have it or will produce
8360 ======= it in sign-extension below. */
8362 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8363 && src.ibyte > dest.ibyte)
8365 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8367 if (MAY_CLOBBER (s0))
8368 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8369 else
8370 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8371 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8373 msb_in_carry = true;
8376 /* Step 3: Sign-extend or zero-extend the destination as needed.
8377 ====== */
8379 if (sign_extend && !sign_in_carry)
8381 unsigned s0 = src.regno_msb;
8383 if (MAY_CLOBBER (s0))
8384 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8385 else
8386 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8387 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8389 sign_in_carry = true;
8392 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8394 unsigned copies = 0;
8395 rtx movw = sign_extend ? NULL_RTX : clrw;
8397 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8399 if (AVR_HAVE_MOVW && movw
8400 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8402 xop[2] = all_regs_rtx[d0];
8403 xop[3] = movw;
8404 avr_asm_len ("movw %2,%3", xop, plen, 1);
8405 d0++;
8407 else
8409 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8410 &all_regs_rtx[d0], plen, 1);
8412 if (++copies >= 2 && !movw && d0 % 2 == 1)
8413 movw = all_regs_rtx[d0-1];
8415 } /* for */
8418 /* Step 4: Right shift the destination. This might be needed for
8419 ====== conversions from unsigned to signed. */
8421 if (shift == ASHIFTRT)
8423 const char *code_ashiftrt = "lsr %0";
8425 if (sign_extend || msb_in_carry)
8426 code_ashiftrt = "ror %0";
8428 if (src.sbit && src.ibyte == dest.ibyte)
8429 code_ashiftrt = "asr %0";
8431 for (d0 = dest.regno_msb - sign_bytes;
8432 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8434 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8435 code_ashiftrt = "ror %0";
8439 #undef MAY_CLOBBER
8441 return "";
8445 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
8446 XOP[2] is the rounding point, a CONST_INT. The function prints the
8447 instruction sequence if PLEN = NULL and computes the length in words
8448 of the sequence if PLEN != NULL. Most of this function deals with
8449 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
8451 const char*
8452 avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
8454 machine_mode mode = GET_MODE (xop[0]);
8455 machine_mode imode = int_mode_for_mode (mode);
8456 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
8457 int fbit = (int) GET_MODE_FBIT (mode);
8458 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
8459 wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
8460 GET_MODE_PRECISION (imode));
8461 // Lengths of PLUS and AND parts.
8462 int len_add = 0, *plen_add = plen ? &len_add : NULL;
8463 int len_and = 0, *plen_and = plen ? &len_and : NULL;
8465 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
8466 // the saturated addition so that we can emit the "rjmp 1f" before the
8467 // "0:" below.
8469 rtx xadd = const_fixed_from_double_int (i_add, mode);
8470 rtx xpattern, xsrc, op[4];
8472 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
8473 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
8474 : gen_rtx_US_PLUS (mode, xop[1], xadd);
8475 xpattern = gen_rtx_SET (xop[0], xsrc);
8477 op[0] = xop[0];
8478 op[1] = xop[1];
8479 op[2] = xadd;
8480 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
8482 avr_asm_len ("rjmp 1f" CR_TAB
8483 "0:", NULL, plen_add, 1);
8485 // Keep all bits from RP and higher: ... 2^(-RP)
8486 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
8487 // Rounding point ^^^^^^^
8488 // Added above ^^^^^^^^^
8489 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
8490 rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);
8492 xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));
8494 op[0] = xreg;
8495 op[1] = xreg;
8496 op[2] = xmask;
8497 op[3] = gen_rtx_SCRATCH (QImode);
8498 avr_out_bitop (xpattern, op, plen_and);
8499 avr_asm_len ("1:", NULL, plen, 0);
8501 if (plen)
8502 *plen = len_add + len_and;
8504 return "";
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   OPERANDS[0]/[1] are destination/source, OPERANDS[2] is the rotate
   count in bits (a CONST_INT), OPERANDS[3] is a scratch register or
   a SCRATCH rtx.  Always returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.
         Classic three-XOR in-place swap of the two bytes.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
	{
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	}
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
      /* Create linked list of moves to determine move order.  */
      struct {
	rtx src, dst;
	int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
	{
	  int from = i;
	  int to = (from + offset) % size;
	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
					     mode, from * move_size);
	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
					     mode, to * move_size);
	  move[i].links = -1;
	}
      /* Mark dependence where a dst of one move is the src of another move.
	 The first move is a conflict as it must wait until second is
	 performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
	for (i = 0; i < size; i++)
	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	    for (j = 0; j < size; j++)
	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		{
		  /* The dst of move i is the src of move j.  */
		  move[i].links = j;
		  break;
		}

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
	 non-overlapping move is made, it may remove other conflicts
	 so the process is repeated until no conflicts remain.  */
      do
	{
	  blocked = -1;
	  moves = 0;
	  /* Emit move where dst is not also a src or we have used that
	     src already.  */
	  for (i = 0; i < size; i++)
	    if (move[i].src != NULL_RTX)
	      {
		if (move[i].links == -1
		    || move[move[i].links].src == NULL_RTX)
		  {
		    moves++;
		    /* Ignore NOP moves to self.  */
		    if (!rtx_equal_p (move[i].dst, move[i].src))
		      emit_move_insn (move[i].dst, move[i].src);

		    /* Remove conflict from list.  */
		    move[i].src = NULL_RTX;
		  }
		else
		  blocked = i;
	      }

	  /* Check for deadlock.  This is when no moves occurred and we have
	     at least one blocked move.  */
	  if (moves == 0 && blocked != -1)
	    {
	      /* Need to use scratch register to break deadlock.
		 Add move to put dst of blocked move into scratch.
		 When this move occurs, it will break chain deadlock.
		 The scratch register is substituted for real move.  */

	      gcc_assert (SCRATCH != GET_CODE (scratch));

	      move[size].src = move[blocked].dst;
	      move[size].dst = scratch;
	      /* Scratch move is never blocked.  */
	      move[size].links = -1;
	      /* Make sure we have valid link.  */
	      gcc_assert (move[blocked].links != -1);
	      /* Replace src of blocking move with scratch reg.  */
	      move[move[blocked].links].src = scratch;
	      /* Make dependent on scratch move occurring.  */
	      move[blocked].links = size;
	      size=size+1;
	    }
	}
      while (blocked != -1);
    }
  return true;
}
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.
   Each avr_out_* / *_out worker below updates LEN as a side effect
   when called with a non-NULL length pointer.  */

int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  One case per ADJUST_LEN_* value of the
     "adjust_len" insn attribute.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
8753 /* Return nonzero if register REG dead after INSN. */
8756 reg_unused_after (rtx_insn *insn, rtx reg)
8758 return (dead_or_set_p (insn, reg)
8759 || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward over the following insns until we can decide.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump means control may leave; be conservative.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int i;
	  int retval = 0;

	  for (i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* Any use of REG inside the sequence means it is live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG is live if the call uses it as an argument ...  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  /* ... and dead if it is call-clobbered and not an argument.  */
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
8869 /* Implement `TARGET_ASM_INTEGER'. */
8870 /* Target hook for assembling integer objects. The AVR version needs
8871 special handling for references to certain labels. */
8873 static bool
8874 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
8876 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
8877 && text_segment_operand (x, VOIDmode))
8879 fputs ("\t.word\tgs(", asm_out_file);
8880 output_addr_const (asm_out_file, x);
8881 fputs (")\n", asm_out_file);
8883 return true;
8885 else if (GET_MODE (x) == PSImode)
8887 /* This needs binutils 2.23+, see PR binutils/13503 */
8889 fputs ("\t.byte\tlo8(", asm_out_file);
8890 output_addr_const (asm_out_file, x);
8891 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8893 fputs ("\t.byte\thi8(", asm_out_file);
8894 output_addr_const (asm_out_file, x);
8895 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8897 fputs ("\t.byte\thh8(", asm_out_file);
8898 output_addr_const (asm_out_file, x);
8899 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8901 return true;
8903 else if (CONST_FIXED_P (x))
8905 unsigned n;
8907 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8909 for (n = 0; n < size; n++)
8911 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8912 default_assemble_integer (xn, 1, aligned_p);
8915 return true;
8918 return default_assemble_integer (x, size, aligned_p);
8922 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8923 /* Return value is nonzero if pseudos that have been
8924 assigned to registers of class CLASS would likely be spilled
8925 because registers of CLASS are needed for spill registers. */
8927 static bool
8928 avr_class_likely_spilled_p (reg_class_t c)
8930 return (c != ALL_REGS &&
8931 (AVR_TINY ? 1 : c != ADDW_REGS));
/* Valid attributes:
   progmem   -  Put data into program memory.
   signal    -  Make a function a hardware interrupt handler.
                After the function prologue, interrupts remain disabled.
   interrupt -  Make a function a hardware interrupt handler.  Before the
                function prologue, interrupts are enabled by means of SEI.
   naked     -  Don't generate a function prologue/epilogue and RET
                instruction.  */
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  Accept the attribute on static or
   external variables; on a TYPE_DECL, push it down to the type for
   GCC 3.0 backwards compatibility.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
                              tree args ATTRIBUTE_UNUSED,
                              int flags ATTRIBUTE_UNUSED,
                              bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
        {
          /* This is really a decl attribute, not a type attribute,
             but try to handle it for GCC 3.0 backwards compatibility.  */

          tree type = TREE_TYPE (*node);
          tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
          tree newtype = build_type_attribute_variant (type, attr);

          TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
          TREE_TYPE (*node) = newtype;
          *no_add_attrs = true;
        }
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
        {
          /* Variable with static storage duration: keep the attribute.  */
          *no_add_attrs = false;
        }
      else
        {
          /* Automatic variables cannot live in flash.  */
          warning (OPT_Wattributes, "%qE attribute ignored",
                   name);
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
/* Handle an attribute requiring a FUNCTION_DECL ("signal",
   "interrupt"); arguments as in struct attribute_spec.handler.
   Warn and drop the attribute when applied to a non-function.  */

static tree
avr_handle_fndecl_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle an attribute requiring a FUNCTION_TYPE ("naked", "OS_task",
   "OS_main"); arguments as in struct attribute_spec.handler.
   Warn and drop the attribute when applied to a non-function-type.  */

static tree
avr_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle the variable attributes "io", "io_low" and "address";
   arguments as in struct attribute_spec.handler.

   Checks that the attribute is applied to a variable, that its
   argument (if any) is an integer constant, that an io/io_low address
   is in the valid I/O range, and that no other of the three
   attributes already supplies an address.  For io/io_low on a
   non-volatile variable a warning is emitted.  */

static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
                           int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* "io" and "io_low" both start with "io"; "address" does not.  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning_at (loc, 0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper to get at the constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
        TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
        {
          warning (0, "%qE attribute allows only an integer constant argument",
                   name);
          *no_add = true;
        }
      else if (io_p
               && (!tree_fits_shwi_p (arg)
                   /* Pick the range predicate matching the attribute:
                      io_low must fit the IN/OUT + SBI/CBI range,
                      plain io only the IN/OUT range.  */
                   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
                        ? low_io_address_operand : io_address_operand)
                         (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
        {
          warning_at (loc, 0, "%qE attribute address out of range", name);
          *no_add = true;
        }
      else
        {
          /* Refuse a second attribute that also carries an address.  */
          tree attribs = DECL_ATTRIBUTES (*node);
          const char *names[] = { "io", "io_low", "address", NULL } ;
          for (const char **p = names; *p; p++)
            {
              tree other = lookup_attribute (*p, attribs);
              if (other && TREE_VALUE (other))
                {
                  warning_at (loc, 0,
                              "both %s and %qE attribute provide address",
                              *p, name);
                  *no_add = true;
                  break;
                }
            }
        }
    }

  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, 0, "%qE attribute on non-volatile variable", name);

  return NULL_TREE;
}
/* If X is a SYMBOL_REF carrying SYMBOL_FLAG_ADDRESS, return the
   address specified by its decl's io/address attribute as a CONST_INT;
   otherwise return X unchanged.  */

rtx
avr_eval_addr_attrib (rtx x)
{
  if (GET_CODE (x) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
    {
      tree decl = SYMBOL_REF_DECL (x);
      tree attr = NULL_TREE;

      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
        {
          attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
          gcc_assert (attr);
        }
      /* Fall back to the "address" attribute when "io" is absent or
         was given without an argument.  */
      if (!attr || !TREE_VALUE (attr))
        attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
    }
  return x;
}
/* AVR attributes.  Terminated by an all-NULL sentinel entry.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "io",        0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2   if DECL is located in 24-bit flash address-space
   Return 1   if DECL is located in 16-bit flash address-space
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0   otherwise  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  if (avr_decl_memx_p (decl))
    return 2;

  if (avr_decl_flash_p (decl))
    return 1;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return -1;

  /* Peel arrays so that progmem on the element type is found, too.  */
  a = decl;

  do
    a = TREE_TYPE (a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return -1;

  return 0;
}
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_n_flash
              /* Also refuse __memx address space if we can't support it.  */
              || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
        {
          return as;
        }

      /* Scan pointer's target type recursively, e.g. for pointer
         to pointer.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.
   Return true if NODE is fine, false after diagnosing an error.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  /* Each case uses a comma expression so AS carries the offending
     address space (non-zero) when REASON is set.  */
  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      if (avr_addrspace[as].segment >= avr_n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %d KiB",
                   node, avr_addrspace[as].name, avr_n_flash);
          else
            error ("%s %q+D uses address space %qs beyond flash of %d KiB",
                   reason, node, avr_addrspace[as].name, avr_n_flash);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
/* Implement `TARGET_INSERT_ATTRIBUTES'.  */
/* Diagnose progmem/address-space problems for variables destined for
   flash: address space beyond the device's flash, unsupported address
   space, or missing const qualification.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      if (avr_addrspace[as].segment >= avr_n_flash)
        {
          error ("variable %q+D located in address space %qs beyond flash "
                 "of %d KiB", node, avr_addrspace[as].name, avr_n_flash);
        }
      else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
        {
          error ("variable %q+D located in address space %qs"
                 " which is not supported for architecture %qs",
                 node, avr_addrspace[as].name, avr_arch->name);
        }

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    tree decl,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* io/address attributed symbols: emit a symbol assignment instead
     of reserving storage.  */
  if (mem != NULL_RTX && MEM_P (mem)
      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
        {
          fprintf (stream, "\t.globl\t");
          assemble_name (stream, name);
          fprintf (stream, "\n");
        }
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
        {
          assemble_name (stream, name);
          fprintf (stream, " = %ld\n",
                   (long) INTVAL (avr_eval_addr_attrib (symbol)));
        }
      else if (local_p)
        /* A static IO symbol without an address cannot be resolved
           by the linker.  */
        error_at (DECL_SOURCE_LOCATION (decl),
                  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
/* Worker for `ASM_OUTPUT_ALIGNED_BSS'.  Handle io/address attributed
   symbols specially (they need an address, not storage); dispatch all
   other symbols to DEFAULT_FUNC.  */

void
avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
                                unsigned HOST_WIDE_INT size, int align,
                                void (*default_func)
                                  (FILE *, tree, const char *,
                                   unsigned HOST_WIDE_INT, int))
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
        error_at (DECL_SOURCE_LOCATION (decl),
                  "IO definition for %q+D needs an address", decl);
      avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
    }
  else
    default_func (file, decl, name, size, align);
}
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Something was emitted to .data/.rodata, so startup code must
     copy initializers from flash to RAM.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Something was emitted to .bss, so startup code must zero it.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
/* Unnamed section callback for progmem*.data sections.
   DATA is the section name to emit.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      /* Devices with JMP/CALL address jump tables as data ("a").  */
      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      /* Without JMP/CALL the tables are reached with RJMP/IJMP and
         must live in executable ("ax") flash.  */
      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */

  int fdata = flag_data_sections;

  flag_data_sections = flag_function_sections;
  frodata = default_function_rodata_section (decl);
  flag_data_sections = fdata;
  flags = frodata->common.flags;

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old prefix, replacement prefix).  */
      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Mark as code only when jump tables must live in
                 executable flash (no JMP/CALL).  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  return progmem_swtable_section;
}
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the SECTION_MACH_DEP bits.  */
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the SECTION_MACH_DEP bits and
         strip flags that don't apply to read-only flash data.  */
      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
9625 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
9627 static void
9628 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
9630 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
9631 readily available, see PR34734. So we postpone the warning
9632 about uninitialized data in program memory section until here. */
9634 if (new_decl_p
9635 && decl && DECL_P (decl)
9636 && NULL_TREE == DECL_INITIAL (decl)
9637 && !DECL_EXTERNAL (decl)
9638 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9640 warning (OPT_Wuninitialized,
9641 "uninitialized variable %q+D put into "
9642 "program memory area", decl);
9645 default_encode_section_info (decl, rtl, new_decl_p);
9647 if (decl && DECL_P (decl)
9648 && TREE_CODE (decl) != FUNCTION_DECL
9649 && MEM_P (rtl)
9650 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
9652 rtx sym = XEXP (rtl, 0);
9653 tree type = TREE_TYPE (decl);
9654 tree attr = DECL_ATTRIBUTES (decl);
9655 if (type == error_mark_node)
9656 return;
9658 addr_space_t as = TYPE_ADDR_SPACE (type);
9660 /* PSTR strings are in generic space but located in flash:
9661 patch address space. */
9663 if (-1 == avr_progmem_p (decl, attr))
9664 as = ADDR_SPACE_FLASH;
9666 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
9668 tree io_low_attr = lookup_attribute ("io_low", attr);
9669 tree io_attr = lookup_attribute ("io", attr);
9670 tree addr_attr;
9671 if (io_low_attr
9672 && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
9673 addr_attr = io_attr;
9674 else if (io_attr
9675 && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
9676 addr_attr = io_attr;
9677 else
9678 addr_attr = lookup_attribute ("address", attr);
9679 if (io_low_attr
9680 || (io_attr && addr_attr
9681 && low_io_address_operand
9682 (GEN_INT (TREE_INT_CST_LOW
9683 (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
9684 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
9685 if (io_attr || io_low_attr)
9686 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
9687 /* If we have an (io) address attribute specification, but the variable
9688 is external, treat the address as only a tentative definition
9689 to be used to determine if an io port is in the lower range, but
9690 don't use the exact value for constant propagation. */
9691 if (addr_attr && !DECL_EXTERNAL (decl))
9692 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          /* Replace the .rodata prefix with the address space's
             section name, keeping any suffix.  */
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      /* Lazily create the per-address-space progmem section.  */
      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_arch->sfr_offset;

  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.
     The sfr_offset converts memory-mapped addresses to I/O space.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);

  /* Register aliases used by libgcc and inline assembly.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty, see PR18145.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;

  /* Default order: upper registers (cheap constants, MUL operands)
     first; 32..35 are the frame/arg pointer pseudo hard regs.  */
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.
     NOTE: for -morder2 on Tiny there is no tiny_order_2; tiny_order_0
     is used instead.  */

  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
                      : (AVR_TINY ? tiny_order_0 : order_0));

  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
9875 /* Implement `TARGET_REGISTER_MOVE_COST' */
9877 static int
9878 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
9879 reg_class_t from, reg_class_t to)
9881 return (from == STACK_REG ? 6
9882 : to == STACK_REG ? 12
9883 : 2);
9887 /* Implement `TARGET_MEMORY_MOVE_COST' */
9889 static int
9890 avr_memory_move_cost (machine_mode mode,
9891 reg_class_t rclass ATTRIBUTE_UNUSED,
9892 bool in ATTRIBUTE_UNUSED)
9894 return (mode == QImode ? 2
9895 : mode == HImode ? 4
9896 : mode == SImode ? 8
9897 : mode == SFmode ? 8
9898 : 16);
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  OPNO is the operand number within the
   parent; SPEED selects speed vs. size costs.  */

static int
avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
                      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      /* Registers are free as operands.  */
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      /* Loading a constant costs one insn per byte.  */
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  /* Anything else: recurse into the full cost computation.  */
  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
9934 /* Worker function for AVR backend's rtx_cost function.
9935 X is rtx expression whose cost is to be calculated.
9936 Return true if the complete cost has been computed.
9937 Return false if subexpressions should be scanned.
9938 In either case, *TOTAL contains the cost result. */
9940 static bool
9941 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
9942 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
9944 enum rtx_code code = (enum rtx_code) codearg;
9945 machine_mode mode = GET_MODE (x);
9946 HOST_WIDE_INT val;
9948 switch (code)
9950 case CONST_INT:
9951 case CONST_FIXED:
9952 case CONST_DOUBLE:
9953 case SYMBOL_REF:
9954 case CONST:
9955 case LABEL_REF:
9956 /* Immediate constants are as cheap as registers. */
9957 *total = 0;
9958 return true;
9960 case MEM:
9961 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9962 return true;
9964 case NEG:
9965 switch (mode)
9967 case QImode:
9968 case SFmode:
9969 *total = COSTS_N_INSNS (1);
9970 break;
9972 case HImode:
9973 case PSImode:
9974 case SImode:
9975 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
9976 break;
9978 default:
9979 return false;
9981 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9982 return true;
9984 case ABS:
9985 switch (mode)
9987 case QImode:
9988 case SFmode:
9989 *total = COSTS_N_INSNS (1);
9990 break;
9992 default:
9993 return false;
9995 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9996 return true;
9998 case NOT:
9999 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10000 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10001 return true;
10003 case ZERO_EXTEND:
10004 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
10005 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10006 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10007 return true;
10009 case SIGN_EXTEND:
10010 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
10011 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10012 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10013 return true;
10015 case PLUS:
10016 switch (mode)
10018 case QImode:
10019 if (AVR_HAVE_MUL
10020 && MULT == GET_CODE (XEXP (x, 0))
10021 && register_operand (XEXP (x, 1), QImode))
10023 /* multiply-add */
10024 *total = COSTS_N_INSNS (speed ? 4 : 3);
10025 /* multiply-add with constant: will be split and load constant. */
10026 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10027 *total = COSTS_N_INSNS (1) + *total;
10028 return true;
10030 *total = COSTS_N_INSNS (1);
10031 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10032 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10033 break;
10035 case HImode:
10036 if (AVR_HAVE_MUL
10037 && (MULT == GET_CODE (XEXP (x, 0))
10038 || ASHIFT == GET_CODE (XEXP (x, 0)))
10039 && register_operand (XEXP (x, 1), HImode)
10040 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
10041 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
10043 /* multiply-add */
10044 *total = COSTS_N_INSNS (speed ? 5 : 4);
10045 /* multiply-add with constant: will be split and load constant. */
10046 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10047 *total = COSTS_N_INSNS (1) + *total;
10048 return true;
10050 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10052 *total = COSTS_N_INSNS (2);
10053 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10054 speed);
10056 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10057 *total = COSTS_N_INSNS (1);
10058 else
10059 *total = COSTS_N_INSNS (2);
10060 break;
10062 case PSImode:
10063 if (!CONST_INT_P (XEXP (x, 1)))
10065 *total = COSTS_N_INSNS (3);
10066 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10067 speed);
10069 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10070 *total = COSTS_N_INSNS (2);
10071 else
10072 *total = COSTS_N_INSNS (3);
10073 break;
10075 case SImode:
10076 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10078 *total = COSTS_N_INSNS (4);
10079 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10080 speed);
10082 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10083 *total = COSTS_N_INSNS (1);
10084 else
10085 *total = COSTS_N_INSNS (4);
10086 break;
10088 default:
10089 return false;
10091 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10092 return true;
10094 case MINUS:
10095 if (AVR_HAVE_MUL
10096 && QImode == mode
10097 && register_operand (XEXP (x, 0), QImode)
10098 && MULT == GET_CODE (XEXP (x, 1)))
10100 /* multiply-sub */
10101 *total = COSTS_N_INSNS (speed ? 4 : 3);
10102 /* multiply-sub with constant: will be split and load constant. */
10103 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10104 *total = COSTS_N_INSNS (1) + *total;
10105 return true;
10107 if (AVR_HAVE_MUL
10108 && HImode == mode
10109 && register_operand (XEXP (x, 0), HImode)
10110 && (MULT == GET_CODE (XEXP (x, 1))
10111 || ASHIFT == GET_CODE (XEXP (x, 1)))
10112 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
10113 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
10115 /* multiply-sub */
10116 *total = COSTS_N_INSNS (speed ? 5 : 4);
10117 /* multiply-sub with constant: will be split and load constant. */
10118 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10119 *total = COSTS_N_INSNS (1) + *total;
10120 return true;
10122 /* FALLTHRU */
10123 case AND:
10124 case IOR:
10125 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10126 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10127 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10128 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10129 return true;
10131 case XOR:
10132 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10133 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10134 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10135 return true;
10137 case MULT:
10138 switch (mode)
10140 case QImode:
10141 if (AVR_HAVE_MUL)
10142 *total = COSTS_N_INSNS (!speed ? 3 : 4);
10143 else if (!speed)
10144 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10145 else
10146 return false;
10147 break;
10149 case HImode:
10150 if (AVR_HAVE_MUL)
10152 rtx op0 = XEXP (x, 0);
10153 rtx op1 = XEXP (x, 1);
10154 enum rtx_code code0 = GET_CODE (op0);
10155 enum rtx_code code1 = GET_CODE (op1);
10156 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
10157 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
10159 if (ex0
10160 && (u8_operand (op1, HImode)
10161 || s8_operand (op1, HImode)))
10163 *total = COSTS_N_INSNS (!speed ? 4 : 6);
10164 return true;
10166 if (ex0
10167 && register_operand (op1, HImode))
10169 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10170 return true;
10172 else if (ex0 || ex1)
10174 *total = COSTS_N_INSNS (!speed ? 3 : 5);
10175 return true;
10177 else if (register_operand (op0, HImode)
10178 && (u8_operand (op1, HImode)
10179 || s8_operand (op1, HImode)))
10181 *total = COSTS_N_INSNS (!speed ? 6 : 9);
10182 return true;
10184 else
10185 *total = COSTS_N_INSNS (!speed ? 7 : 10);
10187 else if (!speed)
10188 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10189 else
10190 return false;
10191 break;
10193 case PSImode:
10194 if (!speed)
10195 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10196 else
10197 *total = 10;
10198 break;
10200 case SImode:
10201 if (AVR_HAVE_MUL)
10203 if (!speed)
10205 /* Add some additional costs besides CALL like moves etc. */
10207 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10209 else
10211 /* Just a rough estimate. Even with -O2 we don't want bulky
10212 code expanded inline. */
10214 *total = COSTS_N_INSNS (25);
10217 else
10219 if (speed)
10220 *total = COSTS_N_INSNS (300);
10221 else
10222 /* Add some additional costs besides CALL like moves etc. */
10223 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10226 return true;
10228 default:
10229 return false;
10231 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10233 return true;
10235 case DIV:
10236 case MOD:
10237 case UDIV:
10238 case UMOD:
10239 if (!speed)
10240 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10241 else
10242 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
10243 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10244 /* For div/mod with const-int divisor we have at least the cost of
10245 loading the divisor. */
10246 if (CONST_INT_P (XEXP (x, 1)))
10247 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
10248 /* Add some overall penaly for clobbering and moving around registers */
10249 *total += COSTS_N_INSNS (2);
10250 return true;
10252 case ROTATE:
10253 switch (mode)
10255 case QImode:
10256 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
10257 *total = COSTS_N_INSNS (1);
10259 break;
10261 case HImode:
10262 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
10263 *total = COSTS_N_INSNS (3);
10265 break;
10267 case SImode:
10268 if (CONST_INT_P (XEXP (x, 1)))
10269 switch (INTVAL (XEXP (x, 1)))
10271 case 8:
10272 case 24:
10273 *total = COSTS_N_INSNS (5);
10274 break;
10275 case 16:
10276 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
10277 break;
10279 break;
10281 default:
10282 return false;
10284 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10285 return true;
10287 case ASHIFT:
10288 switch (mode)
10290 case QImode:
10291 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10293 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10294 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10295 speed);
10297 else
10299 val = INTVAL (XEXP (x, 1));
10300 if (val == 7)
10301 *total = COSTS_N_INSNS (3);
10302 else if (val >= 0 && val <= 7)
10303 *total = COSTS_N_INSNS (val);
10304 else
10305 *total = COSTS_N_INSNS (1);
10307 break;
10309 case HImode:
10310 if (AVR_HAVE_MUL)
10312 if (const_2_to_7_operand (XEXP (x, 1), HImode)
10313 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
10314 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
10316 *total = COSTS_N_INSNS (!speed ? 4 : 6);
10317 return true;
10321 if (const1_rtx == (XEXP (x, 1))
10322 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
10324 *total = COSTS_N_INSNS (2);
10325 return true;
10328 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10330 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10331 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10332 speed);
10334 else
10335 switch (INTVAL (XEXP (x, 1)))
10337 case 0:
10338 *total = 0;
10339 break;
10340 case 1:
10341 case 8:
10342 *total = COSTS_N_INSNS (2);
10343 break;
10344 case 9:
10345 *total = COSTS_N_INSNS (3);
10346 break;
10347 case 2:
10348 case 3:
10349 case 10:
10350 case 15:
10351 *total = COSTS_N_INSNS (4);
10352 break;
10353 case 7:
10354 case 11:
10355 case 12:
10356 *total = COSTS_N_INSNS (5);
10357 break;
10358 case 4:
10359 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10360 break;
10361 case 6:
10362 *total = COSTS_N_INSNS (!speed ? 5 : 9);
10363 break;
10364 case 5:
10365 *total = COSTS_N_INSNS (!speed ? 5 : 10);
10366 break;
10367 default:
10368 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10369 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10370 speed);
10372 break;
10374 case PSImode:
10375 if (!CONST_INT_P (XEXP (x, 1)))
10377 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10379 else
10380 switch (INTVAL (XEXP (x, 1)))
10382 case 0:
10383 *total = 0;
10384 break;
10385 case 1:
10386 case 8:
10387 case 16:
10388 *total = COSTS_N_INSNS (3);
10389 break;
10390 case 23:
10391 *total = COSTS_N_INSNS (5);
10392 break;
10393 default:
10394 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10395 break;
10397 break;
10399 case SImode:
10400 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10402 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10403 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10404 speed);
10406 else
10407 switch (INTVAL (XEXP (x, 1)))
10409 case 0:
10410 *total = 0;
10411 break;
10412 case 24:
10413 *total = COSTS_N_INSNS (3);
10414 break;
10415 case 1:
10416 case 8:
10417 case 16:
10418 *total = COSTS_N_INSNS (4);
10419 break;
10420 case 31:
10421 *total = COSTS_N_INSNS (6);
10422 break;
10423 case 2:
10424 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10425 break;
10426 default:
10427 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10428 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10429 speed);
10431 break;
10433 default:
10434 return false;
10436 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10437 return true;
10439 case ASHIFTRT:
10440 switch (mode)
10442 case QImode:
10443 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10445 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10446 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10447 speed);
10449 else
10451 val = INTVAL (XEXP (x, 1));
10452 if (val == 6)
10453 *total = COSTS_N_INSNS (4);
10454 else if (val == 7)
10455 *total = COSTS_N_INSNS (2);
10456 else if (val >= 0 && val <= 7)
10457 *total = COSTS_N_INSNS (val);
10458 else
10459 *total = COSTS_N_INSNS (1);
10461 break;
10463 case HImode:
10464 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10466 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10467 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10468 speed);
10470 else
10471 switch (INTVAL (XEXP (x, 1)))
10473 case 0:
10474 *total = 0;
10475 break;
10476 case 1:
10477 *total = COSTS_N_INSNS (2);
10478 break;
10479 case 15:
10480 *total = COSTS_N_INSNS (3);
10481 break;
10482 case 2:
10483 case 7:
10484 case 8:
10485 case 9:
10486 *total = COSTS_N_INSNS (4);
10487 break;
10488 case 10:
10489 case 14:
10490 *total = COSTS_N_INSNS (5);
10491 break;
10492 case 11:
10493 *total = COSTS_N_INSNS (!speed ? 5 : 6);
10494 break;
10495 case 12:
10496 *total = COSTS_N_INSNS (!speed ? 5 : 7);
10497 break;
10498 case 6:
10499 case 13:
10500 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10501 break;
10502 default:
10503 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10504 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10505 speed);
10507 break;
10509 case PSImode:
10510 if (!CONST_INT_P (XEXP (x, 1)))
10512 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10514 else
10515 switch (INTVAL (XEXP (x, 1)))
10517 case 0:
10518 *total = 0;
10519 break;
10520 case 1:
10521 *total = COSTS_N_INSNS (3);
10522 break;
10523 case 16:
10524 case 8:
10525 *total = COSTS_N_INSNS (5);
10526 break;
10527 case 23:
10528 *total = COSTS_N_INSNS (4);
10529 break;
10530 default:
10531 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10532 break;
10534 break;
10536 case SImode:
10537 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10539 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10540 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10541 speed);
10543 else
10544 switch (INTVAL (XEXP (x, 1)))
10546 case 0:
10547 *total = 0;
10548 break;
10549 case 1:
10550 *total = COSTS_N_INSNS (4);
10551 break;
10552 case 8:
10553 case 16:
10554 case 24:
10555 *total = COSTS_N_INSNS (6);
10556 break;
10557 case 2:
10558 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10559 break;
10560 case 31:
10561 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
10562 break;
10563 default:
10564 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10565 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10566 speed);
10568 break;
10570 default:
10571 return false;
10573 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10574 return true;
10576 case LSHIFTRT:
10577 switch (mode)
10579 case QImode:
10580 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10582 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10583 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10584 speed);
10586 else
10588 val = INTVAL (XEXP (x, 1));
10589 if (val == 7)
10590 *total = COSTS_N_INSNS (3);
10591 else if (val >= 0 && val <= 7)
10592 *total = COSTS_N_INSNS (val);
10593 else
10594 *total = COSTS_N_INSNS (1);
10596 break;
10598 case HImode:
10599 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10601 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10602 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10603 speed);
10605 else
10606 switch (INTVAL (XEXP (x, 1)))
10608 case 0:
10609 *total = 0;
10610 break;
10611 case 1:
10612 case 8:
10613 *total = COSTS_N_INSNS (2);
10614 break;
10615 case 9:
10616 *total = COSTS_N_INSNS (3);
10617 break;
10618 case 2:
10619 case 10:
10620 case 15:
10621 *total = COSTS_N_INSNS (4);
10622 break;
10623 case 7:
10624 case 11:
10625 *total = COSTS_N_INSNS (5);
10626 break;
10627 case 3:
10628 case 12:
10629 case 13:
10630 case 14:
10631 *total = COSTS_N_INSNS (!speed ? 5 : 6);
10632 break;
10633 case 4:
10634 *total = COSTS_N_INSNS (!speed ? 5 : 7);
10635 break;
10636 case 5:
10637 case 6:
10638 *total = COSTS_N_INSNS (!speed ? 5 : 9);
10639 break;
10640 default:
10641 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10642 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10643 speed);
10645 break;
10647 case PSImode:
10648 if (!CONST_INT_P (XEXP (x, 1)))
10650 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10652 else
10653 switch (INTVAL (XEXP (x, 1)))
10655 case 0:
10656 *total = 0;
10657 break;
10658 case 1:
10659 case 8:
10660 case 16:
10661 *total = COSTS_N_INSNS (3);
10662 break;
10663 case 23:
10664 *total = COSTS_N_INSNS (5);
10665 break;
10666 default:
10667 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10668 break;
10670 break;
10672 case SImode:
10673 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10675 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10676 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10677 speed);
10679 else
10680 switch (INTVAL (XEXP (x, 1)))
10682 case 0:
10683 *total = 0;
10684 break;
10685 case 1:
10686 *total = COSTS_N_INSNS (4);
10687 break;
10688 case 2:
10689 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10690 break;
10691 case 8:
10692 case 16:
10693 case 24:
10694 *total = COSTS_N_INSNS (4);
10695 break;
10696 case 31:
10697 *total = COSTS_N_INSNS (6);
10698 break;
10699 default:
10700 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10701 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10702 speed);
10704 break;
10706 default:
10707 return false;
10709 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10710 return true;
10712 case COMPARE:
10713 switch (GET_MODE (XEXP (x, 0)))
10715 case QImode:
10716 *total = COSTS_N_INSNS (1);
10717 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10718 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10719 break;
10721 case HImode:
10722 *total = COSTS_N_INSNS (2);
10723 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10724 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10725 else if (INTVAL (XEXP (x, 1)) != 0)
10726 *total += COSTS_N_INSNS (1);
10727 break;
10729 case PSImode:
10730 *total = COSTS_N_INSNS (3);
10731 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
10732 *total += COSTS_N_INSNS (2);
10733 break;
10735 case SImode:
10736 *total = COSTS_N_INSNS (4);
10737 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10738 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10739 else if (INTVAL (XEXP (x, 1)) != 0)
10740 *total += COSTS_N_INSNS (3);
10741 break;
10743 default:
10744 return false;
10746 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10747 return true;
10749 case TRUNCATE:
10750 if (AVR_HAVE_MUL
10751 && LSHIFTRT == GET_CODE (XEXP (x, 0))
10752 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
10753 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10755 if (QImode == mode || HImode == mode)
10757 *total = COSTS_N_INSNS (2);
10758 return true;
10761 break;
10763 default:
10764 break;
10766 return false;
10770 /* Implement `TARGET_RTX_COSTS'. */
10772 static bool
10773 avr_rtx_costs (rtx x, int codearg, int outer_code,
10774 int opno, int *total, bool speed)
10776 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
10777 opno, total, speed);
10779 if (avr_log.rtx_costs)
10781 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10782 done, speed ? "speed" : "size", *total, outer_code, x);
10785 return done;
10789 /* Implement `TARGET_ADDRESS_COST'. */
10791 static int
10792 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
10793 addr_space_t as ATTRIBUTE_UNUSED,
10794 bool speed ATTRIBUTE_UNUSED)
10796 int cost = 4;
10798 if (GET_CODE (x) == PLUS
10799 && CONST_INT_P (XEXP (x, 1))
10800 && (REG_P (XEXP (x, 0))
10801 || GET_CODE (XEXP (x, 0)) == SUBREG))
10803 if (INTVAL (XEXP (x, 1)) >= 61)
10804 cost = 18;
10806 else if (CONSTANT_ADDRESS_P (x))
10808 if (optimize > 0
10809 && io_address_operand (x, QImode))
10810 cost = 2;
10813 if (avr_log.address_cost)
10814 avr_edump ("\n%?: %d = %r\n", cost, x);
10816 return cost;
10819 /* Test for extra memory constraint 'Q'.
10820 It's a memory address based on Y or Z pointer with valid displacement. */
10823 extra_constraint_Q (rtx x)
10825 int ok = 0;
10827 if (GET_CODE (XEXP (x,0)) == PLUS
10828 && REG_P (XEXP (XEXP (x,0), 0))
10829 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
10830 && (INTVAL (XEXP (XEXP (x,0), 1))
10831 <= MAX_LD_OFFSET (GET_MODE (x))))
10833 rtx xx = XEXP (XEXP (x,0), 0);
10834 int regno = REGNO (xx);
10836 ok = (/* allocate pseudos */
10837 regno >= FIRST_PSEUDO_REGISTER
10838 /* strictly check */
10839 || regno == REG_Z || regno == REG_Y
10840 /* XXX frame & arg pointer checks */
10841 || xx == frame_pointer_rtx
10842 || xx == arg_pointer_rtx);
10844 if (avr_log.constraints)
10845 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10846 ok, reload_completed, reload_in_progress, x);
10849 return ok;
10852 /* Convert condition code CONDITION to the valid AVR condition code. */
10854 RTX_CODE
10855 avr_normalize_condition (RTX_CODE condition)
10857 switch (condition)
10859 case GT:
10860 return GE;
10861 case GTU:
10862 return GEU;
10863 case LE:
10864 return LT;
10865 case LEU:
10866 return LTU;
10867 default:
10868 gcc_unreachable ();
/* Helper function for `avr_reorg'.  Return the single SET pattern of
   INSN provided INSN is a non-jump insn that sets cc0 to a COMPARE of
   operands no wider than 4 bytes; return NULL_RTX otherwise.  */

static rtx
avr_compare_pattern (rtx_insn *insn)
{
  rtx pattern = single_set (insn);

  if (pattern
      && NONJUMP_INSN_P (insn)
      && SET_DEST (pattern) == cc0_rtx
      && GET_CODE (SET_SRC (pattern)) == COMPARE)
    {
      machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
      machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));

      /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
         They must not be swapped, thus skip them.  */

      if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
          && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
        return pattern;
    }

  return NULL_RTX;
}
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical and both branches must be
     simple conditional jumps, i.e. single sets of the PC.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps: remove compares made redundant by
   switch/case expansion and rewrite "difficult" branch conditions
   into forms AVR can handle directly (swapping compare operands or
   adjusting the constant).  */

static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      /* Only interested in insns that compare against cc0.  */
      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
	{
          /* Now we work under compare insn with difficult branch.  */

          rtx_insn *next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both compare operands are registers: swap them and
                 invert the branch condition accordingly.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              /* Force re-recognition of the modified jump.  */
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant: if the condition
                 allows, bump the constant by one and normalize the
                 condition code (e.g. GT -> GE).  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
/* Returns register number for function return value.  */

static inline unsigned int
avr_ret_register (void)
{
  /* Return values end at R25:R24; avr_libcall_value counts down
     from this register by the (rounded) size of the value.  */
  return 24;
}
/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
/* Return true iff REGNO is the base register used for function
   return values (R24, see avr_ret_register).  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}
11163 /* Implement `TARGET_LIBCALL_VALUE'. */
11164 /* Create an RTX representing the place where a
11165 library function returns a value of mode MODE. */
11167 static rtx
11168 avr_libcall_value (machine_mode mode,
11169 const_rtx func ATTRIBUTE_UNUSED)
11171 int offs = GET_MODE_SIZE (mode);
11173 if (offs <= 4)
11174 offs = (offs + 1) & ~1;
11176 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
11180 /* Implement `TARGET_FUNCTION_VALUE'. */
11181 /* Create an RTX representing the place where a
11182 function returns a value of data type VALTYPE. */
11184 static rtx
11185 avr_function_value (const_tree type,
11186 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11187 bool outgoing ATTRIBUTE_UNUSED)
11189 unsigned int offs;
11191 if (TYPE_MODE (type) != BLKmode)
11192 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
11194 offs = int_size_in_bytes (type);
11195 if (offs < 2)
11196 offs = 2;
11197 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11198 offs = GET_MODE_SIZE (SImode);
11199 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11200 offs = GET_MODE_SIZE (DImode);
11202 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
11206 test_hard_reg_class (enum reg_class rclass, rtx x)
11208 int regno = true_regnum (x);
11209 if (regno < 0)
11210 return 0;
11212 if (TEST_HARD_REG_CLASS (rclass, regno))
11213 return 1;
11215 return 0;
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx_insn *insn)
{
  /* NOTE(review): on devices with the skip-bug (TARGET_SKIP_BUG) we
     never report an insn as 2-word skippable — presumably skipping
     2-word insns is broken there; confirm against device errata.  */
  if (TARGET_SKIP_BUG
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn: only moves to or
           from a constant (direct) address are 2 words here.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}
11269 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
11271 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11272 ? XEXP (dest, 0)
11273 : dest);
11274 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11275 int dest_addr = INSN_ADDRESSES (uid);
11276 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
11278 return (jump_offset == 1
11279 || (jump_offset == 2
11280 && avr_2word_insn_p (next_active_insn (insn))));
/* Worker function for `HARD_REGNO_MODE_OK'.  */
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

int
avr_hard_regno_mode_ok (int regno, machine_mode mode)
{
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
     Disallowing QI et al. in these regs might lead to code like
         (set (subreg:QI (reg:HI 28) n) ...)
     which will result in wrong code because reload does not
     handle SUBREGs of hard registers like this.
     This could be fixed in reload.  However, it appears
     that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  /* FIXME: Ideally, the following test is not needed.
     However, it turned out that it can reduce the number
     of spill fails.  AVR and it's poor endowment with
     address registers is extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4
      && regno >= REG_X)
    return 0;

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
}
/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */
/* Return nonzero if a MODE-sized value starting at hard register REGNO
   would be partly call-clobbered, i.e. it straddles one of the
   clobbered/saved register boundaries.  */

int
avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
{
  /* FIXME: This hook gets called with MODE:REGNO combinations that don't
     represent valid hard registers like, e.g. HI:29.  Returning TRUE
     for such registers can lead to performance degradation as mentioned
     in PR53595.  Thus, report invalid hard registers as FALSE.  */

  if (!avr_hard_regno_mode_ok (regno, mode))
    return 0;

  /* Return true if any of the following boundaries is crossed:
     17/18, 27/28 and 29/30.  */

  return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
          || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
          || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
}
11343 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
11345 enum reg_class
11346 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
11347 addr_space_t as, RTX_CODE outer_code,
11348 RTX_CODE index_code ATTRIBUTE_UNUSED)
11350 if (!ADDR_SPACE_GENERIC_P (as))
11352 return POINTER_Z_REGS;
11355 if (!avr_strict_X)
11356 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11358 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
/* Return true iff register REGNO (a hard register, or a pseudo that is
   resolved through reg_renumber) is usable as a base register for an
   address in address space AS with outer rtx code OUTER_CODE.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
                                   machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  if (!ADDR_SPACE_GENERIC_P (as))
    {
      /* Non-generic address spaces: only Z is acceptable.  */
      if (regno < FIRST_PSEUDO_REGISTER
          && regno == REG_Z)
        {
          return true;
        }

      if (reg_renumber)
        {
          regno = reg_renumber[regno];

          if (regno == REG_Z)
            {
              return true;
            }
        }

      return false;
    }

  /* Generic address space: X, Y, Z and the arg pointer qualify.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM)
        {
          ok = true;
        }
    }

  /* With -mstrict-X, X cannot serve as base of a PLUS address because
     it has no displacement addressing.  */
  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}
11426 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
11427 /* Set 32-bit register OP[0] to compile-time constant OP[1].
11428 CLOBBER_REG is a QI clobber register or NULL_RTX.
11429 LEN == NULL: output instructions.
11430 LEN != NULL: set *LEN to the length of the instruction sequence
11431 (in words) printed with LEN = NULL.
11432    If CLEAR_P is true, OP[0] had been cleared to Zero already.
11433 If CLEAR_P is false, nothing is known about OP[0].
11435 The effect on cc0 is as follows:
11437 Load 0 to any register except ZERO_REG : NONE
11438 Load ld register with any value : NONE
11439 Anything else: : CLOBBER */
/* Worker for output_reload_inhi / output_reload_insisf / avr_out_reload_inpsi:
   Move constant OP[1] into register OP[0] byte by byte, trying hard to find
   the cheapest instruction sequence.

   CLOBBER_REG is a QImode LD_REGS scratch (or NULL_RTX, in which case one is
   cooked up on demand via __tmp_reg__).
   LEN == NULL: print the instructions.
   LEN != NULL: print nothing, just store the sequence length in *LEN.
   CLEAR_P: the destination is known to be already zeroed, so stores of
   zero bytes (and clears before DEC/INC/BLD) can be omitted.  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;       /* Impossible byte value: "nothing cached".  */
  bool cooked_clobber_p = false;
  bool set_p = false;           /* T flag already set by "set"?  */
  machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Non-integer constants (symbols): load each byte via lo8/hi8/
         hlo8/hhi8 relocations, through the clobber reg if needed.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
              { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber reg already holds exactly this byte value.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte.  */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
11657 /* Reload the constant OP[1] into the HI register OP[0].
11658 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11659 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
11660 need a clobber reg or have to cook one up.
11662 PLEN == NULL: Output instructions.
11663 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
11664 by the insns printed.
11666 Return "". */
11668 const char*
11669 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
11671 output_reload_in_const (op, clobber_reg, plen, false);
11672 return "";
11676 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
11677 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11678 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
11679 need a clobber reg or have to cook one up.
11681 LEN == NULL: Output instructions.
11683 LEN != NULL: Output nothing. Set *LEN to number of words occupied
11684 by the insns printed.
11686 Return "". */
11688 const char *
11689 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
11691 if (AVR_HAVE_MOVW
11692 && !test_hard_reg_class (LD_REGS, op[0])
11693 && (CONST_INT_P (op[1])
11694 || CONST_FIXED_P (op[1])
11695 || CONST_DOUBLE_P (op[1])))
11697 int len_clr, len_noclr;
11699 /* In some cases it is better to clear the destination beforehand, e.g.
11701 CLR R2 CLR R3 MOVW R4,R2 INC R2
11703 is shorther than
11705 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
11707 We find it too tedious to work that out in the print function.
11708 Instead, we call the print function twice to get the lengths of
11709 both methods and use the shortest one. */
11711 output_reload_in_const (op, clobber_reg, &len_clr, true);
11712 output_reload_in_const (op, clobber_reg, &len_noclr, false);
11714 if (len_noclr - len_clr == 4)
11716 /* Default needs 4 CLR instructions: clear register beforehand. */
11718 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
11719 "mov %B0,__zero_reg__" CR_TAB
11720 "movw %C0,%A0", &op[0], len, 3);
11722 output_reload_in_const (op, clobber_reg, len, true);
11724 if (len)
11725 *len += 3;
11727 return "";
11731 /* Default: destination not pre-cleared. */
11733 output_reload_in_const (op, clobber_reg, len, false);
11734 return "";
11737 const char*
11738 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
11740 output_reload_in_const (op, clobber_reg, len, false);
11741 return "";
11745 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
11747 void
11748 avr_output_addr_vec_elt (FILE *stream, int value)
11750 if (AVR_HAVE_JMP_CALL)
11751 fprintf (stream, "\t.word gs(.L%d)\n", value);
11752 else
11753 fprintf (stream, "\trjmp .L%d\n", value);
11756 static void
11757 avr_conditional_register_usage(void)
11759 if (AVR_TINY)
11761 unsigned int i;
11763 const int tiny_reg_alloc_order[] = {
11764 24, 25,
11765 22, 23,
11766 30, 31,
11767 26, 27,
11768 28, 29,
11769 21, 20, 19, 18,
11770 16, 17,
11771 32, 33, 34, 35,
11772 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11775 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
11776 - R0-R15 are not available in Tiny Core devices
11777 - R16 and R17 are fixed registers. */
11779 for (i = 0; i <= 17; i++)
11781 fixed_regs[i] = 1;
11782 call_used_regs[i] = 1;
11785 /* Set R18 to R21 as callee saved registers
11786 - R18, R19, R20 and R21 are the callee saved registers in
11787 Tiny Core devices */
11789 for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
11791 call_used_regs[i] = 0;
11794 /* Update register allocation order for Tiny Core devices */
11796 for (i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
11798 reg_alloc_order[i] = tiny_reg_alloc_order[i];
11801 CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
11802 CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
11806 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
11807 /* Returns true if SCRATCH are safe to be allocated as a scratch
11808 registers (for a define_peephole2) in the current function. */
11810 static bool
11811 avr_hard_regno_scratch_ok (unsigned int regno)
11813 /* Interrupt functions can only use registers that have already been saved
11814 by the prologue, even if they would normally be call-clobbered. */
11816 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11817 && !df_regs_ever_live_p (regno))
11818 return false;
11820 /* Don't allow hard registers that might be part of the frame pointer.
11821 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11822 and don't care for a frame pointer that spans more than one register. */
11824 if ((!reload_completed || frame_pointer_needed)
11825 && (regno == REG_Y || regno == REG_Y + 1))
11827 return false;
11830 return true;
11834 /* Worker function for `HARD_REGNO_RENAME_OK'. */
11835 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
11838 avr_hard_regno_rename_ok (unsigned int old_reg,
11839 unsigned int new_reg)
11841 /* Interrupt functions can only use registers that have already been
11842 saved by the prologue, even if they would normally be
11843 call-clobbered. */
11845 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11846 && !df_regs_ever_live_p (new_reg))
11847 return 0;
11849 /* Don't allow hard registers that might be part of the frame pointer.
11850 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11851 and don't care for a frame pointer that spans more than one register. */
11853 if ((!reload_completed || frame_pointer_needed)
11854 && (old_reg == REG_Y || old_reg == REG_Y + 1
11855 || new_reg == REG_Y || new_reg == REG_Y + 1))
11857 return 0;
11860 return 1;
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* Target out of relative branch range: need RJMP-over-JMP sequence.  */
  bool long_jump = get_attr_length (insn) >= 4;
  /* Skip-insn form (SBRS/SBRC skip one insn) requires the inverted test.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit degenerate to EQ/NE on that bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      /* Lower I/O range can be tested directly with SBIS/SBIC.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Other I/O addresses: read into __tmp_reg__ first.  */

          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  /* Reversed short branch: the skip instruction above already jumps
     over the (single) fall-through insn, so nothing more to print.  */

  return "";
}
11934 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
11936 static void
11937 avr_asm_out_ctor (rtx symbol, int priority)
11939 fputs ("\t.global __do_global_ctors\n", asm_out_file);
11940 default_ctor_section_asm_out_constructor (symbol, priority);
11944 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
11946 static void
11947 avr_asm_out_dtor (rtx symbol, int priority)
11949 fputs ("\t.global __do_global_dtors\n", asm_out_file);
11950 default_dtor_section_asm_out_destructor (symbol, priority);
11954 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
11956 static bool
11957 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
11959 HOST_WIDE_INT size = int_size_in_bytes (type);
11960 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
11962 /* In avr, there are 8 return registers. But, for Tiny Core
11963 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
11964 Return true if size is unknown or greater than the limit. */
11966 if (size == -1 || size > ret_size_limit)
11968 return true;
11970 else
11972 return false;
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
11993 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
11995 static machine_mode
11996 avr_addr_space_address_mode (addr_space_t as)
11998 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12002 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
12004 static machine_mode
12005 avr_addr_space_pointer_mode (addr_space_t as)
12007 return avr_addr_space_address_mode (as);
12011 /* Helper for following function. */
12013 static bool
12014 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12016 gcc_assert (REG_P (reg));
12018 if (strict)
12020 return REGNO (reg) == REG_Z;
12023 /* Avoid combine to propagate hard regs. */
12025 if (can_create_pseudo_p()
12026 && REGNO (reg) < REG_Z)
12028 return false;
12031 return true;
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* RAM: delegate to the ordinary legitimate-address test.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is accessed via (E)LPM: only a plain Z register or Z with
         post-increment will do.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:

      /* 24-bit space: a pseudo before reload, or (lo_sum hi Z).  */

      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump (-mlog=legitimate_address_p).  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
12117 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
12119 static rtx
12120 avr_addr_space_legitimize_address (rtx x, rtx old_x,
12121 machine_mode mode, addr_space_t as)
12123 if (ADDR_SPACE_GENERIC_P (as))
12124 return avr_legitimize_address (x, old_x, mode);
12126 if (avr_log.legitimize_address)
12128 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
12131 return old_x;
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip (const (plus ...)) wrappers to find the symbol, if any.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Extend the 16-bit address with the segment byte MSB (zero-extend
         when the segment is 0).  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: nothing to do.  */

  return src;
}
12204 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
12206 static bool
12207 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
12208 addr_space_t superset ATTRIBUTE_UNUSED)
12210 /* Allow any kind of pointer mess. */
12212 return true;
/* Implement `TARGET_CONVERT_TO_TYPE'.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
            be located in the right memory, like in

                (const __flash*) PSTR ("text")

            Also try to distinguish between explicit casts requested by
            the user and implicit casts like

                void f (const __flash char*);

                void g (const char *p)
                {
                  f ((const __flash*) p);
                }

            under the assumption that an explicit casts means that the user
            knows what he is doing, e.g. interface with PSTR or old style
            code with progmem and pgm_read_xxx.  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Converting into __memx is always safe (it encloses every space);
         anything else that changes the space gets a warning.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  /* NULL_TREE: fall back to the default conversion.  */

  return NULL_TREE;
}
/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0:  Also emit a move insn that copies the contents of that
               hard register into the new pseudo.

   HREG != 0:  Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* OP and HREG advance in lock-step; OPMASK is consumed bit by bit.  */

  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
12320 void
12321 avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
12323 avr_fix_operands (op, NULL, opmask, rmask);
12327 /* Helper for the function below: If bit n of MASK is set and
12328 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12329 Otherwise do nothing for that n. Return TRUE. */
12331 static bool
12332 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
12334 for (; mask; mask >>= 1, op++, hreg++)
12335 if ((mask & 1)
12336 && *hreg)
12337 emit_move_insn (*hreg, *op);
12339 return true;
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace offending hard-reg outputs by pseudos; HREG remembers which.  */

  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy the pseudos back to the hard regs the middle-end expected.  */

  return avr_move_fixed_operands (op, hreg, opmask);
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand compination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot store into flash.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only constant byte counts are handled here.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: __memx.  Loop counter lives in R24/R25.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and segment byte.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Upper flash segment: load RAMPZ with the segment number.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address.  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      /* __memx: segment byte goes to R23; the library loop dispatches on it.  */

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: print instructions; PLEN != NULL: just count words.  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* Plain LPM has no post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
12580 /* Helper for __builtin_avr_delay_cycles */
12582 static rtx
12583 avr_mem_clobber (void)
12585 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
12586 MEM_VOLATILE_P (mem) = 1;
12587 return mem;
/* Expand __builtin_avr_delay_cycles: emit a sequence of delay loops and
   NOPs that burns exactly the number of cycles given by OPERANDS0
   (truncated to 32 bits).  Loops of 6/5/4/3 cycles per iteration are
   used for progressively smaller remainders, then 2- and 1-cycle NOPs.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: 6 cycles per iteration + 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop: 5 cycles per iteration + 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop: 4 cycles per iteration + 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop: 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Burn the remainder with RJMP .+0 (2 cycles) and NOP (1 cycle).  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
/* Compute the image of x under f, i.e. perform   x --> f(x)
   F encodes a map on { 0 ... 7 } as 8 hex nibbles; indices >= 8 map to 0.  */

static int
avr_map (unsigned int f, int x)
{
  if (x >= 8)
    return 0;

  return (f >> (4 * x)) & 0xf;
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      switch (mode)
        {
        case MAP_FIXED_0_7:
          metric += ai == i;
          break;

        case MAP_NONFIXED_0_7:
          metric += ai < 8 && ai != i;
          break;

        case MAP_MASK_FIXED_0_7:
          metric |= ((unsigned) (ai == i)) << i;
          break;

        case MAP_PREIMAGE_0_7:
          metric += ai < 8;
          break;

        case MAP_MASK_PREIMAGE_F:
          metric |= ((unsigned) (ai == 0xf)) << i;
          break;

        default:
          gcc_unreachable();
        }
    }

  return metric;
}
12709 /* Return true if IVAL has a 0xf in its hexadecimal representation
12710 and false, otherwise. Only nibbles 0..7 are taken into account.
12711 Used as constraint helper for C0f and Cxf. */
12713 bool
12714 avr_has_nibble_0xf (rtx ival)
12716 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
12717 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
/* Candidate operations G for avr_map_decompose: rotations and shifts
   of the 8-bit value, together with their inverse maps and costs.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   VAL_CONST_P: the value to be inserted is a known compile-time constant.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  /* Does F pass through any bits of the inserted value (nibble 0xf)?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1 */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
   is different to its source position.

   PLEN follows the avr_out_insert_bits convention: NULL to print the
   assembler code, non-NULL to accumulate the code length instead.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1] */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     This way a source bit already held in T can serve several
     destination bits with a single BST.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Local operand order: %0 = result, %1 = bits to insert,
     %2 = target value; %3 is a scratch set per emitted insn.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If handling the fixed points by BLD/BST is more than 3 insns
         more expensive, mask them in wholesale with EOR/ANDI/EOR.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
/* IDs for all the AVR builtins.  Generated from builtins.def so that
   the enumerators appear in the same order as the DEF_BUILTIN entries;
   AVR_BUILTIN_COUNT is the number of builtins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
/* Per-builtin descriptor: the insn to expand to, the number of call
   arguments, and the FUNCTION_DECL registered in avr_init_builtins.  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};


/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID]  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13019 /* Implement `TARGET_BUILTIN_DECL'. */
13021 static tree
13022 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13024 if (id < AVR_BUILTIN_COUNT)
13025 return avr_bdesc[id].fndecl;
13027 return error_mark_node;
13031 static void
13032 avr_init_builtin_int24 (void)
13034 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
13035 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13037 lang_hooks.types.register_builtin_type (int24_type, "__int24");
13038 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  Builds the function
   types needed by builtins.def, registers one FUNCTION_DECL per
   DEF_BUILTIN entry into avr_bdesc[], and registers __int24/__uint24.  */

static void
avr_init_builtins (void)
{
  /* Function types for the "plain" (non fixed-point) builtins.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Type for __builtin_avr_flash_segment: pointer into 24-bit __memx
     address space.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* Helper macros that build the function types for the fixed-point
     builtins (absfx, roundfx, countlsfx, bitsfx, fxbits) for each
     fract/accum type FX.  ITYP maps a fixed-point type to the integer
     type of the same precision and signedness.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Short names for the fixed-point type nodes; naming scheme is
     [u] + h/n/l/ll + r (fract) or k (accum).  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register each builtin from builtins.def; its lowercased name gets
     the "__builtin_avr_" prefix and the resulting decl is cached in
     avr_bdesc[] for avr_builtin_decl / avr_expand_builtin.  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.

   ICODE is the insn to generate, EXP the CALL_EXPR, TARGET a suggested
   (possibly NULL or unsuitable) destination rtx.  Returns the rtx that
   holds the result.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* Get a fresh pseudo if TARGET is missing or unusable for operand 0.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n+1].mode;

      /* Narrow SImode / mode-less constants to HImode when the insn
         wants HImode input.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Builtins that need special treatment; everything else falls
     through to the vanilla expansion below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:
      {
        /* Warn about odd rounding.  Rounding points >= FBIT will have
           no effect.  */

        if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
          break;

        int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

        if (rbit >= (int) GET_MODE_FBIT (mode))
          {
            warning (OPT_Wextra, "rounding to %d bits has no effect for "
                     "fixed-point value with %d fractional bits",
                     rbit, GET_MODE_FBIT (mode));

            /* Rounding is a no-op: just return the first argument.  */

            return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                                EXPAND_NORMAL);
          }
        else if (rbit <= - (int) GET_MODE_IBIT (mode))
          {
            warning (0, "rounding result will always be 0");
            return CONST0_RTX (mode);
          }

        /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

           TR 18037 only specifies results for RP > 0.  However, the
           remaining cases of -IBIT < RP <= 0 can easily be supported
           without any additional overhead.  */

        break; /* round */
      }
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
/* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).
   Returns the folded FIXED_CST, TVAL itself if it is non-negative,
   or NULL_TREE if TVAL is not a FIXED_CST.  */

static tree
avr_fold_absfx (tree tval)
{
  if (FIXED_CST != TREE_CODE (tval))
    return NULL_TREE;

  /* Our fixed-points have no padding:  Use double_int payload directly.  */

  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
  double_int ival = fval.data.sext (bits);

  if (!ival.is_negative())
    return tval;

  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.
     abs (most negative value) saturates to the most positive value
     instead of overflowing.  */

  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
    ? double_int::max_value (bits, false)
    : -ival;

  return build_fixed (TREE_TYPE (tval), fval);
}
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Fold calls to AVR builtins at the tree level when optimizing:
   swap -> rotate, absfx on constants, bitsfx/fxbits -> view-convert,
   and constant-map simplifications of insert_bits.  Returns the folded
   tree or NULL_TREE if no fold applies.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is just a rotate by 4.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* bitsfx / fxbits are pure reinterpretations between same-width
         integer and fixed-point types.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* Merge by XOR/AND/XOR: (tbits ^ tval) & ~mask_f ^ tval.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decompose map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
/* Initialize the GCC target structure.  Each #undef/#define pair
   installs one AVR implementation of a target hook; hooks not listed
   here keep the defaults from target-def.h.  */

/* Assembler output.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and calling conventions.  */

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Sections.  */

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs and machine-dependent passes.  */

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtins (see avr_init_builtins etc. above).  */

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx, ...).  */

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;
13795 #include "gt-avr.h"