Implement TARGET_IRA_CHANGE_PSEUDO_ALLOCNO_CLASS hook.
[official-gcc.git] / gcc / config / avr / avr.c
blobef65d222420adf53b374a1bf8d433069da6c4a61
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2015 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "input.h"
35 #include "alias.h"
36 #include "symtab.h"
37 #include "tree.h"
38 #include "fold-const.h"
39 #include "varasm.h"
40 #include "print-tree.h"
41 #include "calls.h"
42 #include "stor-layout.h"
43 #include "stringpool.h"
44 #include "output.h"
45 #include "function.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "emit-rtl.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "c-family/c-common.h"
53 #include "diagnostic-core.h"
54 #include "obstack.h"
55 #include "recog.h"
56 #include "optabs.h"
57 #include "langhooks.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "target-def.h"
61 #include "params.h"
62 #include "dominance.h"
63 #include "cfg.h"
64 #include "cfgrtl.h"
65 #include "cfganal.h"
66 #include "lcm.h"
67 #include "cfgbuild.h"
68 #include "cfgcleanup.h"
69 #include "predict.h"
70 #include "basic-block.h"
71 #include "df.h"
72 #include "builtins.h"
73 #include "context.h"
74 #include "tree-pass.h"
76 /* Maximal allowed offset for an address in the LD command */
77 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
79 /* Return true if STR starts with PREFIX and false, otherwise. */
80 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
82 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
83 address space where data is to be located.
84 As the only non-generic address spaces are all located in flash,
85 this can be used to test if data shall go into some .progmem* section.
86 This must be the rightmost field of machine dependent section flags. */
87 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
89 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
90 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Bug fix: the macro body used to reference the literal identifier `sym'
   instead of the macro parameter SYM, so it only worked when the caller's
   argument happened to be a variable named `sym'.  Use the parameter.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Same fix as above: use the parameter SYM, not a hard-coded `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                  \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)   \
   / SYMBOL_FLAG_MACH_DEP)
105 #define TINY_ADIW(REG1, REG2, I) \
106 "subi " #REG1 ",lo8(-(" #I "))" CR_TAB \
107 "sbci " #REG2 ",hi8(-(" #I "))"
109 #define TINY_SBIW(REG1, REG2, I) \
110 "subi " #REG1 ",lo8((" #I "))" CR_TAB \
111 "sbci " #REG2 ",hi8((" #I "))"
113 #define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
114 #define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
116 /* Known address spaces. The order must be the same as in the respective
117 enum from avr.h (or designated initialized must be used). */
118 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
120 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
121 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
122 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
123 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
124 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
125 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
126 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
127 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
/* RAM addresses of some SFRs used by the compiler.  These addresses are
   unique over all devices of an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: the processor status register.  */
  int sreg;

  /* CCP, RAMPD, RAMPX, RAMPY of XMEGA devices.  */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: high byte of the 24-bit address used with ELPM.  */
  int rampz;

  /* SP: the stack pointer, low and high byte.  */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override for the selected architecture.  */
static avr_addr_t avr_addr;
156 /* Prototypes for local helper functions. */
158 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
159 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
160 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
161 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
162 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
163 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
165 static int get_sequence_length (rtx_insn *insns);
166 static int sequent_regs_live (void);
167 static const char *ptrreg_to_str (int);
168 static const char *cond_string (enum rtx_code);
169 static int avr_num_arg_regs (machine_mode, const_tree);
170 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
171 int, bool);
172 static void output_reload_in_const (rtx*, rtx, int*, bool);
173 static struct machine_function * avr_init_machine_status (void);
176 /* Prototypes for hook implementors if needed before their implementation. */
178 static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
181 /* Allocate registers from r25 to r8 for parameters for function calls. */
182 #define FIRST_CUM_REG 26
184 /* Last call saved register */
185 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
187 /* Implicit target register of LPM instruction (R0) */
188 extern GTY(()) rtx lpm_reg_rtx;
189 rtx lpm_reg_rtx;
191 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
192 extern GTY(()) rtx lpm_addr_reg_rtx;
193 rtx lpm_addr_reg_rtx;
195 /* Temporary register RTX (reg:QI TMP_REGNO) */
196 extern GTY(()) rtx tmp_reg_rtx;
197 rtx tmp_reg_rtx;
199 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
200 extern GTY(()) rtx zero_reg_rtx;
201 rtx zero_reg_rtx;
203 /* RTXs for all general purpose registers as QImode */
204 extern GTY(()) rtx all_regs_rtx[32];
205 rtx all_regs_rtx[32];
207 /* SREG, the processor status */
208 extern GTY(()) rtx sreg_rtx;
209 rtx sreg_rtx;
211 /* RAMP* special function registers */
212 extern GTY(()) rtx rampd_rtx;
213 extern GTY(()) rtx rampx_rtx;
214 extern GTY(()) rtx rampy_rtx;
215 extern GTY(()) rtx rampz_rtx;
216 rtx rampd_rtx;
217 rtx rampx_rtx;
218 rtx rampy_rtx;
219 rtx rampz_rtx;
221 /* RTX containing the strings "" and "e", respectively */
222 static GTY(()) rtx xstring_empty;
223 static GTY(()) rtx xstring_e;
225 /* Current architecture. */
226 const avr_arch_t *avr_arch;
228 /* Section to put switch tables in. */
229 static GTY(()) section *progmem_swtable_section;
231 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
232 or to address space __flash* or __memx. Only used as singletons inside
233 avr_asm_select_section, but it must not be local there because of GTY. */
234 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
236 /* Condition for insns/expanders from avr-dimode.md. */
237 bool avr_have_dimode = true;
239 /* To track if code will use .bss and/or .data. */
240 bool avr_need_clear_bss_p = false;
241 bool avr_need_copy_data_p = false;
244 /* Transform UP into lowercase and write the result to LO.
245 You must provide enough space for LO. Return LO. */
247 static char*
248 avr_tolower (char *lo, const char *up)
250 char *lo0 = lo;
252 for (; *up; up++, lo++)
253 *lo = TOLOWER (*up);
255 *lo = '\0';
257 return lo0;
/* Count the number of set bits in VAL using Kernighan's trick:
   each `val &= val - 1' clears the lowest set bit.  */

static inline int
avr_popcount (unsigned int val)
{
  int count;

  for (count = 0; val != 0; ++count)
    val &= val - 1;

  return count;
}
278 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
279 Return true if the least significant N_BYTES bytes of XVAL all have a
280 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
281 of integers which contains an integer N iff bit N of POP_MASK is set. */
283 bool
284 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
286 int i;
288 machine_mode mode = GET_MODE (xval);
290 if (VOIDmode == mode)
291 mode = SImode;
293 for (i = 0; i < n_bytes; i++)
295 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
296 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
298 if (0 == (pop_mask & (1 << avr_popcount (val8))))
299 return false;
302 return true;
306 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
307 the bit representation of X by "casting" it to CONST_INT. */
310 avr_to_int_mode (rtx x)
312 machine_mode mode = GET_MODE (x);
314 return VOIDmode == mode
316 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
320 static const pass_data avr_pass_data_recompute_notes =
322 RTL_PASS, // type
323 "", // name (will be patched)
324 OPTGROUP_NONE, // optinfo_flags
325 TV_DF_SCAN, // tv_id
326 0, // properties_required
327 0, // properties_provided
328 0, // properties_destroyed
329 0, // todo_flags_start
330 TODO_df_finish | TODO_df_verify // todo_flags_finish
334 class avr_pass_recompute_notes : public rtl_opt_pass
336 public:
337 avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
338 : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
340 this->name = name;
343 virtual unsigned int execute (function*)
345 df_note_add_problem ();
346 df_analyze ();
348 return 0;
350 }; // avr_pass_recompute_notes
353 static void
354 avr_register_passes (void)
356 /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
357 notes which are used by `avr.c::reg_unused_after' and branch offset
358 computations. These notes must be correct, i.e. there must be no
359 dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.
361 DF needs (correct) CFG, hence right before free_cfg is the last
362 opportunity to rectify notes. */
364 register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
365 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
369 /* Set `avr_arch' as specified by `-mmcu='.
370 Return true on success. */
372 static bool
373 avr_set_core_architecture (void)
375 /* Search for mcu core architecture. */
377 if (!avr_mmcu)
378 avr_mmcu = AVR_MMCU_DEFAULT;
380 avr_arch = &avr_arch_types[0];
382 for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
384 if (NULL == mcu->name)
386 /* Reached the end of `avr_mcu_types'. This should actually never
387 happen as options are provided by device-specs. It could be a
388 typo in a device-specs or calling the compiler proper directly
389 with -mmcu=<device>. */
391 error ("unknown core architecture %qs specified with %qs",
392 avr_mmcu, "-mmcu=");
393 avr_inform_core_architectures ();
394 break;
396 else if (0 == strcmp (mcu->name, avr_mmcu)
397 // Is this a proper architecture ?
398 && NULL == mcu->macro)
400 avr_arch = &avr_arch_types[mcu->arch_id];
401 if (avr_n_flash < 0)
402 avr_n_flash = mcu->n_flash;
404 return true;
408 return false;
412 /* Implement `TARGET_OPTION_OVERRIDE'. */
414 static void
415 avr_option_override (void)
417 /* Disable -fdelete-null-pointer-checks option for AVR target.
418 This option compiler assumes that dereferencing of a null pointer
419 would halt the program. For AVR this assumption is not true and
420 programs can safely dereference null pointers. Changes made by this
421 option may not work properly for AVR. So disable this option. */
423 flag_delete_null_pointer_checks = 0;
425 /* caller-save.c looks for call-clobbered hard registers that are assigned
426 to pseudos that cross calls and tries so save-restore them around calls
427 in order to reduce the number of stack slots needed.
429 This might lead to situations where reload is no more able to cope
430 with the challenge of AVR's very few address registers and fails to
431 perform the requested spills. */
433 if (avr_strict_X)
434 flag_caller_saves = 0;
436 /* Unwind tables currently require a frame pointer for correctness,
437 see toplev.c:process_options(). */
439 if ((flag_unwind_tables
440 || flag_non_call_exceptions
441 || flag_asynchronous_unwind_tables)
442 && !ACCUMULATE_OUTGOING_ARGS)
444 flag_omit_frame_pointer = 0;
447 if (flag_pic == 1)
448 warning (OPT_fpic, "-fpic is not supported");
449 if (flag_pic == 2)
450 warning (OPT_fPIC, "-fPIC is not supported");
451 if (flag_pie == 1)
452 warning (OPT_fpie, "-fpie is not supported");
453 if (flag_pie == 2)
454 warning (OPT_fPIE, "-fPIE is not supported");
456 if (!avr_set_core_architecture())
457 return;
459 /* RAM addresses of some SFRs common to all devices in respective arch. */
461 /* SREG: Status Register containing flags like I (global IRQ) */
462 avr_addr.sreg = 0x3F + avr_arch->sfr_offset;
464 /* RAMPZ: Address' high part when loading via ELPM */
465 avr_addr.rampz = 0x3B + avr_arch->sfr_offset;
467 avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
468 avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
469 avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
470 avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;
472 /* SP: Stack Pointer (SP_H:SP_L) */
473 avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
474 avr_addr.sp_h = avr_addr.sp_l + 1;
476 init_machine_status = avr_init_machine_status;
478 avr_log_set_avr_log();
480 /* Register some avr-specific pass(es). There is no canonical place for
481 pass registration. This function is convenient. */
483 avr_register_passes ();
486 /* Function to set up the backend function structure. */
488 static struct machine_function *
489 avr_init_machine_status (void)
491 return ggc_cleared_alloc<machine_function> ();
495 /* Implement `INIT_EXPANDERS'. */
496 /* The function works like a singleton. */
498 void
499 avr_init_expanders (void)
501 int regno;
503 for (regno = 0; regno < 32; regno ++)
504 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
506 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
507 tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
508 zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
510 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
512 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
513 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
514 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
515 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
516 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
518 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
519 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
521 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
522 to be present */
523 if (AVR_TINY)
524 avr_have_dimode = false;
528 /* Implement `REGNO_REG_CLASS'. */
529 /* Return register class for register R. */
531 enum reg_class
532 avr_regno_reg_class (int r)
534 static const enum reg_class reg_class_tab[] =
536 R0_REG,
537 /* r1 - r15 */
538 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
539 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
540 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
541 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
542 /* r16 - r23 */
543 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
544 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
545 /* r24, r25 */
546 ADDW_REGS, ADDW_REGS,
547 /* X: r26, 27 */
548 POINTER_X_REGS, POINTER_X_REGS,
549 /* Y: r28, r29 */
550 POINTER_Y_REGS, POINTER_Y_REGS,
551 /* Z: r30, r31 */
552 POINTER_Z_REGS, POINTER_Z_REGS,
553 /* SP: SPL, SPH */
554 STACK_REG, STACK_REG
557 if (r <= 33)
558 return reg_class_tab[r];
560 return ALL_REGS;
564 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
566 static bool
567 avr_scalar_mode_supported_p (machine_mode mode)
569 if (ALL_FIXED_POINT_MODE_P (mode))
570 return true;
572 if (PSImode == mode)
573 return true;
575 return default_scalar_mode_supported_p (mode);
579 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
581 static bool
582 avr_decl_flash_p (tree decl)
584 if (TREE_CODE (decl) != VAR_DECL
585 || TREE_TYPE (decl) == error_mark_node)
587 return false;
590 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
594 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
595 address space and FALSE, otherwise. */
597 static bool
598 avr_decl_memx_p (tree decl)
600 if (TREE_CODE (decl) != VAR_DECL
601 || TREE_TYPE (decl) == error_mark_node)
603 return false;
606 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
610 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
612 bool
613 avr_mem_flash_p (rtx x)
615 return (MEM_P (x)
616 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
620 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
621 address space and FALSE, otherwise. */
623 bool
624 avr_mem_memx_p (rtx x)
626 return (MEM_P (x)
627 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
631 /* A helper for the subsequent function attribute used to dig for
632 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
634 static inline int
635 avr_lookup_function_attribute1 (const_tree func, const char *name)
637 if (FUNCTION_DECL == TREE_CODE (func))
639 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
641 return true;
644 func = TREE_TYPE (func);
647 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
648 || TREE_CODE (func) == METHOD_TYPE);
650 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
653 /* Return nonzero if FUNC is a naked function. */
655 static int
656 avr_naked_function_p (tree func)
658 return avr_lookup_function_attribute1 (func, "naked");
661 /* Return nonzero if FUNC is an interrupt function as specified
662 by the "interrupt" attribute. */
664 static int
665 avr_interrupt_function_p (tree func)
667 return avr_lookup_function_attribute1 (func, "interrupt");
670 /* Return nonzero if FUNC is a signal function as specified
671 by the "signal" attribute. */
673 static int
674 avr_signal_function_p (tree func)
676 return avr_lookup_function_attribute1 (func, "signal");
679 /* Return nonzero if FUNC is an OS_task function. */
681 static int
682 avr_OS_task_function_p (tree func)
684 return avr_lookup_function_attribute1 (func, "OS_task");
687 /* Return nonzero if FUNC is an OS_main function. */
689 static int
690 avr_OS_main_function_p (tree func)
692 return avr_lookup_function_attribute1 (func, "OS_main");
696 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
697 /* Sanity cheching for above function attributes. */
699 static void
700 avr_set_current_function (tree decl)
702 location_t loc;
703 const char *isr;
705 if (decl == NULL_TREE
706 || current_function_decl == NULL_TREE
707 || current_function_decl == error_mark_node
708 || ! cfun->machine
709 || cfun->machine->attributes_checked_p)
710 return;
712 loc = DECL_SOURCE_LOCATION (decl);
714 cfun->machine->is_naked = avr_naked_function_p (decl);
715 cfun->machine->is_signal = avr_signal_function_p (decl);
716 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
717 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
718 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
720 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
722 /* Too much attributes make no sense as they request conflicting features. */
724 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
725 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
726 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
727 " exclusive", "OS_task", "OS_main", isr);
729 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
731 if (cfun->machine->is_naked
732 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
733 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
734 " no effect on %qs function", "OS_task", "OS_main", "naked");
736 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
738 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
739 tree ret = TREE_TYPE (TREE_TYPE (decl));
740 const char *name;
742 name = DECL_ASSEMBLER_NAME_SET_P (decl)
743 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
744 : IDENTIFIER_POINTER (DECL_NAME (decl));
746 /* Skip a leading '*' that might still prefix the assembler name,
747 e.g. in non-LTO runs. */
749 name = default_strip_name_encoding (name);
751 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
752 using this when it switched from SIGNAL and INTERRUPT to ISR. */
754 if (cfun->machine->is_interrupt)
755 cfun->machine->is_signal = 0;
757 /* Interrupt handlers must be void __vector (void) functions. */
759 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
760 error_at (loc, "%qs function cannot have arguments", isr);
762 if (TREE_CODE (ret) != VOID_TYPE)
763 error_at (loc, "%qs function cannot return a value", isr);
765 /* If the function has the 'signal' or 'interrupt' attribute, ensure
766 that the name of the function is "__vector_NN" so as to catch
767 when the user misspells the vector name. */
769 if (!STR_PREFIX_P (name, "__vector"))
770 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
771 name, isr);
774 /* Don't print the above diagnostics more than once. */
776 cfun->machine->attributes_checked_p = 1;
780 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
783 avr_accumulate_outgoing_args (void)
785 if (!cfun)
786 return TARGET_ACCUMULATE_OUTGOING_ARGS;
788 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
789 what offset is correct. In some cases it is relative to
790 virtual_outgoing_args_rtx and in others it is relative to
791 virtual_stack_vars_rtx. For example code see
792 gcc.c-torture/execute/built-in-setjmp.c
793 gcc.c-torture/execute/builtins/sprintf-chk.c */
795 return (TARGET_ACCUMULATE_OUTGOING_ARGS
796 && !(cfun->calls_setjmp
797 || cfun->has_nonlocal_label));
801 /* Report contribution of accumulated outgoing arguments to stack size. */
803 static inline int
804 avr_outgoing_args_size (void)
806 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
810 /* Implement `STARTING_FRAME_OFFSET'. */
811 /* This is the offset from the frame pointer register to the first stack slot
812 that contains a variable living in the frame. */
815 avr_starting_frame_offset (void)
817 return 1 + avr_outgoing_args_size ();
821 /* Return the number of hard registers to push/pop in the prologue/epilogue
822 of the current function, and optionally store these registers in SET. */
824 static int
825 avr_regs_to_save (HARD_REG_SET *set)
827 int reg, count;
828 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
830 if (set)
831 CLEAR_HARD_REG_SET (*set);
832 count = 0;
834 /* No need to save any registers if the function never returns or
835 has the "OS_task" or "OS_main" attribute. */
837 if (TREE_THIS_VOLATILE (current_function_decl)
838 || cfun->machine->is_OS_task
839 || cfun->machine->is_OS_main)
840 return 0;
842 for (reg = 0; reg < 32; reg++)
844 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
845 any global register variables. */
847 if (fixed_regs[reg])
848 continue;
850 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
851 || (df_regs_ever_live_p (reg)
852 && (int_or_sig_p || !call_used_regs[reg])
853 /* Don't record frame pointer registers here. They are treated
854 indivitually in prologue. */
855 && !(frame_pointer_needed
856 && (reg == REG_Y || reg == (REG_Y+1)))))
858 if (set)
859 SET_HARD_REG_BIT (*set, reg);
860 count++;
863 return count;
867 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
869 static bool
870 avr_allocate_stack_slots_for_args (void)
872 return !cfun->machine->is_naked;
876 /* Return true if register FROM can be eliminated via register TO. */
878 static bool
879 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
881 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
882 || !frame_pointer_needed);
886 /* Implement `TARGET_WARN_FUNC_RETURN'. */
888 static bool
889 avr_warn_func_return (tree decl)
891 /* Naked functions are implemented entirely in assembly, including the
892 return sequence, so suppress warnings about this. */
894 return !avr_naked_function_p (decl);
897 /* Compute offset between arg_pointer and frame_pointer. */
900 avr_initial_elimination_offset (int from, int to)
902 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
903 return 0;
904 else
906 int offset = frame_pointer_needed ? 2 : 0;
907 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
909 offset += avr_regs_to_save (NULL);
910 return (get_frame_size () + avr_outgoing_args_size()
911 + avr_pc_size + 1 + offset);
916 /* Helper for the function below. */
918 static void
919 avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
921 *node = make_node (FIXED_POINT_TYPE);
922 TYPE_SATURATING (*node) = sat_p;
923 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
924 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
925 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
926 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
927 TYPE_ALIGN (*node) = 8;
928 SET_TYPE_MODE (*node, mode);
930 layout_type (*node);
934 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
936 static tree
937 avr_build_builtin_va_list (void)
939 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
940 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
941 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
942 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
943 to the long long accum modes instead of the desired [U]TAmode.
945 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
946 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
947 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
948 libgcc to detect IBIT and FBIT. */
950 avr_adjust_type_node (&ta_type_node, TAmode, 0);
951 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
952 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
953 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
955 unsigned_long_long_accum_type_node = uta_type_node;
956 long_long_accum_type_node = ta_type_node;
957 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
958 sat_long_long_accum_type_node = sat_ta_type_node;
960 /* Dispatch to the default handler. */
962 return std_build_builtin_va_list ();
966 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
967 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
968 frame pointer by +STARTING_FRAME_OFFSET.
969 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
970 avoids creating add/sub of offset in nonlocal goto and setjmp. */
972 static rtx
973 avr_builtin_setjmp_frame_value (void)
975 rtx xval = gen_reg_rtx (Pmode);
976 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
977 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
978 return xval;
982 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
983 This is return address of function. */
986 avr_return_addr_rtx (int count, rtx tem)
988 rtx r;
990 /* Can only return this function's return address. Others not supported. */
991 if (count)
992 return NULL;
994 if (AVR_3_BYTE_PC)
996 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
997 warning (0, "%<builtin_return_address%> contains only 2 bytes"
998 " of address");
1000 else
1001 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
1003 r = gen_rtx_PLUS (Pmode, tem, r);
1004 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
1005 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
1006 return r;
1009 /* Return 1 if the function epilogue is just a single "ret". */
1012 avr_simple_epilogue (void)
1014 return (! frame_pointer_needed
1015 && get_frame_size () == 0
1016 && avr_outgoing_args_size() == 0
1017 && avr_regs_to_save (NULL) == 0
1018 && ! cfun->machine->is_interrupt
1019 && ! cfun->machine->is_signal
1020 && ! cfun->machine->is_naked
1021 && ! TREE_THIS_VOLATILE (current_function_decl));
1024 /* This function checks sequence of live registers. */
1026 static int
1027 sequent_regs_live (void)
1029 int reg;
1030 int live_seq = 0;
1031 int cur_seq = 0;
1033 for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
1035 if (fixed_regs[reg])
1037 /* Don't recognize sequences that contain global register
1038 variables. */
1040 if (live_seq != 0)
1041 return 0;
1042 else
1043 continue;
1046 if (!call_used_regs[reg])
1048 if (df_regs_ever_live_p (reg))
1050 ++live_seq;
1051 ++cur_seq;
1053 else
1054 cur_seq = 0;
1058 if (!frame_pointer_needed)
1060 if (df_regs_ever_live_p (REG_Y))
1062 ++live_seq;
1063 ++cur_seq;
1065 else
1066 cur_seq = 0;
1068 if (df_regs_ever_live_p (REG_Y+1))
1070 ++live_seq;
1071 ++cur_seq;
1073 else
1074 cur_seq = 0;
1076 else
1078 cur_seq += 2;
1079 live_seq += 2;
1081 return (cur_seq == live_seq) ? live_seq : 0;
1084 /* Obtain the length sequence of insns. */
1087 get_sequence_length (rtx_insn *insns)
1089 rtx_insn *insn;
1090 int length;
1092 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
1093 length += get_attr_length (insn);
1095 return length;
1099 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1102 avr_incoming_return_addr_rtx (void)
1104 /* The return address is at the top of the stack. Note that the push
1105 was via post-decrement, which means the actual address is off by one. */
1106 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1109 /* Helper for expand_prologue. Emit a push of a byte register. */
1111 static void
1112 emit_push_byte (unsigned regno, bool frame_related_p)
1114 rtx mem, reg;
1115 rtx_insn *insn;
1117 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1118 mem = gen_frame_mem (QImode, mem);
1119 reg = gen_rtx_REG (QImode, regno);
1121 insn = emit_insn (gen_rtx_SET (mem, reg));
1122 if (frame_related_p)
1123 RTX_FRAME_RELATED_P (insn) = 1;
1125 cfun->machine->stack_usage++;
1129 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1130 SFR is a MEM representing the memory location of the SFR.
1131 If CLR_P then clear the SFR after the push using zero_reg. */
1133 static void
1134 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1136 rtx_insn *insn;
1138 gcc_assert (MEM_P (sfr));
1140 /* IN __tmp_reg__, IO(SFR) */
1141 insn = emit_move_insn (tmp_reg_rtx, sfr);
1142 if (frame_related_p)
1143 RTX_FRAME_RELATED_P (insn) = 1;
1145 /* PUSH __tmp_reg__ */
1146 emit_push_byte (AVR_TMP_REGNO, frame_related_p);
1148 if (clr_p)
1150 /* OUT IO(SFR), __zero_reg__ */
1151 insn = emit_move_insn (sfr, const0_rtx);
1152 if (frame_related_p)
1153 RTX_FRAME_RELATED_P (insn) = 1;
/* Helper for avr_expand_prologue: establish the stack frame of SIZE bytes
   and save the registers in SET.  With -mcall-prologues (TARGET_CALL_PROLOGUES)
   and enough live registers, a compact call_prologue_saves sequence is used;
   otherwise explicit pushes are emitted and, for SIZE != 0, the shorter of
   two stack-adjustment methods (via frame pointer or via SP) is chosen.  */
1157 static void
1158 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
1160 rtx_insn *insn;
1161 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1162 int live_seq = sequent_regs_live ();
1164 HOST_WIDE_INT size_max
1165 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
1167 bool minimize = (TARGET_CALL_PROLOGUES
1168 && size < size_max
1169 && live_seq
1170 && !isr_p
1171 && !cfun->machine->is_OS_task
1172 && !cfun->machine->is_OS_main
1173 && !AVR_TINY);
1175 if (minimize
1176 && (frame_pointer_needed
1177 || avr_outgoing_args_size() > 8
1178 || (AVR_2_BYTE_PC && live_seq > 6)
1179 || live_seq > 7))
1181 rtx pattern;
1182 int first_reg, reg, offset;
1184 emit_move_insn (gen_rtx_REG (HImode, REG_X),
1185 gen_int_mode (size, HImode));
1187 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
1188 gen_int_mode (live_seq+size, HImode));
1189 insn = emit_insn (pattern);
1190 RTX_FRAME_RELATED_P (insn) = 1;
1192 /* Describe the effect of the unspec_volatile call to prologue_saves.
1193 Note that this formulation assumes that add_reg_note pushes the
1194 notes to the front. Thus we build them in the reverse order of
1195 how we want dwarf2out to process them. */
1197 /* The function does always set frame_pointer_rtx, but whether that
1198 is going to be permanent in the function is frame_pointer_needed. */
1200 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1201 gen_rtx_SET ((frame_pointer_needed
1202 ? frame_pointer_rtx
1203 : stack_pointer_rtx),
1204 plus_constant (Pmode, stack_pointer_rtx,
1205 -(size + live_seq))));
1207 /* Note that live_seq always contains r28+r29, but the other
1208 registers to be saved are all below 18. */
1210 first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);
1212 for (reg = 29, offset = -live_seq + 1;
1213 reg >= first_reg;
1214 reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
1216 rtx m, r;
1218 m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
1219 offset));
1220 r = gen_rtx_REG (QImode, reg);
1221 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
1224 cfun->machine->stack_usage += size + live_seq;
1226 else /* !minimize */
1228 int reg;
1230 for (reg = 0; reg < 32; ++reg)
1231 if (TEST_HARD_REG_BIT (set, reg))
1232 emit_push_byte (reg, true);
1234 if (frame_pointer_needed
1235 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
1237 /* Push frame pointer. Always be consistent about the
1238 ordering of pushes -- epilogue_restores expects the
1239 register pair to be pushed low byte first. */
1241 emit_push_byte (REG_Y, true);
1242 emit_push_byte (REG_Y + 1, true);
1245 if (frame_pointer_needed
1246 && size == 0)
1248 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1249 RTX_FRAME_RELATED_P (insn) = 1;
1252 if (size != 0)
1254 /* Creating a frame can be done by direct manipulation of the
1255 stack or via the frame pointer. These two methods are:
1256 fp = sp
1257 fp -= size
1258 sp = fp
1260 sp -= size
1261 fp = sp (*)
1262 the optimum method depends on function type, stack and
1263 frame size. To avoid a complex logic, both methods are
1264 tested and shortest is selected.
1266 There is also the case where SIZE != 0 and no frame pointer is
1267 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1268 In that case, insn (*) is not needed.
1269 We use the X register as scratch. This is safe because X
1270 is call-clobbered.
1271 In an interrupt routine, the case of SIZE != 0 together with
1272 !frame_pointer_needed can only occur if the function is not a
1273 leaf function and thus X has already been saved. */
1275 int irq_state = -1;
1276 HOST_WIDE_INT size_cfa = size, neg_size;
1277 rtx_insn *fp_plus_insns;
1278 rtx fp, my_fp;
1280 gcc_assert (frame_pointer_needed
1281 || !isr_p
1282 || !crtl->is_leaf);
1284 fp = my_fp = (frame_pointer_needed
1285 ? frame_pointer_rtx
1286 : gen_rtx_REG (Pmode, REG_X));
1288 if (AVR_HAVE_8BIT_SP)
1290 /* The high byte (r29) does not change:
1291 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1293 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1296 /* Cut down size and avoid size = 0 so that we don't run
1297 into ICE like PR52488 in the remainder. */
1299 if (size > size_max)
1301 /* Don't error so that insane code from newlib still compiles
1302 and does not break building newlib. As PR51345 is implemented
1303 now, there are multilib variants with -msp8.
1305 If user wants sanity checks he can use -Wstack-usage=
1306 or similar options.
1308 For CFA we emit the original, non-saturated size so that
1309 the generic machinery is aware of the real stack usage and
1310 will print the above diagnostic as expected. */
1312 size = size_max;
1315 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1316 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1318 /************ Method 1: Adjust frame pointer ************/
1320 start_sequence ();
1322 /* Normally, the dwarf2out frame-related-expr interpreter does
1323 not expect to have the CFA change once the frame pointer is
1324 set up. Thus, we avoid marking the move insn below and
1325 instead indicate that the entire operation is complete after
1326 the frame pointer subtraction is done. */
1328 insn = emit_move_insn (fp, stack_pointer_rtx);
1329 if (frame_pointer_needed)
1331 RTX_FRAME_RELATED_P (insn) = 1;
1332 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1333 gen_rtx_SET (fp, stack_pointer_rtx));
1336 insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
1337 my_fp, neg_size));
1339 if (frame_pointer_needed)
1341 RTX_FRAME_RELATED_P (insn) = 1;
1342 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1343 gen_rtx_SET (fp, plus_constant (Pmode, fp,
1344 -size_cfa)));
1347 /* Copy to stack pointer. Note that since we've already
1348 changed the CFA to the frame pointer this operation
1349 need not be annotated if frame pointer is needed.
1350 Always move through unspec, see PR50063.
1351 For meaning of irq_state see movhi_sp_r insn. */
1353 if (cfun->machine->is_interrupt)
1354 irq_state = 1;
1356 if (TARGET_NO_INTERRUPTS
1357 || cfun->machine->is_signal
1358 || cfun->machine->is_OS_main)
1359 irq_state = 0;
1361 if (AVR_HAVE_8BIT_SP)
1362 irq_state = 2;
1364 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1365 fp, GEN_INT (irq_state)));
1366 if (!frame_pointer_needed)
1368 RTX_FRAME_RELATED_P (insn) = 1;
1369 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1370 gen_rtx_SET (stack_pointer_rtx,
1371 plus_constant (Pmode,
1372 stack_pointer_rtx,
1373 -size_cfa)));
1376 fp_plus_insns = get_insns ();
1377 end_sequence ();
1379 /************ Method 2: Adjust Stack pointer ************/
1381 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1382 can only handle specific offsets. */
1384 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1386 rtx_insn *sp_plus_insns;
1388 start_sequence ();
1390 insn = emit_move_insn (stack_pointer_rtx,
1391 plus_constant (Pmode, stack_pointer_rtx,
1392 -size));
1393 RTX_FRAME_RELATED_P (insn) = 1;
1394 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1395 gen_rtx_SET (stack_pointer_rtx,
1396 plus_constant (Pmode,
1397 stack_pointer_rtx,
1398 -size_cfa)));
1399 if (frame_pointer_needed)
1401 insn = emit_move_insn (fp, stack_pointer_rtx);
1402 RTX_FRAME_RELATED_P (insn) = 1;
1405 sp_plus_insns = get_insns ();
1406 end_sequence ();
1408 /************ Use shortest method ************/
1410 emit_insn (get_sequence_length (sp_plus_insns)
1411 < get_sequence_length (fp_plus_insns)
1412 ? sp_plus_insns
1413 : fp_plus_insns);
1415 else
1417 emit_insn (fp_plus_insns);
1420 cfun->machine->stack_usage += size_cfa;
1421 } /* !minimize && size != 0 */
1422 } /* !minimize */
1426 /* Output function prologue. */
/* Expand the RTL prologue: for ISRs, save SREG and the RAMP registers and
   (re)establish zero_reg; then delegate register saves and frame setup to
   avr_prologue_setup_frame.  Naked functions get no prologue at all.  */
1428 void
1429 avr_expand_prologue (void)
1431 HARD_REG_SET set;
1432 HOST_WIDE_INT size;
1434 size = get_frame_size() + avr_outgoing_args_size();
1436 cfun->machine->stack_usage = 0;
1438 /* Prologue: naked. */
1439 if (cfun->machine->is_naked)
1441 return;
1444 avr_regs_to_save (&set);
1446 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1448 /* Enable interrupts. */
1449 if (cfun->machine->is_interrupt)
1450 emit_insn (gen_enable_interrupt ());
1452 /* Push zero reg. */
1453 emit_push_byte (AVR_ZERO_REGNO, true);
1455 /* Push tmp reg. */
1456 emit_push_byte (AVR_TMP_REGNO, true);
1458 /* Push SREG. */
1459 /* ??? There's no dwarf2 column reserved for SREG. */
1460 emit_push_sfr (sreg_rtx, false, false /* clr */);
1462 /* Clear zero reg. */
1463 emit_move_insn (zero_reg_rtx, const0_rtx);
1465 /* Prevent any attempt to delete the setting of ZERO_REG! */
1466 emit_use (zero_reg_rtx);
1468 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1469 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1471 if (AVR_HAVE_RAMPD)
1472 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1474 if (AVR_HAVE_RAMPX
1475 && TEST_HARD_REG_BIT (set, REG_X)
1476 && TEST_HARD_REG_BIT (set, REG_X + 1))
1478 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1481 if (AVR_HAVE_RAMPY
1482 && (frame_pointer_needed
1483 || (TEST_HARD_REG_BIT (set, REG_Y)
1484 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1486 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1489 if (AVR_HAVE_RAMPZ
1490 && TEST_HARD_REG_BIT (set, REG_Z)
1491 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1493 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1495 } /* is_interrupt is_signal */
1497 avr_prologue_setup_frame (size, set);
1499 if (flag_stack_usage_info)
1500 current_function_static_stack_size = cfun->machine->stack_usage;
1504 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1505 /* Output summary at end of function prologue. */
1507 static void
1508 avr_asm_function_end_prologue (FILE *file)
1510 if (cfun->machine->is_naked)
1512 fputs ("/* prologue: naked */\n", file);
1514 else
1516 if (cfun->machine->is_interrupt)
1518 fputs ("/* prologue: Interrupt */\n", file);
1520 else if (cfun->machine->is_signal)
1522 fputs ("/* prologue: Signal */\n", file);
1524 else
1525 fputs ("/* prologue: function */\n", file);
1528 if (ACCUMULATE_OUTGOING_ARGS)
1529 fprintf (file, "/* outgoing args size = %d */\n",
1530 avr_outgoing_args_size());
1532 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1533 get_frame_size());
1534 fprintf (file, "/* stack size = %d */\n",
1535 cfun->machine->stack_usage);
1536 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1537 usage for offset so that SP + .L__stack_offset = return address. */
1538 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1542 /* Implement `EPILOGUE_USES'. */
1545 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1547 if (reload_completed
1548 && cfun->machine
1549 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1550 return 1;
1551 return 0;
1554 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1556 static void
1557 emit_pop_byte (unsigned regno)
1559 rtx mem, reg;
1561 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1562 mem = gen_frame_mem (QImode, mem);
1563 reg = gen_rtx_REG (QImode, regno);
1565 emit_insn (gen_rtx_SET (reg, mem));
1568 /* Output RTL epilogue. */
/* Expand the RTL epilogue: tear down the frame (mirroring the prologue's
   two-method choice), pop saved registers, restore the RAMP registers and
   SREG for ISRs, and emit the return unless this is a sibcall.  */
1570 void
1571 avr_expand_epilogue (bool sibcall_p)
1573 int reg;
1574 int live_seq;
1575 HARD_REG_SET set;
1576 int minimize;
1577 HOST_WIDE_INT size;
1578 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1580 size = get_frame_size() + avr_outgoing_args_size();
1582 /* epilogue: naked */
1583 if (cfun->machine->is_naked)
1585 gcc_assert (!sibcall_p);
1587 emit_jump_insn (gen_return ());
1588 return;
1591 avr_regs_to_save (&set);
1592 live_seq = sequent_regs_live ();
1594 minimize = (TARGET_CALL_PROLOGUES
1595 && live_seq
1596 && !isr_p
1597 && !cfun->machine->is_OS_task
1598 && !cfun->machine->is_OS_main
1599 && !AVR_TINY);
1601 if (minimize
1602 && (live_seq > 4
1603 || frame_pointer_needed
1604 || size))
1606 /* Get rid of frame. */
1608 if (!frame_pointer_needed)
1610 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1613 if (size)
1615 emit_move_insn (frame_pointer_rtx,
1616 plus_constant (Pmode, frame_pointer_rtx, size));
1619 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1620 return;
1623 if (size)
1625 /* Try two methods to adjust stack and select shortest. */
1627 int irq_state = -1;
1628 rtx fp, my_fp;
1629 rtx_insn *fp_plus_insns;
1630 HOST_WIDE_INT size_max;
1632 gcc_assert (frame_pointer_needed
1633 || !isr_p
1634 || !crtl->is_leaf);
1636 fp = my_fp = (frame_pointer_needed
1637 ? frame_pointer_rtx
1638 : gen_rtx_REG (Pmode, REG_X));
1640 if (AVR_HAVE_8BIT_SP)
1642 /* The high byte (r29) does not change:
1643 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1645 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1648 /* For rationale see comment in prologue generation. */
1650 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1651 if (size > size_max)
1652 size = size_max;
1653 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1655 /********** Method 1: Adjust fp register **********/
1657 start_sequence ();
1659 if (!frame_pointer_needed)
1660 emit_move_insn (fp, stack_pointer_rtx);
1662 emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));
1664 /* Copy to stack pointer. */
1666 if (TARGET_NO_INTERRUPTS)
1667 irq_state = 0;
1669 if (AVR_HAVE_8BIT_SP)
1670 irq_state = 2;
1672 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1673 GEN_INT (irq_state)));
1675 fp_plus_insns = get_insns ();
1676 end_sequence ();
1678 /********** Method 2: Adjust Stack pointer **********/
1680 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1682 rtx_insn *sp_plus_insns;
1684 start_sequence ();
1686 emit_move_insn (stack_pointer_rtx,
1687 plus_constant (Pmode, stack_pointer_rtx, size));
1689 sp_plus_insns = get_insns ();
1690 end_sequence ();
1692 /************ Use shortest method ************/
1694 emit_insn (get_sequence_length (sp_plus_insns)
1695 < get_sequence_length (fp_plus_insns)
1696 ? sp_plus_insns
1697 : fp_plus_insns);
1699 else
1700 emit_insn (fp_plus_insns);
1701 } /* size != 0 */
1703 if (frame_pointer_needed
1704 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1706 /* Restore previous frame_pointer. See avr_expand_prologue for
1707 rationale for not using pophi. */
1709 emit_pop_byte (REG_Y + 1);
1710 emit_pop_byte (REG_Y);
1713 /* Restore used registers. */
1715 for (reg = 31; reg >= 0; --reg)
1716 if (TEST_HARD_REG_BIT (set, reg))
1717 emit_pop_byte (reg);
1719 if (isr_p)
1721 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1722 The conditions to restore them must be the same as in prologue. */
1724 if (AVR_HAVE_RAMPZ
1725 && TEST_HARD_REG_BIT (set, REG_Z)
1726 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1728 emit_pop_byte (TMP_REGNO);
1729 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1732 if (AVR_HAVE_RAMPY
1733 && (frame_pointer_needed
1734 || (TEST_HARD_REG_BIT (set, REG_Y)
1735 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1737 emit_pop_byte (TMP_REGNO);
1738 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1741 if (AVR_HAVE_RAMPX
1742 && TEST_HARD_REG_BIT (set, REG_X)
1743 && TEST_HARD_REG_BIT (set, REG_X + 1))
1745 emit_pop_byte (TMP_REGNO);
1746 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1749 if (AVR_HAVE_RAMPD)
1751 emit_pop_byte (TMP_REGNO);
1752 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1755 /* Restore SREG using tmp_reg as scratch. */
1757 emit_pop_byte (AVR_TMP_REGNO);
1758 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1760 /* Restore tmp REG. */
1761 emit_pop_byte (AVR_TMP_REGNO);
1763 /* Restore zero REG. */
1764 emit_pop_byte (AVR_ZERO_REGNO);
1767 if (!sibcall_p)
1768 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  /* Emit a marker comment into the assembly output.  */
  fputs ("/* epilogue start */\n", file);
}
1781 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1783 static bool
1784 avr_cannot_modify_jumps_p (void)
1787 /* Naked Functions must not have any instructions after
1788 their epilogue, see PR42240 */
1790 if (reload_completed
1791 && cfun->machine
1792 && cfun->machine->is_naked)
1794 return true;
1797 return false;
1801 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1803 static bool
1804 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1806 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1807 This hook just serves to hack around PR rtl-optimization/52543 by
1808 claiming that non-generic addresses were mode-dependent so that
1809 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1810 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1811 generic address space which is not true. */
1813 return !ADDR_SPACE_GENERIC_P (as);
1817 /* Helper function for `avr_legitimate_address_p'. */
1819 static inline bool
1820 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1821 RTX_CODE outer_code, bool strict)
1823 return (REG_P (reg)
1824 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1825 as, outer_code, UNKNOWN)
1826 || (!strict
1827 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1831 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1832 machine for a memory operand of mode MODE. */
1834 static bool
1835 avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1837 bool ok = CONSTANT_ADDRESS_P (x);
1839 switch (GET_CODE (x))
1841 case REG:
1842 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1843 MEM, strict);
/* Reject X as base for wide accesses: X has no displacement addressing.  */
1845 if (strict
1846 && GET_MODE_SIZE (mode) > 4
1847 && REG_X == REGNO (x))
1849 ok = false;
1851 break;
1853 case POST_INC:
1854 case PRE_DEC:
1855 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1856 GET_CODE (x), strict);
1857 break;
1859 case PLUS:
1861 rtx reg = XEXP (x, 0);
1862 rtx op1 = XEXP (x, 1);
/* Only base + non-negative constant displacement is a candidate.  */
1864 if (REG_P (reg)
1865 && CONST_INT_P (op1)
1866 && INTVAL (op1) >= 0)
1868 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1870 if (fit)
1872 ok = (! strict
1873 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1874 PLUS, strict));
1876 if (reg == frame_pointer_rtx
1877 || reg == arg_pointer_rtx)
1879 ok = true;
1882 else if (frame_pointer_needed
1883 && reg == frame_pointer_rtx)
1885 ok = true;
1889 break;
1891 default:
1892 break;
1895 if (AVR_TINY
1896 && CONSTANT_ADDRESS_P (x))
1898 /* avrtiny's load / store instructions only cover addresses 0..0xbf:
1899 IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf. */
1901 ok = (CONST_INT_P (x)
1902 && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
/* Optional -mlog= diagnostics below; no effect on the result.  */
1905 if (avr_log.legitimate_address_p)
1907 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1908 "reload_completed=%d reload_in_progress=%d %s:",
1909 ok, mode, strict, reload_completed, reload_in_progress,
1910 reg_renumber ? "(reg_renumber)" : "");
1912 if (GET_CODE (x) == PLUS
1913 && REG_P (XEXP (x, 0))
1914 && CONST_INT_P (XEXP (x, 1))
1915 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1916 && reg_renumber)
1918 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1919 true_regnum (XEXP (x, 0)));
1922 avr_edump ("\n%r\n", x);
1925 return ok;
1929 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1930 now only a helper for avr_addr_space_legitimize_address. */
1931 /* Attempts to replace X with a valid
1932 memory address for an operand of mode MODE */
1934 static rtx
1935 avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
1937 bool big_offset_p = false;
1939 x = oldx;
1941 if (GET_CODE (oldx) == PLUS
1942 && REG_P (XEXP (oldx, 0)))
1944 if (REG_P (XEXP (oldx, 1)))
1945 x = force_reg (GET_MODE (oldx), oldx);
1946 else if (CONST_INT_P (XEXP (oldx, 1)))
1948 int offs = INTVAL (XEXP (oldx, 1));
1949 if (frame_pointer_rtx != XEXP (oldx, 0)
1950 && offs > MAX_LD_OFFSET (mode))
1952 big_offset_p = true;
1953 x = force_reg (GET_MODE (oldx), oldx);
1958 if (avr_log.legitimize_address)
1960 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1962 if (x != oldx)
1963 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1966 return x;
1970 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1971 /* This will allow register R26/27 to be used where it is no worse than normal
1972 base pointers R28/29 or R30/31. For example, if base offset is greater
1973 than 63 bytes or for R++ or --R addressing. */
/* Returns X when a reload was pushed for it, NULL_RTX to fall back to the
   generic reload machinery.  */
1976 avr_legitimize_reload_address (rtx *px, machine_mode mode,
1977 int opnum, int type, int addr_type,
1978 int ind_levels ATTRIBUTE_UNUSED,
1979 rtx (*mk_memloc)(rtx,int))
1981 rtx x = *px;
1983 if (avr_log.legitimize_reload_address)
1984 avr_edump ("\n%?:%m %r\n", mode, x);
/* Post-inc / pre-dec addressing: reload the pointer into POINTER_REGS.  */
1986 if (1 && (GET_CODE (x) == POST_INC
1987 || GET_CODE (x) == PRE_DEC))
1989 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1990 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1991 opnum, RELOAD_OTHER);
1993 if (avr_log.legitimize_reload_address)
1994 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1995 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1997 return x;
/* Base + positive constant displacement.  */
2000 if (GET_CODE (x) == PLUS
2001 && REG_P (XEXP (x, 0))
2002 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2003 && CONST_INT_P (XEXP (x, 1))
2004 && INTVAL (XEXP (x, 1)) >= 1)
2006 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2008 if (fit)
2010 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2012 int regno = REGNO (XEXP (x, 0));
2013 rtx mem = mk_memloc (x, regno);
/* First reload the inner address, then the memory location itself
   into a base-pointer class register.  */
2015 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2016 POINTER_REGS, Pmode, VOIDmode, 0, 0,
2017 1, (enum reload_type) addr_type);
2019 if (avr_log.legitimize_reload_address)
2020 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2021 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2023 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2024 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2025 opnum, (enum reload_type) type);
2027 if (avr_log.legitimize_reload_address)
2028 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2029 BASE_POINTER_REGS, mem, NULL_RTX);
2031 return x;
/* Displacement does not fit: reload the whole address, unless it is
   off the (permanent) frame pointer.  */
2034 else if (! (frame_pointer_needed
2035 && XEXP (x, 0) == frame_pointer_rtx))
2037 push_reload (x, NULL_RTX, px, NULL,
2038 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2039 opnum, (enum reload_type) type);
2041 if (avr_log.legitimize_reload_address)
2042 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2043 POINTER_REGS, x, NULL_RTX);
2045 return x;
2049 return NULL_RTX;
2053 /* Implement `TARGET_SECONDARY_RELOAD' */
2055 static reg_class_t
2056 avr_secondary_reload (bool in_p, rtx x,
2057 reg_class_t reload_class ATTRIBUTE_UNUSED,
2058 machine_mode mode, secondary_reload_info *sri)
2060 if (in_p
2061 && MEM_P (x)
2062 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
2063 && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
2065 /* For the non-generic 16-bit spaces we need a d-class scratch. */
2067 switch (mode)
2069 default:
2070 gcc_unreachable();
2072 case QImode: sri->icode = CODE_FOR_reload_inqi; break;
2073 case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
2074 case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;
2076 case HImode: sri->icode = CODE_FOR_reload_inhi; break;
2077 case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
2078 case HAmode: sri->icode = CODE_FOR_reload_inha; break;
2079 case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
2080 case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;
2082 case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;
2084 case SImode: sri->icode = CODE_FOR_reload_insi; break;
2085 case SFmode: sri->icode = CODE_FOR_reload_insf; break;
2086 case SQmode: sri->icode = CODE_FOR_reload_insq; break;
2087 case SAmode: sri->icode = CODE_FOR_reload_insa; break;
2088 case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
2089 case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
2093 return NO_REGS;
2097 /* Helper function to print assembler resp. track instruction
2098 sequence lengths. Always return "".
2100 If PLEN == NULL:
2101 Output assembler code from template TPL with operands supplied
2102 by OPERANDS. This is just forwarding to output_asm_insn.
2104 If PLEN != NULL:
2105 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2106 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2107 Don't output anything.
2110 static const char*
2111 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2113 if (NULL == plen)
2115 output_asm_insn (tpl, operands);
2117 else
2119 if (n_words < 0)
2120 *plen = -n_words;
2121 else
2122 *plen += n_words;
2125 return "";
2129 /* Return a pointer register name as a string. */
2131 static const char*
2132 ptrreg_to_str (int regno)
2134 switch (regno)
2136 case REG_X: return "X";
2137 case REG_Y: return "Y";
2138 case REG_Z: return "Z";
2139 default:
2140 output_operand_lossage ("address operand requires constraint for"
2141 " X, Y, or Z register");
2143 return NULL;
2146 /* Return the condition name as a string.
2147 Used in conditional jump constructing */
2149 static const char*
2150 cond_string (enum rtx_code code)
2152 switch (code)
2154 case NE:
2155 return "ne";
2156 case EQ:
2157 return "eq";
2158 case GE:
2159 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2160 return "pl";
2161 else
2162 return "ge";
2163 case LT:
2164 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2165 return "mi";
2166 else
2167 return "lt";
2168 case GEU:
2169 return "sh";
2170 case LTU:
2171 return "lo";
2172 default:
2173 gcc_unreachable ();
2176 return "";
2180 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2181 /* Output ADDR to FILE as address. */
2183 static void
2184 avr_print_operand_address (FILE *file, rtx addr)
2186 switch (GET_CODE (addr))
2188 case REG:
2189 fprintf (file, ptrreg_to_str (REGNO (addr)));
2190 break;
2192 case PRE_DEC:
2193 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2194 break;
2196 case POST_INC:
2197 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2198 break;
2200 default:
2201 if (CONSTANT_ADDRESS_P (addr)
2202 && text_segment_operand (addr, VOIDmode))
2204 rtx x = addr;
2205 if (GET_CODE (x) == CONST)
2206 x = XEXP (x, 0);
2207 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2209 /* Assembler gs() will implant word address. Make offset
2210 a byte offset inside gs() for assembler. This is
2211 needed because the more logical (constant+gs(sym)) is not
2212 accepted by gas. For 128K and smaller devices this is ok.
2213 For large devices it will create a trampoline to offset
2214 from symbol which may not be what the user really wanted. */
2216 fprintf (file, "gs(");
2217 output_addr_const (file, XEXP (x,0));
2218 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2219 2 * INTVAL (XEXP (x, 1)));
2220 if (AVR_3_BYTE_PC)
2221 if (warning (0, "pointer offset from symbol maybe incorrect"))
2223 output_addr_const (stderr, addr);
2224 fprintf(stderr,"\n");
2227 else
2229 fprintf (file, "gs(");
2230 output_addr_const (file, addr);
2231 fprintf (file, ")");
2234 else
2235 output_addr_const (file, addr);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* The only punctuation %-codes the backend accepts are '~' and '!'
   (both handled in avr_print_operand).  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;
    default:
      return false;
    }
}
2249 /* Implement `TARGET_PRINT_OPERAND'. */
2250 /* Output X as assembler operand to file FILE.
2251 For a description of supported %-codes, see top of avr.md. */
2253 static void
2254 avr_print_operand (FILE *file, rtx x, int code)
2256 int abcd = 0, ef = 0, ij = 0;
/* A..D, E/F, I/J select a byte offset within a multi-byte operand.  */
2258 if (code >= 'A' && code <= 'D')
2259 abcd = code - 'A';
2260 else if (code == 'E' || code == 'F')
2261 ef = code - 'E';
2262 else if (code == 'I' || code == 'J')
2263 ij = code - 'I';
2265 if (code == '~')
2267 if (!AVR_HAVE_JMP_CALL)
2268 fputc ('r', file);
2270 else if (code == '!')
2272 if (AVR_HAVE_EIJMP_EICALL)
2273 fputc ('e', file);
2275 else if (code == 't'
2276 || code == 'T')
/* %T stashes a register operand; the following %t / CONST_INT pairs it
   with a bit position.  State is carried in function-local statics.  */
2278 static int t_regno = -1;
2279 static int t_nbits = -1;
2281 if (REG_P (x) && t_regno < 0 && code == 'T')
2283 t_regno = REGNO (x);
2284 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2286 else if (CONST_INT_P (x) && t_regno >= 0
2287 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2289 int bpos = INTVAL (x);
2291 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2292 if (code == 'T')
2293 fprintf (file, ",%d", bpos % 8);
2295 t_regno = -1;
2297 else
2298 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2300 else if (code == 'E' || code == 'F')
2302 rtx op = XEXP(x, 0);
/* NOTE(review): reg_names[] entries are passed as the fprintf format
   here and below (2308, 2317); prefer fputs to silence
   -Wformat-security.  */
2303 fprintf (file, reg_names[REGNO (op) + ef]);
2305 else if (code == 'I' || code == 'J')
2307 rtx op = XEXP(XEXP(x, 0), 0);
2308 fprintf (file, reg_names[REGNO (op) + ij]);
2310 else if (REG_P (x))
2312 if (x == zero_reg_rtx)
2313 fprintf (file, "__zero_reg__");
2314 else if (code == 'r' && REGNO (x) < 32)
2315 fprintf (file, "%d", (int) REGNO (x));
2316 else
2317 fprintf (file, reg_names[REGNO (x) + abcd]);
2319 else if (CONST_INT_P (x))
2321 HOST_WIDE_INT ival = INTVAL (x);
2323 if ('i' != code)
2324 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2325 else if (low_io_address_operand (x, VOIDmode)
2326 || high_io_address_operand (x, VOIDmode))
/* %i on a known I/O address prints its symbolic SFR name.  */
2328 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2329 fprintf (file, "__RAMPZ__");
2330 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2331 fprintf (file, "__RAMPY__");
2332 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2333 fprintf (file, "__RAMPX__");
2334 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2335 fprintf (file, "__RAMPD__");
2336 else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
2337 fprintf (file, "__CCP__");
2338 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2339 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2340 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2341 else
2343 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2344 ival - avr_arch->sfr_offset);
2347 else
2348 fatal_insn ("bad address, not an I/O address:", x);
2350 else if (MEM_P (x))
2352 rtx addr = XEXP (x, 0);
2354 if (code == 'm')
2356 if (!CONSTANT_P (addr))
2357 fatal_insn ("bad address, not a constant:", addr);
2358 /* Assembler template with m-code is data - not progmem section */
2359 if (text_segment_operand (addr, VOIDmode))
2360 if (warning (0, "accessing data memory with"
2361 " program memory address"))
2363 output_addr_const (stderr, addr);
2364 fprintf(stderr,"\n");
2366 output_addr_const (file, addr);
2368 else if (code == 'i')
2370 avr_print_operand (file, addr, 'i');
2372 else if (code == 'o')
2374 if (GET_CODE (addr) != PLUS)
2375 fatal_insn ("bad address, not (reg+disp):", addr);
2377 avr_print_operand (file, XEXP (addr, 1), 0);
2379 else if (code == 'b')
2381 if (GET_CODE (addr) != PLUS)
2382 fatal_insn ("bad address, not (reg+disp):", addr);
2384 avr_print_operand_address (file, XEXP (addr, 0));
2386 else if (code == 'p' || code == 'r')
2388 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2389 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2391 if (code == 'p')
2392 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2393 else
2394 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2396 else if (GET_CODE (addr) == PLUS)
2398 avr_print_operand_address (file, XEXP (addr,0));
2399 if (REGNO (XEXP (addr, 0)) == REG_X)
2400 fatal_insn ("internal compiler error. Bad address:"
2401 ,addr);
2402 fputc ('+', file);
2403 avr_print_operand (file, XEXP (addr,1), code);
2405 else
2406 avr_print_operand_address (file, addr);
2408 else if (code == 'i')
2410 if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
2411 avr_print_operand_address
2412 (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
2413 else
2414 fatal_insn ("bad address, not an I/O address:", x);
2416 else if (code == 'x')
2418 /* Constant progmem address - like used in jmp or call */
2419 if (0 == text_segment_operand (x, VOIDmode))
2420 if (warning (0, "accessing program memory"
2421 " with data memory address"))
2423 output_addr_const (stderr, x);
2424 fprintf(stderr,"\n");
2426 /* Use normal symbol for direct address no linker trampoline needed */
2427 output_addr_const (file, x);
2429 else if (CONST_FIXED_P (x))
2431 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2432 if (code != 0)
2433 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2434 code);
2435 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2437 else if (GET_CODE (x) == CONST_DOUBLE)
2439 long val;
2440 REAL_VALUE_TYPE rv;
2441 if (GET_MODE (x) != SFmode)
2442 fatal_insn ("internal compiler error. Unknown mode:", x);
2443 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2444 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2445 fprintf (file, "0x%lx", val);
2447 else if (GET_CODE (x) == CONST_STRING)
2448 fputs (XSTR (x, 0), file);
2449 else if (code == 'j')
2450 fputs (cond_string (GET_CODE (x)), file);
2451 else if (code == 'k')
2452 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2453 else
2454 avr_print_operand_address (file, x);
2458 /* Worker function for `NOTICE_UPDATE_CC'. */
2459 /* Update the condition code in the INSN. */
2461 void
2462 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
2464 rtx set;
2465 enum attr_cc cc = get_attr_cc (insn);
2467 switch (cc)
2469 default:
2470 break;
2472 case CC_PLUS:
2473 case CC_LDI:
2475 rtx *op = recog_data.operand;
2476 int len_dummy, icc;
2478 /* Extract insn's operands. */
2479 extract_constrain_insn_cached (insn);
2481 switch (cc)
2483 default:
2484 gcc_unreachable();
2486 case CC_PLUS:
2487 avr_out_plus (insn, op, &len_dummy, &icc);
2488 cc = (enum attr_cc) icc;
2489 break;
2491 case CC_LDI:
2493 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2494 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2495 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2496 ? CC_CLOBBER
2497 /* Any other "r,rL" combination does not alter cc0. */
2498 : CC_NONE;
2500 break;
2501 } /* inner switch */
2503 break;
2505 } /* outer swicth */
2507 switch (cc)
2509 default:
2510 /* Special values like CC_OUT_PLUS from above have been
2511 mapped to "standard" CC_* values so we never come here. */
2513 gcc_unreachable();
2514 break;
2516 case CC_NONE:
2517 /* Insn does not affect CC at all. */
2518 break;
2520 case CC_SET_N:
2521 CC_STATUS_INIT;
2522 break;
2524 case CC_SET_ZN:
2525 set = single_set (insn);
2526 CC_STATUS_INIT;
2527 if (set)
2529 cc_status.flags |= CC_NO_OVERFLOW;
2530 cc_status.value1 = SET_DEST (set);
2532 break;
2534 case CC_SET_VZN:
2535 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2536 of this combination, cf. also PR61055. */
2537 CC_STATUS_INIT;
2538 break;
2540 case CC_SET_CZN:
2541 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2542 The V flag may or may not be known but that's ok because
2543 alter_cond will change tests to use EQ/NE. */
2544 set = single_set (insn);
2545 CC_STATUS_INIT;
2546 if (set)
2548 cc_status.value1 = SET_DEST (set);
2549 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2551 break;
2553 case CC_COMPARE:
2554 set = single_set (insn);
2555 CC_STATUS_INIT;
2556 if (set)
2557 cc_status.value1 = SET_SRC (set);
2558 break;
2560 case CC_CLOBBER:
2561 /* Insn doesn't leave CC in a usable state. */
2562 CC_STATUS_INIT;
2563 break;
2567 /* Choose mode for jump insn:
2568 1 - relative jump in range -63 <= x <= 62 ;
2569 2 - relative jump in range -2046 <= x <= 2045 ;
2570 3 - absolute jump (only for ATmega[16]03). */
2573 avr_jump_mode (rtx x, rtx_insn *insn)
2575 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2576 ? XEXP (x, 0) : x));
2577 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2578 int jump_distance = cur_addr - dest_addr;
2580 if (-63 <= jump_distance && jump_distance <= 62)
2581 return 1;
2582 else if (-2046 <= jump_distance && jump_distance <= 2045)
2583 return 2;
2584 else if (AVR_HAVE_JMP_CALL)
2585 return 3;
2587 return 2;
2590 /* Return an AVR condition jump commands.
2591 X is a comparison RTX.
2592 LEN is a number returned by avr_jump_mode function.
2593 If REVERSE nonzero then condition code in X must be reversed. */
2595 const char*
2596 ret_cond_branch (rtx x, int len, int reverse)
2598 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2600 switch (cond)
2602 case GT:
2603 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2604 return (len == 1 ? ("breq .+2" CR_TAB
2605 "brpl %0") :
2606 len == 2 ? ("breq .+4" CR_TAB
2607 "brmi .+2" CR_TAB
2608 "rjmp %0") :
2609 ("breq .+6" CR_TAB
2610 "brmi .+4" CR_TAB
2611 "jmp %0"));
2613 else
2614 return (len == 1 ? ("breq .+2" CR_TAB
2615 "brge %0") :
2616 len == 2 ? ("breq .+4" CR_TAB
2617 "brlt .+2" CR_TAB
2618 "rjmp %0") :
2619 ("breq .+6" CR_TAB
2620 "brlt .+4" CR_TAB
2621 "jmp %0"));
2622 case GTU:
2623 return (len == 1 ? ("breq .+2" CR_TAB
2624 "brsh %0") :
2625 len == 2 ? ("breq .+4" CR_TAB
2626 "brlo .+2" CR_TAB
2627 "rjmp %0") :
2628 ("breq .+6" CR_TAB
2629 "brlo .+4" CR_TAB
2630 "jmp %0"));
2631 case LE:
2632 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2633 return (len == 1 ? ("breq %0" CR_TAB
2634 "brmi %0") :
2635 len == 2 ? ("breq .+2" CR_TAB
2636 "brpl .+2" CR_TAB
2637 "rjmp %0") :
2638 ("breq .+2" CR_TAB
2639 "brpl .+4" CR_TAB
2640 "jmp %0"));
2641 else
2642 return (len == 1 ? ("breq %0" CR_TAB
2643 "brlt %0") :
2644 len == 2 ? ("breq .+2" CR_TAB
2645 "brge .+2" CR_TAB
2646 "rjmp %0") :
2647 ("breq .+2" CR_TAB
2648 "brge .+4" CR_TAB
2649 "jmp %0"));
2650 case LEU:
2651 return (len == 1 ? ("breq %0" CR_TAB
2652 "brlo %0") :
2653 len == 2 ? ("breq .+2" CR_TAB
2654 "brsh .+2" CR_TAB
2655 "rjmp %0") :
2656 ("breq .+2" CR_TAB
2657 "brsh .+4" CR_TAB
2658 "jmp %0"));
2659 default:
2660 if (reverse)
2662 switch (len)
2664 case 1:
2665 return "br%k1 %0";
2666 case 2:
2667 return ("br%j1 .+2" CR_TAB
2668 "rjmp %0");
2669 default:
2670 return ("br%j1 .+4" CR_TAB
2671 "jmp %0");
2674 else
2676 switch (len)
2678 case 1:
2679 return "br%j1 %0";
2680 case 2:
2681 return ("br%k1 .+2" CR_TAB
2682 "rjmp %0");
2683 default:
2684 return ("br%k1 .+4" CR_TAB
2685 "jmp %0");
2689 return "";
2693 /* Worker function for `FINAL_PRESCAN_INSN'. */
2694 /* Output insn cost for next insn. */
2696 void
2697 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
2698 int num_operands ATTRIBUTE_UNUSED)
2700 if (avr_log.rtx_costs)
2702 rtx set = single_set (insn);
2704 if (set)
2705 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2706 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2707 else
2708 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2709 rtx_cost (PATTERN (insn), INSN, 0,
2710 optimize_insn_for_speed_p()));
2714 /* Return 0 if undefined, 1 if always true or always false. */
2717 avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
2719 unsigned int max = (mode == QImode ? 0xff :
2720 mode == HImode ? 0xffff :
2721 mode == PSImode ? 0xffffff :
2722 mode == SImode ? 0xffffffff : 0);
2723 if (max && op && CONST_INT_P (x))
2725 if (unsigned_condition (op) != op)
2726 max >>= 1;
2728 if (max != (INTVAL (x) & max)
2729 && INTVAL (x) != 0xff)
2730 return 1;
2732 return 0;
2736 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2737 /* Returns nonzero if REGNO is the number of a hard
2738 register in which function arguments are sometimes passed. */
2741 avr_function_arg_regno_p(int r)
2743 return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
2747 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2748 /* Initializing the variable cum for the state at the beginning
2749 of the argument list. */
2751 void
2752 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2753 tree fndecl ATTRIBUTE_UNUSED)
2755 cum->nregs = AVR_TINY ? 6 : 18;
2756 cum->regno = FIRST_CUM_REG;
2757 if (!libname && stdarg_p (fntype))
2758 cum->nregs = 0;
2760 /* Assume the calle may be tail called */
2762 cfun->machine->sibcall_fails = 0;
2765 /* Returns the number of registers to allocate for a function argument. */
2767 static int
2768 avr_num_arg_regs (machine_mode mode, const_tree type)
2770 int size;
2772 if (mode == BLKmode)
2773 size = int_size_in_bytes (type);
2774 else
2775 size = GET_MODE_SIZE (mode);
2777 /* Align all function arguments to start in even-numbered registers.
2778 Odd-sized arguments leave holes above them. */
2780 return (size + 1) & ~1;
2784 /* Implement `TARGET_FUNCTION_ARG'. */
2785 /* Controls whether a function argument is passed
2786 in a register, and which register. */
2788 static rtx
2789 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
2790 const_tree type, bool named ATTRIBUTE_UNUSED)
2792 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2793 int bytes = avr_num_arg_regs (mode, type);
2795 if (cum->nregs && bytes <= cum->nregs)
2796 return gen_rtx_REG (mode, cum->regno - bytes);
2798 return NULL_RTX;
2802 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2803 /* Update the summarizer variable CUM to advance past an argument
2804 in the argument list. */
2806 static void
2807 avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2808 const_tree type, bool named ATTRIBUTE_UNUSED)
2810 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2811 int bytes = avr_num_arg_regs (mode, type);
2813 cum->nregs -= bytes;
2814 cum->regno -= bytes;
2816 /* A parameter is being passed in a call-saved register. As the original
2817 contents of these regs has to be restored before leaving the function,
2818 a function must not pass arguments in call-saved regs in order to get
2819 tail-called. */
2821 if (cum->regno >= 8
2822 && cum->nregs >= 0
2823 && !call_used_regs[cum->regno])
2825 /* FIXME: We ship info on failing tail-call in struct machine_function.
2826 This uses internals of calls.c:expand_call() and the way args_so_far
2827 is used. targetm.function_ok_for_sibcall() needs to be extended to
2828 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2829 dependent so that such an extension is not wanted. */
2831 cfun->machine->sibcall_fails = 1;
2834 /* Test if all registers needed by the ABI are actually available. If the
2835 user has fixed a GPR needed to pass an argument, an (implicit) function
2836 call will clobber that fixed register. See PR45099 for an example. */
2838 if (cum->regno >= 8
2839 && cum->nregs >= 0)
2841 int regno;
2843 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2844 if (fixed_regs[regno])
2845 warning (0, "fixed register %s used to pass parameter to function",
2846 reg_names[regno]);
2849 if (cum->nregs <= 0)
2851 cum->nregs = 0;
2852 cum->regno = FIRST_CUM_REG;
2856 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2857 /* Decide whether we can make a sibling call to a function. DECL is the
2858 declaration of the function being targeted by the call and EXP is the
2859 CALL_EXPR representing the call. */
2861 static bool
2862 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2864 tree fntype_callee;
2866 /* Tail-calling must fail if callee-saved regs are used to pass
2867 function args. We must not tail-call when `epilogue_restores'
2868 is used. Unfortunately, we cannot tell at this point if that
2869 actually will happen or not, and we cannot step back from
2870 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2872 if (cfun->machine->sibcall_fails
2873 || TARGET_CALL_PROLOGUES)
2875 return false;
2878 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2880 if (decl_callee)
2882 decl_callee = TREE_TYPE (decl_callee);
2884 else
2886 decl_callee = fntype_callee;
2888 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2889 && METHOD_TYPE != TREE_CODE (decl_callee))
2891 decl_callee = TREE_TYPE (decl_callee);
2895 /* Ensure that caller and callee have compatible epilogues */
2897 if (cfun->machine->is_interrupt
2898 || cfun->machine->is_signal
2899 || cfun->machine->is_naked
2900 || avr_naked_function_p (decl_callee)
2901 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2902 || (avr_OS_task_function_p (decl_callee)
2903 != cfun->machine->is_OS_task)
2904 || (avr_OS_main_function_p (decl_callee)
2905 != cfun->machine->is_OS_main))
2907 return false;
2910 return true;
2913 /***********************************************************************
2914 Functions for outputting various mov's for a various modes
2915 ************************************************************************/
2917 /* Return true if a value of mode MODE is read from flash by
2918 __load_* function from libgcc. */
2920 bool
2921 avr_load_libgcc_p (rtx op)
2923 machine_mode mode = GET_MODE (op);
2924 int n_bytes = GET_MODE_SIZE (mode);
2926 return (n_bytes > 2
2927 && !AVR_HAVE_LPMX
2928 && avr_mem_flash_p (op));
2931 /* Return true if a value of mode MODE is read by __xload_* function. */
2933 bool
2934 avr_xload_libgcc_p (machine_mode mode)
2936 int n_bytes = GET_MODE_SIZE (mode);
2938 return (n_bytes > 1
2939 || avr_n_flash > 1);
2943 /* Fixme: This is a hack because secondary reloads don't works as expected.
2945 Find an unused d-register to be used as scratch in INSN.
2946 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2947 is a register, skip all possible return values that overlap EXCLUDE.
2948 The policy for the returned register is similar to that of
2949 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2950 of INSN.
2952 Return a QImode d-register or NULL_RTX if nothing found. */
2954 static rtx
2955 avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
2957 int regno;
2958 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2959 || avr_signal_function_p (current_function_decl));
2961 for (regno = 16; regno < 32; regno++)
2963 rtx reg = all_regs_rtx[regno];
2965 if ((exclude
2966 && reg_overlap_mentioned_p (exclude, reg))
2967 || fixed_regs[regno])
2969 continue;
2972 /* Try non-live register */
2974 if (!df_regs_ever_live_p (regno)
2975 && (TREE_THIS_VOLATILE (current_function_decl)
2976 || cfun->machine->is_OS_task
2977 || cfun->machine->is_OS_main
2978 || (!isr_p && call_used_regs[regno])))
2980 return reg;
2983 /* Any live register can be used if it is unused after.
2984 Prologue/epilogue will care for it as needed. */
2986 if (df_regs_ever_live_p (regno)
2987 && reg_unused_after (insn, reg))
2989 return reg;
2993 return NULL_RTX;
2997 /* Helper function for the next function in the case where only restricted
2998 version of LPM instruction is available. */
3000 static const char*
3001 avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
3003 rtx dest = xop[0];
3004 rtx addr = xop[1];
3005 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3006 int regno_dest;
3008 regno_dest = REGNO (dest);
3010 /* The implicit target register of LPM. */
3011 xop[3] = lpm_reg_rtx;
3013 switch (GET_CODE (addr))
3015 default:
3016 gcc_unreachable();
3018 case REG:
3020 gcc_assert (REG_Z == REGNO (addr));
3022 switch (n_bytes)
3024 default:
3025 gcc_unreachable();
3027 case 1:
3028 avr_asm_len ("%4lpm", xop, plen, 1);
3030 if (regno_dest != LPM_REGNO)
3031 avr_asm_len ("mov %0,%3", xop, plen, 1);
3033 return "";
3035 case 2:
3036 if (REGNO (dest) == REG_Z)
3037 return avr_asm_len ("%4lpm" CR_TAB
3038 "push %3" CR_TAB
3039 "adiw %2,1" CR_TAB
3040 "%4lpm" CR_TAB
3041 "mov %B0,%3" CR_TAB
3042 "pop %A0", xop, plen, 6);
3044 avr_asm_len ("%4lpm" CR_TAB
3045 "mov %A0,%3" CR_TAB
3046 "adiw %2,1" CR_TAB
3047 "%4lpm" CR_TAB
3048 "mov %B0,%3", xop, plen, 5);
3050 if (!reg_unused_after (insn, addr))
3051 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3053 break; /* 2 */
3056 break; /* REG */
3058 case POST_INC:
3060 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3061 && n_bytes <= 4);
3063 if (regno_dest == LPM_REGNO)
3064 avr_asm_len ("%4lpm" CR_TAB
3065 "adiw %2,1", xop, plen, 2);
3066 else
3067 avr_asm_len ("%4lpm" CR_TAB
3068 "mov %A0,%3" CR_TAB
3069 "adiw %2,1", xop, plen, 3);
3071 if (n_bytes >= 2)
3072 avr_asm_len ("%4lpm" CR_TAB
3073 "mov %B0,%3" CR_TAB
3074 "adiw %2,1", xop, plen, 3);
3076 if (n_bytes >= 3)
3077 avr_asm_len ("%4lpm" CR_TAB
3078 "mov %C0,%3" CR_TAB
3079 "adiw %2,1", xop, plen, 3);
3081 if (n_bytes >= 4)
3082 avr_asm_len ("%4lpm" CR_TAB
3083 "mov %D0,%3" CR_TAB
3084 "adiw %2,1", xop, plen, 3);
3086 break; /* POST_INC */
3088 } /* switch CODE (addr) */
3090 return "";
3094 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
3095 OP[1] in AS1 to register OP[0].
3096 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3097 Return "". */
3099 const char*
3100 avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
3102 rtx xop[7];
3103 rtx dest = op[0];
3104 rtx src = SET_SRC (single_set (insn));
3105 rtx addr;
3106 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
3107 int segment;
3108 RTX_CODE code;
3109 addr_space_t as = MEM_ADDR_SPACE (src);
3111 if (plen)
3112 *plen = 0;
3114 if (MEM_P (dest))
3116 warning (0, "writing to address space %qs not supported",
3117 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
3119 return "";
3122 addr = XEXP (src, 0);
3123 code = GET_CODE (addr);
3125 gcc_assert (REG_P (dest));
3126 gcc_assert (REG == code || POST_INC == code);
3128 xop[0] = dest;
3129 xop[1] = addr;
3130 xop[2] = lpm_addr_reg_rtx;
3131 xop[4] = xstring_empty;
3132 xop[5] = tmp_reg_rtx;
3133 xop[6] = XEXP (rampz_rtx, 0);
3135 segment = avr_addrspace[as].segment;
3137 /* Set RAMPZ as needed. */
3139 if (segment)
3141 xop[4] = GEN_INT (segment);
3142 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
3144 if (xop[3] != NULL_RTX)
3146 avr_asm_len ("ldi %3,%4" CR_TAB
3147 "out %i6,%3", xop, plen, 2);
3149 else if (segment == 1)
3151 avr_asm_len ("clr %5" CR_TAB
3152 "inc %5" CR_TAB
3153 "out %i6,%5", xop, plen, 3);
3155 else
3157 avr_asm_len ("mov %5,%2" CR_TAB
3158 "ldi %2,%4" CR_TAB
3159 "out %i6,%2" CR_TAB
3160 "mov %2,%5", xop, plen, 4);
3163 xop[4] = xstring_e;
3165 if (!AVR_HAVE_ELPMX)
3166 return avr_out_lpm_no_lpmx (insn, xop, plen);
3168 else if (!AVR_HAVE_LPMX)
3170 return avr_out_lpm_no_lpmx (insn, xop, plen);
3173 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3175 switch (GET_CODE (addr))
3177 default:
3178 gcc_unreachable();
3180 case REG:
3182 gcc_assert (REG_Z == REGNO (addr));
3184 switch (n_bytes)
3186 default:
3187 gcc_unreachable();
3189 case 1:
3190 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3192 case 2:
3193 if (REGNO (dest) == REG_Z)
3194 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3195 "%4lpm %B0,%a2" CR_TAB
3196 "mov %A0,%5", xop, plen, 3);
3197 else
3199 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3200 "%4lpm %B0,%a2", xop, plen, 2);
3202 if (!reg_unused_after (insn, addr))
3203 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3206 break; /* 2 */
3208 case 3:
3210 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3211 "%4lpm %B0,%a2+" CR_TAB
3212 "%4lpm %C0,%a2", xop, plen, 3);
3214 if (!reg_unused_after (insn, addr))
3215 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3217 break; /* 3 */
3219 case 4:
3221 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3222 "%4lpm %B0,%a2+", xop, plen, 2);
3224 if (REGNO (dest) == REG_Z - 2)
3225 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3226 "%4lpm %C0,%a2" CR_TAB
3227 "mov %D0,%5", xop, plen, 3);
3228 else
3230 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3231 "%4lpm %D0,%a2", xop, plen, 2);
3233 if (!reg_unused_after (insn, addr))
3234 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3237 break; /* 4 */
3238 } /* n_bytes */
3240 break; /* REG */
3242 case POST_INC:
3244 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3245 && n_bytes <= 4);
3247 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3248 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3249 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3250 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3252 break; /* POST_INC */
3254 } /* switch CODE (addr) */
3256 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3258 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3260 xop[0] = zero_reg_rtx;
3261 avr_asm_len ("out %i6,%0", xop, plen, 1);
3264 return "";
3268 /* Worker function for xload_8 insn. */
3270 const char*
3271 avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3273 rtx xop[4];
3275 xop[0] = op[0];
3276 xop[1] = op[1];
3277 xop[2] = lpm_addr_reg_rtx;
3278 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3280 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3282 avr_asm_len ("sbrc %1,7" CR_TAB
3283 "ld %3,%a2", xop, plen, 2);
3285 if (REGNO (xop[0]) != REGNO (xop[3]))
3286 avr_asm_len ("mov %0,%3", xop, plen, 1);
3288 return "";
3292 const char*
3293 output_movqi (rtx_insn *insn, rtx operands[], int *plen)
3295 rtx dest = operands[0];
3296 rtx src = operands[1];
3298 if (avr_mem_flash_p (src)
3299 || avr_mem_flash_p (dest))
3301 return avr_out_lpm (insn, operands, plen);
3304 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3306 if (REG_P (dest))
3308 if (REG_P (src)) /* mov r,r */
3310 if (test_hard_reg_class (STACK_REG, dest))
3311 return avr_asm_len ("out %0,%1", operands, plen, -1);
3312 else if (test_hard_reg_class (STACK_REG, src))
3313 return avr_asm_len ("in %0,%1", operands, plen, -1);
3315 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3317 else if (CONSTANT_P (src))
3319 output_reload_in_const (operands, NULL_RTX, plen, false);
3320 return "";
3322 else if (MEM_P (src))
3323 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3325 else if (MEM_P (dest))
3327 rtx xop[2];
3329 xop[0] = dest;
3330 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3332 return out_movqi_mr_r (insn, xop, plen);
3335 return "";
3339 const char *
3340 output_movhi (rtx_insn *insn, rtx xop[], int *plen)
3342 rtx dest = xop[0];
3343 rtx src = xop[1];
3345 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3347 if (avr_mem_flash_p (src)
3348 || avr_mem_flash_p (dest))
3350 return avr_out_lpm (insn, xop, plen);
3353 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3355 if (REG_P (dest))
3357 if (REG_P (src)) /* mov r,r */
3359 if (test_hard_reg_class (STACK_REG, dest))
3361 if (AVR_HAVE_8BIT_SP)
3362 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3364 if (AVR_XMEGA)
3365 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3366 "out __SP_H__,%B1", xop, plen, -2);
3368 /* Use simple load of SP if no interrupts are used. */
3370 return TARGET_NO_INTERRUPTS
3371 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3372 "out __SP_L__,%A1", xop, plen, -2)
3373 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3374 "cli" CR_TAB
3375 "out __SP_H__,%B1" CR_TAB
3376 "out __SREG__,__tmp_reg__" CR_TAB
3377 "out __SP_L__,%A1", xop, plen, -5);
3379 else if (test_hard_reg_class (STACK_REG, src))
3381 return !AVR_HAVE_SPH
3382 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3383 "clr %B0", xop, plen, -2)
3385 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3386 "in %B0,__SP_H__", xop, plen, -2);
3389 return AVR_HAVE_MOVW
3390 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3392 : avr_asm_len ("mov %A0,%A1" CR_TAB
3393 "mov %B0,%B1", xop, plen, -2);
3394 } /* REG_P (src) */
3395 else if (CONSTANT_P (src))
3397 return output_reload_inhi (xop, NULL, plen);
3399 else if (MEM_P (src))
3401 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3404 else if (MEM_P (dest))
3406 rtx xop[2];
3408 xop[0] = dest;
3409 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3411 return out_movhi_mr_r (insn, xop, plen);
3414 fatal_insn ("invalid insn:", insn);
3416 return "";
3420 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
3422 static const char*
3423 avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
3425 rtx dest = op[0];
3426 rtx src = op[1];
3427 rtx x = XEXP (src, 0);
3429 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3430 "ld %0,%b1" , op, plen, -3);
3432 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3433 && !reg_unused_after (insn, XEXP (x,0)))
3434 avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);
3436 return "";
3439 static const char*
3440 out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3442 rtx dest = op[0];
3443 rtx src = op[1];
3444 rtx x = XEXP (src, 0);
3446 if (CONSTANT_ADDRESS_P (x))
3448 int n_words = AVR_TINY ? 1 : 2;
3449 return optimize > 0 && io_address_operand (x, QImode)
3450 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3451 : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
3454 if (GET_CODE (x) == PLUS
3455 && REG_P (XEXP (x, 0))
3456 && CONST_INT_P (XEXP (x, 1)))
3458 /* memory access by reg+disp */
3460 int disp = INTVAL (XEXP (x, 1));
3462 if (AVR_TINY)
3463 return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);
3465 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3467 if (REGNO (XEXP (x, 0)) != REG_Y)
3468 fatal_insn ("incorrect insn:",insn);
3470 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3471 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3472 "ldd %0,Y+63" CR_TAB
3473 "sbiw r28,%o1-63", op, plen, -3);
3475 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3476 "sbci r29,hi8(-%o1)" CR_TAB
3477 "ld %0,Y" CR_TAB
3478 "subi r28,lo8(%o1)" CR_TAB
3479 "sbci r29,hi8(%o1)", op, plen, -5);
3481 else if (REGNO (XEXP (x, 0)) == REG_X)
3483 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3484 it but I have this situation with extremal optimizing options. */
3486 avr_asm_len ("adiw r26,%o1" CR_TAB
3487 "ld %0,X", op, plen, -2);
3489 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3490 && !reg_unused_after (insn, XEXP (x,0)))
3492 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3495 return "";
3498 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3501 return avr_asm_len ("ld %0,%1", op, plen, -1);
3505 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3507 static const char*
3508 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
3510 rtx dest = op[0];
3511 rtx src = op[1];
3512 rtx base = XEXP (src, 0);
3514 int reg_dest = true_regnum (dest);
3515 int reg_base = true_regnum (base);
3517 if (reg_dest == reg_base) /* R = (R) */
3518 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3519 "ld %B0,%1" CR_TAB
3520 "mov %A0,__tmp_reg__", op, plen, -3);
3522 return avr_asm_len ("ld %A0,%1" CR_TAB
3523 TINY_ADIW (%E1, %F1, 1) CR_TAB
3524 "ld %B0,%1" CR_TAB
3525 TINY_SBIW (%E1, %F1, 1), op, plen, -6);
3529 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3531 static const char*
3532 avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
3534 rtx dest = op[0];
3535 rtx src = op[1];
3536 rtx base = XEXP (src, 0);
3538 int reg_dest = true_regnum (dest);
3539 int reg_base = true_regnum (XEXP (base, 0));
3541 if (reg_base == reg_dest)
3543 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3544 "ld __tmp_reg__,%b1+" CR_TAB
3545 "ld %B0,%b1" CR_TAB
3546 "mov %A0,__tmp_reg__", op, plen, -5);
3548 else
3550 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
3551 "ld %A0,%b1+" CR_TAB
3552 "ld %B0,%b1" CR_TAB
3553 TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
3558 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3560 static const char*
3561 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
3563 int mem_volatile_p = 0;
3564 rtx dest = op[0];
3565 rtx src = op[1];
3566 rtx base = XEXP (src, 0);
3568 /* "volatile" forces reading low byte first, even if less efficient,
3569 for correct operation with 16-bit I/O registers. */
3570 mem_volatile_p = MEM_VOLATILE_P (src);
3572 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3573 fatal_insn ("incorrect insn:", insn);
3575 if (!mem_volatile_p)
3576 return avr_asm_len ("ld %B0,%1" CR_TAB
3577 "ld %A0,%1", op, plen, -2);
3579 return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
3580 "ld %A0,%p1+" CR_TAB
3581 "ld %B0,%p1" CR_TAB
3582 TINY_SBIW (%I1, %J1, 1), op, plen, -6);
3586 static const char*
3587 out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3589 rtx dest = op[0];
3590 rtx src = op[1];
3591 rtx base = XEXP (src, 0);
3592 int reg_dest = true_regnum (dest);
3593 int reg_base = true_regnum (base);
3594 /* "volatile" forces reading low byte first, even if less efficient,
3595 for correct operation with 16-bit I/O registers. */
3596 int mem_volatile_p = MEM_VOLATILE_P (src);
3598 if (reg_base > 0)
3600 if (AVR_TINY)
3601 return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);
3603 if (reg_dest == reg_base) /* R = (R) */
3604 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3605 "ld %B0,%1" CR_TAB
3606 "mov %A0,__tmp_reg__", op, plen, -3);
3608 if (reg_base != REG_X)
3609 return avr_asm_len ("ld %A0,%1" CR_TAB
3610 "ldd %B0,%1+1", op, plen, -2);
3612 avr_asm_len ("ld %A0,X+" CR_TAB
3613 "ld %B0,X", op, plen, -2);
3615 if (!reg_unused_after (insn, base))
3616 avr_asm_len ("sbiw r26,1", op, plen, 1);
3618 return "";
3620 else if (GET_CODE (base) == PLUS) /* (R + i) */
3622 int disp = INTVAL (XEXP (base, 1));
3623 int reg_base = true_regnum (XEXP (base, 0));
3625 if (AVR_TINY)
3626 return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);
3628 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3630 if (REGNO (XEXP (base, 0)) != REG_Y)
3631 fatal_insn ("incorrect insn:",insn);
3633 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3634 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3635 "ldd %A0,Y+62" CR_TAB
3636 "ldd %B0,Y+63" CR_TAB
3637 "sbiw r28,%o1-62", op, plen, -4)
3639 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3640 "sbci r29,hi8(-%o1)" CR_TAB
3641 "ld %A0,Y" CR_TAB
3642 "ldd %B0,Y+1" CR_TAB
3643 "subi r28,lo8(%o1)" CR_TAB
3644 "sbci r29,hi8(%o1)", op, plen, -6);
3647 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3648 it but I have this situation with extremal
3649 optimization options. */
3651 if (reg_base == REG_X)
3652 return reg_base == reg_dest
3653 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3654 "ld __tmp_reg__,X+" CR_TAB
3655 "ld %B0,X" CR_TAB
3656 "mov %A0,__tmp_reg__", op, plen, -4)
3658 : avr_asm_len ("adiw r26,%o1" CR_TAB
3659 "ld %A0,X+" CR_TAB
3660 "ld %B0,X" CR_TAB
3661 "sbiw r26,%o1+1", op, plen, -4);
3663 return reg_base == reg_dest
3664 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3665 "ldd %B0,%B1" CR_TAB
3666 "mov %A0,__tmp_reg__", op, plen, -3)
3668 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3669 "ldd %B0,%B1", op, plen, -2);
3671 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3673 if (AVR_TINY)
3674 return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);
3676 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3677 fatal_insn ("incorrect insn:", insn);
3679 if (!mem_volatile_p)
3680 return avr_asm_len ("ld %B0,%1" CR_TAB
3681 "ld %A0,%1", op, plen, -2);
3683 return REGNO (XEXP (base, 0)) == REG_X
3684 ? avr_asm_len ("sbiw r26,2" CR_TAB
3685 "ld %A0,X+" CR_TAB
3686 "ld %B0,X" CR_TAB
3687 "sbiw r26,1", op, plen, -4)
3689 : avr_asm_len ("sbiw %r1,2" CR_TAB
3690 "ld %A0,%p1" CR_TAB
3691 "ldd %B0,%p1+1", op, plen, -3);
3693 else if (GET_CODE (base) == POST_INC) /* (R++) */
3695 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3696 fatal_insn ("incorrect insn:", insn);
3698 return avr_asm_len ("ld %A0,%1" CR_TAB
3699 "ld %B0,%1", op, plen, -2);
3701 else if (CONSTANT_ADDRESS_P (base))
3703 int n_words = AVR_TINY ? 2 : 4;
3704 return optimize > 0 && io_address_operand (base, HImode)
3705 ? avr_asm_len ("in %A0,%i1" CR_TAB
3706 "in %B0,%i1+1", op, plen, -2)
3708 : avr_asm_len ("lds %A0,%m1" CR_TAB
3709 "lds %B0,%m1+1", op, plen, -n_words);
3712 fatal_insn ("unknown move insn:",insn);
3713 return "";
3716 static const char*
3717 avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
3719 rtx dest = op[0];
3720 rtx src = op[1];
3721 rtx base = XEXP (src, 0);
3722 int reg_dest = true_regnum (dest);
3723 int reg_base = true_regnum (base);
3725 if (reg_dest == reg_base)
3727 /* "ld r26,-X" is undefined */
3728 return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
3729 "ld %D0,%1" CR_TAB
3730 "ld %C0,-%1" CR_TAB
3731 "ld __tmp_reg__,-%1" CR_TAB
3732 TINY_SBIW (%E1, %F1, 1) CR_TAB
3733 "ld %A0,%1" CR_TAB
3734 "mov %B0,__tmp_reg__");
3736 else if (reg_dest == reg_base - 2)
3738 return *l = 5, ("ld %A0,%1+" CR_TAB
3739 "ld %B0,%1+" CR_TAB
3740 "ld __tmp_reg__,%1+" CR_TAB
3741 "ld %D0,%1" CR_TAB
3742 "mov %C0,__tmp_reg__");
3744 else if (reg_unused_after (insn, base))
3746 return *l = 4, ("ld %A0,%1+" CR_TAB
3747 "ld %B0,%1+" CR_TAB
3748 "ld %C0,%1+" CR_TAB
3749 "ld %D0,%1");
3751 else
3753 return *l = 6, ("ld %A0,%1+" CR_TAB
3754 "ld %B0,%1+" CR_TAB
3755 "ld %C0,%1+" CR_TAB
3756 "ld %D0,%1" CR_TAB
3757 TINY_SBIW (%E1, %F1, 3));
/* AVR_TINY: output a 4-byte (SImode) load where the source address is
   (pointer-reg + displacement).  OP[0] = destination register,
   OP[1] = source MEM; *L gets the instruction count.  The pointer is
   adjusted with TINY_ADIW/TINY_SBIW (subi/sbci pairs, 2 insns each)
   since Tiny has no displacement addressing.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* Destination overlaps the pointer: load top-down, byte B goes
         through the scratch register.  "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "ld %C0,-%b1" CR_TAB
                      "ld __tmp_reg__,-%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1) CR_TAB
                      "ld %A0,%b1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Byte C of the destination is the pointer's low register:
         buffer it until byte D has been loaded.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld __tmp_reg__,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      /* Pointer dies here: skip the restoring SBIW.  */
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      /* Load, then undo the full displacement + 3 increments.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
/* Output a 4-byte (SImode) load from memory into a register.
   OP[0] = destination register, OP[1] = source MEM.  Dispatches on the
   address form: plain register, register + displacement, pre-decrement,
   post-increment, or constant address.  If L is non-NULL it receives
   the number of instructions emitted; otherwise a dummy is used.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination is X itself: load top-down via Y/scratch.
               "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Byte C would clobber r26: buffer it in the scratch.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            /* X dies here: no rewind needed.  */
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          /* Base is Y or Z: LDD with displacement is available.  */
          if (reg_dest == reg_base)
            /* Full overlap: load top-down, byte B via scratch.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            /* Byte C would clobber the base: buffer it.  */
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD range: only Y is legal here, and
             the frame pointer is temporarily adjusted around the
             access.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }

          *l = 6;
          if (reg_dest == REG_X - 2)
            /* Destination is r24..r27 (overlaps X in its top half).  */
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }

      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* Address is in I/O space: IN is shorter than LDS.  */
          *l = 4;
          return ("in %A0,%i1" CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* LDS is 1 word on Tiny, 2 words elsewhere.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* AVR_TINY: output a 4-byte (SImode) store to memory addressed by a
   bare pointer register.  OP[0] = destination MEM, OP[1] = source
   register; *L gets the instruction count.  Overlap between the source
   and the pointer is resolved via __tmp_reg__/__zero_reg__.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source fully overlaps the pointer: store byte A before the
         pointer is bumped, with byte B saved in the scratch first.
         "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1");
        }
      else
        {
          /* Same, but rewind the pointer afterwards.  */
          return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1" CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* Bytes C/D of the source are the pointer registers: park them
         in __zero_reg__/__tmp_reg__ first, restore __zero_reg__ to 0
         at the end.  */
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__" CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: post-increment stores, then rewind the pointer.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1" CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
/* AVR_TINY: output a 4-byte (SImode) store where the destination
   address is (pointer-reg + displacement).  OP[0] = destination MEM,
   OP[1] = source register; *L gets the instruction count.

   NOTE(review): the two overlap branches reference operand 2
   (%A2..%D2) and store %A0/%B0, although the caller out_movsi_mr_r
   only populates op[0] (mem) and op[1] (src) — these look like they
   should be %A1..%D1.  Flagging rather than changing; confirm against
   the insn patterns that reach this path before relying on it.  */

static const char*
avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: save the low word in the fixed
         temporaries before adjusting the pointer.  */
      *l = 11;
      return ("mov __tmp_reg__,%A2" CR_TAB
              "mov __zero_reg__,%B2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0+,__zero_reg__" CR_TAB
              "st %b0+,%C2" CR_TAB
              "st %b0,%D2" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  else if (reg_src == reg_base - 2)
    {
      /* Bytes C/D of the source are the pointer registers: park them
         in the temporaries first.  */
      *l = 11;
      return ("mov __tmp_reg__,%C2" CR_TAB
              "mov __zero_reg__,%D2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,%A0" CR_TAB
              "st %b0+,%B0" CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0,__zero_reg__" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }

  /* No overlap: adjust, store all four bytes, restore pointer.  */
  *l = 8;
  return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
          "st %b0+,%A1" CR_TAB
          "st %b0+,%B1" CR_TAB
          "st %b0+,%C1" CR_TAB
          "st %b0,%D1" CR_TAB
          TINY_SBIW (%I0, %J0, %o0+3));
}
/* Output a 4-byte (SImode) store from a register to memory.
   OP[0] = destination MEM, OP[1] = source register.  Dispatches on the
   address form: constant address, plain register, register +
   displacement, pre-decrement, or post-increment.  If L is non-NULL it
   receives the instruction count.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* I/O space: OUT is shorter than STS.  */
          return *l=4,("out %i0, %A1" CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* STS is 1 word on Tiny, 2 words elsewhere.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0) /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* Source is X itself (r26/r27 + r28/r29).
                 "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Bytes C/D of the source are r26/r27: park them in the
                 fixed temporaries, restore __zero_reg__ afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }

          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        /* Base is Y or Z: STD with displacement is available.  */
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range: only Y is legal, adjust
             the frame pointer around the access.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source is X + Y: low word via the temporaries.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source r24..r27 overlaps X in its top half.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }

          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }

      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4271 const char *
4272 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4274 int dummy;
4275 rtx dest = operands[0];
4276 rtx src = operands[1];
4277 int *real_l = l;
4279 if (avr_mem_flash_p (src)
4280 || avr_mem_flash_p (dest))
4282 return avr_out_lpm (insn, operands, real_l);
4285 if (!l)
4286 l = &dummy;
4288 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4289 if (REG_P (dest))
4291 if (REG_P (src)) /* mov r,r */
4293 if (true_regnum (dest) > true_regnum (src))
4295 if (AVR_HAVE_MOVW)
4297 *l = 2;
4298 return ("movw %C0,%C1" CR_TAB
4299 "movw %A0,%A1");
4301 *l = 4;
4302 return ("mov %D0,%D1" CR_TAB
4303 "mov %C0,%C1" CR_TAB
4304 "mov %B0,%B1" CR_TAB
4305 "mov %A0,%A1");
4307 else
4309 if (AVR_HAVE_MOVW)
4311 *l = 2;
4312 return ("movw %A0,%A1" CR_TAB
4313 "movw %C0,%C1");
4315 *l = 4;
4316 return ("mov %A0,%A1" CR_TAB
4317 "mov %B0,%B1" CR_TAB
4318 "mov %C0,%C1" CR_TAB
4319 "mov %D0,%D1");
4322 else if (CONSTANT_P (src))
4324 return output_reload_insisf (operands, NULL_RTX, real_l);
4326 else if (MEM_P (src))
4327 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4329 else if (MEM_P (dest))
4331 const char *templ;
4333 if (src == CONST0_RTX (GET_MODE (dest)))
4334 operands[1] = zero_reg_rtx;
4336 templ = out_movsi_mr_r (insn, operands, real_l);
4338 if (!real_l)
4339 output_asm_insn (templ, operands);
4341 operands[1] = src;
4342 return "";
4344 fatal_insn ("invalid insn:", insn);
4345 return "";
/* Handle loads of 24-bit types from memory to register. */

/* AVR_TINY helper: 3-byte (PSImode) load from memory addressed by a
   bare pointer register.  OP[0] = destination, OP[1] = source MEM;
   PLEN counts instructions per avr_asm_len conventions.  */

static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: load top-down so the
         pointer pair is clobbered last; byte B via the scratch.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1" CR_TAB
                          "ld __tmp_reg__,-%1" CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      /* Rewind the pointer unless it was clobbered by the load
         (reg_dest == reg_base - 2) or dies in this insn.  */
      if (reg_dest != reg_base - 2 &&
          !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
4384 static const char*
4385 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4387 rtx dest = op[0];
4388 rtx src = op[1];
4389 rtx base = XEXP (src, 0);
4390 int reg_dest = true_regnum (dest);
4391 int reg_base = true_regnum (base);
4393 reg_base = true_regnum (XEXP (base, 0));
4394 if (reg_base == reg_dest)
4396 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4397 "ld %C0,%b1" CR_TAB
4398 "ld __tmp_reg__,-%b1" CR_TAB
4399 TINY_SBIW (%I1, %J1, 1) CR_TAB
4400 "ld %A0,%b1" CR_TAB
4401 "mov %B0,__tmp_reg__", op, plen, -8);
4403 else
4405 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
4406 "ld %A0,%b1+" CR_TAB
4407 "ld %B0,%b1+" CR_TAB
4408 "ld %C0,%b1", op, plen, -5);
4410 if (reg_dest != (reg_base - 2)
4411 && !reg_unused_after (insn, XEXP (base, 0)))
4412 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
4414 return "";
/* Output a 3-byte (PSImode) load from memory to a register.
   OP[0] = destination register, OP[1] = source MEM.  Dispatches on the
   address form; PLEN counts instructions per avr_asm_len conventions
   (negative length sets, positive adds).  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination overlaps X: load top-down via r28/scratch.
               "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Rewind X unless it was clobbered by the load or dies
                 in this insn.  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }
              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Overlap: load top-down, byte B via the scratch.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD range: only Y is legal, adjust
             the frame pointer around the access.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* REG_W is REG_X - 2: in that case the load clobbered X, so
             no rewind is needed; otherwise rewind unless X dies.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word on Tiny, 2 words elsewhere.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4557 static const char*
4558 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4560 rtx dest = op[0];
4561 rtx src = op[1];
4562 rtx base = XEXP (dest, 0);
4563 int reg_base = true_regnum (base);
4564 int reg_src = true_regnum (src);
4566 if (reg_base == reg_src)
4568 avr_asm_len ("st %0,%A1" CR_TAB
4569 "mov __tmp_reg__,%B1" CR_TAB
4570 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
4571 "st %0+,__tmp_reg__" CR_TAB
4572 "st %0,%C1", op, plen, -6);
4575 else if (reg_src == reg_base - 2)
4577 avr_asm_len ("st %0,%A1" CR_TAB
4578 "mov __tmp_reg__,%C1" CR_TAB
4579 TINY_ADIW (%E0, %F0, 1) CR_TAB
4580 "st %0+,%B1" CR_TAB
4581 "st %0,__tmp_reg__", op, plen, 6);
4583 else
4585 avr_asm_len ("st %0+,%A1" CR_TAB
4586 "st %0+,%B1" CR_TAB
4587 "st %0,%C1", op, plen, -3);
4590 if (!reg_unused_after (insn, base))
4591 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4593 return "";
/* AVR_TINY helper for avr_out_store_psi: 3-byte (PSImode) store where
   the destination address is (pointer-reg + displacement).
   OP[0] = destination MEM, OP[1] = source register; PLEN counts
   instructions per avr_asm_len conventions.  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    {
      /* Source overlaps the pointer: save bytes A/B in the fixed
         temporaries before adjusting the pointer; __zero_reg__ is
         cleared again afterwards.  */
      return avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                          "mov __zero_reg__,%B1" CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,__tmp_reg__" CR_TAB
                          "st %b0+,__zero_reg__" CR_TAB
                          "st %b0,%C1" CR_TAB
                          "clr __zero_reg__" CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
    }
  else if (reg_src == reg_base - 2)
    {
      /* Byte C of the source is the pointer's low register: park it
         in the scratch first.  */
      return avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,%A1" CR_TAB
                          "st %b0+,%B1" CR_TAB
                          "st %b0,__tmp_reg__" CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
    }

  /* No overlap: adjust, store, restore the pointer.  */
  return avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                      "st %b0+,%A1" CR_TAB
                      "st %b0+,%B1" CR_TAB
                      "st %b0,%C1" CR_TAB
                      TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
}
/* Handle store of 24-bit type from register or zero to memory. */

/* OP[0] = destination MEM, OP[1] = source register (the caller maps a
   zero constant to the fixed zero register).  Dispatches on the
   address form; PLEN counts instructions per avr_asm_len
   conventions.  */

static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on Tiny, 2 words elsewhere.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1" CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0) /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X) /* (R26) */
        {
          /* Overlap of source and X is ruled out by the caller's
             register allocation constraints.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Rewind X unless it dies in this insn.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        /* Base is Y or Z: STD with displacement is available.  */
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range: only Y is legal, adjust
             the frame pointer around the access.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1" CR_TAB
                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X+,%B1" CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4737 /* Move around 24-bit stuff. */
4739 const char *
4740 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
4742 rtx dest = op[0];
4743 rtx src = op[1];
4745 if (avr_mem_flash_p (src)
4746 || avr_mem_flash_p (dest))
4748 return avr_out_lpm (insn, op, plen);
4751 if (register_operand (dest, VOIDmode))
4753 if (register_operand (src, VOIDmode)) /* mov r,r */
4755 if (true_regnum (dest) > true_regnum (src))
4757 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4759 if (AVR_HAVE_MOVW)
4760 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4761 else
4762 return avr_asm_len ("mov %B0,%B1" CR_TAB
4763 "mov %A0,%A1", op, plen, 2);
4765 else
4767 if (AVR_HAVE_MOVW)
4768 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4769 else
4770 avr_asm_len ("mov %A0,%A1" CR_TAB
4771 "mov %B0,%B1", op, plen, -2);
4773 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4776 else if (CONSTANT_P (src))
4778 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4780 else if (MEM_P (src))
4781 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4783 else if (MEM_P (dest))
4785 rtx xop[2];
4787 xop[0] = dest;
4788 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4790 return avr_out_store_psi (insn, xop, plen);
4793 fatal_insn ("invalid insn:", insn);
4794 return "";
4797 static const char*
4798 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
4800 rtx dest = op[0];
4801 rtx src = op[1];
4802 rtx x = XEXP (dest, 0);
4804 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4806 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4807 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4808 "st %b0,__tmp_reg__", op, plen, -4);
4810 else
4812 avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4813 "st %b0,%1" , op, plen, -3);
4816 if (!reg_unused_after (insn, XEXP (x,0)))
4817 avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);
4819 return "";
/* Output a 1-byte (QImode) store from register to memory.
   OP[0] = destination MEM, OP[1] = source register.  Dispatches on
   the address form; PLEN counts instructions per avr_asm_len
   conventions.  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* OUT for I/O space when optimizing; STS otherwise (1 word on
         Tiny, 2 elsewhere).  */
      int n_words = AVR_TINY ? 1 : 2;
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement exceeds STD range: only Y is legal, adjust
             the frame pointer around the access.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement addressing: adjust X around the
             store; save the source in the scratch if it overlaps X.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  /* Plain (reg), pre-decrement or post-increment address.  */
  return avr_asm_len ("st %0,%1", op, plen, -1);
}
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

/* Output a 2-byte (HImode) store on XMEGA.  OP[0] = destination MEM,
   OP[1] = source register; PLEN counts instructions per avr_asm_len
   conventions.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* OUT for I/O space when optimizing; STS otherwise (1 word on
         Tiny, 2 elsewhere).  */
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Rewind X unless it dies in this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD range: only Y is legal, adjust
             the frame pointer around the access.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: rewrite the pre-decrement so the low byte goes out
         first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* AVR_TINY: output a 2-byte (HImode) store to memory addressed by a
   bare pointer register, high byte first (non-XMEGA ordering).
   OP[0] = destination MEM, OP[1] = source register; PLEN counts
   instructions per avr_asm_len conventions.  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: byte B goes through the scratch.
         The non-volatile form may leave the pointer advanced since the
         register dies; the volatile form keeps high-byte-first order
         and restores the pointer between the stores.  */
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       "st %0,%A1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__" CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* No overlap: post-increment pair when the pointer dies and the
     access is not volatile; otherwise store B first, then A with
     pre-decrement.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1" CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
5036 static const char*
5037 avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
5039 rtx dest = op[0];
5040 rtx src = op[1];
5041 rtx base = XEXP (dest, 0);
5042 int reg_base = REGNO (XEXP (base, 0));
5043 int reg_src = true_regnum (src);
5045 return reg_src == reg_base
5046 ? avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5047 "mov __zero_reg__,%B1" CR_TAB
5048 TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
5049 "st %b0,__zero_reg__" CR_TAB
5050 "st -%b0,__tmp_reg__" CR_TAB
5051 "clr __zero_reg__" CR_TAB
5052 TINY_SBIW (%I0, %J0, %o0), op, plen, -9)
5054 : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
5055 "st %b0,%B1" CR_TAB
5056 "st -%b0,%A1" CR_TAB
5057 TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
/* AVR_TINY: output a 2-byte (HImode) post-increment store, high byte
   first.  OP[0] = destination MEM with (R++) address, OP[1] = source
   register; PLEN counts instructions per avr_asm_len conventions.
   Bump to the high address, store B then A with pre-decrement, then
   advance the pointer past the stored word (net +2).  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
                      "st %p0,%B1" CR_TAB
                      "st -%p0,%A1" CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
/* Output an HImode store OP[0] = OP[1] where OP[0] is a MEM and OP[1]
   is a register.  INSN is the store insn, used to decide whether the
   base/source register dies after it.  PLEN == NULL: emit assembly;
   PLEN != NULL: only record the sequence length.  Returns "".  */
5069 static const char*
5070 out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
5072 rtx dest = op[0];
5073 rtx src = op[1];
5074 rtx base = XEXP (dest, 0);
5075 int reg_base = true_regnum (base);
5076 int reg_src = true_regnum (src);
5077 int mem_volatile_p;
5079 /* "volatile" forces writing high-byte first (no-xmega) resp.
5080 low-byte first (xmega) even if less efficient, for correct
5081 operation with 16-bit I/O registers like. */
5083 if (AVR_XMEGA)
5084 return avr_out_movhi_mr_r_xmega (insn, op, plen);
5086 mem_volatile_p = MEM_VOLATILE_P (dest);
/* Compile-time constant address: OUT for I/O space when optimizing,
   STS otherwise (2 words on AVR_TINY, 4 elsewhere).  */
5088 if (CONSTANT_ADDRESS_P (base))
5090 int n_words = AVR_TINY ? 2 : 4;
5091 return optimize > 0 && io_address_operand (base, HImode)
5092 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
5093 "out %i0,%A1", op, plen, -2)
5095 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
5096 "sts %m0,%A1", op, plen, -n_words);
/* Plain register base.  */
5099 if (reg_base > 0)
5101 if (AVR_TINY)
5102 return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);
5104 if (reg_base != REG_X)
5105 return avr_asm_len ("std %0+1,%B1" CR_TAB
5106 "st %0,%A1", op, plen, -2);
5108 if (reg_src == REG_X)
5109 /* "st X+,r26" and "st -X,r26" are undefined. */
5110 return !mem_volatile_p && reg_unused_after (insn, src)
5111 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5112 "st X,r26" CR_TAB
5113 "adiw r26,1" CR_TAB
5114 "st X,__tmp_reg__", op, plen, -4)
5116 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5117 "adiw r26,1" CR_TAB
5118 "st X,__tmp_reg__" CR_TAB
5119 "sbiw r26,1" CR_TAB
5120 "st X,r26", op, plen, -5);
5122 return !mem_volatile_p && reg_unused_after (insn, base)
5123 ? avr_asm_len ("st X+,%A1" CR_TAB
5124 "st X,%B1", op, plen, -2)
5125 : avr_asm_len ("adiw r26,1" CR_TAB
5126 "st X,%B1" CR_TAB
5127 "st -X,%A1", op, plen, -3);
/* Base register plus constant displacement.  */
5129 else if (GET_CODE (base) == PLUS)
5131 int disp = INTVAL (XEXP (base, 1));
5133 if (AVR_TINY)
5134 return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);
5136 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD's reach: only Y is allowed here; adjust Y,
   store, and restore it.  */
5137 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
5139 if (reg_base != REG_Y)
5140 fatal_insn ("incorrect insn:",insn);
5142 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
5143 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5144 "std Y+63,%B1" CR_TAB
5145 "std Y+62,%A1" CR_TAB
5146 "sbiw r28,%o0-62", op, plen, -4)
5148 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5149 "sbci r29,hi8(-%o0)" CR_TAB
5150 "std Y+1,%B1" CR_TAB
5151 "st Y,%A1" CR_TAB
5152 "subi r28,lo8(%o0)" CR_TAB
5153 "sbci r29,hi8(%o0)", op, plen, -6);
5156 if (reg_base != REG_X)
5157 return avr_asm_len ("std %B0,%B1" CR_TAB
5158 "std %A0,%A1", op, plen, -2);
5159 /* (X + d) = R */
5160 return reg_src == REG_X
5161 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5162 "mov __zero_reg__,r27" CR_TAB
5163 "adiw r26,%o0+1" CR_TAB
5164 "st X,__zero_reg__" CR_TAB
5165 "st -X,__tmp_reg__" CR_TAB
5166 "clr __zero_reg__" CR_TAB
5167 "sbiw r26,%o0", op, plen, -7)
5169 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
5170 "st X,%B1" CR_TAB
5171 "st -X,%A1" CR_TAB
5172 "sbiw r26,%o0", op, plen, -4);
5174 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
5176 return avr_asm_len ("st %0,%B1" CR_TAB
5177 "st %0,%A1", op, plen, -2);
5179 else if (GET_CODE (base) == POST_INC) /* (R++) */
5181 if (!mem_volatile_p)
5182 return avr_asm_len ("st %0,%A1" CR_TAB
5183 "st %0,%B1", op, plen, -2);
5185 if (AVR_TINY)
5186 return avr_out_movhi_mr_r_post_inc_tiny (op, plen);
/* Volatile post-increment: store high byte first, then fix up the
   pointer past the object.  */
5188 return REGNO (XEXP (base, 0)) == REG_X
5189 ? avr_asm_len ("adiw r26,1" CR_TAB
5190 "st X,%B1" CR_TAB
5191 "st -X,%A1" CR_TAB
5192 "adiw r26,2", op, plen, -4)
5194 : avr_asm_len ("std %p0+1,%B1" CR_TAB
5195 "st %p0,%A1" CR_TAB
5196 "adiw %r0,2", op, plen, -3);
5198 fatal_insn ("unknown move insn:",insn);
5199 return "";
5202 /* Return 1 if frame pointer for current function required. */
5204 static bool
5205 avr_frame_pointer_required_p (void)
5207 return (cfun->calls_alloca
5208 || cfun->calls_setjmp
5209 || cfun->has_nonlocal_label
5210 || crtl->args.info.nregs == 0
5211 || get_frame_size () > 0);
5214 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5216 static RTX_CODE
5217 compare_condition (rtx_insn *insn)
5219 rtx_insn *next = next_real_insn (insn);
5221 if (next && JUMP_P (next))
5223 rtx pat = PATTERN (next);
5224 rtx src = SET_SRC (pat);
5226 if (IF_THEN_ELSE == GET_CODE (src))
5227 return GET_CODE (XEXP (src, 0));
5230 return UNKNOWN;
5234 /* Returns true iff INSN is a tst insn that only tests the sign. */
5236 static bool
5237 compare_sign_p (rtx_insn *insn)
5239 RTX_CODE cond = compare_condition (insn);
5240 return (cond == GE || cond == LT);
5244 /* Returns true iff the next insn is a JUMP_INSN with a condition
5245 that needs to be swapped (GT, GTU, LE, LEU). */
5247 static bool
5248 compare_diff_p (rtx_insn *insn)
5250 RTX_CODE cond = compare_condition (insn);
5251 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5254 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5256 static bool
5257 compare_eq_p (rtx_insn *insn)
5259 RTX_CODE cond = compare_condition (insn);
5260 return (cond == EQ || cond == NE);
5264 /* Output compare instruction
5266 compare (XOP[0], XOP[1])
5268 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
5269 XOP[2] is an 8-bit scratch register as needed.
5271 PLEN == NULL: Output instructions.
5272 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
5273 Don't output anything. */
5275 const char*
5276 avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
5278 /* Register to compare and value to compare against. */
5279 rtx xreg = xop[0];
5280 rtx xval = xop[1];
5282 /* MODE of the comparison. */
5283 machine_mode mode;
5285 /* Number of bytes to operate on. */
5286 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
5288 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
5289 int clobber_val = -1;
5291 /* Map fixed mode operands to integer operands with the same binary
5292 representation. They are easier to handle in the remainder. */
5294 if (CONST_FIXED_P (xval))
5296 xreg = avr_to_int_mode (xop[0]);
5297 xval = avr_to_int_mode (xop[1]);
5300 mode = GET_MODE (xreg);
5302 gcc_assert (REG_P (xreg));
5303 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
5304 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
5306 if (plen)
5307 *plen = 0;
5309 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
5310 against 0 by ORing the bytes. This is one instruction shorter.
5311 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5312 and therefore don't use this. */
5314 if (!test_hard_reg_class (LD_REGS, xreg)
5315 && compare_eq_p (insn)
5316 && reg_unused_after (insn, xreg))
/* Compare against +1: decrement the low byte, then OR all bytes;
   result is zero iff the value was 1.  Clobbers XREG.  */
5318 if (xval == const1_rtx)
5320 avr_asm_len ("dec %A0" CR_TAB
5321 "or %A0,%B0", xop, plen, 2);
5323 if (n_bytes >= 3)
5324 avr_asm_len ("or %A0,%C0", xop, plen, 1);
5326 if (n_bytes >= 4)
5327 avr_asm_len ("or %A0,%D0", xop, plen, 1);
5329 return "";
/* Compare against -1: AND all bytes and complement; zero iff all
   bytes were 0xff.  Clobbers XREG.  */
5331 else if (xval == constm1_rtx)
5333 if (n_bytes >= 4)
5334 avr_asm_len ("and %A0,%D0", xop, plen, 1);
5336 if (n_bytes >= 3)
5337 avr_asm_len ("and %A0,%C0", xop, plen, 1);
5339 return avr_asm_len ("and %A0,%B0" CR_TAB
5340 "com %A0", xop, plen, 2);
/* General case: compare byte by byte, low byte first.  Note that
   xop[0] / xop[1] are overwritten below with the per-byte operands.  */
5344 for (i = 0; i < n_bytes; i++)
5346 /* We compare byte-wise. */
5347 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
5348 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5350 /* 8-bit value to compare with this byte. */
5351 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5353 /* Registers R16..R31 can operate with immediate. */
5354 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5356 xop[0] = reg8;
5357 xop[1] = gen_int_mode (val8, QImode);
5359 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
5361 if (i == 0
5362 && test_hard_reg_class (ADDW_REGS, reg8))
5364 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
5366 if (IN_RANGE (val16, 0, 63)
5367 && (val8 == 0
5368 || reg_unused_after (insn, xreg)))
5370 if (AVR_TINY)
5371 avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
5372 else
5373 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
/* SBIW handled two bytes at once; skip the high byte.  */
5375 i++;
5376 continue;
5379 if (n_bytes == 2
5380 && IN_RANGE (val16, -63, -1)
5381 && compare_eq_p (insn)
5382 && reg_unused_after (insn, xreg))
5384 return AVR_TINY
5385 ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
5386 : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
5390 /* Comparing against 0 is easy. */
5392 if (val8 == 0)
5394 avr_asm_len (i == 0
5395 ? "cp %0,__zero_reg__"
5396 : "cpc %0,__zero_reg__", xop, plen, 1);
5397 continue;
5400 /* Upper registers can compare and subtract-with-carry immediates.
5401 Notice that compare instructions do the same as respective subtract
5402 instruction; the only difference is that comparisons don't write
5403 the result back to the target register. */
5405 if (ld_reg_p)
5407 if (i == 0)
5409 avr_asm_len ("cpi %0,%1", xop, plen, 1);
5410 continue;
5412 else if (reg_unused_after (insn, xreg))
5414 avr_asm_len ("sbci %0,%1", xop, plen, 1);
5415 continue;
5419 /* Must load the value into the scratch register. */
5421 gcc_assert (REG_P (xop[2]));
/* Reuse the scratch if it already holds this byte value.  */
5423 if (clobber_val != (int) val8)
5424 avr_asm_len ("ldi %2,%1", xop, plen, 1);
5425 clobber_val = (int) val8;
5427 avr_asm_len (i == 0
5428 ? "cp %0,%2"
5429 : "cpc %0,%2", xop, plen, 1);
5432 return "";
5436 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5438 const char*
5439 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
5441 rtx xop[3];
5443 xop[0] = gen_rtx_REG (DImode, 18);
5444 xop[1] = op[0];
5445 xop[2] = op[1];
5447 return avr_out_compare (insn, xop, plen);
5450 /* Output test instruction for HImode. */
5452 const char*
5453 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
5455 if (compare_sign_p (insn))
5457 avr_asm_len ("tst %B0", op, plen, -1);
5459 else if (reg_unused_after (insn, op[0])
5460 && compare_eq_p (insn))
5462 /* Faster than sbiw if we can clobber the operand. */
5463 avr_asm_len ("or %A0,%B0", op, plen, -1);
5465 else
5467 avr_out_compare (insn, op, plen);
5470 return "";
5474 /* Output test instruction for PSImode. */
5476 const char*
5477 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
5479 if (compare_sign_p (insn))
5481 avr_asm_len ("tst %C0", op, plen, -1);
5483 else if (reg_unused_after (insn, op[0])
5484 && compare_eq_p (insn))
5486 /* Faster than sbiw if we can clobber the operand. */
5487 avr_asm_len ("or %A0,%B0" CR_TAB
5488 "or %A0,%C0", op, plen, -2);
5490 else
5492 avr_out_compare (insn, op, plen);
5495 return "";
5499 /* Output test instruction for SImode. */
5501 const char*
5502 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
5504 if (compare_sign_p (insn))
5506 avr_asm_len ("tst %D0", op, plen, -1);
5508 else if (reg_unused_after (insn, op[0])
5509 && compare_eq_p (insn))
5511 /* Faster than sbiw if we can clobber the operand. */
5512 avr_asm_len ("or %A0,%B0" CR_TAB
5513 "or %A0,%C0" CR_TAB
5514 "or %A0,%D0", op, plen, -3);
5516 else
5518 avr_out_compare (insn, op, plen);
5521 return "";
5525 /* Generate asm equivalent for various shifts. This only handles cases
5526 that are not already carefully hand-optimized in ?sh??i3_out.
5528 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5529 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5530 OPERANDS[3] is a QImode scratch register from LD regs if
5531 available and SCRATCH, otherwise (no scratch available)
5533 TEMPL is an assembler template that shifts by one position.
5534 T_LEN is the length of this template. */
5536 void
5537 out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
5538 int *plen, int t_len)
5540 bool second_label = true;
5541 bool saved_in_tmp = false;
5542 bool use_zero_reg = false;
5543 rtx op[5];
5545 op[0] = operands[0];
5546 op[1] = operands[1];
5547 op[2] = operands[2];
5548 op[3] = operands[3];
5550 if (plen)
5551 *plen = 0;
/* Constant shift count: either unroll TEMPL inline or set up a
   counted loop.  */
5553 if (CONST_INT_P (operands[2]))
5555 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
5556 && REG_P (operands[3]));
5557 int count = INTVAL (operands[2]);
5558 int max_len = 10; /* If larger than this, always use a loop. */
5560 if (count <= 0)
5561 return;
5563 if (count < 8 && !scratch)
5564 use_zero_reg = true;
5566 if (optimize_size)
5567 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
5569 if (t_len * count <= max_len)
5571 /* Output shifts inline with no loop - faster. */
5573 while (count-- > 0)
5574 avr_asm_len (templ, op, plen, t_len);
5576 return;
5579 if (scratch)
5581 avr_asm_len ("ldi %3,%2", op, plen, 1);
5583 else if (use_zero_reg)
5585 /* Hack to save one word: use __zero_reg__ as loop counter.
5586 Set one bit, then shift in a loop until it is 0 again. */
5588 op[3] = zero_reg_rtx;
5590 avr_asm_len ("set" CR_TAB
5591 "bld %3,%2-1", op, plen, 2);
5593 else
5595 /* No scratch register available, use one from LD_REGS (saved in
5596 __tmp_reg__) that doesn't overlap with registers to shift. */
5598 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
5599 op[4] = tmp_reg_rtx;
5600 saved_in_tmp = true;
5602 avr_asm_len ("mov %4,%3" CR_TAB
5603 "ldi %3,%2", op, plen, 2);
/* Counter is pre-loaded and known non-zero: no need to jump to the
   loop test first.  */
5606 second_label = false;
5608 else if (MEM_P (op[2]))
5610 rtx op_mov[2];
5612 op_mov[0] = op[3] = tmp_reg_rtx;
5613 op_mov[1] = op[2];
5615 out_movqi_r_mr (insn, op_mov, plen);
5617 else if (register_operand (op[2], QImode))
5619 op[3] = op[2];
5621 if (!reg_unused_after (insn, op[2])
5622 || reg_overlap_mentioned_p (op[0], op[2]))
5624 op[3] = tmp_reg_rtx;
5625 avr_asm_len ("mov %3,%2", op, plen, 1);
5628 else
5629 fatal_insn ("bad shift insn:", insn);
/* Emit the loop: label 1 shifts, label 2 (when present) is the entry
   point that tests the counter first.  */
5631 if (second_label)
5632 avr_asm_len ("rjmp 2f", op, plen, 1);
5634 avr_asm_len ("1:", op, plen, 0);
5635 avr_asm_len (templ, op, plen, t_len);
5637 if (second_label)
5638 avr_asm_len ("2:", op, plen, 0);
5640 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
5641 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
5643 if (saved_in_tmp)
5644 avr_asm_len ("mov %3,%4", op, plen, 1);
5648 /* 8bit shift left ((char)x << i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
5650 const char *
5651 ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
5653 if (GET_CODE (operands[2]) == CONST_INT)
5655 int k;
/* Always have somewhere to store the length.  */
5657 if (!len)
5658 len = &k;
5660 switch (INTVAL (operands[2]))
5662 default:
5663 if (INTVAL (operands[2]) < 8)
5664 break;
/* Shift count >= width: result is zero.  */
5666 *len = 1;
5667 return "clr %0";
5669 case 1:
5670 *len = 1;
5671 return "lsl %0";
5673 case 2:
5674 *len = 2;
5675 return ("lsl %0" CR_TAB
5676 "lsl %0");
5678 case 3:
5679 *len = 3;
5680 return ("lsl %0" CR_TAB
5681 "lsl %0" CR_TAB
5682 "lsl %0");
5684 case 4:
5685 if (test_hard_reg_class (LD_REGS, operands[0]))
5687 *len = 2;
5688 return ("swap %0" CR_TAB
5689 "andi %0,0xf0");
5691 *len = 4;
5692 return ("lsl %0" CR_TAB
5693 "lsl %0" CR_TAB
5694 "lsl %0" CR_TAB
5695 "lsl %0");
5697 case 5:
5698 if (test_hard_reg_class (LD_REGS, operands[0]))
5700 *len = 3;
5701 return ("swap %0" CR_TAB
5702 "lsl %0" CR_TAB
5703 "andi %0,0xe0");
5705 *len = 5;
5706 return ("lsl %0" CR_TAB
5707 "lsl %0" CR_TAB
5708 "lsl %0" CR_TAB
5709 "lsl %0" CR_TAB
5710 "lsl %0");
5712 case 6:
5713 if (test_hard_reg_class (LD_REGS, operands[0]))
5715 *len = 4;
5716 return ("swap %0" CR_TAB
5717 "lsl %0" CR_TAB
5718 "lsl %0" CR_TAB
5719 "andi %0,0xc0");
5721 *len = 6;
5722 return ("lsl %0" CR_TAB
5723 "lsl %0" CR_TAB
5724 "lsl %0" CR_TAB
5725 "lsl %0" CR_TAB
5726 "lsl %0" CR_TAB
5727 "lsl %0");
5729 case 7:
5730 *len = 3;
5731 return ("ror %0" CR_TAB
5732 "clr %0" CR_TAB
5733 "ror %0");
5736 else if (CONSTANT_P (operands[2]))
5737 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Variable shift count, or constant better handled by a loop.  */
5739 out_shift_with_cnt ("lsl %0",
5740 insn, operands, len, 1);
5741 return "";
5745 /* 16bit shift left ((short)x << i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
5747 const char *
5748 ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
5750 if (GET_CODE (operands[2]) == CONST_INT)
5752 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5753 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5754 int k;
5755 int *t = len;
5757 if (!len)
5758 len = &k;
5760 switch (INTVAL (operands[2]))
5762 default:
5763 if (INTVAL (operands[2]) < 16)
5764 break;
/* Shift count >= width: result is zero.  */
5766 *len = 2;
5767 return ("clr %B0" CR_TAB
5768 "clr %A0");
5770 case 4:
5771 if (optimize_size && scratch)
5772 break; /* 5 */
5773 if (ldi_ok)
5775 *len = 6;
5776 return ("swap %A0" CR_TAB
5777 "swap %B0" CR_TAB
5778 "andi %B0,0xf0" CR_TAB
5779 "eor %B0,%A0" CR_TAB
5780 "andi %A0,0xf0" CR_TAB
5781 "eor %B0,%A0");
5783 if (scratch)
5785 *len = 7;
5786 return ("swap %A0" CR_TAB
5787 "swap %B0" CR_TAB
5788 "ldi %3,0xf0" CR_TAB
5789 "and %B0,%3" CR_TAB
5790 "eor %B0,%A0" CR_TAB
5791 "and %A0,%3" CR_TAB
5792 "eor %B0,%A0");
5794 break; /* optimize_size ? 6 : 8 */
5796 case 5:
5797 if (optimize_size)
5798 break; /* scratch ? 5 : 6 */
5799 if (ldi_ok)
5801 *len = 8;
5802 return ("lsl %A0" CR_TAB
5803 "rol %B0" CR_TAB
5804 "swap %A0" CR_TAB
5805 "swap %B0" CR_TAB
5806 "andi %B0,0xf0" CR_TAB
5807 "eor %B0,%A0" CR_TAB
5808 "andi %A0,0xf0" CR_TAB
5809 "eor %B0,%A0");
5811 if (scratch)
5813 *len = 9;
5814 return ("lsl %A0" CR_TAB
5815 "rol %B0" CR_TAB
5816 "swap %A0" CR_TAB
5817 "swap %B0" CR_TAB
5818 "ldi %3,0xf0" CR_TAB
5819 "and %B0,%3" CR_TAB
5820 "eor %B0,%A0" CR_TAB
5821 "and %A0,%3" CR_TAB
5822 "eor %B0,%A0");
5824 break; /* 10 */
5826 case 6:
5827 if (optimize_size)
5828 break; /* scratch ? 5 : 6 */
5829 *len = 9;
5830 return ("clr __tmp_reg__" CR_TAB
5831 "lsr %B0" CR_TAB
5832 "ror %A0" CR_TAB
5833 "ror __tmp_reg__" CR_TAB
5834 "lsr %B0" CR_TAB
5835 "ror %A0" CR_TAB
5836 "ror __tmp_reg__" CR_TAB
5837 "mov %B0,%A0" CR_TAB
5838 "mov %A0,__tmp_reg__");
5840 case 7:
5841 *len = 5;
5842 return ("lsr %B0" CR_TAB
5843 "mov %B0,%A0" CR_TAB
5844 "clr %A0" CR_TAB
5845 "ror %B0" CR_TAB
5846 "ror %A0");
/* Whole-byte shifts: move bytes instead of shifting.  */
5848 case 8:
5849 return *len = 2, ("mov %B0,%A1" CR_TAB
5850 "clr %A0");
5852 case 9:
5853 *len = 3;
5854 return ("mov %B0,%A0" CR_TAB
5855 "clr %A0" CR_TAB
5856 "lsl %B0");
5858 case 10:
5859 *len = 4;
5860 return ("mov %B0,%A0" CR_TAB
5861 "clr %A0" CR_TAB
5862 "lsl %B0" CR_TAB
5863 "lsl %B0");
5865 case 11:
5866 *len = 5;
5867 return ("mov %B0,%A0" CR_TAB
5868 "clr %A0" CR_TAB
5869 "lsl %B0" CR_TAB
5870 "lsl %B0" CR_TAB
5871 "lsl %B0");
5873 case 12:
5874 if (ldi_ok)
5876 *len = 4;
5877 return ("mov %B0,%A0" CR_TAB
5878 "clr %A0" CR_TAB
5879 "swap %B0" CR_TAB
5880 "andi %B0,0xf0");
5882 if (scratch)
5884 *len = 5;
5885 return ("mov %B0,%A0" CR_TAB
5886 "clr %A0" CR_TAB
5887 "swap %B0" CR_TAB
5888 "ldi %3,0xf0" CR_TAB
5889 "and %B0,%3");
5891 *len = 6;
5892 return ("mov %B0,%A0" CR_TAB
5893 "clr %A0" CR_TAB
5894 "lsl %B0" CR_TAB
5895 "lsl %B0" CR_TAB
5896 "lsl %B0" CR_TAB
5897 "lsl %B0");
5899 case 13:
5900 if (ldi_ok)
5902 *len = 5;
5903 return ("mov %B0,%A0" CR_TAB
5904 "clr %A0" CR_TAB
5905 "swap %B0" CR_TAB
5906 "lsl %B0" CR_TAB
5907 "andi %B0,0xe0");
5909 if (AVR_HAVE_MUL && scratch)
5911 *len = 5;
5912 return ("ldi %3,0x20" CR_TAB
5913 "mul %A0,%3" CR_TAB
5914 "mov %B0,r0" CR_TAB
5915 "clr %A0" CR_TAB
5916 "clr __zero_reg__");
5918 if (optimize_size && scratch)
5919 break; /* 5 */
5920 if (scratch)
5922 *len = 6;
5923 return ("mov %B0,%A0" CR_TAB
5924 "clr %A0" CR_TAB
5925 "swap %B0" CR_TAB
5926 "lsl %B0" CR_TAB
5927 "ldi %3,0xe0" CR_TAB
5928 "and %B0,%3");
5930 if (AVR_HAVE_MUL)
5932 *len = 6;
5933 return ("set" CR_TAB
5934 "bld r1,5" CR_TAB
5935 "mul %A0,r1" CR_TAB
5936 "mov %B0,r0" CR_TAB
5937 "clr %A0" CR_TAB
5938 "clr __zero_reg__");
5940 *len = 7;
5941 return ("mov %B0,%A0" CR_TAB
5942 "clr %A0" CR_TAB
5943 "lsl %B0" CR_TAB
5944 "lsl %B0" CR_TAB
5945 "lsl %B0" CR_TAB
5946 "lsl %B0" CR_TAB
5947 "lsl %B0");
5949 case 14:
5950 if (AVR_HAVE_MUL && ldi_ok)
5952 *len = 5;
5953 return ("ldi %B0,0x40" CR_TAB
5954 "mul %A0,%B0" CR_TAB
5955 "mov %B0,r0" CR_TAB
5956 "clr %A0" CR_TAB
5957 "clr __zero_reg__");
5959 if (AVR_HAVE_MUL && scratch)
5961 *len = 5;
5962 return ("ldi %3,0x40" CR_TAB
5963 "mul %A0,%3" CR_TAB
5964 "mov %B0,r0" CR_TAB
5965 "clr %A0" CR_TAB
5966 "clr __zero_reg__");
5968 if (optimize_size && ldi_ok)
5970 *len = 5;
5971 return ("mov %B0,%A0" CR_TAB
5972 "ldi %A0,6" "\n1:\t"
5973 "lsl %B0" CR_TAB
5974 "dec %A0" CR_TAB
5975 "brne 1b");
5977 if (optimize_size && scratch)
5978 break; /* 5 */
5979 *len = 6;
5980 return ("clr %B0" CR_TAB
5981 "lsr %A0" CR_TAB
5982 "ror %B0" CR_TAB
5983 "lsr %A0" CR_TAB
5984 "ror %B0" CR_TAB
5985 "clr %A0");
5987 case 15:
5988 *len = 4;
5989 return ("clr %B0" CR_TAB
5990 "lsr %A0" CR_TAB
5991 "ror %B0" CR_TAB
5992 "clr %A0");
/* Restore the caller's LEN pointer (may be NULL) before falling back
   to the generic loop emitter.  */
5994 len = t;
5996 out_shift_with_cnt ("lsl %A0" CR_TAB
5997 "rol %B0", insn, operands, len, 2);
5998 return "";
6002 /* 24-bit shift left */
/* OP as for out_shift_with_cnt; PLEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6004 const char*
6005 avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
6007 if (plen)
6008 *plen = 0;
6010 if (CONST_INT_P (op[2]))
6012 switch (INTVAL (op[2]))
6014 default:
6015 if (INTVAL (op[2]) < 24)
6016 break;
/* Shift count >= width: result is zero.  */
6018 return avr_asm_len ("clr %A0" CR_TAB
6019 "clr %B0" CR_TAB
6020 "clr %C0", op, plen, 3);
6022 case 8:
6024 int reg0 = REGNO (op[0]);
6025 int reg1 = REGNO (op[1]);
/* Copy order depends on source/destination overlap direction.  */
6027 if (reg0 >= reg1)
6028 return avr_asm_len ("mov %C0,%B1" CR_TAB
6029 "mov %B0,%A1" CR_TAB
6030 "clr %A0", op, plen, 3);
6031 else
6032 return avr_asm_len ("clr %A0" CR_TAB
6033 "mov %B0,%A1" CR_TAB
6034 "mov %C0,%B1", op, plen, 3);
6037 case 16:
6039 int reg0 = REGNO (op[0]);
6040 int reg1 = REGNO (op[1]);
6042 if (reg0 + 2 != reg1)
6043 avr_asm_len ("mov %C0,%A0", op, plen, 1);
6045 return avr_asm_len ("clr %B0" CR_TAB
6046 "clr %A0", op, plen, 2);
6049 case 23:
/* Only the low bit survives, moved to the sign bit via carry.  */
6050 return avr_asm_len ("clr %C0" CR_TAB
6051 "lsr %A0" CR_TAB
6052 "ror %C0" CR_TAB
6053 "clr %B0" CR_TAB
6054 "clr %A0", op, plen, 5);
6058 out_shift_with_cnt ("lsl %A0" CR_TAB
6059 "rol %B0" CR_TAB
6060 "rol %C0", insn, op, plen, 3);
6061 return "";
6065 /* 32bit shift left ((long)x << i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6067 const char *
6068 ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
6070 if (GET_CODE (operands[2]) == CONST_INT)
6072 int k;
6073 int *t = len;
6075 if (!len)
6076 len = &k;
6078 switch (INTVAL (operands[2]))
6080 default:
6081 if (INTVAL (operands[2]) < 32)
6082 break;
/* Shift count >= width: result is zero.  */
6084 if (AVR_HAVE_MOVW)
6085 return *len = 3, ("clr %D0" CR_TAB
6086 "clr %C0" CR_TAB
6087 "movw %A0,%C0");
6088 *len = 4;
6089 return ("clr %D0" CR_TAB
6090 "clr %C0" CR_TAB
6091 "clr %B0" CR_TAB
6092 "clr %A0");
/* Whole-byte shifts: move bytes; copy order depends on overlap.  */
6094 case 8:
6096 int reg0 = true_regnum (operands[0]);
6097 int reg1 = true_regnum (operands[1]);
6098 *len = 4;
6099 if (reg0 >= reg1)
6100 return ("mov %D0,%C1" CR_TAB
6101 "mov %C0,%B1" CR_TAB
6102 "mov %B0,%A1" CR_TAB
6103 "clr %A0");
6104 else
6105 return ("clr %A0" CR_TAB
6106 "mov %B0,%A1" CR_TAB
6107 "mov %C0,%B1" CR_TAB
6108 "mov %D0,%C1");
6111 case 16:
6113 int reg0 = true_regnum (operands[0]);
6114 int reg1 = true_regnum (operands[1]);
6115 if (reg0 + 2 == reg1)
6116 return *len = 2, ("clr %B0" CR_TAB
6117 "clr %A0");
6118 if (AVR_HAVE_MOVW)
6119 return *len = 3, ("movw %C0,%A1" CR_TAB
6120 "clr %B0" CR_TAB
6121 "clr %A0");
6122 else
6123 return *len = 4, ("mov %C0,%A1" CR_TAB
6124 "mov %D0,%B1" CR_TAB
6125 "clr %B0" CR_TAB
6126 "clr %A0");
6129 case 24:
6130 *len = 4;
6131 return ("mov %D0,%A1" CR_TAB
6132 "clr %C0" CR_TAB
6133 "clr %B0" CR_TAB
6134 "clr %A0");
6136 case 31:
/* Only the low bit survives, moved to the sign bit via carry.  */
6137 *len = 6;
6138 return ("clr %D0" CR_TAB
6139 "lsr %A0" CR_TAB
6140 "ror %D0" CR_TAB
6141 "clr %C0" CR_TAB
6142 "clr %B0" CR_TAB
6143 "clr %A0");
/* Restore the caller's LEN pointer (may be NULL).  */
6145 len = t;
6147 out_shift_with_cnt ("lsl %A0" CR_TAB
6148 "rol %B0" CR_TAB
6149 "rol %C0" CR_TAB
6150 "rol %D0", insn, operands, len, 4);
6151 return "";
6154 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6156 const char *
6157 ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
6159 if (GET_CODE (operands[2]) == CONST_INT)
6161 int k;
6163 if (!len)
6164 len = &k;
6166 switch (INTVAL (operands[2]))
6168 case 1:
6169 *len = 1;
6170 return "asr %0";
6172 case 2:
6173 *len = 2;
6174 return ("asr %0" CR_TAB
6175 "asr %0");
6177 case 3:
6178 *len = 3;
6179 return ("asr %0" CR_TAB
6180 "asr %0" CR_TAB
6181 "asr %0");
6183 case 4:
6184 *len = 4;
6185 return ("asr %0" CR_TAB
6186 "asr %0" CR_TAB
6187 "asr %0" CR_TAB
6188 "asr %0");
6190 case 5:
6191 *len = 5;
6192 return ("asr %0" CR_TAB
6193 "asr %0" CR_TAB
6194 "asr %0" CR_TAB
6195 "asr %0" CR_TAB
6196 "asr %0");
6198 case 6:
/* Keep bit 6, then smear the sign across the byte.  */
6199 *len = 4;
6200 return ("bst %0,6" CR_TAB
6201 "lsl %0" CR_TAB
6202 "sbc %0,%0" CR_TAB
6203 "bld %0,0");
6205 default:
6206 if (INTVAL (operands[2]) < 8)
6207 break;
6209 /* fall through */
/* Count >= 7: all result bits are copies of the sign bit.  */
6211 case 7:
6212 *len = 2;
6213 return ("lsl %0" CR_TAB
6214 "sbc %0,%0");
6217 else if (CONSTANT_P (operands[2]))
6218 fatal_insn ("internal compiler error. Incorrect shift:", insn);
6220 out_shift_with_cnt ("asr %0",
6221 insn, operands, len, 1);
6222 return "";
6226 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6228 const char *
6229 ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6231 if (GET_CODE (operands[2]) == CONST_INT)
6233 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6234 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6235 int k;
6236 int *t = len;
6238 if (!len)
6239 len = &k;
6241 switch (INTVAL (operands[2]))
6243 case 4:
6244 case 5:
6245 /* XXX try to optimize this too? */
6246 break;
6248 case 6:
6249 if (optimize_size)
6250 break; /* scratch ? 5 : 6 */
6251 *len = 8;
6252 return ("mov __tmp_reg__,%A0" CR_TAB
6253 "mov %A0,%B0" CR_TAB
6254 "lsl __tmp_reg__" CR_TAB
6255 "rol %A0" CR_TAB
6256 "sbc %B0,%B0" CR_TAB
6257 "lsl __tmp_reg__" CR_TAB
6258 "rol %A0" CR_TAB
6259 "rol %B0");
6261 case 7:
6262 *len = 4;
6263 return ("lsl %A0" CR_TAB
6264 "mov %A0,%B0" CR_TAB
6265 "rol %A0" CR_TAB
6266 "sbc %B0,%B0");
/* Whole-byte shift: move the high byte down and sign-extend.  */
6268 case 8:
6270 int reg0 = true_regnum (operands[0]);
6271 int reg1 = true_regnum (operands[1]);
6273 if (reg0 == reg1)
6274 return *len = 3, ("mov %A0,%B0" CR_TAB
6275 "lsl %B0" CR_TAB
6276 "sbc %B0,%B0");
6277 else
6278 return *len = 4, ("mov %A0,%B1" CR_TAB
6279 "clr %B0" CR_TAB
6280 "sbrc %A0,7" CR_TAB
6281 "dec %B0");
6284 case 9:
6285 *len = 4;
6286 return ("mov %A0,%B0" CR_TAB
6287 "lsl %B0" CR_TAB
6288 "sbc %B0,%B0" CR_TAB
6289 "asr %A0");
6291 case 10:
6292 *len = 5;
6293 return ("mov %A0,%B0" CR_TAB
6294 "lsl %B0" CR_TAB
6295 "sbc %B0,%B0" CR_TAB
6296 "asr %A0" CR_TAB
6297 "asr %A0");
6299 case 11:
6300 if (AVR_HAVE_MUL && ldi_ok)
6302 *len = 5;
6303 return ("ldi %A0,0x20" CR_TAB
6304 "muls %B0,%A0" CR_TAB
6305 "mov %A0,r1" CR_TAB
6306 "sbc %B0,%B0" CR_TAB
6307 "clr __zero_reg__");
6309 if (optimize_size && scratch)
6310 break; /* 5 */
6311 *len = 6;
6312 return ("mov %A0,%B0" CR_TAB
6313 "lsl %B0" CR_TAB
6314 "sbc %B0,%B0" CR_TAB
6315 "asr %A0" CR_TAB
6316 "asr %A0" CR_TAB
6317 "asr %A0");
6319 case 12:
6320 if (AVR_HAVE_MUL && ldi_ok)
6322 *len = 5;
6323 return ("ldi %A0,0x10" CR_TAB
6324 "muls %B0,%A0" CR_TAB
6325 "mov %A0,r1" CR_TAB
6326 "sbc %B0,%B0" CR_TAB
6327 "clr __zero_reg__");
6329 if (optimize_size && scratch)
6330 break; /* 5 */
6331 *len = 7;
6332 return ("mov %A0,%B0" CR_TAB
6333 "lsl %B0" CR_TAB
6334 "sbc %B0,%B0" CR_TAB
6335 "asr %A0" CR_TAB
6336 "asr %A0" CR_TAB
6337 "asr %A0" CR_TAB
6338 "asr %A0");
6340 case 13:
6341 if (AVR_HAVE_MUL && ldi_ok)
6343 *len = 5;
6344 return ("ldi %A0,0x08" CR_TAB
6345 "muls %B0,%A0" CR_TAB
6346 "mov %A0,r1" CR_TAB
6347 "sbc %B0,%B0" CR_TAB
6348 "clr __zero_reg__");
6350 if (optimize_size)
6351 break; /* scratch ? 5 : 7 */
6352 *len = 8;
6353 return ("mov %A0,%B0" CR_TAB
6354 "lsl %B0" CR_TAB
6355 "sbc %B0,%B0" CR_TAB
6356 "asr %A0" CR_TAB
6357 "asr %A0" CR_TAB
6358 "asr %A0" CR_TAB
6359 "asr %A0" CR_TAB
6360 "asr %A0");
6362 case 14:
6363 *len = 5;
6364 return ("lsl %B0" CR_TAB
6365 "sbc %A0,%A0" CR_TAB
6366 "lsl %B0" CR_TAB
6367 "mov %B0,%A0" CR_TAB
6368 "rol %A0");
6370 default:
6371 if (INTVAL (operands[2]) < 16)
6372 break;
6374 /* fall through */
/* Count >= 15: all result bits are copies of the sign bit.  */
6376 case 15:
6377 return *len = 3, ("lsl %B0" CR_TAB
6378 "sbc %A0,%A0" CR_TAB
6379 "mov %B0,%A0");
/* Restore the caller's LEN pointer (may be NULL).  */
6381 len = t;
6383 out_shift_with_cnt ("asr %B0" CR_TAB
6384 "ror %A0", insn, operands, len, 2);
6385 return "";
6389 /* 24-bit arithmetic shift right */
/* OP as for out_shift_with_cnt; PLEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6391 const char*
6392 avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6394 int dest = REGNO (op[0]);
6395 int src = REGNO (op[1]);
6397 if (CONST_INT_P (op[2]))
6399 if (plen)
6400 *plen = 0;
6402 switch (INTVAL (op[2]))
6404 case 8:
/* Copy order depends on source/destination overlap direction.  */
6405 if (dest <= src)
6406 return avr_asm_len ("mov %A0,%B1" CR_TAB
6407 "mov %B0,%C1" CR_TAB
6408 "clr %C0" CR_TAB
6409 "sbrc %B0,7" CR_TAB
6410 "dec %C0", op, plen, 5);
6411 else
6412 return avr_asm_len ("clr %C0" CR_TAB
6413 "sbrc %C1,7" CR_TAB
6414 "dec %C0" CR_TAB
6415 "mov %B0,%C1" CR_TAB
6416 "mov %A0,%B1", op, plen, 5);
6418 case 16:
6419 if (dest != src + 2)
6420 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6422 return avr_asm_len ("clr %B0" CR_TAB
6423 "sbrc %A0,7" CR_TAB
6424 "com %B0" CR_TAB
6425 "mov %C0,%B0", op, plen, 4);
6427 default:
6428 if (INTVAL (op[2]) < 24)
6429 break;
6431 /* fall through */
/* Count >= 23: all result bits are copies of the sign bit.  */
6433 case 23:
6434 return avr_asm_len ("lsl %C0" CR_TAB
6435 "sbc %A0,%A0" CR_TAB
6436 "mov %B0,%A0" CR_TAB
6437 "mov %C0,%A0", op, plen, 4);
6438 } /* switch */
6441 out_shift_with_cnt ("asr %C0" CR_TAB
6442 "ror %B0" CR_TAB
6443 "ror %A0", insn, op, plen, 3);
6444 return "";
6448 /* 32-bit arithmetic shift right ((signed long)x >> i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6450 const char *
6451 ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6453 if (GET_CODE (operands[2]) == CONST_INT)
6455 int k;
6456 int *t = len;
6458 if (!len)
6459 len = &k;
6461 switch (INTVAL (operands[2]))
/* Whole-byte shifts: move bytes and sign-extend the top.  Copy order
   depends on source/destination overlap direction.  */
6463 case 8:
6465 int reg0 = true_regnum (operands[0]);
6466 int reg1 = true_regnum (operands[1]);
6467 *len=6;
6468 if (reg0 <= reg1)
6469 return ("mov %A0,%B1" CR_TAB
6470 "mov %B0,%C1" CR_TAB
6471 "mov %C0,%D1" CR_TAB
6472 "clr %D0" CR_TAB
6473 "sbrc %C0,7" CR_TAB
6474 "dec %D0");
6475 else
6476 return ("clr %D0" CR_TAB
6477 "sbrc %D1,7" CR_TAB
6478 "dec %D0" CR_TAB
6479 "mov %C0,%D1" CR_TAB
6480 "mov %B0,%C1" CR_TAB
6481 "mov %A0,%B1");
6484 case 16:
6486 int reg0 = true_regnum (operands[0]);
6487 int reg1 = true_regnum (operands[1]);
6489 if (reg0 == reg1 + 2)
6490 return *len = 4, ("clr %D0" CR_TAB
6491 "sbrc %B0,7" CR_TAB
6492 "com %D0" CR_TAB
6493 "mov %C0,%D0");
6494 if (AVR_HAVE_MOVW)
6495 return *len = 5, ("movw %A0,%C1" CR_TAB
6496 "clr %D0" CR_TAB
6497 "sbrc %B0,7" CR_TAB
6498 "com %D0" CR_TAB
6499 "mov %C0,%D0");
6500 else
6501 return *len = 6, ("mov %B0,%D1" CR_TAB
6502 "mov %A0,%C1" CR_TAB
6503 "clr %D0" CR_TAB
6504 "sbrc %B0,7" CR_TAB
6505 "com %D0" CR_TAB
6506 "mov %C0,%D0");
6509 case 24:
6510 return *len = 6, ("mov %A0,%D1" CR_TAB
6511 "clr %D0" CR_TAB
6512 "sbrc %A0,7" CR_TAB
6513 "com %D0" CR_TAB
6514 "mov %B0,%D0" CR_TAB
6515 "mov %C0,%D0");
6517 default:
6518 if (INTVAL (operands[2]) < 32)
6519 break;
6521 /* fall through */
/* Count >= 31: all result bits are copies of the sign bit.  */
6523 case 31:
6524 if (AVR_HAVE_MOVW)
6525 return *len = 4, ("lsl %D0" CR_TAB
6526 "sbc %A0,%A0" CR_TAB
6527 "mov %B0,%A0" CR_TAB
6528 "movw %C0,%A0");
6529 else
6530 return *len = 5, ("lsl %D0" CR_TAB
6531 "sbc %A0,%A0" CR_TAB
6532 "mov %B0,%A0" CR_TAB
6533 "mov %C0,%A0" CR_TAB
6534 "mov %D0,%A0");
/* Restore the caller's LEN pointer (may be NULL).  */
6536 len = t;
6538 out_shift_with_cnt ("asr %D0" CR_TAB
6539 "ror %C0" CR_TAB
6540 "ror %B0" CR_TAB
6541 "ror %A0", insn, operands, len, 4);
6542 return "";
6545 /* 8-bit logic shift right ((unsigned char)x >> i) */
/* OPERANDS as for out_shift_with_cnt; LEN, when non-NULL, receives the
   sequence length in words.  Returns the asm template or "".  */
6547 const char *
6548 lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
6550 if (GET_CODE (operands[2]) == CONST_INT)
6552 int k;
6554 if (!len)
6555 len = &k;
6557 switch (INTVAL (operands[2]))
6559 default:
6560 if (INTVAL (operands[2]) < 8)
6561 break;
/* Shift count >= width: result is zero.  */
6563 *len = 1;
6564 return "clr %0";
6566 case 1:
6567 *len = 1;
6568 return "lsr %0";
6570 case 2:
6571 *len = 2;
6572 return ("lsr %0" CR_TAB
6573 "lsr %0");
6574 case 3:
6575 *len = 3;
6576 return ("lsr %0" CR_TAB
6577 "lsr %0" CR_TAB
6578 "lsr %0");
/* Counts 4..6: SWAP+ANDI is shorter when an upper register allows
   the immediate AND.  */
6580 case 4:
6581 if (test_hard_reg_class (LD_REGS, operands[0]))
6583 *len=2;
6584 return ("swap %0" CR_TAB
6585 "andi %0,0x0f");
6587 *len = 4;
6588 return ("lsr %0" CR_TAB
6589 "lsr %0" CR_TAB
6590 "lsr %0" CR_TAB
6591 "lsr %0");
6593 case 5:
6594 if (test_hard_reg_class (LD_REGS, operands[0]))
6596 *len = 3;
6597 return ("swap %0" CR_TAB
6598 "lsr %0" CR_TAB
6599 "andi %0,0x7");
6601 *len = 5;
6602 return ("lsr %0" CR_TAB
6603 "lsr %0" CR_TAB
6604 "lsr %0" CR_TAB
6605 "lsr %0" CR_TAB
6606 "lsr %0");
6608 case 6:
6609 if (test_hard_reg_class (LD_REGS, operands[0]))
6611 *len = 4;
6612 return ("swap %0" CR_TAB
6613 "lsr %0" CR_TAB
6614 "lsr %0" CR_TAB
6615 "andi %0,0x3");
6617 *len = 6;
6618 return ("lsr %0" CR_TAB
6619 "lsr %0" CR_TAB
6620 "lsr %0" CR_TAB
6621 "lsr %0" CR_TAB
6622 "lsr %0" CR_TAB
6623 "lsr %0");
6625 case 7:
6626 *len = 3;
6627 return ("rol %0" CR_TAB
6628 "clr %0" CR_TAB
6629 "rol %0");
6632 else if (CONSTANT_P (operands[2]))
6633 fatal_insn ("internal compiler error. Incorrect shift:", insn);
6635 out_shift_with_cnt ("lsr %0",
6636 insn, operands, len, 1);
6637 return "";
6640 /* 16-bit logic shift right ((unsigned short)x >> i)

   Output asm for an HImode logical shift right.  If LEN is non-NULL,
   only store the sequence length (words) in *LEN.  SCRATCH notes
   whether the insn pattern supplies a clobber register %3; LDI_OK
   notes whether operand 0 is in LD_REGS (r16..r31, usable with
   immediate instructions).  Returns the asm template or "".  */
6642 const char *
6643 lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6645 if (GET_CODE (operands[2]) == CONST_INT)
6647 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6648 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6649 int k;
     /* Remember the caller's LEN so it can be restored before falling
        back to out_shift_with_cnt below.  */
6650 int *t = len;
6652 if (!len)
6653 len = &k;
6655 switch (INTVAL (operands[2]))
6657 default:
6658 if (INTVAL (operands[2]) < 16)
6659 break;
     /* Counts >= 16 clear both bytes.  */
6661 *len = 2;
6662 return ("clr %B0" CR_TAB
6663 "clr %A0");
     /* Count 4: nibble-swap both bytes and fix up with masks; the
        numeric comments on the breaks give the length of the generic
        fallback they defer to.  */
6665 case 4:
6666 if (optimize_size && scratch)
6667 break; /* 5 */
6668 if (ldi_ok)
6670 *len = 6;
6671 return ("swap %B0" CR_TAB
6672 "swap %A0" CR_TAB
6673 "andi %A0,0x0f" CR_TAB
6674 "eor %A0,%B0" CR_TAB
6675 "andi %B0,0x0f" CR_TAB
6676 "eor %A0,%B0");
6678 if (scratch)
6680 *len = 7;
6681 return ("swap %B0" CR_TAB
6682 "swap %A0" CR_TAB
6683 "ldi %3,0x0f" CR_TAB
6684 "and %A0,%3" CR_TAB
6685 "eor %A0,%B0" CR_TAB
6686 "and %B0,%3" CR_TAB
6687 "eor %A0,%B0");
6689 break; /* optimize_size ? 6 : 8 */
6691 case 5:
6692 if (optimize_size)
6693 break; /* scratch ? 5 : 6 */
6694 if (ldi_ok)
6696 *len = 8;
6697 return ("lsr %B0" CR_TAB
6698 "ror %A0" CR_TAB
6699 "swap %B0" CR_TAB
6700 "swap %A0" CR_TAB
6701 "andi %A0,0x0f" CR_TAB
6702 "eor %A0,%B0" CR_TAB
6703 "andi %B0,0x0f" CR_TAB
6704 "eor %A0,%B0");
6706 if (scratch)
6708 *len = 9;
6709 return ("lsr %B0" CR_TAB
6710 "ror %A0" CR_TAB
6711 "swap %B0" CR_TAB
6712 "swap %A0" CR_TAB
6713 "ldi %3,0x0f" CR_TAB
6714 "and %A0,%3" CR_TAB
6715 "eor %A0,%B0" CR_TAB
6716 "and %B0,%3" CR_TAB
6717 "eor %A0,%B0");
6719 break; /* 10 */
     /* Count 6: shift two bits left into __tmp_reg__, then the high
        byte of the result is in __tmp_reg__ (x >> 6 == x << 2 >> 8).  */
6721 case 6:
6722 if (optimize_size)
6723 break; /* scratch ? 5 : 6 */
6724 *len = 9;
6725 return ("clr __tmp_reg__" CR_TAB
6726 "lsl %A0" CR_TAB
6727 "rol %B0" CR_TAB
6728 "rol __tmp_reg__" CR_TAB
6729 "lsl %A0" CR_TAB
6730 "rol %B0" CR_TAB
6731 "rol __tmp_reg__" CR_TAB
6732 "mov %A0,%B0" CR_TAB
6733 "mov %B0,__tmp_reg__");
6735 case 7:
6736 *len = 5;
6737 return ("lsl %A0" CR_TAB
6738 "mov %A0,%B0" CR_TAB
6739 "rol %A0" CR_TAB
6740 "sbc %B0,%B0" CR_TAB
6741 "neg %B0");
     /* Counts 8..15: move the high byte down and shift the remainder.  */
6743 case 8:
6744 return *len = 2, ("mov %A0,%B1" CR_TAB
6745 "clr %B0");
6747 case 9:
6748 *len = 3;
6749 return ("mov %A0,%B0" CR_TAB
6750 "clr %B0" CR_TAB
6751 "lsr %A0");
6753 case 10:
6754 *len = 4;
6755 return ("mov %A0,%B0" CR_TAB
6756 "clr %B0" CR_TAB
6757 "lsr %A0" CR_TAB
6758 "lsr %A0");
6760 case 11:
6761 *len = 5;
6762 return ("mov %A0,%B0" CR_TAB
6763 "clr %B0" CR_TAB
6764 "lsr %A0" CR_TAB
6765 "lsr %A0" CR_TAB
6766 "lsr %A0");
6768 case 12:
6769 if (ldi_ok)
6771 *len = 4;
6772 return ("mov %A0,%B0" CR_TAB
6773 "clr %B0" CR_TAB
6774 "swap %A0" CR_TAB
6775 "andi %A0,0x0f");
6777 if (scratch)
6779 *len = 5;
6780 return ("mov %A0,%B0" CR_TAB
6781 "clr %B0" CR_TAB
6782 "swap %A0" CR_TAB
6783 "ldi %3,0x0f" CR_TAB
6784 "and %A0,%3");
6786 *len = 6;
6787 return ("mov %A0,%B0" CR_TAB
6788 "clr %B0" CR_TAB
6789 "lsr %A0" CR_TAB
6790 "lsr %A0" CR_TAB
6791 "lsr %A0" CR_TAB
6792 "lsr %A0");
6794 case 13:
6795 if (ldi_ok)
6797 *len = 5;
6798 return ("mov %A0,%B0" CR_TAB
6799 "clr %B0" CR_TAB
6800 "swap %A0" CR_TAB
6801 "lsr %A0" CR_TAB
6802 "andi %A0,0x07");
     /* With a hardware multiplier, x >> 13 == high byte of x * 8.
        MUL clobbers r1 (__zero_reg__), hence the trailing clr.  */
6804 if (AVR_HAVE_MUL && scratch)
6806 *len = 5;
6807 return ("ldi %3,0x08" CR_TAB
6808 "mul %B0,%3" CR_TAB
6809 "mov %A0,r1" CR_TAB
6810 "clr %B0" CR_TAB
6811 "clr __zero_reg__");
6813 if (optimize_size && scratch)
6814 break; /* 5 */
6815 if (scratch)
6817 *len = 6;
6818 return ("mov %A0,%B0" CR_TAB
6819 "clr %B0" CR_TAB
6820 "swap %A0" CR_TAB
6821 "lsr %A0" CR_TAB
6822 "ldi %3,0x07" CR_TAB
6823 "and %A0,%3");
6825 if (AVR_HAVE_MUL)
6827 *len = 6;
     /* Build the constant 8 in r1 via SET/BLD to avoid needing an
        immediate-capable register.  */
6828 return ("set" CR_TAB
6829 "bld r1,3" CR_TAB
6830 "mul %B0,r1" CR_TAB
6831 "mov %A0,r1" CR_TAB
6832 "clr %B0" CR_TAB
6833 "clr __zero_reg__");
6835 *len = 7;
6836 return ("mov %A0,%B0" CR_TAB
6837 "clr %B0" CR_TAB
6838 "lsr %A0" CR_TAB
6839 "lsr %A0" CR_TAB
6840 "lsr %A0" CR_TAB
6841 "lsr %A0" CR_TAB
6842 "lsr %A0");
6844 case 14:
6845 if (AVR_HAVE_MUL && ldi_ok)
6847 *len = 5;
6848 return ("ldi %A0,0x04" CR_TAB
6849 "mul %B0,%A0" CR_TAB
6850 "mov %A0,r1" CR_TAB
6851 "clr %B0" CR_TAB
6852 "clr __zero_reg__");
6854 if (AVR_HAVE_MUL && scratch)
6856 *len = 5;
6857 return ("ldi %3,0x04" CR_TAB
6858 "mul %B0,%3" CR_TAB
6859 "mov %A0,r1" CR_TAB
6860 "clr %B0" CR_TAB
6861 "clr __zero_reg__");
     /* Size-optimized: a 6-iteration local loop ("1:" / "brne 1b").  */
6863 if (optimize_size && ldi_ok)
6865 *len = 5;
6866 return ("mov %A0,%B0" CR_TAB
6867 "ldi %B0,6" "\n1:\t"
6868 "lsr %A0" CR_TAB
6869 "dec %B0" CR_TAB
6870 "brne 1b");
6872 if (optimize_size && scratch)
6873 break; /* 5 */
     /* x >> 14: shift the top two bits left out of %B0 into %A0.  */
6874 *len = 6;
6875 return ("clr %A0" CR_TAB
6876 "lsl %B0" CR_TAB
6877 "rol %A0" CR_TAB
6878 "lsl %B0" CR_TAB
6879 "rol %A0" CR_TAB
6880 "clr %B0");
6882 case 15:
6883 *len = 4;
6884 return ("clr %A0" CR_TAB
6885 "lsl %B0" CR_TAB
6886 "rol %A0" CR_TAB
6887 "clr %B0");
     /* Restore the caller's LEN (possibly NULL) for the generic path.  */
6889 len = t;
6891 out_shift_with_cnt ("lsr %B0" CR_TAB
6892 "ror %A0", insn, operands, len, 2);
6893 return "";
6897 /* 24-bit logic shift right

   Output asm for a PSImode (3-byte) logical shift right of OP[1] by
   OP[2] into OP[0].  If PLEN is non-NULL, set *PLEN to the length of
   the sequence in words instead of printing it.  Returns "".  */
6899 const char*
6900 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6902 int dest = REGNO (op[0]);
6903 int src = REGNO (op[1]);
6905 if (CONST_INT_P (op[2]))
6907 if (plen)
6908 *plen = 0;
6910 switch (INTVAL (op[2]))
     /* Count 8: byte-wise copy; the copy order depends on how source
        and destination registers overlap so no byte is clobbered
        before it is read.  */
6912 case 8:
6913 if (dest <= src)
6914 return avr_asm_len ("mov %A0,%B1" CR_TAB
6915 "mov %B0,%C1" CR_TAB
6916 "clr %C0", op, plen, 3);
6917 else
6918 return avr_asm_len ("clr %C0" CR_TAB
6919 "mov %B0,%C1" CR_TAB
6920 "mov %A0,%B1", op, plen, 3);
     /* Count 16: the copy is a no-op if %A0 already is %C1.  */
6922 case 16:
6923 if (dest != src + 2)
6924 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6926 return avr_asm_len ("clr %B0" CR_TAB
6927 "clr %C0", op, plen, 2);
6929 default:
6930 if (INTVAL (op[2]) < 24)
6931 break;
6933 /* fall through */
     /* Count 23 (or >= 24 after clearing): result is just the MSB.  */
6935 case 23:
6936 return avr_asm_len ("clr %A0" CR_TAB
6937 "sbrc %C0,7" CR_TAB
6938 "inc %A0" CR_TAB
6939 "clr %B0" CR_TAB
6940 "clr %C0", op, plen, 5);
6941 } /* switch */
     /* Generic path: shift loop over all three bytes.  */
6944 out_shift_with_cnt ("lsr %C0" CR_TAB
6945 "ror %B0" CR_TAB
6946 "ror %A0", insn, op, plen, 3);
6947 return "";
6951 /* 32-bit logic shift right ((unsigned int)x >> i)

   Output asm for an SImode logical shift right.  If LEN is non-NULL,
   only store the sequence length (words) in *LEN.  Returns the asm
   template string, or "" when out_shift_with_cnt emitted the code.  */
6953 const char *
6954 lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6956 if (GET_CODE (operands[2]) == CONST_INT)
6958 int k;
     /* Keep the caller's LEN so the generic fallback sees it.  */
6959 int *t = len;
6961 if (!len)
6962 len = &k;
6964 switch (INTVAL (operands[2]))
6966 default:
6967 if (INTVAL (operands[2]) < 32)
6968 break;
     /* Counts >= 32 clear all four bytes; MOVW clears two at once.  */
6970 if (AVR_HAVE_MOVW)
6971 return *len = 3, ("clr %D0" CR_TAB
6972 "clr %C0" CR_TAB
6973 "movw %A0,%C0");
6974 *len = 4;
6975 return ("clr %D0" CR_TAB
6976 "clr %C0" CR_TAB
6977 "clr %B0" CR_TAB
6978 "clr %A0");
     /* Count 8: byte-wise move; direction chosen so overlapping
        registers are not clobbered before being read.  */
6980 case 8:
6982 int reg0 = true_regnum (operands[0]);
6983 int reg1 = true_regnum (operands[1]);
6984 *len = 4;
6985 if (reg0 <= reg1)
6986 return ("mov %A0,%B1" CR_TAB
6987 "mov %B0,%C1" CR_TAB
6988 "mov %C0,%D1" CR_TAB
6989 "clr %D0");
6990 else
6991 return ("clr %D0" CR_TAB
6992 "mov %C0,%D1" CR_TAB
6993 "mov %B0,%C1" CR_TAB
6994 "mov %A0,%B1");
6997 case 16:
6999 int reg0 = true_regnum (operands[0]);
7000 int reg1 = true_regnum (operands[1]);
     /* The low word of the result already sits in place.  */
7002 if (reg0 == reg1 + 2)
7003 return *len = 2, ("clr %C0" CR_TAB
7004 "clr %D0");
7005 if (AVR_HAVE_MOVW)
7006 return *len = 3, ("movw %A0,%C1" CR_TAB
7007 "clr %C0" CR_TAB
7008 "clr %D0");
7009 else
7010 return *len = 4, ("mov %B0,%D1" CR_TAB
7011 "mov %A0,%C1" CR_TAB
7012 "clr %C0" CR_TAB
7013 "clr %D0");
7016 case 24:
7017 return *len = 4, ("mov %A0,%D1" CR_TAB
7018 "clr %B0" CR_TAB
7019 "clr %C0" CR_TAB
7020 "clr %D0");
     /* Count 31: the result is 0 or 1 depending on the sign bit.  */
7022 case 31:
7023 *len = 6;
7024 return ("clr %A0" CR_TAB
7025 "sbrc %D0,7" CR_TAB
7026 "inc %A0" CR_TAB
7027 "clr %B0" CR_TAB
7028 "clr %C0" CR_TAB
7029 "clr %D0");
7031 len = t;
7033 out_shift_with_cnt ("lsr %D0" CR_TAB
7034 "ror %C0" CR_TAB
7035 "ror %B0" CR_TAB
7036 "ror %A0", insn, operands, len, 4);
7037 return "";
7041 /* Output addition of register XOP[0] and compile time constant XOP[2].
7042 CODE == PLUS: perform addition by using ADD instructions or
7043 CODE == MINUS: perform addition by using SUB instructions:
7045 XOP[0] = XOP[0] + XOP[2]
7047 Or perform addition/subtraction with register XOP[2] depending on CODE:
7049 XOP[0] = XOP[0] +/- XOP[2]
7051 If PLEN == NULL, print assembler instructions to perform the operation;
7052 otherwise, set *PLEN to the length of the instruction sequence (in words)
7053 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7054 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7056 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7057 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7058 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7059 the subtrahend in the original insn, provided it is a compile time constant.
7060 In all other cases, SIGN is 0.
7062 If OUT_LABEL is true, print the final 0: label which is needed for
7063 saturated addition / subtraction. The only case where OUT_LABEL = false
7064 is useful is for saturated addition / subtraction performed during
7065 fixed-point rounding, cf. `avr_out_round'. */
7067 static void
7068 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
7069 enum rtx_code code_sat, int sign, bool out_label)
7071 /* MODE of the operation. */
7072 machine_mode mode = GET_MODE (xop[0]);
7074 /* INT_MODE of the same size. */
7075 machine_mode imode = int_mode_for_mode (mode);
7077 /* Number of bytes to operate on. */
7078 int i, n_bytes = GET_MODE_SIZE (mode);
7080 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7081 int clobber_val = -1;
7083 /* op[0]: 8-bit destination register
7084 op[1]: 8-bit const int
7085 op[2]: 8-bit scratch register */
7086 rtx op[3];
7088 /* Started the operation? Before starting the operation we may skip
7089 adding 0. This is no more true after the operation started because
7090 carry must be taken into account. */
7091 bool started = false;
7093 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
7094 rtx xval = xop[2];
7096 /* Output a BRVC instruction. Only needed with saturation. */
7097 bool out_brvc = true;
7099 if (plen)
7100 *plen = 0;
     /* Register + register: a straight byte-wise ADD/ADC (or SUB/SBC)
        chain; only the saturation tail below can follow.  */
7102 if (REG_P (xop[2]))
7104 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;
7106 for (i = 0; i < n_bytes; i++)
7108 /* We operate byte-wise on the destination. */
7109 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
7110 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
7112 if (i == 0)
7113 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
7114 op, plen, 1);
7115 else
7116 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
7117 op, plen, 1);
     /* x - x and x + x with identical registers: subtraction is done
        (result 0); addition still needs the saturation tail.  */
7120 if (reg_overlap_mentioned_p (xop[0], xop[2]))
7122 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
7124 if (MINUS == code)
7125 return;
7128 goto saturate;
7131 /* Except in the case of ADIW with 16-bit register (see below)
7132 addition does not set cc0 in a usable way. */
7134 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
     /* Fixed-point constants are handled via their integer image.  */
7136 if (CONST_FIXED_P (xval))
7137 xval = avr_to_int_mode (xval);
7139 /* Adding/Subtracting zero is a no-op. */
7141 if (xval == const0_rtx)
7143 *pcc = CC_NONE;
7144 return;
     /* Canonicalize to addition: R -= VAL becomes R += -VAL.  */
7147 if (MINUS == code)
7148 xval = simplify_unary_operation (NEG, imode, xval, imode);
7150 op[2] = xop[3];
7152 if (SS_PLUS == code_sat && MINUS == code
7153 && sign < 0
7154 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
7155 & GET_MODE_MASK (QImode)))
7157 /* We compute x + 0x80 by means of SUB instructions. We negated the
7158 constant subtrahend above and are left with x - (-128) so that we
7159 need something like SUBI r,128 which does not exist because SUBI sets
7160 V according to the sign of the subtrahend. Notice the only case
7161 where this must be done is when NEG overflowed in case [2s] because
7162 the V computation needs the right sign of the subtrahend. */
7164 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7166 avr_asm_len ("subi %0,128" CR_TAB
7167 "brmi 0f", &msb, plen, 2);
7168 out_brvc = false;
7170 goto saturate;
     /* Main loop: emit the constant add/sub one byte at a time, using
        ADIW/SBIW, immediate ops, INC/DEC or the scratch register as the
        cheapest available form for each byte.  */
7173 for (i = 0; i < n_bytes; i++)
7175 /* We operate byte-wise on the destination. */
7176 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7177 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
7179 /* 8-bit value to operate with this byte. */
7180 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7182 /* Registers R16..R31 can operate with immediate. */
7183 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7185 op[0] = reg8;
7186 op[1] = gen_int_mode (val8, QImode);
7188 /* To get usable cc0 no low-bytes must have been skipped. */
7190 if (i && !started)
7191 *pcc = CC_CLOBBER;
7193 if (!started
7194 && i % 2 == 0
7195 && i + 2 <= n_bytes
7196 && test_hard_reg_class (ADDW_REGS, reg8))
7198 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
7199 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
7201 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7202 i.e. operate word-wise. */
7204 if (val16 < 64)
7206 if (val16 != 0)
7208 started = true;
7209 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
7210 op, plen, 1);
7212 if (n_bytes == 2 && PLUS == code)
7213 *pcc = CC_SET_CZN;
     /* Both bytes of this word are consumed.  */
7216 i++;
7217 continue;
     /* Zero byte: skip entirely before the operation started, or
        just propagate the carry once it has.  */
7221 if (val8 == 0)
7223 if (started)
7224 avr_asm_len (code == PLUS
7225 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7226 op, plen, 1);
7227 continue;
     /* +/-1 in the top remaining byte: a single INC/DEC suffices.  */
7229 else if ((val8 == 1 || val8 == 0xff)
7230 && UNKNOWN == code_sat
7231 && !started
7232 && i == n_bytes - 1)
7234 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
7235 op, plen, 1);
7236 *pcc = CC_CLOBBER;
7237 break;
7240 switch (code)
7242 case PLUS:
7244 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
7246 if (plen != NULL && UNKNOWN != code_sat)
7248 /* This belongs to the x + 0x80 corner case. The code with
7249 ADD instruction is not smaller, thus make this case
7250 expensive so that the caller won't pick it. */
7252 *plen += 10;
7253 break;
     /* Reuse the scratch if it still holds this byte's value.  */
7256 if (clobber_val != (int) val8)
7257 avr_asm_len ("ldi %2,%1", op, plen, 1);
7258 clobber_val = (int) val8;
7260 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
7262 break; /* PLUS */
7264 case MINUS:
7266 if (ld_reg_p)
7267 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
7268 else
7270 gcc_assert (plen != NULL || REG_P (op[2]));
7272 if (clobber_val != (int) val8)
7273 avr_asm_len ("ldi %2,%1", op, plen, 1);
7274 clobber_val = (int) val8;
7276 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
7279 break; /* MINUS */
7281 default:
7282 /* Unknown code */
7283 gcc_unreachable();
7286 started = true;
7288 } /* for all sub-bytes */
7290 saturate:
7292 if (UNKNOWN == code_sat)
7293 return;
7295 *pcc = (int) CC_CLOBBER;
7297 /* Vanilla addition/subtraction is done. We are left with saturation.
7299 We have to compute A = A <op> B where A is a register and
7300 B is a register or a non-zero compile time constant CONST.
7301 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
7302 B stands for the original operand $2 in INSN. In the case of B = CONST,
7303 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
7305 CODE is the instruction flavor we use in the asm sequence to perform <op>.
7308 unsigned
7309 operation | code | sat if | b is | sat value | case
7310 -----------------+-------+----------+--------------+-----------+-------
7311 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
7312 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
7313 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
7314 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
7317 signed
7318 operation | code | sat if | b is | sat value | case
7319 -----------------+-------+----------+--------------+-----------+-------
7320 + as a + b | add | V == 1 | const, reg | s+ | [1s]
7321 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
7322 - as a - b | sub | V == 1 | const, reg | s- | [3s]
7323 - as a + (-b) | add | V == 1 | const | s- | [4s]
7325 s+ = b < 0 ? -0x80 : 0x7f
7326 s- = b < 0 ? 0x7f : -0x80
7328 The cases a - b actually perform a - (-(-b)) if B is CONST.
     op[0] is the MSB of the result, op[1] the byte below it (if any).  */
7331 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7332 op[1] = n_bytes > 1
7333 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
7334 : NULL_RTX;
7336 bool need_copy = true;
     /* Library calls cost 1 word (RCALL) or 2 (CALL on JMP devices).  */
7337 int len_call = 1 + AVR_HAVE_JMP_CALL;
7339 switch (code_sat)
7341 default:
7342 gcc_unreachable();
7344 case SS_PLUS:
7345 case SS_MINUS:
     /* Skip the saturation code when no signed overflow occurred.  */
7347 if (out_brvc)
7348 avr_asm_len ("brvc 0f", op, plen, 1);
7350 if (reg_overlap_mentioned_p (xop[0], xop[2]))
7352 /* [1s,reg] */
7354 if (n_bytes == 1)
7355 avr_asm_len ("ldi %0,0x7f" CR_TAB
7356 "adc %0,__zero_reg__", op, plen, 2);
7357 else
7358 avr_asm_len ("ldi %0,0x7f" CR_TAB
7359 "ldi %1,0xff" CR_TAB
7360 "adc %1,__zero_reg__" CR_TAB
7361 "adc %0,__zero_reg__", op, plen, 4);
7363 else if (sign == 0 && PLUS == code)
7365 /* [1s,reg] */
7367 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7369 if (n_bytes == 1)
7370 avr_asm_len ("ldi %0,0x80" CR_TAB
7371 "sbrs %2,7" CR_TAB
7372 "dec %0", op, plen, 3);
7373 else
7374 avr_asm_len ("ldi %0,0x80" CR_TAB
7375 "cp %2,%0" CR_TAB
7376 "sbc %1,%1" CR_TAB
7377 "sbci %0,0", op, plen, 4);
7379 else if (sign == 0 && MINUS == code)
7381 /* [3s,reg] */
7383 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7385 if (n_bytes == 1)
7386 avr_asm_len ("ldi %0,0x7f" CR_TAB
7387 "sbrs %2,7" CR_TAB
7388 "inc %0", op, plen, 3);
7389 else
7390 avr_asm_len ("ldi %0,0x7f" CR_TAB
7391 "cp %0,%2" CR_TAB
7392 "sbc %1,%1" CR_TAB
7393 "sbci %0,-1", op, plen, 4);
7395 else if ((sign < 0) ^ (SS_MINUS == code_sat))
7397 /* [1s,const,B < 0] [2s,B < 0] */
7398 /* [3s,const,B > 0] [4s,B > 0] */
7400 if (n_bytes == 8)
7402 avr_asm_len ("%~call __clr_8", op, plen, len_call);
7403 need_copy = false;
7406 avr_asm_len ("ldi %0,0x80", op, plen, 1);
7407 if (n_bytes > 1 && need_copy)
7408 avr_asm_len ("clr %1", op, plen, 1);
7410 else if ((sign > 0) ^ (SS_MINUS == code_sat))
7412 /* [1s,const,B > 0] [2s,B > 0] */
7413 /* [3s,const,B < 0] [4s,B < 0] */
7415 if (n_bytes == 8)
7417 avr_asm_len ("sec" CR_TAB
7418 "%~call __sbc_8", op, plen, 1 + len_call);
7419 need_copy = false;
7422 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
7423 if (n_bytes > 1 && need_copy)
7424 avr_asm_len ("ldi %1,0xff", op, plen, 1);
7426 else
7427 gcc_unreachable();
7429 break;
7431 case US_PLUS:
7432 /* [1u] : [2u] */
7434 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
7436 if (n_bytes == 8)
7438 if (MINUS == code)
7439 avr_asm_len ("sec", op, plen, 1);
7440 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
7442 need_copy = false;
7444 else
7446 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
7447 avr_asm_len ("sec" CR_TAB
7448 "sbc %0,%0", op, plen, 2);
7449 else
7450 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
7451 op, plen, 1);
7453 break; /* US_PLUS */
7455 case US_MINUS:
7456 /* [4u] : [3u] */
7458 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
7460 if (n_bytes == 8)
7462 avr_asm_len ("%~call __clr_8", op, plen, len_call);
7463 need_copy = false;
7465 else
7466 avr_asm_len ("clr %0", op, plen, 1);
7468 break;
7471 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7472 Now copy the right value to the LSBs. */
7474 if (need_copy && n_bytes > 1)
7476 if (US_MINUS == code_sat || US_PLUS == code_sat)
7478 avr_asm_len ("mov %1,%0", op, plen, 1);
7480 if (n_bytes > 2)
7482 op[0] = xop[0];
7483 if (AVR_HAVE_MOVW)
7484 avr_asm_len ("movw %0,%1", op, plen, 1);
7485 else
7486 avr_asm_len ("mov %A0,%1" CR_TAB
7487 "mov %B0,%1", op, plen, 2);
7490 else if (n_bytes > 2)
7492 op[0] = xop[0];
7493 avr_asm_len ("mov %A0,%1" CR_TAB
7494 "mov %B0,%1", op, plen, 2);
     /* 64-bit: replicate the low word into the middle four bytes.  */
7498 if (need_copy && n_bytes == 8)
7500 if (AVR_HAVE_MOVW)
7501 avr_asm_len ("movw %r0+2,%0" CR_TAB
7502 "movw %r0+4,%0", xop, plen, 2);
7503 else
7504 avr_asm_len ("mov %r0+2,%0" CR_TAB
7505 "mov %r0+3,%0" CR_TAB
7506 "mov %r0+4,%0" CR_TAB
7507 "mov %r0+5,%0", xop, plen, 4);
     /* Target of the "0f" branches emitted above.  */
7510 if (out_label)
7511 avr_asm_len ("0:", op, plen, 0);
7515 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7516 is not a compile-time constant:
7518 XOP[0] = XOP[0] +/- XOP[2]
7520 This is a helper for the function below. The only insns that need this
7521 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
7523 static const char*
7524 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
7526 machine_mode mode = GET_MODE (xop[0]);
7528 /* Only pointer modes want to add symbols. */
7530 gcc_assert (mode == HImode || mode == PSImode);
7532 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
     /* There is no ADDI on AVR: addition of an immediate is done as
        subtraction of its negative (SUBI/SBCI with lo8/hi8 relocs).  */
7534 avr_asm_len (PLUS == code
7535 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7536 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
7537 xop, plen, -2);
     /* PSImode needs a third byte handled via hlo8.  */
7539 if (PSImode == mode)
7540 avr_asm_len (PLUS == code
7541 ? "sbci %C0,hlo8(-(%2))"
7542 : "sbci %C0,hlo8(%2)", xop, plen, 1);
7543 return "";
7547 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7549 INSN is a single_set insn or an insn pattern with a binary operation as
7550 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7552 XOP are the operands of INSN. In the case of 64-bit operations with
7553 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7554 The non-saturating insns up to 32 bits may or may not supply a "d" class
7555 scratch as XOP[3].
7557 If PLEN == NULL output the instructions.
7558 If PLEN != NULL set *PLEN to the length of the sequence in words.
7560 PCC is a pointer to store the instructions' effect on cc0.
7561 PCC may be NULL.
7563 PLEN and PCC default to NULL.
7565 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
7567 Return "" */
7569 const char*
7570 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
7572 int cc_plus, cc_minus, cc_dummy;
7573 int len_plus, len_minus;
7574 rtx op[4];
7575 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7576 rtx xdest = SET_DEST (xpattern);
7577 machine_mode mode = GET_MODE (xdest);
7578 machine_mode imode = int_mode_for_mode (mode);
7579 int n_bytes = GET_MODE_SIZE (mode);
7580 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
7581 enum rtx_code code
7582 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
7583 ? PLUS : MINUS);
7585 if (!pcc)
7586 pcc = &cc_dummy;
7588 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
7590 if (PLUS == code_sat || MINUS == code_sat)
7591 code_sat = UNKNOWN;
     /* Register operand up to 32 bit: only one way to emit, no length
        comparison needed.  */
7593 if (n_bytes <= 4 && REG_P (xop[2]))
7595 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
7596 return "";
     /* 64-bit: operands live in the fixed accumulator ACC_A and XOP[0]
        holds only the constant summand/subtrahend.  */
7599 if (8 == n_bytes)
7601 op[0] = gen_rtx_REG (DImode, ACC_A);
7602 op[1] = gen_rtx_REG (DImode, ACC_A);
7603 op[2] = avr_to_int_mode (xop[0]);
7605 else
     /* Symbolic (non-numeric) addends go through the lo8/hi8 path.  */
7607 if (!REG_P (xop[2])
7608 && !CONST_INT_P (xop[2])
7609 && !CONST_FIXED_P (xop[2]))
7611 return avr_out_plus_symbol (xop, code, plen, pcc);
7614 op[0] = avr_to_int_mode (xop[0]);
7615 op[1] = avr_to_int_mode (xop[1]);
7616 op[2] = avr_to_int_mode (xop[2]);
7619 /* Saturations and 64-bit operations don't have a clobber operand.
7620 For the other cases, the caller will provide a proper XOP[3]. */
7622 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
7623 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
7625 /* Saturation will need the sign of the original operand. */
7627 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
7628 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
7630 /* If we subtract and the subtrahend is a constant, then negate it
7631 so that avr_out_plus_1 can be used. */
7633 if (MINUS == code)
7634 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
7636 /* Work out the shortest sequence. */
     /* Dry-run both the SUB-based and the ADD-based encoding, then emit
        (or report the length of) whichever is shorter.  */
7638 avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
7639 avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
7641 if (plen)
7643 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
7644 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
7646 else if (len_minus <= len_plus)
7647 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
7648 else
7649 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
7651 return "";
7655 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7656 time constant XOP[2]:
7658 XOP[0] = XOP[0] <op> XOP[2]
7660 and return "". If PLEN == NULL, print assembler instructions to perform the
7661 operation; otherwise, set *PLEN to the length of the instruction sequence
7662 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
7663 register or SCRATCH if no clobber register is needed for the operation.
7664 INSN is an INSN_P or a pattern of an insn. */
7666 const char*
7667 avr_out_bitop (rtx insn, rtx *xop, int *plen)
7669 /* CODE and MODE of the operation. */
7670 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
7671 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
7672 machine_mode mode = GET_MODE (xop[0]);
7674 /* Number of bytes to operate on. */
7675 int i, n_bytes = GET_MODE_SIZE (mode);
7677 /* Value of T-flag (0 or 1) or -1 if unknown. */
7678 int set_t = -1;
7680 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7681 int clobber_val = -1;
7683 /* op[0]: 8-bit destination register
7684 op[1]: 8-bit const int
7685 op[2]: 8-bit clobber register or SCRATCH
7686 op[3]: 8-bit register containing 0xff or NULL_RTX */
7687 rtx op[4];
7689 op[2] = xop[3];
7690 op[3] = NULL_RTX;
7692 if (plen)
7693 *plen = 0;
     /* Byte-wise loop; each byte picks the cheapest encoding based on the
        popcount of its constant and the register class.  */
7695 for (i = 0; i < n_bytes; i++)
7697 /* We operate byte-wise on the destination. */
7698 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7699 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
7701 /* 8-bit value to operate with this byte. */
7702 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7704 /* Number of bits set in the current byte of the constant. */
7705 int pop8 = avr_popcount (val8);
7707 /* Registers R16..R31 can operate with immediate. */
7708 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7710 op[0] = reg8;
7711 op[1] = GEN_INT (val8);
7713 switch (code)
7715 case IOR:
     /* OR with 0: no-op.  Single bit: SET once, then BLD per byte.
        All bits: load 0xff (reusing a register that already has it).  */
7717 if (0 == pop8)
7718 continue;
7719 else if (ld_reg_p)
7720 avr_asm_len ("ori %0,%1", op, plen, 1);
7721 else if (1 == pop8)
7723 if (set_t != 1)
7724 avr_asm_len ("set", op, plen, 1);
7725 set_t = 1;
7727 op[1] = GEN_INT (exact_log2 (val8));
7728 avr_asm_len ("bld %0,%1", op, plen, 1);
7730 else if (8 == pop8)
7732 if (op[3] != NULL_RTX)
7733 avr_asm_len ("mov %0,%3", op, plen, 1);
7734 else
7735 avr_asm_len ("clr %0" CR_TAB
7736 "dec %0", op, plen, 2);
     /* Remember which register now holds 0xff for later bytes.  */
7738 op[3] = op[0];
7740 else
7742 if (clobber_val != (int) val8)
7743 avr_asm_len ("ldi %2,%1", op, plen, 1);
7744 clobber_val = (int) val8;
7746 avr_asm_len ("or %0,%2", op, plen, 1);
7749 continue; /* IOR */
7751 case AND:
     /* AND with 0xff: no-op.  With 0: CLR.  One zero bit: CLT once,
        then BLD to clear that bit.  */
7753 if (8 == pop8)
7754 continue;
7755 else if (0 == pop8)
7756 avr_asm_len ("clr %0", op, plen, 1);
7757 else if (ld_reg_p)
7758 avr_asm_len ("andi %0,%1", op, plen, 1);
7759 else if (7 == pop8)
7761 if (set_t != 0)
7762 avr_asm_len ("clt", op, plen, 1);
7763 set_t = 0;
7765 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
7766 avr_asm_len ("bld %0,%1", op, plen, 1);
7768 else
7770 if (clobber_val != (int) val8)
7771 avr_asm_len ("ldi %2,%1", op, plen, 1);
7772 clobber_val = (int) val8;
7774 avr_asm_len ("and %0,%2", op, plen, 1);
7777 continue; /* AND */
7779 case XOR:
     /* XOR with 0: no-op.  With 0xff: COM.  XOR with 0x80 on an LD
        register equals SUBI 0x80 (flip of the sign bit).  */
7781 if (0 == pop8)
7782 continue;
7783 else if (8 == pop8)
7784 avr_asm_len ("com %0", op, plen, 1);
7785 else if (ld_reg_p && val8 == (1 << 7))
7786 avr_asm_len ("subi %0,%1", op, plen, 1);
7787 else
7789 if (clobber_val != (int) val8)
7790 avr_asm_len ("ldi %2,%1", op, plen, 1);
7791 clobber_val = (int) val8;
7793 avr_asm_len ("eor %0,%2", op, plen, 1);
7796 continue; /* XOR */
7798 default:
7799 /* Unknown rtx_code */
7800 gcc_unreachable();
7802 } /* for all sub-bytes */
7804 return "";
7808 /* Output sign extension from XOP[1] to XOP[0] and return "".
7809 If PLEN == NULL, print assembler instructions to perform the operation;
7810 otherwise, set *PLEN to the length of the instruction sequence (in words)
7811 as printed with PLEN == NULL. */
7813 const char*
7814 avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
7816 // Size in bytes of source resp. destination operand.
7817 unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
7818 unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
     // MSB of the source, i.e. the byte carrying the sign bit.
7819 rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];
7821 if (plen)
7822 *plen = 0;
7824 // Copy destination to source
7826 if (REGNO (xop[0]) != REGNO (xop[1]))
7828 gcc_assert (n_src <= 2);
7830 if (n_src == 2)
7831 avr_asm_len (AVR_HAVE_MOVW
7832 ? "movw %0,%1"
7833 : "mov %B0,%B1", xop, plen, 1);
7834 if (n_src == 1 || !AVR_HAVE_MOVW)
7835 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
7838 // Set Carry to the sign bit MSB.7...
     // LSL below destroys its operand, so shift a copy in __tmp_reg__
     // whenever the MSB is still live after this insn.
7840 if (REGNO (xop[0]) == REGNO (xop[1])
7841 || !reg_unused_after (insn, r_msb))
7843 avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
7844 r_msb = tmp_reg_rtx;
7847 avr_asm_len ("lsl %0", &r_msb, plen, 1);
7849 // ...and propagate it to all the new sign bits
     // SBC r,r yields 0x00 or 0xff depending on Carry.
7851 for (unsigned n = n_src; n < n_dest; n++)
7852 avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);
7854 return "";
7858 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7859 PLEN != NULL: Set *PLEN to the length of that sequence.
7860 Return "". */
7862 const char*
7863 avr_out_addto_sp (rtx *op, int *plen)
     /* "rcall ." pushes a return address, decrementing SP by the size of
        the program counter (2 or 3 bytes depending on the device).  */
7865 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7866 int addend = INTVAL (op[0]);
7868 if (plen)
7869 *plen = 0;
     /* Shrink the stack: RCALL . drops SP by pc_len per instruction,
        PUSH covers the remainder one byte at a time.  */
7871 if (addend < 0)
7873 if (flag_verbose_asm || flag_print_asm_name)
7874 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7876 while (addend <= -pc_len)
7878 addend += pc_len;
7879 avr_asm_len ("rcall .", op, plen, 1);
7882 while (addend++ < 0)
7883 avr_asm_len ("push __zero_reg__", op, plen, 1);
     /* Grow the stack: POP one byte at a time.  */
7885 else if (addend > 0)
7887 if (flag_verbose_asm || flag_print_asm_name)
7888 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7890 while (addend-- > 0)
7891 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7894 return "";
7898 /* Outputs instructions needed for fixed point type conversion.
7899 This includes converting between any fixed point type, as well
7900 as converting to any integer type. Conversion between integer
7901 types is not supported.
7903 Converting signed fractional types requires a bit shift if converting
7904 to or from any unsigned fractional type because the decimal place is
7905 shifted by 1 bit. When the destination is a signed fractional, the sign
7906 is stored in either the carry or T bit. */
/* PLEN == NULL: Print the instruction sequence.
   PLEN != NULL: Only set *PLEN to the length of the sequence in words;
   nothing is printed.  (Same convention as the avr_asm_len calls below
   and the other avr_out_* workers in this file.)  */
7908 const char*
7909 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
7911 size_t i;
7912 rtx xop[6];
7913 RTX_CODE shift = UNKNOWN;
7914 bool sign_in_carry = false;
7915 bool msb_in_carry = false;
7916 bool lsb_in_tmp_reg = false;
7917 bool lsb_in_carry = false;
7918 bool frac_rounded = false;
7919 const char *code_ashift = "lsl %0";
7922 #define MAY_CLOBBER(RR) \
7923 /* Shorthand used below. */ \
7924 ((sign_bytes \
7925 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7926 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7927 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7928 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7930 struct
7932 /* bytes : Length of operand in bytes.
7933 ibyte : Length of integral part in bytes.
7934 fbyte, fbit : Length of fractional part in bytes, bits. */
7936 bool sbit;
7937 unsigned fbit, bytes, ibyte, fbyte;
7938 unsigned regno, regno_msb;
7939 } dest, src, *val[2] = { &dest, &src };
7941 if (plen)
7942 *plen = 0;
7944 /* Step 0: Determine information on source and destination operand we
7945 ====== will need in the remainder. */
7947 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7949 machine_mode mode;
7951 xop[i] = operands[i];
7953 mode = GET_MODE (xop[i]);
7955 val[i]->bytes = GET_MODE_SIZE (mode);
7956 val[i]->regno = REGNO (xop[i]);
7957 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7959 if (SCALAR_INT_MODE_P (mode))
7961 val[i]->sbit = intsigned;
7962 val[i]->fbit = 0;
7964 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7966 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7967 val[i]->fbit = GET_MODE_FBIT (mode);
7969 else
7970 fatal_insn ("unsupported fixed-point conversion", insn);
7972 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7973 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7976 // Byte offset of the decimal point taking into account different place
7977 // of the decimal point in input and output and different register numbers
7978 // of input and output.
7979 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7981 // Number of destination bytes that will come from sign / zero extension.
7982 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7984 // Number of bytes at the low end to be filled with zeros.
7985 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7987 // Do we have a 16-Bit register that is cleared?
7988 rtx clrw = NULL_RTX;
7990 bool sign_extend = src.sbit && sign_bytes;
7992 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7993 shift = ASHIFT;
7994 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7995 shift = ASHIFTRT;
7996 else if (dest.fbit % 8 == src.fbit % 8)
7997 shift = UNKNOWN;
7998 else
7999 gcc_unreachable();
8001 /* If we need to round the fraction part, we might need to save/round it
8002 before clobbering any of it in Step 1. Also, we might want to do
8003 the rounding now to make use of LD_REGS. */
8004 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8005 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8006 && !TARGET_FRACT_CONV_TRUNC)
8008 bool overlap
8009 = (src.regno <=
8010 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8011 && dest.regno - offset -1 >= dest.regno);
8012 unsigned s0 = dest.regno - offset -1;
8013 bool use_src = true;
8014 unsigned sn;
8015 unsigned copied_msb = src.regno_msb;
8016 bool have_carry = false;
8018 if (src.ibyte > dest.ibyte)
8019 copied_msb -= src.ibyte - dest.ibyte;
8021 for (sn = s0; sn <= copied_msb; sn++)
8022 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8023 && !reg_unused_after (insn, all_regs_rtx[sn]))
8024 use_src = false;
8025 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8027 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8028 &all_regs_rtx[src.regno_msb], plen, 2);
8029 sn = src.regno;
8030 if (sn < s0)
8032 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8033 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8034 else
8035 avr_asm_len ("sec" CR_TAB
8036 "cpc %0,__zero_reg__",
8037 &all_regs_rtx[sn], plen, 2);
8038 have_carry = true;
8040 while (++sn < s0)
8041 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8043 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8044 &all_regs_rtx[s0], plen, 1);
8045 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8046 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8047 avr_asm_len ("\n0:", NULL, plen, 0);
8048 frac_rounded = true;
8050 else if (use_src && overlap)
/* NOTE (review): the sequence below emits three instructions but adds
   only 1 to the length — confirm the length count against a released
   avr.c before relying on computed insn lengths here.  */
8052 avr_asm_len ("clr __tmp_reg__" CR_TAB
8053 "sbrc %1,0" CR_TAB
8054 "dec __tmp_reg__", xop, plen, 1);
8055 sn = src.regno;
8056 if (sn < s0)
8058 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8059 have_carry = true;
8062 while (++sn < s0)
8063 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8065 if (have_carry)
8066 avr_asm_len ("clt" CR_TAB
8067 "bld __tmp_reg__,7" CR_TAB
8068 "adc %0,__tmp_reg__",
8069 &all_regs_rtx[s0], plen, 1);
/* FIX: the scratch register is spelled __tmp_reg__ (with trailing
   underscores) everywhere else in this file; "lsr __tmp_reg" would
   reference an undefined assembler symbol.  */
8070 else
8071 avr_asm_len ("lsr __tmp_reg__" CR_TAB
8072 "add %0,__tmp_reg__",
8073 &all_regs_rtx[s0], plen, 2);
8074 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8075 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8076 frac_rounded = true;
8078 else if (overlap)
8080 bool use_src
8081 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8082 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8083 || reg_unused_after (insn, all_regs_rtx[s0])));
8084 xop[2] = all_regs_rtx[s0];
8085 unsigned sn = src.regno;
8086 if (!use_src || sn == s0)
8087 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8088 /* We need to consider to-be-discarded bits
8089 if the value is negative. */
8090 if (sn < s0)
8092 avr_asm_len ("tst %0" CR_TAB
8093 "brpl 0f",
8094 &all_regs_rtx[src.regno_msb], plen, 2);
8095 /* Test to-be-discarded bytes for any nonzero bits.
8096 ??? Could use OR or SBIW to test two registers at once. */
8097 if (sn < s0)
8098 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8100 while (++sn < s0)
8101 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8102 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8103 if (use_src)
8104 avr_asm_len ("breq 0f" CR_TAB
8105 "ori %2,1"
8106 "\n0:\t" "mov __tmp_reg__,%2",
8107 xop, plen, 3);
8108 else
8109 avr_asm_len ("breq 0f" CR_TAB
8110 "set" CR_TAB
8111 "bld __tmp_reg__,0\n0:",
8112 xop, plen, 3);
8114 lsb_in_tmp_reg = true;
8118 /* Step 1: Clear bytes at the low end and copy payload bits from source
8119 ====== to destination. */
8121 int step = offset < 0 ? 1 : -1;
8122 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8124 // We cleared at least that number of registers.
8125 int clr_n = 0;
8127 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8129 // Next regno of destination is needed for MOVW
8130 unsigned d1 = d0 + step;
8132 // Current and next regno of source
8133 signed s0 = d0 - offset;
8134 signed s1 = s0 + step;
8136 // Must current resp. next regno be CLRed? This applies to the low
8137 // bytes of the destination that have no associated source bytes.
8138 bool clr0 = s0 < (signed) src.regno;
8139 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8141 // First gather what code to emit (if any) and additional step to
8142 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8143 // is the source rtx for the current loop iteration.
8144 const char *code = NULL;
8145 int stepw = 0;
8147 if (clr0)
8149 if (AVR_HAVE_MOVW && clr1 && clrw)
8151 xop[2] = all_regs_rtx[d0 & ~1];
8152 xop[3] = clrw;
8153 code = "movw %2,%3";
8154 stepw = step;
8156 else
8158 xop[2] = all_regs_rtx[d0];
8159 code = "clr %2";
8161 if (++clr_n >= 2
8162 && !clrw
8163 && d0 % 2 == (step > 0))
8165 clrw = all_regs_rtx[d0 & ~1];
8169 else if (offset && s0 <= (signed) src.regno_msb)
8171 int movw = AVR_HAVE_MOVW && offset % 2 == 0
8172 && d0 % 2 == (offset > 0)
8173 && d1 <= dest.regno_msb && d1 >= dest.regno
8174 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
8176 xop[2] = all_regs_rtx[d0 & ~movw];
8177 xop[3] = all_regs_rtx[s0 & ~movw];
8178 code = movw ? "movw %2,%3" : "mov %2,%3";
8179 stepw = step * movw;
8182 if (code)
8184 if (sign_extend && shift != ASHIFT && !sign_in_carry
8185 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8187 /* We are going to override the sign bit. If we sign-extend,
8188 store the sign in the Carry flag. This is not needed if
8189 the destination will be ASHIFT in the remainder because
8190 the ASHIFT will set Carry without extra instruction. */
8192 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8193 sign_in_carry = true;
8196 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8198 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8199 && src.ibyte > dest.ibyte
8200 && (d0 == src_msb || d0 + stepw == src_msb))
8202 /* We are going to override the MSB. If we shift right,
8203 store the MSB in the Carry flag. This is only needed if
8204 we don't sign-extend because with sign-extension the MSB
8205 (the sign) will be produced by the sign extension. */
8207 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8208 msb_in_carry = true;
8211 unsigned src_lsb = dest.regno - offset -1;
8213 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8214 && !lsb_in_tmp_reg
8215 && (d0 == src_lsb || d0 + stepw == src_lsb))
8217 /* We are going to override the new LSB; store it into carry. */
8219 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8220 code_ashift = "rol %0";
8221 lsb_in_carry = true;
8224 avr_asm_len (code, xop, plen, 1);
8225 d0 += stepw;
8229 /* Step 2: Shift destination left by 1 bit position. This might be needed
8230 ====== for signed input and unsigned output. */
8232 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8234 unsigned s0 = dest.regno - offset -1;
8236 /* n1169 4.1.4 says:
8237 "Conversions from a fixed-point to an integer type round toward zero."
8238 Hence, converting a fract type to integer only gives a non-zero result
8239 for -1. */
8240 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8241 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8242 && !TARGET_FRACT_CONV_TRUNC)
8244 gcc_assert (s0 == src.regno_msb);
8245 /* Check if the input is -1. We do that by checking if negating
8246 the input causes an integer overflow. */
8247 unsigned sn = src.regno;
8248 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8249 while (sn <= s0)
8250 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8252 /* Overflow goes with set carry. Clear carry otherwise. */
8253 avr_asm_len ("brvs 0f" CR_TAB
8254 "clc\n0:", NULL, plen, 2);
8256 /* Likewise, when converting from accumulator types to integer, we
8257 need to round up negative values. */
8258 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8259 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8260 && !TARGET_FRACT_CONV_TRUNC
8261 && !frac_rounded)
8263 bool have_carry = false;
8265 xop[2] = all_regs_rtx[s0];
8266 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8267 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8268 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8269 &all_regs_rtx[src.regno_msb], plen, 2);
8270 if (!lsb_in_tmp_reg)
8272 unsigned sn = src.regno;
8273 if (sn < s0)
8275 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8276 plen, 1);
8277 have_carry = true;
8279 while (++sn < s0)
8280 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8281 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8283 /* Add in C and the rounding value 127. */
8284 /* If the destination msb is a sign byte, and in LD_REGS,
8285 grab it as a temporary. */
8286 if (sign_bytes
8287 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8288 dest.regno_msb))
8290 xop[3] = all_regs_rtx[dest.regno_msb];
8291 avr_asm_len ("ldi %3,127", xop, plen, 1);
8292 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8293 : have_carry ? "adc %2,%3"
8294 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8295 : "add %2,%3"),
8296 xop, plen, 1);
8298 else
8300 /* Fall back to use __zero_reg__ as a temporary. */
8301 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8302 if (have_carry)
8303 avr_asm_len ("clt" CR_TAB
8304 "bld __zero_reg__,7", NULL, plen, 2);
8305 else
8306 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
8307 avr_asm_len (have_carry && lsb_in_tmp_reg
8308 ? "adc __tmp_reg__,__zero_reg__"
8309 : have_carry ? "adc %2,__zero_reg__"
8310 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8311 : "add %2,__zero_reg__",
8312 xop, plen, 1);
8313 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8316 for (d0 = dest.regno + zero_bytes;
8317 d0 <= dest.regno_msb - sign_bytes; d0++)
8318 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
8320 avr_asm_len (lsb_in_tmp_reg
8321 ? "\n0:\t" "lsl __tmp_reg__"
8322 : "\n0:\t" "lsl %2",
8323 xop, plen, 1);
8325 else if (MAY_CLOBBER (s0))
8326 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8327 else
8328 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8329 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8331 code_ashift = "rol %0";
8332 lsb_in_carry = true;
8335 if (shift == ASHIFT)
8337 for (d0 = dest.regno + zero_bytes;
8338 d0 <= dest.regno_msb - sign_bytes; d0++)
8340 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8341 code_ashift = "rol %0";
8344 lsb_in_carry = false;
8345 sign_in_carry = true;
8348 /* Step 4a: Store MSB in carry if we don't already have it or will produce
8349 ======= it in sign-extension below. */
8351 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8352 && src.ibyte > dest.ibyte)
8354 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8356 if (MAY_CLOBBER (s0))
8357 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8358 else
8359 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8360 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8362 msb_in_carry = true;
8365 /* Step 3: Sign-extend or zero-extend the destination as needed.
8366 ====== */
8368 if (sign_extend && !sign_in_carry)
8370 unsigned s0 = src.regno_msb;
8372 if (MAY_CLOBBER (s0))
8373 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8374 else
8375 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8376 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8378 sign_in_carry = true;
8381 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8383 unsigned copies = 0;
8384 rtx movw = sign_extend ? NULL_RTX : clrw;
8386 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8388 if (AVR_HAVE_MOVW && movw
8389 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8391 xop[2] = all_regs_rtx[d0];
8392 xop[3] = movw;
8393 avr_asm_len ("movw %2,%3", xop, plen, 1);
8394 d0++;
8396 else
8398 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8399 &all_regs_rtx[d0], plen, 1);
8401 if (++copies >= 2 && !movw && d0 % 2 == 1)
8402 movw = all_regs_rtx[d0-1];
8404 } /* for */
8407 /* Step 4: Right shift the destination. This might be needed for
8408 ====== conversions from unsigned to signed. */
8410 if (shift == ASHIFTRT)
8412 const char *code_ashiftrt = "lsr %0";
8414 if (sign_extend || msb_in_carry)
8415 code_ashiftrt = "ror %0";
8417 if (src.sbit && src.ibyte == dest.ibyte)
8418 code_ashiftrt = "asr %0";
8420 for (d0 = dest.regno_msb - sign_bytes;
8421 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8423 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8424 code_ashiftrt = "ror %0";
8428 #undef MAY_CLOBBER
8430 return "";
8434 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
8435 XOP[2] is the rounding point, a CONST_INT. The function prints the
8436 instruction sequence if PLEN = NULL and computes the length in words
8437 of the sequence if PLEN != NULL. Most of this function deals with
8438 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
8440 const char*
8441 avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
8443 machine_mode mode = GET_MODE (xop[0]);
8444 machine_mode imode = int_mode_for_mode (mode);
8445 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
8446 int fbit = (int) GET_MODE_FBIT (mode);
8447 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
8448 wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
8449 GET_MODE_PRECISION (imode));
// i_add (double_int) and wi_add (wide_int) encode the same constant
// 2^(fbit-1-RP).  Two representations are kept because the consumers
// below use different constant APIs: const_fixed_from_double_int takes
// a double_int, immed_wide_int_const takes a wide_int.
8450 // Lengths of PLUS and AND parts.
8451 int len_add = 0, *plen_add = plen ? &len_add : NULL;
8452 int len_and = 0, *plen_and = plen ? &len_and : NULL;
8454 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
8455 // the saturated addition so that we can emit the "rjmp 1f" before the
8456 // "0:" below.
8458 rtx xadd = const_fixed_from_double_int (i_add, mode);
8459 rtx xpattern, xsrc, op[4];
// Use the saturating PLUS that matches the signedness of MODE.
8461 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
8462 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
8463 : gen_rtx_US_PLUS (mode, xop[1], xadd);
8464 xpattern = gen_rtx_SET (xop[0], xsrc);
8466 op[0] = xop[0];
8467 op[1] = xop[1];
8468 op[2] = xadd;
8469 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
8471 avr_asm_len ("rjmp 1f" CR_TAB
8472 "0:", NULL, plen_add, 1);
8474 // Keep all bits from RP and higher: ... 2^(-RP)
8475 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
8476 // Rounding point ^^^^^^^
8477 // Added above ^^^^^^^^^
8478 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
8479 rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);
8481 xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));
8483 op[0] = xreg;
8484 op[1] = xreg;
8485 op[2] = xmask;
8486 op[3] = gen_rtx_SCRATCH (QImode);
8487 avr_out_bitop (xpattern, op, plen_and);
8488 avr_asm_len ("1:", NULL, plen, 0);
// Total length is the sum of both emitted parts.
8490 if (plen)
8491 *plen = len_add + len_and;
8493 return "";
8497 /* Create RTL split patterns for byte sized rotate expressions. This
8498 produces a series of move instructions and considers overlap situations.
8499 Overlapping non-HImode operands need a scratch register. */
8501 bool
8502 avr_rotate_bytes (rtx operands[])
8504 int i, j;
8505 machine_mode mode = GET_MODE (operands[0]);
8506 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
8507 bool same_reg = rtx_equal_p (operands[0], operands[1]);
8508 int num = INTVAL (operands[2]);
8509 rtx scratch = operands[3];
8510 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
8511 Word move if no scratch is needed, otherwise use size of scratch. */
8512 machine_mode move_mode = QImode;
8513 int move_size, offset, size;
8515 if (num & 0xf)
8516 move_mode = QImode;
8517 else if ((mode == SImode && !same_reg) || !overlapped)
8518 move_mode = HImode;
8519 else
8520 move_mode = GET_MODE (scratch);
8522 /* Force DI rotate to use QI moves since other DI moves are currently split
8523 into QI moves so forward propagation works better. */
8524 if (mode == DImode)
8525 move_mode = QImode;
8526 /* Make scratch smaller if needed. */
8527 if (SCRATCH != GET_CODE (scratch)
8528 && HImode == GET_MODE (scratch)
8529 && QImode == move_mode)
8530 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
8532 move_size = GET_MODE_SIZE (move_mode);
8533 /* Number of bytes/words to rotate. */
8534 offset = (num >> 3) / move_size;
8535 /* Number of moves needed. */
8536 size = GET_MODE_SIZE (mode) / move_size;
8537 /* HImode byte swap is special case to avoid a scratch register. */
8538 if (mode == HImode && same_reg)
8540 /* HImode byte swap, using xor. This is as quick as using scratch. */
8541 rtx src, dst;
8542 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
8543 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
8544 if (!rtx_equal_p (dst, src))
8546 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
8547 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
8548 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
8551 else
8553 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
8554 /* Create linked list of moves to determine move order. */
8555 struct {
8556 rtx src, dst;
8557 int links;
8558 } move[MAX_SIZE + 8]; /* +8: slack for scratch moves appended when breaking deadlocks below. */
8559 int blocked, moves;
8561 gcc_assert (size <= MAX_SIZE);
8562 /* Generate list of subreg moves. */
8563 for (i = 0; i < size; i++)
8565 int from = i;
8566 int to = (from + offset) % size;
8567 move[i].src = simplify_gen_subreg (move_mode, operands[1],
8568 mode, from * move_size);
8569 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
8570 mode, to * move_size);
8571 move[i].links = -1;
8573 /* Mark dependence where a dst of one move is the src of another move.
8574 The first move is a conflict as it must wait until second is
8575 performed. We ignore moves to self - we catch this later. */
8576 if (overlapped)
8577 for (i = 0; i < size; i++)
8578 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
8579 for (j = 0; j < size; j++)
8580 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
8582 /* The dst of move i is the src of move j. */
8583 move[i].links = j;
8584 break;
8587 blocked = -1;
8588 moves = 0;
8589 /* Go through move list and perform non-conflicting moves. As each
8590 non-overlapping move is made, it may remove other conflicts
8591 so the process is repeated until no conflicts remain. */
/* Body of a do {} while (blocked != -1) loop (see the trailing "while"
   below): each pass emits every move whose destination is no longer
   needed as a source, until no move stays blocked.  */
8594 blocked = -1;
8595 moves = 0;
8596 /* Emit move where dst is not also a src or we have used that
8597 src already. */
8598 for (i = 0; i < size; i++)
8599 if (move[i].src != NULL_RTX)
8601 if (move[i].links == -1
8602 || move[move[i].links].src == NULL_RTX)
8604 moves++;
8605 /* Ignore NOP moves to self. */
8606 if (!rtx_equal_p (move[i].dst, move[i].src))
8607 emit_move_insn (move[i].dst, move[i].src);
8609 /* Remove conflict from list. */
8610 move[i].src = NULL_RTX;
8612 else
8613 blocked = i;
8616 /* Check for deadlock. This is when no moves occurred and we have
8617 at least one blocked move. */
8618 if (moves == 0 && blocked != -1)
8620 /* Need to use scratch register to break deadlock.
8621 Add move to put dst of blocked move into scratch.
8622 When this move occurs, it will break chain deadlock.
8623 The scratch register is substituted for real move. */
8625 gcc_assert (SCRATCH != GET_CODE (scratch));
8627 move[size].src = move[blocked].dst;
8628 move[size].dst = scratch;
8629 /* Scratch move is never blocked. */
8630 move[size].links = -1;
8631 /* Make sure we have valid link. */
8632 gcc_assert (move[blocked].links != -1);
8633 /* Replace src of blocking move with scratch reg. */
8634 move[move[blocked].links].src = scratch;
8635 /* Make dependent on scratch move occurring. */
8636 move[blocked].links = size;
8637 size=size+1;
8640 while (blocked != -1);
8642 return true;
8646 /* Worker function for `ADJUST_INSN_LENGTH'. */
8647 /* Modifies the length assigned to instruction INSN
8648 LEN is the initially computed length of the insn. */
8651 avr_adjust_insn_length (rtx_insn *insn, int len)
8653 rtx *op = recog_data.operand;
8654 enum attr_adjust_len adjust_len;
8656 /* Some complex insns don't need length adjustment and therefore
8657 the length need not/must not be adjusted for these insns.
8658 It is easier to state this in an insn attribute "adjust_len" than
8659 to clutter up code here... */
8661 if (!NONDEBUG_INSN_P (insn)
8662 || -1 == recog_memoized (insn))
8664 return len;
8667 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
8669 adjust_len = get_attr_adjust_len (insn);
8671 if (adjust_len == ADJUST_LEN_NO)
8673 /* Nothing to adjust: The length from attribute "length" is fine.
8674 This is the default. */
8676 return len;
8679 /* Extract insn's operands. */
8681 extract_constrain_insn_cached (insn);
8683 /* Dispatch to right function. */
8685 switch (adjust_len)
8687 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
8688 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
8689 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
8691 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
8693 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
8694 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
8696 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
8697 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
8698 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
8699 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
8700 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
8701 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
8702 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
8703 case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;
8705 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
8706 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
8707 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
8709 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
8710 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
8711 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
8712 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
8713 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
8715 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
8716 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
8717 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
8719 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
8720 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
8721 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
8723 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
8724 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
8725 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
8727 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
8728 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
8729 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
8731 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
8733 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
8735 default:
8736 gcc_unreachable();
8739 return len;
8742 /* Return nonzero if register REG dead after INSN. */
8745 reg_unused_after (rtx_insn *insn, rtx reg)
8747 return (dead_or_set_p (insn, reg)
8748 || (REG_P(reg) && _reg_unused_after (insn, reg)));
8751 /* Return nonzero if REG is not used after INSN.
8752 We assume REG is a reload reg, and therefore does
8753 not live past labels. It may live past calls or jumps though. */
8756 _reg_unused_after (rtx_insn *insn, rtx reg)
8758 enum rtx_code code;
8759 rtx set;
8761 /* If the reg is set by this instruction, then it is safe for our
8762 case. Disregard the case where this is a store to memory, since
8763 we are checking a register used in the store address. */
8764 set = single_set (insn);
8765 if (set && GET_CODE (SET_DEST (set)) != MEM
8766 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8767 return 1;
/* Scan forward over the following insns; stop at the first one that
   proves REG live (return 0) or dead (return 1).  */
8769 while ((insn = NEXT_INSN (insn)))
8771 rtx set;
8772 code = GET_CODE (insn);
8774 #if 0
8775 /* If this is a label that existed before reload, then the register
8776 is dead here. However, if this is a label added by reorg, then
8777 the register may still be live here. We can't tell the difference,
8778 so we just ignore labels completely. */
8779 if (code == CODE_LABEL)
8780 return 1;
8781 /* else */
8782 #endif
8784 if (!INSN_P (insn))
8785 continue;
8787 if (code == JUMP_INSN)
8788 return 0;
8790 /* If this is a sequence, we must handle them all at once.
8791 We could have for instance a call that sets the target register,
8792 and an insn in a delay slot that uses the register. In this case,
8793 we must return 0. */
8794 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
8796 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
8797 int i;
8798 int retval = 0;
8800 for (i = 0; i < seq->len (); i++)
8802 rtx_insn *this_insn = seq->insn (i);
8803 rtx set = single_set (this_insn);
/* Track whether the sequence contains a call or jump; that changes
   the verdict after the sequence has been examined.  */
8805 if (CALL_P (this_insn))
8806 code = CALL_INSN;
8807 else if (JUMP_P (this_insn))
8809 if (INSN_ANNULLED_BRANCH_P (this_insn))
8810 return 0;
8811 code = JUMP_INSN;
8814 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
8815 return 0;
8816 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8818 if (GET_CODE (SET_DEST (set)) != MEM)
8819 retval = 1;
8820 else
8821 return 0;
8823 if (set == 0
8824 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
8825 return 0;
8827 if (retval == 1)
8828 return 1;
8829 else if (code == JUMP_INSN)
8830 return 0;
/* A call: REG is dead here if the call's USE list does not mention it
   and REG is a call-used (call-clobbered) register.  */
8833 if (code == CALL_INSN)
8835 rtx tem;
8836 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
8837 if (GET_CODE (XEXP (tem, 0)) == USE
8838 && REG_P (XEXP (XEXP (tem, 0), 0))
8839 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
8840 return 0;
8841 if (call_used_regs[REGNO (reg)])
8842 return 1;
8845 set = single_set (insn);
8847 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
8848 return 0;
8849 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8850 return GET_CODE (SET_DEST (set)) != MEM;
8851 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
8852 return 0;
/* Fell off the end of the insn stream: REG is unused.  */
8854 return 1;
8858 /* Implement `TARGET_ASM_INTEGER'. */
8859 /* Target hook for assembling integer objects. The AVR version needs
8860 special handling for references to certain labels. */
8862 static bool
8863 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
8865 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
8866 && text_segment_operand (x, VOIDmode))
8868 fputs ("\t.word\tgs(", asm_out_file);
8869 output_addr_const (asm_out_file, x);
8870 fputs (")\n", asm_out_file);
8872 return true;
8874 else if (GET_MODE (x) == PSImode)
8876 /* This needs binutils 2.23+, see PR binutils/13503 */
8878 fputs ("\t.byte\tlo8(", asm_out_file);
8879 output_addr_const (asm_out_file, x);
8880 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8882 fputs ("\t.byte\thi8(", asm_out_file);
8883 output_addr_const (asm_out_file, x);
8884 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8886 fputs ("\t.byte\thh8(", asm_out_file);
8887 output_addr_const (asm_out_file, x);
8888 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8890 return true;
8892 else if (CONST_FIXED_P (x))
8894 unsigned n;
8896 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8898 for (n = 0; n < size; n++)
8900 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8901 default_assemble_integer (xn, 1, aligned_p);
8904 return true;
8907 return default_assemble_integer (x, size, aligned_p);
8911 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8912 /* Return value is nonzero if pseudos that have been
8913 assigned to registers of class CLASS would likely be spilled
8914 because registers of CLASS are needed for spill registers. */
8916 static bool
8917 avr_class_likely_spilled_p (reg_class_t c)
8919 return (c != ALL_REGS &&
8920 (AVR_TINY ? 1 : c != ADDW_REGS));
8924 /* Valid attributes:
8925 progmem - Put data to program memory.
8926 signal - Make a function to be hardware interrupt.
8927 After function prologue interrupts remain disabled.
8928 interrupt - Make a function to be hardware interrupt. Before function
8929 prologue interrupts are enabled by means of SEI.
8930 naked - Don't generate function prologue/epilogue and RET
8931 instruction. */
8933 /* Handle a "progmem" attribute; arguments as in
8934 struct attribute_spec.handler. */
8936 static tree
8937 avr_handle_progmem_attribute (tree *node, tree name,
8938 tree args ATTRIBUTE_UNUSED,
8939 int flags ATTRIBUTE_UNUSED,
8940 bool *no_add_attrs)
8942 if (DECL_P (*node))
8944 if (TREE_CODE (*node) == TYPE_DECL)
8946 /* This is really a decl attribute, not a type attribute,
8947 but try to handle it for GCC 3.0 backwards compatibility. */
8949 tree type = TREE_TYPE (*node);
8950 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8951 tree newtype = build_type_attribute_variant (type, attr);
8953 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8954 TREE_TYPE (*node) = newtype;
8955 *no_add_attrs = true;
8957 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8959 *no_add_attrs = false;
8961 else
8963 warning (OPT_Wattributes, "%qE attribute ignored",
8964 name);
8965 *no_add_attrs = true;
8969 return NULL_TREE;
8972 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8973 struct attribute_spec.handler. */
8975 static tree
8976 avr_handle_fndecl_attribute (tree *node, tree name,
8977 tree args ATTRIBUTE_UNUSED,
8978 int flags ATTRIBUTE_UNUSED,
8979 bool *no_add_attrs)
8981 if (TREE_CODE (*node) != FUNCTION_DECL)
8983 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8984 name);
8985 *no_add_attrs = true;
8988 return NULL_TREE;
8991 static tree
8992 avr_handle_fntype_attribute (tree *node, tree name,
8993 tree args ATTRIBUTE_UNUSED,
8994 int flags ATTRIBUTE_UNUSED,
8995 bool *no_add_attrs)
8997 if (TREE_CODE (*node) != FUNCTION_TYPE)
8999 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9000 name);
9001 *no_add_attrs = true;
9004 return NULL_TREE;
9007 static tree
9008 avr_handle_addr_attribute (tree *node, tree name, tree args,
9009 int flags ATTRIBUTE_UNUSED, bool *no_add)
9011 bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
9012 location_t loc = DECL_SOURCE_LOCATION (*node);
9014 if (TREE_CODE (*node) != VAR_DECL)
9016 warning_at (loc, 0, "%qE attribute only applies to variables", name);
9017 *no_add = true;
9020 if (args != NULL_TREE)
9022 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
9023 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
9024 tree arg = TREE_VALUE (args);
9025 if (TREE_CODE (arg) != INTEGER_CST)
9027 warning (0, "%qE attribute allows only an integer constant argument",
9028 name);
9029 *no_add = true;
9031 else if (io_p
9032 && (!tree_fits_shwi_p (arg)
9033 || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
9034 ? low_io_address_operand : io_address_operand)
9035 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
9037 warning_at (loc, 0, "%qE attribute address out of range", name);
9038 *no_add = true;
9040 else
9042 tree attribs = DECL_ATTRIBUTES (*node);
9043 const char *names[] = { "io", "io_low", "address", NULL } ;
9044 for (const char **p = names; *p; p++)
9046 tree other = lookup_attribute (*p, attribs);
9047 if (other && TREE_VALUE (other))
9049 warning_at (loc, 0,
9050 "both %s and %qE attribute provide address",
9051 *p, name);
9052 *no_add = true;
9053 break;
9059 if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
9060 warning_at (loc, 0, "%qE attribute on non-volatile variable", name);
9062 return NULL_TREE;
9066 avr_eval_addr_attrib (rtx x)
9068 if (GET_CODE (x) == SYMBOL_REF
9069 && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
9071 tree decl = SYMBOL_REF_DECL (x);
9072 tree attr = NULL_TREE;
9074 if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
9076 attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
9077 gcc_assert (attr);
9079 if (!attr || !TREE_VALUE (attr))
9080 attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
9081 gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
9082 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
9084 return x;
9088 /* AVR attributes. */
9089 static const struct attribute_spec
9090 avr_attribute_table[] =
9092 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
9093 affects_type_identity } */
9094 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
9095 false },
9096 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
9097 false },
9098 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
9099 false },
9100 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
9101 false },
9102 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
9103 false },
9104 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
9105 false },
9106 { "io", 0, 1, false, false, false, avr_handle_addr_attribute,
9107 false },
9108 { "io_low", 0, 1, false, false, false, avr_handle_addr_attribute,
9109 false },
9110 { "address", 1, 1, false, false, false, avr_handle_addr_attribute,
9111 false },
9112 { NULL, 0, 0, false, false, false, NULL, false }
9116 /* Look if DECL shall be placed in program memory space by
9117 means of attribute `progmem' or some address-space qualifier.
9118 Return non-zero if DECL is data that must end up in Flash and
9119 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9121 Return 2 if DECL is located in 24-bit flash address-space
9122 Return 1 if DECL is located in 16-bit flash address-space
9123 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9124 Return 0 otherwise */
9127 avr_progmem_p (tree decl, tree attributes)
9129 tree a;
9131 if (TREE_CODE (decl) != VAR_DECL)
9132 return 0;
9134 if (avr_decl_memx_p (decl))
9135 return 2;
9137 if (avr_decl_flash_p (decl))
9138 return 1;
9140 if (NULL_TREE
9141 != lookup_attribute ("progmem", attributes))
9142 return -1;
9144 a = decl;
9147 a = TREE_TYPE(a);
9148 while (TREE_CODE (a) == ARRAY_TYPE);
9150 if (a == error_mark_node)
9151 return 0;
9153 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9154 return -1;
9156 return 0;
9160 /* Scan type TYP for pointer references to address space ASn.
9161 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9162 the AS are also declared to be CONST.
9163 Otherwise, return the respective address space, i.e. a value != 0. */
9165 static addr_space_t
9166 avr_nonconst_pointer_addrspace (tree typ)
9168 while (ARRAY_TYPE == TREE_CODE (typ))
9169 typ = TREE_TYPE (typ);
9171 if (POINTER_TYPE_P (typ))
9173 addr_space_t as;
9174 tree target = TREE_TYPE (typ);
9176 /* Pointer to function: Test the function's return type. */
9178 if (FUNCTION_TYPE == TREE_CODE (target))
9179 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
9181 /* "Ordinary" pointers... */
9183 while (TREE_CODE (target) == ARRAY_TYPE)
9184 target = TREE_TYPE (target);
9186 /* Pointers to non-generic address space must be const.
9187 Refuse address spaces outside the device's flash. */
9189 as = TYPE_ADDR_SPACE (target);
9191 if (!ADDR_SPACE_GENERIC_P (as)
9192 && (!TYPE_READONLY (target)
9193 || avr_addrspace[as].segment >= avr_n_flash
9194 /* Also refuse __memx address space if we can't support it. */
9195 || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
9197 return as;
9200 /* Scan pointer's target type. */
9202 return avr_nonconst_pointer_addrspace (target);
9205 return ADDR_SPACE_GENERIC;
9209 /* Sanity check NODE so that all pointers targeting non-generic address spaces
9210 go along with CONST qualifier. Writing to these address spaces should
9211 be detected and complained about as early as possible. */
9213 static bool
9214 avr_pgm_check_var_decl (tree node)
9216 const char *reason = NULL;
9218 addr_space_t as = ADDR_SPACE_GENERIC;
9220 gcc_assert (as == 0);
9222 if (avr_log.progmem)
9223 avr_edump ("%?: %t\n", node);
9225 switch (TREE_CODE (node))
9227 default:
9228 break;
9230 case VAR_DECL:
9231 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9232 reason = "variable";
9233 break;
9235 case PARM_DECL:
9236 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9237 reason = "function parameter";
9238 break;
9240 case FIELD_DECL:
9241 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9242 reason = "structure field";
9243 break;
9245 case FUNCTION_DECL:
9246 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
9248 reason = "return type of function";
9249 break;
9251 case POINTER_TYPE:
9252 if (as = avr_nonconst_pointer_addrspace (node), as)
9253 reason = "pointer";
9254 break;
9257 if (reason)
9259 if (avr_addrspace[as].segment >= avr_n_flash)
9261 if (TYPE_P (node))
9262 error ("%qT uses address space %qs beyond flash of %d KiB",
9263 node, avr_addrspace[as].name, avr_n_flash);
9264 else
9265 error ("%s %q+D uses address space %qs beyond flash of %d KiB",
9266 reason, node, avr_addrspace[as].name, avr_n_flash);
9268 else
9270 if (TYPE_P (node))
9271 error ("pointer targeting address space %qs must be const in %qT",
9272 avr_addrspace[as].name, node);
9273 else
9274 error ("pointer targeting address space %qs must be const"
9275 " in %s %q+D",
9276 avr_addrspace[as].name, reason, node);
9280 return reason == NULL;
9284 /* Add the section attribute if the variable is in progmem. */
9286 static void
9287 avr_insert_attributes (tree node, tree *attributes)
9289 avr_pgm_check_var_decl (node);
9291 if (TREE_CODE (node) == VAR_DECL
9292 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
9293 && avr_progmem_p (node, *attributes))
9295 addr_space_t as;
9296 tree node0 = node;
9298 /* For C++, we have to peel arrays in order to get correct
9299 determination of readonlyness. */
9302 node0 = TREE_TYPE (node0);
9303 while (TREE_CODE (node0) == ARRAY_TYPE);
9305 if (error_mark_node == node0)
9306 return;
9308 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
9310 if (avr_addrspace[as].segment >= avr_n_flash)
9312 error ("variable %q+D located in address space %qs beyond flash "
9313 "of %d KiB", node, avr_addrspace[as].name, avr_n_flash);
9315 else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
9317 error ("variable %q+D located in address space %qs"
9318 " which is not supported for architecture %qs",
9319 node, avr_addrspace[as].name, avr_arch->name);
9322 if (!TYPE_READONLY (node0)
9323 && !TREE_READONLY (node))
9325 const char *reason = "__attribute__((progmem))";
9327 if (!ADDR_SPACE_GENERIC_P (as))
9328 reason = avr_addrspace[as].name;
9330 if (avr_log.progmem)
9331 avr_edump ("\n%?: %t\n%t\n", node, node0);
9333 error ("variable %q+D must be const in order to be put into"
9334 " read-only section by means of %qs", node, reason);
9340 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
9341 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
9342 /* Track need of __do_clear_bss. */
9344 void
9345 avr_asm_output_aligned_decl_common (FILE * stream,
9346 tree decl,
9347 const char *name,
9348 unsigned HOST_WIDE_INT size,
9349 unsigned int align, bool local_p)
9351 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9352 rtx symbol;
9354 if (mem != NULL_RTX && MEM_P (mem)
9355 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9356 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9359 if (!local_p)
9361 fprintf (stream, "\t.globl\t");
9362 assemble_name (stream, name);
9363 fprintf (stream, "\n");
9365 if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
9367 assemble_name (stream, name);
9368 fprintf (stream, " = %ld\n",
9369 (long) INTVAL (avr_eval_addr_attrib (symbol)));
9371 else if (local_p)
9372 error_at (DECL_SOURCE_LOCATION (decl),
9373 "static IO declaration for %q+D needs an address", decl);
9374 return;
9377 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
9378 There is no need to trigger __do_clear_bss code for them. */
9380 if (!STR_PREFIX_P (name, "__gnu_lto"))
9381 avr_need_clear_bss_p = true;
9383 if (local_p)
9384 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
9385 else
9386 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
9389 void
9390 avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
9391 unsigned HOST_WIDE_INT size, int align,
9392 void (*default_func)
9393 (FILE *, tree, const char *,
9394 unsigned HOST_WIDE_INT, int))
9396 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9397 rtx symbol;
9399 if (mem != NULL_RTX && MEM_P (mem)
9400 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9401 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9403 if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
9404 error_at (DECL_SOURCE_LOCATION (decl),
9405 "IO definition for %q+D needs an address", decl);
9406 avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
9408 else
9409 default_func (file, decl, name, size, align);
9413 /* Unnamed section callback for data_section
9414 to track need of __do_copy_data. */
9416 static void
9417 avr_output_data_section_asm_op (const void *data)
9419 avr_need_copy_data_p = true;
9421 /* Dispatch to default. */
9422 output_section_asm_op (data);
9426 /* Unnamed section callback for bss_section
9427 to track need of __do_clear_bss. */
9429 static void
9430 avr_output_bss_section_asm_op (const void *data)
9432 avr_need_clear_bss_p = true;
9434 /* Dispatch to default. */
9435 output_section_asm_op (data);
9439 /* Unnamed section callback for progmem*.data sections. */
9441 static void
9442 avr_output_progmem_section_asm_op (const void *data)
9444 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
9445 (const char*) data);
9449 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
9451 static void
9452 avr_asm_init_sections (void)
9454 /* Set up a section for jump tables. Alignment is handled by
9455 ASM_OUTPUT_BEFORE_CASE_LABEL. */
9457 if (AVR_HAVE_JMP_CALL)
9459 progmem_swtable_section
9460 = get_unnamed_section (0, output_section_asm_op,
9461 "\t.section\t.progmem.gcc_sw_table"
9462 ",\"a\",@progbits");
9464 else
9466 progmem_swtable_section
9467 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
9468 "\t.section\t.progmem.gcc_sw_table"
9469 ",\"ax\",@progbits");
9472 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
9473 resp. `avr_need_copy_data_p'. */
9475 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
9476 data_section->unnamed.callback = avr_output_data_section_asm_op;
9477 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
9481 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
9483 static section*
9484 avr_asm_function_rodata_section (tree decl)
9486 /* If a function is unused and optimized out by -ffunction-sections
9487 and --gc-sections, ensure that the same will happen for its jump
9488 tables by putting them into individual sections. */
9490 unsigned int flags;
9491 section * frodata;
9493 /* Get the frodata section from the default function in varasm.c
9494 but treat function-associated data-like jump tables as code
9495 rather than as user defined data. AVR has no constant pools. */
9497 int fdata = flag_data_sections;
9499 flag_data_sections = flag_function_sections;
9500 frodata = default_function_rodata_section (decl);
9501 flag_data_sections = fdata;
9502 flags = frodata->common.flags;
9505 if (frodata != readonly_data_section
9506 && flags & SECTION_NAMED)
9508 /* Adjust section flags and replace section name prefix. */
9510 unsigned int i;
9512 static const char* const prefix[] =
9514 ".rodata", ".progmem.gcc_sw_table",
9515 ".gnu.linkonce.r.", ".gnu.linkonce.t."
9518 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
9520 const char * old_prefix = prefix[i];
9521 const char * new_prefix = prefix[i+1];
9522 const char * name = frodata->named.name;
9524 if (STR_PREFIX_P (name, old_prefix))
9526 const char *rname = ACONCAT ((new_prefix,
9527 name + strlen (old_prefix), NULL));
9528 flags &= ~SECTION_CODE;
9529 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
9531 return get_section (rname, flags, frodata->named.decl);
9536 return progmem_swtable_section;
9540 /* Implement `TARGET_ASM_NAMED_SECTION'. */
9541 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
9543 static void
9544 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
9546 if (flags & AVR_SECTION_PROGMEM)
9548 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
9549 const char *old_prefix = ".rodata";
9550 const char *new_prefix = avr_addrspace[as].section_name;
9552 if (STR_PREFIX_P (name, old_prefix))
9554 const char *sname = ACONCAT ((new_prefix,
9555 name + strlen (old_prefix), NULL));
9556 default_elf_asm_named_section (sname, flags, decl);
9557 return;
9560 default_elf_asm_named_section (new_prefix, flags, decl);
9561 return;
9564 if (!avr_need_copy_data_p)
9565 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
9566 || STR_PREFIX_P (name, ".rodata")
9567 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
9569 if (!avr_need_clear_bss_p)
9570 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
9572 default_elf_asm_named_section (name, flags, decl);
9576 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
9578 static unsigned int
9579 avr_section_type_flags (tree decl, const char *name, int reloc)
9581 unsigned int flags = default_section_type_flags (decl, name, reloc);
9583 if (STR_PREFIX_P (name, ".noinit"))
9585 if (decl && TREE_CODE (decl) == VAR_DECL
9586 && DECL_INITIAL (decl) == NULL_TREE)
9587 flags |= SECTION_BSS; /* @nobits */
9588 else
9589 warning (0, "only uninitialized variables can be placed in the "
9590 ".noinit section");
9593 if (decl && DECL_P (decl)
9594 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9596 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9598 /* Attribute progmem puts data in generic address space.
9599 Set section flags as if it was in __flash to get the right
9600 section prefix in the remainder. */
9602 if (ADDR_SPACE_GENERIC_P (as))
9603 as = ADDR_SPACE_FLASH;
9605 flags |= as * SECTION_MACH_DEP;
9606 flags &= ~SECTION_WRITE;
9607 flags &= ~SECTION_BSS;
9610 return flags;
9614 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
9616 static void
9617 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
9619 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
9620 readily available, see PR34734. So we postpone the warning
9621 about uninitialized data in program memory section until here. */
9623 if (new_decl_p
9624 && decl && DECL_P (decl)
9625 && NULL_TREE == DECL_INITIAL (decl)
9626 && !DECL_EXTERNAL (decl)
9627 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9629 warning (OPT_Wuninitialized,
9630 "uninitialized variable %q+D put into "
9631 "program memory area", decl);
9634 default_encode_section_info (decl, rtl, new_decl_p);
9636 if (decl && DECL_P (decl)
9637 && TREE_CODE (decl) != FUNCTION_DECL
9638 && MEM_P (rtl)
9639 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
9641 rtx sym = XEXP (rtl, 0);
9642 tree type = TREE_TYPE (decl);
9643 tree attr = DECL_ATTRIBUTES (decl);
9644 if (type == error_mark_node)
9645 return;
9647 addr_space_t as = TYPE_ADDR_SPACE (type);
9649 /* PSTR strings are in generic space but located in flash:
9650 patch address space. */
9652 if (-1 == avr_progmem_p (decl, attr))
9653 as = ADDR_SPACE_FLASH;
9655 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
9657 tree io_low_attr = lookup_attribute ("io_low", attr);
9658 tree io_attr = lookup_attribute ("io", attr);
9659 tree addr_attr;
9660 if (io_low_attr
9661 && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
9662 addr_attr = io_attr;
9663 else if (io_attr
9664 && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
9665 addr_attr = io_attr;
9666 else
9667 addr_attr = lookup_attribute ("address", attr);
9668 if (io_low_attr
9669 || (io_attr && addr_attr
9670 && low_io_address_operand
9671 (GEN_INT (TREE_INT_CST_LOW
9672 (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
9673 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
9674 if (io_attr || io_low_attr)
9675 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
9676 /* If we have an (io) address attribute specification, but the variable
9677 is external, treat the address as only a tentative definition
9678 to be used to determine if an io port is in the lower range, but
9679 don't use the exact value for constant propagation. */
9680 if (addr_attr && !DECL_EXTERNAL (decl))
9681 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
9686 /* Implement `TARGET_ASM_SELECT_SECTION' */
9688 static section *
9689 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
9691 section * sect = default_elf_select_section (decl, reloc, align);
9693 if (decl && DECL_P (decl)
9694 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9696 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9698 /* __progmem__ goes in generic space but shall be allocated to
9699 .progmem.data */
9701 if (ADDR_SPACE_GENERIC_P (as))
9702 as = ADDR_SPACE_FLASH;
9704 if (sect->common.flags & SECTION_NAMED)
9706 const char * name = sect->named.name;
9707 const char * old_prefix = ".rodata";
9708 const char * new_prefix = avr_addrspace[as].section_name;
9710 if (STR_PREFIX_P (name, old_prefix))
9712 const char *sname = ACONCAT ((new_prefix,
9713 name + strlen (old_prefix), NULL));
9714 return get_section (sname, sect->common.flags, sect->named.decl);
9718 if (!progmem_section[as])
9720 progmem_section[as]
9721 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
9722 avr_addrspace[as].section_name);
9725 return progmem_section[as];
9728 return sect;
9731 /* Implement `TARGET_ASM_FILE_START'. */
9732 /* Outputs some text at the start of each assembler file. */
9734 static void
9735 avr_file_start (void)
9737 int sfr_offset = avr_arch->sfr_offset;
9739 if (avr_arch->asm_only)
9740 error ("architecture %qs supported for assembler only", avr_mmcu);
9742 default_file_start ();
9744 /* Print I/O addresses of some SFRs used with IN and OUT. */
9746 if (AVR_HAVE_SPH)
9747 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
9749 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
9750 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
9751 if (AVR_HAVE_RAMPZ)
9752 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
9753 if (AVR_HAVE_RAMPY)
9754 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
9755 if (AVR_HAVE_RAMPX)
9756 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
9757 if (AVR_HAVE_RAMPD)
9758 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
9759 if (AVR_XMEGA || AVR_TINY)
9760 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
9761 fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
9762 fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
9766 /* Implement `TARGET_ASM_FILE_END'. */
9767 /* Outputs to the stdio stream FILE some
9768 appropriate text to go at the end of an assembler file. */
9770 static void
9771 avr_file_end (void)
9773 /* Output these only if there is anything in the
9774 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
9775 input section(s) - some code size can be saved by not
9776 linking in the initialization code from libgcc if resp.
9777 sections are empty, see PR18145. */
9779 if (avr_need_copy_data_p)
9780 fputs (".global __do_copy_data\n", asm_out_file);
9782 if (avr_need_clear_bss_p)
9783 fputs (".global __do_clear_bss\n", asm_out_file);
9787 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
9788 /* Choose the order in which to allocate hard registers for
9789 pseudo-registers local to a basic block.
9791 Store the desired register order in the array `reg_alloc_order'.
9792 Element 0 should be the register to allocate first; element 1, the
9793 next register; and so on. */
9795 void
9796 avr_adjust_reg_alloc_order (void)
9798 unsigned int i;
9799 static const int order_0[] =
9801 24, 25,
9802 18, 19, 20, 21, 22, 23,
9803 30, 31,
9804 26, 27, 28, 29,
9805 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9806 0, 1,
9807 32, 33, 34, 35
9809 static const int tiny_order_0[] = {
9810 20, 21,
9811 22, 23,
9812 24, 25,
9813 30, 31,
9814 26, 27,
9815 28, 29,
9816 19, 18,
9817 16, 17,
9818 32, 33, 34, 35,
9819 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9821 static const int order_1[] =
9823 18, 19, 20, 21, 22, 23, 24, 25,
9824 30, 31,
9825 26, 27, 28, 29,
9826 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9827 0, 1,
9828 32, 33, 34, 35
9830 static const int tiny_order_1[] = {
9831 22, 23,
9832 24, 25,
9833 30, 31,
9834 26, 27,
9835 28, 29,
9836 21, 20, 19, 18,
9837 16, 17,
9838 32, 33, 34, 35,
9839 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9841 static const int order_2[] =
9843 25, 24, 23, 22, 21, 20, 19, 18,
9844 30, 31,
9845 26, 27, 28, 29,
9846 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9847 1, 0,
9848 32, 33, 34, 35
9851 /* Select specific register allocation order.
9852 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
9853 so different allocation order should be used. */
9855 const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
9856 : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
9857 : (AVR_TINY ? tiny_order_0 : order_0));
9859 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
9860 reg_alloc_order[i] = order[i];
9864 /* Implement `TARGET_REGISTER_MOVE_COST' */
9866 static int
9867 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
9868 reg_class_t from, reg_class_t to)
9870 return (from == STACK_REG ? 6
9871 : to == STACK_REG ? 12
9872 : 2);
9876 /* Implement `TARGET_MEMORY_MOVE_COST' */
9878 static int
9879 avr_memory_move_cost (machine_mode mode,
9880 reg_class_t rclass ATTRIBUTE_UNUSED,
9881 bool in ATTRIBUTE_UNUSED)
9883 return (mode == QImode ? 2
9884 : mode == HImode ? 4
9885 : mode == SImode ? 8
9886 : mode == SFmode ? 8
9887 : 16);
9891 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
9892 cost of an RTX operand given its context. X is the rtx of the
9893 operand, MODE is its mode, and OUTER is the rtx_code of this
9894 operand's parent operator. */
9896 static int
9897 avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
9898 int opno, bool speed)
9900 enum rtx_code code = GET_CODE (x);
9901 int total;
9903 switch (code)
9905 case REG:
9906 case SUBREG:
9907 return 0;
9909 case CONST_INT:
9910 case CONST_FIXED:
9911 case CONST_DOUBLE:
9912 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
9914 default:
9915 break;
9918 total = 0;
9919 avr_rtx_costs (x, code, outer, opno, &total, speed);
9920 return total;
9923 /* Worker function for AVR backend's rtx_cost function.
9924 X is rtx expression whose cost is to be calculated.
9925 Return true if the complete cost has been computed.
9926 Return false if subexpressions should be scanned.
9927 In either case, *TOTAL contains the cost result. */
9929 static bool
9930 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
9931 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
9933 enum rtx_code code = (enum rtx_code) codearg;
9934 machine_mode mode = GET_MODE (x);
9935 HOST_WIDE_INT val;
9937 switch (code)
9939 case CONST_INT:
9940 case CONST_FIXED:
9941 case CONST_DOUBLE:
9942 case SYMBOL_REF:
9943 case CONST:
9944 case LABEL_REF:
9945 /* Immediate constants are as cheap as registers. */
9946 *total = 0;
9947 return true;
9949 case MEM:
9950 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9951 return true;
9953 case NEG:
9954 switch (mode)
9956 case QImode:
9957 case SFmode:
9958 *total = COSTS_N_INSNS (1);
9959 break;
9961 case HImode:
9962 case PSImode:
9963 case SImode:
9964 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
9965 break;
9967 default:
9968 return false;
9970 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9971 return true;
9973 case ABS:
9974 switch (mode)
9976 case QImode:
9977 case SFmode:
9978 *total = COSTS_N_INSNS (1);
9979 break;
9981 default:
9982 return false;
9984 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9985 return true;
9987 case NOT:
9988 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9989 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9990 return true;
9992 case ZERO_EXTEND:
9993 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
9994 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
9995 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9996 return true;
9998 case SIGN_EXTEND:
9999 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
10000 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10001 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10002 return true;
10004 case PLUS:
10005 switch (mode)
10007 case QImode:
10008 if (AVR_HAVE_MUL
10009 && MULT == GET_CODE (XEXP (x, 0))
10010 && register_operand (XEXP (x, 1), QImode))
10012 /* multiply-add */
10013 *total = COSTS_N_INSNS (speed ? 4 : 3);
10014 /* multiply-add with constant: will be split and load constant. */
10015 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10016 *total = COSTS_N_INSNS (1) + *total;
10017 return true;
10019 *total = COSTS_N_INSNS (1);
10020 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10021 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10022 break;
10024 case HImode:
10025 if (AVR_HAVE_MUL
10026 && (MULT == GET_CODE (XEXP (x, 0))
10027 || ASHIFT == GET_CODE (XEXP (x, 0)))
10028 && register_operand (XEXP (x, 1), HImode)
10029 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
10030 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
10032 /* multiply-add */
10033 *total = COSTS_N_INSNS (speed ? 5 : 4);
10034 /* multiply-add with constant: will be split and load constant. */
10035 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10036 *total = COSTS_N_INSNS (1) + *total;
10037 return true;
10039 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10041 *total = COSTS_N_INSNS (2);
10042 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10043 speed);
10045 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10046 *total = COSTS_N_INSNS (1);
10047 else
10048 *total = COSTS_N_INSNS (2);
10049 break;
10051 case PSImode:
10052 if (!CONST_INT_P (XEXP (x, 1)))
10054 *total = COSTS_N_INSNS (3);
10055 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10056 speed);
10058 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10059 *total = COSTS_N_INSNS (2);
10060 else
10061 *total = COSTS_N_INSNS (3);
10062 break;
10064 case SImode:
10065 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10067 *total = COSTS_N_INSNS (4);
10068 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10069 speed);
10071 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10072 *total = COSTS_N_INSNS (1);
10073 else
10074 *total = COSTS_N_INSNS (4);
10075 break;
10077 default:
10078 return false;
10080 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10081 return true;
10083 case MINUS:
10084 if (AVR_HAVE_MUL
10085 && QImode == mode
10086 && register_operand (XEXP (x, 0), QImode)
10087 && MULT == GET_CODE (XEXP (x, 1)))
10089 /* multiply-sub */
10090 *total = COSTS_N_INSNS (speed ? 4 : 3);
10091 /* multiply-sub with constant: will be split and load constant. */
10092 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10093 *total = COSTS_N_INSNS (1) + *total;
10094 return true;
10096 if (AVR_HAVE_MUL
10097 && HImode == mode
10098 && register_operand (XEXP (x, 0), HImode)
10099 && (MULT == GET_CODE (XEXP (x, 1))
10100 || ASHIFT == GET_CODE (XEXP (x, 1)))
10101 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
10102 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
10104 /* multiply-sub */
10105 *total = COSTS_N_INSNS (speed ? 5 : 4);
10106 /* multiply-sub with constant: will be split and load constant. */
10107 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10108 *total = COSTS_N_INSNS (1) + *total;
10109 return true;
10111 /* FALLTHRU */
10112 case AND:
10113 case IOR:
10114 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10115 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10116 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10117 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10118 return true;
10120 case XOR:
10121 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10122 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10123 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10124 return true;
10126 case MULT:
10127 switch (mode)
10129 case QImode:
10130 if (AVR_HAVE_MUL)
10131 *total = COSTS_N_INSNS (!speed ? 3 : 4);
10132 else if (!speed)
10133 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10134 else
10135 return false;
10136 break;
10138 case HImode:
10139 if (AVR_HAVE_MUL)
10141 rtx op0 = XEXP (x, 0);
10142 rtx op1 = XEXP (x, 1);
10143 enum rtx_code code0 = GET_CODE (op0);
10144 enum rtx_code code1 = GET_CODE (op1);
10145 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
10146 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
10148 if (ex0
10149 && (u8_operand (op1, HImode)
10150 || s8_operand (op1, HImode)))
10152 *total = COSTS_N_INSNS (!speed ? 4 : 6);
10153 return true;
10155 if (ex0
10156 && register_operand (op1, HImode))
10158 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10159 return true;
10161 else if (ex0 || ex1)
10163 *total = COSTS_N_INSNS (!speed ? 3 : 5);
10164 return true;
10166 else if (register_operand (op0, HImode)
10167 && (u8_operand (op1, HImode)
10168 || s8_operand (op1, HImode)))
10170 *total = COSTS_N_INSNS (!speed ? 6 : 9);
10171 return true;
10173 else
10174 *total = COSTS_N_INSNS (!speed ? 7 : 10);
10176 else if (!speed)
10177 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10178 else
10179 return false;
10180 break;
10182 case PSImode:
10183 if (!speed)
10184 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10185 else
10186 *total = 10;
10187 break;
10189 case SImode:
10190 if (AVR_HAVE_MUL)
10192 if (!speed)
10194 /* Add some additional costs besides CALL like moves etc. */
10196 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10198 else
10200 /* Just a rough estimate. Even with -O2 we don't want bulky
10201 code expanded inline. */
10203 *total = COSTS_N_INSNS (25);
10206 else
10208 if (speed)
10209 *total = COSTS_N_INSNS (300);
10210 else
10211 /* Add some additional costs besides CALL like moves etc. */
10212 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10215 return true;
10217 default:
10218 return false;
10220 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10221 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10222 return true;
10224 case DIV:
10225 case MOD:
10226 case UDIV:
10227 case UMOD:
10228 if (!speed)
10229 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10230 else
10231 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
10232 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10233 /* For div/mod with const-int divisor we have at least the cost of
10234 loading the divisor. */
10235 if (CONST_INT_P (XEXP (x, 1)))
10236 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
10237 /* Add some overall penaly for clobbering and moving around registers */
10238 *total += COSTS_N_INSNS (2);
10239 return true;
10241 case ROTATE:
10242 switch (mode)
10244 case QImode:
10245 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
10246 *total = COSTS_N_INSNS (1);
10248 break;
10250 case HImode:
10251 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
10252 *total = COSTS_N_INSNS (3);
10254 break;
10256 case SImode:
10257 if (CONST_INT_P (XEXP (x, 1)))
10258 switch (INTVAL (XEXP (x, 1)))
10260 case 8:
10261 case 24:
10262 *total = COSTS_N_INSNS (5);
10263 break;
10264 case 16:
10265 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
10266 break;
10268 break;
10270 default:
10271 return false;
10273 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10274 return true;
10276 case ASHIFT:
10277 switch (mode)
10279 case QImode:
10280 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10282 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10283 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10284 speed);
10286 else
10288 val = INTVAL (XEXP (x, 1));
10289 if (val == 7)
10290 *total = COSTS_N_INSNS (3);
10291 else if (val >= 0 && val <= 7)
10292 *total = COSTS_N_INSNS (val);
10293 else
10294 *total = COSTS_N_INSNS (1);
10296 break;
10298 case HImode:
10299 if (AVR_HAVE_MUL)
10301 if (const_2_to_7_operand (XEXP (x, 1), HImode)
10302 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
10303 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
10305 *total = COSTS_N_INSNS (!speed ? 4 : 6);
10306 return true;
10310 if (const1_rtx == (XEXP (x, 1))
10311 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
10313 *total = COSTS_N_INSNS (2);
10314 return true;
10317 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10319 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10320 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10321 speed);
10323 else
10324 switch (INTVAL (XEXP (x, 1)))
10326 case 0:
10327 *total = 0;
10328 break;
10329 case 1:
10330 case 8:
10331 *total = COSTS_N_INSNS (2);
10332 break;
10333 case 9:
10334 *total = COSTS_N_INSNS (3);
10335 break;
10336 case 2:
10337 case 3:
10338 case 10:
10339 case 15:
10340 *total = COSTS_N_INSNS (4);
10341 break;
10342 case 7:
10343 case 11:
10344 case 12:
10345 *total = COSTS_N_INSNS (5);
10346 break;
10347 case 4:
10348 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10349 break;
10350 case 6:
10351 *total = COSTS_N_INSNS (!speed ? 5 : 9);
10352 break;
10353 case 5:
10354 *total = COSTS_N_INSNS (!speed ? 5 : 10);
10355 break;
10356 default:
10357 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10358 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10359 speed);
10361 break;
10363 case PSImode:
10364 if (!CONST_INT_P (XEXP (x, 1)))
10366 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10368 else
10369 switch (INTVAL (XEXP (x, 1)))
10371 case 0:
10372 *total = 0;
10373 break;
10374 case 1:
10375 case 8:
10376 case 16:
10377 *total = COSTS_N_INSNS (3);
10378 break;
10379 case 23:
10380 *total = COSTS_N_INSNS (5);
10381 break;
10382 default:
10383 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10384 break;
10386 break;
10388 case SImode:
10389 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10391 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10392 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10393 speed);
10395 else
10396 switch (INTVAL (XEXP (x, 1)))
10398 case 0:
10399 *total = 0;
10400 break;
10401 case 24:
10402 *total = COSTS_N_INSNS (3);
10403 break;
10404 case 1:
10405 case 8:
10406 case 16:
10407 *total = COSTS_N_INSNS (4);
10408 break;
10409 case 31:
10410 *total = COSTS_N_INSNS (6);
10411 break;
10412 case 2:
10413 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10414 break;
10415 default:
10416 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10417 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10418 speed);
10420 break;
10422 default:
10423 return false;
10425 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10426 return true;
10428 case ASHIFTRT:
10429 switch (mode)
10431 case QImode:
10432 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10434 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10435 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10436 speed);
10438 else
10440 val = INTVAL (XEXP (x, 1));
10441 if (val == 6)
10442 *total = COSTS_N_INSNS (4);
10443 else if (val == 7)
10444 *total = COSTS_N_INSNS (2);
10445 else if (val >= 0 && val <= 7)
10446 *total = COSTS_N_INSNS (val);
10447 else
10448 *total = COSTS_N_INSNS (1);
10450 break;
10452 case HImode:
10453 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10455 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10456 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10457 speed);
10459 else
10460 switch (INTVAL (XEXP (x, 1)))
10462 case 0:
10463 *total = 0;
10464 break;
10465 case 1:
10466 *total = COSTS_N_INSNS (2);
10467 break;
10468 case 15:
10469 *total = COSTS_N_INSNS (3);
10470 break;
10471 case 2:
10472 case 7:
10473 case 8:
10474 case 9:
10475 *total = COSTS_N_INSNS (4);
10476 break;
10477 case 10:
10478 case 14:
10479 *total = COSTS_N_INSNS (5);
10480 break;
10481 case 11:
10482 *total = COSTS_N_INSNS (!speed ? 5 : 6);
10483 break;
10484 case 12:
10485 *total = COSTS_N_INSNS (!speed ? 5 : 7);
10486 break;
10487 case 6:
10488 case 13:
10489 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10490 break;
10491 default:
10492 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10493 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10494 speed);
10496 break;
10498 case PSImode:
10499 if (!CONST_INT_P (XEXP (x, 1)))
10501 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10503 else
10504 switch (INTVAL (XEXP (x, 1)))
10506 case 0:
10507 *total = 0;
10508 break;
10509 case 1:
10510 *total = COSTS_N_INSNS (3);
10511 break;
10512 case 16:
10513 case 8:
10514 *total = COSTS_N_INSNS (5);
10515 break;
10516 case 23:
10517 *total = COSTS_N_INSNS (4);
10518 break;
10519 default:
10520 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10521 break;
10523 break;
10525 case SImode:
10526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10528 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10529 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10530 speed);
10532 else
10533 switch (INTVAL (XEXP (x, 1)))
10535 case 0:
10536 *total = 0;
10537 break;
10538 case 1:
10539 *total = COSTS_N_INSNS (4);
10540 break;
10541 case 8:
10542 case 16:
10543 case 24:
10544 *total = COSTS_N_INSNS (6);
10545 break;
10546 case 2:
10547 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10548 break;
10549 case 31:
10550 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
10551 break;
10552 default:
10553 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10554 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10555 speed);
10557 break;
10559 default:
10560 return false;
10562 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10563 return true;
10565 case LSHIFTRT:
10566 switch (mode)
10568 case QImode:
10569 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10571 *total = COSTS_N_INSNS (!speed ? 4 : 17);
10572 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10573 speed);
10575 else
10577 val = INTVAL (XEXP (x, 1));
10578 if (val == 7)
10579 *total = COSTS_N_INSNS (3);
10580 else if (val >= 0 && val <= 7)
10581 *total = COSTS_N_INSNS (val);
10582 else
10583 *total = COSTS_N_INSNS (1);
10585 break;
10587 case HImode:
10588 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10590 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10591 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10592 speed);
10594 else
10595 switch (INTVAL (XEXP (x, 1)))
10597 case 0:
10598 *total = 0;
10599 break;
10600 case 1:
10601 case 8:
10602 *total = COSTS_N_INSNS (2);
10603 break;
10604 case 9:
10605 *total = COSTS_N_INSNS (3);
10606 break;
10607 case 2:
10608 case 10:
10609 case 15:
10610 *total = COSTS_N_INSNS (4);
10611 break;
10612 case 7:
10613 case 11:
10614 *total = COSTS_N_INSNS (5);
10615 break;
10616 case 3:
10617 case 12:
10618 case 13:
10619 case 14:
10620 *total = COSTS_N_INSNS (!speed ? 5 : 6);
10621 break;
10622 case 4:
10623 *total = COSTS_N_INSNS (!speed ? 5 : 7);
10624 break;
10625 case 5:
10626 case 6:
10627 *total = COSTS_N_INSNS (!speed ? 5 : 9);
10628 break;
10629 default:
10630 *total = COSTS_N_INSNS (!speed ? 5 : 41);
10631 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10632 speed);
10634 break;
10636 case PSImode:
10637 if (!CONST_INT_P (XEXP (x, 1)))
10639 *total = COSTS_N_INSNS (!speed ? 6 : 73);
10641 else
10642 switch (INTVAL (XEXP (x, 1)))
10644 case 0:
10645 *total = 0;
10646 break;
10647 case 1:
10648 case 8:
10649 case 16:
10650 *total = COSTS_N_INSNS (3);
10651 break;
10652 case 23:
10653 *total = COSTS_N_INSNS (5);
10654 break;
10655 default:
10656 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10657 break;
10659 break;
10661 case SImode:
10662 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10664 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10665 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10666 speed);
10668 else
10669 switch (INTVAL (XEXP (x, 1)))
10671 case 0:
10672 *total = 0;
10673 break;
10674 case 1:
10675 *total = COSTS_N_INSNS (4);
10676 break;
10677 case 2:
10678 *total = COSTS_N_INSNS (!speed ? 7 : 8);
10679 break;
10680 case 8:
10681 case 16:
10682 case 24:
10683 *total = COSTS_N_INSNS (4);
10684 break;
10685 case 31:
10686 *total = COSTS_N_INSNS (6);
10687 break;
10688 default:
10689 *total = COSTS_N_INSNS (!speed ? 7 : 113);
10690 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10691 speed);
10693 break;
10695 default:
10696 return false;
10698 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10699 return true;
10701 case COMPARE:
10702 switch (GET_MODE (XEXP (x, 0)))
10704 case QImode:
10705 *total = COSTS_N_INSNS (1);
10706 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10707 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10708 break;
10710 case HImode:
10711 *total = COSTS_N_INSNS (2);
10712 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10713 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10714 else if (INTVAL (XEXP (x, 1)) != 0)
10715 *total += COSTS_N_INSNS (1);
10716 break;
10718 case PSImode:
10719 *total = COSTS_N_INSNS (3);
10720 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
10721 *total += COSTS_N_INSNS (2);
10722 break;
10724 case SImode:
10725 *total = COSTS_N_INSNS (4);
10726 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10727 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10728 else if (INTVAL (XEXP (x, 1)) != 0)
10729 *total += COSTS_N_INSNS (3);
10730 break;
10732 default:
10733 return false;
10735 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10736 return true;
10738 case TRUNCATE:
10739 if (AVR_HAVE_MUL
10740 && LSHIFTRT == GET_CODE (XEXP (x, 0))
10741 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
10742 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10744 if (QImode == mode || HImode == mode)
10746 *total = COSTS_N_INSNS (2);
10747 return true;
10750 break;
10752 default:
10753 break;
10755 return false;
10759 /* Implement `TARGET_RTX_COSTS'. */
10761 static bool
10762 avr_rtx_costs (rtx x, int codearg, int outer_code,
10763 int opno, int *total, bool speed)
10765 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
10766 opno, total, speed);
10768 if (avr_log.rtx_costs)
10770 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10771 done, speed ? "speed" : "size", *total, outer_code, x);
10774 return done;
10778 /* Implement `TARGET_ADDRESS_COST'. */
10780 static int
10781 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
10782 addr_space_t as ATTRIBUTE_UNUSED,
10783 bool speed ATTRIBUTE_UNUSED)
10785 int cost = 4;
10787 if (GET_CODE (x) == PLUS
10788 && CONST_INT_P (XEXP (x, 1))
10789 && (REG_P (XEXP (x, 0))
10790 || GET_CODE (XEXP (x, 0)) == SUBREG))
10792 if (INTVAL (XEXP (x, 1)) >= 61)
10793 cost = 18;
10795 else if (CONSTANT_ADDRESS_P (x))
10797 if (optimize > 0
10798 && io_address_operand (x, QImode))
10799 cost = 2;
10802 if (avr_log.address_cost)
10803 avr_edump ("\n%?: %d = %r\n", cost, x);
10805 return cost;
10808 /* Test for extra memory constraint 'Q'.
10809 It's a memory address based on Y or Z pointer with valid displacement. */
10812 extra_constraint_Q (rtx x)
10814 int ok = 0;
10816 if (GET_CODE (XEXP (x,0)) == PLUS
10817 && REG_P (XEXP (XEXP (x,0), 0))
10818 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
10819 && (INTVAL (XEXP (XEXP (x,0), 1))
10820 <= MAX_LD_OFFSET (GET_MODE (x))))
10822 rtx xx = XEXP (XEXP (x,0), 0);
10823 int regno = REGNO (xx);
10825 ok = (/* allocate pseudos */
10826 regno >= FIRST_PSEUDO_REGISTER
10827 /* strictly check */
10828 || regno == REG_Z || regno == REG_Y
10829 /* XXX frame & arg pointer checks */
10830 || xx == frame_pointer_rtx
10831 || xx == arg_pointer_rtx);
10833 if (avr_log.constraints)
10834 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10835 ok, reload_completed, reload_in_progress, x);
10838 return ok;
10841 /* Convert condition code CONDITION to the valid AVR condition code. */
10843 RTX_CODE
10844 avr_normalize_condition (RTX_CODE condition)
10846 switch (condition)
10848 case GT:
10849 return GE;
10850 case GTU:
10851 return GEU;
10852 case LE:
10853 return LT;
10854 case LEU:
10855 return LTU;
10856 default:
10857 gcc_unreachable ();
10861 /* Helper function for `avr_reorg'. */
10863 static rtx
10864 avr_compare_pattern (rtx_insn *insn)
10866 rtx pattern = single_set (insn);
10868 if (pattern
10869 && NONJUMP_INSN_P (insn)
10870 && SET_DEST (pattern) == cc0_rtx
10871 && GET_CODE (SET_SRC (pattern)) == COMPARE)
10873 machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
10874 machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
10876 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10877 They must not be swapped, thus skip them. */
10879 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
10880 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
10881 return pattern;
10884 return NULL_RTX;
10887 /* Helper function for `avr_reorg'. */
10889 /* Expansion of switch/case decision trees leads to code like
10891 cc0 = compare (Reg, Num)
10892 if (cc0 == 0)
10893 goto L1
10895 cc0 = compare (Reg, Num)
10896 if (cc0 > 0)
10897 goto L2
10899 The second comparison is superfluous and can be deleted.
10900 The second jump condition can be transformed from a
10901 "difficult" one to a "simple" one because "cc0 > 0" and
10902 "cc0 >= 0" will have the same effect here.
10904 This function relies on the way switch/case is being expaned
10905 as binary decision tree. For example code see PR 49903.
10907 Return TRUE if optimization performed.
10908 Return FALSE if nothing changed.
10910 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
10912 We don't want to do this in text peephole because it is
10913 tedious to work out jump offsets there and the second comparison
10914 might have been transormed by `avr_reorg'.
10916 RTL peephole won't do because peephole2 does not scan across
10917 basic blocks. */
static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both comparisons must be identical and both branches must be
     simple conditional jumps (set of pc from an IF_THEN_ELSE).  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 for EQ, the compare must be
     reg-vs-const, and both branches must be of the form
     "if (cc0 <op> 0) goto label".  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11060 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
11061 /* Optimize conditional jumps. */
static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  /* Walk all real insns, looking for cc0-setting comparisons.  */

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      /* First try to delete a redundant duplicate comparison
         (see avr_reorg_remove_redundant_compare).  */

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.
             Try to canonicalize the comparison so the following
             branch becomes a "simple" one.  */

          rtx_insn *next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* reg-reg compare: swap the operands and reverse the
                 branch condition accordingly.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* reg-const compare: if "reg <op> X" can be expressed
                 as "reg <op'> X+1" with a simpler condition, do so.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
/* Return the hard register number used for function return values.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
11143 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
11145 static bool
11146 avr_function_value_regno_p (const unsigned int regno)
11148 return (regno == avr_ret_register ());
11152 /* Implement `TARGET_LIBCALL_VALUE'. */
11153 /* Create an RTX representing the place where a
11154 library function returns a value of mode MODE. */
11156 static rtx
11157 avr_libcall_value (machine_mode mode,
11158 const_rtx func ATTRIBUTE_UNUSED)
11160 int offs = GET_MODE_SIZE (mode);
11162 if (offs <= 4)
11163 offs = (offs + 1) & ~1;
11165 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
11169 /* Implement `TARGET_FUNCTION_VALUE'. */
11170 /* Create an RTX representing the place where a
11171 function returns a value of data type VALTYPE. */
11173 static rtx
11174 avr_function_value (const_tree type,
11175 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11176 bool outgoing ATTRIBUTE_UNUSED)
11178 unsigned int offs;
11180 if (TYPE_MODE (type) != BLKmode)
11181 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
11183 offs = int_size_in_bytes (type);
11184 if (offs < 2)
11185 offs = 2;
11186 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11187 offs = GET_MODE_SIZE (SImode);
11188 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11189 offs = GET_MODE_SIZE (DImode);
11191 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
11195 test_hard_reg_class (enum reg_class rclass, rtx x)
11197 int regno = true_regnum (x);
11198 if (regno < 0)
11199 return 0;
11201 if (TEST_HARD_REG_CLASS (rclass, regno))
11202 return 1;
11204 return 0;
11208 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
11209 and thus is suitable to be skipped by CPSE, SBRC, etc. */
11211 static bool
11212 avr_2word_insn_p (rtx_insn *insn)
11214 if (TARGET_SKIP_BUG
11215 || !insn
11216 || 2 != get_attr_length (insn))
11218 return false;
11221 switch (INSN_CODE (insn))
11223 default:
11224 return false;
11226 case CODE_FOR_movqi_insn:
11227 case CODE_FOR_movuqq_insn:
11228 case CODE_FOR_movqq_insn:
11230 rtx set = single_set (insn);
11231 rtx src = SET_SRC (set);
11232 rtx dest = SET_DEST (set);
11234 /* Factor out LDS and STS from movqi_insn. */
11236 if (MEM_P (dest)
11237 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
11239 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
11241 else if (REG_P (dest)
11242 && MEM_P (src))
11244 return CONSTANT_ADDRESS_P (XEXP (src, 0));
11247 return false;
11250 case CODE_FOR_call_insn:
11251 case CODE_FOR_call_value_insn:
11252 return true;
11258 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
11260 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11261 ? XEXP (dest, 0)
11262 : dest);
11263 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11264 int dest_addr = INSN_ADDRESSES (uid);
11265 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
11267 return (jump_offset == 1
11268 || (jump_offset == 2
11269 && avr_2word_insn_p (next_active_insn (insn))));
11273 /* Worker function for `HARD_REGNO_MODE_OK'. */
11274 /* Returns 1 if a value of mode MODE can be stored starting with hard
11275 register number REGNO. On the enhanced core, anything larger than
11276 1 byte must start in even numbered register for "movw" to work
11277 (this way we don't have to check for odd registers everywhere). */
11280 avr_hard_regno_mode_ok (int regno, machine_mode mode)
11282 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11283 Disallowing QI et al. in these regs might lead to code like
11284 (set (subreg:QI (reg:HI 28) n) ...)
11285 which will result in wrong code because reload does not
11286 handle SUBREGs of hard regsisters like this.
11287 This could be fixed in reload. However, it appears
11288 that fixing reload is not wanted by reload people. */
11290 /* Any GENERAL_REGS register can hold 8-bit values. */
11292 if (GET_MODE_SIZE (mode) == 1)
11293 return 1;
11295 /* FIXME: Ideally, the following test is not needed.
11296 However, it turned out that it can reduce the number
11297 of spill fails. AVR and it's poor endowment with
11298 address registers is extreme stress test for reload. */
11300 if (GET_MODE_SIZE (mode) >= 4
11301 && regno >= REG_X)
11302 return 0;
11304 /* All modes larger than 8 bits should start in an even register. */
11306 return !(regno & 1);
11310 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
11313 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
11315 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11316 represent valid hard registers like, e.g. HI:29. Returning TRUE
11317 for such registers can lead to performance degradation as mentioned
11318 in PR53595. Thus, report invalid hard registers as FALSE. */
11320 if (!avr_hard_regno_mode_ok (regno, mode))
11321 return 0;
11323 /* Return true if any of the following boundaries is crossed:
11324 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
11326 return ((regno <= LAST_CALLEE_SAVED_REG &&
11327 regno + GET_MODE_SIZE (mode) > (LAST_CALLEE_SAVED_REG + 1))
11328 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
11329 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
11333 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
11335 enum reg_class
11336 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
11337 addr_space_t as, RTX_CODE outer_code,
11338 RTX_CODE index_code ATTRIBUTE_UNUSED)
11340 if (!ADDR_SPACE_GENERIC_P (as))
11342 return POINTER_Z_REGS;
11345 if (!avr_strict_X)
11346 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11348 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
11352 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
11354 bool
11355 avr_regno_mode_code_ok_for_base_p (int regno,
11356 machine_mode mode ATTRIBUTE_UNUSED,
11357 addr_space_t as ATTRIBUTE_UNUSED,
11358 RTX_CODE outer_code,
11359 RTX_CODE index_code ATTRIBUTE_UNUSED)
11361 bool ok = false;
11363 if (!ADDR_SPACE_GENERIC_P (as))
11365 if (regno < FIRST_PSEUDO_REGISTER
11366 && regno == REG_Z)
11368 return true;
11371 if (reg_renumber)
11373 regno = reg_renumber[regno];
11375 if (regno == REG_Z)
11377 return true;
11381 return false;
11384 if (regno < FIRST_PSEUDO_REGISTER
11385 && (regno == REG_X
11386 || regno == REG_Y
11387 || regno == REG_Z
11388 || regno == ARG_POINTER_REGNUM))
11390 ok = true;
11392 else if (reg_renumber)
11394 regno = reg_renumber[regno];
11396 if (regno == REG_X
11397 || regno == REG_Y
11398 || regno == REG_Z
11399 || regno == ARG_POINTER_REGNUM)
11401 ok = true;
11405 if (avr_strict_X
11406 && PLUS == outer_code
11407 && regno == REG_X)
11409 ok = false;
11412 return ok;
11416 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
11417 /* Set 32-bit register OP[0] to compile-time constant OP[1].
11418 CLOBBER_REG is a QI clobber register or NULL_RTX.
11419 LEN == NULL: output instructions.
11420 LEN != NULL: set *LEN to the length of the instruction sequence
11421 (in words) printed with LEN = NULL.
11422 If CLEAR_P is true, OP[0] had been cleard to Zero already.
11423 If CLEAR_P is false, nothing is known about OP[0].
11425 The effect on cc0 is as follows:
11427 Load 0 to any register except ZERO_REG : NONE
11428 Load ld register with any value : NONE
11429 Anything else: : CLOBBER */
11431 static void
11432 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
11434 rtx src = op[1];
11435 rtx dest = op[0];
11436 rtx xval, xdest[4];
11437 int ival[4];
11438 int clobber_val = 1234;
11439 bool cooked_clobber_p = false;
11440 bool set_p = false;
11441 machine_mode mode = GET_MODE (dest);
11442 int n, n_bytes = GET_MODE_SIZE (mode);
11444 gcc_assert (REG_P (dest)
11445 && CONSTANT_P (src));
11447 if (len)
11448 *len = 0;
11450 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
11451 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
11453 if (REGNO (dest) < 16
11454 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
11456 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
11459 /* We might need a clobber reg but don't have one. Look at the value to
11460 be loaded more closely. A clobber is only needed if it is a symbol
11461 or contains a byte that is neither 0, -1 or a power of 2. */
11463 if (NULL_RTX == clobber_reg
11464 && !test_hard_reg_class (LD_REGS, dest)
11465 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
11466 || !avr_popcount_each_byte (src, n_bytes,
11467 (1 << 0) | (1 << 1) | (1 << 8))))
11469 /* We have no clobber register but need one. Cook one up.
11470 That's cheaper than loading from constant pool. */
11472 cooked_clobber_p = true;
11473 clobber_reg = all_regs_rtx[REG_Z + 1];
11474 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
11477 /* Now start filling DEST from LSB to MSB. */
11479 for (n = 0; n < n_bytes; n++)
11481 int ldreg_p;
11482 bool done_byte = false;
11483 int j;
11484 rtx xop[3];
11486 /* Crop the n-th destination byte. */
11488 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
11489 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
11491 if (!CONST_INT_P (src)
11492 && !CONST_FIXED_P (src)
11493 && !CONST_DOUBLE_P (src))
11495 static const char* const asm_code[][2] =
11497 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
11498 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
11499 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
11500 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
11503 xop[0] = xdest[n];
11504 xop[1] = src;
11505 xop[2] = clobber_reg;
11507 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
11509 continue;
11512 /* Crop the n-th source byte. */
11514 xval = simplify_gen_subreg (QImode, src, mode, n);
11515 ival[n] = INTVAL (xval);
11517 /* Look if we can reuse the low word by means of MOVW. */
11519 if (n == 2
11520 && n_bytes >= 4
11521 && AVR_HAVE_MOVW)
11523 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
11524 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
11526 if (INTVAL (lo16) == INTVAL (hi16))
11528 if (0 != INTVAL (lo16)
11529 || !clear_p)
11531 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
11534 break;
11538 /* Don't use CLR so that cc0 is set as expected. */
11540 if (ival[n] == 0)
11542 if (!clear_p)
11543 avr_asm_len (ldreg_p ? "ldi %0,0"
11544 : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
11545 : "mov %0,__zero_reg__",
11546 &xdest[n], len, 1);
11547 continue;
11550 if (clobber_val == ival[n]
11551 && REGNO (clobber_reg) == REGNO (xdest[n]))
11553 continue;
11556 /* LD_REGS can use LDI to move a constant value */
11558 if (ldreg_p)
11560 xop[0] = xdest[n];
11561 xop[1] = xval;
11562 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
11563 continue;
11566 /* Try to reuse value already loaded in some lower byte. */
11568 for (j = 0; j < n; j++)
11569 if (ival[j] == ival[n])
11571 xop[0] = xdest[n];
11572 xop[1] = xdest[j];
11574 avr_asm_len ("mov %0,%1", xop, len, 1);
11575 done_byte = true;
11576 break;
11579 if (done_byte)
11580 continue;
11582 /* Need no clobber reg for -1: Use CLR/DEC */
11584 if (-1 == ival[n])
11586 if (!clear_p)
11587 avr_asm_len ("clr %0", &xdest[n], len, 1);
11589 avr_asm_len ("dec %0", &xdest[n], len, 1);
11590 continue;
11592 else if (1 == ival[n])
11594 if (!clear_p)
11595 avr_asm_len ("clr %0", &xdest[n], len, 1);
11597 avr_asm_len ("inc %0", &xdest[n], len, 1);
11598 continue;
11601 /* Use T flag or INC to manage powers of 2 if we have
11602 no clobber reg. */
11604 if (NULL_RTX == clobber_reg
11605 && single_one_operand (xval, QImode))
11607 xop[0] = xdest[n];
11608 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
11610 gcc_assert (constm1_rtx != xop[1]);
11612 if (!set_p)
11614 set_p = true;
11615 avr_asm_len ("set", xop, len, 1);
11618 if (!clear_p)
11619 avr_asm_len ("clr %0", xop, len, 1);
11621 avr_asm_len ("bld %0,%1", xop, len, 1);
11622 continue;
11625 /* We actually need the LD_REGS clobber reg. */
11627 gcc_assert (NULL_RTX != clobber_reg);
11629 xop[0] = xdest[n];
11630 xop[1] = xval;
11631 xop[2] = clobber_reg;
11632 clobber_val = ival[n];
11634 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
11635 "mov %0,%2", xop, len, 2);
11638 /* If we cooked up a clobber reg above, restore it. */
11640 if (cooked_clobber_p)
11642 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
11647 /* Reload the constant OP[1] into the HI register OP[0].
11648 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11649 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
11650 need a clobber reg or have to cook one up.
11652 PLEN == NULL: Output instructions.
11653 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
11654 by the insns printed.
11656 Return "". */
11658 const char*
11659 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
11661 output_reload_in_const (op, clobber_reg, plen, false);
11662 return "";
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  /* Pre-clearing is only worthwhile with MOVW, for NO_LD_REGS
     destinations, and for numeric constants (no symbols).  */
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR R3   CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Dry-run both variants: with and without pre-clearing.  */
      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the 3 clearing insns emitted above.  */
          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
11727 const char*
11728 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
11730 output_reload_in_const (op, clobber_reg, len, false);
11731 return "";
11735 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
11737 void
11738 avr_output_addr_vec_elt (FILE *stream, int value)
11740 if (AVR_HAVE_JMP_CALL)
11741 fprintf (stream, "\t.word gs(.L%d)\n", value);
11742 else
11743 fprintf (stream, "\trjmp .L%d\n", value);
/* Adjust fixed registers, call-used registers and the allocation order
   for Tiny Core (AVR_TINY) devices; a no-op for all other devices.
   Presumably the TARGET_CONDITIONAL_REGISTER_USAGE hook worker -- the
   name matches that hook's convention; confirm against the hook table.  */

static void
avr_conditional_register_usage(void)
{
  if (AVR_TINY)
    {
      unsigned int i;

      /* Preferred allocation order for Tiny Core: upper registers first,
         pointer registers (X/Z before Y), then the unavailable lower half.  */
      const int tiny_reg_alloc_order[] = {
        24, 25,
        22, 23,
        30, 31,
        26, 27,
        28, 29,
        21, 20, 19, 18,
        16, 17,
        32, 33, 34, 35,
        15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
      };

      /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
         - R0-R15 are not available in Tiny Core devices
         - R16 and R17 are fixed registers.  */

      for (i = 0; i <= 17; i++)
        {
          fixed_regs[i] = 1;
          call_used_regs[i] = 1;
        }

      /* Set R18 to R21 as callee saved registers
         - R18, R19, R20 and R21 are the callee saved registers in
           Tiny Core devices  */

      for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
        {
          call_used_regs[i] = 0;
        }

      /* Update register allocation order for Tiny Core devices */

      for (i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
        {
          reg_alloc_order[i] = tiny_reg_alloc_order[i];
        }

      /* These register classes are empty on Tiny Core: no ADIW/SBIW-capable
         pairs and no registers outside LD_REGS remain usable.  */
      CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
      CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
    }
}
11796 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
11797 /* Returns true if SCRATCH are safe to be allocated as a scratch
11798 registers (for a define_peephole2) in the current function. */
11800 static bool
11801 avr_hard_regno_scratch_ok (unsigned int regno)
11803 /* Interrupt functions can only use registers that have already been saved
11804 by the prologue, even if they would normally be call-clobbered. */
11806 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11807 && !df_regs_ever_live_p (regno))
11808 return false;
11810 /* Don't allow hard registers that might be part of the frame pointer.
11811 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11812 and don't care for a frame pointer that spans more than one register. */
11814 if ((!reload_completed || frame_pointer_needed)
11815 && (regno == REG_Y || regno == REG_Y + 1))
11817 return false;
11820 return true;
11824 /* Worker function for `HARD_REGNO_RENAME_OK'. */
11825 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
11828 avr_hard_regno_rename_ok (unsigned int old_reg,
11829 unsigned int new_reg)
11831 /* Interrupt functions can only use registers that have already been
11832 saved by the prologue, even if they would normally be
11833 call-clobbered. */
11835 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11836 && !df_regs_ever_live_p (new_reg))
11837 return 0;
11839 /* Don't allow hard registers that might be part of the frame pointer.
11840 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11841 and don't care for a frame pointer that spans more than one register. */
11843 if ((!reload_completed || frame_pointer_needed)
11844 && (old_reg == REG_Y || old_reg == REG_Y + 1
11845 || new_reg == REG_Y || new_reg == REG_Y + 1))
11847 return 0;
11850 return 1;
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* Target out of RJMP range: emit a skip over a long JMP instead.  */
  bool long_jump = get_attr_length (insn) >= 4;
  /* If the branch only jumps over one insn, skipping the insn directly
     with the inverted condition is shorter than branching.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit reduce to EQ/NE on that bit.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      if (low_io_address_operand (operands[1], QImode))
        {
          /* I/O address reachable by SBIS/SBIC: skip if bit set/cleared.  */
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Higher I/O addresses: read into __tmp_reg__ first, then use
             register skip insns SBRS/SBRC.  */
          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* %T1%T2 selects byte and bit of a multi-byte register operand.  */
      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
11924 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
11926 static void
11927 avr_asm_out_ctor (rtx symbol, int priority)
11929 fputs ("\t.global __do_global_ctors\n", asm_out_file);
11930 default_ctor_section_asm_out_constructor (symbol, priority);
11934 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
11936 static void
11937 avr_asm_out_dtor (rtx symbol, int priority)
11939 fputs ("\t.global __do_global_dtors\n", asm_out_file);
11940 default_dtor_section_asm_out_destructor (symbol, priority);
11944 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
11946 static bool
11947 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
11949 HOST_WIDE_INT size = int_size_in_bytes (type);
11950 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
11952 /* In avr, there are 8 return registers. But, for Tiny Core
11953 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
11954 Return true if size is unknown or greater than the limit. */
11956 if (size == -1 || size > ret_size_limit)
11958 return true;
11960 else
11962 return false;
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The true break-even point between a jump table and an if-else tree
     depends on factors unknown here: whether 8-bit comparisons suffice,
     the case-value range and reuse, register allocation, etc.
     Empirically, 7 is a good choice.  */
  const unsigned int default_threshold = 7;

  return default_threshold;
}
11983 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
11985 static machine_mode
11986 avr_addr_space_address_mode (addr_space_t as)
11988 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
/* On AVR, pointers have the same mode as addresses, so just delegate.  */

static machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  return avr_addr_space_address_mode (as);
}
12001 /* Helper for following function. */
12003 static bool
12004 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12006 gcc_assert (REG_P (reg));
12008 if (strict)
12010 return REGNO (reg) == REG_Z;
12013 /* Avoid combine to propagate hard regs. */
12015 if (can_create_pseudo_p()
12016 && REGNO (reg) < REG_Z)
12018 return false;
12021 return true;
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* Return true iff X is a legitimate address of mode MODE in address
   space AS.  STRICT requires hard-register bases to be acceptable.  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is accessed via (E)LPM: only Z, optionally with
         post-increment, can address it.  */
      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* 24-bit addresses appear as (lo_sum high-byte-reg Z).  */
      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional tracing for -mlog=legitimate_address_p.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
12107 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
12109 static rtx
12110 avr_addr_space_legitimize_address (rtx x, rtx old_x,
12111 machine_mode mode, addr_space_t as)
12113 if (ADDR_SPACE_GENERIC_P (as))
12114 return avr_legitimize_address (x, old_x, mode);
12116 if (avr_log.legitimize_address)
12118 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
12121 return old_x;
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* Convert pointer SRC from TYPE_FROM's to TYPE_TO's address space,
   emitting extension / truncation insns as needed.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST/PLUS wrappers to find an underlying SYMBOL_REF.  */
      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Zero-extend when the high byte is 0, otherwise extend with the
         segment byte MSB.  */
      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */
  return src;
}
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
/* Treat every address space as a subset of every other so that all
   pointer conversions are permitted (diagnosed separately by
   avr_convert_to_type below).  */

static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess.  */

  return true;
}
/* Implement `TARGET_CONVERT_TO_TYPE'.  */
/* Diagnose pointer conversions into a non-enclosing address space and
   return the (possibly rebuilt) conversion, or NULL_TREE for the
   default handling.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

         (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

         void f (const __flash char*);

         void g (const char *p)
         {
           f ((const __flash*) p);
         }

     under the assumption that an explicit casts means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Converting into MEMX (the all-enclosing 24-bit space) is always
         safe; only warn about conversions between other, distinct spaces.  */
      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0:  Also emit a move insn that copies the contents of that
               hard register into the new pseudo.

   HREG != 0:  Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* Walk OP[] (and HREG[] in lockstep when supplied) while unprocessed
     OPMASK bits remain.  */
  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      /* HREG advances per iteration, mirroring OP.  */
      if (hreg)
        hreg++;
    }
}
/* Replace hard-register input operands per OPMASK / RMASK (see
   avr_fix_operands) and emit moves that load the new pseudos.  */

void
avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
{
  avr_fix_operands (op, NULL, opmask, rmask);
}
12317 /* Helper for the function below: If bit n of MASK is set and
12318 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12319 Otherwise do nothing for that n. Return TRUE. */
12321 static bool
12322 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
12324 for (; mask; mask >>= 1, op++, hreg++)
12325 if ((mask & 1)
12326 && *hreg)
12327 emit_move_insn (*hreg, *op);
12329 return true;
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Swap prohibited hard-reg operands for pseudos, remembering the
     original hard regs in HREG[].  */
  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy results back from the pseudos into the original hard regs.  */
  return avr_move_fixed_operands (op, hreg, opmask);
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand compination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot write to flash.  */
  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only constant byte counts are supported.  */
  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: split into 16-bit base and high byte.  */
      gcc_assert (as == ADDR_SPACE_MEMX);

      /* NOTE(review): this path uses `count < 0x100' while the non-MEMX
         path below uses `count <= 0x100' -- presumably intentional since
         the loop counters are set up differently; confirm before touching.  */
      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Multi-segment flash: load the segment number into RAMPZ.  */
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* MEMX: load routine selected at run time via the high address byte
         passed in R23.  */
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: print the instructions; otherwise only count them
   into *PLEN.  Return "".  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW only works on the upper register pairs.  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* Plain LPM does not post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
12570 /* Helper for __builtin_avr_delay_cycles */
12572 static rtx
12573 avr_mem_clobber (void)
12575 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
12576 MEM_VOLATILE_P (mem) = 1;
12577 return mem;
/* Expand __builtin_avr_delay_cycles: emit insns that burn exactly
   OPERANDS0 (truncated to 32 bits) CPU cycles.  Larger counts use
   nested delay loops of decreasing width, the remainder is padded
   with NOPs.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: 6 cycles per iteration plus 9 cycles overhead.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop: 5 cycles per iteration plus 7 cycles overhead.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop: 4 cycles per iteration plus 5 cycles overhead.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop: 3 cycles per iteration.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Pad the remainder with 2-cycle and 1-cycle NOP insns.  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
12643 /* Compute the image of x under f, i.e. perform x --> f(x) */
12645 static int
12646 avr_map (unsigned int f, int x)
12648 return x < 8 ? (f >> (4 * x)) & 0xf : 0;
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

/* Compute the metric selected by MODE (one of the enum values above)
   over the nibble map A, scanning positions 0..7.  */

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        gcc_unreachable();
    }

  return metric;
}
12699 /* Return true if IVAL has a 0xf in its hexadecimal representation
12700 and false, otherwise. Only nibbles 0..7 are taken into account.
12701 Used as constraint helper for C0f and Cxf. */
12703 bool
12704 avr_has_nibble_0xf (rtx ival)
12706 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
12707 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example: Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;

/* Candidate decompositions: identity, the 8 nibble rotations and a few
   shifts, with precomputed inverses and AVR shift/rotate costs.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  /* Does F insert any bits of the value operand at all?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* Start pessimistic: mark as "no decomposition".  */
  f_ginv.cost = -1;

  /* Step 1: Computing F o G^-1 */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2: Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
      the cost of the insertion plus the cost of the operation.  */

  /* Step 2a: Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b: Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
12842 /* Insert bits from XOP[1] into XOP[0] according to MAP.
12843 XOP[0] and XOP[1] don't overlap.
12844 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
12845 If FIXP_P = false: Just move the bit if its position in the destination
12846 is different to its source position. */
12848 static void
12849 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
12851 int bit_dest, b;
12853 /* T-flag contains this bit of the source, i.e. of XOP[1] */
12854 int t_bit_src = -1;
12856 /* We order the operations according to the requested source bit b. */
12858 for (b = 0; b < 8; b++)
12859 for (bit_dest = 0; bit_dest < 8; bit_dest++)
12861 int bit_src = avr_map (map, bit_dest);
12863 if (b != bit_src
12864 || bit_src >= 8
12865 /* Same position: No need to copy as requested by FIXP_P. */
12866 || (bit_dest == bit_src && !fixp_p))
12867 continue;
12869 if (t_bit_src != bit_src)
12871 /* Source bit is not yet in T: Store it to T. */
12873 t_bit_src = bit_src;
12875 xop[3] = GEN_INT (bit_src);
12876 avr_asm_len ("bst %T1%T3", xop, plen, 1);
12879 /* Load destination bit with T. */
12881 xop[3] = GEN_INT (bit_dest);
12882 avr_asm_len ("bld %T0%T3", xop, plen, 1);
12887 /* PLEN == 0: Print assembler code for `insert_bits'.
12888 PLEN != 0: Compute code length in bytes.
12890 OP[0]: Result
12891 OP[1]: The mapping composed of nibbles. If nibble no. N is
12892 0: Bit N of result is copied from bit OP[2].0
12893 ... ...
12894 7: Bit N of result is copied from bit OP[2].7
12895 0xf: Bit N of result is copied from bit OP[3].N
12896 OP[2]: Bits to be inserted
12897 OP[3]: Target value */
12899 const char*
12900 avr_out_insert_bits (rtx *op, int *plen)
12902 unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
12903 unsigned mask_fixed;
12904 bool fixp_p = true;
12905 rtx xop[4];
12907 xop[0] = op[0];
12908 xop[1] = op[2];
12909 xop[2] = op[3];
12911 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
12913 if (plen)
12914 *plen = 0;
12915 else if (flag_print_asm_name)
12916 fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);
12918 /* If MAP has fixed points it might be better to initialize the result
12919 with the bits to be inserted instead of moving all bits by hand. */
12921 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
12923 if (REGNO (xop[0]) == REGNO (xop[1]))
12925 /* Avoid early-clobber conflicts */
12927 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
12928 xop[1] = tmp_reg_rtx;
12929 fixp_p = false;
12932 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
12934 /* XOP[2] is used and reloaded to XOP[0] already */
12936 int n_fix = 0, n_nofix = 0;
12938 gcc_assert (REG_P (xop[2]));
12940 /* Get the code size of the bit insertions; once with all bits
12941 moved and once with fixed points omitted. */
12943 avr_move_bits (xop, map, true, &n_fix);
12944 avr_move_bits (xop, map, false, &n_nofix);
12946 if (fixp_p && n_fix - n_nofix > 3)
12948 xop[3] = gen_int_mode (~mask_fixed, QImode);
12950 avr_asm_len ("eor %0,%1" CR_TAB
12951 "andi %0,%3" CR_TAB
12952 "eor %0,%1", xop, plen, 3);
12953 fixp_p = false;
12956 else
12958 /* XOP[2] is unused */
12960 if (fixp_p && mask_fixed)
12962 avr_asm_len ("mov %0,%1", xop, plen, 1);
12963 fixp_p = false;
12967 /* Move/insert remaining bits. */
12969 avr_move_bits (xop, map, fixp_p, plen);
12971 return "";
12975 /* IDs for all the AVR builtins. */
12977 enum avr_builtin_id
12979 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
12980 AVR_BUILTIN_ ## NAME,
12981 #include "builtins.def"
12982 #undef DEF_BUILTIN
12984 AVR_BUILTIN_COUNT
12987 struct GTY(()) avr_builtin_description
12989 enum insn_code icode;
12990 int n_args;
12991 tree fndecl;
12995 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
12996 that a built-in's ID can be used to access the built-in by means of
12997 avr_bdesc[ID] */
12999 static GTY(()) struct avr_builtin_description
13000 avr_bdesc[AVR_BUILTIN_COUNT] =
13002 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
13003 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
13004 #include "builtins.def"
13005 #undef DEF_BUILTIN
13009 /* Implement `TARGET_BUILTIN_DECL'. */
13011 static tree
13012 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13014 if (id < AVR_BUILTIN_COUNT)
13015 return avr_bdesc[id].fndecl;
13017 return error_mark_node;
13021 static void
13022 avr_init_builtin_int24 (void)
13024 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
13025 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13027 lang_hooks.types.register_builtin_type (int24_type, "__int24");
13028 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
13032 /* Implement `TARGET_INIT_BUILTINS' */
13033 /* Set up all builtin functions for this target. */
13035 static void
13036 avr_init_builtins (void)
13038 tree void_ftype_void
13039 = build_function_type_list (void_type_node, NULL_TREE);
13040 tree uchar_ftype_uchar
13041 = build_function_type_list (unsigned_char_type_node,
13042 unsigned_char_type_node,
13043 NULL_TREE);
13044 tree uint_ftype_uchar_uchar
13045 = build_function_type_list (unsigned_type_node,
13046 unsigned_char_type_node,
13047 unsigned_char_type_node,
13048 NULL_TREE);
13049 tree int_ftype_char_char
13050 = build_function_type_list (integer_type_node,
13051 char_type_node,
13052 char_type_node,
13053 NULL_TREE);
13054 tree int_ftype_char_uchar
13055 = build_function_type_list (integer_type_node,
13056 char_type_node,
13057 unsigned_char_type_node,
13058 NULL_TREE);
13059 tree void_ftype_ulong
13060 = build_function_type_list (void_type_node,
13061 long_unsigned_type_node,
13062 NULL_TREE);
13064 tree uchar_ftype_ulong_uchar_uchar
13065 = build_function_type_list (unsigned_char_type_node,
13066 long_unsigned_type_node,
13067 unsigned_char_type_node,
13068 unsigned_char_type_node,
13069 NULL_TREE);
13071 tree const_memx_void_node
13072 = build_qualified_type (void_type_node,
13073 TYPE_QUAL_CONST
13074 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
13076 tree const_memx_ptr_type_node
13077 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
13079 tree char_ftype_const_memx_ptr
13080 = build_function_type_list (char_type_node,
13081 const_memx_ptr_type_node,
13082 NULL);
13084 #define ITYP(T) \
13085 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
13087 #define FX_FTYPE_FX(fx) \
13088 tree fx##r_ftype_##fx##r \
13089 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
13090 tree fx##k_ftype_##fx##k \
13091 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
13093 #define FX_FTYPE_FX_INT(fx) \
13094 tree fx##r_ftype_##fx##r_int \
13095 = build_function_type_list (node_##fx##r, node_##fx##r, \
13096 integer_type_node, NULL); \
13097 tree fx##k_ftype_##fx##k_int \
13098 = build_function_type_list (node_##fx##k, node_##fx##k, \
13099 integer_type_node, NULL)
13101 #define INT_FTYPE_FX(fx) \
13102 tree int_ftype_##fx##r \
13103 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
13104 tree int_ftype_##fx##k \
13105 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
13107 #define INTX_FTYPE_FX(fx) \
13108 tree int##fx##r_ftype_##fx##r \
13109 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
13110 tree int##fx##k_ftype_##fx##k \
13111 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
13113 #define FX_FTYPE_INTX(fx) \
13114 tree fx##r_ftype_int##fx##r \
13115 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
13116 tree fx##k_ftype_int##fx##k \
13117 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
13119 tree node_hr = short_fract_type_node;
13120 tree node_nr = fract_type_node;
13121 tree node_lr = long_fract_type_node;
13122 tree node_llr = long_long_fract_type_node;
13124 tree node_uhr = unsigned_short_fract_type_node;
13125 tree node_unr = unsigned_fract_type_node;
13126 tree node_ulr = unsigned_long_fract_type_node;
13127 tree node_ullr = unsigned_long_long_fract_type_node;
13129 tree node_hk = short_accum_type_node;
13130 tree node_nk = accum_type_node;
13131 tree node_lk = long_accum_type_node;
13132 tree node_llk = long_long_accum_type_node;
13134 tree node_uhk = unsigned_short_accum_type_node;
13135 tree node_unk = unsigned_accum_type_node;
13136 tree node_ulk = unsigned_long_accum_type_node;
13137 tree node_ullk = unsigned_long_long_accum_type_node;
13140 /* For absfx builtins. */
13142 FX_FTYPE_FX (h);
13143 FX_FTYPE_FX (n);
13144 FX_FTYPE_FX (l);
13145 FX_FTYPE_FX (ll);
13147 /* For roundfx builtins. */
13149 FX_FTYPE_FX_INT (h);
13150 FX_FTYPE_FX_INT (n);
13151 FX_FTYPE_FX_INT (l);
13152 FX_FTYPE_FX_INT (ll);
13154 FX_FTYPE_FX_INT (uh);
13155 FX_FTYPE_FX_INT (un);
13156 FX_FTYPE_FX_INT (ul);
13157 FX_FTYPE_FX_INT (ull);
13159 /* For countlsfx builtins. */
13161 INT_FTYPE_FX (h);
13162 INT_FTYPE_FX (n);
13163 INT_FTYPE_FX (l);
13164 INT_FTYPE_FX (ll);
13166 INT_FTYPE_FX (uh);
13167 INT_FTYPE_FX (un);
13168 INT_FTYPE_FX (ul);
13169 INT_FTYPE_FX (ull);
13171 /* For bitsfx builtins. */
13173 INTX_FTYPE_FX (h);
13174 INTX_FTYPE_FX (n);
13175 INTX_FTYPE_FX (l);
13176 INTX_FTYPE_FX (ll);
13178 INTX_FTYPE_FX (uh);
13179 INTX_FTYPE_FX (un);
13180 INTX_FTYPE_FX (ul);
13181 INTX_FTYPE_FX (ull);
13183 /* For fxbits builtins. */
13185 FX_FTYPE_INTX (h);
13186 FX_FTYPE_INTX (n);
13187 FX_FTYPE_INTX (l);
13188 FX_FTYPE_INTX (ll);
13190 FX_FTYPE_INTX (uh);
13191 FX_FTYPE_INTX (un);
13192 FX_FTYPE_INTX (ul);
13193 FX_FTYPE_INTX (ull);
13196 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
13198 int id = AVR_BUILTIN_ ## NAME; \
13199 const char *Name = "__builtin_avr_" #NAME; \
13200 char *name = (char*) alloca (1 + strlen (Name)); \
13202 gcc_assert (id < AVR_BUILTIN_COUNT); \
13203 avr_bdesc[id].fndecl \
13204 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
13205 BUILT_IN_MD, LIBNAME, NULL_TREE); \
13207 #include "builtins.def"
13208 #undef DEF_BUILTIN
13210 avr_init_builtin_int24 ();
13214 /* Subroutine of avr_expand_builtin to expand vanilla builtins
13215 with non-void result and 1 ... 3 arguments. */
13217 static rtx
13218 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
13220 rtx pat, xop[3];
13221 int n, n_args = call_expr_nargs (exp);
13222 machine_mode tmode = insn_data[icode].operand[0].mode;
13224 gcc_assert (n_args >= 1 && n_args <= 3);
13226 if (target == NULL_RTX
13227 || GET_MODE (target) != tmode
13228 || !insn_data[icode].operand[0].predicate (target, tmode))
13230 target = gen_reg_rtx (tmode);
13233 for (n = 0; n < n_args; n++)
13235 tree arg = CALL_EXPR_ARG (exp, n);
13236 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13237 machine_mode opmode = GET_MODE (op);
13238 machine_mode mode = insn_data[icode].operand[n+1].mode;
13240 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
13242 opmode = HImode;
13243 op = gen_lowpart (HImode, op);
13246 /* In case the insn wants input operands in modes different from
13247 the result, abort. */
13249 gcc_assert (opmode == mode || opmode == VOIDmode);
13251 if (!insn_data[icode].operand[n+1].predicate (op, mode))
13252 op = copy_to_mode_reg (mode, op);
13254 xop[n] = op;
13257 switch (n_args)
13259 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
13260 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
13261 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
13263 default:
13264 gcc_unreachable();
13267 if (pat == NULL_RTX)
13268 return NULL_RTX;
13270 emit_insn (pat);
13272 return target;
13276 /* Implement `TARGET_EXPAND_BUILTIN'. */
13277 /* Expand an expression EXP that calls a built-in function,
13278 with result going to TARGET if that's convenient
13279 (and in mode MODE if that's convenient).
13280 SUBTARGET may be used as the target for computing one of EXP's operands.
13281 IGNORE is nonzero if the value is to be ignored. */
13283 static rtx
13284 avr_expand_builtin (tree exp, rtx target,
13285 rtx subtarget ATTRIBUTE_UNUSED,
13286 machine_mode mode ATTRIBUTE_UNUSED,
13287 int ignore)
13289 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
13290 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
13291 unsigned int id = DECL_FUNCTION_CODE (fndecl);
13292 const struct avr_builtin_description *d = &avr_bdesc[id];
13293 tree arg0;
13294 rtx op0;
13296 gcc_assert (id < AVR_BUILTIN_COUNT);
13298 switch (id)
13300 case AVR_BUILTIN_NOP:
13301 emit_insn (gen_nopv (GEN_INT(1)));
13302 return 0;
13304 case AVR_BUILTIN_DELAY_CYCLES:
13306 arg0 = CALL_EXPR_ARG (exp, 0);
13307 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13309 if (!CONST_INT_P (op0))
13310 error ("%s expects a compile time integer constant", bname);
13311 else
13312 avr_expand_delay_cycles (op0);
13314 return NULL_RTX;
13317 case AVR_BUILTIN_INSERT_BITS:
13319 arg0 = CALL_EXPR_ARG (exp, 0);
13320 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13322 if (!CONST_INT_P (op0))
13324 error ("%s expects a compile time long integer constant"
13325 " as first argument", bname);
13326 return target;
13329 break;
13332 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
13333 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
13334 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
13335 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
13337 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
13338 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
13339 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
13340 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
13342 /* Warn about odd rounding. Rounding points >= FBIT will have
13343 no effect. */
13345 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
13346 break;
13348 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
13350 if (rbit >= (int) GET_MODE_FBIT (mode))
13352 warning (OPT_Wextra, "rounding to %d bits has no effect for "
13353 "fixed-point value with %d fractional bits",
13354 rbit, GET_MODE_FBIT (mode));
13356 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
13357 EXPAND_NORMAL);
13359 else if (rbit <= - (int) GET_MODE_IBIT (mode))
13361 warning (0, "rounding result will always be 0");
13362 return CONST0_RTX (mode);
13365 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
13367 TR 18037 only specifies results for RP > 0. However, the
13368 remaining cases of -IBIT < RP <= 0 can easily be supported
13369 without any additional overhead. */
13371 break; /* round */
13374 /* No fold found and no insn: Call support function from libgcc. */
13376 if (d->icode == CODE_FOR_nothing
13377 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
13379 return expand_call (exp, target, ignore);
13382 /* No special treatment needed: vanilla expand. */
13384 gcc_assert (d->icode != CODE_FOR_nothing);
13385 gcc_assert (d->n_args == call_expr_nargs (exp));
13387 if (d->n_args == 0)
13389 emit_insn ((GEN_FCN (d->icode)) (target));
13390 return NULL_RTX;
13393 return avr_default_expand_builtin (d->icode, exp, target);
13397 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
13399 static tree
13400 avr_fold_absfx (tree tval)
13402 if (FIXED_CST != TREE_CODE (tval))
13403 return NULL_TREE;
13405 /* Our fixed-points have no padding: Use double_int payload directly. */
13407 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
13408 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
13409 double_int ival = fval.data.sext (bits);
13411 if (!ival.is_negative())
13412 return tval;
13414 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
13416 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
13417 ? double_int::max_value (bits, false)
13418 : -ival;
13420 return build_fixed (TREE_TYPE (tval), fval);
13424 /* Implement `TARGET_FOLD_BUILTIN'. */
13426 static tree
13427 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
13428 bool ignore ATTRIBUTE_UNUSED)
13430 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
13431 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
13433 if (!optimize)
13434 return NULL_TREE;
13436 switch (fcode)
13438 default:
13439 break;
13441 case AVR_BUILTIN_SWAP:
13443 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
13444 build_int_cst (val_type, 4));
13447 case AVR_BUILTIN_ABSHR:
13448 case AVR_BUILTIN_ABSR:
13449 case AVR_BUILTIN_ABSLR:
13450 case AVR_BUILTIN_ABSLLR:
13452 case AVR_BUILTIN_ABSHK:
13453 case AVR_BUILTIN_ABSK:
13454 case AVR_BUILTIN_ABSLK:
13455 case AVR_BUILTIN_ABSLLK:
13456 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
13458 return avr_fold_absfx (arg[0]);
13460 case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
13461 case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
13462 case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
13463 case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:
13465 case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
13466 case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
13467 case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
13468 case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:
13470 case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
13471 case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
13472 case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
13473 case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:
13475 case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
13476 case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
13477 case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
13478 case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:
13480 gcc_assert (TYPE_PRECISION (val_type)
13481 == TYPE_PRECISION (TREE_TYPE (arg[0])));
13483 return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
13485 case AVR_BUILTIN_INSERT_BITS:
13487 tree tbits = arg[1];
13488 tree tval = arg[2];
13489 tree tmap;
13490 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
13491 unsigned int map;
13492 bool changed = false;
13493 unsigned i;
13494 avr_map_op_t best_g;
13496 if (TREE_CODE (arg[0]) != INTEGER_CST)
13498 /* No constant as first argument: Don't fold this and run into
13499 error in avr_expand_builtin. */
13501 break;
13504 tmap = wide_int_to_tree (map_type, arg[0]);
13505 map = TREE_INT_CST_LOW (tmap);
13507 if (TREE_CODE (tval) != INTEGER_CST
13508 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
13510 /* There are no F in the map, i.e. 3rd operand is unused.
13511 Replace that argument with some constant to render
13512 respective input unused. */
13514 tval = build_int_cst (val_type, 0);
13515 changed = true;
13518 if (TREE_CODE (tbits) != INTEGER_CST
13519 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
13521 /* Similar for the bits to be inserted. If they are unused,
13522 we can just as well pass 0. */
13524 tbits = build_int_cst (val_type, 0);
13527 if (TREE_CODE (tbits) == INTEGER_CST)
13529 /* Inserting bits known at compile time is easy and can be
13530 performed by AND and OR with appropriate masks. */
13532 int bits = TREE_INT_CST_LOW (tbits);
13533 int mask_ior = 0, mask_and = 0xff;
13535 for (i = 0; i < 8; i++)
13537 int mi = avr_map (map, i);
13539 if (mi < 8)
13541 if (bits & (1 << mi)) mask_ior |= (1 << i);
13542 else mask_and &= ~(1 << i);
13546 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
13547 build_int_cst (val_type, mask_ior));
13548 return fold_build2 (BIT_AND_EXPR, val_type, tval,
13549 build_int_cst (val_type, mask_and));
13552 if (changed)
13553 return build_call_expr (fndecl, 3, tmap, tbits, tval);
13555 /* If bits don't change their position we can use vanilla logic
13556 to merge the two arguments. */
13558 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
13560 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
13561 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
13563 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
13564 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
13565 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
13568 /* Try to decomposing map to reduce overall cost. */
13570 if (avr_log.builtin)
13571 avr_edump ("\n%?: %x\n%?: ROL cost: ", map);
13573 best_g = avr_map_op[0];
13574 best_g.cost = 1000;
13576 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
13578 avr_map_op_t g
13579 = avr_map_decompose (map, avr_map_op + i,
13580 TREE_CODE (tval) == INTEGER_CST);
13582 if (g.cost >= 0 && g.cost < best_g.cost)
13583 best_g = g;
13586 if (avr_log.builtin)
13587 avr_edump ("\n");
13589 if (best_g.arg == 0)
13590 /* No optimization found */
13591 break;
13593 /* Apply operation G to the 2nd argument. */
13595 if (avr_log.builtin)
13596 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
13597 best_g.str, best_g.arg, best_g.map, best_g.cost);
13599 /* Do right-shifts arithmetically: They copy the MSB instead of
13600 shifting in a non-usable value (0) as with logic right-shift. */
13602 tbits = fold_convert (signed_char_type_node, tbits);
13603 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
13604 build_int_cst (val_type, best_g.arg));
13605 tbits = fold_convert (val_type, tbits);
13607 /* Use map o G^-1 instead of original map to undo the effect of G. */
13609 tmap = wide_int_to_tree (map_type, best_g.map);
13611 return build_call_expr (fndecl, 3, tmap, tbits, tval);
13612 } /* AVR_BUILTIN_INSERT_BITS */
13615 return NULL_TREE;
13620 /* Initialize the GCC target structure. */
13622 #undef TARGET_ASM_ALIGNED_HI_OP
13623 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
13624 #undef TARGET_ASM_ALIGNED_SI_OP
13625 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
13626 #undef TARGET_ASM_UNALIGNED_HI_OP
13627 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
13628 #undef TARGET_ASM_UNALIGNED_SI_OP
13629 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
13630 #undef TARGET_ASM_INTEGER
13631 #define TARGET_ASM_INTEGER avr_assemble_integer
13632 #undef TARGET_ASM_FILE_START
13633 #define TARGET_ASM_FILE_START avr_file_start
13634 #undef TARGET_ASM_FILE_END
13635 #define TARGET_ASM_FILE_END avr_file_end
13637 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
13638 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
13639 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
13640 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
13642 #undef TARGET_FUNCTION_VALUE
13643 #define TARGET_FUNCTION_VALUE avr_function_value
13644 #undef TARGET_LIBCALL_VALUE
13645 #define TARGET_LIBCALL_VALUE avr_libcall_value
13646 #undef TARGET_FUNCTION_VALUE_REGNO_P
13647 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
13649 #undef TARGET_ATTRIBUTE_TABLE
13650 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
13651 #undef TARGET_INSERT_ATTRIBUTES
13652 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
13653 #undef TARGET_SECTION_TYPE_FLAGS
13654 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
13656 #undef TARGET_ASM_NAMED_SECTION
13657 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
13658 #undef TARGET_ASM_INIT_SECTIONS
13659 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
13660 #undef TARGET_ENCODE_SECTION_INFO
13661 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
13662 #undef TARGET_ASM_SELECT_SECTION
13663 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
13665 #undef TARGET_REGISTER_MOVE_COST
13666 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
13667 #undef TARGET_MEMORY_MOVE_COST
13668 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
13669 #undef TARGET_RTX_COSTS
13670 #define TARGET_RTX_COSTS avr_rtx_costs
13671 #undef TARGET_ADDRESS_COST
13672 #define TARGET_ADDRESS_COST avr_address_cost
13673 #undef TARGET_MACHINE_DEPENDENT_REORG
13674 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
13675 #undef TARGET_FUNCTION_ARG
13676 #define TARGET_FUNCTION_ARG avr_function_arg
13677 #undef TARGET_FUNCTION_ARG_ADVANCE
13678 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
13680 #undef TARGET_SET_CURRENT_FUNCTION
13681 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
13683 #undef TARGET_RETURN_IN_MEMORY
13684 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
13686 #undef TARGET_STRICT_ARGUMENT_NAMING
13687 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
13689 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
13690 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
13692 #undef TARGET_CONDITIONAL_REGISTER_USAGE
13693 #define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage
13695 #undef TARGET_HARD_REGNO_SCRATCH_OK
13696 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
13697 #undef TARGET_CASE_VALUES_THRESHOLD
13698 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
13700 #undef TARGET_FRAME_POINTER_REQUIRED
13701 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
13702 #undef TARGET_CAN_ELIMINATE
13703 #define TARGET_CAN_ELIMINATE avr_can_eliminate
13705 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
13706 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
13708 #undef TARGET_WARN_FUNC_RETURN
13709 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
13711 #undef TARGET_CLASS_LIKELY_SPILLED_P
13712 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
13714 #undef TARGET_OPTION_OVERRIDE
13715 #define TARGET_OPTION_OVERRIDE avr_option_override
13717 #undef TARGET_CANNOT_MODIFY_JUMPS_P
13718 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
13720 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
13721 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
13723 #undef TARGET_INIT_BUILTINS
13724 #define TARGET_INIT_BUILTINS avr_init_builtins
13726 #undef TARGET_BUILTIN_DECL
13727 #define TARGET_BUILTIN_DECL avr_builtin_decl
13729 #undef TARGET_EXPAND_BUILTIN
13730 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
13732 #undef TARGET_FOLD_BUILTIN
13733 #define TARGET_FOLD_BUILTIN avr_fold_builtin
13735 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
13736 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
13738 #undef TARGET_SCALAR_MODE_SUPPORTED_P
13739 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
13741 #undef TARGET_BUILD_BUILTIN_VA_LIST
13742 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
13744 #undef TARGET_FIXED_POINT_SUPPORTED_P
13745 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
13747 #undef TARGET_CONVERT_TO_TYPE
13748 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
13750 #undef TARGET_ADDR_SPACE_SUBSET_P
13751 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
13753 #undef TARGET_ADDR_SPACE_CONVERT
13754 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
13756 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
13757 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
13759 #undef TARGET_ADDR_SPACE_POINTER_MODE
13760 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
13762 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
13763 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
13764 avr_addr_space_legitimate_address_p
13766 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
13767 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
13769 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
13770 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
13772 #undef TARGET_SECONDARY_RELOAD
13773 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
13775 #undef TARGET_PRINT_OPERAND
13776 #define TARGET_PRINT_OPERAND avr_print_operand
13777 #undef TARGET_PRINT_OPERAND_ADDRESS
13778 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
13779 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
13780 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
13782 struct gcc_target targetm = TARGET_INITIALIZER;
13785 #include "gt-avr.h"