1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2013 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
37 #include "c-family/c-common.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
58 address space where data is to be located.
59 As the only non-generic address spaces are all located in flash,
60 this can be used to test if data shall go into some .progmem* section.
61 This must be the rightmost field of machine dependent section flags. */
62 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
64 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
65 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
67 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
68 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
69 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
71 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
72 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
75 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
76 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
77 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
78 / SYMBOL_FLAG_MACH_DEP)
80 /* Known address spaces. The order must be the same as in the respective
81 enum from avr.h (or designated initialized must be used). */
82 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
84 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
85 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
86 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
87 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
88 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
89 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
90 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
91 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
95 /* Holding RAM addresses of some SFRs used by the compiler and that
96 are unique over all devices in an architecture like 'avr4'. */
100 /* SREG: The processor status */
103 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
109 /* RAMPZ: The high byte of 24-bit address used with ELPM */
112 /* SP: The stack pointer and its low and high byte */
117 static avr_addr_t avr_addr
;
120 /* Prototypes for local helper functions. */
122 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
123 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
124 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
125 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
126 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
127 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
129 static int get_sequence_length (rtx insns
);
130 static int sequent_regs_live (void);
131 static const char *ptrreg_to_str (int);
132 static const char *cond_string (enum rtx_code
);
133 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
134 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
136 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
137 static struct machine_function
* avr_init_machine_status (void);
140 /* Prototypes for hook implementors if needed before their implementation. */
142 static bool avr_rtx_costs (rtx
, int, int, int, int*, bool);
145 /* Allocate registers from r25 to r8 for parameters for function calls. */
146 #define FIRST_CUM_REG 26
148 /* Implicit target register of LPM instruction (R0) */
149 extern GTY(()) rtx lpm_reg_rtx
;
152 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
153 extern GTY(()) rtx lpm_addr_reg_rtx
;
154 rtx lpm_addr_reg_rtx
;
156 /* Temporary register RTX (reg:QI TMP_REGNO) */
157 extern GTY(()) rtx tmp_reg_rtx
;
160 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
161 extern GTY(()) rtx zero_reg_rtx
;
164 /* RTXs for all general purpose registers as QImode */
165 extern GTY(()) rtx all_regs_rtx
[32];
166 rtx all_regs_rtx
[32];
168 /* SREG, the processor status */
169 extern GTY(()) rtx sreg_rtx
;
172 /* RAMP* special function registers */
173 extern GTY(()) rtx rampd_rtx
;
174 extern GTY(()) rtx rampx_rtx
;
175 extern GTY(()) rtx rampy_rtx
;
176 extern GTY(()) rtx rampz_rtx
;
182 /* RTX containing the strings "" and "e", respectively */
183 static GTY(()) rtx xstring_empty
;
184 static GTY(()) rtx xstring_e
;
186 /* Current architecture. */
187 const avr_arch_t
*avr_current_arch
;
189 /* Current device. */
190 const avr_mcu_t
*avr_current_device
;
192 /* Section to put switch tables in. */
193 static GTY(()) section
*progmem_swtable_section
;
195 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
196 or to address space __flash* or __memx. Only used as singletons inside
197 avr_asm_select_section, but it must not be local there because of GTY. */
198 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
200 /* Condition for insns/expanders from avr-dimode.md. */
201 bool avr_have_dimode
= true;
203 /* To track if code will use .bss and/or .data. */
204 bool avr_need_clear_bss_p
= false;
205 bool avr_need_copy_data_p
= false;
208 /* Transform UP into lowercase and write the result to LO.
209 You must provide enough space for LO. Return LO. */
212 avr_tolower (char *lo
, const char *up
)
216 for (; *up
; up
++, lo
++)
225 /* Custom function to count number of set bits. */
228 avr_popcount (unsigned int val
)
242 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
243 Return true if the least significant N_BYTES bytes of XVAL all have a
244 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
245 of integers which contains an integer N iff bit N of POP_MASK is set. */
248 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
252 enum machine_mode mode
= GET_MODE (xval
);
254 if (VOIDmode
== mode
)
257 for (i
= 0; i
< n_bytes
; i
++)
259 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
260 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
262 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
270 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
271 the bit representation of X by "casting" it to CONST_INT. */
274 avr_to_int_mode (rtx x
)
276 enum machine_mode mode
= GET_MODE (x
);
278 return VOIDmode
== mode
280 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
284 /* Implement `TARGET_OPTION_OVERRIDE'. */
287 avr_option_override (void)
289 flag_delete_null_pointer_checks
= 0;
291 /* caller-save.c looks for call-clobbered hard registers that are assigned
292 to pseudos that cross calls and tries so save-restore them around calls
293 in order to reduce the number of stack slots needed.
295 This might lead to situations where reload is no more able to cope
296 with the challenge of AVR's very few address registers and fails to
297 perform the requested spills. */
300 flag_caller_saves
= 0;
302 /* Unwind tables currently require a frame pointer for correctness,
303 see toplev.c:process_options(). */
305 if ((flag_unwind_tables
306 || flag_non_call_exceptions
307 || flag_asynchronous_unwind_tables
)
308 && !ACCUMULATE_OUTGOING_ARGS
)
310 flag_omit_frame_pointer
= 0;
313 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
314 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
316 /* RAM addresses of some SFRs common to all devices in respective arch. */
318 /* SREG: Status Register containing flags like I (global IRQ) */
319 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
321 /* RAMPZ: Address' high part when loading via ELPM */
322 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
324 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
325 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
326 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
327 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
329 /* SP: Stack Pointer (SP_H:SP_L) */
330 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
331 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
333 init_machine_status
= avr_init_machine_status
;
335 avr_log_set_avr_log();
338 /* Function to set up the backend function structure. */
340 static struct machine_function
*
341 avr_init_machine_status (void)
343 return ggc_alloc_cleared_machine_function ();
347 /* Implement `INIT_EXPANDERS'. */
348 /* The function works like a singleton. */
351 avr_init_expanders (void)
355 for (regno
= 0; regno
< 32; regno
++)
356 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
358 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
359 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
360 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
362 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
364 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
365 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
366 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
367 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
368 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
370 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
371 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
375 /* Implement `REGNO_REG_CLASS'. */
376 /* Return register class for register R. */
379 avr_regno_reg_class (int r
)
381 static const enum reg_class reg_class_tab
[] =
385 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
386 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
387 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
388 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
390 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
391 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
393 ADDW_REGS
, ADDW_REGS
,
395 POINTER_X_REGS
, POINTER_X_REGS
,
397 POINTER_Y_REGS
, POINTER_Y_REGS
,
399 POINTER_Z_REGS
, POINTER_Z_REGS
,
405 return reg_class_tab
[r
];
411 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
414 avr_scalar_mode_supported_p (enum machine_mode mode
)
416 if (ALL_FIXED_POINT_MODE_P (mode
))
422 return default_scalar_mode_supported_p (mode
);
426 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
429 avr_decl_flash_p (tree decl
)
431 if (TREE_CODE (decl
) != VAR_DECL
432 || TREE_TYPE (decl
) == error_mark_node
)
437 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
441 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
442 address space and FALSE, otherwise. */
445 avr_decl_memx_p (tree decl
)
447 if (TREE_CODE (decl
) != VAR_DECL
448 || TREE_TYPE (decl
) == error_mark_node
)
453 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
457 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
460 avr_mem_flash_p (rtx x
)
463 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
467 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
468 address space and FALSE, otherwise. */
471 avr_mem_memx_p (rtx x
)
474 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
478 /* A helper for the subsequent function attribute used to dig for
479 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
482 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
484 if (FUNCTION_DECL
== TREE_CODE (func
))
486 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
491 func
= TREE_TYPE (func
);
494 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
495 || TREE_CODE (func
) == METHOD_TYPE
);
497 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
500 /* Return nonzero if FUNC is a naked function. */
503 avr_naked_function_p (tree func
)
505 return avr_lookup_function_attribute1 (func
, "naked");
508 /* Return nonzero if FUNC is an interrupt function as specified
509 by the "interrupt" attribute. */
512 avr_interrupt_function_p (tree func
)
514 return avr_lookup_function_attribute1 (func
, "interrupt");
517 /* Return nonzero if FUNC is a signal function as specified
518 by the "signal" attribute. */
521 avr_signal_function_p (tree func
)
523 return avr_lookup_function_attribute1 (func
, "signal");
526 /* Return nonzero if FUNC is an OS_task function. */
529 avr_OS_task_function_p (tree func
)
531 return avr_lookup_function_attribute1 (func
, "OS_task");
534 /* Return nonzero if FUNC is an OS_main function. */
537 avr_OS_main_function_p (tree func
)
539 return avr_lookup_function_attribute1 (func
, "OS_main");
543 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
544 /* Sanity cheching for above function attributes. */
547 avr_set_current_function (tree decl
)
552 if (decl
== NULL_TREE
553 || current_function_decl
== NULL_TREE
554 || current_function_decl
== error_mark_node
556 || cfun
->machine
->attributes_checked_p
)
559 loc
= DECL_SOURCE_LOCATION (decl
);
561 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
562 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
563 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
564 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
565 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
567 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
569 /* Too much attributes make no sense as they request conflicting features. */
571 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
572 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
573 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
574 " exclusive", "OS_task", "OS_main", isr
);
576 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
578 if (cfun
->machine
->is_naked
579 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
580 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
581 " no effect on %qs function", "OS_task", "OS_main", "naked");
583 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
585 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
586 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
587 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
589 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
590 using this when it switched from SIGNAL and INTERRUPT to ISR. */
592 if (cfun
->machine
->is_interrupt
)
593 cfun
->machine
->is_signal
= 0;
595 /* Interrupt handlers must be void __vector (void) functions. */
597 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
598 error_at (loc
, "%qs function cannot have arguments", isr
);
600 if (TREE_CODE (ret
) != VOID_TYPE
)
601 error_at (loc
, "%qs function cannot return a value", isr
);
603 /* If the function has the 'signal' or 'interrupt' attribute, ensure
604 that the name of the function is "__vector_NN" so as to catch
605 when the user misspells the vector name. */
607 if (!STR_PREFIX_P (name
, "__vector"))
608 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
612 /* Don't print the above diagnostics more than once. */
614 cfun
->machine
->attributes_checked_p
= 1;
618 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
621 avr_accumulate_outgoing_args (void)
624 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
626 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
627 what offset is correct. In some cases it is relative to
628 virtual_outgoing_args_rtx and in others it is relative to
629 virtual_stack_vars_rtx. For example code see
630 gcc.c-torture/execute/built-in-setjmp.c
631 gcc.c-torture/execute/builtins/sprintf-chk.c */
633 return (TARGET_ACCUMULATE_OUTGOING_ARGS
634 && !(cfun
->calls_setjmp
635 || cfun
->has_nonlocal_label
));
639 /* Report contribution of accumulated outgoing arguments to stack size. */
642 avr_outgoing_args_size (void)
644 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
648 /* Implement `STARTING_FRAME_OFFSET'. */
649 /* This is the offset from the frame pointer register to the first stack slot
650 that contains a variable living in the frame. */
653 avr_starting_frame_offset (void)
655 return 1 + avr_outgoing_args_size ();
659 /* Return the number of hard registers to push/pop in the prologue/epilogue
660 of the current function, and optionally store these registers in SET. */
663 avr_regs_to_save (HARD_REG_SET
*set
)
666 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
669 CLEAR_HARD_REG_SET (*set
);
672 /* No need to save any registers if the function never returns or
673 has the "OS_task" or "OS_main" attribute. */
675 if (TREE_THIS_VOLATILE (current_function_decl
)
676 || cfun
->machine
->is_OS_task
677 || cfun
->machine
->is_OS_main
)
680 for (reg
= 0; reg
< 32; reg
++)
682 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
683 any global register variables. */
688 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
689 || (df_regs_ever_live_p (reg
)
690 && (int_or_sig_p
|| !call_used_regs
[reg
])
691 /* Don't record frame pointer registers here. They are treated
692 indivitually in prologue. */
693 && !(frame_pointer_needed
694 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
697 SET_HARD_REG_BIT (*set
, reg
);
705 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
708 avr_allocate_stack_slots_for_args (void)
710 return !cfun
->machine
->is_naked
;
714 /* Return true if register FROM can be eliminated via register TO. */
717 avr_can_eliminate (const int from
, const int to
)
719 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
720 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
721 || ((from
== FRAME_POINTER_REGNUM
722 || from
== FRAME_POINTER_REGNUM
+ 1)
723 && !frame_pointer_needed
));
727 /* Implement `TARGET_WARN_FUNC_RETURN'. */
730 avr_warn_func_return (tree decl
)
732 /* Naked functions are implemented entirely in assembly, including the
733 return sequence, so suppress warnings about this. */
735 return !avr_naked_function_p (decl
);
738 /* Compute offset between arg_pointer and frame_pointer. */
741 avr_initial_elimination_offset (int from
, int to
)
743 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
747 int offset
= frame_pointer_needed
? 2 : 0;
748 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
750 offset
+= avr_regs_to_save (NULL
);
751 return (get_frame_size () + avr_outgoing_args_size()
752 + avr_pc_size
+ 1 + offset
);
757 /* Helper for the function below. */
760 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
762 *node
= make_node (FIXED_POINT_TYPE
);
763 TYPE_SATURATING (*node
) = sat_p
;
764 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
765 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
766 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
767 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
768 TYPE_ALIGN (*node
) = 8;
769 SET_TYPE_MODE (*node
, mode
);
775 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
778 avr_build_builtin_va_list (void)
780 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
781 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
782 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
783 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
784 to the long long accum modes instead of the desired [U]TAmode.
786 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
787 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
788 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
789 libgcc to detect IBIT and FBIT. */
791 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
792 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
793 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
794 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
796 unsigned_long_long_accum_type_node
= uta_type_node
;
797 long_long_accum_type_node
= ta_type_node
;
798 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
799 sat_long_long_accum_type_node
= sat_ta_type_node
;
801 /* Dispatch to the default handler. */
803 return std_build_builtin_va_list ();
807 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
808 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
809 frame pointer by +STARTING_FRAME_OFFSET.
810 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
811 avoids creating add/sub of offset in nonlocal goto and setjmp. */
814 avr_builtin_setjmp_frame_value (void)
816 rtx xval
= gen_reg_rtx (Pmode
);
817 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
818 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
823 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
824 This is return address of function. */
827 avr_return_addr_rtx (int count
, rtx tem
)
831 /* Can only return this function's return address. Others not supported. */
837 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
838 warning (0, "%<builtin_return_address%> contains only 2 bytes"
842 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
844 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
845 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
846 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
850 /* Return 1 if the function epilogue is just a single "ret". */
853 avr_simple_epilogue (void)
855 return (! frame_pointer_needed
856 && get_frame_size () == 0
857 && avr_outgoing_args_size() == 0
858 && avr_regs_to_save (NULL
) == 0
859 && ! cfun
->machine
->is_interrupt
860 && ! cfun
->machine
->is_signal
861 && ! cfun
->machine
->is_naked
862 && ! TREE_THIS_VOLATILE (current_function_decl
));
865 /* This function checks sequence of live registers. */
868 sequent_regs_live (void)
874 for (reg
= 0; reg
< 18; ++reg
)
878 /* Don't recognize sequences that contain global register
887 if (!call_used_regs
[reg
])
889 if (df_regs_ever_live_p (reg
))
899 if (!frame_pointer_needed
)
901 if (df_regs_ever_live_p (REG_Y
))
909 if (df_regs_ever_live_p (REG_Y
+1))
922 return (cur_seq
== live_seq
) ? live_seq
: 0;
925 /* Obtain the length sequence of insns. */
928 get_sequence_length (rtx insns
)
933 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
934 length
+= get_attr_length (insn
);
940 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
943 avr_incoming_return_addr_rtx (void)
945 /* The return address is at the top of the stack. Note that the push
946 was via post-decrement, which means the actual address is off by one. */
947 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
950 /* Helper for expand_prologue. Emit a push of a byte register. */
953 emit_push_byte (unsigned regno
, bool frame_related_p
)
957 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
958 mem
= gen_frame_mem (QImode
, mem
);
959 reg
= gen_rtx_REG (QImode
, regno
);
961 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
963 RTX_FRAME_RELATED_P (insn
) = 1;
965 cfun
->machine
->stack_usage
++;
969 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
970 SFR is a MEM representing the memory location of the SFR.
971 If CLR_P then clear the SFR after the push using zero_reg. */
974 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
978 gcc_assert (MEM_P (sfr
));
980 /* IN __tmp_reg__, IO(SFR) */
981 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
983 RTX_FRAME_RELATED_P (insn
) = 1;
985 /* PUSH __tmp_reg__ */
986 emit_push_byte (TMP_REGNO
, frame_related_p
);
990 /* OUT IO(SFR), __zero_reg__ */
991 insn
= emit_move_insn (sfr
, const0_rtx
);
993 RTX_FRAME_RELATED_P (insn
) = 1;
998 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1001 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1002 int live_seq
= sequent_regs_live ();
1004 HOST_WIDE_INT size_max
1005 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1007 bool minimize
= (TARGET_CALL_PROLOGUES
1011 && !cfun
->machine
->is_OS_task
1012 && !cfun
->machine
->is_OS_main
);
1015 && (frame_pointer_needed
1016 || avr_outgoing_args_size() > 8
1017 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1021 int first_reg
, reg
, offset
;
1023 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1024 gen_int_mode (size
, HImode
));
1026 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1027 gen_int_mode (live_seq
+size
, HImode
));
1028 insn
= emit_insn (pattern
);
1029 RTX_FRAME_RELATED_P (insn
) = 1;
1031 /* Describe the effect of the unspec_volatile call to prologue_saves.
1032 Note that this formulation assumes that add_reg_note pushes the
1033 notes to the front. Thus we build them in the reverse order of
1034 how we want dwarf2out to process them. */
1036 /* The function does always set frame_pointer_rtx, but whether that
1037 is going to be permanent in the function is frame_pointer_needed. */
1039 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1040 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1042 : stack_pointer_rtx
),
1043 plus_constant (Pmode
, stack_pointer_rtx
,
1044 -(size
+ live_seq
))));
1046 /* Note that live_seq always contains r28+r29, but the other
1047 registers to be saved are all below 18. */
1049 first_reg
= 18 - (live_seq
- 2);
1051 for (reg
= 29, offset
= -live_seq
+ 1;
1053 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1057 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1059 r
= gen_rtx_REG (QImode
, reg
);
1060 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1063 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1065 else /* !minimize */
1069 for (reg
= 0; reg
< 32; ++reg
)
1070 if (TEST_HARD_REG_BIT (set
, reg
))
1071 emit_push_byte (reg
, true);
1073 if (frame_pointer_needed
1074 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1076 /* Push frame pointer. Always be consistent about the
1077 ordering of pushes -- epilogue_restores expects the
1078 register pair to be pushed low byte first. */
1080 emit_push_byte (REG_Y
, true);
1081 emit_push_byte (REG_Y
+ 1, true);
1084 if (frame_pointer_needed
1087 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1088 RTX_FRAME_RELATED_P (insn
) = 1;
1093 /* Creating a frame can be done by direct manipulation of the
1094 stack or via the frame pointer. These two methods are:
1101 the optimum method depends on function type, stack and
1102 frame size. To avoid a complex logic, both methods are
1103 tested and shortest is selected.
1105 There is also the case where SIZE != 0 and no frame pointer is
1106 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1107 In that case, insn (*) is not needed in that case.
1108 We use the X register as scratch. This is save because in X
1110 In an interrupt routine, the case of SIZE != 0 together with
1111 !frame_pointer_needed can only occur if the function is not a
1112 leaf function and thus X has already been saved. */
1115 HOST_WIDE_INT size_cfa
= size
;
1116 rtx fp_plus_insns
, fp
, my_fp
;
1118 gcc_assert (frame_pointer_needed
1122 fp
= my_fp
= (frame_pointer_needed
1124 : gen_rtx_REG (Pmode
, REG_X
));
1126 if (AVR_HAVE_8BIT_SP
)
1128 /* The high byte (r29) does not change:
1129 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1131 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1134 /* Cut down size and avoid size = 0 so that we don't run
1135 into ICE like PR52488 in the remainder. */
1137 if (size
> size_max
)
1139 /* Don't error so that insane code from newlib still compiles
1140 and does not break building newlib. As PR51345 is implemented
1141 now, there are multilib variants with -msp8.
1143 If user wants sanity checks he can use -Wstack-usage=
1146 For CFA we emit the original, non-saturated size so that
1147 the generic machinery is aware of the real stack usage and
1148 will print the above diagnostic as expected. */
1153 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1155 /************ Method 1: Adjust frame pointer ************/
1159 /* Normally, the dwarf2out frame-related-expr interpreter does
1160 not expect to have the CFA change once the frame pointer is
1161 set up. Thus, we avoid marking the move insn below and
1162 instead indicate that the entire operation is complete after
1163 the frame pointer subtraction is done. */
1165 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1166 if (frame_pointer_needed
)
1168 RTX_FRAME_RELATED_P (insn
) = 1;
1169 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1170 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1173 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1175 if (frame_pointer_needed
)
1177 RTX_FRAME_RELATED_P (insn
) = 1;
1178 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1179 gen_rtx_SET (VOIDmode
, fp
,
1180 plus_constant (Pmode
, fp
,
1184 /* Copy to stack pointer. Note that since we've already
1185 changed the CFA to the frame pointer this operation
1186 need not be annotated if frame pointer is needed.
1187 Always move through unspec, see PR50063.
1188 For meaning of irq_state see movhi_sp_r insn. */
1190 if (cfun
->machine
->is_interrupt
)
1193 if (TARGET_NO_INTERRUPTS
1194 || cfun
->machine
->is_signal
1195 || cfun
->machine
->is_OS_main
)
1198 if (AVR_HAVE_8BIT_SP
)
1201 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1202 fp
, GEN_INT (irq_state
)));
1203 if (!frame_pointer_needed
)
1205 RTX_FRAME_RELATED_P (insn
) = 1;
1206 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1207 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1208 plus_constant (Pmode
,
1213 fp_plus_insns
= get_insns ();
1216 /************ Method 2: Adjust Stack pointer ************/
1218 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1219 can only handle specific offsets. */
1221 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1227 insn
= emit_move_insn (stack_pointer_rtx
,
1228 plus_constant (Pmode
, stack_pointer_rtx
,
1230 RTX_FRAME_RELATED_P (insn
) = 1;
1231 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1232 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1233 plus_constant (Pmode
,
1236 if (frame_pointer_needed
)
1238 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1239 RTX_FRAME_RELATED_P (insn
) = 1;
1242 sp_plus_insns
= get_insns ();
1245 /************ Use shortest method ************/
1247 emit_insn (get_sequence_length (sp_plus_insns
)
1248 < get_sequence_length (fp_plus_insns
)
1254 emit_insn (fp_plus_insns
);
1257 cfun
->machine
->stack_usage
+= size_cfa
;
1258 } /* !minimize && size != 0 */
1263 /* Output function prologue. */
1266 avr_expand_prologue (void)
1271 size
= get_frame_size() + avr_outgoing_args_size();
1273 cfun
->machine
->stack_usage
= 0;
1275 /* Prologue: naked. */
1276 if (cfun
->machine
->is_naked
)
1281 avr_regs_to_save (&set
);
1283 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1285 /* Enable interrupts. */
1286 if (cfun
->machine
->is_interrupt
)
1287 emit_insn (gen_enable_interrupt ());
1289 /* Push zero reg. */
1290 emit_push_byte (ZERO_REGNO
, true);
1293 emit_push_byte (TMP_REGNO
, true);
1296 /* ??? There's no dwarf2 column reserved for SREG. */
1297 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1299 /* Clear zero reg. */
1300 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1302 /* Prevent any attempt to delete the setting of ZERO_REG! */
1303 emit_use (zero_reg_rtx
);
1305 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1306 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1309 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1312 && TEST_HARD_REG_BIT (set
, REG_X
)
1313 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1315 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1319 && (frame_pointer_needed
1320 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1321 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1323 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1327 && TEST_HARD_REG_BIT (set
, REG_Z
)
1328 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1330 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1332 } /* is_interrupt is_signal */
1334 avr_prologue_setup_frame (size
, set
);
1336 if (flag_stack_usage_info
)
1337 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1341 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1342 /* Output summary at end of function prologue. */
1345 avr_asm_function_end_prologue (FILE *file
)
1347 if (cfun
->machine
->is_naked
)
1349 fputs ("/* prologue: naked */\n", file
);
1353 if (cfun
->machine
->is_interrupt
)
1355 fputs ("/* prologue: Interrupt */\n", file
);
1357 else if (cfun
->machine
->is_signal
)
1359 fputs ("/* prologue: Signal */\n", file
);
1362 fputs ("/* prologue: function */\n", file
);
1365 if (ACCUMULATE_OUTGOING_ARGS
)
1366 fprintf (file
, "/* outgoing args size = %d */\n",
1367 avr_outgoing_args_size());
1369 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1371 fprintf (file
, "/* stack size = %d */\n",
1372 cfun
->machine
->stack_usage
);
1373 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1374 usage for offset so that SP + .L__stack_offset = return address. */
1375 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1379 /* Implement `EPILOGUE_USES'. */
1382 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1384 if (reload_completed
1386 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1391 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1394 emit_pop_byte (unsigned regno
)
1398 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1399 mem
= gen_frame_mem (QImode
, mem
);
1400 reg
= gen_rtx_REG (QImode
, regno
);
1402 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1405 /* Output RTL epilogue. */
1408 avr_expand_epilogue (bool sibcall_p
)
1415 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1417 size
= get_frame_size() + avr_outgoing_args_size();
1419 /* epilogue: naked */
1420 if (cfun
->machine
->is_naked
)
1422 gcc_assert (!sibcall_p
);
1424 emit_jump_insn (gen_return ());
1428 avr_regs_to_save (&set
);
1429 live_seq
= sequent_regs_live ();
1431 minimize
= (TARGET_CALL_PROLOGUES
1434 && !cfun
->machine
->is_OS_task
1435 && !cfun
->machine
->is_OS_main
);
1439 || frame_pointer_needed
1442 /* Get rid of frame. */
1444 if (!frame_pointer_needed
)
1446 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1451 emit_move_insn (frame_pointer_rtx
,
1452 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1455 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1461 /* Try two methods to adjust stack and select shortest. */
1466 HOST_WIDE_INT size_max
;
1468 gcc_assert (frame_pointer_needed
1472 fp
= my_fp
= (frame_pointer_needed
1474 : gen_rtx_REG (Pmode
, REG_X
));
1476 if (AVR_HAVE_8BIT_SP
)
1478 /* The high byte (r29) does not change:
1479 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1481 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1484 /* For rationale see comment in prologue generation. */
1486 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1487 if (size
> size_max
)
1489 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1491 /********** Method 1: Adjust fp register **********/
1495 if (!frame_pointer_needed
)
1496 emit_move_insn (fp
, stack_pointer_rtx
);
1498 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1500 /* Copy to stack pointer. */
1502 if (TARGET_NO_INTERRUPTS
)
1505 if (AVR_HAVE_8BIT_SP
)
1508 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1509 GEN_INT (irq_state
)));
1511 fp_plus_insns
= get_insns ();
1514 /********** Method 2: Adjust Stack pointer **********/
1516 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1522 emit_move_insn (stack_pointer_rtx
,
1523 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1525 sp_plus_insns
= get_insns ();
1528 /************ Use shortest method ************/
1530 emit_insn (get_sequence_length (sp_plus_insns
)
1531 < get_sequence_length (fp_plus_insns
)
1536 emit_insn (fp_plus_insns
);
1539 if (frame_pointer_needed
1540 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1542 /* Restore previous frame_pointer. See avr_expand_prologue for
1543 rationale for not using pophi. */
1545 emit_pop_byte (REG_Y
+ 1);
1546 emit_pop_byte (REG_Y
);
1549 /* Restore used registers. */
1551 for (reg
= 31; reg
>= 0; --reg
)
1552 if (TEST_HARD_REG_BIT (set
, reg
))
1553 emit_pop_byte (reg
);
1557 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1558 The conditions to restore them must be tha same as in prologue. */
1561 && TEST_HARD_REG_BIT (set
, REG_Z
)
1562 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1564 emit_pop_byte (TMP_REGNO
);
1565 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1569 && (frame_pointer_needed
1570 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1571 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1573 emit_pop_byte (TMP_REGNO
);
1574 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1578 && TEST_HARD_REG_BIT (set
, REG_X
)
1579 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1581 emit_pop_byte (TMP_REGNO
);
1582 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1587 emit_pop_byte (TMP_REGNO
);
1588 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1591 /* Restore SREG using tmp_reg as scratch. */
1593 emit_pop_byte (TMP_REGNO
);
1594 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1596 /* Restore tmp REG. */
1597 emit_pop_byte (TMP_REGNO
);
1599 /* Restore zero REG. */
1600 emit_pop_byte (ZERO_REGNO
);
1604 emit_jump_insn (gen_return ());
1608 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1611 avr_asm_function_begin_epilogue (FILE *file
)
1613 fprintf (file
, "/* epilogue start */\n");
1617 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1620 avr_cannot_modify_jumps_p (void)
1623 /* Naked Functions must not have any instructions after
1624 their epilogue, see PR42240 */
1626 if (reload_completed
1628 && cfun
->machine
->is_naked
)
1637 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1640 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1642 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1643 This hook just serves to hack around PR rtl-optimization/52543 by
1644 claiming that non-generic addresses were mode-dependent so that
1645 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1646 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1647 generic address space which is not true. */
1649 return !ADDR_SPACE_GENERIC_P (as
);
1653 /* Helper function for `avr_legitimate_address_p'. */
1656 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1657 RTX_CODE outer_code
, bool strict
)
1660 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1661 as
, outer_code
, UNKNOWN
)
1663 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1667 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1668 machine for a memory operand of mode MODE. */
1671 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1673 bool ok
= CONSTANT_ADDRESS_P (x
);
1675 switch (GET_CODE (x
))
1678 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1682 && GET_MODE_SIZE (mode
) > 4
1683 && REG_X
== REGNO (x
))
1691 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1692 GET_CODE (x
), strict
);
1697 rtx reg
= XEXP (x
, 0);
1698 rtx op1
= XEXP (x
, 1);
1701 && CONST_INT_P (op1
)
1702 && INTVAL (op1
) >= 0)
1704 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1709 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1712 if (reg
== frame_pointer_rtx
1713 || reg
== arg_pointer_rtx
)
1718 else if (frame_pointer_needed
1719 && reg
== frame_pointer_rtx
)
1731 if (avr_log
.legitimate_address_p
)
1733 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1734 "reload_completed=%d reload_in_progress=%d %s:",
1735 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1736 reg_renumber
? "(reg_renumber)" : "");
1738 if (GET_CODE (x
) == PLUS
1739 && REG_P (XEXP (x
, 0))
1740 && CONST_INT_P (XEXP (x
, 1))
1741 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1744 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1745 true_regnum (XEXP (x
, 0)));
1748 avr_edump ("\n%r\n", x
);
1755 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1756 now only a helper for avr_addr_space_legitimize_address. */
1757 /* Attempts to replace X with a valid
1758 memory address for an operand of mode MODE */
1761 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1763 bool big_offset_p
= false;
1767 if (GET_CODE (oldx
) == PLUS
1768 && REG_P (XEXP (oldx
, 0)))
1770 if (REG_P (XEXP (oldx
, 1)))
1771 x
= force_reg (GET_MODE (oldx
), oldx
);
1772 else if (CONST_INT_P (XEXP (oldx
, 1)))
1774 int offs
= INTVAL (XEXP (oldx
, 1));
1775 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1776 && offs
> MAX_LD_OFFSET (mode
))
1778 big_offset_p
= true;
1779 x
= force_reg (GET_MODE (oldx
), oldx
);
1784 if (avr_log
.legitimize_address
)
1786 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1789 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1796 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1797 /* This will allow register R26/27 to be used where it is no worse than normal
1798 base pointers R28/29 or R30/31. For example, if base offset is greater
1799 than 63 bytes or for R++ or --R addressing. */
1802 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1803 int opnum
, int type
, int addr_type
,
1804 int ind_levels ATTRIBUTE_UNUSED
,
1805 rtx (*mk_memloc
)(rtx
,int))
1809 if (avr_log
.legitimize_reload_address
)
1810 avr_edump ("\n%?:%m %r\n", mode
, x
);
1812 if (1 && (GET_CODE (x
) == POST_INC
1813 || GET_CODE (x
) == PRE_DEC
))
1815 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1816 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1817 opnum
, RELOAD_OTHER
);
1819 if (avr_log
.legitimize_reload_address
)
1820 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1821 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1826 if (GET_CODE (x
) == PLUS
1827 && REG_P (XEXP (x
, 0))
1828 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1829 && CONST_INT_P (XEXP (x
, 1))
1830 && INTVAL (XEXP (x
, 1)) >= 1)
1832 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1836 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1838 int regno
= REGNO (XEXP (x
, 0));
1839 rtx mem
= mk_memloc (x
, regno
);
1841 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1842 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1843 1, (enum reload_type
) addr_type
);
1845 if (avr_log
.legitimize_reload_address
)
1846 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1847 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1849 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1850 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1851 opnum
, (enum reload_type
) type
);
1853 if (avr_log
.legitimize_reload_address
)
1854 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1855 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1860 else if (! (frame_pointer_needed
1861 && XEXP (x
, 0) == frame_pointer_rtx
))
1863 push_reload (x
, NULL_RTX
, px
, NULL
,
1864 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1865 opnum
, (enum reload_type
) type
);
1867 if (avr_log
.legitimize_reload_address
)
1868 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1869 POINTER_REGS
, x
, NULL_RTX
);
1879 /* Implement `TARGET_SECONDARY_RELOAD' */
1882 avr_secondary_reload (bool in_p
, rtx x
,
1883 reg_class_t reload_class ATTRIBUTE_UNUSED
,
1884 enum machine_mode mode
, secondary_reload_info
*sri
)
1888 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1889 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
1891 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1898 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
1899 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
1900 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
1902 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
1903 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
1904 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
1905 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
1906 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
1908 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
1910 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
1911 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
1912 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
1913 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
1914 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
1915 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
1923 /* Helper function to print assembler resp. track instruction
1924 sequence lengths. Always return "".
1927 Output assembler code from template TPL with operands supplied
1928 by OPERANDS. This is just forwarding to output_asm_insn.
1931 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1932 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1933 Don't output anything.
1937 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1941 output_asm_insn (tpl
, operands
);
1955 /* Return a pointer register name as a string. */
1958 ptrreg_to_str (int regno
)
1962 case REG_X
: return "X";
1963 case REG_Y
: return "Y";
1964 case REG_Z
: return "Z";
1966 output_operand_lossage ("address operand requires constraint for"
1967 " X, Y, or Z register");
1972 /* Return the condition name as a string.
1973 Used in conditional jump constructing */
1976 cond_string (enum rtx_code code
)
1985 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1990 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2006 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2007 /* Output ADDR to FILE as address. */
2010 avr_print_operand_address (FILE *file
, rtx addr
)
2012 switch (GET_CODE (addr
))
2015 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2019 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2023 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2027 if (CONSTANT_ADDRESS_P (addr
)
2028 && text_segment_operand (addr
, VOIDmode
))
2031 if (GET_CODE (x
) == CONST
)
2033 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2035 /* Assembler gs() will implant word address. Make offset
2036 a byte offset inside gs() for assembler. This is
2037 needed because the more logical (constant+gs(sym)) is not
2038 accepted by gas. For 128K and smaller devices this is ok.
2039 For large devices it will create a trampoline to offset
2040 from symbol which may not be what the user really wanted. */
2042 fprintf (file
, "gs(");
2043 output_addr_const (file
, XEXP (x
,0));
2044 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2045 2 * INTVAL (XEXP (x
, 1)));
2047 if (warning (0, "pointer offset from symbol maybe incorrect"))
2049 output_addr_const (stderr
, addr
);
2050 fprintf(stderr
,"\n");
2055 fprintf (file
, "gs(");
2056 output_addr_const (file
, addr
);
2057 fprintf (file
, ")");
2061 output_addr_const (file
, addr
);
2066 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2069 avr_print_operand_punct_valid_p (unsigned char code
)
2071 return code
== '~' || code
== '!';
2075 /* Implement `TARGET_PRINT_OPERAND'. */
2076 /* Output X as assembler operand to file FILE.
2077 For a description of supported %-codes, see top of avr.md. */
2080 avr_print_operand (FILE *file
, rtx x
, int code
)
2084 if (code
>= 'A' && code
<= 'D')
2089 if (!AVR_HAVE_JMP_CALL
)
2092 else if (code
== '!')
2094 if (AVR_HAVE_EIJMP_EICALL
)
2097 else if (code
== 't'
2100 static int t_regno
= -1;
2101 static int t_nbits
= -1;
2103 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2105 t_regno
= REGNO (x
);
2106 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2108 else if (CONST_INT_P (x
) && t_regno
>= 0
2109 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2111 int bpos
= INTVAL (x
);
2113 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2115 fprintf (file
, ",%d", bpos
% 8);
2120 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2124 if (x
== zero_reg_rtx
)
2125 fprintf (file
, "__zero_reg__");
2126 else if (code
== 'r' && REGNO (x
) < 32)
2127 fprintf (file
, "%d", (int) REGNO (x
));
2129 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2131 else if (CONST_INT_P (x
))
2133 HOST_WIDE_INT ival
= INTVAL (x
);
2136 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2137 else if (low_io_address_operand (x
, VOIDmode
)
2138 || high_io_address_operand (x
, VOIDmode
))
2140 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2141 fprintf (file
, "__RAMPZ__");
2142 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2143 fprintf (file
, "__RAMPY__");
2144 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2145 fprintf (file
, "__RAMPX__");
2146 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2147 fprintf (file
, "__RAMPD__");
2148 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2149 fprintf (file
, "__CCP__");
2150 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2151 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2152 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2155 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2156 ival
- avr_current_arch
->sfr_offset
);
2160 fatal_insn ("bad address, not an I/O address:", x
);
2164 rtx addr
= XEXP (x
, 0);
2168 if (!CONSTANT_P (addr
))
2169 fatal_insn ("bad address, not a constant:", addr
);
2170 /* Assembler template with m-code is data - not progmem section */
2171 if (text_segment_operand (addr
, VOIDmode
))
2172 if (warning (0, "accessing data memory with"
2173 " program memory address"))
2175 output_addr_const (stderr
, addr
);
2176 fprintf(stderr
,"\n");
2178 output_addr_const (file
, addr
);
2180 else if (code
== 'i')
2182 avr_print_operand (file
, addr
, 'i');
2184 else if (code
== 'o')
2186 if (GET_CODE (addr
) != PLUS
)
2187 fatal_insn ("bad address, not (reg+disp):", addr
);
2189 avr_print_operand (file
, XEXP (addr
, 1), 0);
2191 else if (code
== 'p' || code
== 'r')
2193 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2194 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2197 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2199 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2201 else if (GET_CODE (addr
) == PLUS
)
2203 avr_print_operand_address (file
, XEXP (addr
,0));
2204 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2205 fatal_insn ("internal compiler error. Bad address:"
2208 avr_print_operand (file
, XEXP (addr
,1), code
);
2211 avr_print_operand_address (file
, addr
);
2213 else if (code
== 'i')
2215 fatal_insn ("bad address, not an I/O address:", x
);
2217 else if (code
== 'x')
2219 /* Constant progmem address - like used in jmp or call */
2220 if (0 == text_segment_operand (x
, VOIDmode
))
2221 if (warning (0, "accessing program memory"
2222 " with data memory address"))
2224 output_addr_const (stderr
, x
);
2225 fprintf(stderr
,"\n");
2227 /* Use normal symbol for direct address no linker trampoline needed */
2228 output_addr_const (file
, x
);
2230 else if (CONST_FIXED_P (x
))
2232 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2234 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2236 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2238 else if (GET_CODE (x
) == CONST_DOUBLE
)
2242 if (GET_MODE (x
) != SFmode
)
2243 fatal_insn ("internal compiler error. Unknown mode:", x
);
2244 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2245 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2246 fprintf (file
, "0x%lx", val
);
2248 else if (GET_CODE (x
) == CONST_STRING
)
2249 fputs (XSTR (x
, 0), file
);
2250 else if (code
== 'j')
2251 fputs (cond_string (GET_CODE (x
)), file
);
2252 else if (code
== 'k')
2253 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2255 avr_print_operand_address (file
, x
);
2259 /* Worker function for `NOTICE_UPDATE_CC'. */
2260 /* Update the condition code in the INSN. */
2263 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2266 enum attr_cc cc
= get_attr_cc (insn
);
2276 rtx
*op
= recog_data
.operand
;
2279 /* Extract insn's operands. */
2280 extract_constrain_insn_cached (insn
);
2288 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2289 cc
= (enum attr_cc
) icc
;
2294 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2295 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2296 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2298 /* Any other "r,rL" combination does not alter cc0. */
2302 } /* inner switch */
2306 } /* outer swicth */
2311 /* Special values like CC_OUT_PLUS from above have been
2312 mapped to "standard" CC_* values so we never come here. */
2318 /* Insn does not affect CC at all. */
2326 set
= single_set (insn
);
2330 cc_status
.flags
|= CC_NO_OVERFLOW
;
2331 cc_status
.value1
= SET_DEST (set
);
2336 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2337 The V flag may or may not be known but that's ok because
2338 alter_cond will change tests to use EQ/NE. */
2339 set
= single_set (insn
);
2343 cc_status
.value1
= SET_DEST (set
);
2344 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2349 set
= single_set (insn
);
2352 cc_status
.value1
= SET_SRC (set
);
2356 /* Insn doesn't leave CC in a usable state. */
2362 /* Choose mode for jump insn:
2363 1 - relative jump in range -63 <= x <= 62 ;
2364 2 - relative jump in range -2046 <= x <= 2045 ;
2365 3 - absolute jump (only for ATmega[16]03). */
2368 avr_jump_mode (rtx x
, rtx insn
)
2370 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2371 ? XEXP (x
, 0) : x
));
2372 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2373 int jump_distance
= cur_addr
- dest_addr
;
2375 if (-63 <= jump_distance
&& jump_distance
<= 62)
2377 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2379 else if (AVR_HAVE_JMP_CALL
)
2385 /* Return an AVR condition jump commands.
2386 X is a comparison RTX.
2387 LEN is a number returned by avr_jump_mode function.
2388 If REVERSE nonzero then condition code in X must be reversed. */
2391 ret_cond_branch (rtx x
, int len
, int reverse
)
2393 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2398 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2399 return (len
== 1 ? ("breq .+2" CR_TAB
2401 len
== 2 ? ("breq .+4" CR_TAB
2409 return (len
== 1 ? ("breq .+2" CR_TAB
2411 len
== 2 ? ("breq .+4" CR_TAB
2418 return (len
== 1 ? ("breq .+2" CR_TAB
2420 len
== 2 ? ("breq .+4" CR_TAB
2427 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2428 return (len
== 1 ? ("breq %0" CR_TAB
2430 len
== 2 ? ("breq .+2" CR_TAB
2437 return (len
== 1 ? ("breq %0" CR_TAB
2439 len
== 2 ? ("breq .+2" CR_TAB
2446 return (len
== 1 ? ("breq %0" CR_TAB
2448 len
== 2 ? ("breq .+2" CR_TAB
2462 return ("br%j1 .+2" CR_TAB
2465 return ("br%j1 .+4" CR_TAB
2476 return ("br%k1 .+2" CR_TAB
2479 return ("br%k1 .+4" CR_TAB
2488 /* Worker function for `FINAL_PRESCAN_INSN'. */
2489 /* Output insn cost for next insn. */
2492 avr_final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2493 int num_operands ATTRIBUTE_UNUSED
)
2495 if (avr_log
.rtx_costs
)
2497 rtx set
= single_set (insn
);
2500 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2501 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2503 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2504 rtx_cost (PATTERN (insn
), INSN
, 0,
2505 optimize_insn_for_speed_p()));
2509 /* Return 0 if undefined, 1 if always true or always false. */
2512 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2514 unsigned int max
= (mode
== QImode
? 0xff :
2515 mode
== HImode
? 0xffff :
2516 mode
== PSImode
? 0xffffff :
2517 mode
== SImode
? 0xffffffff : 0);
2518 if (max
&& op
&& CONST_INT_P (x
))
2520 if (unsigned_condition (op
) != op
)
2523 if (max
!= (INTVAL (x
) & max
)
2524 && INTVAL (x
) != 0xff)
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

avr_function_arg_regno_p (int r)

  return (r >= 8 && r <= 25);
/* Worker function for `INIT_CUMULATIVE_ARGS'.  */
/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)

  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))

  /* Assume the callee may be tail-called.  */

  cfun->machine->sibcall_fails = 0;
/* Returns the number of registers to allocate for a function argument.  */

avr_num_arg_regs (enum machine_mode mode, const_tree type)

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
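
/* E.g. a 1-byte argument still occupies 2 registers and a 3-byte
   (PSImode) argument occupies 4, so each argument starts in an
   even-numbered register.  */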
/* Implement `TARGET_FUNCTION_ARG'.  */
/* Controls whether a function argument is passed
   in a register, and which register.  */

avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);
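
/* FIRST_CUM_REG is 26 (see avr.h), so the first 2-byte argument is
   passed in REG 24, i.e. the pair R25:R24, the next one in R23:R22,
   and so on downwards until R8.  */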
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs have to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

      && !call_used_regs[cum->regno])

      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",

  if (cum->nregs <= 0)

      cum->regno = FIRST_CUM_REG;
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

      decl_callee = TREE_TYPE (decl_callee);

      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))

          decl_callee = TREE_TYPE (decl_callee);

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/

/* Return true if a value of mode MODE is read from flash by
   __load_* function from libgcc.  */

avr_load_libgcc_p (rtx op)

  enum machine_mode mode = GET_MODE (op);
  int n_bytes = GET_MODE_SIZE (mode);

          && avr_mem_flash_p (op));

/* Return true if a value of mode MODE is read by __xload_* function.  */

avr_xload_libgcc_p (enum machine_mode mode)

  int n_bytes = GET_MODE_SIZE (mode);

          || avr_current_device->n_flash > 1);
/* Fixme: This is a hack because secondary reloads don't work as expected.

   Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register.  In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST

   Return a QImode d-register or NULL_RTX if nothing found.  */

avr_find_unused_d_reg (rtx insn, rtx exclude)

  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  for (regno = 16; regno < 32; regno++)

      rtx reg = all_regs_rtx[regno];

              && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.  */

avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)

  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))

      gcc_assert (REG_Z == REGNO (addr));

          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB

                                "pop %A0", xop, plen, 6);

              avr_asm_len ("%4lpm" CR_TAB

                           "mov %B0,%3", xop, plen, 5);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);

        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 3);

          avr_asm_len ("%4lpm" CR_TAB
                       "adiw %2,1", xop, plen, 3);

          avr_asm_len ("%4lpm" CR_TAB
                       "adiw %2,1", xop, plen, 3);

          avr_asm_len ("%4lpm" CR_TAB
                       "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */
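
/* Without LPMX, LPM can only load into R0 and does not modify Z, so
   reading e.g. a 2-byte value unrolls to something like

       lpm                   ; R0 = flash byte at Z
       mov %A0,__tmp_reg__
       adiw r30,1
       lpm
       mov %B0,__tmp_reg__

   plus a trailing "sbiw" to restore Z when it is still live.  */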
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.  */

avr_out_lpm (rtx insn, rtx *op, int *plen)

  rtx src = SET_SRC (single_set (insn));

  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));

  addr_space_t as = MEM_ADDR_SPACE (src);

      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)

          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);

      else if (segment == 1)

          avr_asm_len ("clr %5" CR_TAB

                       "out %i6,%5", xop, plen, 3);

          avr_asm_len ("mov %5,%2" CR_TAB

                       "mov %2,%5", xop, plen, 4);

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);

  else if (!AVR_HAVE_LPMX)

      return avr_out_lpm_no_lpmx (insn, xop, plen);

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))

      gcc_assert (REG_Z == REGNO (addr));

          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);

              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);

              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)

      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
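
/* E.g. a read from __flash1 on an ELPM device may come out as

       ldi  r18,1        ; segment number
       out  RAMPZ,r18    ; %i6 prints the I/O address of RAMPZ
       elpm r24,Z

   where r18 stands for whatever d-register avr_find_unused_d_reg
   returned.  */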
/* Worker function for xload_8 insn.  */

avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)

  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  avr_asm_len ("sbrc %1,7" CR_TAB

               "sbrs %1,7", xop, plen, 3);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);
output_movqi (rtx insn, rtx operands[], int *plen)

  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))

      return avr_out_lpm (insn, operands, plen);

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */

      if (test_hard_reg_class (STACK_REG, dest))
        return avr_asm_len ("out %0,%1", operands, plen, -1);
      else if (test_hard_reg_class (STACK_REG, src))
        return avr_asm_len ("in %0,%1", operands, plen, -1);

      return avr_asm_len ("mov %0,%1", operands, plen, -1);

  else if (CONSTANT_P (src))

      output_reload_in_const (operands, NULL_RTX, plen, false);

  else if (MEM_P (src))
    return out_movqi_r_mr (insn, operands, plen); /* mov r,m */

  else if (MEM_P (dest))

      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
output_movhi (rtx insn, rtx xop[], int *plen)

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))

      return avr_out_lpm (insn, xop, plen);

  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */

      if (test_hard_reg_class (STACK_REG, dest))

          if (AVR_HAVE_8BIT_SP)
            return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

            return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                "out __SP_H__,%B1", xop, plen, -2);

          /* Use simple load of SP if no interrupts are used.  */

          return TARGET_NO_INTERRUPTS
            ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -2)
            : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                           "out __SP_H__,%B1" CR_TAB
                           "out __SREG__,__tmp_reg__" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -5);

      else if (test_hard_reg_class (STACK_REG, src))

          return !AVR_HAVE_SPH
            ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "clr %B0", xop, plen, -2)
            : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "in %B0,__SP_H__", xop, plen, -2);

      return AVR_HAVE_MOVW
        ? avr_asm_len ("movw %0,%1", xop, plen, -1)
        : avr_asm_len ("mov %A0,%A1" CR_TAB
                       "mov %B0,%B1", xop, plen, -2);

  else if (CONSTANT_P (src))

      return output_reload_inhi (xop, NULL, plen);

  else if (MEM_P (src))

      return out_movhi_r_mr (insn, xop, plen); /* mov r,m */

  else if (MEM_P (dest))

      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);

  fatal_insn ("invalid insn:", insn);
out_movqi_r_mr (rtx insn, rtx op[], int *plen)

  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))

      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);

  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))

      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)

          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB

                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);

      else if (REGNO (XEXP (x, 0)) == REG_X)

          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extreme optimization options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))

              avr_asm_len ("sbiw r26,%o1", op, plen, 1);

      return avr_asm_len ("ldd %0,%1", op, plen, -1);

  return avr_asm_len ("ld %0,%1", op, plen, -1);
out_movhi_r_mr (rtx insn, rtx op[], int *plen)

  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

      if (reg_dest == reg_base) /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB

                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

  else if (GET_CODE (base) == PLUS) /* (R + i) */

      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))

          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB

                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extreme optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB

                         "mov %A0,__tmp_reg__", op, plen, -4)
          : avr_asm_len ("adiw r26,%o1" CR_TAB

                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)
        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB

                       "sbiw r26,1", op, plen, -4)
        : avr_asm_len ("sbiw %r1,2" CR_TAB

                       "ldd %B0,%p1+1", op, plen, -3);

  else if (GET_CODE (base) == POST_INC) /* (R++) */

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);

  else if (CONSTANT_ADDRESS_P (base))

      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)
        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);

  fatal_insn ("unknown move insn:", insn);
out_movsi_r_mr (rtx insn, rtx op[], int *l)

  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

      if (reg_base == REG_X) /* (R26) */

          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB

                          "ld __tmp_reg__,-X" CR_TAB

                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            return *l=5, ("ld %A0,X+" CR_TAB

                          "ld __tmp_reg__,X+" CR_TAB

                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB

            return *l=5, ("ld %A0,X+" CR_TAB

          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB

                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");

          return *l=4, ("ld %A0,%1" CR_TAB
                        "ldd %B0,%1+1" CR_TAB
                        "ldd %C0,%1+2" CR_TAB

  else if (GET_CODE (base) == PLUS) /* (R + i) */

      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))

          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB

                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)

          if (reg_dest == REG_X)

              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB

                      "ld __tmp_reg__,-X" CR_TAB

                      "mov r27,__tmp_reg__");

          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB

                    "ld __tmp_reg__,X+" CR_TAB

                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB

      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");

      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB

  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB

  fatal_insn ("unknown move insn:", insn);
out_movsi_mr_r (rtx insn, rtx op[], int *l)

  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB

  if (reg_base > 0) /* (r) */

      if (reg_base == REG_X) /* (R26) */

          if (reg_src == REG_X)

              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB

                              "st X+,__tmp_reg__" CR_TAB

              return *l=7, ("mov __tmp_reg__,r27" CR_TAB

                            "st X+,__tmp_reg__" CR_TAB

          else if (reg_base == reg_src + 2)

              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB

                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");

              return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                            "mov __tmp_reg__,%D1" CR_TAB

                            "st %0+,__zero_reg__" CR_TAB
                            "st %0,__tmp_reg__" CR_TAB
                            "clr __zero_reg__" CR_TAB

          return *l=5, ("st %0+,%A1" CR_TAB

      return *l=4, ("st %0,%A1" CR_TAB
                    "std %0+1,%B1" CR_TAB
                    "std %0+2,%C1" CR_TAB

  else if (GET_CODE (base) == PLUS) /* (R + i) */

      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))

          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB

                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");

      if (reg_base == REG_X)

          if (reg_src == REG_X)

              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB

                      "clr __zero_reg__" CR_TAB

          else if (reg_src == REG_X - 2)

              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB

                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB

          return ("adiw r26,%o0" CR_TAB

      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB

  fatal_insn ("unknown move insn:", insn);
output_movsisf (rtx insn, rtx operands[], int *l)

  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))

      return avr_out_lpm (insn, operands, real_l);

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */

      if (true_regnum (dest) > true_regnum (src))

          return ("movw %C0,%C1" CR_TAB

          return ("mov %D0,%D1" CR_TAB
                  "mov %C0,%C1" CR_TAB
                  "mov %B0,%B1" CR_TAB

          return ("movw %A0,%A1" CR_TAB

          return ("mov %A0,%A1" CR_TAB
                  "mov %B0,%B1" CR_TAB
                  "mov %C0,%C1" CR_TAB

  else if (CONSTANT_P (src))

      return output_reload_insisf (operands, NULL_RTX, real_l);

  else if (MEM_P (src))
    return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */

  else if (MEM_P (dest))

      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      output_asm_insn (templ, operands);

  fatal_insn ("invalid insn:", insn);
/* Handle loads of 24-bit types from memory to register.  */

avr_out_load_psi (rtx insn, rtx *op, int *plen)

  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

      if (reg_base == REG_X) /* (R26) */

          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB

                                "ld __tmp_reg__,-X" CR_TAB

                                "mov r27,__tmp_reg__", op, plen, -6);

          avr_asm_len ("ld %A0,X+" CR_TAB

                       "ld %C0,X", op, plen, -3);

          if (reg_dest != REG_X - 2
              && !reg_unused_after (insn, base))

              avr_asm_len ("sbiw r26,2", op, plen, 1);

      else /* reg_base != REG_X */

          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB

                                "mov %B0,__tmp_reg__", op, plen, -4);

          return avr_asm_len ("ld %A0,%1" CR_TAB
                              "ldd %B0,%1+1" CR_TAB
                              "ldd %C0,%1+2", op, plen, -3);

  else if (GET_CODE (base) == PLUS) /* (R + i) */

      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))

          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB

                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)

          if (reg_dest == REG_X)

              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB

                                  "ld __tmp_reg__,-X" CR_TAB

                                  "mov r27,__tmp_reg__", op, plen, -6);

          avr_asm_len ("adiw r26,%o1" CR_TAB

                       "ld %C0,X", op, plen, -4);

          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB

                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB

                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen, -6);

  fatal_insn ("unknown move insn:", insn);
/* Handle store of 24-bit type from register or zero to memory.  */

avr_out_store_psi (rtx insn, rtx *op, int *plen)

  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("sts %m0,%A1" CR_TAB
                        "sts %m0+1,%B1" CR_TAB
                        "sts %m0+2,%C1", op, plen, -6);

  if (reg_base > 0) /* (r) */

      if (reg_base == REG_X) /* (R26) */

          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB

                       "st %0,%C1", op, plen, -3);

          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

      return avr_asm_len ("st %0,%A1" CR_TAB
                          "std %0+1,%B1" CR_TAB
                          "std %0+2,%C1", op, plen, -3);

  else if (GET_CODE (base) == PLUS) /* (R + i) */

      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))

          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-60", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB

                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);

      if (reg_base == REG_X)

          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB

                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB

                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB

                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:", insn);
/* Move around 24-bit stuff.  */

avr_out_movpsi (rtx insn, rtx *op, int *plen)

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))

      return avr_out_lpm (insn, op, plen);

  if (register_operand (dest, VOIDmode))

      if (register_operand (src, VOIDmode)) /* mov r,r */

          if (true_regnum (dest) > true_regnum (src))

              avr_asm_len ("mov %C0,%C1", op, plen, -1);

                return avr_asm_len ("movw %A0,%A1", op, plen, 1);

              return avr_asm_len ("mov %B0,%B1" CR_TAB
                                  "mov %A0,%A1", op, plen, 2);

              avr_asm_len ("movw %A0,%A1", op, plen, -1);

              avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);

  else if (CONSTANT_P (src))

      return avr_out_reload_inpsi (op, NULL_RTX, plen);

  else if (MEM_P (src))
    return avr_out_load_psi (insn, op, plen); /* mov r,m */

  else if (MEM_P (dest))

      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);

  fatal_insn ("invalid insn:", insn);
out_movqi_mr_r (rtx insn, rtx op[], int *plen)

  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))

      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);

  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))

      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)

          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB

                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);

      else if (REGNO (XEXP (x, 0)) == REG_X)

          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))

              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);

              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

      return avr_asm_len ("std %0,%1", op, plen, -1);

  return avr_asm_len ("st %0,%1", op, plen, -1);
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)

  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)
      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB

                     "st X,__tmp_reg__", op, plen, -4);

        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      return reg_unused_after (insn, base)

        : avr_asm_len ("sbiw r26,1", op, plen, 1);

  else if (GET_CODE (base) == PLUS)

      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))

          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB

                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);

      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)
        : avr_asm_len ("adiw r26,%o0" CR_TAB

                       "sbiw r26,%o0+1", op, plen, -4);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */

      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB

                       "sbiw r26,1", op, plen, -4)
        : avr_asm_len ("sbiw %r0,2" CR_TAB

                       "std %p0+1,%B1", op, plen, -3);

  else if (GET_CODE (base) == POST_INC) /* (R++) */

      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

  fatal_insn ("unknown move insn:", insn);
out_movhi_mr_r (rtx insn, rtx op[], int *plen)

  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)
      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB

                         "st X,__tmp_reg__", op, plen, -4)
          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB

                         "st X,__tmp_reg__" CR_TAB

                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB

                       "st -X,%A1", op, plen, -3);

  else if (GET_CODE (base) == PLUS)

      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))

          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB

                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);

      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)
        : avr_asm_len ("adiw r26,%o0+1" CR_TAB

                       "sbiw r26,%o0", op, plen, -4);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */

      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);

  else if (GET_CODE (base) == POST_INC) /* (R++) */

      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB

                       "adiw r26,2", op, plen, -4)
        : avr_asm_len ("std %p0+1,%B1" CR_TAB

                       "adiw %r0,2", op, plen, -3);

  fatal_insn ("unknown move insn:", insn);
/* Return 1 if frame pointer for current function required.  */

avr_frame_pointer_required_p (void)

  return (cfun->calls_alloca
          || cfun->calls_setjmp
          || cfun->has_nonlocal_label
          || crtl->args.info.nregs == 0
          || get_frame_size () > 0);
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

compare_condition (rtx insn)

  rtx next = next_real_insn (insn);

  if (next && JUMP_P (next))

      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);

      if (IF_THEN_ELSE == GET_CODE (src))
        return GET_CODE (XEXP (src, 0));

/* Returns true iff INSN is a tst insn that only tests the sign.  */

compare_sign_p (rtx insn)

  RTX_CODE cond = compare_condition (insn);
  return (cond == GE || cond == LT);

/* Returns true iff the next insn is a JUMP_INSN with a condition
   that needs to be swapped (GT, GTU, LE, LEU).  */

compare_diff_p (rtx insn)

  RTX_CODE cond = compare_condition (insn);
  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;

/* Returns true iff INSN is a compare insn with the EQ or NE condition.  */

compare_eq_p (rtx insn)

  RTX_CODE cond = compare_condition (insn);
  return (cond == EQ || cond == NE);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

avr_out_compare (rtx insn, rtx *xop, int *plen)

  /* Register to compare and value to compare against.  */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  enum machine_mode mode;

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))

      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))

      if (xval == const1_rtx)

          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

            avr_asm_len ("or %A0,%C0", xop, plen, 1);

            avr_asm_len ("or %A0,%D0", xop, plen, 1);

      else if (xval == constm1_rtx)

            avr_asm_len ("and %A0,%D0", xop, plen, 1);

            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);

  for (i = 0; i < n_bytes; i++)

      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

          && test_hard_reg_class (ADDW_REGS, reg8))

          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)

              || reg_unused_after (insn, xreg)))

              avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))

              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);

      /* Comparing against 0 is easy.  */

                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

              avr_asm_len ("cpi %0,%1", xop, plen, 1);

          else if (reg_unused_after (insn, xreg))

              avr_asm_len ("sbci %0,%1", xop, plen, 1);

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

                   : "cpc %0,%2", xop, plen, 1);
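
/* E.g. comparing the LD register pair R25:R24 against the constant
   4242, with the pair dead afterwards, can be emitted as

       cpi  r24,lo8(4242)
       sbci r25,hi8(4242)

   while constants 0..63 in an ADDW_REGS pair shrink to one "sbiw".  */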
/* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */

avr_out_compare64 (rtx insn, rtx *op, int *plen)

  xop[0] = gen_rtx_REG (DImode, 18);

  return avr_out_compare (insn, xop, plen);
/* Output test instruction for HImode.  */

avr_out_tsthi (rtx insn, rtx *op, int *plen)

  if (compare_sign_p (insn))

      avr_asm_len ("tst %B0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))

      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0", op, plen, -1);

      avr_out_compare (insn, op, plen);

/* Output test instruction for PSImode.  */

avr_out_tstpsi (rtx insn, rtx *op, int *plen)

  if (compare_sign_p (insn))

      avr_asm_len ("tst %C0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))

      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0" CR_TAB
                   "or %A0,%C0", op, plen, -2);

      avr_out_compare (insn, op, plen);

/* Output test instruction for SImode.  */

avr_out_tstsi (rtx insn, rtx *op, int *plen)

  if (compare_sign_p (insn))

      avr_asm_len ("tst %D0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))

      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0" CR_TAB

                   "or %A0,%D0", op, plen, -3);

      avr_out_compare (insn, op, plen);
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)

  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (CONST_INT_P (operands[2]))

      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count < 8 && !scratch)
        use_zero_reg = true;

      max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)

          /* Output shifts inline with no loop - faster.  */

            avr_asm_len (templ, op, plen, t_len);

          avr_asm_len ("ldi %3,%2", op, plen, 1);

      else if (use_zero_reg)

          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);

          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);

      second_label = false;

  else if (MEM_P (op[2]))

      op_mov[0] = op[3] = tmp_reg_rtx;

      out_movqi_r_mr (insn, op_mov, plen);

  else if (register_operand (op[2], QImode))

      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))

          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);

    fatal_insn ("bad shift insn:", insn);

      avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

    avr_asm_len ("mov %3,%4", op, plen, 1);
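
/* For a non-constant shift count the result is a decrement loop around
   TEMPL, e.g. for a 16-bit left shift

       rjmp 2f
   1:  lsl %A0
       rol %B0
   2:  dec %3
       brpl 1b

   which executes the template exactly "count" times.  */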
/* 8bit shift left ((char)x << i) */

ashlqi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 8)

          return ("lsl %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          return ("ror %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
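
/* The LD_REGS cases above use the SWAP trick: a left shift by 4 takes
   two insns instead of four "lsl", e.g.

       swap %0          ; exchange high and low nibble
       andi %0,0xf0     ; clear what was the high nibble

   ANDI requires an upper (LD) register, hence the class test.  */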
/* 16bit shift left ((short)x << i) */

ashlhi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 16)

          return ("clr %B0" CR_TAB

          if (optimize_size && scratch)

            return ("swap %A0" CR_TAB

                    "andi %B0,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB
                    "andi %A0,0xf0" CR_TAB

            return ("swap %A0" CR_TAB

                    "ldi %3,0xf0" CR_TAB

                    "eor %B0,%A0" CR_TAB

          break;  /* optimize_size ? 6 : 8 */

          break;  /* scratch ? 5 : 6 */

            return ("lsl %A0" CR_TAB

                    "andi %B0,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB
                    "andi %A0,0xf0" CR_TAB

            return ("lsl %A0" CR_TAB

                    "ldi %3,0xf0" CR_TAB

                    "eor %B0,%A0" CR_TAB

          break;  /* scratch ? 5 : 6 */

          return ("clr __tmp_reg__" CR_TAB

                  "ror __tmp_reg__" CR_TAB

                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB

          return *len = 2, ("mov %B0,%A1" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

                  "ldi %3,0xf0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x20" CR_TAB

                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %B0,%A0" CR_TAB

                    "ldi %3,0xe0" CR_TAB

          return ("set" CR_TAB

                  "clr __zero_reg__");

          return ("mov %B0,%A0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %B0,0x40" CR_TAB
                    "mul %A0,%B0" CR_TAB

                    "clr __zero_reg__");

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x40" CR_TAB

                    "clr __zero_reg__");

          if (optimize_size && ldi_ok)

            return ("mov %B0,%A0" CR_TAB
                    "ldi %A0,6" "\n1:\t"

          if (optimize_size && scratch)

            return ("clr %B0" CR_TAB

          return ("clr %B0" CR_TAB

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
/* 24-bit shift left */

avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)

  if (CONST_INT_P (op[2]))

      switch (INTVAL (op[2]))

          if (INTVAL (op[2]) < 24)

          return avr_asm_len ("clr %A0" CR_TAB

                              "clr %C0", op, plen, 3);

            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);

              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);

            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);

          return avr_asm_len ("clr %C0" CR_TAB

                              "clr %A0", op, plen, 5);

  out_shift_with_cnt ("lsl %A0" CR_TAB

                      "rol %C0", insn, op, plen, 3);
/* 32bit shift left ((long)x << i) */

ashlsi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 32)

          return *len = 3, ("clr %D0" CR_TAB

          return ("clr %D0" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB

              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB

              return *len = 3, ("movw %C0,%A1" CR_TAB

            return *len = 4, ("mov %C0,%A1" CR_TAB
                              "mov %D0,%B1" CR_TAB

          return ("mov %D0,%A1" CR_TAB

          return ("clr %D0" CR_TAB

  out_shift_with_cnt ("lsl %A0" CR_TAB

                      "rol %D0", insn, operands, len, 4);
/* 8bit arithmetic shift right ((signed char)x >> i) */

ashrqi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("bst %0,6" CR_TAB

          if (INTVAL (operands[2]) < 8)

          return ("lsl %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
/* 16bit arithmetic shift right ((signed short)x >> i) */

ashrhi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))

          /* XXX try to optimize this too? */

          break;  /* scratch ? 5 : 6 */

          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB

                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB

          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return *len = 3, ("mov %A0,%B0" CR_TAB

              return *len = 4, ("mov %A0,%B1" CR_TAB

          return ("mov %A0,%B0" CR_TAB

                  "sbc %B0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB

                  "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x20" CR_TAB
                    "muls %B0,%A0" CR_TAB

                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %A0,%B0" CR_TAB

                    "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x10" CR_TAB
                    "muls %B0,%A0" CR_TAB

                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %A0,%B0" CR_TAB

                    "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x08" CR_TAB
                    "muls %B0,%A0" CR_TAB

                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          break;  /* scratch ? 5 : 7 */

          return ("mov %A0,%B0" CR_TAB

                  "sbc %B0,%B0" CR_TAB

          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB

                  "mov %B0,%A0" CR_TAB

          if (INTVAL (operands[2]) < 16)

          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB

  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
/* 24-bit arithmetic shift right */

avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)

  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))

      switch (INTVAL (op[2]))

          return avr_asm_len ("mov %A0,%B1" CR_TAB
                              "mov %B0,%C1" CR_TAB

                              "dec %C0", op, plen, 5);

          return avr_asm_len ("clr %C0" CR_TAB

                              "mov %B0,%C1" CR_TAB
                              "mov %A0,%B1", op, plen, 5);

          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB

                              "mov %C0,%B0", op, plen, 4);

          if (INTVAL (op[2]) < 24)

          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);

  out_shift_with_cnt ("asr %C0" CR_TAB

                      "ror %A0", insn, op, plen, 3);
/* 32-bit arithmetic shift right ((signed long)x >> i) */

ashrsi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB

              return ("clr %D0" CR_TAB

                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB

              return *len = 5, ("movw %A0,%C1" CR_TAB

            return *len = 6, ("mov %B0,%D1" CR_TAB
                              "mov %A0,%C1" CR_TAB

          return *len = 6, ("mov %A0,%D1" CR_TAB

                            "mov %B0,%D0" CR_TAB

          if (INTVAL (operands[2]) < 32)

          return *len = 4, ("lsl %D0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0" CR_TAB

          return *len = 5, ("lsl %D0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0" CR_TAB
                            "mov %C0,%A0" CR_TAB

  out_shift_with_cnt ("asr %D0" CR_TAB

                      "ror %A0", insn, operands, len, 4);
/* 8-bit logical shift right  ((unsigned char) x >> i) */
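/* A worked illustration (added for exposition, not from the original
   sources): on an upper (LD_REGS) register, x >> 4 is cheaper with the
   nibble-swap trick than with four single-bit shifts:

       swap %0        ; exchange high and low nibble
       andi %0,0x0f   ; mask out what used to be the low nibble

   e.g. 0xA7 -> swap -> 0x7A -> andi -> 0x0A, which is 0xA7 >> 4.  */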
const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");

        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
/* 16-bit logical shift right  ((unsigned short) x >> i) */
const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 3 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0"       CR_TAB
                      "ror %A0"       CR_TAB
                      "swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0"     CR_TAB
                      "ror %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0"     CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "andi %A0,0x07");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              *len = 6;
              return ("set"        CR_TAB
                      "bld r1,3"   CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0"  CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "clr %B0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0"     CR_TAB
                      "dec %B0"     CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
/* 24-bit logical shift right */
const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"  CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          return avr_asm_len ("clr %A0"    CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0"    CR_TAB
                              "clr %B0"    CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
/* 32-bit logical shift right  ((unsigned long) x >> i) */
const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            *len = 4;
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0"      CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0"     CR_TAB
                            "clr %C0"     CR_TAB
                            "clr %D0");

        case 31:
          *len = 6;
          return ("clr %A0"    CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0"    CR_TAB
                  "clr %B0"    CR_TAB
                  "clr %C0"    CR_TAB
                  "clr %D0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
/* Output addition of register XOP[0] and compile time constant XOP[2].
   CODE == PLUS:  perform addition by using ADD instructions or
   CODE == MINUS: perform addition by using SUB instructions:

      XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

      XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.  */
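/* A worked illustration (added for exposition, not from the original
   sources): AVR has no "add immediate" except ADIW, so  R += VAL  is
   usually rendered as  R -= -VAL.  Adding 5 to a 16-bit value in an
   upper register pair becomes

       subi %A0,lo8(-5)   ; == subi %A0,0xFB
       sbci %B0,hi8(-5)   ; == sbci %B0,0xFF

   which is why avr_out_plus further below measures both the PLUS and
   the MINUS flavor and keeps the shorter one.  */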
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat = UNKNOWN, int sign = 0)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  enum machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
          /* ... */
        }

      return;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    return;

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_ZN;
                }

              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
          break;

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }
          break;

        default:
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

  saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;
  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B  is a register or a non-zero compile time constant CONST.
     A  is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B  stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.

     unsigned
     operation        | code  | sat if   | b is         | sat value | case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     | add   | C == 1   | const, reg   | u+ = 0xff | [1u]
     +  as  a - (-b)  | sub   | C == 0   | const        | u+ = 0xff | [2u]
     -  as  a - b     | sub   | C == 1   | const, reg   | u- = 0    | [3u]
     -  as  a + (-b)  | add   | C == 0   | const        | u- = 0    | [4u]

     signed
     operation        | code  | sat if   | b is         | sat value | case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     | add   | V == 1   | const, reg   | s+        | [1s]
     +  as  a - (-b)  | sub   | V == 1   | const        | s+        | [2s]
     -  as  a - b     | sub   | V == 1   | const, reg   | s-        | [3s]
     -  as  a + (-b)  | add   | V == 1   | const        | s-        | [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.  */
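  /* Worked numbers for the table above (added for exposition, not from
     the original sources):

         unsigned 8-bit:   200 + 100 = 300  -> Carry set -> saturate to 0xff (255)
         signed 8-bit:     100 +  50 = 150  -> V set     -> saturate to 0x7f (127)
         signed 8-bit:    -100 -  50 = -150 -> V set     -> saturate to 0x80 (-128)  */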
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  avr_asm_len ("0:", op, plen, 0);
}
/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:

      XOP[0] = XOP[0] +/- XOP[2]

   This is a helper for the function below.  The only insns that need this
   are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
static const char*
avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
{
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Only pointer modes want to add symbols.  */

  gcc_assert (mode == HImode || mode == PSImode);

  *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

  avr_asm_len (PLUS == code
               ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
               : "subi %A0,lo8(%2)"    CR_TAB "sbci %B0,hi8(%2)",
               xop, plen, -2);

  if (PSImode == mode)
    avr_asm_len (PLUS == code
                 ? "sbci %C0,hlo8(-(%2))"
                 : "sbci %C0,hlo8(%2)", xop, plen, 1);

  return "";
}
/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.

   INSN is a single_set insn with a binary operation as SET_SRC that is
   one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.

   XOP are the operands of INSN.  In the case of 64-bit operations with
   constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
   The non-saturating insns up to 32 bits may or may not supply a "d" class
   scratch as XOP[3].

   If PLEN == NULL output the instructions.
   If PLEN != NULL set *PLEN to the length of the sequence in words.

   PCC is a pointer to store the instructions' effect on cc0.
   PCC may be NULL.

   PLEN and PCC default to NULL.  */
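/* A worked illustration (added for exposition, not from the original
   sources): both renderings of a 16-bit  x += 2  are measured and the
   shorter one wins:

       PLUS  flavor:  adiw %0,2                       ; 1 word (ADIW register)
       MINUS flavor:  subi %A0,0xFE / sbci %B0,0xFF   ; 2 words

   so the ADIW form is emitted.  */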
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  rtx xdest = SET_DEST (single_set (insn));
  enum machine_mode mode = GET_MODE (xdest);
  enum machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (single_set (insn)));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat);
      return "";
    }

  if (8 == n_bytes)
    {
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  op[3] = PARALLEL == GET_CODE (PATTERN (insn)) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign);

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign);

  return "";
}
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.  */
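/* A worked illustration (added for exposition, not from the original
   sources): an IOR whose per-byte constant has exactly one bit set can
   avoid the clobber register even on a non-LD register:

       set            ; T := 1
       bld %0,7       ; copy T into bit 7

   performs  x |= 0x80  in two words without needing "ldi".  */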
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
   PLEN != NULL: Set *PLEN to the length of that sequence.
   Return "".  */
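/* A worked illustration (added for exposition, not from the original
   sources): with a 2-byte program counter, "rcall ." pushes a 2-byte
   return address, so  SP -= 5  is emitted as

       rcall .              ; SP -= 2
       rcall .              ; SP -= 2
       push __zero_reg__    ; SP -= 1

   and  SP += 3  as three "pop __tmp_reg__".  */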
const char*
avr_out_addto_sp (rtx *op, int *plen)
{
  int pc_len = AVR_2_BYTE_PC ? 2 : 3;
  int addend = INTVAL (op[0]);

  if (plen)
    *plen = 0;

  if (addend < 0)
    {
      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);

      while (addend <= -pc_len)
        {
          addend += pc_len;
          avr_asm_len ("rcall .", op, plen, 1);
        }

      while (addend++ < 0)
        avr_asm_len ("push __zero_reg__", op, plen, 1);
    }
  else if (addend > 0)
    {
      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);

      while (addend-- > 0)
        avr_asm_len ("pop __tmp_reg__", op, plen, 1);
    }

  return "";
}
/* Outputs instructions needed for fixed point type conversion.
   This includes converting between any fixed point type, as well
   as converting to any integer type.  Conversion between integer
   types is not supported.

   Converting signed fractional types requires a bit shift if converting
   to or from any unsigned fractional type because the decimal place is
   shifted by 1 bit.  When the destination is a signed fractional, the sign
   is stored in either the carry or T bit.  */
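/* A worked illustration (added for exposition, not from the original
   sources): a signed short _Fract has 7 fraction bits, an unsigned
   short _Fract has 8, so the binary point moves by one bit between
   them:

       0.5 as   signed short _Fract:  0x40
       0.5 as unsigned short _Fract:  0x80

   hence the single extra shift mentioned above.  */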
const char*
avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
{
  rtx xop[4];
  RTX_CODE shift = UNKNOWN;
  bool sign_in_carry = false;
  bool msb_in_carry = false;
  bool lsb_in_carry = false;
  const char *code_ashift = "lsl %0";

#define MAY_CLOBBER(RR)                                                 \
  /* Shorthand used below.  */                                          \
  ((sign_bytes                                                          \
    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
   || (reg_unused_after (insn, all_regs_rtx[RR])                        \
       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))

  struct
  {
    /* bytes       : Length of operand in bytes.
       ibyte       : Length of integral part in bytes.
       fbyte, fbit : Length of fractional part in bytes, bits.  */

    bool sbit;
    unsigned fbit, bytes, ibyte, fbyte;
    unsigned regno, regno_msb;
  } dest, src, *val[2] = { &dest, &src };

  unsigned i;

  if (plen)
    *plen = 0;

  /* Step 0:  Determine information on source and destination operand we
     ======   will need in the remainder.  */

  for (i = 0; i < sizeof (val) / sizeof (*val); i++)
    {
      enum machine_mode mode;

      xop[i] = operands[i];

      mode = GET_MODE (xop[i]);

      val[i]->bytes = GET_MODE_SIZE (mode);
      val[i]->regno = REGNO (xop[i]);
      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;

      if (SCALAR_INT_MODE_P (mode))
        {
          val[i]->sbit = intsigned;
          val[i]->fbit = 0;
        }
      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
        {
          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
          val[i]->fbit = GET_MODE_FBIT (mode);
        }
      else
        fatal_insn ("unsupported fixed-point conversion", insn);

      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
    }

  // Byte offset of the decimal point taking into account different place
  // of the decimal point in input and output and different register numbers
  // of input and output.
  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;

  // Number of destination bytes that will come from sign / zero extension.
  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);

  // Number of bytes at the low end to be filled with zeros.
  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);

  // Do we have a 16-bit register pair that is cleared?
  rtx clrw = NULL_RTX;

  bool sign_extend = src.sbit && sign_bytes;

  if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
    shift = ASHIFT;
  else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
    shift = ASHIFTRT;
  else if (dest.fbit % 8 == src.fbit % 8)
    shift = UNKNOWN;
  else
    gcc_unreachable();

  /* Step 1:  Clear bytes at the low end and copy payload bits from source
     ======   to destination.  */

  int step = offset < 0 ? 1 : -1;
  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;

  // We cleared at least that number of registers.
  int clr_n = 0;

  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
    {
      // Next regno of destination is needed for MOVW
      unsigned d1 = d0 + step;

      // Current and next regno of source
      signed s0 = d0 - offset;
      signed s1 = s0 + step;

      // Must current resp. next regno be CLRed?  This applies to the low
      // bytes of the destination that have no associated source bytes.
      bool clr0 = s0 < (signed) src.regno;
      bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;

      // First gather what code to emit (if any) and additional step to
      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
      // is the source rtx for the current loop iteration.
      const char *code = NULL;
      int stepw = 0;

      if (clr0)
        {
          if (AVR_HAVE_MOVW && clr1 && clrw)
            {
              xop[2] = all_regs_rtx[d0 & ~1];
              xop[3] = clrw;
              code = "movw %2,%3";
              stepw = step;
            }
          else
            {
              xop[2] = all_regs_rtx[d0];
              code = "clr %2";

              if (++clr_n >= 2
                  && !clrw
                  && d0 % 2 == (step > 0))
                clrw = all_regs_rtx[d0 & ~1];
            }
        }
      else if (offset && s0 <= (signed) src.regno_msb)
        {
          int movw = AVR_HAVE_MOVW && offset % 2 == 0
            && d0 % 2 == (offset > 0)
            && d1 <= dest.regno_msb && d1 >= dest.regno
            && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;

          xop[2] = all_regs_rtx[d0 & ~movw];
          xop[3] = all_regs_rtx[s0 & ~movw];
          code = movw ? "movw %2,%3" : "mov %2,%3";
          stepw = step * movw;
        }

      if (code)
        {
          if (sign_extend && shift != ASHIFT && !sign_in_carry
              && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
            {
              /* We are going to override the sign bit.  If we sign-extend,
                 store the sign in the Carry flag.  This is not needed if
                 the destination will be ASHIFTed in the remainder because
                 the ASHIFT will set Carry without extra instruction.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
              sign_in_carry = true;
            }

          unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;

          if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
              && src.ibyte > dest.ibyte
              && (d0 == src_msb || d0 + stepw == src_msb))
            {
              /* We are going to override the MSB.  If we shift right,
                 store the MSB in the Carry flag.  This is only needed if
                 we don't sign-extend because with sign-extension the MSB
                 (the sign) will be produced by the sign extension.  */

              avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
              msb_in_carry = true;
            }

          unsigned src_lsb = dest.regno - offset - 1;

          if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
              && (d0 == src_lsb || d0 + stepw == src_lsb))
            {
              /* We are going to override the new LSB; store it into carry.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
              code_ashift = "rol %0";
              lsb_in_carry = true;
            }

          avr_asm_len (code, xop, plen, 1);
          d0 += stepw;
        }
    }

  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
     ======   for signed input and unsigned output.  */

  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
    {
      unsigned s0 = dest.regno - offset - 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      code_ashift = "rol %0";
      lsb_in_carry = true;
    }

  if (shift == ASHIFT)
    {
      for (d0 = dest.regno + zero_bytes;
           d0 <= dest.regno_msb - sign_bytes; d0++)
        {
          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
          code_ashift = "rol %0";
        }

      lsb_in_carry = false;
      sign_in_carry = true;
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======   it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.
     ======  */

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
    }

  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  unsigned copies = 0;
  rtx movw = sign_extend ? NULL_RTX : clrw;

  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
    {
      if (AVR_HAVE_MOVW && movw
          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
        {
          xop[2] = all_regs_rtx[d0];
          xop[3] = movw;
          avr_asm_len ("movw %2,%3", xop, plen, 1);
          d0++;
        }
      else
        {
          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
                       &all_regs_rtx[d0], plen, 1);

          if (++copies >= 2 && !movw && d0 % 2 == 1)
            movw = all_regs_rtx[d0-1];
        }
    }

  /* Step 4:  Right shift the destination.  This might be needed for
     ======   conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      if (sign_extend || msb_in_carry)
        code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
        code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
        {
          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
          code_ashiftrt = "ror %0";
        }
    }

  return "";
}
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.  */
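/* A worked illustration (added for exposition, not from the original
   sources): the HImode in-place byte swap below needs no scratch
   because three XORs swap two registers:

       a ^= b;  b ^= a;  a ^= b;

   e.g. a = 0x12, b = 0x34 gives a = 0x26, then b = 0x12, then a = 0x34.  */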
bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;

      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size++;
            }
        }
      while (blocked != -1);
    }
  return true;
}
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  */
int
avr_adjust_insn_length (rtx insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (-1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
/* Return nonzero if register REG dead after INSN.  */
int
reg_unused_after (rtx insn, rtx reg)
{
  return (dead_or_set_p (insn, reg)
          || (REG_P (reg) && _reg_unused_after (insn, reg)));
}
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */
int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

      /* If this is a label that existed before reload, then the register
         is dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)
        continue;
      /* else */

      if (!INSN_P (insn))
        continue;

      if (code == JUMP_INSN)
        return 0;

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          int retval = 0;

          for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
            {
              rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
              rtx set = single_set (this_insn);

              if (GET_CODE (this_insn) == CALL_INSN)
                code = CALL_INSN;
              else if (GET_CODE (this_insn) == JUMP_INSN)
                {
                  if (INSN_ANNULLED_BRANCH_P (this_insn))
                    return 0;
                  code = JUMP_INSN;
                }

              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
                return 0;
              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  if (GET_CODE (SET_DEST (set)) != MEM)
                    retval = 1;
                  else
                    return 0;
                }
              if (set == 0
                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
                return 0;
            }
          if (retval == 1)
            return 1;
          else if (code == JUMP_INSN)
            return 0;
        }

      if (code == CALL_INSN)
        {
          rtx tem;
          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
              return 0;
          if (call_used_regs[REGNO (reg)])
            return 1;
        }

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
        return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
        return 0;
    }
  return 1;
}
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */
static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      unsigned n;

      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      for (n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
/* Return value is nonzero if pseudos that have been
   assigned to registers of class CLASS would likely be spilled
   because registers of CLASS are needed for spill registers.  */
static bool
avr_class_likely_spilled_p (reg_class_t c)
{
  return (c != ALL_REGS && c != ADDW_REGS);
}
/* Valid attributes:
   progmem   - Put data to program memory.
   signal    - Make a function to be hardware interrupt.
               After function prologue interrupts remain disabled.
   interrupt - Make a function to be hardware interrupt.  Before function
               prologue interrupts are enabled by means of SEI.
   naked     - Don't generate function prologue/epilogue and RET
               instruction.  */
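/* Typical use (illustrative example, not from the original sources):

       void __vector_10 (void) __attribute__ ((signal));       interrupt handler
       const int tbl[] __attribute__ ((progmem)) = { 1, 2 };   data in flash  */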
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
                              tree args ATTRIBUTE_UNUSED,
                              int flags ATTRIBUTE_UNUSED,
                              bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
        {
          /* This is really a decl attribute, not a type attribute,
             but try to handle it for GCC 3.0 backwards compatibility.  */

          tree type = TREE_TYPE (*node);
          tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
          tree newtype = build_type_attribute_variant (type, attr);

          TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
          TREE_TYPE (*node) = newtype;
          *no_add_attrs = true;
        }
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
        {
          *no_add_attrs = false;
        }
      else
        {
          warning (OPT_Wattributes, "%qE attribute ignored",
                   name);
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_fndecl_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

static tree
avr_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2   if DECL is located in 24-bit flash address-space
   Return 1   if DECL is located in 16-bit flash address-space
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0   otherwise  */
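/* Examples (illustrative, not from the original sources):

       const __memx  int a = 1;                     -> returns 2
       const __flash int b = 1;                     -> returns 1
       const int c __attribute__((progmem)) = 1;    -> returns -1
       int d;                                       -> returns 0   */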
int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  if (avr_decl_memx_p (decl))
    return 2;

  if (avr_decl_flash_p (decl))
    return 1;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return -1;

  a = decl;

  do
    a = TREE_TYPE (a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return -1;

  return 0;
}
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */
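/* Examples (illustrative, not from the original sources):

       const __flash char *p;    -- target is const: scan yields 0 (generic)
       __flash char *q;          -- target not const: yields ADDR_SPACE_FLASH  */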
static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
        {
          return as;
        }

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %qs",
                   node, avr_addrspace[as].name, avr_current_device->name);
          else
            error ("%s %q+D uses address space %qs beyond flash of %qs",
                   reason, node, avr_addrspace[as].name,
                   avr_current_device->name);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
/* Add the section attribute if the variable is in progmem.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          error ("variable %q+D located in address space %qs"
                 " beyond flash of %qs",
                 node, avr_addrspace[as].name, avr_current_device->name);
        }

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    const_tree decl ATTRIBUTE_UNUSED,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for progmem*.data sections.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix),
                                            NULL));
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  return progmem_swtable_section;
}
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}
/* Implement `TARGET_ASM_SELECT_SECTION'.  */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data section.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_current_arch->sfr_offset;

  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty, see PR18145.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  const int *order = (TARGET_ORDER_1 ? order_1 :
                      TARGET_ORDER_2 ? order_2 :
                      order_0);

  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
/* Implement `TARGET_REGISTER_MOVE_COST'.  */

static int
avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                        reg_class_t from, reg_class_t to)
{
  return (from == STACK_REG ? 6
          : to == STACK_REG ? 12
          : 2);
}
/* Implement `TARGET_MEMORY_MOVE_COST'.  */

static int
avr_memory_move_cost (enum machine_mode mode,
                      reg_class_t rclass ATTRIBUTE_UNUSED,
                      bool in ATTRIBUTE_UNUSED)
{
  return (mode == QImode ? 2
          : mode == HImode ? 4
          : mode == SImode ? 8
          : mode == SFmode ? 8
          : 16);
}
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
                      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
/* Worker function for AVR backend's rtx_cost function.
   X is rtx expression whose cost is to be calculated.
   Return true if the complete cost has been computed.
   Return false if subexpressions should be scanned.
   In either case, *TOTAL contains the cost result.  */

static bool
avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case HImode:
        case PSImode:
        case SImode:
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* FALLTHRU */
    case AND:
    case IOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            *total = 10;
          break;

        case SImode:
          if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc.  */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline.  */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc.  */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }

          return true;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      switch (mode)
        {
        case QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);
          break;

        case HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          *total = COSTS_N_INSNS (2);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case TRUNCATE:
      if (AVR_HAVE_MUL
          && LSHIFTRT == GET_CODE (XEXP (x, 0))
          && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
        {
          if (QImode == mode || HImode == mode)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
        }
      break;

    default:
      break;
    }

  return false;
}
/* Implement `TARGET_RTX_COSTS'.  */

static bool
avr_rtx_costs (rtx x, int codearg, int outer_code,
               int opno, int *total, bool speed)
{
  bool done = avr_rtx_costs_1 (x, codearg, outer_code,
                               opno, total, speed);

  if (avr_log.rtx_costs)
    {
      avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
                 done, speed ? "speed" : "size", *total, outer_code, x);
    }

  return done;
}
/* Implement `TARGET_ADDRESS_COST'.  */

static int
avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
                  addr_space_t as ATTRIBUTE_UNUSED,
                  bool speed ATTRIBUTE_UNUSED)
{
  int cost = 4;

  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1))
      && (REG_P (XEXP (x, 0))
          || GET_CODE (XEXP (x, 0)) == SUBREG))
    {
      if (INTVAL (XEXP (x, 1)) >= 61)
        cost = 18;
    }
  else if (CONSTANT_ADDRESS_P (x))
    {
      if (optimize > 0
          && io_address_operand (x, QImode))
        cost = 2;
    }

  if (avr_log.address_cost)
    avr_edump ("\n%?: %d = %r\n", cost, x);

  return cost;
}
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  int ok = 0;

  if (GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x, 0), 1))
          <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x, 0), 0);
      int regno = REGNO (xx);

      ok = (/* allocate pseudos */
            regno >= FIRST_PSEUDO_REGISTER
            /* strictly check */
            || regno == REG_Z || regno == REG_Y
            /* XXX frame & arg pointer checks */
            || xx == frame_pointer_rtx
            || xx == arg_pointer_rtx);

      if (avr_log.constraints)
        avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
                   ok, reload_completed, reload_in_progress, x);
    }

  return ok;
}
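/* For illustration: with MAX_LD_OFFSET (HImode) = 64 - 2 = 62, an address
   like

       (mem:HI (plus (reg:HI REG_Y) (const_int 62)))

   satisfies 'Q' because both bytes can be reached by LDD/STD with a
   displacement in 0...63, whereas a displacement of 63 would push the
   high byte of the HImode access out of range.  */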
/* Convert condition code CONDITION to the valid AVR condition code.  */

RTX_CODE
avr_normalize_condition (RTX_CODE condition)
{
  switch (condition)
    {
    case GT:
      return GE;
    case GTU:
      return GEU;
    case LE:
      return LT;
    case LEU:
      return LTU;
    default:
      gcc_unreachable ();
    }
}
/* Helper function for `avr_reorg'.  */

static rtx
avr_compare_pattern (rtx insn)
{
  rtx pattern = single_set (insn);

  if (pattern
      && NONJUMP_INSN_P (insn)
      && SET_DEST (pattern) == cc0_rtx
      && GET_CODE (SET_SRC (pattern)) == COMPARE)
    {
      enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
      enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));

      /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
         They must not be swapped, thus skip them.  */

      if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
          && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
        return pattern;
    }

  return NULL_RTX;
}
/* Helper function for `avr_reorg'.  */
/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N) (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N) (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with

        if (x == VAL)   goto L1;
        if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  */

static void
avr_reorg (void)
{
  rtx insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
/* Returns register number for function return value.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}


/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}
/* Implement `TARGET_LIBCALL_VALUE'.  */
/* Create an RTX representing the place where a
   library function returns a value of mode MODE.  */

static rtx
avr_libcall_value (enum machine_mode mode,
                   const_rtx func ATTRIBUTE_UNUSED)
{
  int offs = GET_MODE_SIZE (mode);

  if (offs <= 4)
    offs = (offs + 1) & ~1;

  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
}
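/* Illustration of the register layout that follows from the computation
   above with avr_ret_register () == 24:

       1 or 2 bytes  ->  starts at R24 (QImode in R24, HImode in R25:R24)
       3 or 4 bytes  ->  starts at R22 (e.g. SImode/SFmode in R25...R22)
       8 bytes       ->  starts at R18 (DImode in R25...R18)

   i.e. return values are right-aligned at R25.  */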
/* Implement `TARGET_FUNCTION_VALUE'.  */
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}
int
test_hard_reg_class (enum reg_class rclass, rtx x)
{
  int regno = true_regnum (x);

  if (regno < 0)
    return 0;

  if (TEST_HARD_REG_CLASS (rclass, regno))
    return 1;

  return 0;
}
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx insn)
{
  if (avr_current_device->errata_skip
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}
int
jump_over_one_insn_p (rtx insn, rtx dest)
{
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
                      ? XEXP (dest, 0)
                      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);

  return (jump_offset == 1
          || (jump_offset == 2
              && avr_2word_insn_p (next_active_insn (insn))));
}
/* Worker function for `HARD_REGNO_MODE_OK'.  */
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
        Disallowing QI et al. in these regs might lead to code like
            (set (subreg:QI (reg:HI 28) n) ...)
        which will result in wrong code because reload does not
        handle SUBREGs of hard registers like this.
        This could be fixed in reload.  However, it appears
        that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  /* FIXME: Ideally, the following test is not needed.
        However, it turned out that it can reduce the number
        of spill fails.  AVR and its poor endowment with
        address registers is extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4
      && regno >= REG_X)
    return 0;

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
}
/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */

int
avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
{
  /* FIXME: This hook gets called with MODE:REGNO combinations that don't
        represent valid hard registers like, e.g. HI:29.  Returning TRUE
        for such registers can lead to performance degradation as mentioned
        in PR53595.  Thus, report invalid hard registers as FALSE.  */

  if (!avr_hard_regno_mode_ok (regno, mode))
    return 0;

  /* Return true if any of the following boundaries is crossed:
     17/18, 27/28 and 29/30.  */

  return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
          || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
          || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
}
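/* For illustration: REGNO = 16 with MODE = SImode spans R16...R19 and
   thus crosses the 17/18 boundary between the call-saved registers
   R2...R17 and the call-clobbered registers starting at R18, so such a
   value is only partly clobbered by a call and the test above returns
   true for it.  */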
/* Implement `MODE_CODE_BASE_REG_CLASS'.  */

enum reg_class
avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
                              addr_space_t as, RTX_CODE outer_code,
                              RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    {
      return POINTER_Z_REGS;
    }

  if (!avr_strict_X)
    return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;

  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
}
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  if (!ADDR_SPACE_GENERIC_P (as))
    {
      if (regno < FIRST_PSEUDO_REGISTER
          && regno == REG_Z)
        {
          return true;
        }

      if (reg_renumber)
        {
          regno = reg_renumber[regno];

          if (regno == REG_Z)
            {
              return true;
            }
        }

      return false;
    }

  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM)
        {
          ok = true;
        }
    }

  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.

   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
/* Reload the constant OP[1] into the HI register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
                 by the insns printed.

   Return "".  */

const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
/* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */

void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}
/* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
/* Returns true if SCRATCH is safe to be allocated as a scratch
   register (for a define_peephole2) in the current function.  */

static bool
avr_hard_regno_scratch_ok (unsigned int regno)
{
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))
    return false;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (regno == REG_Y || regno == REG_Y + 1))
    {
      return false;
    }

  return true;
}
/* Worker function for `HARD_REGNO_RENAME_OK'.  */
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
avr_hard_regno_rename_ok (unsigned int old_reg,
                          unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))
    return 0;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (old_reg == REG_Y || old_reg == REG_Y + 1
          || new_reg == REG_Y || new_reg == REG_Y + 1))
    {
      return 0;
    }

  return 1;
}
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
/* Worker function for `TARGET_RETURN_IN_MEMORY'.  */

static bool
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  if (TYPE_MODE (type) == BLKmode)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
    }

  return false;
}
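/* For illustration: an aggregate of up to 8 bytes is returned in
   registers (cf. avr_function_value above), e.g. a 3-byte struct is
   padded to the SImode slot starting at R22, whereas a 10-byte struct
   exceeds the 8-byte limit and is returned in memory.  */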
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */

static enum machine_mode
avr_addr_space_address_mode (addr_space_t as)
{
  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
}
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */

static enum machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  return avr_addr_space_address_mode (as);
}
/* Helper for following function.  */

static bool
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
{
  gcc_assert (REG_P (reg));

  if (strict)
    {
      return REGNO (reg) == REG_Z;
    }

  /* Avoid combine to propagate hard regs.  */

  if (can_create_pseudo_p()
      && REGNO (reg) < REG_Z)
    {
      return false;
    }

  return true;
}
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */

static rtx
avr_addr_space_legitimize_address (rtx x, rtx old_x,
                                   enum machine_mode mode, addr_space_t as)
{
  if (ADDR_SPACE_GENERIC_P (as))
    return avr_legitimize_address (x, old_x, mode);

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
    }

  return old_x;
}
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  return src;
}
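/* For illustration: up-casting a 16-bit __flash1 pointer holding 0xABCD
   to __memx yields the 24-bit linear address 0x01ABCD (segment 1 in the
   high byte, cf. the avr_addrspace[] table), while a generic (RAM)
   pointer 0xABCD becomes 0x80ABCD because RAM is marked with bit 23
   set.  */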
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */

static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess.  */

  return true;
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_current_device->n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns.
    $0     : Address Space
    $1, $2 : Loop register
    Z      : Source address
    X      : Destination address
*/

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
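/* For illustration, the loop printed for a copy from generic space with
   a QImode loop counter, assuming the counter was allocated to r24:

       0:  ld   __tmp_reg__,Z+
           st   X+,__tmp_reg__
           dec  r24
           brne 0b
*/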
/* Helper for __builtin_avr_delay_cycles */

static rtx
avr_mem_clobber (void)
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (mem) = 1;
  return mem;
}
void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
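/* For illustration: __builtin_avr_delay_cycles (1000) falls into the
   768...262144 range above:  loop_count = (1000 - 5)/4 + 1 = 249, which
   burns (249 - 1)*4 + 5 = 997 cycles in the 4-cycle loop; the remaining
   3 cycles are emitted as one 2-cycle and one 1-cycle NOP.  */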
/* Return VAL * BASE + DIGIT.  BASE = 0 is shortcut for BASE = 2^{32}   */

static double_int
avr_double_int_push_digit (double_int val, int base,
                           unsigned HOST_WIDE_INT digit)
{
  val = 0 == base
    ? val.llshift (32, 64)
    : val * double_int::from_uhwi (base);

  return val + double_int::from_uhwi (digit);
}
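/* For illustration: starting from VAL = 0 and pushing the digits
   7, 6, ..., 0 with BASE = 16 builds the identity map 0x76543210; this
   is exactly how avr_map_decompose below assembles F o G^-1 nibble by
   nibble from MSB to LSB.  */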
/* Compute the image of x under f, i.e. perform   x --> f(x)    */

static int
avr_map (double_int f, int x)
{
  return 0xf & f.lrshift (4*x, 64).to_uhwi ();
}
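/* For illustration: avr_map (0x76543210, x) = x for x in { 0 ... 7 }
   (the identity map), while avr_map (0x3210ffff, 0) = 0xf and
   avr_map (0x3210ffff, 7) = 3.  */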
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (double_int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        gcc_unreachable();
    }

  return metric;
}


/* Return true if IVAL has a 0xf in its hexadecimal representation
   and false, otherwise.  Only nibbles 0..7 are taken into account.
   Used as constraint helper for C0f and Cxf.  */

bool
avr_has_nibble_0xf (rtx ival)
{
  return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
}
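/* For illustration, with A = 0x3210ffff: MAP_MASK_PREIMAGE_F is 0x0f
   (nibbles 0...3 are 0xf), MAP_FIXED_0_7 is 0 (no nibble equals its
   position) and MAP_NONFIXED_0_7 is 4 (nibbles 4...7).  Hence
   avr_has_nibble_0xf is true for 0x3210ffff and false for the identity
   map 0x76543210.  */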
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G)  <  cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  double_int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
  };
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  double_int ginv = double_int::from_uhwi (g->ginv);

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is not an element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map.to_uhwi (), SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}

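/* Illustrative sketch, not part of the compiler: Step 1 from above on
   plain 32-bit map values instead of double_int, omitting the cost
   book-keeping and the bail-out for bits outside the image of G.
   With f = 0x3210ffff and ginv = 0x32107654 (the LROTATE-by-4 row of
   avr_map_op) it returns 0x7654ffff, reproducing the example in the
   comment before avr_map_op.  */

#if 0
static unsigned long
sketch_compose_f_ginv (unsigned long f, unsigned long ginv)
{
  unsigned long r = 0;
  int i;

  for (i = 0; i < 8; i++)
    {
      /* Nibble i of F; route it through G^-1 if it addresses a bit.  */
      int x = 0xf & (int) (f >> (4 * i));

      if (x <= 7)
        x = 0xf & (int) (ginv >> (4 * x));

      r |= (unsigned long) x << (4 * i);
    }

  return r;
}
#endif
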
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
   is different to its source position.  */

static void
avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}

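/* Worked example (schematic, for illustration): with MAP = 0x76543201,
   i.e. destination bits 0 and 1 swap their sources and all other bits
   are fixed points, FIXP_P = false produces

       bst  %1,0   ;  T := source bit 0
       bld  %0,1   ;  destination bit 1 := T
       bst  %1,1   ;  T := source bit 1
       bld  %0,0   ;  destination bit 0 := T

   Ordering the loop by source bit B means that a bit already held in T
   can serve several destination bits without a second BST.  */
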
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles. If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             map.to_uhwi () & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}

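/* Illustrative sketch, not part of the compiler: the EOR/ANDI/EOR trick
   from above in plain C.  Result bits where MASK is 1 are taken from A,
   all other bits from B; with A = %0, B = %1 and MASK = ~mask_fixed this
   seeds the result with all fixed-point bits in three instructions.  */

#if 0
static unsigned char
sketch_merge_bits (unsigned char a, unsigned char b, unsigned char mask)
{
  /* a ^ b flags the positions where A and B differ; masking and XORing
     with b again flips b back to a exactly where MASK is set.  */
  return (unsigned char) (((a ^ b) & mask) ^ b);
}
#endif
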
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};


/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID]  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };


/* Implement `TARGET_BUILTIN_DECL'.  */

static tree
avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (id < AVR_BUILTIN_COUNT)
    return avr_bdesc[id].fndecl;

  return error_mark_node;
}

static void
avr_init_builtin_int24 (void)
{
  tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));

  lang_hooks.types.register_builtin_type (int24_type, "__int24");
  lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
}

/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  tree node_hr = short_fract_type_node;
  tree node_r = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_ur = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_k = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_uk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX ();
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT ();
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (u);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX ();
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (u);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX ();
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (u);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX ();
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (u);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}

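/* For illustration, FX_FTYPE_FX_INT (h) above expands to the two
   declarations

       tree hr_ftype_hr_int
         = build_function_type_list (node_hr, node_hr,
                                     integer_type_node, NULL);
       tree hk_ftype_hk_int
         = build_function_type_list (node_hk, node_hk,
                                     integer_type_node, NULL);

   i.e. the round built-in signatures short _Fract (short _Fract, int)
   and short _Accum (short _Accum, int).  */
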
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      enum machine_mode opmode = GET_MODE (op);
      enum machine_mode mode = insn_data[icode].operand[n+1].mode;

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}

/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      {
        int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

        if (rbit >= (int) GET_MODE_FBIT (mode))
          {
            warning (OPT_Wextra, "rounding to %d bits has no effect for "
                     "fixed-point value with %d fractional bits",
                     rbit, GET_MODE_FBIT (mode));

            return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                                EXPAND_NORMAL);
          }
        else if (rbit <= - (int) GET_MODE_IBIT (mode))
          {
            warning (0, "rounding result will always be 0");
            return CONST0_RTX (mode);
          }

        /* The rounding point RP now satisfies:  -IBIT < RP < FBIT.

           TR 18037 only specifies results for  RP > 0.  However, the
           remaining cases of  -IBIT < RP <= 0  can easily be supported
           without any additional overhead.  */

      } /* rounding */
    } /* switch */

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}

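/* Illustrative user-level view (not part of the compiler): the cases
   above correspond to source code like

       __builtin_avr_nop ();
       __builtin_avr_delay_cycles (1000);
       r = __builtin_avr_insert_bits (0x3210ffff, bits, val);

   where a non-constant first argument to delay_cycles or insert_bits
   runs into the error calls above.  */
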
/* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */

static tree
avr_fold_absfx (tree tval)
{
  if (FIXED_CST != TREE_CODE (tval))
    return NULL_TREE;

  /* Our fixed-points have no padding:  Use double_int payload directly.  */

  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
  double_int ival = fval.data.sext (bits);

  if (!ival.is_negative())
    return tval;

  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */

  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
    ? double_int::max_value (bits, false)
    : -ival;

  return build_fixed (TREE_TYPE (tval), fval);
}

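/* Worked example (illustrative): short _Fract has an 8-bit payload with
   7 fractional bits.  abshr (-1.0hr), payload 0x80, cannot be negated
   without overflow, so it saturates to the maximum payload 0x7f, i.e.
   0.9921875hr; every other negative payload is simply negated.  */
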
/* Implement `TARGET_FOLD_BUILTIN'.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing MAP to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}

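/* Worked example (illustrative) for the constant fold in the
   INSERT_BITS case: with MAP = 0x3210ffff and constant BITS = 0x05,
   nibbles 4..7 of the map move BITS.0..3 into result bits 4..7, so the
   loop computes mask_ior = 0x50 and mask_and = 0x5f and the call folds
   to (VAL | 0x50) & 0x5f, keeping the low nibble of VAL and loading the
   high nibble from the low nibble of BITS.  */
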
/* Initialize the GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"