/* Subroutines for insn-output.c for ATMEL AVR micro controllers
   Copyright (C) 1998-2013 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "c-family/c-common.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "target-def.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
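/* Worked example: LDD accepts displacements 0...63, and an access of
   GET_MODE_SIZE bytes touches displacements disp ... disp + size - 1.
   Hence for HImode (2 bytes) the maximal start offset is 64 - 2 = 62,
   so that the high byte is still reachable at displacement 63.  */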
/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
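/* Usage sketch (the argument values are illustrative only):
   STR_PREFIX_P ("__vector_5", "__vector") evaluates to true; this is how
   avr_set_current_function below validates ISR names.  */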
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
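/* Illustrative round trip (a sketch, not from the original sources):

       AVR_SYMBOL_SET_ADDR_SPACE (sym, ADDR_SPACE_FLASH1);
       gcc_assert (ADDR_SPACE_FLASH1 == AVR_SYMBOL_GET_ADDR_SPACE (sym));

   SET stores AS * SYMBOL_FLAG_MACH_DEP into the 4-bit field; GET divides
   the field by SYMBOL_FLAG_MACH_DEP to recover AS again.  */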
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).  */

const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "",         0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;
/* Prototypes for local helper functions.  */

static const char* out_movqi_r_mr (rtx, rtx[], int*);
static const char* out_movhi_r_mr (rtx, rtx[], int*);
static const char* out_movsi_r_mr (rtx, rtx[], int*);
static const char* out_movqi_mr_r (rtx, rtx[], int*);
static const char* out_movhi_mr_r (rtx, rtx[], int*);
static const char* out_movsi_mr_r (rtx, rtx[], int*);

static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);

/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26
/* Implicit target register of LPM instruction (R0) */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;
/* Current architecture.  */
const avr_arch_t *avr_current_arch;

/* Current device.  */
const avr_mcu_t *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated with __attribute__((progmem)) aka PROGMEM
   or with address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but they must not be local there because of GTY.  */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *lo0 = lo;

  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  *lo = '\0';

  return lo0;
}
/* Custom function to count number of set bits.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
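/* For example, avr_popcount (0x81) is 2:  val &= val-1  clears the lowest
   set bit, so the loop above runs once per 1-bit of VAL.  */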
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
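/* Usage sketch (values are illustrative): with POP_MASK = (1 << 0) | (1 << 8)
   the test accepts a 2-byte constant iff each of its two low bytes is 0x00 or
   0xff, i.e. has popcount 0 or 8.  */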
/* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
   the bit representation of X by "casting" it to CONST_INT.  */

rtx
avr_to_int_mode (rtx x)
{
  enum machine_mode mode = GET_MODE (x);

  return VOIDmode == mode
    ? x
    : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
}
/* Implement `TARGET_OPTION_OVERRIDE'.  */

static void
avr_option_override (void)
{
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no longer able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];

  /* RAM addresses of some SFRs common to all devices in respective arch.  */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp   = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log ();
}
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  int regno;

  for (regno = 0; regno < 32; regno++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  sreg_rtx  = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
/* Implement `REGNO_REG_CLASS'.  */
/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, r27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */

static bool
avr_scalar_mode_supported_p (enum machine_mode mode)
{
  if (ALL_FIXED_POINT_MODE_P (mode))
    return true;

  if (PSImode == mode)
    return true;

  return default_scalar_mode_supported_p (mode);
}
/* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */

bool
avr_decl_flash_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
   address space and FALSE, otherwise.  */

bool
avr_decl_memx_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.  */

bool
avr_mem_flash_p (rtx x)
{
  return (MEM_P (x)
          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
}


/* Return TRUE if X is a MEM rtx located in the 24-bit flash
   address space and FALSE, otherwise.  */

bool
avr_mem_memx_p (rtx x)
{
  return (MEM_P (x)
          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
}
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}

/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting features.  */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        /* Remove the leading '*' added in set_user_assembler_name.  */
        ? 1 + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */

int
avr_accumulate_outgoing_args (void)
{
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
         gcc.c-torture/execute/built-in-setjmp.c
         gcc.c-torture/execute/builtins/sprintf-chk.c  */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}


/* Report contribution of accumulated outgoing arguments to stack size.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in the prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }

  return count;
}
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'.  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}
/* Return true if register FROM can be eliminated via register TO.  */

bool
avr_can_eliminate (const int from, const int to)
{
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
}
/* Implement `TARGET_WARN_FUNC_RETURN'.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}
/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
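/* Worked example (hypothetical function): with a frame pointer (offset 2),
   a 2-byte PC, no call-saved registers to push, a 4-byte frame and no
   accumulated outgoing args, the result is 4 + 0 + 2 + 1 + 2 = 9 bytes.  */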
/* Helper for the function below.  */

static void
avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx, which is offset from
   the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! cfun->machine->is_interrupt
          && ! cfun->machine->is_signal
          && ! cfun->machine->is_naked
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the total length of the sequence of insns INSNS.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement `INCOMING_RETURN_ADDR_RTX'.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}
/* Helper for expand_prologue.  Emit a push of a byte register.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg;
  rtx insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer.  These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              The optimum method depends on function type, stack and
              frame size.  To avoid complex logic, both methods are
              tested and the shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If the user wants sanity checks, -Wstack-usage= can be used.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (Pmode, fp,
                                                        -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
/* Output function prologue.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it. Add 1 to stack
     usage for offset so that SP + .L__stack_offset = return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
/* Implement `EPILOGUE_USES'.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;

  return 0;
}
/* Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
}
/* Output RTL epilogue.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in the prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked functions must not have any instructions after
     their epilogue, see PR42240.  */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}
/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */

static bool
avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
{
  /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
       This hook just serves to hack around PR rtl-optimization/52543 by
       claiming that non-generic addresses were mode-dependent so that
       lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
       RTXes to probe SET and MEM costs and assumes that MEM is always in the
       generic address space which is not true.  */

  return !ADDR_SPACE_GENERIC_P (as);
}
/* Helper function for `avr_legitimate_address_p'.  */

static inline bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
                                                 as, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempt to replace X with a valid
   memory address for an operand of mode MODE.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.  */

rtx
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  if (GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}
/* Implement `TARGET_SECONDARY_RELOAD'.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      enum machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
/* Helper function to print assembler resp. track instruction
   sequence lengths.  Always return "".

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */

static const char*
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
  if (NULL == plen)
    {
      output_asm_insn (tpl, operands);
    }
  else
    {
      if (n_words < 0)
        *plen = -n_words;
      else
        *plen += n_words;
    }

  return "";
}
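/* Typical usage (sketch; template and operands are illustrative):

       avr_asm_len ("ldi %2,hi8(%1)" CR_TAB
                    "mov %0,%2", op, plen, 2);

   An insn output function is run once with PLEN == NULL while printing the
   assembler code and once with PLEN != NULL when the insn's length
   attribute is being computed.  */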
/* Return a pointer register name as a string.  */

static const char*
ptrreg_to_str (int regno)
{
  switch (regno)
    {
    case REG_X: return "X";
    case REG_Y: return "Y";
    case REG_Z: return "Z";
    default:
      output_operand_lossage ("address operand requires constraint for"
                              " X, Y, or Z register");
    }
  return NULL;
}
/* Return the condition name as a string.
   Used in conditional jump constructing.  */

static const char*
cond_string (enum rtx_code code)
{
  switch (code)
    {
    case NE:
      return "ne";
    case EQ:
      return "eq";
    case GE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "pl";
      else
        return "ge";
    case LT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "mi";
      else
        return "lt";
    case GEU:
      return "sh";
    case LTU:
      return "lo";
    default:
      gcc_unreachable ();
    }

  return "";
}
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
/* Output ADDR to FILE as address.  */

static void
avr_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol may be incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return code == '~' || code == '!';
}
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        fprintf (file, "%d", (int) REGNO (x));
      else
        fprintf (file, reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if (AVR_XMEGA && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_current_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section.  */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf (stderr, "\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:",
                        addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call.  */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf (stderr, "\n");
          }
      /* Use normal symbol for direct address, no linker trampoline needed.  */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
/* Worker function for `NOTICE_UPDATE_CC'.  */
/* Update the condition code in the INSN.  */

void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:
            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;
            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */

int
avr_jump_mode (rtx x, rtx insn)
{
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
                                            ? XEXP (x, 0) : x));
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
  int jump_distance = cur_addr - dest_addr;

  if (-63 <= jump_distance && jump_distance <= 62)
    return 1;
  else if (-2046 <= jump_distance && jump_distance <= 2045)
    return 2;
  else if (AVR_HAVE_JMP_CALL)
    return 3;

  return 2;
}
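/* For example, a branch whose target is 100 bytes away misses the -63...62
   window of a plain conditional branch, so mode 2 (branch over an RJMP) is
   chosen; only beyond -2046...2045 does mode 3 use JMP, and only on devices
   with AVR_HAVE_JMP_CALL.  */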
/* Return an AVR conditional jump command.
   X is a comparison RTX.
   LEN is a number returned by the avr_jump_mode function.
   If REVERSE is nonzero then the condition code in X must be reversed.  */

const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }

  return "";
}
2496 /* Output insn cost for next insn. */
2499 avr_final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2500 int num_operands ATTRIBUTE_UNUSED
)
2502 if (avr_log
.rtx_costs
)
2504 rtx set
= single_set (insn
);
2507 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2508 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2510 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2511 rtx_cost (PATTERN (insn
), INSN
, 0,
2512 optimize_insn_for_speed_p()));
2516 /* Return 0 if undefined, 1 if always true or always false. */
2519 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2521 unsigned int max
= (mode
== QImode
? 0xff :
2522 mode
== HImode
? 0xffff :
2523 mode
== PSImode
? 0xffffff :
2524 mode
== SImode
? 0xffffffff : 0);
2525 if (max
&& op
&& CONST_INT_P (x
))
2527 if (unsigned_condition (op
) != op
)
2530 if (max
!= (INTVAL (x
) & max
)
2531 && INTVAL (x
) != 0xff)
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

avr_function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);

/* Worker function for `INIT_CUMULATIVE_ARGS'.  */
/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))

  /* Assume the callee may be tail-called.  */

  cfun->machine->sibcall_fails = 0;
/* Returns the number of registers to allocate for a function argument.  */

avr_num_arg_regs (enum machine_mode mode, const_tree type)
{
  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
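/* Illustrative sketch (not part of GCC): `(size + 1) & ~1' rounds the byte
   count up to the next even number, which is what makes every argument
   start in an even-numbered register.  A tiny standalone check of that
   identity:  */

#include <assert.h>

static int
round_up_to_even (int size)
{
  return (size + 1) & ~1;   /* 0->0, 1->2, 2->2, 3->4, ...  */
}

static void
round_up_to_even_selftest (void)
{
  assert (round_up_to_even (1) == 2);
  assert (round_up_to_even (2) == 2);
  assert (round_up_to_even (3) == 4);   /* odd sizes leave a one-register hole */
  assert (round_up_to_even (8) == 8);
}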
/* Implement `TARGET_FUNCTION_ARG'.  */
/* Controls whether a function argument is passed
   in a register, and which register.  */

avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);
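/* Illustrative sketch (not part of GCC): arguments are allocated downwards
   from R25, so an argument of BYTES bytes occupies registers
   [regno - bytes, regno).  A standalone model of the first-register
   computation; `first_arg_reg' is a hypothetical name.  */

static int
first_arg_reg (int regno_cursor, int bytes)
{
  /* E.g. with the cursor at 26, a 2-byte int lands in R24/R25,
     the next 2-byte argument in R22/R23, and so on down to R8.  */
  return regno_cursor - bytes;
}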
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs have to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  for (regno = cum->regno; regno < cum->regno + bytes; regno++)
    if (fixed_regs[regno])
      warning (0, "fixed register %s used to pass parameter to function",

  if (cum->nregs <= 0)
    {
      cum->regno = FIRST_CUM_REG;
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

      decl_callee = TREE_TYPE (decl_callee);

      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/

/* Return true if a value of mode MODE is read from flash by
   __load_* function from libgcc.  */

avr_load_libgcc_p (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  int n_bytes = GET_MODE_SIZE (mode);

          && avr_mem_flash_p (op));

/* Return true if a value of mode MODE is read by __xload_* function.  */

avr_xload_libgcc_p (enum machine_mode mode)
{
  int n_bytes = GET_MODE_SIZE (mode);

          || avr_current_device->n_flash > 1);
/* Fixme: This is a hack because secondary reloads don't work as expected.

   Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register.  In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST

   Return a QImode d-register or NULL_RTX if nothing found.  */

avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

          && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.  */

avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
      gcc_assert (REG_Z == REGNO (addr));

          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);

        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 3);

        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 3);

        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 3);

        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction
   sequence.  */

avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  rtx src = SET_SRC (single_set (insn));
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  addr_space_t as = MEM_ADDR_SPACE (src);

      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);

      else if (segment == 1)
        {
          avr_asm_len ("clr %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);

          avr_asm_len ("mov %5,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);

  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
      gcc_assert (REG_Z == REGNO (addr));

          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);

            avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                         "%4lpm %B0,%a2", xop, plen, 2);

            if (!reg_unused_after (insn, addr))
              avr_asm_len ("sbiw %2,1", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);

            avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                         "%4lpm %D0,%a2", xop, plen, 2);

            if (!reg_unused_after (insn, addr))
              avr_asm_len ("sbiw %2,3", xop, plen, 1);

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
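/* Illustrative sketch (not part of GCC): for the __flash1..__flash5 spaces
   the 64 KiB segment number is written to RAMPZ before ELPM, and the low
   16 bits go through the Z register.  A standalone model of that address
   split, assuming 24-bit flash addresses:  */

#include <stdint.h>

static void
split_flash_address (uint32_t addr, uint8_t *rampz, uint16_t *z)
{
  *rampz = (uint8_t) (addr >> 16);      /* 64 KiB segment, goes to RAMPZ */
  *z     = (uint16_t) (addr & 0xffff);  /* offset, goes to the Z register */
}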
/* Worker function for xload_8 insn.  */

avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

output_movqi (rtx insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */
    {
      if (test_hard_reg_class (STACK_REG, dest))
        return avr_asm_len ("out %0,%1", operands, plen, -1);
      else if (test_hard_reg_class (STACK_REG, src))
        return avr_asm_len ("in %0,%1", operands, plen, -1);

      return avr_asm_len ("mov %0,%1", operands, plen, -1);

  else if (CONSTANT_P (src))
    {
      output_reload_in_const (operands, NULL_RTX, plen, false);

  else if (MEM_P (src))
    return out_movqi_r_mr (insn, operands, plen); /* mov r,m */

  else if (MEM_P (dest))
    {
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
output_movhi (rtx insn, rtx xop[], int *plen)
{
  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);

  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */
    {
      if (test_hard_reg_class (STACK_REG, dest))
        {
          if (AVR_HAVE_8BIT_SP)
            return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

            return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                "out __SP_H__,%B1", xop, plen, -2);

          /* Use simple load of SP if no interrupts are used.  */

          return TARGET_NO_INTERRUPTS
            ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -2)
            : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                           "out __SP_H__,%B1" CR_TAB
                           "out __SREG__,__tmp_reg__" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -5);

      else if (test_hard_reg_class (STACK_REG, src))
        {
          return !AVR_HAVE_SPH
            ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "clr %B0", xop, plen, -2)
            : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "in %B0,__SP_H__", xop, plen, -2);

      return AVR_HAVE_MOVW
        ? avr_asm_len ("movw %0,%1", xop, plen, -1)
        : avr_asm_len ("mov %A0,%A1" CR_TAB
                       "mov %B0,%B1", xop, plen, -2);

  else if (CONSTANT_P (src))
    {
      return output_reload_inhi (xop, NULL, plen);

  else if (MEM_P (src))
    {
      return out_movhi_r_mr (insn, xop, plen); /* mov r,m */

  else if (MEM_P (dest))
    {
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);

  fatal_insn ("invalid insn:", insn);
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);

  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);

      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but I have seen this situation with extreme optimization
             options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);

      return avr_asm_len ("ldd %0,%1", op, plen, -1);

  return avr_asm_len ("ld %0,%1", op, plen, -1);
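/* Illustrative sketch (not part of GCC): LDD's displacement field only
   reaches 0..63, and MAX_LD_OFFSET keeps the *last* byte of the access in
   range.  A standalone model of the displacement check used above,
   assuming the same 64-byte window:  */

static int
disp_fits_ldd (int disp, int mode_size)
{
  int max_ld_offset = 64 - mode_size;   /* mirrors MAX_LD_OFFSET (MODE) */
  return disp >= 0 && disp <= max_ld_offset;
}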
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it, but I have seen this situation with extreme optimization
         options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)
          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)
        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "sbiw r26,1", op, plen, -4)
        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);

  else if (CONSTANT_ADDRESS_P (base))
    {
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)
        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);

  fatal_insn ("unknown move insn:", insn);
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

      if (reg_base == REG_X)            /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l = 7, ("adiw r26,3" CR_TAB
                            "ld __tmp_reg__,-X" CR_TAB
                            "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            return *l = 5, ("ld %A0,X+" CR_TAB
                            "ld __tmp_reg__,X+" CR_TAB
                            "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l = 4, ("ld %A0,X+" CR_TAB

            return *l = 5, ("ld %A0,X+" CR_TAB

          if (reg_dest == reg_base)
            return *l = 5, ("ldd %D0,%1+3" CR_TAB
                            "ldd %C0,%1+2" CR_TAB
                            "ldd __tmp_reg__,%1+1" CR_TAB
                            "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l = 5, ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1" CR_TAB
                            "ldd __tmp_reg__,%1+2" CR_TAB
                            "ldd %D0,%1+3" CR_TAB
                            "mov %C0,__tmp_reg__");

            return *l = 4, ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1" CR_TAB
                            "ldd %C0,%1+2" CR_TAB

  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "mov r27,__tmp_reg__");

          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB

      if (reg_dest == reg_base)
        return *l = 5, ("ldd %D0,%D1" CR_TAB
                        "ldd %C0,%C1" CR_TAB
                        "ldd __tmp_reg__,%B1" CR_TAB
                        "ldd %A0,%A1" CR_TAB
                        "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l = 5, ("ldd %A0,%A1" CR_TAB
                        "ldd %B0,%B1" CR_TAB
                        "ldd __tmp_reg__,%C1" CR_TAB
                        "ldd %D0,%D1" CR_TAB
                        "mov %C0,__tmp_reg__");

      return *l = 4, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    return *l = 4, ("ld %D0,%1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l = 4, ("ld %A0,%1" CR_TAB

  else if (CONSTANT_ADDRESS_P (base))
    return *l = 8, ("lds %A0,%m1" CR_TAB
                    "lds %B0,%m1+1" CR_TAB
                    "lds %C0,%m1+2" CR_TAB

  fatal_insn ("unknown move insn:", insn);
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (CONSTANT_ADDRESS_P (base))
    return *l = 8, ("sts %m0,%A1" CR_TAB
                    "sts %m0+1,%B1" CR_TAB
                    "sts %m0+2,%C1" CR_TAB

  if (reg_base > 0)                     /* (r) */
    {
      if (reg_base == REG_X)            /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l = 6, ("mov __tmp_reg__,r27" CR_TAB
                                "st X+,__tmp_reg__" CR_TAB

                return *l = 7, ("mov __tmp_reg__,r27" CR_TAB
                                "st X+,__tmp_reg__" CR_TAB

          else if (reg_base == reg_src + 2)
            {
              if (reg_unused_after (insn, base))
                return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                                "mov __tmp_reg__,%D1" CR_TAB
                                "st %0+,__zero_reg__" CR_TAB
                                "st %0,__tmp_reg__" CR_TAB
                                "clr __zero_reg__");

                return *l = 8, ("mov __zero_reg__,%C1" CR_TAB
                                "mov __tmp_reg__,%D1" CR_TAB
                                "st %0+,__zero_reg__" CR_TAB
                                "st %0,__tmp_reg__" CR_TAB
                                "clr __zero_reg__" CR_TAB

          return *l = 5, ("st %0+,%A1" CR_TAB

      return *l = 4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB

  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");

      if (reg_base == REG_X)
        {
          if (reg_src == REG_X)
            {
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB

          else if (reg_src == REG_X - 2)
            {
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB

          return ("adiw r26,%o0" CR_TAB

      return *l = 4, ("std %A0,%A1" CR_TAB
                      "std %B0,%B1" CR_TAB
                      "std %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    return *l = 4, ("st %0,%D1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l = 4, ("st %0,%A1" CR_TAB

  fatal_insn ("unknown move insn:", insn);
output_movsisf (rtx insn, rtx operands[], int *l)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (src)) /* mov r,r */
    {
      if (true_regnum (dest) > true_regnum (src))
        {
          return ("movw %C0,%C1" CR_TAB

          return ("mov %D0,%D1" CR_TAB
                  "mov %C0,%C1" CR_TAB
                  "mov %B0,%B1" CR_TAB

          return ("movw %A0,%A1" CR_TAB

          return ("mov %A0,%A1" CR_TAB
                  "mov %B0,%B1" CR_TAB
                  "mov %C0,%C1" CR_TAB

  else if (CONSTANT_P (src))
    {
      return output_reload_insisf (operands, NULL_RTX, real_l);

  else if (MEM_P (src))
    return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */

  else if (MEM_P (dest))
    {
      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      output_asm_insn (templ, operands);

  fatal_insn ("invalid insn:", insn);
/* Handle loads of 24-bit types from memory to register.  */

avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

      if (reg_base == REG_X)            /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);

          avr_asm_len ("ld %A0,X+" CR_TAB
                       "ld %C0,X", op, plen, -3);

          if (reg_dest != REG_X - 2
              && !reg_unused_after (insn, base))
            {
              avr_asm_len ("sbiw r26,2", op, plen, 1);

      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);

          return avr_asm_len ("ld %A0,%1" CR_TAB
                              "ldd %B0,%1+1" CR_TAB
                              "ldd %C0,%1+2", op, plen, -3);

  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %C0,X", op, plen, -4);

          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);
  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen, -6);

  fatal_insn ("unknown move insn:", insn);
/* Handle store of 24-bit type from register or zero to memory.  */

avr_out_store_psi (rtx insn, rtx *op, int *plen)
{
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("sts %m0,%A1" CR_TAB
                        "sts %m0+1,%B1" CR_TAB
                        "sts %m0+2,%C1", op, plen, -6);

  if (reg_base > 0)                     /* (r) */
    {
      if (reg_base == REG_X)            /* (R26) */
        {
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

      return avr_asm_len ("st %0,%A1" CR_TAB
                          "std %0+1,%B1" CR_TAB
                          "std %0+2,%C1", op, plen, -3);

  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);

      if (reg_base == REG_X)
        {
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:", insn);
/* Move around 24-bit stuff.  */

avr_out_movpsi (rtx insn, rtx *op, int *plen)
{
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

                return avr_asm_len ("movw %A0,%A1", op, plen, 1);

                return avr_asm_len ("mov %B0,%B1" CR_TAB
                                    "mov %A0,%A1", op, plen, 2);

              avr_asm_len ("movw %A0,%A1", op, plen, -1);

              avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);

      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);

      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */

  else if (MEM_P (dest))
    {
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);

  fatal_insn ("invalid insn:", insn);
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);

  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);

      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);

              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

      return avr_asm_len ("std %0,%1", op, plen, -1);

  return avr_asm_len ("st %0,%1", op, plen, -1);
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)
      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);

        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      return reg_unused_after (insn, base)
        : avr_asm_len ("sbiw r26,1", op, plen, 1);

  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);

      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)
        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "sbiw r26,1", op, plen, -4)
        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

  fatal_insn ("unknown move insn:", insn);
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)
      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)
          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st -X,%A1", op, plen, -3);

  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)
            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);

      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)
        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);

  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "adiw r26,2", op, plen, -4)
        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "adiw %r0,2", op, plen, -3);

  fatal_insn ("unknown move insn:", insn);
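/* Illustrative sketch (not part of GCC): on classic AVRs a 16-bit timer
   register latches its high byte in a temporary register, so a volatile
   16-bit store must write the high byte first, and a read must fetch the
   low byte first.  In C, with a hypothetical 16-bit I/O address
   IO16_ADDR:  */

#include <stdint.h>

#define IO16_ADDR 0x84  /* assumption: some 16-bit I/O register pair */

static void
write_io16 (uint16_t val)
{
  volatile uint8_t *lo = (volatile uint8_t *) IO16_ADDR;
  volatile uint8_t *hi = (volatile uint8_t *) (IO16_ADDR + 1);

  *hi = (uint8_t) (val >> 8);   /* high byte first: goes to the temp latch */
  *lo = (uint8_t) val;          /* low byte write commits both bytes */
}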
/* Return 1 if a frame pointer is required for the current function.  */

avr_frame_pointer_required_p (void)
{
  return (cfun->calls_alloca
          || cfun->calls_setjmp
          || cfun->has_nonlocal_label
          || crtl->args.info.nregs == 0
          || get_frame_size () > 0);
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

compare_condition (rtx insn)
{
  rtx next = next_real_insn (insn);

  if (next && JUMP_P (next))
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);

      if (IF_THEN_ELSE == GET_CODE (src))
        return GET_CODE (XEXP (src, 0));

/* Returns true iff INSN is a tst insn that only tests the sign.  */

compare_sign_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == GE || cond == LT);

/* Returns true iff the next insn is a JUMP_INSN with a condition
   that needs to be swapped (GT, GTU, LE, LEU).  */

compare_diff_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;

/* Returns true iff INSN is a compare insn with the EQ or NE condition.  */

compare_eq_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == EQ || cond == NE);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against.  */

  /* MODE of the comparison.  */
  enum machine_mode mode;

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

            avr_asm_len ("or %A0,%C0", xop, plen, 1);

            avr_asm_len ("or %A0,%D0", xop, plen, 1);

      else if (xval == constm1_rtx)
        {
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
                  || reg_unused_after (insn, xreg)))
            {
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);

      /* Comparing against 0 is easy.  */

                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

              avr_asm_len ("cpi %0,%1", xop, plen, 1);

          else if (reg_unused_after (insn, xreg))
            {
              avr_asm_len ("sbci %0,%1", xop, plen, 1);

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

                   : "cpc %0,%2", xop, plen, 1);
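/* Illustrative sketch (not part of GCC): the CP/CPC chain above compares a
   multi-byte value exactly like a wide subtraction that only keeps the
   flags.  A standalone C model of an N-byte unsigned compare built from
   byte-wise subtract-with-borrow:  */

#include <stdint.h>

static int
wide_cmp (const uint8_t *a, const uint8_t *b, int n_bytes)
{
  int borrow = 0, zero = 1, i;

  for (i = 0; i < n_bytes; i++)         /* low byte first: CP, then CPC...  */
    {
      int d = (int) a[i] - (int) b[i] - borrow;
      borrow = d < 0;                   /* carry flag of CP/CPC */
      zero &= (d & 0xff) == 0;          /* Z flag accumulates across bytes */
    }

  return zero ? 0 : (borrow ? -1 : 1);
}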
/* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */

avr_out_compare64 (rtx insn, rtx *op, int *plen)
{
  xop[0] = gen_rtx_REG (DImode, 18);

  return avr_out_compare (insn, xop, plen);

/* Output test instruction for HImode.  */

avr_out_tsthi (rtx insn, rtx *op, int *plen)
{
  if (compare_sign_p (insn))
    {
      avr_asm_len ("tst %B0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0", op, plen, -1);

      avr_out_compare (insn, op, plen);

/* Output test instruction for PSImode.  */

avr_out_tstpsi (rtx insn, rtx *op, int *plen)
{
  if (compare_sign_p (insn))
    {
      avr_asm_len ("tst %C0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0" CR_TAB
                   "or %A0,%C0", op, plen, -2);

      avr_out_compare (insn, op, plen);

/* Output test instruction for SImode.  */

avr_out_tstsi (rtx insn, rtx *op, int *plen)
{
  if (compare_sign_p (insn))
    {
      avr_asm_len ("tst %D0", op, plen, -1);

  else if (reg_unused_after (insn, op[0])
           && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      avr_asm_len ("or %A0,%B0" CR_TAB
                   "or %A0,%D0", op, plen, -3);

      avr_out_compare (insn, op, plen);
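/* Illustrative sketch (not part of GCC): ORing all bytes into one register
   leaves zero iff the whole value is zero, which is why the "or" sequences
   above can replace a full compare for EQ/NE when the register dies:  */

#include <stdint.h>

static int
is_zero_by_or (uint32_t x)
{
  uint8_t acc = (uint8_t) x;            /* %A0 */

  acc |= (uint8_t) (x >> 8);            /* or %A0,%B0 */
  acc |= (uint8_t) (x >> 16);           /* or %A0,%C0 */
  acc |= (uint8_t) (x >> 24);           /* or %A0,%D0 */

  return acc == 0;                      /* Z is set exactly when x == 0 */
}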
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (CONST_INT_P (operands[2]))
    {
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count < 8 && !scratch)
        use_zero_reg = true;

      max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

            avr_asm_len (templ, op, plen, t_len);

          avr_asm_len ("ldi %3,%2", op, plen, 1);

      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);

          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);

      second_label = false;

  else if (MEM_P (op[2]))
    {
      op_mov[0] = op[3] = tmp_reg_rtx;

      out_movqi_r_mr (insn, op_mov, plen);

  else if (register_operand (op[2], QImode))
    {
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);

    fatal_insn ("bad shift insn:", insn);

    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

    avr_asm_len ("mov %3,%4", op, plen, 1);
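/* Illustrative sketch (not part of GCC): the `t_len * count <= max_len'
   test above is a pure code-size tradeoff, emitting COUNT copies of the
   one-position template inline or paying the loop overhead once.  A
   standalone model of that decision:  */

static int
shift_inline_p (int t_len, int count, int loop_overhead)
{
  /* Loop overhead is the counter setup plus decrement-and-branch words,
     3..5 words depending on where the counter lives (see above).  */
  int max_len = t_len + loop_overhead;

  return t_len * count <= max_len;
}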
/* 8bit shift left ((char)x << i) */

ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
        {
          if (INTVAL (operands[2]) < 8)

          return ("lsl %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          return ("ror %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);

/* 16bit shift left ((short)x << i) */

ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
        {
          if (INTVAL (operands[2]) < 16)

          return ("clr %B0" CR_TAB

          if (optimize_size && scratch)

            return ("swap %A0" CR_TAB
                    "andi %B0,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB
                    "andi %A0,0xf0" CR_TAB

            return ("swap %A0" CR_TAB
                    "ldi %3,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB

          break;  /* optimize_size ? 6 : 8 */

          break;  /* scratch ? 5 : 6 */

            return ("lsl %A0" CR_TAB
                    "andi %B0,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB
                    "andi %A0,0xf0" CR_TAB

            return ("lsl %A0" CR_TAB
                    "ldi %3,0xf0" CR_TAB
                    "eor %B0,%A0" CR_TAB

          break;  /* scratch ? 5 : 6 */

          return ("clr __tmp_reg__" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB

          return *len = 2, ("mov %B0,%A1" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB
                  "ldi %3,0xf0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          return ("mov %B0,%A0" CR_TAB

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x20" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %B0,%A0" CR_TAB
                    "ldi %3,0xe0" CR_TAB

            return ("set" CR_TAB
                    "clr __zero_reg__");

          return ("mov %B0,%A0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %B0,0x40" CR_TAB
                    "mul %A0,%B0" CR_TAB
                    "clr __zero_reg__");

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x40" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && ldi_ok)

            return ("mov %B0,%A0" CR_TAB
                    "ldi %A0,6" "\n1:\t"

          if (optimize_size && scratch)

            return ("clr %B0" CR_TAB

          return ("clr %B0" CR_TAB

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
/* 24-bit shift left */

avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
          if (INTVAL (op[2]) < 24)

          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %C0", op, plen, 3);

            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);

              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);

            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);

          return avr_asm_len ("clr %C0" CR_TAB
                              "clr %A0", op, plen, 5);

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %C0", insn, op, plen, 3);

/* 32bit shift left ((long)x << i) */

ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
        {
          if (INTVAL (operands[2]) < 32)

          return *len = 3, ("clr %D0" CR_TAB

          return ("clr %D0" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB

              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB

              return *len = 3, ("movw %C0,%A1" CR_TAB

              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB

          return ("mov %D0,%A1" CR_TAB

          return ("clr %D0" CR_TAB

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
/* 8bit arithmetic shift right ((signed char)x >> i) */

ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
        {
          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("asr %0" CR_TAB

          return ("bst %0,6" CR_TAB

          if (INTVAL (operands[2]) < 8)

          return ("lsl %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);

/* 16bit arithmetic shift right ((signed short)x >> i) */

ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
        {
          /* XXX try to optimize this too? */

          break;  /* scratch ? 5 : 6 */

          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB

          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return *len = 3, ("mov %A0,%B0" CR_TAB

              return *len = 4, ("mov %A0,%B1" CR_TAB

          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x20" CR_TAB
                    "muls %B0,%A0" CR_TAB
                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %A0,%B0" CR_TAB
                    "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x10" CR_TAB
                    "muls %B0,%A0" CR_TAB
                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %A0,%B0" CR_TAB
                    "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x08" CR_TAB
                    "muls %B0,%A0" CR_TAB
                    "sbc %B0,%B0" CR_TAB
                    "clr __zero_reg__");

          break;  /* scratch ? 5 : 7 */

          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "mov %B0,%A0" CR_TAB

          if (INTVAL (operands[2]) < 16)

          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB

  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
/* 24-bit arithmetic shift right */

avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
          return avr_asm_len ("mov %A0,%B1" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "dec %C0", op, plen, 5);

          return avr_asm_len ("clr %C0" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "mov %A0,%B1", op, plen, 5);

          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

          if (INTVAL (op[2]) < 24)

          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %A0", insn, op, plen, 3);

/* 32-bit arithmetic shift right ((signed long)x >> i) */

ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
        {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB

              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB

              return *len = 5, ("movw %A0,%C1" CR_TAB

              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB

          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "mov %B0,%D0" CR_TAB

          if (INTVAL (operands[2]) < 32)

            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB

          return *len = 5, ("lsl %D0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0" CR_TAB
                            "mov %C0,%A0" CR_TAB

  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
/* 8-bit logic shift right  ((unsigned char) x >> i) */
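/* Illustrative C model (not part of this file) of the SWAP/ANDI idiom
   the shift-by-4 case below relies on for LD registers: swapping nibbles
   and masking the upper nibble equals a logic shift right by 4.  */
#if 0
#include <stdint.h>

static uint8_t
lshr4 (uint8_t x)
{
  uint8_t swapped = (uint8_t) ((x << 4) | (x >> 4));  /* swap %0      */
  return swapped & 0x0f;                              /* andi %0,0x0f */
}
#endif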
lshrqi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 8)

          return ("lsr %0" CR_TAB

          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))

            return ("swap %0" CR_TAB

          return ("lsr %0" CR_TAB

          return ("rol %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
/* 16-bit logic shift right  ((unsigned short) x >> i) */

lshrhi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 16)

          return ("clr %B0" CR_TAB

          if (optimize_size && scratch)

            return ("swap %B0" CR_TAB
                    "andi %A0,0x0f" CR_TAB
                    "eor %A0,%B0" CR_TAB
                    "andi %B0,0x0f" CR_TAB

            return ("swap %B0" CR_TAB
                    "ldi %3,0x0f" CR_TAB
                    "eor %A0,%B0" CR_TAB

          break;  /* optimize_size ? 6 : 8 */

          break;  /* scratch ? 5 : 6 */

            return ("lsr %B0" CR_TAB
                    "andi %A0,0x0f" CR_TAB
                    "eor %A0,%B0" CR_TAB
                    "andi %B0,0x0f" CR_TAB

            return ("lsr %B0" CR_TAB
                    "ldi %3,0x0f" CR_TAB
                    "eor %A0,%B0" CR_TAB

          break;  /* scratch ? 5 : 6 */

          return ("clr __tmp_reg__" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          return *len = 2, ("mov %A0,%B1" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB
                  "ldi %3,0x0f" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          return ("mov %A0,%B0" CR_TAB

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x08" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && scratch)

            return ("mov %A0,%B0" CR_TAB
                    "ldi %3,0x07" CR_TAB

            return ("set" CR_TAB
                    "clr __zero_reg__");

          return ("mov %A0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)

            return ("ldi %A0,0x04" CR_TAB
                    "mul %B0,%A0" CR_TAB
                    "clr __zero_reg__");

          if (AVR_HAVE_MUL && scratch)

            return ("ldi %3,0x04" CR_TAB
                    "clr __zero_reg__");

          if (optimize_size && ldi_ok)

            return ("mov %A0,%B0" CR_TAB
                    "ldi %B0,6" "\n1:\t"

          if (optimize_size && scratch)

            return ("clr %A0" CR_TAB

          return ("clr %A0" CR_TAB

  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
/* 24-bit logic shift right */

avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)

  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))

      switch (INTVAL (op[2]))

          return avr_asm_len ("mov %A0,%B1" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "clr %C0", op, plen, 3);

          return avr_asm_len ("clr %C0" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "mov %A0,%B1", op, plen, 3);

          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %C0", op, plen, 2);

          if (INTVAL (op[2]) < 24)

            return avr_asm_len ("clr %A0" CR_TAB
                                "clr %C0", op, plen, 5);

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %A0", insn, op, plen, 3);


/* 32-bit logic shift right  ((unsigned long) x >> i) */

lshrsi3_out (rtx insn, rtx operands[], int *len)

  if (GET_CODE (operands[2]) == CONST_INT)

      switch (INTVAL (operands[2]))

          if (INTVAL (operands[2]) < 32)

          return *len = 3, ("clr %D0" CR_TAB

          return ("clr %D0" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB

              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB

            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB

              return *len = 3, ("movw %A0,%C1" CR_TAB

              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB

            return *len = 4, ("mov %A0,%D1" CR_TAB

          return ("clr %A0" CR_TAB

  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
/* Output addition of register XOP[0] and compile time constant XOP[2].
   CODE == PLUS:  perform addition by using ADD instructions or
   CODE == MINUS: perform addition by using SUB instructions:

      XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

      XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.

   If OUT_LABEL is true, print the final 0: label which is needed for
   saturated addition / subtraction.  The only case where OUT_LABEL = false
   is useful is for saturated addition / subtraction performed during
   fixed-point rounding, cf. `avr_out_round'.  */
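/* A rough C model -- an illustrative sketch, not compiler code -- of the
   operation implemented below for the 16-bit unsigned-saturation case:
   plain byte-wise addition followed by clamping when carry is set.  */
#if 0
#include <stdint.h>

static uint16_t
usadd16 (uint16_t a, uint16_t b)
{
  uint16_t sum = (uint16_t) (a + b);
  return sum < a ? 0xffff : sum;   /* carry set -> saturate to 0xffff */
}
#endif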
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)

  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  enum machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no longer true once the operation has started
     because the carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",

            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",

  if (reg_overlap_mentioned_p (xop[0], xop[2]))
    {
      gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)

    xval = simplify_unary_operation (NEG, imode, xval, imode);

  if (SS_PLUS == code_sat && MINUS == code
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

              avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",

              if (n_bytes == 2 && PLUS == code)

          avr_asm_len (code == PLUS
                       ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",

      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && i == n_bytes - 1)
        {
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);

          gcc_assert (plen != NULL || REG_P (op[2]));

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);

    } /* for all sub-bytes */

  if (UNKNOWN == code_sat)

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where A is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.

     unsigned
     operation        | code |  sat if  |    b is     | sat value |  case
     -----------------+------+----------+-------------+-----------+-------
     +  as  a + b     | add  |  C == 1  | const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  | sub  |  C == 0  | const       | u+ = 0xff |  [2u]
     -  as  a - b     | sub  |  C == 1  | const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  | add  |  C == 0  | const       | u- = 0    |  [4u]

     signed
     operation        | code |  sat if  |    b is     | sat value |  case
     -----------------+------+----------+-------------+-----------+-------
     +  as  a + b     | add  |  V == 1  | const, reg  |    s+     |  [1s]
     +  as  a - (-b)  | sub  |  V == 1  | const       |    s+     |  [2s]
     -  as  a - b     | sub  |  V == 1  | const, reg  |    s-     |  [3s]
     -  as  a + (-b)  | add  |  V == 1  | const       |    s-     |  [4s]

       s+  =  b < 0  ?  -0x80 :  0x7f
       s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.  */
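/* Illustrative sketch (not compiler code) of the signed saturation
   values s+ and s- tabulated above.  */
#if 0
static signed char
sat_value (signed char b, int is_minus)
{
  if (is_minus)
    return b < 0 ? 0x7f : -0x80;   /* s- */
  else
    return b < 0 ? -0x80 : 0x7f;   /* s+ */
}
#endif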
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
         ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))

            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);

            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);

      else if (sign == 0 && PLUS == code)

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "dec %0", op, plen, 3);

            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbci %0,0", op, plen, 4);

      else if (sign == 0 && MINUS == code)

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "inc %0", op, plen, 3);

            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbci %0,-1", op, plen, 4);

      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

            avr_asm_len ("%~call __clr_8", op, plen, len_call);

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);

      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

            avr_asm_len ("sec" CR_TAB
                         "%~call __sbc_8", op, plen, 1 + len_call);

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

          avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);

            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",

      break;  /* US_PLUS */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

        avr_asm_len ("%~call __clr_8", op, plen, len_call);

        avr_asm_len ("clr %0", op, plen, 1);

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

            avr_asm_len ("movw %0,%1", op, plen, 1);

            avr_asm_len ("mov %A0,%1" CR_TAB
                         "mov %B0,%1", op, plen, 2);

      else if (n_bytes > 2)

          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);

  if (need_copy && n_bytes == 8)
    {
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);

        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);

    avr_asm_len ("0:", op, plen, 0);


/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:

      XOP[0] = XOP[0] +/- XOP[2]

   This is a helper for the function below.  The only insns that need this
   are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
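/* Illustrative sketch (not compiler code) of the trick used below: AVR
   has SUBI/SBCI but no add-immediate, so a constant K is added by
   subtracting -K, which is what the lo8(-(%2))/hi8(-(%2)) operands
   express for the symbolic constant.  */
#if 0
#include <stdint.h>

static uint16_t
add_const16 (uint16_t r, uint16_t k)
{
  /* subi %A0,lo8(-K) / sbci %B0,hi8(-K) computes r - (-k) == r + k.  */
  return (uint16_t) (r - (uint16_t) -k);
}
#endif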
avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)

  enum machine_mode mode = GET_MODE (xop[0]);

  /* Only pointer modes want to add symbols.  */

  gcc_assert (mode == HImode || mode == PSImode);

  *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;

  avr_asm_len (PLUS == code
               ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
               : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",

  if (PSImode == mode)
    avr_asm_len (PLUS == code
                 ? "sbci %C0,hlo8(-(%2))"
                 : "sbci %C0,hlo8(%2)", xop, plen, 1);


/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.

   INSN is a single_set insn or an insn pattern with a binary operation as
   SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.

   XOP are the operands of INSN.  In the case of 64-bit operations with
   constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
   The non-saturating insns up to 32 bits may or may not supply a "d" class
   scratch as XOP[3].

   If PLEN == NULL output the instructions.
   If PLEN != NULL set *PLEN to the length of the sequence in words.

   PCC is a pointer to store the instructions' effect on cc0.

   PLEN and PCC default to NULL.

   OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.  */
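/* Sketch (illustrative only, not compiler code) of the "work out the
   shortest sequence" pattern used below: both the ADD and the SUB flavor
   are first run with PLEN != NULL, which merely counts words without
   printing, and the cheaper flavor is then actually emitted.  */
#if 0
static int
shorter_flavor (int len_minus, int len_plus)
{
  /* Ties go to the SUB flavor, matching the "<=" comparison below.  */
  return len_minus <= len_plus ? len_minus : len_plus;
}
#endif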
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)

  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;

  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  rtx xdest = SET_DEST (xpattern);
  enum machine_mode mode = GET_MODE (xdest);
  enum machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)

  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);

      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);

      && !CONST_INT_P (xop[2])
      && !CONST_FIXED_P (xop[2]))
    return avr_out_plus_symbol (xop, code, plen, pcc);

  op[0] = avr_to_int_mode (xop[0]);
  op[1] = avr_to_int_mode (xop[1]);
  op[2] = avr_to_int_mode (xop[2]);

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;

  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform
   the operation; otherwise, set *PLEN to the length of the instruction
   sequence (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit
   clobber register or SCRATCH if no clobber register is needed for the
   operation.  INSN is an INSN_P or a pattern of an insn.  */
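/* Illustrative C model (not compiler code) of the per-byte case analysis
   below for IOR: all-zero bytes are skipped, LD registers take ORI,
   single-bit bytes map to SET/BLD (the SET can be shared between bytes,
   which this rough cost model ignores), and the rest load the clobber
   register and OR it in.  */
#if 0
#include <stdint.h>

static int
ior_byte_cost (uint8_t val8, int ld_reg_p)
{
  if (val8 == 0)
    return 0;                             /* nothing to do           */
  if (ld_reg_p)
    return 1;                             /* ori %0,val8             */
  if (__builtin_popcount (val8) == 1)
    return 2;                             /* set + bld %0,log2(val8) */
  return 2;                               /* ldi %2,val8 + or %0,%2  */
}
#endif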
avr_out_bitop (rtx insn, rtx *xop, int *plen)

  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX */

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[1] = GEN_INT (val8);

            avr_asm_len ("ori %0,%1", op, plen, 1);

                avr_asm_len ("set", op, plen, 1);

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);

              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);

            avr_asm_len ("clr %0", op, plen, 1);

            avr_asm_len ("andi %0,%1", op, plen, 1);

                avr_asm_len ("clt", op, plen, 1);

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);

            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            avr_asm_len ("subi %0,%1", op, plen, 1);

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);

          /* Unknown rtx_code */

    } /* for all sub-bytes */
/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
   PLEN != NULL: Set *PLEN to the length of that sequence.  */
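/* Sketch (not compiler code) of the trick used below: "rcall ." pushes a
   return address and thus drops SP by the PC width (2 or 3 bytes) with a
   single one-word instruction; any remaining bytes use PUSH (or POP when
   growing SP).  This mirrors the two loops in the function.  */
#if 0
static int
sp_sub_len (int addend, int pc_len)   /* addend < 0: SP shrinks */
{
  int len = 0;
  while (addend <= -pc_len)
    {
      addend += pc_len;               /* rcall .            */
      len++;
    }
  while (addend++ < 0)
    len++;                            /* push __zero_reg__  */
  return len;
}
#endif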
avr_out_addto_sp (rtx *op, int *plen)

  int pc_len = AVR_2_BYTE_PC ? 2 : 3;
  int addend = INTVAL (op[0]);

      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);

      while (addend <= -pc_len)
        {
          addend += pc_len;
          avr_asm_len ("rcall .", op, plen, 1);
        }

      while (addend++ < 0)
        avr_asm_len ("push __zero_reg__", op, plen, 1);

  else if (addend > 0)
    {
      if (flag_verbose_asm || flag_print_asm_name)
        avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);

      while (addend-- > 0)
        avr_asm_len ("pop __tmp_reg__", op, plen, 1);
/* Outputs instructions needed for fixed point type conversion.
   This includes converting between any fixed point type, as well
   as converting to any integer type.  Conversion between integer
   types is not supported.

   Converting signed fractional types requires a bit shift if converting
   to or from any unsigned fractional type because the decimal place is
   shifted by 1 bit.  When the destination is a signed fractional, the sign
   is stored in either the carry or T bit.  */
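/* Illustrative sketch (not compiler code) of the 1-bit shift mentioned
   above: a signed fract has one fraction bit less than an unsigned fract
   of the same width, so converting between them shifts the whole bit
   pattern by one position.  Shown for Q0.7 -> Q0.8 with a non-negative
   input (the only case where the unsigned result is representable).  */
#if 0
#include <stdint.h>

static uint8_t
sfract_to_ufract (int8_t x)      /* Q0.7 -> Q0.8, x assumed >= 0 */
{
  return (uint8_t) (x << 1);     /* value v: v*2^7 -> v*2^8      */
}
#endif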
avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)

  RTX_CODE shift = UNKNOWN;
  bool sign_in_carry = false;
  bool msb_in_carry = false;
  bool lsb_in_tmp_reg = false;
  bool lsb_in_carry = false;
  bool frac_rounded = false;
  const char *code_ashift = "lsl %0";

#define MAY_CLOBBER(RR)                                                 \
  /* Shorthand used below.  */                                          \
  ((sign_bytes                                                          \
    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
   || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb))             \
   || (reg_unused_after (insn, all_regs_rtx[RR])                        \
       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))

  struct
  {
    /* bytes       : Length of operand in bytes.
       ibyte       : Length of integral part in bytes.
       fbyte, fbit : Length of fractional part in bytes, bits.  */

    bool sbit;
    unsigned fbit, bytes, ibyte, fbyte;
    unsigned regno, regno_msb;
  } dest, src, *val[2] = { &dest, &src };

  /* Step 0:  Determine information on source and destination operand we
     ======   will need in the remainder.  */

  for (i = 0; i < sizeof (val) / sizeof (*val); i++)
    {
      enum machine_mode mode;

      xop[i] = operands[i];

      mode = GET_MODE (xop[i]);

      val[i]->bytes = GET_MODE_SIZE (mode);
      val[i]->regno = REGNO (xop[i]);
      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;

      if (SCALAR_INT_MODE_P (mode))
        {
          val[i]->sbit = intsigned;

      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
        {
          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
          val[i]->fbit = GET_MODE_FBIT (mode);

        fatal_insn ("unsupported fixed-point conversion", insn);

      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
    }

  // Byte offset of the decimal point taking into account different place
  // of the decimal point in input and output and different register numbers
  // of input and output.
  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;

  // Number of destination bytes that will come from sign / zero extension.
  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);

  // Number of bytes at the low end to be filled with zeros.
  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);

  // Do we have a 16-bit register that is cleared?
  rtx clrw = NULL_RTX;

  bool sign_extend = src.sbit && sign_bytes;

  if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
    shift = ASHIFT;
  else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
    shift = ASHIFTRT;
  else if (dest.fbit % 8 == src.fbit % 8)
    ;

  /* If we need to round the fraction part, we might need to save/round it
     before clobbering any of it in Step 1.  Also, we might want to do
     the rounding now to make use of LD_REGS.  */
  if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
      && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
      && !TARGET_FRACT_CONV_TRUNC)
    {
         (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
         && dest.regno - offset - 1 >= dest.regno);
      unsigned s0 = dest.regno - offset - 1;
      bool use_src = true;
      unsigned sn;
      unsigned copied_msb = src.regno_msb;
      bool have_carry = false;

      if (src.ibyte > dest.ibyte)
        copied_msb -= src.ibyte - dest.ibyte;

      for (sn = s0; sn <= copied_msb; sn++)
        if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
            && !reg_unused_after (insn, all_regs_rtx[sn]))

      if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
        {
          avr_asm_len ("tst %0" CR_TAB
                       "brpl 0f",
                       &all_regs_rtx[src.regno_msb], plen, 2);

              if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
                avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
              else
                avr_asm_len ("sec" CR_TAB
                             "cpc %0,__zero_reg__",
                             &all_regs_rtx[sn], plen, 2);

              avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);

          avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
                       &all_regs_rtx[s0], plen, 1);
          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
            avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
          avr_asm_len ("\n0:", NULL, plen, 0);
          frac_rounded = true;
        }
      else if (use_src && overlap)
        {
          avr_asm_len ("clr __tmp_reg__" CR_TAB
                       "sbrc %1,0" CR_TAB
                       "dec __tmp_reg__", xop, plen, 1);

              avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);

              avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);

            avr_asm_len ("clt" CR_TAB
                         "bld __tmp_reg__,7" CR_TAB
                         "adc %0,__tmp_reg__",
                         &all_regs_rtx[s0], plen, 1);

            avr_asm_len ("lsr __tmp_reg__" CR_TAB
                         "add %0,__tmp_reg__",
                         &all_regs_rtx[s0], plen, 2);
          for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
            avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
          frac_rounded = true;
        }

            = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
               && (IN_RANGE (s0, dest.regno, dest.regno_msb)
                   || reg_unused_after (insn, all_regs_rtx[s0])));
          xop[2] = all_regs_rtx[s0];
          unsigned sn = src.regno;
          if (!use_src || sn == s0)
            avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
          /* We need to consider to-be-discarded bits
             if the value is negative.  */

              avr_asm_len ("tst %0" CR_TAB
                           "brpl 0f",
                           &all_regs_rtx[src.regno_msb], plen, 2);
              /* Test to-be-discarded bytes for any nonzero bits.
                 ??? Could use OR or SBIW to test two registers at once.  */

                avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);

                avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
          /* Set bit 0 in __tmp_reg__ if any of the lower bits was set.  */

            avr_asm_len ("breq 0f" CR_TAB
                         "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",

            avr_asm_len ("breq 0f" CR_TAB
                         "set" CR_TAB
                         "bld __tmp_reg__,0\n0:",

          lsb_in_tmp_reg = true;
  /* Step 1:  Clear bytes at the low end and copy payload bits from source
     ======   to destination.  */

  int step = offset < 0 ? 1 : -1;
  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;

  // We cleared at least that number of registers.

  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
    {
      // Next regno of destination is needed for MOVW
      unsigned d1 = d0 + step;

      // Current and next regno of source
      signed s0 = d0 - offset;
      signed s1 = s0 + step;

      // Must current resp. next regno be CLRed?  This applies to the low
      // bytes of the destination that have no associated source bytes.
      bool clr0 = s0 < (signed) src.regno;
      bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;

      // First gather what code to emit (if any) and additional step to
      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
      // is the source rtx for the current loop iteration.
      const char *code = NULL;
      int stepw = 0;

          if (AVR_HAVE_MOVW && clr1 && clrw)
            {
              xop[2] = all_regs_rtx[d0 & ~1];

              code = "movw %2,%3";

              xop[2] = all_regs_rtx[d0];

              && d0 % 2 == (step > 0))
            {
              clrw = all_regs_rtx[d0 & ~1];

      else if (offset && s0 <= (signed) src.regno_msb)
        {
          int movw = AVR_HAVE_MOVW && offset % 2 == 0
                     && d0 % 2 == (offset > 0)
                     && d1 <= dest.regno_msb && d1 >= dest.regno
                     && s1 <= (signed) src.regno_msb
                     && s1 >= (signed) src.regno;

          xop[2] = all_regs_rtx[d0 & ~movw];
          xop[3] = all_regs_rtx[s0 & ~movw];
          code = movw ? "movw %2,%3" : "mov %2,%3";
          stepw = step * movw;

      if (sign_extend && shift != ASHIFT && !sign_in_carry
          && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
        {
          /* We are going to override the sign bit.  If we sign-extend,
             store the sign in the Carry flag.  This is not needed if
             the destination will be ASHIFTed in the remainder because
             the ASHIFT will set Carry without extra instruction.  */

          avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
          sign_in_carry = true;
        }

      unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;

      if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
          && src.ibyte > dest.ibyte
          && (d0 == src_msb || d0 + stepw == src_msb))
        {
          /* We are going to override the MSB.  If we shift right,
             store the MSB in the Carry flag.  This is only needed if
             we don't sign-extend because with sign-extension the MSB
             (the sign) will be produced by the sign extension.  */

          avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
          msb_in_carry = true;
        }

      unsigned src_lsb = dest.regno - offset - 1;

      if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
          && (d0 == src_lsb || d0 + stepw == src_lsb))
        {
          /* We are going to override the new LSB; store it into carry.  */

          avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
          code_ashift = "rol %0";
          lsb_in_carry = true;
        }

      avr_asm_len (code, xop, plen, 1);
    }

  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
     ======   for signed input and unsigned output.  */

  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
    {
      unsigned s0 = dest.regno - offset - 1;

      /* n1169 4.1.4 says:
         "Conversions from a fixed-point to an integer type round toward zero."
         Hence, converting a fract type to integer only gives a non-zero result

      if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
          && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
          && !TARGET_FRACT_CONV_TRUNC)
        {
          gcc_assert (s0 == src.regno_msb);
          /* Check if the input is -1.  We do that by checking if negating
             the input causes an integer overflow.  */
          unsigned sn = src.regno;
          avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);

            avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);

          /* Overflow goes with set carry.  Clear carry otherwise.  */
          avr_asm_len ("brvs 0f" CR_TAB
                       "clc\n0:", NULL, plen, 2);
        }
      /* Likewise, when converting from accumulator types to integer, we
         need to round up negative values.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
               && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
               && !TARGET_FRACT_CONV_TRUNC

          bool have_carry = false;

          xop[2] = all_regs_rtx[s0];
          if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
            avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
          avr_asm_len ("tst %0" CR_TAB
                       "brpl 0f",
                       &all_regs_rtx[src.regno_msb], plen, 2);
          if (!lsb_in_tmp_reg)
            {
              unsigned sn = src.regno;

                  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],

                avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);

              lsb_in_tmp_reg = !MAY_CLOBBER (s0);

          /* Add in C and the rounding value 127.  */
          /* If the destination msb is a sign byte, and in LD_REGS,
             grab it as a temporary.  */

              && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],

              xop[3] = all_regs_rtx[dest.regno_msb];
              avr_asm_len ("ldi %3,127", xop, plen, 1);
              avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
                            : have_carry ? "adc %2,%3"
                            : lsb_in_tmp_reg ? "add __tmp_reg__,%3"

              /* Fall back to use __zero_reg__ as a temporary.  */
              avr_asm_len ("dec __zero_reg__", NULL, plen, 1);

                avr_asm_len ("clt" CR_TAB
                             "bld __zero_reg__,7", NULL, plen, 2);

                avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);

              avr_asm_len ((have_carry && lsb_in_tmp_reg
                            ? "adc __tmp_reg__,__zero_reg__"
                            : have_carry ? "adc %2,__zero_reg__"
                            : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
                            : "add %2,__zero_reg__"),

              avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);

          for (d0 = dest.regno + zero_bytes;
               d0 <= dest.regno_msb - sign_bytes; d0++)
            avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);

          avr_asm_len (lsb_in_tmp_reg
                       ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",

      else if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      code_ashift = "rol %0";
      lsb_in_carry = true;
    }

  if (shift == ASHIFT)
    {
      for (d0 = dest.regno + zero_bytes;
           d0 <= dest.regno_msb - sign_bytes; d0++)
        {
          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
          code_ashift = "rol %0";
        }

      lsb_in_carry = false;
      sign_in_carry = true;
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======   it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
    }

  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  unsigned copies = 0;
  rtx movw = sign_extend ? NULL_RTX : clrw;

  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
    {
      if (AVR_HAVE_MOVW && movw
          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
        {
          xop[2] = all_regs_rtx[d0];

          avr_asm_len ("movw %2,%3", xop, plen, 1);

          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
                       &all_regs_rtx[d0], plen, 1);

          if (++copies >= 2 && !movw && d0 % 2 == 1)
            movw = all_regs_rtx[d0-1];

  /* Step 4:  Right shift the destination.  This might be needed for
     ======   conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      if (sign_extend || msb_in_carry)
        code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
        code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
        {
          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
          code_ashiftrt = "ror %0";
        }
    }
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */
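/* Sketch (not compiler code) of the round-to-nearest scheme used below
   on the raw bit pattern, ignoring the saturation that the real code
   performs first: add half of the rounding increment, then clear all
   fraction bits below the rounding point.  Shown for an 8-bit pattern
   with FBIT fraction bits and rounding point RP.  */
#if 0
#include <stdint.h>

static uint8_t
round_bits (uint8_t x, int rp, int fbit)   /* keep bits >= 2^(-rp) */
{
  uint8_t add  = (uint8_t) (1u << (fbit - 1 - rp));   /* 1/2 * 2^(-rp)    */
  uint8_t mask = (uint8_t) -(uint8_t) (add << 1);     /* -2*add, cf. below */
  return (uint8_t) ((x + add) & mask);
}
#endif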
avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)

  enum machine_mode mode = GET_MODE (xop[0]);
  enum machine_mode imode = int_mode_for_mode (mode);

  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));

  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
         ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
         : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);

  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:      2^(-RP-1) ...
  //                                          Rounding point  ^^^^^^^
  //                                          Added above     ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  rtx xmask = immed_double_int_const (-i_add - i_add, imode);

  xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));

  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

    *plen = len_add + len_and;
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.  */
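/* Sketch (not compiler code) of the scratch-free HImode byte swap the
   function below emits as a special case: three XORs exchange two
   registers without a temporary.  Only valid for distinct registers,
   which the caller checks with rtx_equal_p.  */
#if 0
#include <stdint.h>

static void
xor_swap (uint8_t *a, uint8_t *b)
{
  *a ^= *b;
  *b ^= *a;
  *a ^= *b;
}
#endif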
avr_rotate_bytes (rtx operands[])

  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];

  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  else if ((mode == SImode && !same_reg) || !overlapped)

    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */

  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;

  /* HImode byte swap is a special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;

      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }

#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];

      gcc_assert (size <= MAX_SIZE);

      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
        }

      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */

      for (i = 0; i < size; i++)
        if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
          for (j = 0; j < size; j++)
            if (j != i && rtx_equal_p (move[j].src, move[i].dst))
              {
                /* The dst of move i is the src of move j.  */

      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */

          /* Emit move where dst is not also a src or we have used that

          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;

      while (blocked != -1);
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN.  LEN is the
   initially computed length of the insn.  */

avr_adjust_insn_length (rtx insn, int len)

  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (-1 == recog_memoized (insn))

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  */

    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
/* Return nonzero if register REG is dead after INSN.  */

reg_unused_after (rtx insn, rtx reg)

  return (dead_or_set_p (insn, reg)
          || (REG_P (reg) && _reg_unused_after (insn, reg)));


/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

_reg_unused_after (rtx insn, rtx reg)

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))

  while ((insn = NEXT_INSN (insn)))
    {
      code = GET_CODE (insn);

      /* If this is a label that existed before reload, then the register
         is dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)

      if (code == JUMP_INSN)

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
            {
              rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
              rtx set = single_set (this_insn);

              if (CALL_P (this_insn))

              else if (JUMP_P (this_insn))
                {
                  if (INSN_ANNULLED_BRANCH_P (this_insn))

              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))

              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  if (GET_CODE (SET_DEST (set)) != MEM)

                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))

      else if (code == JUMP_INSN)

      if (code == CALL_INSN)
        {
          rtx tem;

          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))

          if (call_used_regs[REGNO (reg)])

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))

      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return GET_CODE (SET_DEST (set)) != MEM;

      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */

avr_assemble_integer (rtx x, unsigned int size, int aligned_p)

  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

  else if (CONST_FIXED_P (x))
    {
      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      for (n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

  return default_assemble_integer (x, size, aligned_p);
/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
/* Return value is nonzero if pseudos that have been
   assigned to registers of class CLASS would likely be spilled
   because registers of CLASS are needed for spill registers.  */

avr_class_likely_spilled_p (reg_class_t c)

  return (c != ALL_REGS && c != ADDW_REGS);


/* Valid attributes:
   progmem   -  Put data into program memory.
   signal    -  Make the function a hardware interrupt handler.
                After the function prologue, interrupts remain disabled.
   interrupt -  Make the function a hardware interrupt handler.  Before the
                function prologue, interrupts are enabled by means of SEI.
   naked     -  Don't generate function prologue/epilogue and RET

/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */
avr_handle_progmem_attribute (tree *node, tree name,
                              tree args ATTRIBUTE_UNUSED,
                              int flags ATTRIBUTE_UNUSED,

  if (TREE_CODE (*node) == TYPE_DECL)
    {
      /* This is really a decl attribute, not a type attribute,
         but try to handle it for GCC 3.0 backwards compatibility.  */

      tree type = TREE_TYPE (*node);
      tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
      tree newtype = build_type_attribute_variant (type, attr);

      TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
      TREE_TYPE (*node) = newtype;
      *no_add_attrs = true;
    }
  else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
    {
      *no_add_attrs = false;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored",
      *no_add_attrs = true;


/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.  */

avr_handle_fndecl_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
      *no_add_attrs = true;


avr_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,

  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
      *no_add_attrs = true;
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =

  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false, avr_handle_progmem_attribute,

  { "signal",    0, 0, true,  false, false, avr_handle_fndecl_attribute,

  { "interrupt", 0, 0, true,  false, false, avr_handle_fndecl_attribute,

  { "naked",     0, 0, false, true,  true,  avr_handle_fntype_attribute,

  { "OS_task",   0, 0, false, true,  true,  avr_handle_fntype_attribute,

  { "OS_main",   0, 0, false, true,  true,  avr_handle_fntype_attribute,

  { NULL,        0, 0, false, false, false, NULL, false }
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2   if DECL is located in 24-bit flash address-space
   Return 1   if DECL is located in 16-bit flash address-space
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0   otherwise  */
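/* Example (illustrative only, not part of this file) of user source code
   for which avr_progmem_p returns non-zero: either the attribute or a
   flash address-space qualifier places the data into a .progmem*
   section.  Both declarations must be const.  */
#if 0
const int table1[3] __attribute__((__progmem__)) = { 1, 2, 3 };
const __flash int table2[3] = { 4, 5, 6 };
#endif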
8136 avr_progmem_p (tree decl
, tree attributes
)
8140 if (TREE_CODE (decl
) != VAR_DECL
)
8143 if (avr_decl_memx_p (decl
))
8146 if (avr_decl_flash_p (decl
))
8150 != lookup_attribute ("progmem", attributes
))
8157 while (TREE_CODE (a
) == ARRAY_TYPE
);
8159 if (a
== error_mark_node
)
8162 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))

/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
        {
          return as;
        }

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
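
/* Illustrative examples (assumed):

       const __flash char *p;   // target is const: returns 0 (generic)
       __flash char *q;         // target not const: returns ADDR_SPACE_FLASH

   The second declaration is the kind of pointer this scan flags.  */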

/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %qs",
                   node, avr_addrspace[as].name, avr_current_device->name);
          else
            error ("%s %q+D uses address space %qs beyond flash of %qs",
                   reason, node, avr_addrspace[as].name,
                   avr_current_device->name);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
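
/* Illustrative example (assumed): a declaration like

       __flash int *p;        // pointer target is not const

   is rejected here with a diagnostic along the lines of
   "pointer targeting address space '__flash' must be const
   in variable 'p'".  */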

/* Add the section attribute if the variable is in progmem.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          error ("variable %q+D located in address space %qs"
                 " beyond flash of %qs",
                 node, avr_addrspace[as].name, avr_current_device->name);
        }

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}

/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    const_tree decl ATTRIBUTE_UNUSED,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}

/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}


/* Unnamed section callback for progmem*.data sections.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}

/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}

/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  return progmem_swtable_section;
}

/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
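
/* Illustrative example (assumed): for data in __flash1 compiled with
   -fdata-sections, a default section name like

       .rodata.foo

   is rewritten above to

       .progmem1.data.foo

   so that the linker script places it in program memory.  */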

/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}

/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      addr_space_t as;

      if (type == error_mark_node)
        return;

      as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}

/* Implement `TARGET_ASM_SELECT_SECTION'.  */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}

/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_current_arch->sfr_offset;

  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
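
/* Illustrative output (assumed; the exact values vary per device):
   for an ATmega-like MCU with SFR offset 0x20 this emits roughly

       __SP_H__ = 0x3e
       __SP_L__ = 0x3d
       __SREG__ = 0x3f
       __tmp_reg__ = 0
       __zero_reg__ = 1

   at the top of every assembler file.  */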

/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty, see PR18145.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}

/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  const int *order = (TARGET_ORDER_1 ? order_1 :
                      TARGET_ORDER_2 ? order_2 :
                      order_0);
  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}

/* Implement `TARGET_REGISTER_MOVE_COST'.  */

static int
avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                        reg_class_t from, reg_class_t to)
{
  return (from == STACK_REG ? 6
          : to == STACK_REG ? 12
          : 2);
}

/* Implement `TARGET_MEMORY_MOVE_COST'.  */

static int
avr_memory_move_cost (enum machine_mode mode,
                      reg_class_t rclass ATTRIBUTE_UNUSED,
                      bool in ATTRIBUTE_UNUSED)
{
  return (mode == QImode ? 2
          : mode == HImode ? 4
          : mode == SImode ? 8
          : mode == SFmode ? 8
          : 16);
}

/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
                      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}

/* Worker function for AVR backend's rtx_cost function.
   X is the rtx expression whose cost is to be calculated.
   Return true if the complete cost has been computed.
   Return false if subexpressions should be scanned.
   In either case, *TOTAL contains the cost result.  */

static bool
avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case HImode:
        case PSImode:
        case SImode:
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case SIGN_EXTEND:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* FALLTHRU */
    case AND:
    case IOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            *total = 10;
          break;

        case SImode:
          if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc. */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline.  */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc. */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor.  */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around
         registers.  */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      switch (mode)
        {
        case QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);
          break;

        case HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          *total = COSTS_N_INSNS (2);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case TRUNCATE:
      if (AVR_HAVE_MUL
          && LSHIFTRT == GET_CODE (XEXP (x, 0))
          && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
        {
          if (QImode == mode || HImode == mode)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
        }
      break;

    default:
      break;
    }

  return false;
}

/* Implement `TARGET_RTX_COSTS'.  */

static bool
avr_rtx_costs (rtx x, int codearg, int outer_code,
               int opno, int *total, bool speed)
{
  bool done = avr_rtx_costs_1 (x, codearg, outer_code,
                               opno, total, speed);

  if (avr_log.rtx_costs)
    {
      avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
                 done, speed ? "speed" : "size", *total, outer_code, x);
    }

  return done;
}

/* Implement `TARGET_ADDRESS_COST'.  */

static int
avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
                  addr_space_t as ATTRIBUTE_UNUSED,
                  bool speed ATTRIBUTE_UNUSED)
{
  int cost = 4;

  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1))
      && (REG_P (XEXP (x, 0))
          || GET_CODE (XEXP (x, 0)) == SUBREG))
    {
      if (INTVAL (XEXP (x, 1)) >= 61)
        cost = 18;
    }
  else if (CONSTANT_ADDRESS_P (x))
    {
      if (optimize > 0
          && io_address_operand (x, QImode))
        cost = 2;
    }

  if (avr_log.address_cost)
    avr_edump ("\n%?: %d = %r\n", cost, x);

  return cost;
}

/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  int ok = 0;

  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
          <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);

      ok = (/* allocate pseudos */
            regno >= FIRST_PSEUDO_REGISTER
            /* strictly check */
            || regno == REG_Z || regno == REG_Y
            /* XXX frame & arg pointer checks */
            || xx == frame_pointer_rtx
            || xx == arg_pointer_rtx);

      if (avr_log.constraints)
        avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
                   ok, reload_completed, reload_in_progress, x);
    }

  return ok;
}

/* Convert condition code CONDITION to the valid AVR condition code.  */

RTX_CODE
avr_normalize_condition (RTX_CODE condition)
{
  switch (condition)
    {
    case GT:
      return GE;
    case GTU:
      return GEU;
    case LE:
      return LT;
    case LEU:
      return LTU;
    default:
      gcc_unreachable ();
    }
}
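
/* Illustrative mapping (see also the comment in
   avr_reorg_remove_redundant_compare below):

       GT  -> GE        GTU -> GEU
       LE  -> LT        LEU -> LTU

   i.e. "difficult" conditions are normalized to "simple" ones that the
   AVR branch instructions can test directly.  */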

/* Helper function for `avr_reorg'.  */

static rtx
avr_compare_pattern (rtx insn)
{
  rtx pattern = single_set (insn);

  if (pattern
      && NONJUMP_INSN_P (insn)
      && SET_DEST (pattern) == cc0_rtx
      && GET_CODE (SET_SRC (pattern)) == COMPARE)
    {
      enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
      enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));

      /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
         They must not be swapped, thus skip them.  */

      if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
          && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
        return pattern;
    }

  return NULL_RTX;
}

/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with

        if (x == VAL)   goto L1;
        if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}

/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  */

static void
avr_reorg (void)
{
  rtx insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}

/* Returns register number for function return value.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}


/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}

/* Implement `TARGET_LIBCALL_VALUE'.  */
/* Create an RTX representing the place where a
   library function returns a value of mode MODE.  */

static rtx
avr_libcall_value (enum machine_mode mode,
                   const_rtx func ATTRIBUTE_UNUSED)
{
  int offs = GET_MODE_SIZE (mode);

  if (offs <= 4)
    offs = (offs + 1) & ~1;

  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
}
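
/* Illustrative examples (assumed): with avr_ret_register () == 24,
   a QImode libcall value lives in R24, an HImode value in R25:R24 and
   an SImode value in R25..R22 -- i.e. return values are right-aligned
   at R25.  */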

/* Implement `TARGET_FUNCTION_VALUE'.  */
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}

int
test_hard_reg_class (enum reg_class rclass, rtx x)
{
  int regno = true_regnum (x);

  if (regno < 0)
    return 0;

  if (TEST_HARD_REG_CLASS (rclass, regno))
    return 1;

  return 0;
}

/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx insn)
{
  if (avr_current_device->errata_skip
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}

int
jump_over_one_insn_p (rtx insn, rtx dest)
{
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
                      ? XEXP (dest, 0)
                      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);

  return (jump_offset == 1
          || (jump_offset == 2
              && avr_2word_insn_p (next_active_insn (insn))));
}

/* Worker function for `HARD_REGNO_MODE_OK'.  */
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
     Disallowing QI et al. in these regs might lead to code like
         (set (subreg:QI (reg:HI 28) n) ...)
     which will result in wrong code because reload does not
     handle SUBREGs of hard registers like this.
     This could be fixed in reload.  However, it appears
     that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  /* FIXME: Ideally, the following test is not needed.
     However, it turned out that it can reduce the number
     of spill fails.  AVR and its poor endowment with
     address registers is an extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4
      && regno >= REG_X)
    return 0;

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
}

/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */

int
avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
{
  /* FIXME: This hook gets called with MODE:REGNO combinations that don't
        represent valid hard registers like, e.g. HI:29.  Returning TRUE
        for such registers can lead to performance degradation as mentioned
        in PR53595.  Thus, report invalid hard registers as FALSE.  */

  if (!avr_hard_regno_mode_ok (regno, mode))
    return 0;

  /* Return true if any of the following boundaries is crossed:
     17/18, 27/28 and 29/30.  */

  return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
          || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
          || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
}
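
/* Illustrative example (assumed): an SImode value in R16 occupies
   R16..R19 and crosses the 17/18 boundary, so it is reported as
   part-clobbered; the same value in R18 (R18..R21) crosses no boundary
   and is not.  */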

/* Implement `MODE_CODE_BASE_REG_CLASS'.  */

enum reg_class
avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
                              addr_space_t as, RTX_CODE outer_code,
                              RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    {
      return POINTER_Z_REGS;
    }

  if (!avr_strict_X)
    return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;

  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
}

/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  if (!ADDR_SPACE_GENERIC_P (as))
    {
      if (regno < FIRST_PSEUDO_REGISTER
          && regno == REG_Z)
        {
          return true;
        }

      if (reg_renumber)
        {
          regno = reg_renumber[regno];

          if (regno == REG_Z)
            {
              return true;
            }
        }

      return false;
    }

  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM)
        {
          ok = true;
        }
    }

  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}

/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.

   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value  */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;

          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte.  */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC  */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
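
/* Illustrative sequence (assumed): loading 0x00010001 into R22..R25
   (not LD_REGS, MOVW available) may come out as

       clr r22
       inc r22
       mov r23,__zero_reg__
       movw r24,r22

   using the INC and MOVW shortcuts implemented above.  */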

/* Reload the constant OP[1] into the HI register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
                 by the insns printed.

   Return "".  */

const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}

/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}

const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}

/* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */

void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}

/* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
/* Return true if register REGNO is safe to be allocated as a scratch
   register (for a define_peephole2) in the current function.  */

static bool
avr_hard_regno_scratch_ok (unsigned int regno)
{
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))
    return false;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (regno == REG_Y || regno == REG_Y + 1))
    {
      return false;
    }

  return true;
}

/* Worker function for `HARD_REGNO_RENAME_OK'.  */
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
avr_hard_regno_rename_ok (unsigned int old_reg,
                          unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))
    return 0;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (old_reg == REG_Y || old_reg == REG_Y + 1
          || new_reg == REG_Y || new_reg == REG_Y + 1))
    {
      return 0;
    }

  return 1;
}

/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}

/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}


/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}

/* Worker function for `TARGET_RETURN_IN_MEMORY'.  */

static bool
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  if (TYPE_MODE (type) == BLKmode)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
    }
  else
    return false;
}

/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}

/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */

static enum machine_mode
avr_addr_space_address_mode (addr_space_t as)
{
  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
}


/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */

static enum machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  return avr_addr_space_address_mode (as);
}

/* Helper for following function.  */

static bool
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
{
  gcc_assert (REG_P (reg));

  if (strict)
    {
      return REGNO (reg) == REG_Z;
    }

  /* Avoid combine to propagate hard regs.  */

  if (can_create_pseudo_p()
      && REGNO (reg) < REG_Z)
    {
      return false;
    }

  return true;
}

/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}

/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */

static rtx
avr_addr_space_legitimize_address (rtx x, rtx old_x,
                                   enum machine_mode mode, addr_space_t as)
{
  if (ADDR_SPACE_GENERIC_P (as))
    return avr_legitimize_address (x, old_x, mode);

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
    }

  return old_x;
}
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from a 16-bit to a 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g., PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from a 24-bit to a 16-bit pointer throws away
     the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  return src;
}
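/* Editor's worked example (derived from the code above): up-casting a
   16-bit __flash1 pointer to __memx emits gen_n_extendhipsi2 with
   msb = 1, the segment of ADDR_SPACE_FLASH1, whereas a generic (RAM)
   pointer gets msb = 0x80, i.e. bit 23 set, so that RAM and flash stay
   distinguishable in the linearized 24-bit __memx space.  */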
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */

static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess.  */

  return true;
}
/* Implement `TARGET_CONVERT_TO_TYPE'.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnostic for a pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

        (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

        void f (const __flash char*);

        void g (const char *p)
        {
          f ((const __flash*) p);
        }

     under the assumption that an explicit cast means that the user
     knows what he is doing, e.g. to interface with PSTR or old style
     code using progmem and pgm_read_xxx.  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_current_device->n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: The register allocator might come up with spill fails if it is
        left on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: The register allocator does a bad job and might spill address
        register(s) inside the loop, leading to additional move instructions
        to/from the stack which could clobber tmp_reg.  Thus, do *not* emit
        the load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address
*/

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
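/* Editor's example (illustrative, assembled from the cases above): for
   ADDR_SPACE_GENERIC with a QImode loop register the emitted loop is

       0:   ld   r0,Z+      ; %2 is the tmp register
            st   X+,r0
            dec  %1
            brne 0b
*/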
/* Helper for __builtin_avr_delay_cycles  */

static rtx
avr_mem_clobber (void)
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (mem) = 1;
  return mem;
}
static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
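/* Editor's worked example (values derived from the ranges above, not from
   the original source): for cycles = 1000 the [768, 262144] step emits a
   4-cycle loop with
       loop_count  = (1000 - 5) / 4 + 1 = 249
       cycles_used = (249 - 1) * 4 + 5  = 997
   leaving 3 cycles, which is below the next range's minimum of 6 and is
   therefore covered by one 2-cycle nop and one 1-cycle nop.  */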
/* Compute the image of x under f, i.e. perform   x --> f(x)  */

static int
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
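/* Editor's demo (standalone sketch, not part of this file; it mirrors the
   nibble encoding of avr_map above so it can be compiled and run on any
   host).  Nibble N of F tells where result bit N comes from, so
   F = 0x76543210 is the identity map.  */
#if 0
#include <assert.h>

static int map_nibble (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}

int main (void)
{
  assert (map_nibble (0x76543210, 3) == 3);   /* identity: bit 3 <-- bit 3 */
  assert (map_nibble (0x3210ffff, 4) == 0);   /* bit 4 <-- source bit 0 */
  assert (map_nibble (0x3210ffff, 0) == 0xf); /* bit 0 <-- OP[3].0, see below */
  return 0;
}
#endif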
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        gcc_unreachable();
    }

  return metric;
}
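/* Editor's worked example: for A = 0x3210ffff, result bits 4..7 come from
   source bits 0..3 and result bits 0..3 come from OP[3] (nibble 0xf), so
       avr_map_metric (A, MAP_FIXED_0_7)       == 0
       avr_map_metric (A, MAP_NONFIXED_0_7)    == 4
       avr_map_metric (A, MAP_MASK_PREIMAGE_F) == 0x0f  */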
/* Return true if IVAL has a 0xf in its hexadecimal representation
   and false, otherwise.  Only nibbles 0..7 are taken into account.
   Used as constraint helper for C0f and Cxf.  */

bool
avr_has_nibble_0xf (rtx ival)
{
  unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
  return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
}
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = (F o G^-1) o G

   and

      cost (F o G^-1) + cost (G)  <  cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purposes only */
  const char *str;
} avr_map_op_t;

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
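/* Editor's worked example (expanding the comment above): for
   F = 0x3210ffff the table row { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
   i.e. rotate-left-by-4 with cost 1, gives F o G^-1 = 0x7654ffff.  That
   map has only fixed points in its upper nibbles, so the high bits can be
   merged with cheap byte operations instead of four single-bit moves.  */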
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute the cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]:  Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add the cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:   Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false:  Just move the bit if its position in the destination
   is different to its source position.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T:  Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
/* PLEN == 0:  Print assembler code for `insert_bits'.
   PLEN != 0:  Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles.  If nibble no. N is
           0:    Bit N of result is copied from bit OP[2].0
           ...   ...
           7:    Bit N of result is copied from bit OP[2].7
           0xf:  Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = op[2];
  xop[3] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
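/* Editor's usage sketch (hypothetical user code, not part of this file):
   how the insert_bits built-in that this worker implements is typically
   called from AVR C code; the map is the same one used in the examples
   above.  */
#if 0
#include <stdint.h>

static uint8_t
merge_nibbles (uint8_t bits, uint8_t val)
{
  /* Result bits 7..4 come from bits 3..0 of BITS; result bits 3..0 keep
     VAL, selected by the 0xf nibbles.  */
  return __builtin_avr_insert_bits (0x3210ffff, bits, val);
}
#endif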
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };


struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};


/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID].  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
/* Implement `TARGET_BUILTIN_DECL'.  */

static tree
avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (id < AVR_BUILTIN_COUNT)
    return avr_bdesc[id].fndecl;

  return error_mark_node;
}
static void
avr_init_builtin_int24 (void)
{
  tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));

  lang_hooks.types.register_builtin_type (int24_type, "__int24");
  lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
}
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL_TREE);

#define ITYP(T) \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx) \
  tree fx##r_ftype_##fx##r \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
  tree fx##k_ftype_##fx##k \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx) \
  tree fx##r_ftype_##fx##r_int \
    = build_function_type_list (node_##fx##r, node_##fx##r, \
                                integer_type_node, NULL); \
  tree fx##k_ftype_##fx##k_int \
    = build_function_type_list (node_##fx##k, node_##fx##k, \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx) \
  tree int_ftype_##fx##r \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx) \
  tree int##fx##r_ftype_##fx##r \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx) \
  tree fx##r_ftype_int##fx##r \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      enum machine_mode opmode = GET_MODE (op);
      enum machine_mode mode = insn_data[icode].operand[n+1].mode;

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  switch (id)
    {
    default:
      break;

    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      {
        int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

        if (rbit >= (int) GET_MODE_FBIT (mode))
          {
            warning (OPT_Wextra, "rounding to %d bits has no effect for "
                     "fixed-point value with %d fractional bits",
                     rbit, GET_MODE_FBIT (mode));

            return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                                EXPAND_NORMAL);
          }
        else if (rbit <= - (int) GET_MODE_IBIT (mode))
          {
            warning (0, "rounding result will always be 0");
            return CONST0_RTX (mode);
          }

        /* The rounding point RP now satisfies  -IBIT < RP < FBIT.

           TR 18037 only specifies results for RP > 0.  However, the
           remaining cases of  -IBIT < RP <= 0  can easily be supported
           without any additional overhead.  */
      }

      break;
    }

  /* No fold found and no insn:  Call a support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed:  vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
/* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */

static tree
avr_fold_absfx (tree tval)
{
  if (FIXED_CST != TREE_CODE (tval))
    return NULL_TREE;

  /* Our fixed-points have no padding:  Use double_int payload directly.  */

  FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
  unsigned int bits = GET_MODE_BITSIZE (fval.mode);
  double_int ival = fval.data.sext (bits);

  if (!ival.is_negative())
    return tval;

  /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */

  fval.data = (ival == double_int::min_value (bits, false).sext (bits))
    ? double_int::max_value (bits, false)
    : -ival;

  return build_fixed (TREE_TYPE (tval), fval);
}
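/* Editor's worked example: `short _Fract' has 7 fractional bits, so its
   values range over [-1, 127/128].  abshr (-0.5HR) folds to 0.5HR by
   plain negation, while abshr (-1.0HR) saturates to 0.9921875HR (bit
   pattern 0x7f) because +1.0 is not representable, per TR 18037.  */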
/* Implement `TARGET_FOLD_BUILTIN'.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good at folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument:  Don't fold this and run into
               an error in avr_expand_builtin.  */

            break;
          }

        tmap = double_int_to_tree (map_type, tree_to_double_int (arg[0]));
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. the 3rd operand is unused.
               Replace that argument with some constant to render
               the respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))  mask_ior |= (1 << i);
                    else                   mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing the map to reduce the overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found  */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically:  They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use  map o G^-1  instead of the original map to undo the
           effect of G.  */

        tmap = double_int_to_tree (map_type,
                                   double_int::from_uhwi (best_g.map));

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
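/* Editor's worked example for the INSERT_BITS constant folding above:
   with map = 0x3210ffff and constant bits = 0b1010 the loop computes
   mask_ior = 0xa0 and mask_and = 0xaf, so the call folds to
   (val | 0xa0) & 0xaf, which forces result bits 7..4 to 1,0,1,0 and
   keeps val in bits 3..0.  */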
/* Initialize the GCC target structure.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"