1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
/* Maximal allowed offset (in bytes) for an address in the LD command:
   LDD supports displacements 0..63, so the last byte of a MODE-sized
   access must still fall below 64.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree
);
53 static int interrupt_function_p (tree
);
54 static int signal_function_p (tree
);
55 static int avr_OS_task_function_p (tree
);
56 static int avr_OS_main_function_p (tree
);
57 static int avr_regs_to_save (HARD_REG_SET
*);
58 static int get_sequence_length (rtx insns
);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code
);
62 static int avr_num_arg_regs (enum machine_mode
, tree
);
64 static RTX_CODE
compare_condition (rtx insn
);
65 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
66 static int compare_sign_p (rtx insn
);
67 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
68 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
69 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
70 static bool avr_assemble_integer (rtx
, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static rtx
avr_function_value (const_tree
, const_tree
, bool);
77 static void avr_insert_attributes (tree
, tree
*);
78 static void avr_asm_init_sections (void);
79 static unsigned int avr_section_type_flags (tree
, const char *, int);
81 static void avr_reorg (void);
82 static void avr_asm_out_ctor (rtx
, int);
83 static void avr_asm_out_dtor (rtx
, int);
84 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
85 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
86 static int avr_address_cost (rtx
, bool);
87 static bool avr_return_in_memory (const_tree
, const_tree
);
88 static struct machine_function
* avr_init_machine_status (void);
89 static rtx
avr_builtin_setjmp_frame_value (void);
90 static bool avr_hard_regno_scratch_ok (unsigned int);
91 static unsigned int avr_case_values_threshold (void);
92 static bool avr_frame_pointer_required_p (void);
94 /* Allocate registers from r25 to r8 for parameters for function calls. */
95 #define FIRST_CUM_REG 26
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
98 static GTY(()) rtx tmp_reg_rtx
;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
101 static GTY(()) rtx zero_reg_rtx
;
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames
[] = REGISTER_NAMES
;
106 /* This holds the last insn address. */
107 static int last_insn_address
= 0;
109 /* Preprocessor macros to define depending on MCU type. */
110 static const char *avr_extra_arch_macro
;
112 /* Current architecture. */
113 const struct base_arch_s
*avr_current_arch
;
115 /* Current device. */
116 const struct mcu_type_s
*avr_current_device
;
118 section
*progmem_section
;
120 /* AVR attributes. */
121 static const struct attribute_spec avr_attribute_table
[] =
123 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
124 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
125 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
126 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
127 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
128 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
129 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
},
130 { NULL
, 0, 0, false, false, false, NULL
}
133 /* Initialize the GCC target structure. */
134 #undef TARGET_ASM_ALIGNED_HI_OP
135 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
136 #undef TARGET_ASM_ALIGNED_SI_OP
137 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
138 #undef TARGET_ASM_UNALIGNED_HI_OP
139 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
140 #undef TARGET_ASM_UNALIGNED_SI_OP
141 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
142 #undef TARGET_ASM_INTEGER
143 #define TARGET_ASM_INTEGER avr_assemble_integer
144 #undef TARGET_ASM_FILE_START
145 #define TARGET_ASM_FILE_START avr_file_start
146 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
147 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
148 #undef TARGET_ASM_FILE_END
149 #define TARGET_ASM_FILE_END avr_file_end
151 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
152 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
153 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
154 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
155 #undef TARGET_FUNCTION_VALUE
156 #define TARGET_FUNCTION_VALUE avr_function_value
157 #undef TARGET_ATTRIBUTE_TABLE
158 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
159 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
160 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
161 #undef TARGET_INSERT_ATTRIBUTES
162 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
163 #undef TARGET_SECTION_TYPE_FLAGS
164 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS avr_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST avr_address_cost
169 #undef TARGET_MACHINE_DEPENDENT_REORG
170 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
172 #undef TARGET_LEGITIMIZE_ADDRESS
173 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
175 #undef TARGET_RETURN_IN_MEMORY
176 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
178 #undef TARGET_STRICT_ARGUMENT_NAMING
179 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
181 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
182 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
184 #undef TARGET_HARD_REGNO_SCRATCH_OK
185 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
186 #undef TARGET_CASE_VALUES_THRESHOLD
187 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
189 #undef TARGET_LEGITIMATE_ADDRESS_P
190 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
192 #undef TARGET_FRAME_POINTER_REQUIRED
193 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
195 struct gcc_target targetm
= TARGET_INITIALIZER
;
198 avr_override_options (void)
200 const struct mcu_type_s
*t
;
202 flag_delete_null_pointer_checks
= 0;
204 for (t
= avr_mcu_types
; t
->name
; t
++)
205 if (strcmp (t
->name
, avr_mcu_name
) == 0)
210 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
212 for (t
= avr_mcu_types
; t
->name
; t
++)
213 fprintf (stderr
," %s\n", t
->name
);
216 avr_current_arch
= &avr_arch_types
[t
->arch
];
217 avr_extra_arch_macro
= t
->macro
;
219 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
220 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
222 init_machine_status
= avr_init_machine_status
;
225 /* Worker function for TARGET_CPU_CPP_BUILTINS. */
228 avr_cpu_cpp_builtins (struct cpp_reader
*pfile
)
230 builtin_define_std ("AVR");
232 if (avr_current_arch
->macro
)
233 cpp_define (pfile
, avr_current_arch
->macro
);
234 if (avr_extra_arch_macro
)
235 cpp_define (pfile
, avr_extra_arch_macro
);
236 if (avr_current_arch
->have_elpm
)
237 cpp_define (pfile
, "__AVR_HAVE_RAMPZ__");
238 if (avr_current_arch
->have_elpm
)
239 cpp_define (pfile
, "__AVR_HAVE_ELPM__");
240 if (avr_current_arch
->have_elpmx
)
241 cpp_define (pfile
, "__AVR_HAVE_ELPMX__");
242 if (avr_current_arch
->have_movw_lpmx
)
244 cpp_define (pfile
, "__AVR_HAVE_MOVW__");
245 cpp_define (pfile
, "__AVR_HAVE_LPMX__");
247 if (avr_current_arch
->asm_only
)
248 cpp_define (pfile
, "__AVR_ASM_ONLY__");
249 if (avr_current_arch
->have_mul
)
251 cpp_define (pfile
, "__AVR_ENHANCED__");
252 cpp_define (pfile
, "__AVR_HAVE_MUL__");
254 if (avr_current_arch
->have_jmp_call
)
256 cpp_define (pfile
, "__AVR_MEGA__");
257 cpp_define (pfile
, "__AVR_HAVE_JMP_CALL__");
259 if (avr_current_arch
->have_eijmp_eicall
)
261 cpp_define (pfile
, "__AVR_HAVE_EIJMP_EICALL__");
262 cpp_define (pfile
, "__AVR_3_BYTE_PC__");
266 cpp_define (pfile
, "__AVR_2_BYTE_PC__");
268 if (TARGET_NO_INTERRUPTS
)
269 cpp_define (pfile
, "__NO_INTERRUPTS__");
272 /* return register class from register number. */
274 static const enum reg_class reg_class_tab
[]={
275 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
276 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
277 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
278 GENERAL_REGS
, /* r0 - r15 */
279 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
280 LD_REGS
, /* r16 - 23 */
281 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
282 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
283 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
284 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
285 STACK_REG
,STACK_REG
/* SPL,SPH */
288 /* Function to set up the backend function structure. */
290 static struct machine_function
*
291 avr_init_machine_status (void)
293 return ((struct machine_function
*)
294 ggc_alloc_cleared (sizeof (struct machine_function
)));
297 /* Return register class for register R. */
300 avr_regno_reg_class (int r
)
303 return reg_class_tab
[r
];
307 /* Return nonzero if FUNC is a naked function. */
310 avr_naked_function_p (tree func
)
314 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
316 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
317 return a
!= NULL_TREE
;
320 /* Return nonzero if FUNC is an interrupt function as specified
321 by the "interrupt" attribute. */
324 interrupt_function_p (tree func
)
328 if (TREE_CODE (func
) != FUNCTION_DECL
)
331 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
332 return a
!= NULL_TREE
;
335 /* Return nonzero if FUNC is a signal function as specified
336 by the "signal" attribute. */
339 signal_function_p (tree func
)
343 if (TREE_CODE (func
) != FUNCTION_DECL
)
346 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
347 return a
!= NULL_TREE
;
350 /* Return nonzero if FUNC is a OS_task function. */
353 avr_OS_task_function_p (tree func
)
357 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
359 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
360 return a
!= NULL_TREE
;
363 /* Return nonzero if FUNC is a OS_main function. */
366 avr_OS_main_function_p (tree func
)
370 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
372 a
= lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
373 return a
!= NULL_TREE
;
376 /* Return the number of hard registers to push/pop in the prologue/epilogue
377 of the current function, and optionally store these registers in SET. */
380 avr_regs_to_save (HARD_REG_SET
*set
)
383 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
384 || signal_function_p (current_function_decl
));
386 if (!reload_completed
)
387 cfun
->machine
->is_leaf
= leaf_function_p ();
390 CLEAR_HARD_REG_SET (*set
);
393 /* No need to save any registers if the function never returns or
394 is have "OS_task" or "OS_main" attribute. */
395 if (TREE_THIS_VOLATILE (current_function_decl
)
396 || cfun
->machine
->is_OS_task
397 || cfun
->machine
->is_OS_main
)
400 for (reg
= 0; reg
< 32; reg
++)
402 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
403 any global register variables. */
407 if ((int_or_sig_p
&& !cfun
->machine
->is_leaf
&& call_used_regs
[reg
])
408 || (df_regs_ever_live_p (reg
)
409 && (int_or_sig_p
|| !call_used_regs
[reg
])
410 && !(frame_pointer_needed
411 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
414 SET_HARD_REG_BIT (*set
, reg
);
421 /* Return true if register FROM can be eliminated via register TO. */
424 avr_can_eliminate (int from
, int to
)
426 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
427 || ((from
== FRAME_POINTER_REGNUM
428 || from
== FRAME_POINTER_REGNUM
+ 1)
429 && !frame_pointer_needed
));
432 /* Compute offset between arg_pointer and frame_pointer. */
435 avr_initial_elimination_offset (int from
, int to
)
437 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
441 int offset
= frame_pointer_needed
? 2 : 0;
442 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
444 offset
+= avr_regs_to_save (NULL
);
445 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
449 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
450 frame pointer by +STARTING_FRAME_OFFSET.
451 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
452 avoids creating add/sub of offset in nonlocal goto and setjmp. */
454 rtx
avr_builtin_setjmp_frame_value (void)
456 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
457 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
460 /* Return 1 if the function epilogue is just a single "ret". */
463 avr_simple_epilogue (void)
465 return (! frame_pointer_needed
466 && get_frame_size () == 0
467 && avr_regs_to_save (NULL
) == 0
468 && ! interrupt_function_p (current_function_decl
)
469 && ! signal_function_p (current_function_decl
)
470 && ! avr_naked_function_p (current_function_decl
)
471 && ! TREE_THIS_VOLATILE (current_function_decl
));
474 /* This function checks sequence of live registers. */
477 sequent_regs_live (void)
483 for (reg
= 0; reg
< 18; ++reg
)
485 if (!call_used_regs
[reg
])
487 if (df_regs_ever_live_p (reg
))
497 if (!frame_pointer_needed
)
499 if (df_regs_ever_live_p (REG_Y
))
507 if (df_regs_ever_live_p (REG_Y
+1))
520 return (cur_seq
== live_seq
) ? live_seq
: 0;
523 /* Obtain the length sequence of insns. */
526 get_sequence_length (rtx insns
)
531 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
532 length
+= get_attr_length (insn
);
537 /* Output function prologue. */
540 expand_prologue (void)
545 HOST_WIDE_INT size
= get_frame_size();
546 /* Define templates for push instructions. */
547 rtx pushbyte
= gen_rtx_MEM (QImode
,
548 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
549 rtx pushword
= gen_rtx_MEM (HImode
,
550 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
553 last_insn_address
= 0;
555 /* Init cfun->machine. */
556 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
557 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
558 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
559 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
560 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
562 /* Prologue: naked. */
563 if (cfun
->machine
->is_naked
)
568 avr_regs_to_save (&set
);
569 live_seq
= sequent_regs_live ();
570 minimize
= (TARGET_CALL_PROLOGUES
571 && !cfun
->machine
->is_interrupt
572 && !cfun
->machine
->is_signal
573 && !cfun
->machine
->is_OS_task
574 && !cfun
->machine
->is_OS_main
577 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
579 if (cfun
->machine
->is_interrupt
)
581 /* Enable interrupts. */
582 insn
= emit_insn (gen_enable_interrupt ());
583 RTX_FRAME_RELATED_P (insn
) = 1;
587 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
588 RTX_FRAME_RELATED_P (insn
) = 1;
591 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
592 RTX_FRAME_RELATED_P (insn
) = 1;
595 insn
= emit_move_insn (tmp_reg_rtx
,
596 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
597 RTX_FRAME_RELATED_P (insn
) = 1;
598 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
599 RTX_FRAME_RELATED_P (insn
) = 1;
603 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
605 insn
= emit_move_insn (tmp_reg_rtx
,
606 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
607 RTX_FRAME_RELATED_P (insn
) = 1;
608 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
609 RTX_FRAME_RELATED_P (insn
) = 1;
612 /* Clear zero reg. */
613 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
614 RTX_FRAME_RELATED_P (insn
) = 1;
616 /* Prevent any attempt to delete the setting of ZERO_REG! */
617 emit_use (zero_reg_rtx
);
619 if (minimize
&& (frame_pointer_needed
620 || (AVR_2_BYTE_PC
&& live_seq
> 6)
623 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
624 gen_int_mode (size
, HImode
));
625 RTX_FRAME_RELATED_P (insn
) = 1;
628 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
629 gen_int_mode (size
+ live_seq
, HImode
)));
630 RTX_FRAME_RELATED_P (insn
) = 1;
635 for (reg
= 0; reg
< 32; ++reg
)
637 if (TEST_HARD_REG_BIT (set
, reg
))
639 /* Emit push of register to save. */
640 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
641 RTX_FRAME_RELATED_P (insn
) = 1;
644 if (frame_pointer_needed
)
646 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
648 /* Push frame pointer. */
649 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
650 RTX_FRAME_RELATED_P (insn
) = 1;
655 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
656 RTX_FRAME_RELATED_P (insn
) = 1;
660 /* Creating a frame can be done by direct manipulation of the
661 stack or via the frame pointer. These two methods are:
668 the optimum method depends on function type, stack and frame size.
669 To avoid a complex logic, both methods are tested and shortest
673 rtx sp_plus_insns
= NULL_RTX
;
675 if (TARGET_TINY_STACK
)
677 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
678 over 'sbiw' (2 cycles, same size). */
679 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
683 /* Normal sized addition. */
684 myfp
= frame_pointer_rtx
;
687 /* Method 1-Adjust frame pointer. */
690 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
691 RTX_FRAME_RELATED_P (insn
) = 1;
694 emit_move_insn (myfp
,
695 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
698 RTX_FRAME_RELATED_P (insn
) = 1;
700 /* Copy to stack pointer. */
701 if (TARGET_TINY_STACK
)
703 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
704 RTX_FRAME_RELATED_P (insn
) = 1;
706 else if (TARGET_NO_INTERRUPTS
707 || cfun
->machine
->is_signal
708 || cfun
->machine
->is_OS_main
)
711 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
713 RTX_FRAME_RELATED_P (insn
) = 1;
715 else if (cfun
->machine
->is_interrupt
)
717 insn
= emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
719 RTX_FRAME_RELATED_P (insn
) = 1;
723 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
724 RTX_FRAME_RELATED_P (insn
) = 1;
727 fp_plus_insns
= get_insns ();
730 /* Method 2-Adjust Stack pointer. */
736 emit_move_insn (stack_pointer_rtx
,
737 gen_rtx_PLUS (HImode
,
741 RTX_FRAME_RELATED_P (insn
) = 1;
744 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
745 RTX_FRAME_RELATED_P (insn
) = 1;
747 sp_plus_insns
= get_insns ();
751 /* Use shortest method. */
752 if (size
<= 6 && (get_sequence_length (sp_plus_insns
)
753 < get_sequence_length (fp_plus_insns
)))
754 emit_insn (sp_plus_insns
);
756 emit_insn (fp_plus_insns
);
762 /* Output summary at end of function prologue. */
765 avr_asm_function_end_prologue (FILE *file
)
767 if (cfun
->machine
->is_naked
)
769 fputs ("/* prologue: naked */\n", file
);
773 if (cfun
->machine
->is_interrupt
)
775 fputs ("/* prologue: Interrupt */\n", file
);
777 else if (cfun
->machine
->is_signal
)
779 fputs ("/* prologue: Signal */\n", file
);
782 fputs ("/* prologue: function */\n", file
);
784 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
789 /* Implement EPILOGUE_USES. */
792 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
796 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
801 /* Output RTL epilogue. */
804 expand_epilogue (void)
810 HOST_WIDE_INT size
= get_frame_size();
812 /* epilogue: naked */
813 if (cfun
->machine
->is_naked
)
815 emit_jump_insn (gen_return ());
819 avr_regs_to_save (&set
);
820 live_seq
= sequent_regs_live ();
821 minimize
= (TARGET_CALL_PROLOGUES
822 && !cfun
->machine
->is_interrupt
823 && !cfun
->machine
->is_signal
824 && !cfun
->machine
->is_OS_task
825 && !cfun
->machine
->is_OS_main
828 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
830 if (frame_pointer_needed
)
832 /* Get rid of frame. */
833 emit_move_insn(frame_pointer_rtx
,
834 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
835 gen_int_mode (size
, HImode
)));
839 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
842 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
846 if (frame_pointer_needed
)
850 /* Try two methods to adjust stack and select shortest. */
853 rtx sp_plus_insns
= NULL_RTX
;
855 if (TARGET_TINY_STACK
)
857 /* The high byte (r29) doesn't change - prefer 'subi'
858 (1 cycle) over 'sbiw' (2 cycles, same size). */
859 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
863 /* Normal sized addition. */
864 myfp
= frame_pointer_rtx
;
867 /* Method 1-Adjust frame pointer. */
870 emit_move_insn (myfp
,
871 gen_rtx_PLUS (HImode
, myfp
,
875 /* Copy to stack pointer. */
876 if (TARGET_TINY_STACK
)
878 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
880 else if (TARGET_NO_INTERRUPTS
881 || cfun
->machine
->is_signal
)
883 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
886 else if (cfun
->machine
->is_interrupt
)
888 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
893 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
896 fp_plus_insns
= get_insns ();
899 /* Method 2-Adjust Stack pointer. */
904 emit_move_insn (stack_pointer_rtx
,
905 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
909 sp_plus_insns
= get_insns ();
913 /* Use shortest method. */
914 if (size
<= 5 && (get_sequence_length (sp_plus_insns
)
915 < get_sequence_length (fp_plus_insns
)))
916 emit_insn (sp_plus_insns
);
918 emit_insn (fp_plus_insns
);
920 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
922 /* Restore previous frame_pointer. */
923 emit_insn (gen_pophi (frame_pointer_rtx
));
926 /* Restore used registers. */
927 for (reg
= 31; reg
>= 0; --reg
)
929 if (TEST_HARD_REG_BIT (set
, reg
))
930 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
932 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
934 /* Restore RAMPZ using tmp reg as scratch. */
936 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
938 emit_insn (gen_popqi (tmp_reg_rtx
));
939 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
943 /* Restore SREG using tmp reg as scratch. */
944 emit_insn (gen_popqi (tmp_reg_rtx
));
946 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
949 /* Restore tmp REG. */
950 emit_insn (gen_popqi (tmp_reg_rtx
));
952 /* Restore zero REG. */
953 emit_insn (gen_popqi (zero_reg_rtx
));
956 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
968 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
969 machine for a memory operand of mode MODE. */
972 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
974 enum reg_class r
= NO_REGS
;
976 if (TARGET_ALL_DEBUG
)
978 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
980 strict
? "(strict)": "",
981 reload_completed
? "(reload_completed)": "",
982 reload_in_progress
? "(reload_in_progress)": "",
983 reg_renumber
? "(reg_renumber)" : "");
984 if (GET_CODE (x
) == PLUS
985 && REG_P (XEXP (x
, 0))
986 && GET_CODE (XEXP (x
, 1)) == CONST_INT
987 && INTVAL (XEXP (x
, 1)) >= 0
988 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
991 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
992 true_regnum (XEXP (x
, 0)));
995 if (!strict
&& GET_CODE (x
) == SUBREG
)
997 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
998 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
1000 else if (CONSTANT_ADDRESS_P (x
))
1002 else if (GET_CODE (x
) == PLUS
1003 && REG_P (XEXP (x
, 0))
1004 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1005 && INTVAL (XEXP (x
, 1)) >= 0)
1007 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1011 || REGNO (XEXP (x
,0)) == REG_X
1012 || REGNO (XEXP (x
,0)) == REG_Y
1013 || REGNO (XEXP (x
,0)) == REG_Z
)
1014 r
= BASE_POINTER_REGS
;
1015 if (XEXP (x
,0) == frame_pointer_rtx
1016 || XEXP (x
,0) == arg_pointer_rtx
)
1017 r
= BASE_POINTER_REGS
;
1019 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1022 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1023 && REG_P (XEXP (x
, 0))
1024 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1025 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1029 if (TARGET_ALL_DEBUG
)
1031 fprintf (stderr
, " ret = %c\n", r
+ '0');
1033 return r
== NO_REGS
? 0 : (int)r
;
1036 /* Attempts to replace X with a valid
1037 memory address for an operand of mode MODE */
1040 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1043 if (TARGET_ALL_DEBUG
)
1045 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1049 if (GET_CODE (oldx
) == PLUS
1050 && REG_P (XEXP (oldx
,0)))
1052 if (REG_P (XEXP (oldx
,1)))
1053 x
= force_reg (GET_MODE (oldx
), oldx
);
1054 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1056 int offs
= INTVAL (XEXP (oldx
,1));
1057 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1058 if (offs
> MAX_LD_OFFSET (mode
))
1060 if (TARGET_ALL_DEBUG
)
1061 fprintf (stderr
, "force_reg (big offset)\n");
1062 x
= force_reg (GET_MODE (oldx
), oldx
);
1070 /* Return a pointer register name as a string. */
1073 ptrreg_to_str (int regno
)
1077 case REG_X
: return "X";
1078 case REG_Y
: return "Y";
1079 case REG_Z
: return "Z";
1081 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1086 /* Return the condition name as a string.
1087 Used in conditional jump constructing */
1090 cond_string (enum rtx_code code
)
1099 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1104 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1117 /* Output ADDR to FILE as address. */
1120 print_operand_address (FILE *file
, rtx addr
)
1122 switch (GET_CODE (addr
))
1125 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1129 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1133 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1137 if (CONSTANT_ADDRESS_P (addr
)
1138 && ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (addr
))
1139 || GET_CODE (addr
) == LABEL_REF
))
1141 fprintf (file
, "gs(");
1142 output_addr_const (file
,addr
);
1143 fprintf (file
,")");
1146 output_addr_const (file
, addr
);
1151 /* Output X as assembler operand to file FILE. */
1154 print_operand (FILE *file
, rtx x
, int code
)
1158 if (code
>= 'A' && code
<= 'D')
1163 if (!AVR_HAVE_JMP_CALL
)
1166 else if (code
== '!')
1168 if (AVR_HAVE_EIJMP_EICALL
)
1173 if (x
== zero_reg_rtx
)
1174 fprintf (file
, "__zero_reg__");
1176 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1178 else if (GET_CODE (x
) == CONST_INT
)
1179 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1180 else if (GET_CODE (x
) == MEM
)
1182 rtx addr
= XEXP (x
,0);
1184 if (CONSTANT_P (addr
) && abcd
)
1187 output_address (addr
);
1188 fprintf (file
, ")+%d", abcd
);
1190 else if (code
== 'o')
1192 if (GET_CODE (addr
) != PLUS
)
1193 fatal_insn ("bad address, not (reg+disp):", addr
);
1195 print_operand (file
, XEXP (addr
, 1), 0);
1197 else if (code
== 'p' || code
== 'r')
1199 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1200 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1203 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1205 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1207 else if (GET_CODE (addr
) == PLUS
)
1209 print_operand_address (file
, XEXP (addr
,0));
1210 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1211 fatal_insn ("internal compiler error. Bad address:"
1214 print_operand (file
, XEXP (addr
,1), code
);
1217 print_operand_address (file
, addr
);
1219 else if (GET_CODE (x
) == CONST_DOUBLE
)
1223 if (GET_MODE (x
) != SFmode
)
1224 fatal_insn ("internal compiler error. Unknown mode:", x
);
1225 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1226 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1227 fprintf (file
, "0x%lx", val
);
1229 else if (code
== 'j')
1230 fputs (cond_string (GET_CODE (x
)), file
);
1231 else if (code
== 'k')
1232 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1234 print_operand_address (file
, x
);
1237 /* Update the condition code in the INSN. */
1240 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1244 switch (get_attr_cc (insn
))
1247 /* Insn does not affect CC at all. */
1255 set
= single_set (insn
);
1259 cc_status
.flags
|= CC_NO_OVERFLOW
;
1260 cc_status
.value1
= SET_DEST (set
);
1265 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1266 The V flag may or may not be known but that's ok because
1267 alter_cond will change tests to use EQ/NE. */
1268 set
= single_set (insn
);
1272 cc_status
.value1
= SET_DEST (set
);
1273 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1278 set
= single_set (insn
);
1281 cc_status
.value1
= SET_SRC (set
);
1285 /* Insn doesn't leave CC in a usable state. */
1288 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1289 set
= single_set (insn
);
1292 rtx src
= SET_SRC (set
);
1294 if (GET_CODE (src
) == ASHIFTRT
1295 && GET_MODE (src
) == QImode
)
1297 rtx x
= XEXP (src
, 1);
1299 if (GET_CODE (x
) == CONST_INT
1303 cc_status
.value1
= SET_DEST (set
);
1304 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1312 /* Return maximum number of consecutive registers of
1313 class CLASS needed to hold a value of mode MODE. */
1316 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1318 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1321 /* Choose mode for jump insn:
1322 1 - relative jump in range -63 <= x <= 62 ;
1323 2 - relative jump in range -2046 <= x <= 2045 ;
1324 3 - absolute jump (only for ATmega[16]03). */
1327 avr_jump_mode (rtx x
, rtx insn
)
1329 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
1330 ? XEXP (x
, 0) : x
));
1331 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1332 int jump_distance
= cur_addr
- dest_addr
;
1334 if (-63 <= jump_distance
&& jump_distance
<= 62)
1336 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1338 else if (AVR_HAVE_JMP_CALL
)
1344 /* return an AVR condition jump commands.
1345 X is a comparison RTX.
1346 LEN is a number returned by avr_jump_mode function.
1347 if REVERSE nonzero then condition code in X must be reversed. */
1350 ret_cond_branch (rtx x
, int len
, int reverse
)
1352 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1357 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1358 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1360 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1361 AS1 (brmi
,.+2) CR_TAB
1363 (AS1 (breq
,.+6) CR_TAB
1364 AS1 (brmi
,.+4) CR_TAB
1368 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1370 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1371 AS1 (brlt
,.+2) CR_TAB
1373 (AS1 (breq
,.+6) CR_TAB
1374 AS1 (brlt
,.+4) CR_TAB
1377 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1379 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1380 AS1 (brlo
,.+2) CR_TAB
1382 (AS1 (breq
,.+6) CR_TAB
1383 AS1 (brlo
,.+4) CR_TAB
1386 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1387 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1389 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1390 AS1 (brpl
,.+2) CR_TAB
1392 (AS1 (breq
,.+2) CR_TAB
1393 AS1 (brpl
,.+4) CR_TAB
1396 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1398 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1399 AS1 (brge
,.+2) CR_TAB
1401 (AS1 (breq
,.+2) CR_TAB
1402 AS1 (brge
,.+4) CR_TAB
1405 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1407 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1408 AS1 (brsh
,.+2) CR_TAB
1410 (AS1 (breq
,.+2) CR_TAB
1411 AS1 (brsh
,.+4) CR_TAB
1419 return AS1 (br
%k1
,%0);
1421 return (AS1 (br
%j1
,.+2) CR_TAB
1424 return (AS1 (br
%j1
,.+4) CR_TAB
1433 return AS1 (br
%j1
,%0);
1435 return (AS1 (br
%k1
,.+2) CR_TAB
1438 return (AS1 (br
%k1
,.+4) CR_TAB
1446 /* Predicate function for immediate operand which fits to byte (8bit) */
1449 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1451 return (GET_CODE (op
) == CONST_INT
1452 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1455 /* Output all insn addresses and their sizes into the assembly language
1456 output file. This is helpful for debugging whether the length attributes
1457 in the md file are correct.
1458 Output insn cost for next insn. */
1461 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1462 int num_operands ATTRIBUTE_UNUSED
)
1464 int uid
= INSN_UID (insn
);
1466 if (TARGET_INSN_SIZE_DUMP
|| TARGET_ALL_DEBUG
)
1468 fprintf (asm_out_file
, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1469 INSN_ADDRESSES (uid
),
1470 INSN_ADDRESSES (uid
) - last_insn_address
,
1471 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1473 last_insn_address
= INSN_ADDRESSES (uid
);
1476 /* Return 0 if undefined, 1 if always true or always false. */
1479 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1481 unsigned int max
= (mode
== QImode
? 0xff :
1482 mode
== HImode
? 0xffff :
1483 mode
== SImode
? 0xffffffff : 0);
1484 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1486 if (unsigned_condition (op
) != op
)
1489 if (max
!= (INTVAL (x
) & max
)
1490 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p(int r)
{
  /* Arguments are passed in r8..r25.  */
  return (r >= 8 && r <= 25);
}
1506 /* Initializing the variable cum for the state at the beginning
1507 of the argument list. */
1510 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1511 tree fndecl ATTRIBUTE_UNUSED
)
1514 cum
->regno
= FIRST_CUM_REG
;
1515 if (!libname
&& fntype
)
1517 int stdarg
= (TYPE_ARG_TYPES (fntype
) != 0
1518 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
1519 != void_type_node
));
1525 /* Returns the number of registers to allocate for a function argument. */
1528 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1532 if (mode
== BLKmode
)
1533 size
= int_size_in_bytes (type
);
1535 size
= GET_MODE_SIZE (mode
);
1537 /* Align all function arguments to start in even-numbered registers.
1538 Odd-sized arguments leave holes above them. */
1540 return (size
+ 1) & ~1;
1543 /* Controls whether a function argument is passed
1544 in a register, and which register. */
1547 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1548 int named ATTRIBUTE_UNUSED
)
1550 int bytes
= avr_num_arg_regs (mode
, type
);
1552 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1553 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1558 /* Update the summarizer variable CUM to advance past an argument
1559 in the argument list. */
1562 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1563 int named ATTRIBUTE_UNUSED
)
1565 int bytes
= avr_num_arg_regs (mode
, type
);
1567 cum
->nregs
-= bytes
;
1568 cum
->regno
-= bytes
;
1570 if (cum
->nregs
<= 0)
1573 cum
->regno
= FIRST_CUM_REG
;
1577 /***********************************************************************
1578 Functions for outputting various mov's for a various modes
1579 ************************************************************************/
1581 output_movqi (rtx insn
, rtx operands
[], int *l
)
1584 rtx dest
= operands
[0];
1585 rtx src
= operands
[1];
1593 if (register_operand (dest
, QImode
))
1595 if (register_operand (src
, QImode
)) /* mov r,r */
1597 if (test_hard_reg_class (STACK_REG
, dest
))
1598 return AS2 (out
,%0,%1);
1599 else if (test_hard_reg_class (STACK_REG
, src
))
1600 return AS2 (in
,%0,%1);
1602 return AS2 (mov
,%0,%1);
1604 else if (CONSTANT_P (src
))
1606 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1607 return AS2 (ldi
,%0,lo8(%1));
1609 if (GET_CODE (src
) == CONST_INT
)
1611 if (src
== const0_rtx
) /* mov r,L */
1612 return AS1 (clr
,%0);
1613 else if (src
== const1_rtx
)
1616 return (AS1 (clr
,%0) CR_TAB
1619 else if (src
== constm1_rtx
)
1621 /* Immediate constants -1 to any register */
1623 return (AS1 (clr
,%0) CR_TAB
1628 int bit_nr
= exact_log2 (INTVAL (src
));
1634 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1637 avr_output_bld (operands
, bit_nr
);
1644 /* Last resort, larger than loading from memory. */
1646 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1647 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1648 AS2 (mov
,%0,r31
) CR_TAB
1649 AS2 (mov
,r31
,__tmp_reg__
));
1651 else if (GET_CODE (src
) == MEM
)
1652 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1654 else if (GET_CODE (dest
) == MEM
)
1658 if (src
== const0_rtx
)
1659 operands
[1] = zero_reg_rtx
;
1661 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1664 output_asm_insn (templ
, operands
);
1673 output_movhi (rtx insn
, rtx operands
[], int *l
)
1676 rtx dest
= operands
[0];
1677 rtx src
= operands
[1];
1683 if (register_operand (dest
, HImode
))
1685 if (register_operand (src
, HImode
)) /* mov r,r */
1687 if (test_hard_reg_class (STACK_REG
, dest
))
1689 if (TARGET_TINY_STACK
)
1690 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1691 /* Use simple load of stack pointer if no interrupts are
1693 else if (TARGET_NO_INTERRUPTS
)
1694 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1695 AS2 (out
,__SP_L__
,%A1
));
1697 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1699 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1700 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1701 AS2 (out
,__SP_L__
,%A1
));
1703 else if (test_hard_reg_class (STACK_REG
, src
))
1706 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1707 AS2 (in
,%B0
,__SP_H__
));
1713 return (AS2 (movw
,%0,%1));
1718 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1722 else if (CONSTANT_P (src
))
1724 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1727 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1728 AS2 (ldi
,%B0
,hi8(%1)));
1731 if (GET_CODE (src
) == CONST_INT
)
1733 if (src
== const0_rtx
) /* mov r,L */
1736 return (AS1 (clr
,%A0
) CR_TAB
1739 else if (src
== const1_rtx
)
1742 return (AS1 (clr
,%A0
) CR_TAB
1743 AS1 (clr
,%B0
) CR_TAB
1746 else if (src
== constm1_rtx
)
1748 /* Immediate constants -1 to any register */
1750 return (AS1 (clr
,%0) CR_TAB
1751 AS1 (dec
,%A0
) CR_TAB
1756 int bit_nr
= exact_log2 (INTVAL (src
));
1762 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1763 AS1 (clr
,%B0
) CR_TAB
1766 avr_output_bld (operands
, bit_nr
);
1772 if ((INTVAL (src
) & 0xff) == 0)
1775 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1776 AS1 (clr
,%A0
) CR_TAB
1777 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1778 AS2 (mov
,%B0
,r31
) CR_TAB
1779 AS2 (mov
,r31
,__tmp_reg__
));
1781 else if ((INTVAL (src
) & 0xff00) == 0)
1784 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1785 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1786 AS2 (mov
,%A0
,r31
) CR_TAB
1787 AS1 (clr
,%B0
) CR_TAB
1788 AS2 (mov
,r31
,__tmp_reg__
));
1792 /* Last resort, equal to loading from memory. */
1794 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1795 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1796 AS2 (mov
,%A0
,r31
) CR_TAB
1797 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1798 AS2 (mov
,%B0
,r31
) CR_TAB
1799 AS2 (mov
,r31
,__tmp_reg__
));
1801 else if (GET_CODE (src
) == MEM
)
1802 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1804 else if (GET_CODE (dest
) == MEM
)
1808 if (src
== const0_rtx
)
1809 operands
[1] = zero_reg_rtx
;
1811 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
1814 output_asm_insn (templ
, operands
);
1819 fatal_insn ("invalid insn:", insn
);
1824 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1828 rtx x
= XEXP (src
, 0);
1834 if (CONSTANT_ADDRESS_P (x
))
1836 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1839 return AS2 (in
,%0,__SREG__
);
1841 if (optimize
> 0 && io_address_operand (x
, QImode
))
1844 return AS2 (in
,%0,%1-0x20);
1847 return AS2 (lds
,%0,%1);
1849 /* memory access by reg+disp */
1850 else if (GET_CODE (x
) == PLUS
1851 && REG_P (XEXP (x
,0))
1852 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1854 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1856 int disp
= INTVAL (XEXP (x
,1));
1857 if (REGNO (XEXP (x
,0)) != REG_Y
)
1858 fatal_insn ("incorrect insn:",insn
);
1860 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1861 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1862 AS2 (ldd
,%0,Y
+63) CR_TAB
1863 AS2 (sbiw
,r28
,%o1
-63));
1865 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1866 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1867 AS2 (ld
,%0,Y
) CR_TAB
1868 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1869 AS2 (sbci
,r29
,hi8(%o1
)));
1871 else if (REGNO (XEXP (x
,0)) == REG_X
)
1873 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1874 it but I have this situation with extremal optimizing options. */
1875 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
1876 || reg_unused_after (insn
, XEXP (x
,0)))
1877 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
1880 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
1881 AS2 (ld
,%0,X
) CR_TAB
1882 AS2 (sbiw
,r26
,%o1
));
1885 return AS2 (ldd
,%0,%1);
1888 return AS2 (ld
,%0,%1);
1892 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
1896 rtx base
= XEXP (src
, 0);
1897 int reg_dest
= true_regnum (dest
);
1898 int reg_base
= true_regnum (base
);
1899 /* "volatile" forces reading low byte first, even if less efficient,
1900 for correct operation with 16-bit I/O registers. */
1901 int mem_volatile_p
= MEM_VOLATILE_P (src
);
1909 if (reg_dest
== reg_base
) /* R = (R) */
1912 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
1913 AS2 (ld
,%B0
,%1) CR_TAB
1914 AS2 (mov
,%A0
,__tmp_reg__
));
1916 else if (reg_base
== REG_X
) /* (R26) */
1918 if (reg_unused_after (insn
, base
))
1921 return (AS2 (ld
,%A0
,X
+) CR_TAB
1925 return (AS2 (ld
,%A0
,X
+) CR_TAB
1926 AS2 (ld
,%B0
,X
) CR_TAB
1932 return (AS2 (ld
,%A0
,%1) CR_TAB
1933 AS2 (ldd
,%B0
,%1+1));
1936 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
1938 int disp
= INTVAL (XEXP (base
, 1));
1939 int reg_base
= true_regnum (XEXP (base
, 0));
1941 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
1943 if (REGNO (XEXP (base
, 0)) != REG_Y
)
1944 fatal_insn ("incorrect insn:",insn
);
1946 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1947 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
1948 AS2 (ldd
,%A0
,Y
+62) CR_TAB
1949 AS2 (ldd
,%B0
,Y
+63) CR_TAB
1950 AS2 (sbiw
,r28
,%o1
-62));
1952 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1953 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1954 AS2 (ld
,%A0
,Y
) CR_TAB
1955 AS2 (ldd
,%B0
,Y
+1) CR_TAB
1956 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1957 AS2 (sbci
,r29
,hi8(%o1
)));
1959 if (reg_base
== REG_X
)
1961 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1962 it but I have this situation with extremal
1963 optimization options. */
1966 if (reg_base
== reg_dest
)
1967 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1968 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
1969 AS2 (ld
,%B0
,X
) CR_TAB
1970 AS2 (mov
,%A0
,__tmp_reg__
));
1972 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1973 AS2 (ld
,%A0
,X
+) CR_TAB
1974 AS2 (ld
,%B0
,X
) CR_TAB
1975 AS2 (sbiw
,r26
,%o1
+1));
1978 if (reg_base
== reg_dest
)
1981 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
1982 AS2 (ldd
,%B0
,%B1
) CR_TAB
1983 AS2 (mov
,%A0
,__tmp_reg__
));
1987 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
1990 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
1992 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
1993 fatal_insn ("incorrect insn:", insn
);
1997 if (REGNO (XEXP (base
, 0)) == REG_X
)
2000 return (AS2 (sbiw
,r26
,2) CR_TAB
2001 AS2 (ld
,%A0
,X
+) CR_TAB
2002 AS2 (ld
,%B0
,X
) CR_TAB
2008 return (AS2 (sbiw
,%r1
,2) CR_TAB
2009 AS2 (ld
,%A0
,%p1
) CR_TAB
2010 AS2 (ldd
,%B0
,%p1
+1));
2015 return (AS2 (ld
,%B0
,%1) CR_TAB
2018 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2020 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2021 fatal_insn ("incorrect insn:", insn
);
2024 return (AS2 (ld
,%A0
,%1) CR_TAB
2027 else if (CONSTANT_ADDRESS_P (base
))
2029 if (optimize
> 0 && io_address_operand (base
, HImode
))
2032 return (AS2 (in
,%A0
,%A1
-0x20) CR_TAB
2033 AS2 (in
,%B0
,%B1
-0x20));
2036 return (AS2 (lds
,%A0
,%A1
) CR_TAB
2040 fatal_insn ("unknown move insn:",insn
);
2045 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2049 rtx base
= XEXP (src
, 0);
2050 int reg_dest
= true_regnum (dest
);
2051 int reg_base
= true_regnum (base
);
2059 if (reg_base
== REG_X
) /* (R26) */
2061 if (reg_dest
== REG_X
)
2062 /* "ld r26,-X" is undefined */
2063 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2064 AS2 (ld
,r29
,X
) CR_TAB
2065 AS2 (ld
,r28
,-X
) CR_TAB
2066 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2067 AS2 (sbiw
,r26
,1) CR_TAB
2068 AS2 (ld
,r26
,X
) CR_TAB
2069 AS2 (mov
,r27
,__tmp_reg__
));
2070 else if (reg_dest
== REG_X
- 2)
2071 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2072 AS2 (ld
,%B0
,X
+) CR_TAB
2073 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2074 AS2 (ld
,%D0
,X
) CR_TAB
2075 AS2 (mov
,%C0
,__tmp_reg__
));
2076 else if (reg_unused_after (insn
, base
))
2077 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2078 AS2 (ld
,%B0
,X
+) CR_TAB
2079 AS2 (ld
,%C0
,X
+) CR_TAB
2082 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2083 AS2 (ld
,%B0
,X
+) CR_TAB
2084 AS2 (ld
,%C0
,X
+) CR_TAB
2085 AS2 (ld
,%D0
,X
) CR_TAB
2090 if (reg_dest
== reg_base
)
2091 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2092 AS2 (ldd
,%C0
,%1+2) CR_TAB
2093 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2094 AS2 (ld
,%A0
,%1) CR_TAB
2095 AS2 (mov
,%B0
,__tmp_reg__
));
2096 else if (reg_base
== reg_dest
+ 2)
2097 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2098 AS2 (ldd
,%B0
,%1+1) CR_TAB
2099 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2100 AS2 (ldd
,%D0
,%1+3) CR_TAB
2101 AS2 (mov
,%C0
,__tmp_reg__
));
2103 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2104 AS2 (ldd
,%B0
,%1+1) CR_TAB
2105 AS2 (ldd
,%C0
,%1+2) CR_TAB
2106 AS2 (ldd
,%D0
,%1+3));
2109 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2111 int disp
= INTVAL (XEXP (base
, 1));
2113 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2115 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2116 fatal_insn ("incorrect insn:",insn
);
2118 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2119 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2120 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2121 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2122 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2123 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2124 AS2 (sbiw
,r28
,%o1
-60));
2126 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2127 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2128 AS2 (ld
,%A0
,Y
) CR_TAB
2129 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2130 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2131 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2132 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2133 AS2 (sbci
,r29
,hi8(%o1
)));
2136 reg_base
= true_regnum (XEXP (base
, 0));
2137 if (reg_base
== REG_X
)
2140 if (reg_dest
== REG_X
)
2143 /* "ld r26,-X" is undefined */
2144 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2145 AS2 (ld
,r29
,X
) CR_TAB
2146 AS2 (ld
,r28
,-X
) CR_TAB
2147 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2148 AS2 (sbiw
,r26
,1) CR_TAB
2149 AS2 (ld
,r26
,X
) CR_TAB
2150 AS2 (mov
,r27
,__tmp_reg__
));
2153 if (reg_dest
== REG_X
- 2)
2154 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2155 AS2 (ld
,r24
,X
+) CR_TAB
2156 AS2 (ld
,r25
,X
+) CR_TAB
2157 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2158 AS2 (ld
,r27
,X
) CR_TAB
2159 AS2 (mov
,r26
,__tmp_reg__
));
2161 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2162 AS2 (ld
,%A0
,X
+) CR_TAB
2163 AS2 (ld
,%B0
,X
+) CR_TAB
2164 AS2 (ld
,%C0
,X
+) CR_TAB
2165 AS2 (ld
,%D0
,X
) CR_TAB
2166 AS2 (sbiw
,r26
,%o1
+3));
2168 if (reg_dest
== reg_base
)
2169 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2170 AS2 (ldd
,%C0
,%C1
) CR_TAB
2171 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2172 AS2 (ldd
,%A0
,%A1
) CR_TAB
2173 AS2 (mov
,%B0
,__tmp_reg__
));
2174 else if (reg_dest
== reg_base
- 2)
2175 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2176 AS2 (ldd
,%B0
,%B1
) CR_TAB
2177 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2178 AS2 (ldd
,%D0
,%D1
) CR_TAB
2179 AS2 (mov
,%C0
,__tmp_reg__
));
2180 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2181 AS2 (ldd
,%B0
,%B1
) CR_TAB
2182 AS2 (ldd
,%C0
,%C1
) CR_TAB
2185 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2186 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2187 AS2 (ld
,%C0
,%1) CR_TAB
2188 AS2 (ld
,%B0
,%1) CR_TAB
2190 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2191 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2192 AS2 (ld
,%B0
,%1) CR_TAB
2193 AS2 (ld
,%C0
,%1) CR_TAB
2195 else if (CONSTANT_ADDRESS_P (base
))
2196 return *l
=8, (AS2 (lds
,%A0
,%A1
) CR_TAB
2197 AS2 (lds
,%B0
,%B1
) CR_TAB
2198 AS2 (lds
,%C0
,%C1
) CR_TAB
2201 fatal_insn ("unknown move insn:",insn
);
2206 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2210 rtx base
= XEXP (dest
, 0);
2211 int reg_base
= true_regnum (base
);
2212 int reg_src
= true_regnum (src
);
2218 if (CONSTANT_ADDRESS_P (base
))
2219 return *l
=8,(AS2 (sts
,%A0
,%A1
) CR_TAB
2220 AS2 (sts
,%B0
,%B1
) CR_TAB
2221 AS2 (sts
,%C0
,%C1
) CR_TAB
2223 if (reg_base
> 0) /* (r) */
2225 if (reg_base
== REG_X
) /* (R26) */
2227 if (reg_src
== REG_X
)
2229 /* "st X+,r26" is undefined */
2230 if (reg_unused_after (insn
, base
))
2231 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2232 AS2 (st
,X
,r26
) CR_TAB
2233 AS2 (adiw
,r26
,1) CR_TAB
2234 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2235 AS2 (st
,X
+,r28
) CR_TAB
2238 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2239 AS2 (st
,X
,r26
) CR_TAB
2240 AS2 (adiw
,r26
,1) CR_TAB
2241 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2242 AS2 (st
,X
+,r28
) CR_TAB
2243 AS2 (st
,X
,r29
) CR_TAB
2246 else if (reg_base
== reg_src
+ 2)
2248 if (reg_unused_after (insn
, base
))
2249 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2250 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2251 AS2 (st
,%0+,%A1
) CR_TAB
2252 AS2 (st
,%0+,%B1
) CR_TAB
2253 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2254 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2255 AS1 (clr
,__zero_reg__
));
2257 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2258 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2259 AS2 (st
,%0+,%A1
) CR_TAB
2260 AS2 (st
,%0+,%B1
) CR_TAB
2261 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2262 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2263 AS1 (clr
,__zero_reg__
) CR_TAB
2266 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2267 AS2 (st
,%0+,%B1
) CR_TAB
2268 AS2 (st
,%0+,%C1
) CR_TAB
2269 AS2 (st
,%0,%D1
) CR_TAB
2273 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2274 AS2 (std
,%0+1,%B1
) CR_TAB
2275 AS2 (std
,%0+2,%C1
) CR_TAB
2276 AS2 (std
,%0+3,%D1
));
2278 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2280 int disp
= INTVAL (XEXP (base
, 1));
2281 reg_base
= REGNO (XEXP (base
, 0));
2282 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2284 if (reg_base
!= REG_Y
)
2285 fatal_insn ("incorrect insn:",insn
);
2287 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2288 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2289 AS2 (std
,Y
+60,%A1
) CR_TAB
2290 AS2 (std
,Y
+61,%B1
) CR_TAB
2291 AS2 (std
,Y
+62,%C1
) CR_TAB
2292 AS2 (std
,Y
+63,%D1
) CR_TAB
2293 AS2 (sbiw
,r28
,%o0
-60));
2295 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2296 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2297 AS2 (st
,Y
,%A1
) CR_TAB
2298 AS2 (std
,Y
+1,%B1
) CR_TAB
2299 AS2 (std
,Y
+2,%C1
) CR_TAB
2300 AS2 (std
,Y
+3,%D1
) CR_TAB
2301 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2302 AS2 (sbci
,r29
,hi8(%o0
)));
2304 if (reg_base
== REG_X
)
2307 if (reg_src
== REG_X
)
2310 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2311 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2312 AS2 (adiw
,r26
,%o0
) CR_TAB
2313 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2314 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2315 AS2 (st
,X
+,r28
) CR_TAB
2316 AS2 (st
,X
,r29
) CR_TAB
2317 AS1 (clr
,__zero_reg__
) CR_TAB
2318 AS2 (sbiw
,r26
,%o0
+3));
2320 else if (reg_src
== REG_X
- 2)
2323 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2324 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2325 AS2 (adiw
,r26
,%o0
) CR_TAB
2326 AS2 (st
,X
+,r24
) CR_TAB
2327 AS2 (st
,X
+,r25
) CR_TAB
2328 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2329 AS2 (st
,X
,__zero_reg__
) CR_TAB
2330 AS1 (clr
,__zero_reg__
) CR_TAB
2331 AS2 (sbiw
,r26
,%o0
+3));
2334 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2335 AS2 (st
,X
+,%A1
) CR_TAB
2336 AS2 (st
,X
+,%B1
) CR_TAB
2337 AS2 (st
,X
+,%C1
) CR_TAB
2338 AS2 (st
,X
,%D1
) CR_TAB
2339 AS2 (sbiw
,r26
,%o0
+3));
2341 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2342 AS2 (std
,%B0
,%B1
) CR_TAB
2343 AS2 (std
,%C0
,%C1
) CR_TAB
2346 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2347 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2348 AS2 (st
,%0,%C1
) CR_TAB
2349 AS2 (st
,%0,%B1
) CR_TAB
2351 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2352 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2353 AS2 (st
,%0,%B1
) CR_TAB
2354 AS2 (st
,%0,%C1
) CR_TAB
2356 fatal_insn ("unknown move insn:",insn
);
2361 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2364 rtx dest
= operands
[0];
2365 rtx src
= operands
[1];
2371 if (register_operand (dest
, VOIDmode
))
2373 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2375 if (true_regnum (dest
) > true_regnum (src
))
2380 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2381 AS2 (movw
,%A0
,%A1
));
2384 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2385 AS2 (mov
,%C0
,%C1
) CR_TAB
2386 AS2 (mov
,%B0
,%B1
) CR_TAB
2394 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2395 AS2 (movw
,%C0
,%C1
));
2398 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2399 AS2 (mov
,%B0
,%B1
) CR_TAB
2400 AS2 (mov
,%C0
,%C1
) CR_TAB
2404 else if (CONSTANT_P (src
))
2406 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2409 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2410 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2411 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2412 AS2 (ldi
,%D0
,hhi8(%1)));
2415 if (GET_CODE (src
) == CONST_INT
)
2417 const char *const clr_op0
=
2418 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2419 AS1 (clr
,%B0
) CR_TAB
2421 : (AS1 (clr
,%A0
) CR_TAB
2422 AS1 (clr
,%B0
) CR_TAB
2423 AS1 (clr
,%C0
) CR_TAB
2426 if (src
== const0_rtx
) /* mov r,L */
2428 *l
= AVR_HAVE_MOVW
? 3 : 4;
2431 else if (src
== const1_rtx
)
2434 output_asm_insn (clr_op0
, operands
);
2435 *l
= AVR_HAVE_MOVW
? 4 : 5;
2436 return AS1 (inc
,%A0
);
2438 else if (src
== constm1_rtx
)
2440 /* Immediate constants -1 to any register */
2444 return (AS1 (clr
,%A0
) CR_TAB
2445 AS1 (dec
,%A0
) CR_TAB
2446 AS2 (mov
,%B0
,%A0
) CR_TAB
2447 AS2 (movw
,%C0
,%A0
));
2450 return (AS1 (clr
,%A0
) CR_TAB
2451 AS1 (dec
,%A0
) CR_TAB
2452 AS2 (mov
,%B0
,%A0
) CR_TAB
2453 AS2 (mov
,%C0
,%A0
) CR_TAB
2458 int bit_nr
= exact_log2 (INTVAL (src
));
2462 *l
= AVR_HAVE_MOVW
? 5 : 6;
2465 output_asm_insn (clr_op0
, operands
);
2466 output_asm_insn ("set", operands
);
2469 avr_output_bld (operands
, bit_nr
);
2476 /* Last resort, better than loading from memory. */
2478 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2479 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2480 AS2 (mov
,%A0
,r31
) CR_TAB
2481 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2482 AS2 (mov
,%B0
,r31
) CR_TAB
2483 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2484 AS2 (mov
,%C0
,r31
) CR_TAB
2485 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2486 AS2 (mov
,%D0
,r31
) CR_TAB
2487 AS2 (mov
,r31
,__tmp_reg__
));
2489 else if (GET_CODE (src
) == MEM
)
2490 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2492 else if (GET_CODE (dest
) == MEM
)
2496 if (src
== const0_rtx
)
2497 operands
[1] = zero_reg_rtx
;
2499 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2502 output_asm_insn (templ
, operands
);
2507 fatal_insn ("invalid insn:", insn
);
2512 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2516 rtx x
= XEXP (dest
, 0);
2522 if (CONSTANT_ADDRESS_P (x
))
2524 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2527 return AS2 (out
,__SREG__
,%1);
2529 if (optimize
> 0 && io_address_operand (x
, QImode
))
2532 return AS2 (out
,%0-0x20,%1);
2535 return AS2 (sts
,%0,%1);
2537 /* memory access by reg+disp */
2538 else if (GET_CODE (x
) == PLUS
2539 && REG_P (XEXP (x
,0))
2540 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2542 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2544 int disp
= INTVAL (XEXP (x
,1));
2545 if (REGNO (XEXP (x
,0)) != REG_Y
)
2546 fatal_insn ("incorrect insn:",insn
);
2548 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2549 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2550 AS2 (std
,Y
+63,%1) CR_TAB
2551 AS2 (sbiw
,r28
,%o0
-63));
2553 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2554 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2555 AS2 (st
,Y
,%1) CR_TAB
2556 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2557 AS2 (sbci
,r29
,hi8(%o0
)));
2559 else if (REGNO (XEXP (x
,0)) == REG_X
)
2561 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2563 if (reg_unused_after (insn
, XEXP (x
,0)))
2564 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2565 AS2 (adiw
,r26
,%o0
) CR_TAB
2566 AS2 (st
,X
,__tmp_reg__
));
2568 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2569 AS2 (adiw
,r26
,%o0
) CR_TAB
2570 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2571 AS2 (sbiw
,r26
,%o0
));
2575 if (reg_unused_after (insn
, XEXP (x
,0)))
2576 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2579 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2580 AS2 (st
,X
,%1) CR_TAB
2581 AS2 (sbiw
,r26
,%o0
));
2585 return AS2 (std
,%0,%1);
2588 return AS2 (st
,%0,%1);
2592 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2596 rtx base
= XEXP (dest
, 0);
2597 int reg_base
= true_regnum (base
);
2598 int reg_src
= true_regnum (src
);
2599 /* "volatile" forces writing high byte first, even if less efficient,
2600 for correct operation with 16-bit I/O registers. */
2601 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2606 if (CONSTANT_ADDRESS_P (base
))
2608 if (optimize
> 0 && io_address_operand (base
, HImode
))
2611 return (AS2 (out
,%B0
-0x20,%B1
) CR_TAB
2612 AS2 (out
,%A0
-0x20,%A1
));
2614 return *l
= 4, (AS2 (sts
,%B0
,%B1
) CR_TAB
2619 if (reg_base
== REG_X
)
2621 if (reg_src
== REG_X
)
2623 /* "st X+,r26" and "st -X,r26" are undefined. */
2624 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2625 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2626 AS2 (st
,X
,r26
) CR_TAB
2627 AS2 (adiw
,r26
,1) CR_TAB
2628 AS2 (st
,X
,__tmp_reg__
));
2630 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2631 AS2 (adiw
,r26
,1) CR_TAB
2632 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2633 AS2 (sbiw
,r26
,1) CR_TAB
2638 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2639 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2642 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2643 AS2 (st
,X
,%B1
) CR_TAB
2648 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2651 else if (GET_CODE (base
) == PLUS
)
2653 int disp
= INTVAL (XEXP (base
, 1));
2654 reg_base
= REGNO (XEXP (base
, 0));
2655 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2657 if (reg_base
!= REG_Y
)
2658 fatal_insn ("incorrect insn:",insn
);
2660 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2661 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2662 AS2 (std
,Y
+63,%B1
) CR_TAB
2663 AS2 (std
,Y
+62,%A1
) CR_TAB
2664 AS2 (sbiw
,r28
,%o0
-62));
2666 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2667 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2668 AS2 (std
,Y
+1,%B1
) CR_TAB
2669 AS2 (st
,Y
,%A1
) CR_TAB
2670 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2671 AS2 (sbci
,r29
,hi8(%o0
)));
2673 if (reg_base
== REG_X
)
2676 if (reg_src
== REG_X
)
2679 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2680 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2681 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2682 AS2 (st
,X
,__zero_reg__
) CR_TAB
2683 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2684 AS1 (clr
,__zero_reg__
) CR_TAB
2685 AS2 (sbiw
,r26
,%o0
));
2688 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2689 AS2 (st
,X
,%B1
) CR_TAB
2690 AS2 (st
,-X
,%A1
) CR_TAB
2691 AS2 (sbiw
,r26
,%o0
));
2693 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2696 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2697 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2699 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2703 if (REGNO (XEXP (base
, 0)) == REG_X
)
2706 return (AS2 (adiw
,r26
,1) CR_TAB
2707 AS2 (st
,X
,%B1
) CR_TAB
2708 AS2 (st
,-X
,%A1
) CR_TAB
2714 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2715 AS2 (st
,%p0
,%A1
) CR_TAB
2721 return (AS2 (st
,%0,%A1
) CR_TAB
2724 fatal_insn ("unknown move insn:",insn
);
2728 /* Return 1 if frame pointer for current function required. */
2731 avr_frame_pointer_required_p (void)
2733 return (cfun
->calls_alloca
2734 || crtl
->args
.info
.nregs
== 0
2735 || get_frame_size () > 0);
2738 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2741 compare_condition (rtx insn
)
2743 rtx next
= next_real_insn (insn
);
2744 RTX_CODE cond
= UNKNOWN
;
2745 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2747 rtx pat
= PATTERN (next
);
2748 rtx src
= SET_SRC (pat
);
2749 rtx t
= XEXP (src
, 0);
2750 cond
= GET_CODE (t
);
2755 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2758 compare_sign_p (rtx insn
)
2760 RTX_CODE cond
= compare_condition (insn
);
2761 return (cond
== GE
|| cond
== LT
);
2764 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2765 that needs to be swapped (GT, GTU, LE, LEU). */
2768 compare_diff_p (rtx insn
)
2770 RTX_CODE cond
= compare_condition (insn
);
2771 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2774 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2777 compare_eq_p (rtx insn
)
2779 RTX_CODE cond
= compare_condition (insn
);
2780 return (cond
== EQ
|| cond
== NE
);
2784 /* Output test instruction for HImode. */
2787 out_tsthi (rtx insn
, rtx op
, int *l
)
2789 if (compare_sign_p (insn
))
2792 return AS1 (tst
,%B0
);
2794 if (reg_unused_after (insn
, op
)
2795 && compare_eq_p (insn
))
2797 /* Faster than sbiw if we can clobber the operand. */
2799 return "or %A0,%B0";
2801 if (test_hard_reg_class (ADDW_REGS
, op
))
2804 return AS2 (sbiw
,%0,0);
2807 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2808 AS2 (cpc
,%B0
,__zero_reg__
));
2812 /* Output test instruction for SImode. */
2815 out_tstsi (rtx insn
, rtx op
, int *l
)
2817 if (compare_sign_p (insn
))
2820 return AS1 (tst
,%D0
);
2822 if (test_hard_reg_class (ADDW_REGS
, op
))
2825 return (AS2 (sbiw
,%A0
,0) CR_TAB
2826 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2827 AS2 (cpc
,%D0
,__zero_reg__
));
2830 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2831 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2832 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2833 AS2 (cpc
,%D0
,__zero_reg__
));
2837 /* Generate asm equivalent for various shifts.
2838 Shift count is a CONST_INT, MEM or REG.
2839 This only handles cases that are not already
2840 carefully hand-optimized in ?sh??i3_out. */
2843 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
2844 int *len
, int t_len
)
2848 int second_label
= 1;
2849 int saved_in_tmp
= 0;
2850 int use_zero_reg
= 0;
2852 op
[0] = operands
[0];
2853 op
[1] = operands
[1];
2854 op
[2] = operands
[2];
2855 op
[3] = operands
[3];
2861 if (GET_CODE (operands
[2]) == CONST_INT
)
2863 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2864 int count
= INTVAL (operands
[2]);
2865 int max_len
= 10; /* If larger than this, always use a loop. */
2874 if (count
< 8 && !scratch
)
2878 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
2880 if (t_len
* count
<= max_len
)
2882 /* Output shifts inline with no loop - faster. */
2884 *len
= t_len
* count
;
2888 output_asm_insn (templ
, op
);
2897 strcat (str
, AS2 (ldi
,%3,%2));
2899 else if (use_zero_reg
)
2901 /* Hack to save one word: use __zero_reg__ as loop counter.
2902 Set one bit, then shift in a loop until it is 0 again. */
2904 op
[3] = zero_reg_rtx
;
2908 strcat (str
, ("set" CR_TAB
2909 AS2 (bld
,%3,%2-1)));
2913 /* No scratch register available, use one from LD_REGS (saved in
2914 __tmp_reg__) that doesn't overlap with registers to shift. */
2916 op
[3] = gen_rtx_REG (QImode
,
2917 ((true_regnum (operands
[0]) - 1) & 15) + 16);
2918 op
[4] = tmp_reg_rtx
;
2922 *len
= 3; /* Includes "mov %3,%4" after the loop. */
2924 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
2930 else if (GET_CODE (operands
[2]) == MEM
)
2934 op
[3] = op_mov
[0] = tmp_reg_rtx
;
2938 out_movqi_r_mr (insn
, op_mov
, len
);
2940 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
2942 else if (register_operand (operands
[2], QImode
))
2944 if (reg_unused_after (insn
, operands
[2]))
2948 op
[3] = tmp_reg_rtx
;
2950 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
2954 fatal_insn ("bad shift insn:", insn
);
2961 strcat (str
, AS1 (rjmp
,2f
));
2965 *len
+= t_len
+ 2; /* template + dec + brXX */
2968 strcat (str
, "\n1:\t");
2969 strcat (str
, templ
);
2970 strcat (str
, second_label
? "\n2:\t" : "\n\t");
2971 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
2972 strcat (str
, CR_TAB
);
2973 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
2975 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
2976 output_asm_insn (str
, op
);
2981 /* 8bit shift left ((char)x << i) */
2984 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
2986 if (GET_CODE (operands
[2]) == CONST_INT
)
2993 switch (INTVAL (operands
[2]))
2996 if (INTVAL (operands
[2]) < 8)
3000 return AS1 (clr
,%0);
3004 return AS1 (lsl
,%0);
3008 return (AS1 (lsl
,%0) CR_TAB
3013 return (AS1 (lsl
,%0) CR_TAB
3018 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3021 return (AS1 (swap
,%0) CR_TAB
3022 AS2 (andi
,%0,0xf0));
3025 return (AS1 (lsl
,%0) CR_TAB
3031 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3034 return (AS1 (swap
,%0) CR_TAB
3036 AS2 (andi
,%0,0xe0));
3039 return (AS1 (lsl
,%0) CR_TAB
3046 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3049 return (AS1 (swap
,%0) CR_TAB
3052 AS2 (andi
,%0,0xc0));
3055 return (AS1 (lsl
,%0) CR_TAB
3064 return (AS1 (ror
,%0) CR_TAB
3069 else if (CONSTANT_P (operands
[2]))
3070 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3072 out_shift_with_cnt (AS1 (lsl
,%0),
3073 insn
, operands
, len
, 1);
3078 /* 16bit shift left ((short)x << i) */
3081 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3083 if (GET_CODE (operands
[2]) == CONST_INT
)
3085 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3086 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3093 switch (INTVAL (operands
[2]))
3096 if (INTVAL (operands
[2]) < 16)
3100 return (AS1 (clr
,%B0
) CR_TAB
3104 if (optimize_size
&& scratch
)
3109 return (AS1 (swap
,%A0
) CR_TAB
3110 AS1 (swap
,%B0
) CR_TAB
3111 AS2 (andi
,%B0
,0xf0) CR_TAB
3112 AS2 (eor
,%B0
,%A0
) CR_TAB
3113 AS2 (andi
,%A0
,0xf0) CR_TAB
3119 return (AS1 (swap
,%A0
) CR_TAB
3120 AS1 (swap
,%B0
) CR_TAB
3121 AS2 (ldi
,%3,0xf0) CR_TAB
3123 AS2 (eor
,%B0
,%A0
) CR_TAB
3127 break; /* optimize_size ? 6 : 8 */
3131 break; /* scratch ? 5 : 6 */
3135 return (AS1 (lsl
,%A0
) CR_TAB
3136 AS1 (rol
,%B0
) CR_TAB
3137 AS1 (swap
,%A0
) CR_TAB
3138 AS1 (swap
,%B0
) CR_TAB
3139 AS2 (andi
,%B0
,0xf0) CR_TAB
3140 AS2 (eor
,%B0
,%A0
) CR_TAB
3141 AS2 (andi
,%A0
,0xf0) CR_TAB
3147 return (AS1 (lsl
,%A0
) CR_TAB
3148 AS1 (rol
,%B0
) CR_TAB
3149 AS1 (swap
,%A0
) CR_TAB
3150 AS1 (swap
,%B0
) CR_TAB
3151 AS2 (ldi
,%3,0xf0) CR_TAB
3153 AS2 (eor
,%B0
,%A0
) CR_TAB
3161 break; /* scratch ? 5 : 6 */
3163 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3164 AS1 (lsr
,%B0
) CR_TAB
3165 AS1 (ror
,%A0
) CR_TAB
3166 AS1 (ror
,__tmp_reg__
) CR_TAB
3167 AS1 (lsr
,%B0
) CR_TAB
3168 AS1 (ror
,%A0
) CR_TAB
3169 AS1 (ror
,__tmp_reg__
) CR_TAB
3170 AS2 (mov
,%B0
,%A0
) CR_TAB
3171 AS2 (mov
,%A0
,__tmp_reg__
));
3175 return (AS1 (lsr
,%B0
) CR_TAB
3176 AS2 (mov
,%B0
,%A0
) CR_TAB
3177 AS1 (clr
,%A0
) CR_TAB
3178 AS1 (ror
,%B0
) CR_TAB
3182 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3187 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3188 AS1 (clr
,%A0
) CR_TAB
3193 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3194 AS1 (clr
,%A0
) CR_TAB
3195 AS1 (lsl
,%B0
) CR_TAB
3200 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3201 AS1 (clr
,%A0
) CR_TAB
3202 AS1 (lsl
,%B0
) CR_TAB
3203 AS1 (lsl
,%B0
) CR_TAB
3210 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3211 AS1 (clr
,%A0
) CR_TAB
3212 AS1 (swap
,%B0
) CR_TAB
3213 AS2 (andi
,%B0
,0xf0));
3218 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3219 AS1 (clr
,%A0
) CR_TAB
3220 AS1 (swap
,%B0
) CR_TAB
3221 AS2 (ldi
,%3,0xf0) CR_TAB
3225 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3226 AS1 (clr
,%A0
) CR_TAB
3227 AS1 (lsl
,%B0
) CR_TAB
3228 AS1 (lsl
,%B0
) CR_TAB
3229 AS1 (lsl
,%B0
) CR_TAB
3236 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3237 AS1 (clr
,%A0
) CR_TAB
3238 AS1 (swap
,%B0
) CR_TAB
3239 AS1 (lsl
,%B0
) CR_TAB
3240 AS2 (andi
,%B0
,0xe0));
3242 if (AVR_HAVE_MUL
&& scratch
)
3245 return (AS2 (ldi
,%3,0x20) CR_TAB
3246 AS2 (mul
,%A0
,%3) CR_TAB
3247 AS2 (mov
,%B0
,r0
) CR_TAB
3248 AS1 (clr
,%A0
) CR_TAB
3249 AS1 (clr
,__zero_reg__
));
3251 if (optimize_size
&& scratch
)
3256 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3257 AS1 (clr
,%A0
) CR_TAB
3258 AS1 (swap
,%B0
) CR_TAB
3259 AS1 (lsl
,%B0
) CR_TAB
3260 AS2 (ldi
,%3,0xe0) CR_TAB
3266 return ("set" CR_TAB
3267 AS2 (bld
,r1
,5) CR_TAB
3268 AS2 (mul
,%A0
,r1
) CR_TAB
3269 AS2 (mov
,%B0
,r0
) CR_TAB
3270 AS1 (clr
,%A0
) CR_TAB
3271 AS1 (clr
,__zero_reg__
));
3274 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3275 AS1 (clr
,%A0
) CR_TAB
3276 AS1 (lsl
,%B0
) CR_TAB
3277 AS1 (lsl
,%B0
) CR_TAB
3278 AS1 (lsl
,%B0
) CR_TAB
3279 AS1 (lsl
,%B0
) CR_TAB
3283 if (AVR_HAVE_MUL
&& ldi_ok
)
3286 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3287 AS2 (mul
,%A0
,%B0
) CR_TAB
3288 AS2 (mov
,%B0
,r0
) CR_TAB
3289 AS1 (clr
,%A0
) CR_TAB
3290 AS1 (clr
,__zero_reg__
));
3292 if (AVR_HAVE_MUL
&& scratch
)
3295 return (AS2 (ldi
,%3,0x40) CR_TAB
3296 AS2 (mul
,%A0
,%3) CR_TAB
3297 AS2 (mov
,%B0
,r0
) CR_TAB
3298 AS1 (clr
,%A0
) CR_TAB
3299 AS1 (clr
,__zero_reg__
));
3301 if (optimize_size
&& ldi_ok
)
3304 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3305 AS2 (ldi
,%A0
,6) "\n1:\t"
3306 AS1 (lsl
,%B0
) CR_TAB
3307 AS1 (dec
,%A0
) CR_TAB
3310 if (optimize_size
&& scratch
)
3313 return (AS1 (clr
,%B0
) CR_TAB
3314 AS1 (lsr
,%A0
) CR_TAB
3315 AS1 (ror
,%B0
) CR_TAB
3316 AS1 (lsr
,%A0
) CR_TAB
3317 AS1 (ror
,%B0
) CR_TAB
3322 return (AS1 (clr
,%B0
) CR_TAB
3323 AS1 (lsr
,%A0
) CR_TAB
3324 AS1 (ror
,%B0
) CR_TAB
3329 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3331 insn
, operands
, len
, 2);
3336 /* 32bit shift left ((long)x << i) */
3339 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3341 if (GET_CODE (operands
[2]) == CONST_INT
)
3349 switch (INTVAL (operands
[2]))
3352 if (INTVAL (operands
[2]) < 32)
3356 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3357 AS1 (clr
,%C0
) CR_TAB
3358 AS2 (movw
,%A0
,%C0
));
3360 return (AS1 (clr
,%D0
) CR_TAB
3361 AS1 (clr
,%C0
) CR_TAB
3362 AS1 (clr
,%B0
) CR_TAB
3367 int reg0
= true_regnum (operands
[0]);
3368 int reg1
= true_regnum (operands
[1]);
3371 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3372 AS2 (mov
,%C0
,%B1
) CR_TAB
3373 AS2 (mov
,%B0
,%A1
) CR_TAB
3376 return (AS1 (clr
,%A0
) CR_TAB
3377 AS2 (mov
,%B0
,%A1
) CR_TAB
3378 AS2 (mov
,%C0
,%B1
) CR_TAB
3384 int reg0
= true_regnum (operands
[0]);
3385 int reg1
= true_regnum (operands
[1]);
3386 if (reg0
+ 2 == reg1
)
3387 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3390 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3391 AS1 (clr
,%B0
) CR_TAB
3394 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3395 AS2 (mov
,%D0
,%B1
) CR_TAB
3396 AS1 (clr
,%B0
) CR_TAB
3402 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3403 AS1 (clr
,%C0
) CR_TAB
3404 AS1 (clr
,%B0
) CR_TAB
3409 return (AS1 (clr
,%D0
) CR_TAB
3410 AS1 (lsr
,%A0
) CR_TAB
3411 AS1 (ror
,%D0
) CR_TAB
3412 AS1 (clr
,%C0
) CR_TAB
3413 AS1 (clr
,%B0
) CR_TAB
3418 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3419 AS1 (rol
,%B0
) CR_TAB
3420 AS1 (rol
,%C0
) CR_TAB
3422 insn
, operands
, len
, 4);
3426 /* 8bit arithmetic shift right ((signed char)x >> i) */
3429 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3431 if (GET_CODE (operands
[2]) == CONST_INT
)
3438 switch (INTVAL (operands
[2]))
3442 return AS1 (asr
,%0);
3446 return (AS1 (asr
,%0) CR_TAB
3451 return (AS1 (asr
,%0) CR_TAB
3457 return (AS1 (asr
,%0) CR_TAB
3464 return (AS1 (asr
,%0) CR_TAB
3472 return (AS2 (bst
,%0,6) CR_TAB
3474 AS2 (sbc
,%0,%0) CR_TAB
3478 if (INTVAL (operands
[2]) < 8)
3485 return (AS1 (lsl
,%0) CR_TAB
3489 else if (CONSTANT_P (operands
[2]))
3490 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3492 out_shift_with_cnt (AS1 (asr
,%0),
3493 insn
, operands
, len
, 1);
3498 /* 16bit arithmetic shift right ((signed short)x >> i) */
3501 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3503 if (GET_CODE (operands
[2]) == CONST_INT
)
3505 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3506 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3513 switch (INTVAL (operands
[2]))
3517 /* XXX try to optimize this too? */
3522 break; /* scratch ? 5 : 6 */
3524 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3525 AS2 (mov
,%A0
,%B0
) CR_TAB
3526 AS1 (lsl
,__tmp_reg__
) CR_TAB
3527 AS1 (rol
,%A0
) CR_TAB
3528 AS2 (sbc
,%B0
,%B0
) CR_TAB
3529 AS1 (lsl
,__tmp_reg__
) CR_TAB
3530 AS1 (rol
,%A0
) CR_TAB
3535 return (AS1 (lsl
,%A0
) CR_TAB
3536 AS2 (mov
,%A0
,%B0
) CR_TAB
3537 AS1 (rol
,%A0
) CR_TAB
3542 int reg0
= true_regnum (operands
[0]);
3543 int reg1
= true_regnum (operands
[1]);
3546 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3547 AS1 (lsl
,%B0
) CR_TAB
3550 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3551 AS1 (clr
,%B0
) CR_TAB
3552 AS2 (sbrc
,%A0
,7) CR_TAB
3558 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3559 AS1 (lsl
,%B0
) CR_TAB
3560 AS2 (sbc
,%B0
,%B0
) CR_TAB
3565 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3566 AS1 (lsl
,%B0
) CR_TAB
3567 AS2 (sbc
,%B0
,%B0
) CR_TAB
3568 AS1 (asr
,%A0
) CR_TAB
3572 if (AVR_HAVE_MUL
&& ldi_ok
)
3575 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3576 AS2 (muls
,%B0
,%A0
) CR_TAB
3577 AS2 (mov
,%A0
,r1
) CR_TAB
3578 AS2 (sbc
,%B0
,%B0
) CR_TAB
3579 AS1 (clr
,__zero_reg__
));
3581 if (optimize_size
&& scratch
)
3584 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3585 AS1 (lsl
,%B0
) CR_TAB
3586 AS2 (sbc
,%B0
,%B0
) CR_TAB
3587 AS1 (asr
,%A0
) CR_TAB
3588 AS1 (asr
,%A0
) CR_TAB
3592 if (AVR_HAVE_MUL
&& ldi_ok
)
3595 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3596 AS2 (muls
,%B0
,%A0
) CR_TAB
3597 AS2 (mov
,%A0
,r1
) CR_TAB
3598 AS2 (sbc
,%B0
,%B0
) CR_TAB
3599 AS1 (clr
,__zero_reg__
));
3601 if (optimize_size
&& scratch
)
3604 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3605 AS1 (lsl
,%B0
) CR_TAB
3606 AS2 (sbc
,%B0
,%B0
) CR_TAB
3607 AS1 (asr
,%A0
) CR_TAB
3608 AS1 (asr
,%A0
) CR_TAB
3609 AS1 (asr
,%A0
) CR_TAB
3613 if (AVR_HAVE_MUL
&& ldi_ok
)
3616 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3617 AS2 (muls
,%B0
,%A0
) CR_TAB
3618 AS2 (mov
,%A0
,r1
) CR_TAB
3619 AS2 (sbc
,%B0
,%B0
) CR_TAB
3620 AS1 (clr
,__zero_reg__
));
3623 break; /* scratch ? 5 : 7 */
3625 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3626 AS1 (lsl
,%B0
) CR_TAB
3627 AS2 (sbc
,%B0
,%B0
) CR_TAB
3628 AS1 (asr
,%A0
) CR_TAB
3629 AS1 (asr
,%A0
) CR_TAB
3630 AS1 (asr
,%A0
) CR_TAB
3631 AS1 (asr
,%A0
) CR_TAB
3636 return (AS1 (lsl
,%B0
) CR_TAB
3637 AS2 (sbc
,%A0
,%A0
) CR_TAB
3638 AS1 (lsl
,%B0
) CR_TAB
3639 AS2 (mov
,%B0
,%A0
) CR_TAB
3643 if (INTVAL (operands
[2]) < 16)
3649 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3650 AS2 (sbc
,%A0
,%A0
) CR_TAB
3655 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3657 insn
, operands
, len
, 2);
3662 /* 32bit arithmetic shift right ((signed long)x >> i) */
3665 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3667 if (GET_CODE (operands
[2]) == CONST_INT
)
3675 switch (INTVAL (operands
[2]))
3679 int reg0
= true_regnum (operands
[0]);
3680 int reg1
= true_regnum (operands
[1]);
3683 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3684 AS2 (mov
,%B0
,%C1
) CR_TAB
3685 AS2 (mov
,%C0
,%D1
) CR_TAB
3686 AS1 (clr
,%D0
) CR_TAB
3687 AS2 (sbrc
,%C0
,7) CR_TAB
3690 return (AS1 (clr
,%D0
) CR_TAB
3691 AS2 (sbrc
,%D1
,7) CR_TAB
3692 AS1 (dec
,%D0
) CR_TAB
3693 AS2 (mov
,%C0
,%D1
) CR_TAB
3694 AS2 (mov
,%B0
,%C1
) CR_TAB
3700 int reg0
= true_regnum (operands
[0]);
3701 int reg1
= true_regnum (operands
[1]);
3703 if (reg0
== reg1
+ 2)
3704 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3705 AS2 (sbrc
,%B0
,7) CR_TAB
3706 AS1 (com
,%D0
) CR_TAB
3709 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3710 AS1 (clr
,%D0
) CR_TAB
3711 AS2 (sbrc
,%B0
,7) CR_TAB
3712 AS1 (com
,%D0
) CR_TAB
3715 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3716 AS2 (mov
,%A0
,%C1
) CR_TAB
3717 AS1 (clr
,%D0
) CR_TAB
3718 AS2 (sbrc
,%B0
,7) CR_TAB
3719 AS1 (com
,%D0
) CR_TAB
3724 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3725 AS1 (clr
,%D0
) CR_TAB
3726 AS2 (sbrc
,%A0
,7) CR_TAB
3727 AS1 (com
,%D0
) CR_TAB
3728 AS2 (mov
,%B0
,%D0
) CR_TAB
3732 if (INTVAL (operands
[2]) < 32)
3739 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3740 AS2 (sbc
,%A0
,%A0
) CR_TAB
3741 AS2 (mov
,%B0
,%A0
) CR_TAB
3742 AS2 (movw
,%C0
,%A0
));
3744 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3745 AS2 (sbc
,%A0
,%A0
) CR_TAB
3746 AS2 (mov
,%B0
,%A0
) CR_TAB
3747 AS2 (mov
,%C0
,%A0
) CR_TAB
3752 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3753 AS1 (ror
,%C0
) CR_TAB
3754 AS1 (ror
,%B0
) CR_TAB
3756 insn
, operands
, len
, 4);
3760 /* 8bit logic shift right ((unsigned char)x >> i) */
3763 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3765 if (GET_CODE (operands
[2]) == CONST_INT
)
3772 switch (INTVAL (operands
[2]))
3775 if (INTVAL (operands
[2]) < 8)
3779 return AS1 (clr
,%0);
3783 return AS1 (lsr
,%0);
3787 return (AS1 (lsr
,%0) CR_TAB
3791 return (AS1 (lsr
,%0) CR_TAB
3796 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3799 return (AS1 (swap
,%0) CR_TAB
3800 AS2 (andi
,%0,0x0f));
3803 return (AS1 (lsr
,%0) CR_TAB
3809 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3812 return (AS1 (swap
,%0) CR_TAB
3817 return (AS1 (lsr
,%0) CR_TAB
3824 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3827 return (AS1 (swap
,%0) CR_TAB
3833 return (AS1 (lsr
,%0) CR_TAB
3842 return (AS1 (rol
,%0) CR_TAB
3847 else if (CONSTANT_P (operands
[2]))
3848 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3850 out_shift_with_cnt (AS1 (lsr
,%0),
3851 insn
, operands
, len
, 1);
3855 /* 16bit logic shift right ((unsigned short)x >> i) */
3858 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3860 if (GET_CODE (operands
[2]) == CONST_INT
)
3862 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3863 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3870 switch (INTVAL (operands
[2]))
3873 if (INTVAL (operands
[2]) < 16)
3877 return (AS1 (clr
,%B0
) CR_TAB
3881 if (optimize_size
&& scratch
)
3886 return (AS1 (swap
,%B0
) CR_TAB
3887 AS1 (swap
,%A0
) CR_TAB
3888 AS2 (andi
,%A0
,0x0f) CR_TAB
3889 AS2 (eor
,%A0
,%B0
) CR_TAB
3890 AS2 (andi
,%B0
,0x0f) CR_TAB
3896 return (AS1 (swap
,%B0
) CR_TAB
3897 AS1 (swap
,%A0
) CR_TAB
3898 AS2 (ldi
,%3,0x0f) CR_TAB
3900 AS2 (eor
,%A0
,%B0
) CR_TAB
3904 break; /* optimize_size ? 6 : 8 */
3908 break; /* scratch ? 5 : 6 */
3912 return (AS1 (lsr
,%B0
) CR_TAB
3913 AS1 (ror
,%A0
) CR_TAB
3914 AS1 (swap
,%B0
) CR_TAB
3915 AS1 (swap
,%A0
) CR_TAB
3916 AS2 (andi
,%A0
,0x0f) CR_TAB
3917 AS2 (eor
,%A0
,%B0
) CR_TAB
3918 AS2 (andi
,%B0
,0x0f) CR_TAB
3924 return (AS1 (lsr
,%B0
) CR_TAB
3925 AS1 (ror
,%A0
) CR_TAB
3926 AS1 (swap
,%B0
) CR_TAB
3927 AS1 (swap
,%A0
) CR_TAB
3928 AS2 (ldi
,%3,0x0f) CR_TAB
3930 AS2 (eor
,%A0
,%B0
) CR_TAB
3938 break; /* scratch ? 5 : 6 */
3940 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3941 AS1 (lsl
,%A0
) CR_TAB
3942 AS1 (rol
,%B0
) CR_TAB
3943 AS1 (rol
,__tmp_reg__
) CR_TAB
3944 AS1 (lsl
,%A0
) CR_TAB
3945 AS1 (rol
,%B0
) CR_TAB
3946 AS1 (rol
,__tmp_reg__
) CR_TAB
3947 AS2 (mov
,%A0
,%B0
) CR_TAB
3948 AS2 (mov
,%B0
,__tmp_reg__
));
3952 return (AS1 (lsl
,%A0
) CR_TAB
3953 AS2 (mov
,%A0
,%B0
) CR_TAB
3954 AS1 (rol
,%A0
) CR_TAB
3955 AS2 (sbc
,%B0
,%B0
) CR_TAB
3959 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
3964 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3965 AS1 (clr
,%B0
) CR_TAB
3970 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3971 AS1 (clr
,%B0
) CR_TAB
3972 AS1 (lsr
,%A0
) CR_TAB
3977 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3978 AS1 (clr
,%B0
) CR_TAB
3979 AS1 (lsr
,%A0
) CR_TAB
3980 AS1 (lsr
,%A0
) CR_TAB
3987 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3988 AS1 (clr
,%B0
) CR_TAB
3989 AS1 (swap
,%A0
) CR_TAB
3990 AS2 (andi
,%A0
,0x0f));
3995 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3996 AS1 (clr
,%B0
) CR_TAB
3997 AS1 (swap
,%A0
) CR_TAB
3998 AS2 (ldi
,%3,0x0f) CR_TAB
4002 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4003 AS1 (clr
,%B0
) CR_TAB
4004 AS1 (lsr
,%A0
) CR_TAB
4005 AS1 (lsr
,%A0
) CR_TAB
4006 AS1 (lsr
,%A0
) CR_TAB
4013 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4014 AS1 (clr
,%B0
) CR_TAB
4015 AS1 (swap
,%A0
) CR_TAB
4016 AS1 (lsr
,%A0
) CR_TAB
4017 AS2 (andi
,%A0
,0x07));
4019 if (AVR_HAVE_MUL
&& scratch
)
4022 return (AS2 (ldi
,%3,0x08) CR_TAB
4023 AS2 (mul
,%B0
,%3) CR_TAB
4024 AS2 (mov
,%A0
,r1
) CR_TAB
4025 AS1 (clr
,%B0
) CR_TAB
4026 AS1 (clr
,__zero_reg__
));
4028 if (optimize_size
&& scratch
)
4033 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4034 AS1 (clr
,%B0
) CR_TAB
4035 AS1 (swap
,%A0
) CR_TAB
4036 AS1 (lsr
,%A0
) CR_TAB
4037 AS2 (ldi
,%3,0x07) CR_TAB
4043 return ("set" CR_TAB
4044 AS2 (bld
,r1
,3) CR_TAB
4045 AS2 (mul
,%B0
,r1
) CR_TAB
4046 AS2 (mov
,%A0
,r1
) CR_TAB
4047 AS1 (clr
,%B0
) CR_TAB
4048 AS1 (clr
,__zero_reg__
));
4051 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4052 AS1 (clr
,%B0
) CR_TAB
4053 AS1 (lsr
,%A0
) CR_TAB
4054 AS1 (lsr
,%A0
) CR_TAB
4055 AS1 (lsr
,%A0
) CR_TAB
4056 AS1 (lsr
,%A0
) CR_TAB
4060 if (AVR_HAVE_MUL
&& ldi_ok
)
4063 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4064 AS2 (mul
,%B0
,%A0
) CR_TAB
4065 AS2 (mov
,%A0
,r1
) CR_TAB
4066 AS1 (clr
,%B0
) CR_TAB
4067 AS1 (clr
,__zero_reg__
));
4069 if (AVR_HAVE_MUL
&& scratch
)
4072 return (AS2 (ldi
,%3,0x04) CR_TAB
4073 AS2 (mul
,%B0
,%3) CR_TAB
4074 AS2 (mov
,%A0
,r1
) CR_TAB
4075 AS1 (clr
,%B0
) CR_TAB
4076 AS1 (clr
,__zero_reg__
));
4078 if (optimize_size
&& ldi_ok
)
4081 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4082 AS2 (ldi
,%B0
,6) "\n1:\t"
4083 AS1 (lsr
,%A0
) CR_TAB
4084 AS1 (dec
,%B0
) CR_TAB
4087 if (optimize_size
&& scratch
)
4090 return (AS1 (clr
,%A0
) CR_TAB
4091 AS1 (lsl
,%B0
) CR_TAB
4092 AS1 (rol
,%A0
) CR_TAB
4093 AS1 (lsl
,%B0
) CR_TAB
4094 AS1 (rol
,%A0
) CR_TAB
4099 return (AS1 (clr
,%A0
) CR_TAB
4100 AS1 (lsl
,%B0
) CR_TAB
4101 AS1 (rol
,%A0
) CR_TAB
4106 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4108 insn
, operands
, len
, 2);
4112 /* 32bit logic shift right ((unsigned int)x >> i) */
4115 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4117 if (GET_CODE (operands
[2]) == CONST_INT
)
4125 switch (INTVAL (operands
[2]))
4128 if (INTVAL (operands
[2]) < 32)
4132 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4133 AS1 (clr
,%C0
) CR_TAB
4134 AS2 (movw
,%A0
,%C0
));
4136 return (AS1 (clr
,%D0
) CR_TAB
4137 AS1 (clr
,%C0
) CR_TAB
4138 AS1 (clr
,%B0
) CR_TAB
4143 int reg0
= true_regnum (operands
[0]);
4144 int reg1
= true_regnum (operands
[1]);
4147 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4148 AS2 (mov
,%B0
,%C1
) CR_TAB
4149 AS2 (mov
,%C0
,%D1
) CR_TAB
4152 return (AS1 (clr
,%D0
) CR_TAB
4153 AS2 (mov
,%C0
,%D1
) CR_TAB
4154 AS2 (mov
,%B0
,%C1
) CR_TAB
4160 int reg0
= true_regnum (operands
[0]);
4161 int reg1
= true_regnum (operands
[1]);
4163 if (reg0
== reg1
+ 2)
4164 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4167 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4168 AS1 (clr
,%C0
) CR_TAB
4171 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4172 AS2 (mov
,%A0
,%C1
) CR_TAB
4173 AS1 (clr
,%C0
) CR_TAB
4178 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4179 AS1 (clr
,%B0
) CR_TAB
4180 AS1 (clr
,%C0
) CR_TAB
4185 return (AS1 (clr
,%A0
) CR_TAB
4186 AS2 (sbrc
,%D0
,7) CR_TAB
4187 AS1 (inc
,%A0
) CR_TAB
4188 AS1 (clr
,%B0
) CR_TAB
4189 AS1 (clr
,%C0
) CR_TAB
4194 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4195 AS1 (ror
,%C0
) CR_TAB
4196 AS1 (ror
,%B0
) CR_TAB
4198 insn
, operands
, len
, 4);
4202 /* Modifies the length assigned to instruction INSN
4203 LEN is the initially computed length of the insn. */
4206 adjust_insn_length (rtx insn
, int len
)
4208 rtx patt
= PATTERN (insn
);
4211 if (GET_CODE (patt
) == SET
)
4214 op
[1] = SET_SRC (patt
);
4215 op
[0] = SET_DEST (patt
);
4216 if (general_operand (op
[1], VOIDmode
)
4217 && general_operand (op
[0], VOIDmode
))
4219 switch (GET_MODE (op
[0]))
4222 output_movqi (insn
, op
, &len
);
4225 output_movhi (insn
, op
, &len
);
4229 output_movsisf (insn
, op
, &len
);
4235 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4237 switch (GET_MODE (op
[1]))
4239 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4240 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4244 else if (GET_CODE (op
[1]) == AND
)
4246 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4248 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4249 if (GET_MODE (op
[1]) == SImode
)
4250 len
= (((mask
& 0xff) != 0xff)
4251 + ((mask
& 0xff00) != 0xff00)
4252 + ((mask
& 0xff0000L
) != 0xff0000L
)
4253 + ((mask
& 0xff000000L
) != 0xff000000L
));
4254 else if (GET_MODE (op
[1]) == HImode
)
4255 len
= (((mask
& 0xff) != 0xff)
4256 + ((mask
& 0xff00) != 0xff00));
4259 else if (GET_CODE (op
[1]) == IOR
)
4261 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4263 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4264 if (GET_MODE (op
[1]) == SImode
)
4265 len
= (((mask
& 0xff) != 0)
4266 + ((mask
& 0xff00) != 0)
4267 + ((mask
& 0xff0000L
) != 0)
4268 + ((mask
& 0xff000000L
) != 0));
4269 else if (GET_MODE (op
[1]) == HImode
)
4270 len
= (((mask
& 0xff) != 0)
4271 + ((mask
& 0xff00) != 0));
4275 set
= single_set (insn
);
4280 op
[1] = SET_SRC (set
);
4281 op
[0] = SET_DEST (set
);
4283 if (GET_CODE (patt
) == PARALLEL
4284 && general_operand (op
[1], VOIDmode
)
4285 && general_operand (op
[0], VOIDmode
))
4287 if (XVECLEN (patt
, 0) == 2)
4288 op
[2] = XVECEXP (patt
, 0, 1);
4290 switch (GET_MODE (op
[0]))
4296 output_reload_inhi (insn
, op
, &len
);
4300 output_reload_insisf (insn
, op
, &len
);
4306 else if (GET_CODE (op
[1]) == ASHIFT
4307 || GET_CODE (op
[1]) == ASHIFTRT
4308 || GET_CODE (op
[1]) == LSHIFTRT
)
4312 ops
[1] = XEXP (op
[1],0);
4313 ops
[2] = XEXP (op
[1],1);
4314 switch (GET_CODE (op
[1]))
4317 switch (GET_MODE (op
[0]))
4319 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4320 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4321 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4326 switch (GET_MODE (op
[0]))
4328 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4329 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4330 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4335 switch (GET_MODE (op
[0]))
4337 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4338 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4339 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4351 /* Return nonzero if register REG dead after INSN. */
4354 reg_unused_after (rtx insn
, rtx reg
)
4356 return (dead_or_set_p (insn
, reg
)
4357 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4360 /* Return nonzero if REG is not used after INSN.
4361 We assume REG is a reload reg, and therefore does
4362 not live past labels. It may live past calls or jumps though. */
4365 _reg_unused_after (rtx insn
, rtx reg
)
4370 /* If the reg is set by this instruction, then it is safe for our
4371 case. Disregard the case where this is a store to memory, since
4372 we are checking a register used in the store address. */
4373 set
= single_set (insn
);
4374 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4375 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4378 while ((insn
= NEXT_INSN (insn
)))
4381 code
= GET_CODE (insn
);
4384 /* If this is a label that existed before reload, then the register
4385 if dead here. However, if this is a label added by reorg, then
4386 the register may still be live here. We can't tell the difference,
4387 so we just ignore labels completely. */
4388 if (code
== CODE_LABEL
)
4396 if (code
== JUMP_INSN
)
4399 /* If this is a sequence, we must handle them all at once.
4400 We could have for instance a call that sets the target register,
4401 and an insn in a delay slot that uses the register. In this case,
4402 we must return 0. */
4403 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4408 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4410 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4411 rtx set
= single_set (this_insn
);
4413 if (GET_CODE (this_insn
) == CALL_INSN
)
4415 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4417 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4422 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4424 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4426 if (GET_CODE (SET_DEST (set
)) != MEM
)
4432 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4437 else if (code
== JUMP_INSN
)
4441 if (code
== CALL_INSN
)
4444 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4445 if (GET_CODE (XEXP (tem
, 0)) == USE
4446 && REG_P (XEXP (XEXP (tem
, 0), 0))
4447 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4449 if (call_used_regs
[REGNO (reg
)])
4453 set
= single_set (insn
);
4455 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4457 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4458 return GET_CODE (SET_DEST (set
)) != MEM
;
4459 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4465 /* Target hook for assembling integer objects. The AVR version needs
4466 special handling for references to certain labels. */
4469 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4471 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4472 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
4473 || GET_CODE (x
) == LABEL_REF
))
4475 fputs ("\t.word\tgs(", asm_out_file
);
4476 output_addr_const (asm_out_file
, x
);
4477 fputs (")\n", asm_out_file
);
4480 return default_assemble_integer (x
, size
, aligned_p
);
4483 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4486 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4489 /* If the function has the 'signal' or 'interrupt' attribute, test to
4490 make sure that the name of the function is "__vector_NN" so as to
4491 catch when the user misspells the interrupt vector name. */
4493 if (cfun
->machine
->is_interrupt
)
4495 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4497 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4498 "%qs appears to be a misspelled interrupt handler",
4502 else if (cfun
->machine
->is_signal
)
4504 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4506 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4507 "%qs appears to be a misspelled signal handler",
4512 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4513 ASM_OUTPUT_LABEL (file
, name
);
4516 /* The routine used to output NUL terminated strings. We use a special
4517 version of this for most svr4 targets because doing so makes the
4518 generated assembly code more compact (and thus faster to assemble)
4519 as well as more readable, especially for targets like the i386
4520 (where the only alternative is to output character sequences as
4521 comma separated lists of numbers). */
4524 gas_output_limited_string(FILE *file
, const char *str
)
4526 const unsigned char *_limited_str
= (const unsigned char *) str
;
4528 fprintf (file
, "%s\"", STRING_ASM_OP
);
4529 for (; (ch
= *_limited_str
); _limited_str
++)
4532 switch (escape
= ESCAPES
[ch
])
4538 fprintf (file
, "\\%03o", ch
);
4542 putc (escape
, file
);
4546 fprintf (file
, "\"\n");
4549 /* The routine used to output sequences of byte values. We use a special
4550 version of this for most svr4 targets because doing so makes the
4551 generated assembly code more compact (and thus faster to assemble)
4552 as well as more readable. Note that if we find subparts of the
4553 character sequence which end with NUL (and which are shorter than
4554 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4557 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4559 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4560 const unsigned char *limit
= _ascii_bytes
+ length
;
4561 unsigned bytes_in_chunk
= 0;
4562 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4564 const unsigned char *p
;
4565 if (bytes_in_chunk
>= 60)
4567 fprintf (file
, "\"\n");
4570 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4572 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4574 if (bytes_in_chunk
> 0)
4576 fprintf (file
, "\"\n");
4579 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4586 if (bytes_in_chunk
== 0)
4587 fprintf (file
, "\t.ascii\t\"");
4588 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4595 fprintf (file
, "\\%03o", ch
);
4596 bytes_in_chunk
+= 4;
4600 putc (escape
, file
);
4601 bytes_in_chunk
+= 2;
4606 if (bytes_in_chunk
> 0)
4607 fprintf (file
, "\"\n");
4610 /* Return value is nonzero if pseudos that have been
4611 assigned to registers of class CLASS would likely be spilled
4612 because registers of CLASS are needed for spill registers. */
4615 class_likely_spilled_p (int c
)
4617 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4620 /* Valid attributes:
4621 progmem - put data to program memory;
4622 signal - make a function to be hardware interrupt. After function
4623 prologue interrupts are disabled;
4624 interrupt - make a function to be hardware interrupt. After function
4625 prologue interrupts are enabled;
4626 naked - don't generate function prologue/epilogue and `ret' command.
4628 Only `progmem' attribute valid for type. */
4630 /* Handle a "progmem" attribute; arguments as in
4631 struct attribute_spec.handler. */
4633 avr_handle_progmem_attribute (tree
*node
, tree name
,
4634 tree args ATTRIBUTE_UNUSED
,
4635 int flags ATTRIBUTE_UNUSED
,
4640 if (TREE_CODE (*node
) == TYPE_DECL
)
4642 /* This is really a decl attribute, not a type attribute,
4643 but try to handle it for GCC 3.0 backwards compatibility. */
4645 tree type
= TREE_TYPE (*node
);
4646 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4647 tree newtype
= build_type_attribute_variant (type
, attr
);
4649 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4650 TREE_TYPE (*node
) = newtype
;
4651 *no_add_attrs
= true;
4653 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4655 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4657 warning (0, "only initialized variables can be placed into "
4658 "program memory area");
4659 *no_add_attrs
= true;
4664 warning (OPT_Wattributes
, "%qE attribute ignored",
4666 *no_add_attrs
= true;
4673 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4674 struct attribute_spec.handler. */
4677 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4678 tree args ATTRIBUTE_UNUSED
,
4679 int flags ATTRIBUTE_UNUSED
,
4682 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4684 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4686 *no_add_attrs
= true;
4693 avr_handle_fntype_attribute (tree
*node
, tree name
,
4694 tree args ATTRIBUTE_UNUSED
,
4695 int flags ATTRIBUTE_UNUSED
,
4698 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4700 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4702 *no_add_attrs
= true;
4708 /* Look for attribute `progmem' in DECL
4709 if found return 1, otherwise 0. */
4712 avr_progmem_p (tree decl
, tree attributes
)
4716 if (TREE_CODE (decl
) != VAR_DECL
)
4720 != lookup_attribute ("progmem", attributes
))
4726 while (TREE_CODE (a
) == ARRAY_TYPE
);
4728 if (a
== error_mark_node
)
4731 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4737 /* Add the section attribute if the variable is in progmem. */
4740 avr_insert_attributes (tree node
, tree
*attributes
)
4742 if (TREE_CODE (node
) == VAR_DECL
4743 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4744 && avr_progmem_p (node
, *attributes
))
4746 static const char dsec
[] = ".progmem.data";
4747 *attributes
= tree_cons (get_identifier ("section"),
4748 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4751 /* ??? This seems sketchy. Why can't the user declare the
4752 thing const in the first place? */
4753 TREE_READONLY (node
) = 1;
4757 /* A get_unnamed_section callback for switching to progmem_section. */
4760 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4762 fprintf (asm_out_file
,
4763 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4764 AVR_HAVE_JMP_CALL
? "a" : "ax");
4765 /* Should already be aligned, this is just to be safe if it isn't. */
4766 fprintf (asm_out_file
, "\t.p2align 1\n");
4769 /* Implement TARGET_ASM_INIT_SECTIONS. */
4772 avr_asm_init_sections (void)
4774 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
4775 avr_output_progmem_section_asm_op
,
4777 readonly_data_section
= data_section
;
4781 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4783 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4785 if (strncmp (name
, ".noinit", 7) == 0)
4787 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4788 && DECL_INITIAL (decl
) == NULL_TREE
)
4789 flags
|= SECTION_BSS
; /* @nobits */
4791 warning (0, "only uninitialized variables can be placed in the "
4798 /* Outputs some appropriate text to go at the start of an assembler
4802 avr_file_start (void)
4804 if (avr_current_arch
->asm_only
)
4805 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4807 default_file_start ();
4809 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4810 fputs ("__SREG__ = 0x3f\n"
4812 "__SP_L__ = 0x3d\n", asm_out_file
);
4814 fputs ("__tmp_reg__ = 0\n"
4815 "__zero_reg__ = 1\n", asm_out_file
);
4817 /* FIXME: output these only if there is anything in the .data / .bss
4818 sections - some code size could be saved by not linking in the
4819 initialization code from libgcc if one or both sections are empty. */
4820 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4821 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4824 /* Outputs to the stdio stream FILE some
4825 appropriate text to go at the end of an assembler file. */
4832 /* Choose the order in which to allocate hard registers for
4833 pseudo-registers local to a basic block.
4835 Store the desired register order in the array `reg_alloc_order'.
4836 Element 0 should be the register to allocate first; element 1, the
4837 next register; and so on. */
4840 order_regs_for_local_alloc (void)
4843 static const int order_0
[] = {
4851 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4855 static const int order_1
[] = {
4863 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4867 static const int order_2
[] = {
4876 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4881 const int *order
= (TARGET_ORDER_1
? order_1
:
4882 TARGET_ORDER_2
? order_2
:
4884 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
4885 reg_alloc_order
[i
] = order
[i
];
4889 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4890 cost of an RTX operand given its context. X is the rtx of the
4891 operand, MODE is its mode, and OUTER is the rtx_code of this
4892 operand's parent operator. */
4895 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
4898 enum rtx_code code
= GET_CODE (x
);
4909 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4916 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
4920 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4921 is to be calculated. Return true if the complete cost has been
4922 computed, and false if subexpressions should be scanned. In either
4923 case, *TOTAL contains the cost result. */
4926 avr_rtx_costs (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
4929 enum rtx_code code
= (enum rtx_code
) codearg
;
4930 enum machine_mode mode
= GET_MODE (x
);
4937 /* Immediate constants are as cheap as registers. */
4945 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4953 *total
= COSTS_N_INSNS (1);
4957 *total
= COSTS_N_INSNS (3);
4961 *total
= COSTS_N_INSNS (7);
4967 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
4975 *total
= COSTS_N_INSNS (1);
4981 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
4985 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4986 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
4990 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
4991 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
4992 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
4996 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
4997 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
4998 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5005 *total
= COSTS_N_INSNS (1);
5006 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5007 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5011 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5013 *total
= COSTS_N_INSNS (2);
5014 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5016 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5017 *total
= COSTS_N_INSNS (1);
5019 *total
= COSTS_N_INSNS (2);
5023 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5025 *total
= COSTS_N_INSNS (4);
5026 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5028 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5029 *total
= COSTS_N_INSNS (1);
5031 *total
= COSTS_N_INSNS (4);
5037 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5043 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5044 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5045 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5046 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5050 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5051 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5052 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5060 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5062 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5069 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5071 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5079 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5080 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5088 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5091 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5092 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5099 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5100 *total
= COSTS_N_INSNS (1);
5105 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5106 *total
= COSTS_N_INSNS (3);
5111 if (CONST_INT_P (XEXP (x
, 1)))
5112 switch (INTVAL (XEXP (x
, 1)))
5116 *total
= COSTS_N_INSNS (5);
5119 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5127 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5134 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5136 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5137 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5141 val
= INTVAL (XEXP (x
, 1));
5143 *total
= COSTS_N_INSNS (3);
5144 else if (val
>= 0 && val
<= 7)
5145 *total
= COSTS_N_INSNS (val
);
5147 *total
= COSTS_N_INSNS (1);
5152 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5154 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5155 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5158 switch (INTVAL (XEXP (x
, 1)))
5165 *total
= COSTS_N_INSNS (2);
5168 *total
= COSTS_N_INSNS (3);
5174 *total
= COSTS_N_INSNS (4);
5179 *total
= COSTS_N_INSNS (5);
5182 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5185 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5188 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5191 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5192 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5197 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5199 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5200 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5203 switch (INTVAL (XEXP (x
, 1)))
5209 *total
= COSTS_N_INSNS (3);
5214 *total
= COSTS_N_INSNS (4);
5217 *total
= COSTS_N_INSNS (6);
5220 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5223 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5224 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5231 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5238 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5240 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5241 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5245 val
= INTVAL (XEXP (x
, 1));
5247 *total
= COSTS_N_INSNS (4);
5249 *total
= COSTS_N_INSNS (2);
5250 else if (val
>= 0 && val
<= 7)
5251 *total
= COSTS_N_INSNS (val
);
5253 *total
= COSTS_N_INSNS (1);
5258 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5260 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5261 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5264 switch (INTVAL (XEXP (x
, 1)))
5270 *total
= COSTS_N_INSNS (2);
5273 *total
= COSTS_N_INSNS (3);
5279 *total
= COSTS_N_INSNS (4);
5283 *total
= COSTS_N_INSNS (5);
5286 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5289 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5293 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5296 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5297 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5302 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5304 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5305 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5308 switch (INTVAL (XEXP (x
, 1)))
5314 *total
= COSTS_N_INSNS (4);
5319 *total
= COSTS_N_INSNS (6);
5322 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5325 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5328 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5329 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5336 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5343 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5345 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5346 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5350 val
= INTVAL (XEXP (x
, 1));
5352 *total
= COSTS_N_INSNS (3);
5353 else if (val
>= 0 && val
<= 7)
5354 *total
= COSTS_N_INSNS (val
);
5356 *total
= COSTS_N_INSNS (1);
5361 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5363 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5364 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5367 switch (INTVAL (XEXP (x
, 1)))
5374 *total
= COSTS_N_INSNS (2);
5377 *total
= COSTS_N_INSNS (3);
5382 *total
= COSTS_N_INSNS (4);
5386 *total
= COSTS_N_INSNS (5);
5392 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5395 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5399 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5402 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5403 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5408 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5410 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5411 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5414 switch (INTVAL (XEXP (x
, 1)))
5420 *total
= COSTS_N_INSNS (4);
5423 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5428 *total
= COSTS_N_INSNS (4);
5431 *total
= COSTS_N_INSNS (6);
5434 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5435 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5442 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5446 switch (GET_MODE (XEXP (x
, 0)))
5449 *total
= COSTS_N_INSNS (1);
5450 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5451 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5455 *total
= COSTS_N_INSNS (2);
5456 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5457 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5458 else if (INTVAL (XEXP (x
, 1)) != 0)
5459 *total
+= COSTS_N_INSNS (1);
5463 *total
= COSTS_N_INSNS (4);
5464 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5465 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5466 else if (INTVAL (XEXP (x
, 1)) != 0)
5467 *total
+= COSTS_N_INSNS (3);
5473 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5482 /* Calculate the cost of a memory address. */
5485 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
5487 if (GET_CODE (x
) == PLUS
5488 && GET_CODE (XEXP (x
,1)) == CONST_INT
5489 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5490 && INTVAL (XEXP (x
,1)) >= 61)
5492 if (CONSTANT_ADDRESS_P (x
))
5494 if (optimize
> 0 && io_address_operand (x
, QImode
))
5501 /* Test for extra memory constraint 'Q'.
5502 It's a memory address based on Y or Z pointer with valid displacement. */
5505 extra_constraint_Q (rtx x
)
5507 if (GET_CODE (XEXP (x
,0)) == PLUS
5508 && REG_P (XEXP (XEXP (x
,0), 0))
5509 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5510 && (INTVAL (XEXP (XEXP (x
,0), 1))
5511 <= MAX_LD_OFFSET (GET_MODE (x
))))
5513 rtx xx
= XEXP (XEXP (x
,0), 0);
5514 int regno
= REGNO (xx
);
5515 if (TARGET_ALL_DEBUG
)
5517 fprintf (stderr
, ("extra_constraint:\n"
5518 "reload_completed: %d\n"
5519 "reload_in_progress: %d\n"),
5520 reload_completed
, reload_in_progress
);
5523 if (regno
>= FIRST_PSEUDO_REGISTER
)
5524 return 1; /* allocate pseudos */
5525 else if (regno
== REG_Z
|| regno
== REG_Y
)
5526 return 1; /* strictly check */
5527 else if (xx
== frame_pointer_rtx
5528 || xx
== arg_pointer_rtx
)
5529 return 1; /* XXX frame & arg pointer checks */
5534 /* Convert condition code CONDITION to the valid AVR condition code. */
5537 avr_normalize_condition (RTX_CODE condition
)
5554 /* This function optimizes conditional jumps. */
5561 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5563 if (! (GET_CODE (insn
) == INSN
5564 || GET_CODE (insn
) == CALL_INSN
5565 || GET_CODE (insn
) == JUMP_INSN
)
5566 || !single_set (insn
))
5569 pattern
= PATTERN (insn
);
5571 if (GET_CODE (pattern
) == PARALLEL
)
5572 pattern
= XVECEXP (pattern
, 0, 0);
5573 if (GET_CODE (pattern
) == SET
5574 && SET_DEST (pattern
) == cc0_rtx
5575 && compare_diff_p (insn
))
5577 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5579 /* Now we work under compare insn. */
5581 pattern
= SET_SRC (pattern
);
5582 if (true_regnum (XEXP (pattern
,0)) >= 0
5583 && true_regnum (XEXP (pattern
,1)) >= 0 )
5585 rtx x
= XEXP (pattern
,0);
5586 rtx next
= next_real_insn (insn
);
5587 rtx pat
= PATTERN (next
);
5588 rtx src
= SET_SRC (pat
);
5589 rtx t
= XEXP (src
,0);
5590 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5591 XEXP (pattern
,0) = XEXP (pattern
,1);
5592 XEXP (pattern
,1) = x
;
5593 INSN_CODE (next
) = -1;
5595 else if (true_regnum (XEXP (pattern
, 0)) >= 0
5596 && XEXP (pattern
, 1) == const0_rtx
)
5598 /* This is a tst insn, we can reverse it. */
5599 rtx next
= next_real_insn (insn
);
5600 rtx pat
= PATTERN (next
);
5601 rtx src
= SET_SRC (pat
);
5602 rtx t
= XEXP (src
,0);
5604 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5605 XEXP (pattern
, 1) = XEXP (pattern
, 0);
5606 XEXP (pattern
, 0) = const0_rtx
;
5607 INSN_CODE (next
) = -1;
5608 INSN_CODE (insn
) = -1;
5610 else if (true_regnum (XEXP (pattern
,0)) >= 0
5611 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5613 rtx x
= XEXP (pattern
,1);
5614 rtx next
= next_real_insn (insn
);
5615 rtx pat
= PATTERN (next
);
5616 rtx src
= SET_SRC (pat
);
5617 rtx t
= XEXP (src
,0);
5618 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5620 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5622 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5623 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5624 INSN_CODE (next
) = -1;
5625 INSN_CODE (insn
) = -1;
/* Returns register number for function return value: r24, the low
   half of the r24:r25 return pair.  */

int
avr_ret_register (void)
{
  return 24;
}
5641 /* Create an RTX representing the place where a
5642 library function returns a value of mode MODE. */
5645 avr_libcall_value (enum machine_mode mode
)
5647 int offs
= GET_MODE_SIZE (mode
);
5650 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5653 /* Create an RTX representing the place where a
5654 function returns a value of data type VALTYPE. */
5657 avr_function_value (const_tree type
,
5658 const_tree func ATTRIBUTE_UNUSED
,
5659 bool outgoing ATTRIBUTE_UNUSED
)
5663 if (TYPE_MODE (type
) != BLKmode
)
5664 return avr_libcall_value (TYPE_MODE (type
));
5666 offs
= int_size_in_bytes (type
);
5669 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5670 offs
= GET_MODE_SIZE (SImode
);
5671 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5672 offs
= GET_MODE_SIZE (DImode
);
5674 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5677 /* Places additional restrictions on the register class to
5678 use when it is necessary to copy value X into a register
5682 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
5688 test_hard_reg_class (enum reg_class rclass
, rtx x
)
5690 int regno
= true_regnum (x
);
5694 if (TEST_HARD_REG_CLASS (rclass
, regno
))
5702 jump_over_one_insn_p (rtx insn
, rtx dest
)
5704 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5707 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5708 int dest_addr
= INSN_ADDRESSES (uid
);
5709 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5712 /* Returns 1 if a value of mode MODE can be stored starting with hard
5713 register number REGNO. On the enhanced core, anything larger than
5714 1 byte must start in even numbered register for "movw" to work
5715 (this way we don't have to check for odd registers everywhere). */
5718 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5720 /* Disallow QImode in stack pointer regs. */
5721 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5724 /* The only thing that can go into registers r28:r29 is a Pmode. */
5725 if (regno
== REG_Y
&& mode
== Pmode
)
5728 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5729 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5735 /* Modes larger than QImode occupy consecutive registers. */
5736 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5739 /* All modes larger than QImode should start in an even register. */
5740 return !(regno
& 1);
5744 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5750 if (GET_CODE (operands
[1]) == CONST_INT
)
5752 int val
= INTVAL (operands
[1]);
5753 if ((val
& 0xff) == 0)
5756 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5757 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5760 else if ((val
& 0xff00) == 0)
5763 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5764 AS2 (mov
,%A0
,%2) CR_TAB
5765 AS2 (mov
,%B0
,__zero_reg__
));
5767 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5770 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5771 AS2 (mov
,%A0
,%2) CR_TAB
5776 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5777 AS2 (mov
,%A0
,%2) CR_TAB
5778 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5784 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5786 rtx src
= operands
[1];
5787 int cnst
= (GET_CODE (src
) == CONST_INT
);
5792 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5793 + ((INTVAL (src
) & 0xff00) != 0)
5794 + ((INTVAL (src
) & 0xff0000) != 0)
5795 + ((INTVAL (src
) & 0xff000000) != 0);
5802 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5803 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5806 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5807 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
5809 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5810 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5813 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5814 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
5816 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5817 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5820 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5821 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
5823 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5824 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5827 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5828 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5834 avr_output_bld (rtx operands
[], int bit_nr
)
5836 static char s
[] = "bld %A0,0";
5838 s
[5] = 'A' + (bit_nr
>> 3);
5839 s
[8] = '0' + (bit_nr
& 7);
5840 output_asm_insn (s
, operands
);
5844 avr_output_addr_vec_elt (FILE *stream
, int value
)
5846 switch_to_section (progmem_section
);
5847 if (AVR_HAVE_JMP_CALL
)
5848 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
5850 fprintf (stream
, "\trjmp .L%d\n", value
);
5853 /* Returns true if SCRATCH are safe to be allocated as a scratch
5854 registers (for a define_peephole2) in the current function. */
5857 avr_hard_regno_scratch_ok (unsigned int regno
)
5859 /* Interrupt functions can only use registers that have already been saved
5860 by the prologue, even if they would normally be call-clobbered. */
5862 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
5863 && !df_regs_ever_live_p (regno
))
5869 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5872 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
5873 unsigned int new_reg
)
5875 /* Interrupt functions can only use registers that have already been
5876 saved by the prologue, even if they would normally be
5879 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
5880 && !df_regs_ever_live_p (new_reg
))
5886 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5887 or memory location in the I/O space (QImode only).
5889 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5890 Operand 1: register operand to test, or CONST_INT memory address.
5891 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5892 Operand 3: label to jump to if the test is true. */
5895 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
5897 enum rtx_code comp
= GET_CODE (operands
[0]);
5898 int long_jump
= (get_attr_length (insn
) >= 4);
5899 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
5903 else if (comp
== LT
)
5907 comp
= reverse_condition (comp
);
5909 if (GET_CODE (operands
[1]) == CONST_INT
)
5911 if (INTVAL (operands
[1]) < 0x40)
5914 output_asm_insn (AS2 (sbis
,%1-0x20,%2), operands
);
5916 output_asm_insn (AS2 (sbic
,%1-0x20,%2), operands
);
5920 output_asm_insn (AS2 (in
,__tmp_reg__
,%1-0x20), operands
);
5922 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
5924 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
5927 else /* GET_CODE (operands[1]) == REG */
5929 if (GET_MODE (operands
[1]) == QImode
)
5932 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
5934 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
5936 else /* HImode or SImode */
5938 static char buf
[] = "sbrc %A1,0";
5939 int bit_nr
= exact_log2 (INTVAL (operands
[2])
5940 & GET_MODE_MASK (GET_MODE (operands
[1])));
5942 buf
[3] = (comp
== EQ
) ? 's' : 'c';
5943 buf
[6] = 'A' + (bit_nr
>> 3);
5944 buf
[9] = '0' + (bit_nr
& 7);
5945 output_asm_insn (buf
, operands
);
5950 return (AS1 (rjmp
,.+4) CR_TAB
5953 return AS1 (rjmp
,%3);
5957 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5960 avr_asm_out_ctor (rtx symbol
, int priority
)
5962 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
5963 default_ctor_section_asm_out_constructor (symbol
, priority
);
5966 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5969 avr_asm_out_dtor (rtx symbol
, int priority
)
5971 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
5972 default_dtor_section_asm_out_destructor (symbol
, priority
);
5975 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5978 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
5980 if (TYPE_MODE (type
) == BLKmode
)
5982 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5983 return (size
== -1 || size
> 8);
5989 /* Worker function for CASE_VALUES_THRESHOLD. */
5991 unsigned int avr_case_values_threshold (void)
5993 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;