/* Subroutines for insn-output.c for ATMEL AVR micro controllers
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
   2009 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
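/* Illustration (added note, not from the original sources): the LDD/STD
   instructions accept displacements 0..63, and the macro keeps the last
   byte of the operand addressable, e.g. MAX_LD_OFFSET (HImode) = 64 - 2
   = 62, so the high byte is still reachable at displacement 63.  */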
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_OS_main_function_p (tree);
static int avr_regs_to_save (HARD_REG_SET *);
static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, tree);

static RTX_CODE compare_condition (rtx insn);
static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
static int compare_sign_p (rtx insn);
static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
static bool avr_assemble_integer (rtx, unsigned int, int);
static void avr_file_start (void);
static void avr_file_end (void);
static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
static void avr_asm_function_end_prologue (FILE *);
static void avr_asm_function_begin_epilogue (FILE *);
static rtx avr_function_value (const_tree, const_tree, bool);
static void avr_insert_attributes (tree, tree *);
static void avr_asm_init_sections (void);
static unsigned int avr_section_type_flags (tree, const char *, int);
static void avr_reorg (void);
static void avr_asm_out_ctor (rtx, int);
static void avr_asm_out_dtor (rtx, int);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
static bool avr_rtx_costs (rtx, int, int, int *, bool);
static int avr_address_cost (rtx, bool);
static bool avr_return_in_memory (const_tree, const_tree);
static struct machine_function * avr_init_machine_status (void);
static rtx avr_builtin_setjmp_frame_value (void);
static bool avr_hard_regno_scratch_ok (unsigned int);
static unsigned int avr_case_values_threshold (void);
static bool avr_frame_pointer_required_p (void);
/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26

/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
static GTY(()) rtx tmp_reg_rtx;

/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;

/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;

/* This holds the last insn address.  */
static int last_insn_address = 0;

/* Preprocessor macros to define depending on MCU type.  */
static const char *avr_extra_arch_macro;

/* Current architecture.  */
const struct base_arch_s *avr_current_arch;

/* Current device.  */
const struct mcu_type_s *avr_current_device;

section *progmem_section;

/* AVR attributes.  */
static const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "progmem",   0, 0, false, false, false, avr_handle_progmem_attribute },
  { "signal",    0, 0, true,  false, false, avr_handle_fndecl_attribute },
  { "interrupt", 0, 0, true,  false, false, avr_handle_fndecl_attribute },
  { "naked",     0, 0, false, true,  true,  avr_handle_fntype_attribute },
  { "OS_task",   0, 0, false, true,  true,  avr_handle_fntype_attribute },
  { "OS_main",   0, 0, false, true,  true,  avr_handle_fntype_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
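/* Illustrative use from the C side (a sketch added for exposition; the
   function names below are made up):

     int table[4] __attribute__ ((progmem));       // object placed in flash
     void isr1 (void) __attribute__ ((signal));    // ISR, interrupts stay disabled
     void isr2 (void) __attribute__ ((interrupt)); // ISR, re-enables interrupts early
     void boot (void) __attribute__ ((naked));     // no prologue/epilogue emitted
     int  main (void) __attribute__ ((OS_main));   // call-saved registers not pushed
*/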
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p

struct gcc_target targetm = TARGET_INITIALIZER;
void
avr_override_options (void)
{
  const struct mcu_type_s *t;

  flag_delete_null_pointer_checks = 0;

  for (t = avr_mcu_types; t->name; t++)
    if (strcmp (t->name, avr_mcu_name) == 0)
      break;

  if (!t->name)
    {
      fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
               avr_mcu_name);
      for (t = avr_mcu_types; t->name; t++)
        fprintf (stderr, "   %s\n", t->name);
    }

  avr_current_device = t;
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  tmp_reg_rtx  = gen_rtx_REG (QImode, TMP_REGNO);
  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);

  init_machine_status = avr_init_machine_status;
}
/* Worker function for TARGET_CPU_CPP_BUILTINS.  */

void
avr_cpu_cpp_builtins (struct cpp_reader *pfile)
{
  builtin_define_std ("AVR");

  if (avr_current_arch->macro)
    cpp_define (pfile, avr_current_arch->macro);
  if (avr_extra_arch_macro)
    cpp_define (pfile, avr_extra_arch_macro);
  if (avr_current_arch->have_elpm)
    cpp_define (pfile, "__AVR_HAVE_RAMPZ__");
  if (avr_current_arch->have_elpm)
    cpp_define (pfile, "__AVR_HAVE_ELPM__");
  if (avr_current_arch->have_elpmx)
    cpp_define (pfile, "__AVR_HAVE_ELPMX__");
  if (avr_current_arch->have_movw_lpmx)
    {
      cpp_define (pfile, "__AVR_HAVE_MOVW__");
      cpp_define (pfile, "__AVR_HAVE_LPMX__");
    }
  if (avr_current_arch->asm_only)
    cpp_define (pfile, "__AVR_ASM_ONLY__");
  if (avr_current_arch->have_mul)
    {
      cpp_define (pfile, "__AVR_ENHANCED__");
      cpp_define (pfile, "__AVR_HAVE_MUL__");
    }
  if (avr_current_arch->have_jmp_call)
    {
      cpp_define (pfile, "__AVR_MEGA__");
      cpp_define (pfile, "__AVR_HAVE_JMP_CALL__");
    }
  if (avr_current_arch->have_eijmp_eicall)
    {
      cpp_define (pfile, "__AVR_HAVE_EIJMP_EICALL__");
      cpp_define (pfile, "__AVR_3_BYTE_PC__");
    }
  else
    {
      cpp_define (pfile, "__AVR_2_BYTE_PC__");
    }

  if (avr_current_device->short_sp)
    cpp_define (pfile, "__AVR_HAVE_8BIT_SP__");
  else
    cpp_define (pfile, "__AVR_HAVE_16BIT_SP__");

  if (TARGET_NO_INTERRUPTS)
    cpp_define (pfile, "__NO_INTERRUPTS__");
}
/* return register class from register number.  */

static const enum reg_class reg_class_tab[]={
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,                  /* r0 - r15 */
  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
  LD_REGS,                       /* r16 - 23 */
  ADDW_REGS,ADDW_REGS,           /* r24,r25 */
  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
  STACK_REG,STACK_REG            /* SPL,SPH */
};
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ((struct machine_function *)
          ggc_alloc_cleared (sizeof (struct machine_function)));
}

/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  if (r <= 33)
    return reg_class_tab[r];
  return ALL_REGS;
}
/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  tree a;

  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);

  a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
  return a != NULL_TREE;
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return 0;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return 0;

  a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  tree a;

  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);

  a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
  return a != NULL_TREE;
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  tree a;

  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);

  a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
  return a != NULL_TREE;
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (!reload_completed)
    cfun->machine->is_leaf = leaf_function_p ();

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
/* Return true if register FROM can be eliminated via register TO.  */

bool
avr_can_eliminate (int from, int to)
{
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
}
/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return get_frame_size () + (avr_pc_size) + 1 + offset;
    }
}
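/* Worked example (added for illustration, not from the original sources):
   for a function with a 10-byte frame on a device with a 2-byte PC that
   needs the frame pointer and saves two further registers, the offset from
   the frame pointer to the argument pointer is
   10 (frame) + 2 (return address) + 1 + (2 + 2) (saved registers) = 17.  */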
/* Actual start of frame is virtual_stack_vars_rtx; this is offset from the
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}

/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks sequence of live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length sequence of insns.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
544 /* Output function prologue. */
547 expand_prologue (void)
552 HOST_WIDE_INT size
= get_frame_size();
553 /* Define templates for push instructions. */
554 rtx pushbyte
= gen_rtx_MEM (QImode
,
555 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
556 rtx pushword
= gen_rtx_MEM (HImode
,
557 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
560 last_insn_address
= 0;
562 /* Init cfun->machine. */
563 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
564 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
565 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
566 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
567 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
569 /* Prologue: naked. */
570 if (cfun
->machine
->is_naked
)
575 avr_regs_to_save (&set
);
576 live_seq
= sequent_regs_live ();
577 minimize
= (TARGET_CALL_PROLOGUES
578 && !cfun
->machine
->is_interrupt
579 && !cfun
->machine
->is_signal
580 && !cfun
->machine
->is_OS_task
581 && !cfun
->machine
->is_OS_main
584 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
586 if (cfun
->machine
->is_interrupt
)
588 /* Enable interrupts. */
589 insn
= emit_insn (gen_enable_interrupt ());
590 RTX_FRAME_RELATED_P (insn
) = 1;
594 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
595 RTX_FRAME_RELATED_P (insn
) = 1;
598 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
599 RTX_FRAME_RELATED_P (insn
) = 1;
602 insn
= emit_move_insn (tmp_reg_rtx
,
603 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
604 RTX_FRAME_RELATED_P (insn
) = 1;
605 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
606 RTX_FRAME_RELATED_P (insn
) = 1;
610 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
612 insn
= emit_move_insn (tmp_reg_rtx
,
613 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
614 RTX_FRAME_RELATED_P (insn
) = 1;
615 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
616 RTX_FRAME_RELATED_P (insn
) = 1;
619 /* Clear zero reg. */
620 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
621 RTX_FRAME_RELATED_P (insn
) = 1;
623 /* Prevent any attempt to delete the setting of ZERO_REG! */
624 emit_use (zero_reg_rtx
);
626 if (minimize
&& (frame_pointer_needed
627 || (AVR_2_BYTE_PC
&& live_seq
> 6)
630 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
631 gen_int_mode (size
, HImode
));
632 RTX_FRAME_RELATED_P (insn
) = 1;
635 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
636 gen_int_mode (size
+ live_seq
, HImode
)));
637 RTX_FRAME_RELATED_P (insn
) = 1;
642 for (reg
= 0; reg
< 32; ++reg
)
644 if (TEST_HARD_REG_BIT (set
, reg
))
646 /* Emit push of register to save. */
647 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
648 RTX_FRAME_RELATED_P (insn
) = 1;
651 if (frame_pointer_needed
)
653 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
655 /* Push frame pointer. */
656 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
657 RTX_FRAME_RELATED_P (insn
) = 1;
662 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
663 RTX_FRAME_RELATED_P (insn
) = 1;
667 /* Creating a frame can be done by direct manipulation of the
668 stack or via the frame pointer. These two methods are:
675 the optimum method depends on function type, stack and frame size.
676 To avoid a complex logic, both methods are tested and shortest
680 rtx sp_plus_insns
= NULL_RTX
;
682 if (AVR_HAVE_8BIT_SP
)
684 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
685 over 'sbiw' (2 cycles, same size). */
686 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
690 /* Normal sized addition. */
691 myfp
= frame_pointer_rtx
;
694 /* Method 1-Adjust frame pointer. */
697 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
698 RTX_FRAME_RELATED_P (insn
) = 1;
701 emit_move_insn (myfp
,
702 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
705 RTX_FRAME_RELATED_P (insn
) = 1;
707 /* Copy to stack pointer. */
708 if (AVR_HAVE_8BIT_SP
)
710 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
711 RTX_FRAME_RELATED_P (insn
) = 1;
713 else if (TARGET_NO_INTERRUPTS
714 || cfun
->machine
->is_signal
715 || cfun
->machine
->is_OS_main
)
718 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
720 RTX_FRAME_RELATED_P (insn
) = 1;
722 else if (cfun
->machine
->is_interrupt
)
724 insn
= emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
726 RTX_FRAME_RELATED_P (insn
) = 1;
730 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
731 RTX_FRAME_RELATED_P (insn
) = 1;
734 fp_plus_insns
= get_insns ();
737 /* Method 2-Adjust Stack pointer. */
743 emit_move_insn (stack_pointer_rtx
,
744 gen_rtx_PLUS (HImode
,
748 RTX_FRAME_RELATED_P (insn
) = 1;
751 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
752 RTX_FRAME_RELATED_P (insn
) = 1;
754 sp_plus_insns
= get_insns ();
758 /* Use shortest method. */
759 if (size
<= 6 && (get_sequence_length (sp_plus_insns
)
760 < get_sequence_length (fp_plus_insns
)))
761 emit_insn (sp_plus_insns
);
763 emit_insn (fp_plus_insns
);
/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }
  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size ());
}
/* Implement EPILOGUE_USES.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;
  return 0;
}
808 /* Output RTL epilogue. */
811 expand_epilogue (void)
817 HOST_WIDE_INT size
= get_frame_size();
819 /* epilogue: naked */
820 if (cfun
->machine
->is_naked
)
822 emit_jump_insn (gen_return ());
826 avr_regs_to_save (&set
);
827 live_seq
= sequent_regs_live ();
828 minimize
= (TARGET_CALL_PROLOGUES
829 && !cfun
->machine
->is_interrupt
830 && !cfun
->machine
->is_signal
831 && !cfun
->machine
->is_OS_task
832 && !cfun
->machine
->is_OS_main
835 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
837 if (frame_pointer_needed
)
839 /* Get rid of frame. */
840 emit_move_insn(frame_pointer_rtx
,
841 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
842 gen_int_mode (size
, HImode
)));
846 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
849 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
853 if (frame_pointer_needed
)
857 /* Try two methods to adjust stack and select shortest. */
860 rtx sp_plus_insns
= NULL_RTX
;
862 if (AVR_HAVE_8BIT_SP
)
864 /* The high byte (r29) doesn't change - prefer 'subi'
865 (1 cycle) over 'sbiw' (2 cycles, same size). */
866 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
870 /* Normal sized addition. */
871 myfp
= frame_pointer_rtx
;
874 /* Method 1-Adjust frame pointer. */
877 emit_move_insn (myfp
,
878 gen_rtx_PLUS (GET_MODE (myfp
), myfp
,
882 /* Copy to stack pointer. */
883 if (AVR_HAVE_8BIT_SP
)
885 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
887 else if (TARGET_NO_INTERRUPTS
888 || cfun
->machine
->is_signal
)
890 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
893 else if (cfun
->machine
->is_interrupt
)
895 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
900 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
903 fp_plus_insns
= get_insns ();
906 /* Method 2-Adjust Stack pointer. */
911 emit_move_insn (stack_pointer_rtx
,
912 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
916 sp_plus_insns
= get_insns ();
920 /* Use shortest method. */
921 if (size
<= 5 && (get_sequence_length (sp_plus_insns
)
922 < get_sequence_length (fp_plus_insns
)))
923 emit_insn (sp_plus_insns
);
925 emit_insn (fp_plus_insns
);
927 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
929 /* Restore previous frame_pointer. */
930 emit_insn (gen_pophi (frame_pointer_rtx
));
933 /* Restore used registers. */
934 for (reg
= 31; reg
>= 0; --reg
)
936 if (TEST_HARD_REG_BIT (set
, reg
))
937 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
939 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
941 /* Restore RAMPZ using tmp reg as scratch. */
943 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
945 emit_insn (gen_popqi (tmp_reg_rtx
));
946 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
950 /* Restore SREG using tmp reg as scratch. */
951 emit_insn (gen_popqi (tmp_reg_rtx
));
953 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
956 /* Restore tmp REG. */
957 emit_insn (gen_popqi (tmp_reg_rtx
));
959 /* Restore zero REG. */
960 emit_insn (gen_popqi (zero_reg_rtx
));
963 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
               GET_MODE_NAME (mode),
               strict ? "(strict)": "",
               reload_completed ? "(reload_completed)": "",
               reload_in_progress ? "(reload_in_progress)": "",
               reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
          && reg_renumber)
        fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }

  if (!strict && GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
    {
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
        {
          if (! strict
              || REGNO (XEXP (x,0)) == REG_X
              || REGNO (XEXP (x,0)) == REG_Y
              || REGNO (XEXP (x,0)) == REG_Z)
            r = BASE_POINTER_REGS;
          if (XEXP (x,0) == frame_pointer_rtx
              || XEXP (x,0) == arg_pointer_rtx)
            r = BASE_POINTER_REGS;
        }
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
        r = POINTER_Y_REGS;
    }
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, " ret = %c\n", r + '0');
      debug_rtx (x);
    }
  return r == NO_REGS ? 0 : (int)r;
}
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  x = oldx;
  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME (mode));
      debug_rtx (oldx);
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx,0)))
    {
      if (REG_P (XEXP (oldx,1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
        {
          int offs = INTVAL (XEXP (oldx,1));
          if (frame_pointer_rtx != XEXP (oldx,0))
            if (offs > MAX_LD_OFFSET (mode))
              {
                if (TARGET_ALL_DEBUG)
                  fprintf (stderr, "force_reg (big offset)\n");
                x = force_reg (GET_MODE (oldx), oldx);
              }
        }
    }
  return x;
}
/* Return a pointer register name as a string.  */

static const char *
ptrreg_to_str (int regno)
{
  switch (regno)
    {
    case REG_X: return "X";
    case REG_Y: return "Y";
    case REG_Z: return "Z";
    default:
      output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
    }
  return NULL;
}
1093 /* Return the condition name as a string.
1094 Used in conditional jump constructing */
1097 cond_string (enum rtx_code code
)
1106 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1111 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
/* Output ADDR to FILE as address.  */

void
print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
              || GET_CODE (addr) == LABEL_REF))
        {
          fprintf (file, "gs(");
          output_addr_const (file, addr);
          fprintf (file, ")");
        }
      else
        output_addr_const (file, addr);
    }
}
1158 /* Output X as assembler operand to file FILE. */
1161 print_operand (FILE *file
, rtx x
, int code
)
1165 if (code
>= 'A' && code
<= 'D')
1170 if (!AVR_HAVE_JMP_CALL
)
1173 else if (code
== '!')
1175 if (AVR_HAVE_EIJMP_EICALL
)
1180 if (x
== zero_reg_rtx
)
1181 fprintf (file
, "__zero_reg__");
1183 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1185 else if (GET_CODE (x
) == CONST_INT
)
1186 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1187 else if (GET_CODE (x
) == MEM
)
1189 rtx addr
= XEXP (x
,0);
1191 if (CONSTANT_P (addr
) && abcd
)
1194 output_address (addr
);
1195 fprintf (file
, ")+%d", abcd
);
1197 else if (code
== 'o')
1199 if (GET_CODE (addr
) != PLUS
)
1200 fatal_insn ("bad address, not (reg+disp):", addr
);
1202 print_operand (file
, XEXP (addr
, 1), 0);
1204 else if (code
== 'p' || code
== 'r')
1206 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1207 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1210 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1212 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1214 else if (GET_CODE (addr
) == PLUS
)
1216 print_operand_address (file
, XEXP (addr
,0));
1217 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1218 fatal_insn ("internal compiler error. Bad address:"
1221 print_operand (file
, XEXP (addr
,1), code
);
1224 print_operand_address (file
, addr
);
1226 else if (GET_CODE (x
) == CONST_DOUBLE
)
1230 if (GET_MODE (x
) != SFmode
)
1231 fatal_insn ("internal compiler error. Unknown mode:", x
);
1232 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1233 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1234 fprintf (file
, "0x%lx", val
);
1236 else if (code
== 'j')
1237 fputs (cond_string (GET_CODE (x
)), file
);
1238 else if (code
== 'k')
1239 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1241 print_operand_address (file
, x
);
1244 /* Update the condition code in the INSN. */
1247 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1251 switch (get_attr_cc (insn
))
1254 /* Insn does not affect CC at all. */
1262 set
= single_set (insn
);
1266 cc_status
.flags
|= CC_NO_OVERFLOW
;
1267 cc_status
.value1
= SET_DEST (set
);
1272 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1273 The V flag may or may not be known but that's ok because
1274 alter_cond will change tests to use EQ/NE. */
1275 set
= single_set (insn
);
1279 cc_status
.value1
= SET_DEST (set
);
1280 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1285 set
= single_set (insn
);
1288 cc_status
.value1
= SET_SRC (set
);
1292 /* Insn doesn't leave CC in a usable state. */
1295 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1296 set
= single_set (insn
);
1299 rtx src
= SET_SRC (set
);
1301 if (GET_CODE (src
) == ASHIFTRT
1302 && GET_MODE (src
) == QImode
)
1304 rtx x
= XEXP (src
, 1);
1306 if (GET_CODE (x
) == CONST_INT
1310 cc_status
.value1
= SET_DEST (set
);
1311 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
/* Return maximum number of consecutive registers of
   class CLASS needed to hold a value of mode MODE.  */

int
class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED, enum machine_mode mode)
{
  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
}
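/* Illustration (added note): UNITS_PER_WORD is 1 on AVR, so an SImode value
   occupies (4 + 1 - 1) / 1 = 4 consecutive 8-bit registers.  */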
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */

int
avr_jump_mode (rtx x, rtx insn)
{
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
                                            ? XEXP (x, 0) : x));
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
  int jump_distance = cur_addr - dest_addr;

  if (-63 <= jump_distance && jump_distance <= 62)
    return 1;
  else if (-2046 <= jump_distance && jump_distance <= 2045)
    return 2;
  else if (AVR_HAVE_JMP_CALL)
    return 3;

  return 2;
}
1351 /* return an AVR condition jump commands.
1352 X is a comparison RTX.
1353 LEN is a number returned by avr_jump_mode function.
1354 if REVERSE nonzero then condition code in X must be reversed. */
1357 ret_cond_branch (rtx x
, int len
, int reverse
)
1359 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1364 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1365 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1367 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1368 AS1 (brmi
,.+2) CR_TAB
1370 (AS1 (breq
,.+6) CR_TAB
1371 AS1 (brmi
,.+4) CR_TAB
1375 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1377 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1378 AS1 (brlt
,.+2) CR_TAB
1380 (AS1 (breq
,.+6) CR_TAB
1381 AS1 (brlt
,.+4) CR_TAB
1384 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1386 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1387 AS1 (brlo
,.+2) CR_TAB
1389 (AS1 (breq
,.+6) CR_TAB
1390 AS1 (brlo
,.+4) CR_TAB
1393 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1394 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1396 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1397 AS1 (brpl
,.+2) CR_TAB
1399 (AS1 (breq
,.+2) CR_TAB
1400 AS1 (brpl
,.+4) CR_TAB
1403 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1405 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1406 AS1 (brge
,.+2) CR_TAB
1408 (AS1 (breq
,.+2) CR_TAB
1409 AS1 (brge
,.+4) CR_TAB
1412 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1414 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1415 AS1 (brsh
,.+2) CR_TAB
1417 (AS1 (breq
,.+2) CR_TAB
1418 AS1 (brsh
,.+4) CR_TAB
1426 return AS1 (br
%k1
,%0);
1428 return (AS1 (br
%j1
,.+2) CR_TAB
1431 return (AS1 (br
%j1
,.+4) CR_TAB
1440 return AS1 (br
%j1
,%0);
1442 return (AS1 (br
%k1
,.+2) CR_TAB
1445 return (AS1 (br
%k1
,.+4) CR_TAB
/* Predicate function for immediate operand which fits to byte (8bit) */

int
byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
          && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
}
/* Output all insn addresses and their sizes into the assembly language
   output file.  This is helpful for debugging whether the length attributes
   in the md file are correct.
   Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                    int num_operands ATTRIBUTE_UNUSED)
{
  int uid = INSN_UID (insn);

  if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
    {
      fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
               INSN_ADDRESSES (uid),
               INSN_ADDRESSES (uid) - last_insn_address,
               rtx_cost (PATTERN (insn), INSN, !optimize_size));
    }
  last_insn_address = INSN_ADDRESSES (uid);
}
/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
  unsigned int max = (mode == QImode  ? 0xff :
                      mode == HImode  ? 0xffff :
                      mode == SImode  ? 0xffffffff : 0);
  if (max && op && GET_CODE (x) == CONST_INT)
    {
      if (unsigned_condition (op) != op)
        max >>= 1;

      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && fntype)
    {
      int stdarg = (TYPE_ARG_TYPES (fntype) != 0
                    && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                        != void_type_node));
      if (stdarg)
        cum->nregs = 0;
    }
}
/* Returns the number of registers to allocate for a function argument.  */

static int
avr_num_arg_regs (enum machine_mode mode, tree type)
{
  int size;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
}
/* Controls whether a function argument is passed
   in a register, and which register.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
              int named ATTRIBUTE_UNUSED)
{
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  return NULL_RTX;
}
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
                      int named ATTRIBUTE_UNUSED)
{
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
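/* Illustration (a sketch, not from the original sources): with
   FIRST_CUM_REG == 26, the first two-byte argument is passed in r24/r25
   (gen_rtx_REG (HImode, 26 - 2)), the next one in r22/r23, and so on down
   towards r8; an argument that no longer fits in the remaining registers
   is passed on the stack (function_arg returns NULL_RTX).  */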
1584 /***********************************************************************
1585 Functions for outputting various mov's for a various modes
1586 ************************************************************************/
1588 output_movqi (rtx insn
, rtx operands
[], int *l
)
1591 rtx dest
= operands
[0];
1592 rtx src
= operands
[1];
1600 if (register_operand (dest
, QImode
))
1602 if (register_operand (src
, QImode
)) /* mov r,r */
1604 if (test_hard_reg_class (STACK_REG
, dest
))
1605 return AS2 (out
,%0,%1);
1606 else if (test_hard_reg_class (STACK_REG
, src
))
1607 return AS2 (in
,%0,%1);
1609 return AS2 (mov
,%0,%1);
1611 else if (CONSTANT_P (src
))
1613 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1614 return AS2 (ldi
,%0,lo8(%1));
1616 if (GET_CODE (src
) == CONST_INT
)
1618 if (src
== const0_rtx
) /* mov r,L */
1619 return AS1 (clr
,%0);
1620 else if (src
== const1_rtx
)
1623 return (AS1 (clr
,%0) CR_TAB
1626 else if (src
== constm1_rtx
)
1628 /* Immediate constants -1 to any register */
1630 return (AS1 (clr
,%0) CR_TAB
1635 int bit_nr
= exact_log2 (INTVAL (src
));
1641 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1644 avr_output_bld (operands
, bit_nr
);
1651 /* Last resort, larger than loading from memory. */
1653 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1654 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1655 AS2 (mov
,%0,r31
) CR_TAB
1656 AS2 (mov
,r31
,__tmp_reg__
));
1658 else if (GET_CODE (src
) == MEM
)
1659 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1661 else if (GET_CODE (dest
) == MEM
)
1665 if (src
== const0_rtx
)
1666 operands
[1] = zero_reg_rtx
;
1668 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1671 output_asm_insn (templ
, operands
);
1680 output_movhi (rtx insn
, rtx operands
[], int *l
)
1683 rtx dest
= operands
[0];
1684 rtx src
= operands
[1];
1690 if (register_operand (dest
, HImode
))
1692 if (register_operand (src
, HImode
)) /* mov r,r */
1694 if (test_hard_reg_class (STACK_REG
, dest
))
1696 if (AVR_HAVE_8BIT_SP
)
1697 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1698 /* Use simple load of stack pointer if no interrupts are
1700 else if (TARGET_NO_INTERRUPTS
)
1701 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1702 AS2 (out
,__SP_L__
,%A1
));
1704 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1706 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1707 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1708 AS2 (out
,__SP_L__
,%A1
));
1710 else if (test_hard_reg_class (STACK_REG
, src
))
1713 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1714 AS2 (in
,%B0
,__SP_H__
));
1720 return (AS2 (movw
,%0,%1));
1725 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1729 else if (CONSTANT_P (src
))
1731 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1734 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1735 AS2 (ldi
,%B0
,hi8(%1)));
1738 if (GET_CODE (src
) == CONST_INT
)
1740 if (src
== const0_rtx
) /* mov r,L */
1743 return (AS1 (clr
,%A0
) CR_TAB
1746 else if (src
== const1_rtx
)
1749 return (AS1 (clr
,%A0
) CR_TAB
1750 AS1 (clr
,%B0
) CR_TAB
1753 else if (src
== constm1_rtx
)
1755 /* Immediate constants -1 to any register */
1757 return (AS1 (clr
,%0) CR_TAB
1758 AS1 (dec
,%A0
) CR_TAB
1763 int bit_nr
= exact_log2 (INTVAL (src
));
1769 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1770 AS1 (clr
,%B0
) CR_TAB
1773 avr_output_bld (operands
, bit_nr
);
1779 if ((INTVAL (src
) & 0xff) == 0)
1782 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1783 AS1 (clr
,%A0
) CR_TAB
1784 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1785 AS2 (mov
,%B0
,r31
) CR_TAB
1786 AS2 (mov
,r31
,__tmp_reg__
));
1788 else if ((INTVAL (src
) & 0xff00) == 0)
1791 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1792 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1793 AS2 (mov
,%A0
,r31
) CR_TAB
1794 AS1 (clr
,%B0
) CR_TAB
1795 AS2 (mov
,r31
,__tmp_reg__
));
1799 /* Last resort, equal to loading from memory. */
1801 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1802 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1803 AS2 (mov
,%A0
,r31
) CR_TAB
1804 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1805 AS2 (mov
,%B0
,r31
) CR_TAB
1806 AS2 (mov
,r31
,__tmp_reg__
));
1808 else if (GET_CODE (src
) == MEM
)
1809 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1811 else if (GET_CODE (dest
) == MEM
)
1815 if (src
== const0_rtx
)
1816 operands
[1] = zero_reg_rtx
;
1818 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
1821 output_asm_insn (templ
, operands
);
1826 fatal_insn ("invalid insn:", insn
);
1831 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1835 rtx x
= XEXP (src
, 0);
1841 if (CONSTANT_ADDRESS_P (x
))
1843 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1846 return AS2 (in
,%0,__SREG__
);
1848 if (optimize
> 0 && io_address_operand (x
, QImode
))
1851 return AS2 (in
,%0,%1-0x20);
1854 return AS2 (lds
,%0,%1);
1856 /* memory access by reg+disp */
1857 else if (GET_CODE (x
) == PLUS
1858 && REG_P (XEXP (x
,0))
1859 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1861 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1863 int disp
= INTVAL (XEXP (x
,1));
1864 if (REGNO (XEXP (x
,0)) != REG_Y
)
1865 fatal_insn ("incorrect insn:",insn
);
1867 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1868 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1869 AS2 (ldd
,%0,Y
+63) CR_TAB
1870 AS2 (sbiw
,r28
,%o1
-63));
1872 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1873 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1874 AS2 (ld
,%0,Y
) CR_TAB
1875 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1876 AS2 (sbci
,r29
,hi8(%o1
)));
1878 else if (REGNO (XEXP (x
,0)) == REG_X
)
1880 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1881 it but I have this situation with extremal optimizing options. */
1882 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
1883 || reg_unused_after (insn
, XEXP (x
,0)))
1884 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
1887 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
1888 AS2 (ld
,%0,X
) CR_TAB
1889 AS2 (sbiw
,r26
,%o1
));
1892 return AS2 (ldd
,%0,%1);
1895 return AS2 (ld
,%0,%1);
1899 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
1903 rtx base
= XEXP (src
, 0);
1904 int reg_dest
= true_regnum (dest
);
1905 int reg_base
= true_regnum (base
);
1906 /* "volatile" forces reading low byte first, even if less efficient,
1907 for correct operation with 16-bit I/O registers. */
1908 int mem_volatile_p
= MEM_VOLATILE_P (src
);
1916 if (reg_dest
== reg_base
) /* R = (R) */
1919 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
1920 AS2 (ld
,%B0
,%1) CR_TAB
1921 AS2 (mov
,%A0
,__tmp_reg__
));
1923 else if (reg_base
== REG_X
) /* (R26) */
1925 if (reg_unused_after (insn
, base
))
1928 return (AS2 (ld
,%A0
,X
+) CR_TAB
1932 return (AS2 (ld
,%A0
,X
+) CR_TAB
1933 AS2 (ld
,%B0
,X
) CR_TAB
1939 return (AS2 (ld
,%A0
,%1) CR_TAB
1940 AS2 (ldd
,%B0
,%1+1));
1943 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
1945 int disp
= INTVAL (XEXP (base
, 1));
1946 int reg_base
= true_regnum (XEXP (base
, 0));
1948 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
1950 if (REGNO (XEXP (base
, 0)) != REG_Y
)
1951 fatal_insn ("incorrect insn:",insn
);
1953 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1954 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
1955 AS2 (ldd
,%A0
,Y
+62) CR_TAB
1956 AS2 (ldd
,%B0
,Y
+63) CR_TAB
1957 AS2 (sbiw
,r28
,%o1
-62));
1959 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1960 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1961 AS2 (ld
,%A0
,Y
) CR_TAB
1962 AS2 (ldd
,%B0
,Y
+1) CR_TAB
1963 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1964 AS2 (sbci
,r29
,hi8(%o1
)));
1966 if (reg_base
== REG_X
)
1968 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1969 it but I have this situation with extremal
1970 optimization options. */
1973 if (reg_base
== reg_dest
)
1974 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1975 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
1976 AS2 (ld
,%B0
,X
) CR_TAB
1977 AS2 (mov
,%A0
,__tmp_reg__
));
1979 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1980 AS2 (ld
,%A0
,X
+) CR_TAB
1981 AS2 (ld
,%B0
,X
) CR_TAB
1982 AS2 (sbiw
,r26
,%o1
+1));
1985 if (reg_base
== reg_dest
)
1988 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
1989 AS2 (ldd
,%B0
,%B1
) CR_TAB
1990 AS2 (mov
,%A0
,__tmp_reg__
));
1994 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
1997 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
1999 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2000 fatal_insn ("incorrect insn:", insn
);
2004 if (REGNO (XEXP (base
, 0)) == REG_X
)
2007 return (AS2 (sbiw
,r26
,2) CR_TAB
2008 AS2 (ld
,%A0
,X
+) CR_TAB
2009 AS2 (ld
,%B0
,X
) CR_TAB
2015 return (AS2 (sbiw
,%r1
,2) CR_TAB
2016 AS2 (ld
,%A0
,%p1
) CR_TAB
2017 AS2 (ldd
,%B0
,%p1
+1));
2022 return (AS2 (ld
,%B0
,%1) CR_TAB
2025 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2027 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2028 fatal_insn ("incorrect insn:", insn
);
2031 return (AS2 (ld
,%A0
,%1) CR_TAB
2034 else if (CONSTANT_ADDRESS_P (base
))
2036 if (optimize
> 0 && io_address_operand (base
, HImode
))
2039 return (AS2 (in
,%A0
,%A1
-0x20) CR_TAB
2040 AS2 (in
,%B0
,%B1
-0x20));
2043 return (AS2 (lds
,%A0
,%A1
) CR_TAB
2047 fatal_insn ("unknown move insn:",insn
);
2052 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2056 rtx base
= XEXP (src
, 0);
2057 int reg_dest
= true_regnum (dest
);
2058 int reg_base
= true_regnum (base
);
2066 if (reg_base
== REG_X
) /* (R26) */
2068 if (reg_dest
== REG_X
)
2069 /* "ld r26,-X" is undefined */
2070 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2071 AS2 (ld
,r29
,X
) CR_TAB
2072 AS2 (ld
,r28
,-X
) CR_TAB
2073 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2074 AS2 (sbiw
,r26
,1) CR_TAB
2075 AS2 (ld
,r26
,X
) CR_TAB
2076 AS2 (mov
,r27
,__tmp_reg__
));
2077 else if (reg_dest
== REG_X
- 2)
2078 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2079 AS2 (ld
,%B0
,X
+) CR_TAB
2080 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2081 AS2 (ld
,%D0
,X
) CR_TAB
2082 AS2 (mov
,%C0
,__tmp_reg__
));
2083 else if (reg_unused_after (insn
, base
))
2084 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2085 AS2 (ld
,%B0
,X
+) CR_TAB
2086 AS2 (ld
,%C0
,X
+) CR_TAB
2089 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2090 AS2 (ld
,%B0
,X
+) CR_TAB
2091 AS2 (ld
,%C0
,X
+) CR_TAB
2092 AS2 (ld
,%D0
,X
) CR_TAB
2097 if (reg_dest
== reg_base
)
2098 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2099 AS2 (ldd
,%C0
,%1+2) CR_TAB
2100 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2101 AS2 (ld
,%A0
,%1) CR_TAB
2102 AS2 (mov
,%B0
,__tmp_reg__
));
2103 else if (reg_base
== reg_dest
+ 2)
2104 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2105 AS2 (ldd
,%B0
,%1+1) CR_TAB
2106 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2107 AS2 (ldd
,%D0
,%1+3) CR_TAB
2108 AS2 (mov
,%C0
,__tmp_reg__
));
2110 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2111 AS2 (ldd
,%B0
,%1+1) CR_TAB
2112 AS2 (ldd
,%C0
,%1+2) CR_TAB
2113 AS2 (ldd
,%D0
,%1+3));
2116 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2118 int disp
= INTVAL (XEXP (base
, 1));
2120 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2122 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2123 fatal_insn ("incorrect insn:",insn
);
2125 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2126 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2127 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2128 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2129 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2130 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2131 AS2 (sbiw
,r28
,%o1
-60));
2133 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2134 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2135 AS2 (ld
,%A0
,Y
) CR_TAB
2136 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2137 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2138 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2139 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2140 AS2 (sbci
,r29
,hi8(%o1
)));
2143 reg_base
= true_regnum (XEXP (base
, 0));
2144 if (reg_base
== REG_X
)
2147 if (reg_dest
== REG_X
)
2150 /* "ld r26,-X" is undefined */
2151 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2152 AS2 (ld
,r29
,X
) CR_TAB
2153 AS2 (ld
,r28
,-X
) CR_TAB
2154 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2155 AS2 (sbiw
,r26
,1) CR_TAB
2156 AS2 (ld
,r26
,X
) CR_TAB
2157 AS2 (mov
,r27
,__tmp_reg__
));
2160 if (reg_dest
== REG_X
- 2)
2161 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2162 AS2 (ld
,r24
,X
+) CR_TAB
2163 AS2 (ld
,r25
,X
+) CR_TAB
2164 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2165 AS2 (ld
,r27
,X
) CR_TAB
2166 AS2 (mov
,r26
,__tmp_reg__
));
2168 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2169 AS2 (ld
,%A0
,X
+) CR_TAB
2170 AS2 (ld
,%B0
,X
+) CR_TAB
2171 AS2 (ld
,%C0
,X
+) CR_TAB
2172 AS2 (ld
,%D0
,X
) CR_TAB
2173 AS2 (sbiw
,r26
,%o1
+3));
2175 if (reg_dest
== reg_base
)
2176 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2177 AS2 (ldd
,%C0
,%C1
) CR_TAB
2178 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2179 AS2 (ldd
,%A0
,%A1
) CR_TAB
2180 AS2 (mov
,%B0
,__tmp_reg__
));
2181 else if (reg_dest
== reg_base
- 2)
2182 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2183 AS2 (ldd
,%B0
,%B1
) CR_TAB
2184 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2185 AS2 (ldd
,%D0
,%D1
) CR_TAB
2186 AS2 (mov
,%C0
,__tmp_reg__
));
2187 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2188 AS2 (ldd
,%B0
,%B1
) CR_TAB
2189 AS2 (ldd
,%C0
,%C1
) CR_TAB
2192 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2193 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2194 AS2 (ld
,%C0
,%1) CR_TAB
2195 AS2 (ld
,%B0
,%1) CR_TAB
2197 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2198 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2199 AS2 (ld
,%B0
,%1) CR_TAB
2200 AS2 (ld
,%C0
,%1) CR_TAB
2202 else if (CONSTANT_ADDRESS_P (base
))
2203 return *l
=8, (AS2 (lds
,%A0
,%A1
) CR_TAB
2204 AS2 (lds
,%B0
,%B1
) CR_TAB
2205 AS2 (lds
,%C0
,%C1
) CR_TAB
2208 fatal_insn ("unknown move insn:",insn
);
2213 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2217 rtx base
= XEXP (dest
, 0);
2218 int reg_base
= true_regnum (base
);
2219 int reg_src
= true_regnum (src
);
2225 if (CONSTANT_ADDRESS_P (base
))
2226 return *l
=8,(AS2 (sts
,%A0
,%A1
) CR_TAB
2227 AS2 (sts
,%B0
,%B1
) CR_TAB
2228 AS2 (sts
,%C0
,%C1
) CR_TAB
2230 if (reg_base
> 0) /* (r) */
2232 if (reg_base
== REG_X
) /* (R26) */
2234 if (reg_src
== REG_X
)
2236 /* "st X+,r26" is undefined */
2237 if (reg_unused_after (insn
, base
))
2238 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2239 AS2 (st
,X
,r26
) CR_TAB
2240 AS2 (adiw
,r26
,1) CR_TAB
2241 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2242 AS2 (st
,X
+,r28
) CR_TAB
2245 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2246 AS2 (st
,X
,r26
) CR_TAB
2247 AS2 (adiw
,r26
,1) CR_TAB
2248 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2249 AS2 (st
,X
+,r28
) CR_TAB
2250 AS2 (st
,X
,r29
) CR_TAB
2253 else if (reg_base
== reg_src
+ 2)
2255 if (reg_unused_after (insn
, base
))
2256 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2257 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2258 AS2 (st
,%0+,%A1
) CR_TAB
2259 AS2 (st
,%0+,%B1
) CR_TAB
2260 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2261 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2262 AS1 (clr
,__zero_reg__
));
2264 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2265 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2266 AS2 (st
,%0+,%A1
) CR_TAB
2267 AS2 (st
,%0+,%B1
) CR_TAB
2268 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2269 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2270 AS1 (clr
,__zero_reg__
) CR_TAB
2273 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2274 AS2 (st
,%0+,%B1
) CR_TAB
2275 AS2 (st
,%0+,%C1
) CR_TAB
2276 AS2 (st
,%0,%D1
) CR_TAB
2280 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2281 AS2 (std
,%0+1,%B1
) CR_TAB
2282 AS2 (std
,%0+2,%C1
) CR_TAB
2283 AS2 (std
,%0+3,%D1
));
2285 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2287 int disp
= INTVAL (XEXP (base
, 1));
2288 reg_base
= REGNO (XEXP (base
, 0));
2289 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2291 if (reg_base
!= REG_Y
)
2292 fatal_insn ("incorrect insn:",insn
);
2294 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2295 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2296 AS2 (std
,Y
+60,%A1
) CR_TAB
2297 AS2 (std
,Y
+61,%B1
) CR_TAB
2298 AS2 (std
,Y
+62,%C1
) CR_TAB
2299 AS2 (std
,Y
+63,%D1
) CR_TAB
2300 AS2 (sbiw
,r28
,%o0
-60));
2302 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2303 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2304 AS2 (st
,Y
,%A1
) CR_TAB
2305 AS2 (std
,Y
+1,%B1
) CR_TAB
2306 AS2 (std
,Y
+2,%C1
) CR_TAB
2307 AS2 (std
,Y
+3,%D1
) CR_TAB
2308 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2309 AS2 (sbci
,r29
,hi8(%o0
)));
2311 if (reg_base
== REG_X
)
2314 if (reg_src
== REG_X
)
2317 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2318 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2319 AS2 (adiw
,r26
,%o0
) CR_TAB
2320 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2321 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2322 AS2 (st
,X
+,r28
) CR_TAB
2323 AS2 (st
,X
,r29
) CR_TAB
2324 AS1 (clr
,__zero_reg__
) CR_TAB
2325 AS2 (sbiw
,r26
,%o0
+3));
2327 else if (reg_src
== REG_X
- 2)
2330 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2331 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2332 AS2 (adiw
,r26
,%o0
) CR_TAB
2333 AS2 (st
,X
+,r24
) CR_TAB
2334 AS2 (st
,X
+,r25
) CR_TAB
2335 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2336 AS2 (st
,X
,__zero_reg__
) CR_TAB
2337 AS1 (clr
,__zero_reg__
) CR_TAB
2338 AS2 (sbiw
,r26
,%o0
+3));
2341 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2342 AS2 (st
,X
+,%A1
) CR_TAB
2343 AS2 (st
,X
+,%B1
) CR_TAB
2344 AS2 (st
,X
+,%C1
) CR_TAB
2345 AS2 (st
,X
,%D1
) CR_TAB
2346 AS2 (sbiw
,r26
,%o0
+3));
2348 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2349 AS2 (std
,%B0
,%B1
) CR_TAB
2350 AS2 (std
,%C0
,%C1
) CR_TAB
2353 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2354 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2355 AS2 (st
,%0,%C1
) CR_TAB
2356 AS2 (st
,%0,%B1
) CR_TAB
2358 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2359 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2360 AS2 (st
,%0,%B1
) CR_TAB
2361 AS2 (st
,%0,%C1
) CR_TAB
2363 fatal_insn ("unknown move insn:",insn
);
2368 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2371 rtx dest
= operands
[0];
2372 rtx src
= operands
[1];
2378 if (register_operand (dest
, VOIDmode
))
2380 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2382 if (true_regnum (dest
) > true_regnum (src
))
2387 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2388 AS2 (movw
,%A0
,%A1
));
2391 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2392 AS2 (mov
,%C0
,%C1
) CR_TAB
2393 AS2 (mov
,%B0
,%B1
) CR_TAB
2401 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2402 AS2 (movw
,%C0
,%C1
));
2405 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2406 AS2 (mov
,%B0
,%B1
) CR_TAB
2407 AS2 (mov
,%C0
,%C1
) CR_TAB
2411 else if (CONSTANT_P (src
))
2413 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2416 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2417 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2418 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2419 AS2 (ldi
,%D0
,hhi8(%1)));
2422 if (GET_CODE (src
) == CONST_INT
)
2424 const char *const clr_op0
=
2425 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2426 AS1 (clr
,%B0
) CR_TAB
2428 : (AS1 (clr
,%A0
) CR_TAB
2429 AS1 (clr
,%B0
) CR_TAB
2430 AS1 (clr
,%C0
) CR_TAB
2433 if (src
== const0_rtx
) /* mov r,L */
2435 *l
= AVR_HAVE_MOVW
? 3 : 4;
2438 else if (src
== const1_rtx
)
2441 output_asm_insn (clr_op0
, operands
);
2442 *l
= AVR_HAVE_MOVW
? 4 : 5;
2443 return AS1 (inc
,%A0
);
2445 else if (src
== constm1_rtx
)
2447 /* Immediate constants -1 to any register */
2451 return (AS1 (clr
,%A0
) CR_TAB
2452 AS1 (dec
,%A0
) CR_TAB
2453 AS2 (mov
,%B0
,%A0
) CR_TAB
2454 AS2 (movw
,%C0
,%A0
));
2457 return (AS1 (clr
,%A0
) CR_TAB
2458 AS1 (dec
,%A0
) CR_TAB
2459 AS2 (mov
,%B0
,%A0
) CR_TAB
2460 AS2 (mov
,%C0
,%A0
) CR_TAB
2465 int bit_nr
= exact_log2 (INTVAL (src
));
2469 *l
= AVR_HAVE_MOVW
? 5 : 6;
2472 output_asm_insn (clr_op0
, operands
);
2473 output_asm_insn ("set", operands
);
2476 avr_output_bld (operands
, bit_nr
);
2483 /* Last resort, better than loading from memory. */
2485 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2486 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2487 AS2 (mov
,%A0
,r31
) CR_TAB
2488 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2489 AS2 (mov
,%B0
,r31
) CR_TAB
2490 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2491 AS2 (mov
,%C0
,r31
) CR_TAB
2492 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2493 AS2 (mov
,%D0
,r31
) CR_TAB
2494 AS2 (mov
,r31
,__tmp_reg__
));
2496 else if (GET_CODE (src
) == MEM
)
2497 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2499 else if (GET_CODE (dest
) == MEM
)
2503 if (src
== const0_rtx
)
2504 operands
[1] = zero_reg_rtx
;
2506 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2509 output_asm_insn (templ
, operands
);
2514 fatal_insn ("invalid insn:", insn
);
const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (out,__SREG__,%1);
	}
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (out,%0-0x20,%1);
	}
      *l = 2;
      return AS2 (sts,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
	{
	  int disp = INTVAL (XEXP (x,1));
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
			    AS2 (std,Y+63,%1)     CR_TAB
			    AS2 (sbiw,r28,%o0-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%1)            CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
				AS2 (adiw,r26,%o0)       CR_TAB
				AS2 (st,X,__tmp_reg__));

	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
			      AS2 (adiw,r26,%o0)       CR_TAB
			      AS2 (st,X,__tmp_reg__)   CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	  else
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,%1));

	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,%1)      CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	}
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
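/* Note (added, illustrative): a one-byte store whose address falls in the
   I/O window is emitted above as a single "out addr-0x20,reg" instead of
   the two-word "sts addr,reg", which is both shorter and faster.  */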
const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (out,%B0-0x20,%B1) CR_TAB
		  AS2 (out,%A0-0x20,%A1));
	}
      return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
		      AS2 (sts,%A0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" and "st -X,r26" are undefined.  */
	      if (!mem_volatile_p && reg_unused_after (insn, src))
		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__));
	      else
		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__)    CR_TAB
			      AS2 (sbiw,r26,1)          CR_TAB
			      AS2 (st,X,r26));
	    }
	  else
	    {
	      if (!mem_volatile_p && reg_unused_after (insn, base))
		return *l=2, (AS2 (st,X+,%A1) CR_TAB
			      AS2 (st,X,%B1));
	      else
		return *l=3, (AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,%B1)   CR_TAB
			      AS2 (st,-X,%A1));
	    }
	}
      else
	return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
			    AS2 (std,Y+63,%B1)    CR_TAB
			    AS2 (std,Y+62,%A1)    CR_TAB
			    AS2 (sbiw,r28,%o0-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      *l = 7;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0+1)       CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS2 (st,-X,__tmp_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0));
	    }
	  *l = 4;
	  return (AS2 (adiw,r26,%o0+1) CR_TAB
		  AS2 (st,X,%B1)       CR_TAB
		  AS2 (st,-X,%A1)      CR_TAB
		  AS2 (sbiw,r26,%o0));
	}
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
	{
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (adiw,r26,1) CR_TAB
		      AS2 (st,X,%B1)   CR_TAB
		      AS2 (st,-X,%A1)  CR_TAB
		      AS2 (adiw,r26,2));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (std,%p0+1,%B1) CR_TAB
		      AS2 (st,%p0,%A1)    CR_TAB
		      AS2 (adiw,%r0,2));
	    }
	}

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
	      AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Return 1 if frame pointer for current function required.  */

static bool
avr_frame_pointer_required_p (void)
{
  return (cfun->calls_alloca
	  || crtl->args.info.nregs == 0
	  || get_frame_size () > 0);
}
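/* Note (added): in other words the frame pointer is needed whenever the
   function calls alloca, ran out of argument registers (so some incoming
   arguments live on the stack), or allocates any local stack space.  */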
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

static RTX_CODE
compare_condition (rtx insn)
{
  rtx next = next_real_insn (insn);
  RTX_CODE cond = UNKNOWN;
  if (next && GET_CODE (next) == JUMP_INSN)
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);
      rtx t = XEXP (src, 0);
      cond = GET_CODE (t);
    }
  return cond;
}
/* Returns nonzero if INSN is a tst insn that only tests the sign.  */

static int
compare_sign_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == GE || cond == LT);
}
/* Returns nonzero if the next insn is a JUMP_INSN with a condition
   that needs to be swapped (GT, GTU, LE, LEU).  */

int
compare_diff_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
}
/* Returns nonzero if INSN is a compare insn with the EQ or NE condition.  */

static int
compare_eq_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == EQ || cond == NE);
}
/* Output test instruction for HImode.  */

const char *
out_tsthi (rtx insn, rtx op, int *l)
{
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%B0);
    }
  if (reg_unused_after (insn, op)
      && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      if (l) *l = 1;
      return "or %A0,%B0";
    }
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 1;
      return AS2 (sbiw,%0,0);
    }
  if (l) *l = 2;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
	  AS2 (cpc,%B0,__zero_reg__));
}
/* Output test instruction for SImode.  */

const char *
out_tstsi (rtx insn, rtx op, int *l)
{
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%D0);
    }
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 3;
      return (AS2 (sbiw,%A0,0)           CR_TAB
	      AS2 (cpc,%C0,__zero_reg__) CR_TAB
	      AS2 (cpc,%D0,__zero_reg__));
    }
  if (l) *l = 4;
  return (AS2 (cp,%A0,__zero_reg__)  CR_TAB
	  AS2 (cpc,%B0,__zero_reg__) CR_TAB
	  AS2 (cpc,%C0,__zero_reg__) CR_TAB
	  AS2 (cpc,%D0,__zero_reg__));
}
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
		    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;
  int saved_in_tmp = 0;
  int use_zero_reg = 0;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count < 8 && !scratch)
	use_zero_reg = 1;

      if (optimize_size)
	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
	{
	  /* Output shifts inline with no loop - faster.  */
	  if (len)
	    *len = t_len * count;
	  else
	    while (count-- > 0)
	      output_asm_insn (templ, op);
	  return;
	}

      if (scratch)
	strcat (str, AS2 (ldi,%3,%2));
      else if (use_zero_reg)
	{
	  /* Hack to save one word: use __zero_reg__ as loop counter.
	     Set one bit, then shift in a loop until it is 0 again.  */
	  op[3] = zero_reg_rtx;
	  strcat (str, ("set" CR_TAB
			AS2 (bld,%3,%2-1)));
	}
      else
	{
	  /* No scratch register available, use one from LD_REGS (saved in
	     __tmp_reg__) that doesn't overlap with registers to shift.  */
	  op[3] = gen_rtx_REG (QImode,
			       ((true_regnum (operands[0]) - 1) & 15) + 16);
	  op[4] = tmp_reg_rtx;
	  saved_in_tmp = 1;

	  if (len)
	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
	  else
	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
			  AS2 (ldi,%3,%2)));
	}
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
	out_movqi_r_mr (insn, op_mov, len);
      else
	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      if (reg_unused_after (insn, operands[2]))
	op[3] = op[2];
      else
	{
	  op[3] = tmp_reg_rtx;
	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
	}
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    strcat (str, AS1 (rjmp,2f));

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      strcat (str, "\n1:\t");
      strcat (str, templ);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
/* 8bit shift left ((char)x << i)  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsl,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 3:
	  *len = 3;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 4:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0xf0));
	    }
	  *len = 4;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xe0));
	    }
	  *len = 5;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xc0));
	    }
	  *len = 6;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 7:
	  *len = 3;
	  return (AS1 (ror,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (ror,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsl,%0),
		      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
	{
	  if (INTVAL (operands[2]) < 16)
	  return (AS1 (clr,%B0) CR_TAB

	  if (optimize_size && scratch)
	  return (AS1 (swap,%A0) CR_TAB AS1 (swap,%B0) CR_TAB
		  AS2 (andi,%B0,0xf0) CR_TAB AS2 (eor,%B0,%A0) CR_TAB
		  AS2 (andi,%A0,0xf0) CR_TAB
	  return (AS1 (swap,%A0) CR_TAB AS1 (swap,%B0) CR_TAB
		  AS2 (ldi,%3,0xf0) CR_TAB
		  AS2 (eor,%B0,%A0) CR_TAB
	  break;  /* optimize_size ? 6 : 8 */
	  break;  /* scratch ? 5 : 6 */

	  return (AS1 (lsl,%A0) CR_TAB AS1 (rol,%B0) CR_TAB
		  AS1 (swap,%A0) CR_TAB AS1 (swap,%B0) CR_TAB
		  AS2 (andi,%B0,0xf0) CR_TAB AS2 (eor,%B0,%A0) CR_TAB
		  AS2 (andi,%A0,0xf0) CR_TAB
	  return (AS1 (lsl,%A0) CR_TAB AS1 (rol,%B0) CR_TAB
		  AS1 (swap,%A0) CR_TAB AS1 (swap,%B0) CR_TAB
		  AS2 (ldi,%3,0xf0) CR_TAB
		  AS2 (eor,%B0,%A0) CR_TAB
	  break;  /* scratch ? 5 : 6 */

	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0) CR_TAB AS1 (ror,%A0) CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0) CR_TAB AS1 (ror,%A0) CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB AS2 (mov,%A0,__tmp_reg__));

	  return (AS1 (lsr,%B0) CR_TAB AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB AS1 (ror,%B0) CR_TAB

	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB

	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB

	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB

	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (swap,%B0) CR_TAB AS2 (andi,%B0,0xf0));
	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (swap,%B0) CR_TAB AS2 (ldi,%3,0xf0) CR_TAB
	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB

	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (swap,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (andi,%B0,0xe0));
	  if (AVR_HAVE_MUL && scratch)
	    return (AS2 (ldi,%3,0x20) CR_TAB AS2 (mul,%A0,%3) CR_TAB
		    AS2 (mov,%B0,r0) CR_TAB AS1 (clr,%A0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && scratch)
	    return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		    AS1 (swap,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		    AS2 (ldi,%3,0xe0) CR_TAB
	  return ("set" CR_TAB AS2 (bld,r1,5) CR_TAB
		  AS2 (mul,%A0,r1) CR_TAB AS2 (mov,%B0,r0) CR_TAB
		  AS1 (clr,%A0) CR_TAB AS1 (clr,__zero_reg__));
	  return (AS2 (mov,%B0,%A0) CR_TAB AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB

	  if (AVR_HAVE_MUL && ldi_ok)
	    return (AS2 (ldi,%B0,0x40) CR_TAB AS2 (mul,%A0,%B0) CR_TAB
		    AS2 (mov,%B0,r0) CR_TAB AS1 (clr,%A0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (AVR_HAVE_MUL && scratch)
	    return (AS2 (ldi,%3,0x40) CR_TAB AS2 (mul,%A0,%3) CR_TAB
		    AS2 (mov,%B0,r0) CR_TAB AS1 (clr,%A0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && ldi_ok)
	    return (AS2 (mov,%B0,%A0) CR_TAB
		    AS2 (ldi,%A0,6) "\n1:\t"
		    AS1 (lsl,%B0) CR_TAB AS1 (dec,%A0) CR_TAB
	  if (optimize_size && scratch)
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (ror,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (ror,%B0) CR_TAB

	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (ror,%B0) CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		      insn, operands, len, 2);
  return "";
}
/* 32bit shift left ((long)x << i)  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
	{
	  if (INTVAL (operands[2]) < 32)
	  return *len = 3, (AS1 (clr,%D0)      CR_TAB
			    AS1 (clr,%C0)      CR_TAB
			    AS2 (movw,%A0,%C0));
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	      return (AS2 (mov,%D0,%C1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
	      return (AS1 (clr,%A0)     CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
	  }

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    if (reg0 + 2 == reg1)
	      return *len = 2, (AS1 (clr,%B0) CR_TAB
	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
				AS1 (clr,%B0)      CR_TAB
	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
				AS2 (mov,%D0,%B1) CR_TAB
				AS1 (clr,%B0)     CR_TAB
	  }

	  return (AS2 (mov,%D0,%A1) CR_TAB
		  AS1 (clr,%C0)     CR_TAB
		  AS1 (clr,%B0)     CR_TAB

	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0) CR_TAB
		       AS1 (rol,%C0) CR_TAB
		      insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right  ((signed char)x >> i)  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
	{
	  return AS1 (asr,%0);
	  return (AS1 (asr,%0) CR_TAB
	  return (AS1 (asr,%0) CR_TAB
	  return (AS1 (asr,%0) CR_TAB
	  return (AS1 (asr,%0) CR_TAB
	  return (AS2 (bst,%0,6)  CR_TAB
		  AS2 (sbc,%0,%0) CR_TAB
	  if (INTVAL (operands[2]) < 8)
	  return (AS1 (lsl,%0) CR_TAB
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (asr,%0),
		      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right  ((signed short)x >> i)  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
	{
	  /* XXX try to optimize this too? */
	  break;  /* scratch ? 5 : 6 */

	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB AS1 (rol,%A0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB AS1 (rol,%A0) CR_TAB

	  return (AS1 (lsl,%A0) CR_TAB AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
				AS1 (lsl,%B0)     CR_TAB
	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
				AS1 (clr,%B0)     CR_TAB
				AS2 (sbrc,%A0,7)  CR_TAB
	  }

	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB AS1 (asr,%A0) CR_TAB

	  if (AVR_HAVE_MUL && ldi_ok)
	    return (AS2 (ldi,%A0,0x20) CR_TAB AS2 (muls,%B0,%A0) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS2 (sbc,%B0,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && scratch)
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB AS1 (asr,%A0) CR_TAB

	  if (AVR_HAVE_MUL && ldi_ok)
	    return (AS2 (ldi,%A0,0x10) CR_TAB AS2 (muls,%B0,%A0) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS2 (sbc,%B0,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && scratch)
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB

	  if (AVR_HAVE_MUL && ldi_ok)
	    return (AS2 (ldi,%A0,0x08) CR_TAB AS2 (muls,%B0,%A0) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS2 (sbc,%B0,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  break;  /* scratch ? 5 : 7 */

	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB AS1 (asr,%A0) CR_TAB

	  return (AS1 (lsl,%B0) CR_TAB AS2 (sbc,%A0,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS2 (mov,%B0,%A0) CR_TAB

	  if (INTVAL (operands[2]) < 16)
	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
			    AS2 (sbc,%A0,%A0) CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
		      insn, operands, len, 2);
  return "";
}
/* 32bit arithmetic shift right  ((signed long)x >> i)  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
	{
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%C0,7)  CR_TAB
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%D1,7)  CR_TAB
		      AS1 (dec,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
	  }

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 4, (AS1 (clr,%D0)    CR_TAB
				AS2 (sbrc,%B0,7) CR_TAB
				AS1 (com,%D0)    CR_TAB
	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%D0)      CR_TAB
				AS2 (sbrc,%B0,7)   CR_TAB
				AS1 (com,%D0)      CR_TAB
	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
	  }

	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%D0)     CR_TAB
			    AS2 (sbrc,%A0,7)  CR_TAB
			    AS1 (com,%D0)     CR_TAB
			    AS2 (mov,%B0,%D0) CR_TAB

	  if (INTVAL (operands[2]) < 32)
	  return *len = 4, (AS1 (lsl,%D0)      CR_TAB
			    AS2 (sbc,%A0,%A0)  CR_TAB
			    AS2 (mov,%B0,%A0)  CR_TAB
			    AS2 (movw,%C0,%A0));
	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
			    AS2 (sbc,%A0,%A0) CR_TAB
			    AS2 (mov,%B0,%A0) CR_TAB
			    AS2 (mov,%C0,%A0) CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		      insn, operands, len, 4);
  return "";
}
/* 8bit logic shift right ((unsigned char)x >> i)  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
	{
	  if (INTVAL (operands[2]) < 8)
	  return AS1 (clr,%0);
	  return AS1 (lsr,%0);
	  return (AS1 (lsr,%0) CR_TAB
	  return (AS1 (lsr,%0) CR_TAB

	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    return (AS1 (swap,%0) CR_TAB
		    AS2 (andi,%0,0x0f));
	  return (AS1 (lsr,%0) CR_TAB

	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    return (AS1 (swap,%0) CR_TAB
	  return (AS1 (lsr,%0) CR_TAB

	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    return (AS1 (swap,%0) CR_TAB
	  return (AS1 (lsr,%0) CR_TAB

	  return (AS1 (rol,%0) CR_TAB
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsr,%0),
		      insn, operands, len, 1);
  return "";
}
/* 16bit logic shift right ((unsigned short)x >> i)  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
	{
	  if (INTVAL (operands[2]) < 16)
	  return (AS1 (clr,%B0) CR_TAB

	  if (optimize_size && scratch)
	  return (AS1 (swap,%B0) CR_TAB AS1 (swap,%A0) CR_TAB
		  AS2 (andi,%A0,0x0f) CR_TAB AS2 (eor,%A0,%B0) CR_TAB
		  AS2 (andi,%B0,0x0f) CR_TAB
	  return (AS1 (swap,%B0) CR_TAB AS1 (swap,%A0) CR_TAB
		  AS2 (ldi,%3,0x0f) CR_TAB
		  AS2 (eor,%A0,%B0) CR_TAB
	  break;  /* optimize_size ? 6 : 8 */
	  break;  /* scratch ? 5 : 6 */

	  return (AS1 (lsr,%B0) CR_TAB AS1 (ror,%A0) CR_TAB
		  AS1 (swap,%B0) CR_TAB AS1 (swap,%A0) CR_TAB
		  AS2 (andi,%A0,0x0f) CR_TAB AS2 (eor,%A0,%B0) CR_TAB
		  AS2 (andi,%B0,0x0f) CR_TAB
	  return (AS1 (lsr,%B0) CR_TAB AS1 (ror,%A0) CR_TAB
		  AS1 (swap,%B0) CR_TAB AS1 (swap,%A0) CR_TAB
		  AS2 (ldi,%3,0x0f) CR_TAB
		  AS2 (eor,%A0,%B0) CR_TAB
	  break;  /* scratch ? 5 : 6 */

	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0) CR_TAB AS1 (rol,%B0) CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0) CR_TAB AS1 (rol,%B0) CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB AS2 (mov,%B0,__tmp_reg__));

	  return (AS1 (lsl,%A0) CR_TAB AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB AS2 (sbc,%B0,%B0) CR_TAB

	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB

	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB

	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (swap,%A0) CR_TAB AS2 (andi,%A0,0x0f));
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (swap,%A0) CR_TAB AS2 (ldi,%3,0x0f) CR_TAB
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB

	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (swap,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB
		  AS2 (andi,%A0,0x07));
	  if (AVR_HAVE_MUL && scratch)
	    return (AS2 (ldi,%3,0x08) CR_TAB AS2 (mul,%B0,%3) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS1 (clr,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && scratch)
	    return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		    AS1 (swap,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB
		    AS2 (ldi,%3,0x07) CR_TAB
	  return ("set" CR_TAB AS2 (bld,r1,3) CR_TAB
		  AS2 (mul,%B0,r1) CR_TAB AS2 (mov,%A0,r1) CR_TAB
		  AS1 (clr,%B0) CR_TAB AS1 (clr,__zero_reg__));
	  return (AS2 (mov,%A0,%B0) CR_TAB AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB AS1 (lsr,%A0) CR_TAB

	  if (AVR_HAVE_MUL && ldi_ok)
	    return (AS2 (ldi,%A0,0x04) CR_TAB AS2 (mul,%B0,%A0) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS1 (clr,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (AVR_HAVE_MUL && scratch)
	    return (AS2 (ldi,%3,0x04) CR_TAB AS2 (mul,%B0,%3) CR_TAB
		    AS2 (mov,%A0,r1) CR_TAB AS1 (clr,%B0) CR_TAB
		    AS1 (clr,__zero_reg__));
	  if (optimize_size && ldi_ok)
	    return (AS2 (mov,%A0,%B0) CR_TAB
		    AS2 (ldi,%B0,6) "\n1:\t"
		    AS1 (lsr,%A0) CR_TAB AS1 (dec,%B0) CR_TAB
	  if (optimize_size && scratch)
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (rol,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (rol,%A0) CR_TAB

	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB AS1 (rol,%A0) CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
		      insn, operands, len, 2);
  return "";
}
/* 32bit logic shift right ((unsigned int)x >> i)  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      switch (INTVAL (operands[2]))
	{
	  if (INTVAL (operands[2]) < 32)
	  return *len = 3, (AS1 (clr,%D0)      CR_TAB
			    AS1 (clr,%C0)      CR_TAB
			    AS2 (movw,%A0,%C0));
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
	  }

	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 2, (AS1 (clr,%C0) CR_TAB
	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%C0)      CR_TAB
	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%C0)     CR_TAB
	  }

	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%B0)     CR_TAB
			    AS1 (clr,%C0)     CR_TAB

	  return (AS1 (clr,%A0)    CR_TAB
		  AS2 (sbrc,%D0,7) CR_TAB
		  AS1 (inc,%A0)    CR_TAB
		  AS1 (clr,%B0)    CR_TAB
		  AS1 (clr,%C0)    CR_TAB
	}
    }
  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		      insn, operands, len, 4);
  return "";
}
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx patt = PATTERN (insn);
  rtx set;

  if (GET_CODE (patt) == SET)
    {
      rtx op[10];
      op[1] = SET_SRC (patt);
      op[0] = SET_DEST (patt);
      if (general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  switch (GET_MODE (op[0]))
	    {
	    case QImode: output_movqi (insn, op, &len); break;
	    case HImode: output_movhi (insn, op, &len); break;
	    case SImode:
	    case SFmode: output_movsisf (insn, op, &len); break;
	    default: break;
	    }
	}
      else if (op[0] == cc0_rtx && REG_P (op[1]))
	{
	  switch (GET_MODE (op[1]))
	    {
	    case HImode: out_tsthi (insn, op[1], &len); break;
	    case SImode: out_tstsi (insn, op[1], &len); break;
	    default: break;
	    }
	}
      else if (GET_CODE (op[1]) == AND)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00)
		       + ((mask & 0xff0000L) != 0xff0000L)
		       + ((mask & 0xff000000L) != 0xff000000L));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00));
	    }
	}
      else if (GET_CODE (op[1]) == IOR)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0)
		       + ((mask & 0xff0000L) != 0)
		       + ((mask & 0xff000000L) != 0));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0));
	    }
	}
    }
  set = single_set (insn);
  if (set)
    {
      rtx op[10];

      op[1] = SET_SRC (set);
      op[0] = SET_DEST (set);

      if (GET_CODE (patt) == PARALLEL
	  && general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  if (XVECLEN (patt, 0) == 2)
	    op[2] = XVECEXP (patt, 0, 1);
	  switch (GET_MODE (op[0]))
	    {
	    case HImode: output_reload_inhi (insn, op, &len); break;
	    case SImode:
	    case SFmode: output_reload_insisf (insn, op, &len); break;
	    default: break;
	    }
	}
      else if (GET_CODE (op[1]) == ASHIFT
	       || GET_CODE (op[1]) == ASHIFTRT
	       || GET_CODE (op[1]) == LSHIFTRT)
	{
	  rtx ops[10];
	  ops[0] = op[0];
	  ops[1] = XEXP (op[1],0);
	  ops[2] = XEXP (op[1],1);
	  switch (GET_CODE (op[1]))
	    {
	    case ASHIFT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashlqi3_out (insn,ops,&len); break;
		case HImode: ashlhi3_out (insn,ops,&len); break;
		case SImode: ashlsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case ASHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashrqi3_out (insn,ops,&len); break;
		case HImode: ashrhi3_out (insn,ops,&len); break;
		case SImode: ashrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case LSHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: lshrqi3_out (insn,ops,&len); break;
		case HImode: lshrhi3_out (insn,ops,&len); break;
		case SImode: lshrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    default:
	      break;
	    }
	}
    }
  return len;
}
/* Return nonzero if register REG dead after INSN.  */

int
reg_unused_after (rtx insn, rtx reg)
{
  return (dead_or_set_p (insn, reg)
	  || (REG_P (reg) && _reg_unused_after (insn, reg)));
}
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

      /* If this is a label that existed before reload, then the register
	 is dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	continue;
      /* else */

      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}
/* Worker function for ASM_DECLARE_FUNCTION_NAME.  */

void
avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
{
  /* If the function has the 'signal' or 'interrupt' attribute, test to
     make sure that the name of the function is "__vector_NN" so as to
     catch when the user misspells the interrupt vector name.  */

  if (cfun->machine->is_interrupt)
    {
      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
	{
	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
		      "%qs appears to be a misspelled interrupt handler",
		      name);
	}
    }
  else if (cfun->machine->is_signal)
    {
      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
	{
	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
		      "%qs appears to be a misspelled signal handler",
		      name);
	}
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_OUTPUT_LABEL (file, name);
}
/* The routine used to output NUL terminated strings.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable, especially for targets like the i386
   (where the only alternative is to output character sequences as
   comma separated lists of numbers).  */

void
gas_output_limited_string(FILE *file, const char *str)
{
  const unsigned char *_limited_str = (const unsigned char *) str;
  unsigned ch;

  fprintf (file, "%s\"", STRING_ASM_OP);
  for (; (ch = *_limited_str); _limited_str++)
    {
      int escape;

      switch (escape = ESCAPES[ch])
	{
	case 0:
	  putc (ch, file);
	  break;
	case 1:
	  fprintf (file, "\\%03o", ch);
	  break;
	default:
	  putc ('\\', file);
	  putc (escape, file);
	  break;
	}
    }
  fprintf (file, "\"\n");
}
/* The routine used to output sequences of byte values.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable.  Note that if we find subparts of the
   character sequence which end with NUL (and which are shorter than
   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */

void
gas_output_ascii(FILE *file, const char *str, size_t length)
{
  const unsigned char *_ascii_bytes = (const unsigned char *) str;
  const unsigned char *limit = _ascii_bytes + length;
  unsigned bytes_in_chunk = 0;

  for (; _ascii_bytes < limit; _ascii_bytes++)
    {
      const unsigned char *p;

      if (bytes_in_chunk >= 60)
	{
	  fprintf (file, "\"\n");
	  bytes_in_chunk = 0;
	}
      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
	continue;
      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
	{
	  if (bytes_in_chunk > 0)
	    {
	      fprintf (file, "\"\n");
	      bytes_in_chunk = 0;
	    }
	  gas_output_limited_string (file, (const char*)_ascii_bytes);
	  _ascii_bytes = p;
	}
      else
	{
	  int escape;
	  unsigned ch;

	  if (bytes_in_chunk == 0)
	    fprintf (file, "\t.ascii\t\"");
	  switch (escape = ESCAPES[ch = *_ascii_bytes])
	    {
	    case 0:
	      putc (ch, file);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      fprintf (file, "\\%03o", ch);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      putc ('\\', file);
	      putc (escape, file);
	      bytes_in_chunk += 2;
	      break;
	    }
	}
    }
  if (bytes_in_chunk > 0)
    fprintf (file, "\"\n");
}
/* Return value is nonzero if pseudos that have been
   assigned to registers of class CLASS would likely be spilled
   because registers of CLASS are needed for spill registers.  */

int
class_likely_spilled_p (int c)
{
  return (c != ALL_REGS && c != ADDW_REGS);
}
/* Valid attributes:
   progmem   - put data to program memory;
   signal    - make a function to be hardware interrupt.  After function
	       prologue interrupts are disabled;
   interrupt - make a function to be hardware interrupt.  After function
	       prologue interrupts are enabled;
   naked     - don't generate function prologue/epilogue and `ret' command.

   Only `progmem' attribute valid for type.  */

/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
	    {
	      warning (0, "only initialized variables can be placed into "
		       "program memory area");
	      *no_add_attrs = true;
	    }
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_fndecl_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

static tree
avr_handle_fntype_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Look for attribute `progmem' in DECL;
   if found return 1, otherwise 0.  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return 1;

  a = decl;
  do
    a = TREE_TYPE (a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return 1;

  return 0;
}
/* Add the section attribute if the variable is in progmem.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      static const char dsec[] = ".progmem.data";
      *attributes = tree_cons (get_identifier ("section"),
			       build_tree_list (NULL, build_string (strlen (dsec), dsec)),
			       *attributes);

      /* ??? This seems sketchy.  Why can't the user declare the
	 thing const in the first place?  */
      TREE_READONLY (node) = 1;
    }
}
/* A get_unnamed_section callback for switching to progmem_section.  */

static void
avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file,
	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
	   AVR_HAVE_JMP_CALL ? "a" : "ax");
  /* Should already be aligned, this is just to be safe if it isn't.  */
  fprintf (asm_out_file, "\t.p2align 1\n");
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
avr_asm_init_sections (void)
{
  progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
					 avr_output_progmem_section_asm_op,
					 NULL);
  readonly_data_section = data_section;
}
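/* Note (added): data placed via the "progmem" attribute ends up in the
   .progmem.* sections, which the default linker scripts keep in flash;
   such data must be read back with the LPM instruction rather than with
   ordinary data-space loads.  */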
static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (strncmp (name, ".noinit", 7) == 0)
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
      else
	warning (0, "only uninitialized variables can be placed in the "
		 ".noinit section");
    }

  return flags;
}
/* Outputs some appropriate text to go at the start of an assembler
   file.  */

static void
avr_file_start (void)
{
  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_mcu_name);

  default_file_start ();

/*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
  fputs ("__SREG__ = 0x3f\n"
	 "__SP_H__ = 0x3e\n"
	 "__SP_L__ = 0x3d\n", asm_out_file);

  fputs ("__tmp_reg__ = 0\n"
	 "__zero_reg__ = 1\n", asm_out_file);

  /* FIXME: output these only if there is anything in the .data / .bss
     sections - some code size could be saved by not linking in the
     initialization code from libgcc if one or both sections are empty.  */
  fputs ("\t.global __do_copy_data\n", asm_out_file);
  fputs ("\t.global __do_clear_bss\n", asm_out_file);
}

/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
order_regs_for_local_alloc (void)
{
  unsigned int i;
  static const int order_0[] = {
    24,25, 18,19, 20,21, 22,23, 30,31, 26,27, 28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1, 32,33,34,35
  };
  static const int order_1[] = {
    18,19, 20,21, 22,23, 24,25, 30,31, 26,27, 28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1, 32,33,34,35
  };
  static const int order_2[] = {
    25,24, 23,22, 21,20, 19,18, 30,31, 26,27, 28,29,
    17,16,
    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    1,0, 32,33,34,35
  };
  const int *order = (TARGET_ORDER_1 ? order_1 :
		      TARGET_ORDER_2 ? order_2 :
		      order_0);
  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
		      bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      return 0;

    case CONST_INT:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  total = 0;
  avr_rtx_costs (x, code, outer, &total, speed);
  return total;
}
/* The AVR backend's rtx_cost function.  X is rtx expression whose cost
   is to be calculated.  Return true if the complete cost has been
   computed, and false if subexpressions should be scanned.  In either
   case, *TOTAL contains the cost result.  */

static bool
avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
	       bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Immediate constants are as cheap as registers.  */

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));

      *total = COSTS_N_INSNS (1);
      *total = COSTS_N_INSNS (3);
      *total = COSTS_N_INSNS (7);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (2);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (4);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (4);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

      *total = COSTS_N_INSNS (!speed ? 3 : 4);
      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total = COSTS_N_INSNS (!speed ? 7 : 10);
      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
	*total = COSTS_N_INSNS (1);
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
	*total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 1)))
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (5);
	    *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
	  }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	{
	  val = INTVAL (XEXP (x, 1));
	  *total = COSTS_N_INSNS (3);
	  if (val >= 0 && val <= 7)
	    *total = COSTS_N_INSNS (val);
	  *total = COSTS_N_INSNS (1);
	}

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (2);
	    *total = COSTS_N_INSNS (3);
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (5);
	    *total = COSTS_N_INSNS (!speed ? 5 : 8);
	    *total = COSTS_N_INSNS (!speed ? 5 : 9);
	    *total = COSTS_N_INSNS (!speed ? 5 : 10);
	    *total = COSTS_N_INSNS (!speed ? 5 : 41);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (3);
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (6);
	    *total = COSTS_N_INSNS (!speed ? 7 : 8);
	    *total = COSTS_N_INSNS (!speed ? 7 : 113);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	{
	  val = INTVAL (XEXP (x, 1));
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (2);
	  if (val >= 0 && val <= 7)
	    *total = COSTS_N_INSNS (val);
	  *total = COSTS_N_INSNS (1);
	}

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (2);
	    *total = COSTS_N_INSNS (3);
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (5);
	    *total = COSTS_N_INSNS (!speed ? 5 : 6);
	    *total = COSTS_N_INSNS (!speed ? 5 : 7);
	    *total = COSTS_N_INSNS (!speed ? 5 : 8);
	    *total = COSTS_N_INSNS (!speed ? 5 : 41);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (6);
	    *total = COSTS_N_INSNS (!speed ? 7 : 8);
	    *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
	    *total = COSTS_N_INSNS (!speed ? 7 : 113);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	{
	  val = INTVAL (XEXP (x, 1));
	  *total = COSTS_N_INSNS (3);
	  if (val >= 0 && val <= 7)
	    *total = COSTS_N_INSNS (val);
	  *total = COSTS_N_INSNS (1);
	}

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (2);
	    *total = COSTS_N_INSNS (3);
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (5);
	    *total = COSTS_N_INSNS (!speed ? 5 : 6);
	    *total = COSTS_N_INSNS (!speed ? 5 : 7);
	    *total = COSTS_N_INSNS (!speed ? 5 : 9);
	    *total = COSTS_N_INSNS (!speed ? 5 : 41);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }

      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	{
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	}
      else
	switch (INTVAL (XEXP (x, 1)))
	  {
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (!speed ? 7 : 8);
	    *total = COSTS_N_INSNS (4);
	    *total = COSTS_N_INSNS (6);
	    *total = COSTS_N_INSNS (!speed ? 7 : 113);
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);

      switch (GET_MODE (XEXP (x, 0)))
	{
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);

	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);

	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
    }
  return false;
}
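/* Note (added): all of the costs above are expressed with COSTS_N_INSNS,
   i.e. in units of a notional single instruction, so a QImode operation
   costs 1 while the library-call SImode divisions cost over a hundred
   when optimizing for speed.  */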
/* Calculate the cost of a memory address.  */

static int
avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x,1)) == CONST_INT
      && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
      && INTVAL (XEXP (x,1)) >= 61)
    return 18;
  if (CONSTANT_ADDRESS_P (x))
    {
      if (optimize > 0 && io_address_operand (x, QImode))
	return 2;
      return 4;
    }
  return 4;
}
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
	  <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);
      if (TARGET_ALL_DEBUG)
	{
	  fprintf (stderr, ("extra_constraint:\n"
			    "reload_completed: %d\n"
			    "reload_in_progress: %d\n"),
		   reload_completed, reload_in_progress);
	  debug_rtx (x);
	}
      if (regno >= FIRST_PSEUDO_REGISTER)
	return 1;		/* allocate pseudos */
      else if (regno == REG_Z || regno == REG_Y)
	return 1;		/* strictly check */
      else if (xx == frame_pointer_rtx
	       || xx == arg_pointer_rtx)
	return 1;		/* XXX frame & arg pointer checks */
    }
  return 0;
}
/* Convert condition code CONDITION to the valid AVR condition code.  */

RTX_CODE
avr_normalize_condition (RTX_CODE condition)
{
  switch (condition)
    {
    case GT:
      return GE;
    case GTU:
      return GEU;
    case LE:
      return LT;
    case LEU:
      return LTU;
    default:
      gcc_unreachable ();
    }
}
)
5561 /* This function optimizes conditional jumps. */
5568 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5570 if (! (GET_CODE (insn
) == INSN
5571 || GET_CODE (insn
) == CALL_INSN
5572 || GET_CODE (insn
) == JUMP_INSN
)
5573 || !single_set (insn
))
5576 pattern
= PATTERN (insn
);
5578 if (GET_CODE (pattern
) == PARALLEL
)
5579 pattern
= XVECEXP (pattern
, 0, 0);
5580 if (GET_CODE (pattern
) == SET
5581 && SET_DEST (pattern
) == cc0_rtx
5582 && compare_diff_p (insn
))
5584 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5586 /* Now we work under compare insn. */
5588 pattern
= SET_SRC (pattern
);
5589 if (true_regnum (XEXP (pattern
,0)) >= 0
5590 && true_regnum (XEXP (pattern
,1)) >= 0 )
5592 rtx x
= XEXP (pattern
,0);
5593 rtx next
= next_real_insn (insn
);
5594 rtx pat
= PATTERN (next
);
5595 rtx src
= SET_SRC (pat
);
5596 rtx t
= XEXP (src
,0);
5597 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5598 XEXP (pattern
,0) = XEXP (pattern
,1);
5599 XEXP (pattern
,1) = x
;
5600 INSN_CODE (next
) = -1;
5602 else if (true_regnum (XEXP (pattern
, 0)) >= 0
5603 && XEXP (pattern
, 1) == const0_rtx
)
5605 /* This is a tst insn, we can reverse it. */
5606 rtx next
= next_real_insn (insn
);
5607 rtx pat
= PATTERN (next
);
5608 rtx src
= SET_SRC (pat
);
5609 rtx t
= XEXP (src
,0);
5611 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5612 XEXP (pattern
, 1) = XEXP (pattern
, 0);
5613 XEXP (pattern
, 0) = const0_rtx
;
5614 INSN_CODE (next
) = -1;
5615 INSN_CODE (insn
) = -1;
5617 else if (true_regnum (XEXP (pattern
,0)) >= 0
5618 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5620 rtx x
= XEXP (pattern
,1);
5621 rtx next
= next_real_insn (insn
);
5622 rtx pat
= PATTERN (next
);
5623 rtx src
= SET_SRC (pat
);
5624 rtx t
= XEXP (src
,0);
5625 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5627 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5629 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5630 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5631 INSN_CODE (next
) = -1;
5632 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.  */

int
avr_ret_register (void)
{
  return 24;
}

/* Create an RTX representing the place where a
   library function returns a value of mode MODE.  */

rtx
avr_libcall_value (enum machine_mode mode)
{
  int offs = GET_MODE_SIZE (mode);
  if (offs < 2)
    offs = 2;
  return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
}
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
		    const_tree func ATTRIBUTE_UNUSED,
		    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type));

  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
}
/* Places additional restrictions on the register class to
   use when it is necessary to copy value X into a register
   in class CLASS.  */

enum reg_class
preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
{
  return rclass;
}

int
test_hard_reg_class (enum reg_class rclass, rtx x)
{
  int regno = true_regnum (x);
  if (regno < 0)
    return 0;

  if (TEST_HARD_REG_CLASS (rclass, regno))
    return 1;

  return 0;
}
int
jump_over_one_insn_p (rtx insn, rtx dest)
{
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
		      ? XEXP (dest, 0)
		      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  return dest_addr - jump_addr == get_attr_length (insn) + 1;
}
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */
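/* For example, an SImode value may start at r20 (occupying r20..r23)
   but not at r21 (odd start) and not at r26 (it would spill into the
   frame pointer pair r28:r29); only a Pmode value may occupy r28:r29
   itself.  */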
int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Disallow QImode in stack pointer regs.  */
  if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
    return 0;

  /* The only thing that can go into registers r28:r29 is a Pmode.  */
  if (regno == REG_Y && mode == Pmode)
    return 1;

  /* Otherwise disallow all regno/mode combinations that span r28:r29.  */
  if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
    return 0;

  if (mode == QImode)
    return 1;

  /* Modes larger than QImode occupy consecutive registers.  */
  if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
    return 0;

  /* All modes larger than QImode should start in an even register.  */
  return !(regno & 1);
}
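/* Reload a 16-bit value (operand 1, possibly a constant) into the
   register pair %B0:%A0 by way of the scratch register %2, an
   LDI-capable register (r16..r31).  Zero bytes of a constant are copied
   from __zero_reg__ instead of being loaded; e.g. reloading the
   constant 0x1200 should give roughly

       mov %A0,__zero_reg__
       ldi %2,hi8(0x1200)
       mov %B0,%2

   LEN, when non-null, receives the instruction count.  */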
const char *
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  int tmp;
  if (!len)
    len = &tmp;

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      int val = INTVAL (operands[1]);
      if ((val & 0xff) == 0)
        {
          *len = 3;
          return (AS2 (mov,%A0,__zero_reg__) CR_TAB
                  AS2 (ldi,%2,hi8(%1))       CR_TAB
                  AS2 (mov,%B0,%2));
        }
      else if ((val & 0xff00) == 0)
        {
          *len = 3;
          return (AS2 (ldi,%2,lo8(%1)) CR_TAB
                  AS2 (mov,%A0,%2)     CR_TAB
                  AS2 (mov,%B0,__zero_reg__));
        }
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
        {
          *len = 3;
          return (AS2 (ldi,%2,lo8(%1)) CR_TAB
                  AS2 (mov,%A0,%2)     CR_TAB
                  AS2 (mov,%B0,%2));
        }
    }
  *len = 4;
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
          AS2 (mov,%A0,%2)     CR_TAB
          AS2 (ldi,%2,hi8(%1)) CR_TAB
          AS2 (mov,%B0,%2));
}
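/* Reload a 32-bit value (SImode or SFmode, operand 1) into %A0..%D0
   byte by byte through the scratch register %2.  Zero bytes of a
   constant are copied from __zero_reg__, so a constant costs four MOVs
   plus one LDI per nonzero byte.  When LEN is non-null only this length
   is computed and no code is output.  */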
const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      if (cnst)
        *len = 4 + ((INTVAL (src) & 0xff) != 0)
          + ((INTVAL (src) & 0xff00) != 0)
          + ((INTVAL (src) & 0xff0000) != 0)
          + ((INTVAL (src) & 0xff000000) != 0);
      else
        *len = 8;

      return "";
    }

  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
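/* Output a single "bld" instruction that copies the T flag into bit
   BIT_NR of the multi-byte operand 0; e.g. BIT_NR == 10 emits
   "bld %B0,2" (bit 2 of the second byte).  */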
void
avr_output_bld (rtx operands[], int bit_nr)
{
  static char s[] = "bld %A0,0";

  s[5] = 'A' + (bit_nr >> 3);
  s[8] = '0' + (bit_nr & 7);
  output_asm_insn (s, operands);
}
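/* Output one element of a jump table into program memory: on devices
   with JMP/CALL a .word referring to the label through the gs()
   operator, otherwise an RJMP to the label.  */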
void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  switch_to_section (progmem_section);
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}
/* Returns true if register REGNO can safely be allocated as a scratch
   register (for a define_peephole2) in the current function.  */

static bool
avr_hard_regno_scratch_ok (unsigned int regno)
{
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))
    return false;

  return true;
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
                          unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}
/* Output a branch that tests a single bit of a register (QI, HI or SImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
   Operand 3: label to jump to if the test is true.  */
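/* For a QImode register operand and a short forward branch this
   typically expands to something like

       sbrc %1,%2
       rjmp %3

   i.e. the RJMP is skipped unless the tested bit has the wanted value;
   for long branches the sense of the bit test is reversed and the RJMP
   hops over a JMP to the label.  */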
const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;
  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      if (INTVAL (operands[1]) < 0x40)
        {
          if (comp == EQ)
            output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
          else
            output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
        }
      else
        {
          output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
          if (comp == EQ)
            output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
          else
            output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
        }
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
        {
          if (comp == EQ)
            output_asm_insn (AS2 (sbrs,%1,%2), operands);
          else
            output_asm_insn (AS2 (sbrc,%1,%2), operands);
        }
      else  /* HImode or SImode */
        {
          static char buf[] = "sbrc %A1,0";
          int bit_nr = exact_log2 (INTVAL (operands[2])
                                   & GET_MODE_MASK (GET_MODE (operands[1])));

          buf[3] = (comp == EQ) ? 's' : 'c';
          buf[6] = 'A' + (bit_nr >> 3);
          buf[9] = '0' + (bit_nr & 7);
          output_asm_insn (buf, operands);
        }
    }

  if (long_jump)
    return (AS1 (rjmp,.+4) CR_TAB
            AS1 (jmp,%3));

  if (!reverse)
    return AS1 (rjmp,%3);

  return "";
}
/* Worker function for TARGET_ASM_CONSTRUCTOR.  */
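/* The explicit .global reference emitted below should make the linker
   pull in the __do_global_ctors loop from libgcc so that the recorded
   constructors actually run at startup.  */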
static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
/* Worker function for TARGET_ASM_DESTRUCTOR.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
/* Worker function for TARGET_RETURN_IN_MEMORY.  */
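/* Aggregates (BLKmode) of unknown size or larger than 8 bytes are
   returned in memory; everything else fits into the return registers
   (r18..r25 at most).  */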
static bool
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  if (TYPE_MODE (type) == BLKmode)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
    }

  return false;
}
/* Worker function for CASE_VALUES_THRESHOLD.  */

unsigned int avr_case_values_threshold (void)
{
  return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
}