1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
44 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree
);
52 static int interrupt_function_p (tree
);
53 static int signal_function_p (tree
);
54 static int avr_OS_task_function_p (tree
);
55 static int avr_OS_main_function_p (tree
);
56 static int avr_regs_to_save (HARD_REG_SET
*);
57 static int get_sequence_length (rtx insns
);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code
);
61 static int avr_num_arg_regs (enum machine_mode
, tree
);
63 static RTX_CODE
compare_condition (rtx insn
);
64 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
65 static int compare_sign_p (rtx insn
);
66 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
67 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
68 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
69 static bool avr_assemble_integer (rtx
, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx
avr_function_value (const_tree
, const_tree
, bool);
76 static void avr_insert_attributes (tree
, tree
*);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree
, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx
, int);
82 static void avr_asm_out_dtor (rtx
, int);
83 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
84 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
85 static int avr_address_cost (rtx
, bool);
86 static bool avr_return_in_memory (const_tree
, const_tree
);
87 static struct machine_function
* avr_init_machine_status (void);
88 static rtx
avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
90 static unsigned int avr_case_values_threshold (void);
91 static bool avr_frame_pointer_required_p (void);
92 static bool avr_can_eliminate (const int, const int);
94 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): the comment above says r25, but the macro value is 26;
   callers appear to count DOWN from this register — confirm against
   CUMULATIVE_ARGS handling before changing either. */
95 #define FIRST_CUM_REG 26
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
/* Initialized in avr_override_options; GTY(()) roots it for the GC. */
98 static GTY(()) rtx tmp_reg_rtx
;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
/* Initialized in avr_override_options; GTY(()) roots it for the GC. */
101 static GTY(()) rtx zero_reg_rtx
;
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames
[] = REGISTER_NAMES
;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro
;
109 /* Current architecture. */
/* Set from avr_arch_types[] in avr_override_options. */
110 const struct base_arch_s
*avr_current_arch
;
112 /* Current device. */
/* Entry of avr_mcu_types[] matching -mmcu=, set in avr_override_options. */
113 const struct mcu_type_s
*avr_current_device
;
/* Section object for data placed in program memory (progmem attribute). */
115 section
*progmem_section
;
117 /* AVR attributes. */
118 static const struct attribute_spec avr_attribute_table
[] =
120 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
121 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
122 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
123 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
124 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
125 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
126 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
},
127 { NULL
, 0, 0, false, false, false, NULL
}
130 /* Initialize the GCC target structure. */
131 #undef TARGET_ASM_ALIGNED_HI_OP
132 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
133 #undef TARGET_ASM_ALIGNED_SI_OP
134 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
135 #undef TARGET_ASM_UNALIGNED_HI_OP
136 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
137 #undef TARGET_ASM_UNALIGNED_SI_OP
138 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
139 #undef TARGET_ASM_INTEGER
140 #define TARGET_ASM_INTEGER avr_assemble_integer
141 #undef TARGET_ASM_FILE_START
142 #define TARGET_ASM_FILE_START avr_file_start
143 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
144 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
145 #undef TARGET_ASM_FILE_END
146 #define TARGET_ASM_FILE_END avr_file_end
148 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
149 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
150 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
151 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
152 #undef TARGET_FUNCTION_VALUE
153 #define TARGET_FUNCTION_VALUE avr_function_value
154 #undef TARGET_ATTRIBUTE_TABLE
155 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
156 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
157 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
158 #undef TARGET_INSERT_ATTRIBUTES
159 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
160 #undef TARGET_SECTION_TYPE_FLAGS
161 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
162 #undef TARGET_RTX_COSTS
163 #define TARGET_RTX_COSTS avr_rtx_costs
164 #undef TARGET_ADDRESS_COST
165 #define TARGET_ADDRESS_COST avr_address_cost
166 #undef TARGET_MACHINE_DEPENDENT_REORG
167 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
169 #undef TARGET_LEGITIMIZE_ADDRESS
170 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
175 #undef TARGET_STRICT_ARGUMENT_NAMING
176 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
178 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
179 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
181 #undef TARGET_HARD_REGNO_SCRATCH_OK
182 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
183 #undef TARGET_CASE_VALUES_THRESHOLD
184 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
186 #undef TARGET_LEGITIMATE_ADDRESS_P
187 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
189 #undef TARGET_FRAME_POINTER_REQUIRED
190 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
191 #undef TARGET_CAN_ELIMINATE
192 #define TARGET_CAN_ELIMINATE avr_can_eliminate
/* The global target hook vector; the TARGET_* macro overrides above are
   folded in by TARGET_INITIALIZER. */
194 struct gcc_target targetm
= TARGET_INITIALIZER
;
197 avr_override_options (void)
199 const struct mcu_type_s
*t
;
201 flag_delete_null_pointer_checks
= 0;
203 for (t
= avr_mcu_types
; t
->name
; t
++)
204 if (strcmp (t
->name
, avr_mcu_name
) == 0)
209 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
211 for (t
= avr_mcu_types
; t
->name
; t
++)
212 fprintf (stderr
," %s\n", t
->name
);
215 avr_current_device
= t
;
216 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
217 avr_extra_arch_macro
= avr_current_device
->macro
;
219 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
220 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
222 init_machine_status
= avr_init_machine_status
;
225 /* return register class from register number. */
227 static const enum reg_class reg_class_tab
[]={
228 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
229 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
230 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
231 GENERAL_REGS
, /* r0 - r15 */
232 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
233 LD_REGS
, /* r16 - 23 */
234 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
235 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
236 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
237 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
238 STACK_REG
,STACK_REG
/* SPL,SPH */
/* Function to set up the backend function structure.  Registered as
   init_machine_status in avr_override_options; returns a zeroed,
   GC-allocated machine_function for the current function.  */
static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
249 /* Return register class for register R. */
252 avr_regno_reg_class (int r
)
255 return reg_class_tab
[r
];
259 /* Return nonzero if FUNC is a naked function. */
262 avr_naked_function_p (tree func
)
266 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
268 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
269 return a
!= NULL_TREE
;
272 /* Return nonzero if FUNC is an interrupt function as specified
273 by the "interrupt" attribute. */
276 interrupt_function_p (tree func
)
280 if (TREE_CODE (func
) != FUNCTION_DECL
)
283 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
284 return a
!= NULL_TREE
;
287 /* Return nonzero if FUNC is a signal function as specified
288 by the "signal" attribute. */
291 signal_function_p (tree func
)
295 if (TREE_CODE (func
) != FUNCTION_DECL
)
298 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
299 return a
!= NULL_TREE
;
302 /* Return nonzero if FUNC is a OS_task function. */
305 avr_OS_task_function_p (tree func
)
309 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
311 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
312 return a
!= NULL_TREE
;
315 /* Return nonzero if FUNC is a OS_main function. */
318 avr_OS_main_function_p (tree func
)
322 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
324 a
= lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
325 return a
!= NULL_TREE
;
328 /* Return the number of hard registers to push/pop in the prologue/epilogue
329 of the current function, and optionally store these registers in SET. */
332 avr_regs_to_save (HARD_REG_SET
*set
)
335 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
336 || signal_function_p (current_function_decl
));
339 CLEAR_HARD_REG_SET (*set
);
342 /* No need to save any registers if the function never returns or
343 is have "OS_task" or "OS_main" attribute. */
344 if (TREE_THIS_VOLATILE (current_function_decl
)
345 || cfun
->machine
->is_OS_task
346 || cfun
->machine
->is_OS_main
)
349 for (reg
= 0; reg
< 32; reg
++)
351 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
352 any global register variables. */
356 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
357 || (df_regs_ever_live_p (reg
)
358 && (int_or_sig_p
|| !call_used_regs
[reg
])
359 && !(frame_pointer_needed
360 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
363 SET_HARD_REG_BIT (*set
, reg
);
370 /* Return true if register FROM can be eliminated via register TO. */
373 avr_can_eliminate (const int from
, const int to
)
375 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
376 || ((from
== FRAME_POINTER_REGNUM
377 || from
== FRAME_POINTER_REGNUM
+ 1)
378 && !frame_pointer_needed
));
381 /* Compute offset between arg_pointer and frame_pointer. */
384 avr_initial_elimination_offset (int from
, int to
)
386 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
390 int offset
= frame_pointer_needed
? 2 : 0;
391 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
393 offset
+= avr_regs_to_save (NULL
);
394 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
398 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
399 frame pointer by +STARTING_FRAME_OFFSET.
400 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
401 avoids creating add/sub of offset in nonlocal goto and setjmp. */
403 rtx
avr_builtin_setjmp_frame_value (void)
405 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
406 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
409 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
410 This is return address of function. */
412 avr_return_addr_rtx (int count
, const_rtx tem
)
416 /* Can only return this functions return address. Others not supported. */
422 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
423 warning (0, "'builtin_return_address' contains only 2 bytes of address");
426 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
428 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
429 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
430 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
434 /* Return 1 if the function epilogue is just a single "ret". */
437 avr_simple_epilogue (void)
439 return (! frame_pointer_needed
440 && get_frame_size () == 0
441 && avr_regs_to_save (NULL
) == 0
442 && ! interrupt_function_p (current_function_decl
)
443 && ! signal_function_p (current_function_decl
)
444 && ! avr_naked_function_p (current_function_decl
)
445 && ! TREE_THIS_VOLATILE (current_function_decl
));
448 /* This function checks sequence of live registers. */
451 sequent_regs_live (void)
457 for (reg
= 0; reg
< 18; ++reg
)
459 if (!call_used_regs
[reg
])
461 if (df_regs_ever_live_p (reg
))
471 if (!frame_pointer_needed
)
473 if (df_regs_ever_live_p (REG_Y
))
481 if (df_regs_ever_live_p (REG_Y
+1))
494 return (cur_seq
== live_seq
) ? live_seq
: 0;
497 /* Obtain the length sequence of insns. */
500 get_sequence_length (rtx insns
)
505 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
506 length
+= get_attr_length (insn
);
511 /* Output function prologue. */
514 expand_prologue (void)
519 HOST_WIDE_INT size
= get_frame_size();
520 /* Define templates for push instructions. */
521 rtx pushbyte
= gen_rtx_MEM (QImode
,
522 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
523 rtx pushword
= gen_rtx_MEM (HImode
,
524 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
527 /* Init cfun->machine. */
528 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
529 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
530 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
531 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
532 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
533 cfun
->machine
->stack_usage
= 0;
535 /* Prologue: naked. */
536 if (cfun
->machine
->is_naked
)
541 avr_regs_to_save (&set
);
542 live_seq
= sequent_regs_live ();
543 minimize
= (TARGET_CALL_PROLOGUES
544 && !cfun
->machine
->is_interrupt
545 && !cfun
->machine
->is_signal
546 && !cfun
->machine
->is_OS_task
547 && !cfun
->machine
->is_OS_main
550 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
552 if (cfun
->machine
->is_interrupt
)
554 /* Enable interrupts. */
555 insn
= emit_insn (gen_enable_interrupt ());
556 RTX_FRAME_RELATED_P (insn
) = 1;
560 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
561 RTX_FRAME_RELATED_P (insn
) = 1;
562 cfun
->machine
->stack_usage
++;
565 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
566 RTX_FRAME_RELATED_P (insn
) = 1;
567 cfun
->machine
->stack_usage
++;
570 insn
= emit_move_insn (tmp_reg_rtx
,
571 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
572 RTX_FRAME_RELATED_P (insn
) = 1;
573 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
574 RTX_FRAME_RELATED_P (insn
) = 1;
575 cfun
->machine
->stack_usage
++;
579 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
581 insn
= emit_move_insn (tmp_reg_rtx
,
582 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
583 RTX_FRAME_RELATED_P (insn
) = 1;
584 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
585 RTX_FRAME_RELATED_P (insn
) = 1;
586 cfun
->machine
->stack_usage
++;
589 /* Clear zero reg. */
590 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
591 RTX_FRAME_RELATED_P (insn
) = 1;
593 /* Prevent any attempt to delete the setting of ZERO_REG! */
594 emit_use (zero_reg_rtx
);
596 if (minimize
&& (frame_pointer_needed
597 || (AVR_2_BYTE_PC
&& live_seq
> 6)
600 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
601 gen_int_mode (size
, HImode
));
602 RTX_FRAME_RELATED_P (insn
) = 1;
605 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
606 gen_int_mode (size
+ live_seq
, HImode
)));
607 RTX_FRAME_RELATED_P (insn
) = 1;
608 cfun
->machine
->stack_usage
+= size
+ live_seq
;
613 for (reg
= 0; reg
< 32; ++reg
)
615 if (TEST_HARD_REG_BIT (set
, reg
))
617 /* Emit push of register to save. */
618 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
619 RTX_FRAME_RELATED_P (insn
) = 1;
620 cfun
->machine
->stack_usage
++;
623 if (frame_pointer_needed
)
625 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
627 /* Push frame pointer. */
628 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
629 RTX_FRAME_RELATED_P (insn
) = 1;
630 cfun
->machine
->stack_usage
+= 2;
635 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
636 RTX_FRAME_RELATED_P (insn
) = 1;
640 /* Creating a frame can be done by direct manipulation of the
641 stack or via the frame pointer. These two methods are:
648 the optimum method depends on function type, stack and frame size.
649 To avoid a complex logic, both methods are tested and shortest
653 rtx sp_plus_insns
= NULL_RTX
;
655 if (AVR_HAVE_8BIT_SP
)
657 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
658 over 'sbiw' (2 cycles, same size). */
659 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
663 /* Normal sized addition. */
664 myfp
= frame_pointer_rtx
;
667 /* Method 1-Adjust frame pointer. */
670 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
671 RTX_FRAME_RELATED_P (insn
) = 1;
674 emit_move_insn (myfp
,
675 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
678 RTX_FRAME_RELATED_P (insn
) = 1;
680 /* Copy to stack pointer. */
681 if (AVR_HAVE_8BIT_SP
)
683 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
684 RTX_FRAME_RELATED_P (insn
) = 1;
686 else if (TARGET_NO_INTERRUPTS
687 || cfun
->machine
->is_signal
688 || cfun
->machine
->is_OS_main
)
691 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
693 RTX_FRAME_RELATED_P (insn
) = 1;
695 else if (cfun
->machine
->is_interrupt
)
697 insn
= emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
699 RTX_FRAME_RELATED_P (insn
) = 1;
703 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
704 RTX_FRAME_RELATED_P (insn
) = 1;
707 fp_plus_insns
= get_insns ();
710 /* Method 2-Adjust Stack pointer. */
716 emit_move_insn (stack_pointer_rtx
,
717 gen_rtx_PLUS (HImode
,
721 RTX_FRAME_RELATED_P (insn
) = 1;
724 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
725 RTX_FRAME_RELATED_P (insn
) = 1;
727 sp_plus_insns
= get_insns ();
731 /* Use shortest method. */
732 if (size
<= 6 && (get_sequence_length (sp_plus_insns
)
733 < get_sequence_length (fp_plus_insns
)))
734 emit_insn (sp_plus_insns
);
736 emit_insn (fp_plus_insns
);
737 cfun
->machine
->stack_usage
+= size
;
743 /* Output summary at end of function prologue. */
746 avr_asm_function_end_prologue (FILE *file
)
748 if (cfun
->machine
->is_naked
)
750 fputs ("/* prologue: naked */\n", file
);
754 if (cfun
->machine
->is_interrupt
)
756 fputs ("/* prologue: Interrupt */\n", file
);
758 else if (cfun
->machine
->is_signal
)
760 fputs ("/* prologue: Signal */\n", file
);
763 fputs ("/* prologue: function */\n", file
);
765 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
767 fprintf (file
, "/* stack size = %d */\n",
768 cfun
->machine
->stack_usage
);
769 /* Create symbol stack offset here so all functions have it. Add 1 to stack
770 usage for offset so that SP + .L__stack_offset = return address. */
771 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
775 /* Implement EPILOGUE_USES. */
778 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
782 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
787 /* Output RTL epilogue. */
790 expand_epilogue (void)
796 HOST_WIDE_INT size
= get_frame_size();
798 /* epilogue: naked */
799 if (cfun
->machine
->is_naked
)
801 emit_jump_insn (gen_return ());
805 avr_regs_to_save (&set
);
806 live_seq
= sequent_regs_live ();
807 minimize
= (TARGET_CALL_PROLOGUES
808 && !cfun
->machine
->is_interrupt
809 && !cfun
->machine
->is_signal
810 && !cfun
->machine
->is_OS_task
811 && !cfun
->machine
->is_OS_main
814 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
816 if (frame_pointer_needed
)
818 /* Get rid of frame. */
819 emit_move_insn(frame_pointer_rtx
,
820 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
821 gen_int_mode (size
, HImode
)));
825 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
828 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
832 if (frame_pointer_needed
)
836 /* Try two methods to adjust stack and select shortest. */
839 rtx sp_plus_insns
= NULL_RTX
;
841 if (AVR_HAVE_8BIT_SP
)
843 /* The high byte (r29) doesn't change - prefer 'subi'
844 (1 cycle) over 'sbiw' (2 cycles, same size). */
845 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
849 /* Normal sized addition. */
850 myfp
= frame_pointer_rtx
;
853 /* Method 1-Adjust frame pointer. */
856 emit_move_insn (myfp
,
857 gen_rtx_PLUS (GET_MODE (myfp
), myfp
,
861 /* Copy to stack pointer. */
862 if (AVR_HAVE_8BIT_SP
)
864 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
866 else if (TARGET_NO_INTERRUPTS
867 || cfun
->machine
->is_signal
)
869 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
872 else if (cfun
->machine
->is_interrupt
)
874 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
879 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
882 fp_plus_insns
= get_insns ();
885 /* Method 2-Adjust Stack pointer. */
890 emit_move_insn (stack_pointer_rtx
,
891 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
895 sp_plus_insns
= get_insns ();
899 /* Use shortest method. */
900 if (size
<= 5 && (get_sequence_length (sp_plus_insns
)
901 < get_sequence_length (fp_plus_insns
)))
902 emit_insn (sp_plus_insns
);
904 emit_insn (fp_plus_insns
);
906 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
908 /* Restore previous frame_pointer. */
909 emit_insn (gen_pophi (frame_pointer_rtx
));
912 /* Restore used registers. */
913 for (reg
= 31; reg
>= 0; --reg
)
915 if (TEST_HARD_REG_BIT (set
, reg
))
916 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
918 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
920 /* Restore RAMPZ using tmp reg as scratch. */
922 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
924 emit_insn (gen_popqi (tmp_reg_rtx
));
925 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
929 /* Restore SREG using tmp reg as scratch. */
930 emit_insn (gen_popqi (tmp_reg_rtx
));
932 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
935 /* Restore tmp REG. */
936 emit_insn (gen_popqi (tmp_reg_rtx
));
938 /* Restore zero REG. */
939 emit_insn (gen_popqi (zero_reg_rtx
));
942 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
954 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
955 machine for a memory operand of mode MODE. */
958 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
960 enum reg_class r
= NO_REGS
;
962 if (TARGET_ALL_DEBUG
)
964 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
966 strict
? "(strict)": "",
967 reload_completed
? "(reload_completed)": "",
968 reload_in_progress
? "(reload_in_progress)": "",
969 reg_renumber
? "(reg_renumber)" : "");
970 if (GET_CODE (x
) == PLUS
971 && REG_P (XEXP (x
, 0))
972 && GET_CODE (XEXP (x
, 1)) == CONST_INT
973 && INTVAL (XEXP (x
, 1)) >= 0
974 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
977 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
978 true_regnum (XEXP (x
, 0)));
981 if (!strict
&& GET_CODE (x
) == SUBREG
)
983 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
984 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
986 else if (CONSTANT_ADDRESS_P (x
))
988 else if (GET_CODE (x
) == PLUS
989 && REG_P (XEXP (x
, 0))
990 && GET_CODE (XEXP (x
, 1)) == CONST_INT
991 && INTVAL (XEXP (x
, 1)) >= 0)
993 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
997 || REGNO (XEXP (x
,0)) == REG_X
998 || REGNO (XEXP (x
,0)) == REG_Y
999 || REGNO (XEXP (x
,0)) == REG_Z
)
1000 r
= BASE_POINTER_REGS
;
1001 if (XEXP (x
,0) == frame_pointer_rtx
1002 || XEXP (x
,0) == arg_pointer_rtx
)
1003 r
= BASE_POINTER_REGS
;
1005 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1008 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1009 && REG_P (XEXP (x
, 0))
1010 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1011 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1015 if (TARGET_ALL_DEBUG
)
1017 fprintf (stderr
, " ret = %c\n", r
+ '0');
1019 return r
== NO_REGS
? 0 : (int)r
;
1022 /* Attempts to replace X with a valid
1023 memory address for an operand of mode MODE */
1026 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1029 if (TARGET_ALL_DEBUG
)
1031 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1035 if (GET_CODE (oldx
) == PLUS
1036 && REG_P (XEXP (oldx
,0)))
1038 if (REG_P (XEXP (oldx
,1)))
1039 x
= force_reg (GET_MODE (oldx
), oldx
);
1040 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1042 int offs
= INTVAL (XEXP (oldx
,1));
1043 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1044 if (offs
> MAX_LD_OFFSET (mode
))
1046 if (TARGET_ALL_DEBUG
)
1047 fprintf (stderr
, "force_reg (big offset)\n");
1048 x
= force_reg (GET_MODE (oldx
), oldx
);
1056 /* Return a pointer register name as a string. */
1059 ptrreg_to_str (int regno
)
1063 case REG_X
: return "X";
1064 case REG_Y
: return "Y";
1065 case REG_Z
: return "Z";
1067 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1072 /* Return the condition name as a string.
1073 Used in conditional jump constructing */
1076 cond_string (enum rtx_code code
)
1085 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1090 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1103 /* Output ADDR to FILE as address. */
1106 print_operand_address (FILE *file
, rtx addr
)
1108 switch (GET_CODE (addr
))
1111 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1115 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1119 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1123 if (CONSTANT_ADDRESS_P (addr
)
1124 && text_segment_operand (addr
, VOIDmode
))
1126 rtx x
= XEXP (addr
,0);
1127 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1129 /* Assembler gs() will implant word address. Make offset
1130 a byte offset inside gs() for assembler. This is
1131 needed because the more logical (constant+gs(sym)) is not
1132 accepted by gas. For 128K and lower devices this is ok. For
1133 large devices it will create a Trampoline to offset from symbol
1134 which may not be what the user really wanted. */
1135 fprintf (file
, "gs(");
1136 output_addr_const (file
, XEXP (x
,0));
1137 fprintf (file
,"+" HOST_WIDE_INT_PRINT_DEC
")", 2 * INTVAL (XEXP (x
,1)));
1139 if (warning ( 0, "Pointer offset from symbol maybe incorrect."))
1141 output_addr_const (stderr
, addr
);
1142 fprintf(stderr
,"\n");
1147 fprintf (file
, "gs(");
1148 output_addr_const (file
, addr
);
1149 fprintf (file
, ")");
1153 output_addr_const (file
, addr
);
1158 /* Output X as assembler operand to file FILE. */
1161 print_operand (FILE *file
, rtx x
, int code
)
1165 if (code
>= 'A' && code
<= 'D')
1170 if (!AVR_HAVE_JMP_CALL
)
1173 else if (code
== '!')
1175 if (AVR_HAVE_EIJMP_EICALL
)
1180 if (x
== zero_reg_rtx
)
1181 fprintf (file
, "__zero_reg__");
1183 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1185 else if (GET_CODE (x
) == CONST_INT
)
1186 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1187 else if (GET_CODE (x
) == MEM
)
1189 rtx addr
= XEXP (x
,0);
1192 if (!CONSTANT_P (addr
))
1193 fatal_insn ("bad address, not a constant):", addr
);
1194 /* Assembler template with m-code is data - not progmem section */
1195 if (text_segment_operand (addr
, VOIDmode
))
1196 if (warning ( 0, "accessing data memory with program memory address"))
1198 output_addr_const (stderr
, addr
);
1199 fprintf(stderr
,"\n");
1201 output_addr_const (file
, addr
);
1203 else if (code
== 'o')
1205 if (GET_CODE (addr
) != PLUS
)
1206 fatal_insn ("bad address, not (reg+disp):", addr
);
1208 print_operand (file
, XEXP (addr
, 1), 0);
1210 else if (code
== 'p' || code
== 'r')
1212 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1213 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1216 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1218 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1220 else if (GET_CODE (addr
) == PLUS
)
1222 print_operand_address (file
, XEXP (addr
,0));
1223 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1224 fatal_insn ("internal compiler error. Bad address:"
1227 print_operand (file
, XEXP (addr
,1), code
);
1230 print_operand_address (file
, addr
);
1232 else if (code
== 'x')
1234 /* Constant progmem address - like used in jmp or call */
1235 if (0 == text_segment_operand (x
, VOIDmode
))
1236 if (warning ( 0, "accessing program memory with data memory address"))
1238 output_addr_const (stderr
, x
);
1239 fprintf(stderr
,"\n");
1241 /* Use normal symbol for direct address no linker trampoline needed */
1242 output_addr_const (file
, x
);
1244 else if (GET_CODE (x
) == CONST_DOUBLE
)
1248 if (GET_MODE (x
) != SFmode
)
1249 fatal_insn ("internal compiler error. Unknown mode:", x
);
1250 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1251 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1252 fprintf (file
, "0x%lx", val
);
1254 else if (code
== 'j')
1255 fputs (cond_string (GET_CODE (x
)), file
);
1256 else if (code
== 'k')
1257 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1259 print_operand_address (file
, x
);
1262 /* Update the condition code in the INSN. */
1265 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1269 switch (get_attr_cc (insn
))
1272 /* Insn does not affect CC at all. */
1280 set
= single_set (insn
);
1284 cc_status
.flags
|= CC_NO_OVERFLOW
;
1285 cc_status
.value1
= SET_DEST (set
);
1290 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1291 The V flag may or may not be known but that's ok because
1292 alter_cond will change tests to use EQ/NE. */
1293 set
= single_set (insn
);
1297 cc_status
.value1
= SET_DEST (set
);
1298 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1303 set
= single_set (insn
);
1306 cc_status
.value1
= SET_SRC (set
);
1310 /* Insn doesn't leave CC in a usable state. */
1313 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1314 set
= single_set (insn
);
1317 rtx src
= SET_SRC (set
);
1319 if (GET_CODE (src
) == ASHIFTRT
1320 && GET_MODE (src
) == QImode
)
1322 rtx x
= XEXP (src
, 1);
1324 if (GET_CODE (x
) == CONST_INT
1328 cc_status
.value1
= SET_DEST (set
);
1329 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1337 /* Return maximum number of consecutive registers of
1338 class CLASS needed to hold a value of mode MODE. */
1341 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1343 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1346 /* Choose mode for jump insn:
1347 1 - relative jump in range -63 <= x <= 62 ;
1348 2 - relative jump in range -2046 <= x <= 2045 ;
1349 3 - absolute jump (only for ATmega[16]03). */
1352 avr_jump_mode (rtx x
, rtx insn
)
1354 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
1355 ? XEXP (x
, 0) : x
));
1356 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1357 int jump_distance
= cur_addr
- dest_addr
;
1359 if (-63 <= jump_distance
&& jump_distance
<= 62)
1361 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1363 else if (AVR_HAVE_JMP_CALL
)
1369 /* return an AVR condition jump commands.
1370 X is a comparison RTX.
1371 LEN is a number returned by avr_jump_mode function.
1372 if REVERSE nonzero then condition code in X must be reversed. */
1375 ret_cond_branch (rtx x
, int len
, int reverse
)
1377 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1382 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1383 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1385 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1386 AS1 (brmi
,.+2) CR_TAB
1388 (AS1 (breq
,.+6) CR_TAB
1389 AS1 (brmi
,.+4) CR_TAB
1393 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1395 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1396 AS1 (brlt
,.+2) CR_TAB
1398 (AS1 (breq
,.+6) CR_TAB
1399 AS1 (brlt
,.+4) CR_TAB
1402 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1404 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1405 AS1 (brlo
,.+2) CR_TAB
1407 (AS1 (breq
,.+6) CR_TAB
1408 AS1 (brlo
,.+4) CR_TAB
1411 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1412 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1414 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1415 AS1 (brpl
,.+2) CR_TAB
1417 (AS1 (breq
,.+2) CR_TAB
1418 AS1 (brpl
,.+4) CR_TAB
1421 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1423 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1424 AS1 (brge
,.+2) CR_TAB
1426 (AS1 (breq
,.+2) CR_TAB
1427 AS1 (brge
,.+4) CR_TAB
1430 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1432 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1433 AS1 (brsh
,.+2) CR_TAB
1435 (AS1 (breq
,.+2) CR_TAB
1436 AS1 (brsh
,.+4) CR_TAB
1444 return AS1 (br
%k1
,%0);
1446 return (AS1 (br
%j1
,.+2) CR_TAB
1449 return (AS1 (br
%j1
,.+4) CR_TAB
1458 return AS1 (br
%j1
,%0);
1460 return (AS1 (br
%k1
,.+2) CR_TAB
1463 return (AS1 (br
%k1
,.+4) CR_TAB
1471 /* Predicate function for immediate operand which fits to byte (8bit) */
1474 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1476 return (GET_CODE (op
) == CONST_INT
1477 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1480 /* Output insn cost for next insn. */
1483 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1484 int num_operands ATTRIBUTE_UNUSED
)
1486 if (TARGET_ALL_DEBUG
)
1488 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
1489 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1493 /* Return 0 if undefined, 1 if always true or always false. */
1496 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1498 unsigned int max
= (mode
== QImode
? 0xff :
1499 mode
== HImode
? 0xffff :
1500 mode
== SImode
? 0xffffffff : 0);
1501 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1503 if (unsigned_condition (op
) != op
)
1506 if (max
!= (INTVAL (x
) & max
)
1507 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR these are r8..r25.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
1523 /* Initializing the variable cum for the state at the beginning
1524 of the argument list. */
1527 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1528 tree fndecl ATTRIBUTE_UNUSED
)
1531 cum
->regno
= FIRST_CUM_REG
;
1532 if (!libname
&& fntype
)
1534 int stdarg
= (TYPE_ARG_TYPES (fntype
) != 0
1535 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
1536 != void_type_node
));
1542 /* Returns the number of registers to allocate for a function argument. */
1545 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1549 if (mode
== BLKmode
)
1550 size
= int_size_in_bytes (type
);
1552 size
= GET_MODE_SIZE (mode
);
1554 /* Align all function arguments to start in even-numbered registers.
1555 Odd-sized arguments leave holes above them. */
1557 return (size
+ 1) & ~1;
1560 /* Controls whether a function argument is passed
1561 in a register, and which register. */
1564 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1565 int named ATTRIBUTE_UNUSED
)
1567 int bytes
= avr_num_arg_regs (mode
, type
);
1569 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1570 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1575 /* Update the summarizer variable CUM to advance past an argument
1576 in the argument list. */
1579 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1580 int named ATTRIBUTE_UNUSED
)
1582 int bytes
= avr_num_arg_regs (mode
, type
);
1584 cum
->nregs
-= bytes
;
1585 cum
->regno
-= bytes
;
1587 if (cum
->nregs
<= 0)
1590 cum
->regno
= FIRST_CUM_REG
;
1594 /***********************************************************************
1595 Functions for outputting various mov's for a various modes
1596 ************************************************************************/
1598 output_movqi (rtx insn
, rtx operands
[], int *l
)
1601 rtx dest
= operands
[0];
1602 rtx src
= operands
[1];
1610 if (register_operand (dest
, QImode
))
1612 if (register_operand (src
, QImode
)) /* mov r,r */
1614 if (test_hard_reg_class (STACK_REG
, dest
))
1615 return AS2 (out
,%0,%1);
1616 else if (test_hard_reg_class (STACK_REG
, src
))
1617 return AS2 (in
,%0,%1);
1619 return AS2 (mov
,%0,%1);
1621 else if (CONSTANT_P (src
))
1623 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1624 return AS2 (ldi
,%0,lo8(%1));
1626 if (GET_CODE (src
) == CONST_INT
)
1628 if (src
== const0_rtx
) /* mov r,L */
1629 return AS1 (clr
,%0);
1630 else if (src
== const1_rtx
)
1633 return (AS1 (clr
,%0) CR_TAB
1636 else if (src
== constm1_rtx
)
1638 /* Immediate constants -1 to any register */
1640 return (AS1 (clr
,%0) CR_TAB
1645 int bit_nr
= exact_log2 (INTVAL (src
));
1651 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1654 avr_output_bld (operands
, bit_nr
);
1661 /* Last resort, larger than loading from memory. */
1663 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1664 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1665 AS2 (mov
,%0,r31
) CR_TAB
1666 AS2 (mov
,r31
,__tmp_reg__
));
1668 else if (GET_CODE (src
) == MEM
)
1669 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1671 else if (GET_CODE (dest
) == MEM
)
1675 if (src
== const0_rtx
)
1676 operands
[1] = zero_reg_rtx
;
1678 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1681 output_asm_insn (templ
, operands
);
1690 output_movhi (rtx insn
, rtx operands
[], int *l
)
1693 rtx dest
= operands
[0];
1694 rtx src
= operands
[1];
1700 if (register_operand (dest
, HImode
))
1702 if (register_operand (src
, HImode
)) /* mov r,r */
1704 if (test_hard_reg_class (STACK_REG
, dest
))
1706 if (AVR_HAVE_8BIT_SP
)
1707 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1708 /* Use simple load of stack pointer if no interrupts are
1710 else if (TARGET_NO_INTERRUPTS
)
1711 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1712 AS2 (out
,__SP_L__
,%A1
));
1714 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1716 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1717 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1718 AS2 (out
,__SP_L__
,%A1
));
1720 else if (test_hard_reg_class (STACK_REG
, src
))
1723 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1724 AS2 (in
,%B0
,__SP_H__
));
1730 return (AS2 (movw
,%0,%1));
1735 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1739 else if (CONSTANT_P (src
))
1741 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1744 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1745 AS2 (ldi
,%B0
,hi8(%1)));
1748 if (GET_CODE (src
) == CONST_INT
)
1750 if (src
== const0_rtx
) /* mov r,L */
1753 return (AS1 (clr
,%A0
) CR_TAB
1756 else if (src
== const1_rtx
)
1759 return (AS1 (clr
,%A0
) CR_TAB
1760 AS1 (clr
,%B0
) CR_TAB
1763 else if (src
== constm1_rtx
)
1765 /* Immediate constants -1 to any register */
1767 return (AS1 (clr
,%0) CR_TAB
1768 AS1 (dec
,%A0
) CR_TAB
1773 int bit_nr
= exact_log2 (INTVAL (src
));
1779 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1780 AS1 (clr
,%B0
) CR_TAB
1783 avr_output_bld (operands
, bit_nr
);
1789 if ((INTVAL (src
) & 0xff) == 0)
1792 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1793 AS1 (clr
,%A0
) CR_TAB
1794 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1795 AS2 (mov
,%B0
,r31
) CR_TAB
1796 AS2 (mov
,r31
,__tmp_reg__
));
1798 else if ((INTVAL (src
) & 0xff00) == 0)
1801 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1802 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1803 AS2 (mov
,%A0
,r31
) CR_TAB
1804 AS1 (clr
,%B0
) CR_TAB
1805 AS2 (mov
,r31
,__tmp_reg__
));
1809 /* Last resort, equal to loading from memory. */
1811 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1812 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1813 AS2 (mov
,%A0
,r31
) CR_TAB
1814 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1815 AS2 (mov
,%B0
,r31
) CR_TAB
1816 AS2 (mov
,r31
,__tmp_reg__
));
1818 else if (GET_CODE (src
) == MEM
)
1819 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1821 else if (GET_CODE (dest
) == MEM
)
1825 if (src
== const0_rtx
)
1826 operands
[1] = zero_reg_rtx
;
1828 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
1831 output_asm_insn (templ
, operands
);
1836 fatal_insn ("invalid insn:", insn
);
1841 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1845 rtx x
= XEXP (src
, 0);
1851 if (CONSTANT_ADDRESS_P (x
))
1853 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1856 return AS2 (in
,%0,__SREG__
);
1858 if (optimize
> 0 && io_address_operand (x
, QImode
))
1861 return AS2 (in
,%0,%m1
-0x20);
1864 return AS2 (lds
,%0,%m1
);
1866 /* memory access by reg+disp */
1867 else if (GET_CODE (x
) == PLUS
1868 && REG_P (XEXP (x
,0))
1869 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1871 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1873 int disp
= INTVAL (XEXP (x
,1));
1874 if (REGNO (XEXP (x
,0)) != REG_Y
)
1875 fatal_insn ("incorrect insn:",insn
);
1877 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1878 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1879 AS2 (ldd
,%0,Y
+63) CR_TAB
1880 AS2 (sbiw
,r28
,%o1
-63));
1882 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1883 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1884 AS2 (ld
,%0,Y
) CR_TAB
1885 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1886 AS2 (sbci
,r29
,hi8(%o1
)));
1888 else if (REGNO (XEXP (x
,0)) == REG_X
)
1890 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1891 it but I have this situation with extremal optimizing options. */
1892 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
1893 || reg_unused_after (insn
, XEXP (x
,0)))
1894 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
1897 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
1898 AS2 (ld
,%0,X
) CR_TAB
1899 AS2 (sbiw
,r26
,%o1
));
1902 return AS2 (ldd
,%0,%1);
1905 return AS2 (ld
,%0,%1);
1909 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
1913 rtx base
= XEXP (src
, 0);
1914 int reg_dest
= true_regnum (dest
);
1915 int reg_base
= true_regnum (base
);
1916 /* "volatile" forces reading low byte first, even if less efficient,
1917 for correct operation with 16-bit I/O registers. */
1918 int mem_volatile_p
= MEM_VOLATILE_P (src
);
1926 if (reg_dest
== reg_base
) /* R = (R) */
1929 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
1930 AS2 (ld
,%B0
,%1) CR_TAB
1931 AS2 (mov
,%A0
,__tmp_reg__
));
1933 else if (reg_base
== REG_X
) /* (R26) */
1935 if (reg_unused_after (insn
, base
))
1938 return (AS2 (ld
,%A0
,X
+) CR_TAB
1942 return (AS2 (ld
,%A0
,X
+) CR_TAB
1943 AS2 (ld
,%B0
,X
) CR_TAB
1949 return (AS2 (ld
,%A0
,%1) CR_TAB
1950 AS2 (ldd
,%B0
,%1+1));
1953 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
1955 int disp
= INTVAL (XEXP (base
, 1));
1956 int reg_base
= true_regnum (XEXP (base
, 0));
1958 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
1960 if (REGNO (XEXP (base
, 0)) != REG_Y
)
1961 fatal_insn ("incorrect insn:",insn
);
1963 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1964 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
1965 AS2 (ldd
,%A0
,Y
+62) CR_TAB
1966 AS2 (ldd
,%B0
,Y
+63) CR_TAB
1967 AS2 (sbiw
,r28
,%o1
-62));
1969 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1970 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1971 AS2 (ld
,%A0
,Y
) CR_TAB
1972 AS2 (ldd
,%B0
,Y
+1) CR_TAB
1973 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1974 AS2 (sbci
,r29
,hi8(%o1
)));
1976 if (reg_base
== REG_X
)
1978 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1979 it but I have this situation with extremal
1980 optimization options. */
1983 if (reg_base
== reg_dest
)
1984 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1985 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
1986 AS2 (ld
,%B0
,X
) CR_TAB
1987 AS2 (mov
,%A0
,__tmp_reg__
));
1989 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1990 AS2 (ld
,%A0
,X
+) CR_TAB
1991 AS2 (ld
,%B0
,X
) CR_TAB
1992 AS2 (sbiw
,r26
,%o1
+1));
1995 if (reg_base
== reg_dest
)
1998 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
1999 AS2 (ldd
,%B0
,%B1
) CR_TAB
2000 AS2 (mov
,%A0
,__tmp_reg__
));
2004 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2007 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2009 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2010 fatal_insn ("incorrect insn:", insn
);
2014 if (REGNO (XEXP (base
, 0)) == REG_X
)
2017 return (AS2 (sbiw
,r26
,2) CR_TAB
2018 AS2 (ld
,%A0
,X
+) CR_TAB
2019 AS2 (ld
,%B0
,X
) CR_TAB
2025 return (AS2 (sbiw
,%r1
,2) CR_TAB
2026 AS2 (ld
,%A0
,%p1
) CR_TAB
2027 AS2 (ldd
,%B0
,%p1
+1));
2032 return (AS2 (ld
,%B0
,%1) CR_TAB
2035 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2037 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2038 fatal_insn ("incorrect insn:", insn
);
2041 return (AS2 (ld
,%A0
,%1) CR_TAB
2044 else if (CONSTANT_ADDRESS_P (base
))
2046 if (optimize
> 0 && io_address_operand (base
, HImode
))
2049 return (AS2 (in
,%A0
,%m1
-0x20) CR_TAB
2050 AS2 (in
,%B0
,%m1
+1-0x20));
2053 return (AS2 (lds
,%A0
,%m1
) CR_TAB
2054 AS2 (lds
,%B0
,%m1
+1));
2057 fatal_insn ("unknown move insn:",insn
);
2062 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2066 rtx base
= XEXP (src
, 0);
2067 int reg_dest
= true_regnum (dest
);
2068 int reg_base
= true_regnum (base
);
2076 if (reg_base
== REG_X
) /* (R26) */
2078 if (reg_dest
== REG_X
)
2079 /* "ld r26,-X" is undefined */
2080 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2081 AS2 (ld
,r29
,X
) CR_TAB
2082 AS2 (ld
,r28
,-X
) CR_TAB
2083 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2084 AS2 (sbiw
,r26
,1) CR_TAB
2085 AS2 (ld
,r26
,X
) CR_TAB
2086 AS2 (mov
,r27
,__tmp_reg__
));
2087 else if (reg_dest
== REG_X
- 2)
2088 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2089 AS2 (ld
,%B0
,X
+) CR_TAB
2090 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2091 AS2 (ld
,%D0
,X
) CR_TAB
2092 AS2 (mov
,%C0
,__tmp_reg__
));
2093 else if (reg_unused_after (insn
, base
))
2094 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2095 AS2 (ld
,%B0
,X
+) CR_TAB
2096 AS2 (ld
,%C0
,X
+) CR_TAB
2099 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2100 AS2 (ld
,%B0
,X
+) CR_TAB
2101 AS2 (ld
,%C0
,X
+) CR_TAB
2102 AS2 (ld
,%D0
,X
) CR_TAB
2107 if (reg_dest
== reg_base
)
2108 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2109 AS2 (ldd
,%C0
,%1+2) CR_TAB
2110 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2111 AS2 (ld
,%A0
,%1) CR_TAB
2112 AS2 (mov
,%B0
,__tmp_reg__
));
2113 else if (reg_base
== reg_dest
+ 2)
2114 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2115 AS2 (ldd
,%B0
,%1+1) CR_TAB
2116 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2117 AS2 (ldd
,%D0
,%1+3) CR_TAB
2118 AS2 (mov
,%C0
,__tmp_reg__
));
2120 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2121 AS2 (ldd
,%B0
,%1+1) CR_TAB
2122 AS2 (ldd
,%C0
,%1+2) CR_TAB
2123 AS2 (ldd
,%D0
,%1+3));
2126 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2128 int disp
= INTVAL (XEXP (base
, 1));
2130 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2132 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2133 fatal_insn ("incorrect insn:",insn
);
2135 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2136 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2137 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2138 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2139 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2140 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2141 AS2 (sbiw
,r28
,%o1
-60));
2143 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2144 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2145 AS2 (ld
,%A0
,Y
) CR_TAB
2146 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2147 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2148 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2149 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2150 AS2 (sbci
,r29
,hi8(%o1
)));
2153 reg_base
= true_regnum (XEXP (base
, 0));
2154 if (reg_base
== REG_X
)
2157 if (reg_dest
== REG_X
)
2160 /* "ld r26,-X" is undefined */
2161 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2162 AS2 (ld
,r29
,X
) CR_TAB
2163 AS2 (ld
,r28
,-X
) CR_TAB
2164 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2165 AS2 (sbiw
,r26
,1) CR_TAB
2166 AS2 (ld
,r26
,X
) CR_TAB
2167 AS2 (mov
,r27
,__tmp_reg__
));
2170 if (reg_dest
== REG_X
- 2)
2171 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2172 AS2 (ld
,r24
,X
+) CR_TAB
2173 AS2 (ld
,r25
,X
+) CR_TAB
2174 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2175 AS2 (ld
,r27
,X
) CR_TAB
2176 AS2 (mov
,r26
,__tmp_reg__
));
2178 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2179 AS2 (ld
,%A0
,X
+) CR_TAB
2180 AS2 (ld
,%B0
,X
+) CR_TAB
2181 AS2 (ld
,%C0
,X
+) CR_TAB
2182 AS2 (ld
,%D0
,X
) CR_TAB
2183 AS2 (sbiw
,r26
,%o1
+3));
2185 if (reg_dest
== reg_base
)
2186 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2187 AS2 (ldd
,%C0
,%C1
) CR_TAB
2188 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2189 AS2 (ldd
,%A0
,%A1
) CR_TAB
2190 AS2 (mov
,%B0
,__tmp_reg__
));
2191 else if (reg_dest
== reg_base
- 2)
2192 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2193 AS2 (ldd
,%B0
,%B1
) CR_TAB
2194 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2195 AS2 (ldd
,%D0
,%D1
) CR_TAB
2196 AS2 (mov
,%C0
,__tmp_reg__
));
2197 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2198 AS2 (ldd
,%B0
,%B1
) CR_TAB
2199 AS2 (ldd
,%C0
,%C1
) CR_TAB
2202 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2203 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2204 AS2 (ld
,%C0
,%1) CR_TAB
2205 AS2 (ld
,%B0
,%1) CR_TAB
2207 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2208 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2209 AS2 (ld
,%B0
,%1) CR_TAB
2210 AS2 (ld
,%C0
,%1) CR_TAB
2212 else if (CONSTANT_ADDRESS_P (base
))
2213 return *l
=8, (AS2 (lds
,%A0
,%m1
) CR_TAB
2214 AS2 (lds
,%B0
,%m1
+1) CR_TAB
2215 AS2 (lds
,%C0
,%m1
+2) CR_TAB
2216 AS2 (lds
,%D0
,%m1
+3));
2218 fatal_insn ("unknown move insn:",insn
);
2223 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2227 rtx base
= XEXP (dest
, 0);
2228 int reg_base
= true_regnum (base
);
2229 int reg_src
= true_regnum (src
);
2235 if (CONSTANT_ADDRESS_P (base
))
2236 return *l
=8,(AS2 (sts
,%m0
,%A1
) CR_TAB
2237 AS2 (sts
,%m0
+1,%B1
) CR_TAB
2238 AS2 (sts
,%m0
+2,%C1
) CR_TAB
2239 AS2 (sts
,%m0
+3,%D1
));
2240 if (reg_base
> 0) /* (r) */
2242 if (reg_base
== REG_X
) /* (R26) */
2244 if (reg_src
== REG_X
)
2246 /* "st X+,r26" is undefined */
2247 if (reg_unused_after (insn
, base
))
2248 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2249 AS2 (st
,X
,r26
) CR_TAB
2250 AS2 (adiw
,r26
,1) CR_TAB
2251 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2252 AS2 (st
,X
+,r28
) CR_TAB
2255 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2256 AS2 (st
,X
,r26
) CR_TAB
2257 AS2 (adiw
,r26
,1) CR_TAB
2258 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2259 AS2 (st
,X
+,r28
) CR_TAB
2260 AS2 (st
,X
,r29
) CR_TAB
2263 else if (reg_base
== reg_src
+ 2)
2265 if (reg_unused_after (insn
, base
))
2266 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2267 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2268 AS2 (st
,%0+,%A1
) CR_TAB
2269 AS2 (st
,%0+,%B1
) CR_TAB
2270 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2271 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2272 AS1 (clr
,__zero_reg__
));
2274 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2275 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2276 AS2 (st
,%0+,%A1
) CR_TAB
2277 AS2 (st
,%0+,%B1
) CR_TAB
2278 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2279 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2280 AS1 (clr
,__zero_reg__
) CR_TAB
2283 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2284 AS2 (st
,%0+,%B1
) CR_TAB
2285 AS2 (st
,%0+,%C1
) CR_TAB
2286 AS2 (st
,%0,%D1
) CR_TAB
2290 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2291 AS2 (std
,%0+1,%B1
) CR_TAB
2292 AS2 (std
,%0+2,%C1
) CR_TAB
2293 AS2 (std
,%0+3,%D1
));
2295 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2297 int disp
= INTVAL (XEXP (base
, 1));
2298 reg_base
= REGNO (XEXP (base
, 0));
2299 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2301 if (reg_base
!= REG_Y
)
2302 fatal_insn ("incorrect insn:",insn
);
2304 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2305 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2306 AS2 (std
,Y
+60,%A1
) CR_TAB
2307 AS2 (std
,Y
+61,%B1
) CR_TAB
2308 AS2 (std
,Y
+62,%C1
) CR_TAB
2309 AS2 (std
,Y
+63,%D1
) CR_TAB
2310 AS2 (sbiw
,r28
,%o0
-60));
2312 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2313 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2314 AS2 (st
,Y
,%A1
) CR_TAB
2315 AS2 (std
,Y
+1,%B1
) CR_TAB
2316 AS2 (std
,Y
+2,%C1
) CR_TAB
2317 AS2 (std
,Y
+3,%D1
) CR_TAB
2318 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2319 AS2 (sbci
,r29
,hi8(%o0
)));
2321 if (reg_base
== REG_X
)
2324 if (reg_src
== REG_X
)
2327 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2328 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2329 AS2 (adiw
,r26
,%o0
) CR_TAB
2330 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2331 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2332 AS2 (st
,X
+,r28
) CR_TAB
2333 AS2 (st
,X
,r29
) CR_TAB
2334 AS1 (clr
,__zero_reg__
) CR_TAB
2335 AS2 (sbiw
,r26
,%o0
+3));
2337 else if (reg_src
== REG_X
- 2)
2340 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2341 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2342 AS2 (adiw
,r26
,%o0
) CR_TAB
2343 AS2 (st
,X
+,r24
) CR_TAB
2344 AS2 (st
,X
+,r25
) CR_TAB
2345 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2346 AS2 (st
,X
,__zero_reg__
) CR_TAB
2347 AS1 (clr
,__zero_reg__
) CR_TAB
2348 AS2 (sbiw
,r26
,%o0
+3));
2351 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2352 AS2 (st
,X
+,%A1
) CR_TAB
2353 AS2 (st
,X
+,%B1
) CR_TAB
2354 AS2 (st
,X
+,%C1
) CR_TAB
2355 AS2 (st
,X
,%D1
) CR_TAB
2356 AS2 (sbiw
,r26
,%o0
+3));
2358 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2359 AS2 (std
,%B0
,%B1
) CR_TAB
2360 AS2 (std
,%C0
,%C1
) CR_TAB
2363 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2364 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2365 AS2 (st
,%0,%C1
) CR_TAB
2366 AS2 (st
,%0,%B1
) CR_TAB
2368 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2369 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2370 AS2 (st
,%0,%B1
) CR_TAB
2371 AS2 (st
,%0,%C1
) CR_TAB
2373 fatal_insn ("unknown move insn:",insn
);
2378 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2381 rtx dest
= operands
[0];
2382 rtx src
= operands
[1];
2388 if (register_operand (dest
, VOIDmode
))
2390 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2392 if (true_regnum (dest
) > true_regnum (src
))
2397 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2398 AS2 (movw
,%A0
,%A1
));
2401 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2402 AS2 (mov
,%C0
,%C1
) CR_TAB
2403 AS2 (mov
,%B0
,%B1
) CR_TAB
2411 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2412 AS2 (movw
,%C0
,%C1
));
2415 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2416 AS2 (mov
,%B0
,%B1
) CR_TAB
2417 AS2 (mov
,%C0
,%C1
) CR_TAB
2421 else if (CONSTANT_P (src
))
2423 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2426 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2427 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2428 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2429 AS2 (ldi
,%D0
,hhi8(%1)));
2432 if (GET_CODE (src
) == CONST_INT
)
2434 const char *const clr_op0
=
2435 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2436 AS1 (clr
,%B0
) CR_TAB
2438 : (AS1 (clr
,%A0
) CR_TAB
2439 AS1 (clr
,%B0
) CR_TAB
2440 AS1 (clr
,%C0
) CR_TAB
2443 if (src
== const0_rtx
) /* mov r,L */
2445 *l
= AVR_HAVE_MOVW
? 3 : 4;
2448 else if (src
== const1_rtx
)
2451 output_asm_insn (clr_op0
, operands
);
2452 *l
= AVR_HAVE_MOVW
? 4 : 5;
2453 return AS1 (inc
,%A0
);
2455 else if (src
== constm1_rtx
)
2457 /* Immediate constants -1 to any register */
2461 return (AS1 (clr
,%A0
) CR_TAB
2462 AS1 (dec
,%A0
) CR_TAB
2463 AS2 (mov
,%B0
,%A0
) CR_TAB
2464 AS2 (movw
,%C0
,%A0
));
2467 return (AS1 (clr
,%A0
) CR_TAB
2468 AS1 (dec
,%A0
) CR_TAB
2469 AS2 (mov
,%B0
,%A0
) CR_TAB
2470 AS2 (mov
,%C0
,%A0
) CR_TAB
2475 int bit_nr
= exact_log2 (INTVAL (src
));
2479 *l
= AVR_HAVE_MOVW
? 5 : 6;
2482 output_asm_insn (clr_op0
, operands
);
2483 output_asm_insn ("set", operands
);
2486 avr_output_bld (operands
, bit_nr
);
2493 /* Last resort, better than loading from memory. */
2495 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2496 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2497 AS2 (mov
,%A0
,r31
) CR_TAB
2498 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2499 AS2 (mov
,%B0
,r31
) CR_TAB
2500 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2501 AS2 (mov
,%C0
,r31
) CR_TAB
2502 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2503 AS2 (mov
,%D0
,r31
) CR_TAB
2504 AS2 (mov
,r31
,__tmp_reg__
));
2506 else if (GET_CODE (src
) == MEM
)
2507 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2509 else if (GET_CODE (dest
) == MEM
)
2513 if (src
== const0_rtx
)
2514 operands
[1] = zero_reg_rtx
;
2516 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2519 output_asm_insn (templ
, operands
);
2524 fatal_insn ("invalid insn:", insn
);
2529 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2533 rtx x
= XEXP (dest
, 0);
2539 if (CONSTANT_ADDRESS_P (x
))
2541 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2544 return AS2 (out
,__SREG__
,%1);
2546 if (optimize
> 0 && io_address_operand (x
, QImode
))
2549 return AS2 (out
,%m0
-0x20,%1);
2552 return AS2 (sts
,%m0
,%1);
2554 /* memory access by reg+disp */
2555 else if (GET_CODE (x
) == PLUS
2556 && REG_P (XEXP (x
,0))
2557 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2559 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2561 int disp
= INTVAL (XEXP (x
,1));
2562 if (REGNO (XEXP (x
,0)) != REG_Y
)
2563 fatal_insn ("incorrect insn:",insn
);
2565 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2566 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2567 AS2 (std
,Y
+63,%1) CR_TAB
2568 AS2 (sbiw
,r28
,%o0
-63));
2570 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2571 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2572 AS2 (st
,Y
,%1) CR_TAB
2573 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2574 AS2 (sbci
,r29
,hi8(%o0
)));
2576 else if (REGNO (XEXP (x
,0)) == REG_X
)
2578 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2580 if (reg_unused_after (insn
, XEXP (x
,0)))
2581 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2582 AS2 (adiw
,r26
,%o0
) CR_TAB
2583 AS2 (st
,X
,__tmp_reg__
));
2585 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2586 AS2 (adiw
,r26
,%o0
) CR_TAB
2587 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2588 AS2 (sbiw
,r26
,%o0
));
2592 if (reg_unused_after (insn
, XEXP (x
,0)))
2593 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2596 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2597 AS2 (st
,X
,%1) CR_TAB
2598 AS2 (sbiw
,r26
,%o0
));
2602 return AS2 (std
,%0,%1);
2605 return AS2 (st
,%0,%1);
2609 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2613 rtx base
= XEXP (dest
, 0);
2614 int reg_base
= true_regnum (base
);
2615 int reg_src
= true_regnum (src
);
2616 /* "volatile" forces writing high byte first, even if less efficient,
2617 for correct operation with 16-bit I/O registers. */
2618 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2623 if (CONSTANT_ADDRESS_P (base
))
2625 if (optimize
> 0 && io_address_operand (base
, HImode
))
2628 return (AS2 (out
,%m0
+1-0x20,%B1
) CR_TAB
2629 AS2 (out
,%m0
-0x20,%A1
));
2631 return *l
= 4, (AS2 (sts
,%m0
+1,%B1
) CR_TAB
2636 if (reg_base
== REG_X
)
2638 if (reg_src
== REG_X
)
2640 /* "st X+,r26" and "st -X,r26" are undefined. */
2641 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2642 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2643 AS2 (st
,X
,r26
) CR_TAB
2644 AS2 (adiw
,r26
,1) CR_TAB
2645 AS2 (st
,X
,__tmp_reg__
));
2647 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2648 AS2 (adiw
,r26
,1) CR_TAB
2649 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2650 AS2 (sbiw
,r26
,1) CR_TAB
2655 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2656 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2659 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2660 AS2 (st
,X
,%B1
) CR_TAB
2665 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2668 else if (GET_CODE (base
) == PLUS
)
2670 int disp
= INTVAL (XEXP (base
, 1));
2671 reg_base
= REGNO (XEXP (base
, 0));
2672 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2674 if (reg_base
!= REG_Y
)
2675 fatal_insn ("incorrect insn:",insn
);
2677 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2678 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2679 AS2 (std
,Y
+63,%B1
) CR_TAB
2680 AS2 (std
,Y
+62,%A1
) CR_TAB
2681 AS2 (sbiw
,r28
,%o0
-62));
2683 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2684 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2685 AS2 (std
,Y
+1,%B1
) CR_TAB
2686 AS2 (st
,Y
,%A1
) CR_TAB
2687 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2688 AS2 (sbci
,r29
,hi8(%o0
)));
2690 if (reg_base
== REG_X
)
2693 if (reg_src
== REG_X
)
2696 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2697 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2698 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2699 AS2 (st
,X
,__zero_reg__
) CR_TAB
2700 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2701 AS1 (clr
,__zero_reg__
) CR_TAB
2702 AS2 (sbiw
,r26
,%o0
));
2705 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2706 AS2 (st
,X
,%B1
) CR_TAB
2707 AS2 (st
,-X
,%A1
) CR_TAB
2708 AS2 (sbiw
,r26
,%o0
));
2710 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2713 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2714 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2716 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2720 if (REGNO (XEXP (base
, 0)) == REG_X
)
2723 return (AS2 (adiw
,r26
,1) CR_TAB
2724 AS2 (st
,X
,%B1
) CR_TAB
2725 AS2 (st
,-X
,%A1
) CR_TAB
2731 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2732 AS2 (st
,%p0
,%A1
) CR_TAB
2738 return (AS2 (st
,%0,%A1
) CR_TAB
2741 fatal_insn ("unknown move insn:",insn
);
2745 /* Return 1 if frame pointer for current function required. */
2748 avr_frame_pointer_required_p (void)
2750 return (cfun
->calls_alloca
2751 || crtl
->args
.info
.nregs
== 0
2752 || get_frame_size () > 0);
2755 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2758 compare_condition (rtx insn
)
2760 rtx next
= next_real_insn (insn
);
2761 RTX_CODE cond
= UNKNOWN
;
2762 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2764 rtx pat
= PATTERN (next
);
2765 rtx src
= SET_SRC (pat
);
2766 rtx t
= XEXP (src
, 0);
2767 cond
= GET_CODE (t
);
2772 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2775 compare_sign_p (rtx insn
)
2777 RTX_CODE cond
= compare_condition (insn
);
2778 return (cond
== GE
|| cond
== LT
);
2781 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2782 that needs to be swapped (GT, GTU, LE, LEU). */
2785 compare_diff_p (rtx insn
)
2787 RTX_CODE cond
= compare_condition (insn
);
2788 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2791 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2794 compare_eq_p (rtx insn
)
2796 RTX_CODE cond
= compare_condition (insn
);
2797 return (cond
== EQ
|| cond
== NE
);
2801 /* Output test instruction for HImode. */
2804 out_tsthi (rtx insn
, rtx op
, int *l
)
2806 if (compare_sign_p (insn
))
2809 return AS1 (tst
,%B0
);
2811 if (reg_unused_after (insn
, op
)
2812 && compare_eq_p (insn
))
2814 /* Faster than sbiw if we can clobber the operand. */
2816 return "or %A0,%B0";
2818 if (test_hard_reg_class (ADDW_REGS
, op
))
2821 return AS2 (sbiw
,%0,0);
2824 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2825 AS2 (cpc
,%B0
,__zero_reg__
));
2829 /* Output test instruction for SImode. */
2832 out_tstsi (rtx insn
, rtx op
, int *l
)
2834 if (compare_sign_p (insn
))
2837 return AS1 (tst
,%D0
);
2839 if (test_hard_reg_class (ADDW_REGS
, op
))
2842 return (AS2 (sbiw
,%A0
,0) CR_TAB
2843 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2844 AS2 (cpc
,%D0
,__zero_reg__
));
2847 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2848 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2849 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2850 AS2 (cpc
,%D0
,__zero_reg__
));
2854 /* Generate asm equivalent for various shifts.
2855 Shift count is a CONST_INT, MEM or REG.
2856 This only handles cases that are not already
2857 carefully hand-optimized in ?sh??i3_out. */
2860 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
2861 int *len
, int t_len
)
2865 int second_label
= 1;
2866 int saved_in_tmp
= 0;
2867 int use_zero_reg
= 0;
2869 op
[0] = operands
[0];
2870 op
[1] = operands
[1];
2871 op
[2] = operands
[2];
2872 op
[3] = operands
[3];
2878 if (GET_CODE (operands
[2]) == CONST_INT
)
2880 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2881 int count
= INTVAL (operands
[2]);
2882 int max_len
= 10; /* If larger than this, always use a loop. */
2891 if (count
< 8 && !scratch
)
2895 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
2897 if (t_len
* count
<= max_len
)
2899 /* Output shifts inline with no loop - faster. */
2901 *len
= t_len
* count
;
2905 output_asm_insn (templ
, op
);
2914 strcat (str
, AS2 (ldi
,%3,%2));
2916 else if (use_zero_reg
)
2918 /* Hack to save one word: use __zero_reg__ as loop counter.
2919 Set one bit, then shift in a loop until it is 0 again. */
2921 op
[3] = zero_reg_rtx
;
2925 strcat (str
, ("set" CR_TAB
2926 AS2 (bld
,%3,%2-1)));
2930 /* No scratch register available, use one from LD_REGS (saved in
2931 __tmp_reg__) that doesn't overlap with registers to shift. */
2933 op
[3] = gen_rtx_REG (QImode
,
2934 ((true_regnum (operands
[0]) - 1) & 15) + 16);
2935 op
[4] = tmp_reg_rtx
;
2939 *len
= 3; /* Includes "mov %3,%4" after the loop. */
2941 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
2947 else if (GET_CODE (operands
[2]) == MEM
)
2951 op
[3] = op_mov
[0] = tmp_reg_rtx
;
2955 out_movqi_r_mr (insn
, op_mov
, len
);
2957 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
2959 else if (register_operand (operands
[2], QImode
))
2961 if (reg_unused_after (insn
, operands
[2]))
2965 op
[3] = tmp_reg_rtx
;
2967 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
2971 fatal_insn ("bad shift insn:", insn
);
2978 strcat (str
, AS1 (rjmp
,2f
));
2982 *len
+= t_len
+ 2; /* template + dec + brXX */
2985 strcat (str
, "\n1:\t");
2986 strcat (str
, templ
);
2987 strcat (str
, second_label
? "\n2:\t" : "\n\t");
2988 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
2989 strcat (str
, CR_TAB
);
2990 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
2992 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
2993 output_asm_insn (str
, op
);
2998 /* 8bit shift left ((char)x << i) */
3001 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3003 if (GET_CODE (operands
[2]) == CONST_INT
)
3010 switch (INTVAL (operands
[2]))
3013 if (INTVAL (operands
[2]) < 8)
3017 return AS1 (clr
,%0);
3021 return AS1 (lsl
,%0);
3025 return (AS1 (lsl
,%0) CR_TAB
3030 return (AS1 (lsl
,%0) CR_TAB
3035 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3038 return (AS1 (swap
,%0) CR_TAB
3039 AS2 (andi
,%0,0xf0));
3042 return (AS1 (lsl
,%0) CR_TAB
3048 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3051 return (AS1 (swap
,%0) CR_TAB
3053 AS2 (andi
,%0,0xe0));
3056 return (AS1 (lsl
,%0) CR_TAB
3063 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3066 return (AS1 (swap
,%0) CR_TAB
3069 AS2 (andi
,%0,0xc0));
3072 return (AS1 (lsl
,%0) CR_TAB
3081 return (AS1 (ror
,%0) CR_TAB
3086 else if (CONSTANT_P (operands
[2]))
3087 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3089 out_shift_with_cnt (AS1 (lsl
,%0),
3090 insn
, operands
, len
, 1);
3095 /* 16bit shift left ((short)x << i) */
3098 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3100 if (GET_CODE (operands
[2]) == CONST_INT
)
3102 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3103 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3110 switch (INTVAL (operands
[2]))
3113 if (INTVAL (operands
[2]) < 16)
3117 return (AS1 (clr
,%B0
) CR_TAB
3121 if (optimize_size
&& scratch
)
3126 return (AS1 (swap
,%A0
) CR_TAB
3127 AS1 (swap
,%B0
) CR_TAB
3128 AS2 (andi
,%B0
,0xf0) CR_TAB
3129 AS2 (eor
,%B0
,%A0
) CR_TAB
3130 AS2 (andi
,%A0
,0xf0) CR_TAB
3136 return (AS1 (swap
,%A0
) CR_TAB
3137 AS1 (swap
,%B0
) CR_TAB
3138 AS2 (ldi
,%3,0xf0) CR_TAB
3140 AS2 (eor
,%B0
,%A0
) CR_TAB
3144 break; /* optimize_size ? 6 : 8 */
3148 break; /* scratch ? 5 : 6 */
3152 return (AS1 (lsl
,%A0
) CR_TAB
3153 AS1 (rol
,%B0
) CR_TAB
3154 AS1 (swap
,%A0
) CR_TAB
3155 AS1 (swap
,%B0
) CR_TAB
3156 AS2 (andi
,%B0
,0xf0) CR_TAB
3157 AS2 (eor
,%B0
,%A0
) CR_TAB
3158 AS2 (andi
,%A0
,0xf0) CR_TAB
3164 return (AS1 (lsl
,%A0
) CR_TAB
3165 AS1 (rol
,%B0
) CR_TAB
3166 AS1 (swap
,%A0
) CR_TAB
3167 AS1 (swap
,%B0
) CR_TAB
3168 AS2 (ldi
,%3,0xf0) CR_TAB
3170 AS2 (eor
,%B0
,%A0
) CR_TAB
3178 break; /* scratch ? 5 : 6 */
3180 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3181 AS1 (lsr
,%B0
) CR_TAB
3182 AS1 (ror
,%A0
) CR_TAB
3183 AS1 (ror
,__tmp_reg__
) CR_TAB
3184 AS1 (lsr
,%B0
) CR_TAB
3185 AS1 (ror
,%A0
) CR_TAB
3186 AS1 (ror
,__tmp_reg__
) CR_TAB
3187 AS2 (mov
,%B0
,%A0
) CR_TAB
3188 AS2 (mov
,%A0
,__tmp_reg__
));
3192 return (AS1 (lsr
,%B0
) CR_TAB
3193 AS2 (mov
,%B0
,%A0
) CR_TAB
3194 AS1 (clr
,%A0
) CR_TAB
3195 AS1 (ror
,%B0
) CR_TAB
3199 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3204 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3205 AS1 (clr
,%A0
) CR_TAB
3210 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3211 AS1 (clr
,%A0
) CR_TAB
3212 AS1 (lsl
,%B0
) CR_TAB
3217 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3218 AS1 (clr
,%A0
) CR_TAB
3219 AS1 (lsl
,%B0
) CR_TAB
3220 AS1 (lsl
,%B0
) CR_TAB
3227 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3228 AS1 (clr
,%A0
) CR_TAB
3229 AS1 (swap
,%B0
) CR_TAB
3230 AS2 (andi
,%B0
,0xf0));
3235 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3236 AS1 (clr
,%A0
) CR_TAB
3237 AS1 (swap
,%B0
) CR_TAB
3238 AS2 (ldi
,%3,0xf0) CR_TAB
3242 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3243 AS1 (clr
,%A0
) CR_TAB
3244 AS1 (lsl
,%B0
) CR_TAB
3245 AS1 (lsl
,%B0
) CR_TAB
3246 AS1 (lsl
,%B0
) CR_TAB
3253 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3254 AS1 (clr
,%A0
) CR_TAB
3255 AS1 (swap
,%B0
) CR_TAB
3256 AS1 (lsl
,%B0
) CR_TAB
3257 AS2 (andi
,%B0
,0xe0));
3259 if (AVR_HAVE_MUL
&& scratch
)
3262 return (AS2 (ldi
,%3,0x20) CR_TAB
3263 AS2 (mul
,%A0
,%3) CR_TAB
3264 AS2 (mov
,%B0
,r0
) CR_TAB
3265 AS1 (clr
,%A0
) CR_TAB
3266 AS1 (clr
,__zero_reg__
));
3268 if (optimize_size
&& scratch
)
3273 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3274 AS1 (clr
,%A0
) CR_TAB
3275 AS1 (swap
,%B0
) CR_TAB
3276 AS1 (lsl
,%B0
) CR_TAB
3277 AS2 (ldi
,%3,0xe0) CR_TAB
3283 return ("set" CR_TAB
3284 AS2 (bld
,r1
,5) CR_TAB
3285 AS2 (mul
,%A0
,r1
) CR_TAB
3286 AS2 (mov
,%B0
,r0
) CR_TAB
3287 AS1 (clr
,%A0
) CR_TAB
3288 AS1 (clr
,__zero_reg__
));
3291 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3292 AS1 (clr
,%A0
) CR_TAB
3293 AS1 (lsl
,%B0
) CR_TAB
3294 AS1 (lsl
,%B0
) CR_TAB
3295 AS1 (lsl
,%B0
) CR_TAB
3296 AS1 (lsl
,%B0
) CR_TAB
3300 if (AVR_HAVE_MUL
&& ldi_ok
)
3303 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3304 AS2 (mul
,%A0
,%B0
) CR_TAB
3305 AS2 (mov
,%B0
,r0
) CR_TAB
3306 AS1 (clr
,%A0
) CR_TAB
3307 AS1 (clr
,__zero_reg__
));
3309 if (AVR_HAVE_MUL
&& scratch
)
3312 return (AS2 (ldi
,%3,0x40) CR_TAB
3313 AS2 (mul
,%A0
,%3) CR_TAB
3314 AS2 (mov
,%B0
,r0
) CR_TAB
3315 AS1 (clr
,%A0
) CR_TAB
3316 AS1 (clr
,__zero_reg__
));
3318 if (optimize_size
&& ldi_ok
)
3321 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3322 AS2 (ldi
,%A0
,6) "\n1:\t"
3323 AS1 (lsl
,%B0
) CR_TAB
3324 AS1 (dec
,%A0
) CR_TAB
3327 if (optimize_size
&& scratch
)
3330 return (AS1 (clr
,%B0
) CR_TAB
3331 AS1 (lsr
,%A0
) CR_TAB
3332 AS1 (ror
,%B0
) CR_TAB
3333 AS1 (lsr
,%A0
) CR_TAB
3334 AS1 (ror
,%B0
) CR_TAB
3339 return (AS1 (clr
,%B0
) CR_TAB
3340 AS1 (lsr
,%A0
) CR_TAB
3341 AS1 (ror
,%B0
) CR_TAB
3346 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3348 insn
, operands
, len
, 2);
3353 /* 32bit shift left ((long)x << i) */
3356 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3358 if (GET_CODE (operands
[2]) == CONST_INT
)
3366 switch (INTVAL (operands
[2]))
3369 if (INTVAL (operands
[2]) < 32)
3373 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3374 AS1 (clr
,%C0
) CR_TAB
3375 AS2 (movw
,%A0
,%C0
));
3377 return (AS1 (clr
,%D0
) CR_TAB
3378 AS1 (clr
,%C0
) CR_TAB
3379 AS1 (clr
,%B0
) CR_TAB
3384 int reg0
= true_regnum (operands
[0]);
3385 int reg1
= true_regnum (operands
[1]);
3388 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3389 AS2 (mov
,%C0
,%B1
) CR_TAB
3390 AS2 (mov
,%B0
,%A1
) CR_TAB
3393 return (AS1 (clr
,%A0
) CR_TAB
3394 AS2 (mov
,%B0
,%A1
) CR_TAB
3395 AS2 (mov
,%C0
,%B1
) CR_TAB
3401 int reg0
= true_regnum (operands
[0]);
3402 int reg1
= true_regnum (operands
[1]);
3403 if (reg0
+ 2 == reg1
)
3404 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3407 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3408 AS1 (clr
,%B0
) CR_TAB
3411 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3412 AS2 (mov
,%D0
,%B1
) CR_TAB
3413 AS1 (clr
,%B0
) CR_TAB
3419 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3420 AS1 (clr
,%C0
) CR_TAB
3421 AS1 (clr
,%B0
) CR_TAB
3426 return (AS1 (clr
,%D0
) CR_TAB
3427 AS1 (lsr
,%A0
) CR_TAB
3428 AS1 (ror
,%D0
) CR_TAB
3429 AS1 (clr
,%C0
) CR_TAB
3430 AS1 (clr
,%B0
) CR_TAB
3435 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3436 AS1 (rol
,%B0
) CR_TAB
3437 AS1 (rol
,%C0
) CR_TAB
3439 insn
, operands
, len
, 4);
3443 /* 8bit arithmetic shift right ((signed char)x >> i) */
3446 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3448 if (GET_CODE (operands
[2]) == CONST_INT
)
3455 switch (INTVAL (operands
[2]))
3459 return AS1 (asr
,%0);
3463 return (AS1 (asr
,%0) CR_TAB
3468 return (AS1 (asr
,%0) CR_TAB
3474 return (AS1 (asr
,%0) CR_TAB
3481 return (AS1 (asr
,%0) CR_TAB
3489 return (AS2 (bst
,%0,6) CR_TAB
3491 AS2 (sbc
,%0,%0) CR_TAB
3495 if (INTVAL (operands
[2]) < 8)
3502 return (AS1 (lsl
,%0) CR_TAB
3506 else if (CONSTANT_P (operands
[2]))
3507 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3509 out_shift_with_cnt (AS1 (asr
,%0),
3510 insn
, operands
, len
, 1);
3515 /* 16bit arithmetic shift right ((signed short)x >> i) */
3518 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3520 if (GET_CODE (operands
[2]) == CONST_INT
)
3522 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3523 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3530 switch (INTVAL (operands
[2]))
3534 /* XXX try to optimize this too? */
3539 break; /* scratch ? 5 : 6 */
3541 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3542 AS2 (mov
,%A0
,%B0
) CR_TAB
3543 AS1 (lsl
,__tmp_reg__
) CR_TAB
3544 AS1 (rol
,%A0
) CR_TAB
3545 AS2 (sbc
,%B0
,%B0
) CR_TAB
3546 AS1 (lsl
,__tmp_reg__
) CR_TAB
3547 AS1 (rol
,%A0
) CR_TAB
3552 return (AS1 (lsl
,%A0
) CR_TAB
3553 AS2 (mov
,%A0
,%B0
) CR_TAB
3554 AS1 (rol
,%A0
) CR_TAB
3559 int reg0
= true_regnum (operands
[0]);
3560 int reg1
= true_regnum (operands
[1]);
3563 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3564 AS1 (lsl
,%B0
) CR_TAB
3567 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3568 AS1 (clr
,%B0
) CR_TAB
3569 AS2 (sbrc
,%A0
,7) CR_TAB
3575 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3576 AS1 (lsl
,%B0
) CR_TAB
3577 AS2 (sbc
,%B0
,%B0
) CR_TAB
3582 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3583 AS1 (lsl
,%B0
) CR_TAB
3584 AS2 (sbc
,%B0
,%B0
) CR_TAB
3585 AS1 (asr
,%A0
) CR_TAB
3589 if (AVR_HAVE_MUL
&& ldi_ok
)
3592 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3593 AS2 (muls
,%B0
,%A0
) CR_TAB
3594 AS2 (mov
,%A0
,r1
) CR_TAB
3595 AS2 (sbc
,%B0
,%B0
) CR_TAB
3596 AS1 (clr
,__zero_reg__
));
3598 if (optimize_size
&& scratch
)
3601 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3602 AS1 (lsl
,%B0
) CR_TAB
3603 AS2 (sbc
,%B0
,%B0
) CR_TAB
3604 AS1 (asr
,%A0
) CR_TAB
3605 AS1 (asr
,%A0
) CR_TAB
3609 if (AVR_HAVE_MUL
&& ldi_ok
)
3612 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3613 AS2 (muls
,%B0
,%A0
) CR_TAB
3614 AS2 (mov
,%A0
,r1
) CR_TAB
3615 AS2 (sbc
,%B0
,%B0
) CR_TAB
3616 AS1 (clr
,__zero_reg__
));
3618 if (optimize_size
&& scratch
)
3621 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3622 AS1 (lsl
,%B0
) CR_TAB
3623 AS2 (sbc
,%B0
,%B0
) CR_TAB
3624 AS1 (asr
,%A0
) CR_TAB
3625 AS1 (asr
,%A0
) CR_TAB
3626 AS1 (asr
,%A0
) CR_TAB
3630 if (AVR_HAVE_MUL
&& ldi_ok
)
3633 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3634 AS2 (muls
,%B0
,%A0
) CR_TAB
3635 AS2 (mov
,%A0
,r1
) CR_TAB
3636 AS2 (sbc
,%B0
,%B0
) CR_TAB
3637 AS1 (clr
,__zero_reg__
));
3640 break; /* scratch ? 5 : 7 */
3642 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3643 AS1 (lsl
,%B0
) CR_TAB
3644 AS2 (sbc
,%B0
,%B0
) CR_TAB
3645 AS1 (asr
,%A0
) CR_TAB
3646 AS1 (asr
,%A0
) CR_TAB
3647 AS1 (asr
,%A0
) CR_TAB
3648 AS1 (asr
,%A0
) CR_TAB
3653 return (AS1 (lsl
,%B0
) CR_TAB
3654 AS2 (sbc
,%A0
,%A0
) CR_TAB
3655 AS1 (lsl
,%B0
) CR_TAB
3656 AS2 (mov
,%B0
,%A0
) CR_TAB
3660 if (INTVAL (operands
[2]) < 16)
3666 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3667 AS2 (sbc
,%A0
,%A0
) CR_TAB
3672 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3674 insn
, operands
, len
, 2);
3679 /* 32bit arithmetic shift right ((signed long)x >> i) */
3682 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3684 if (GET_CODE (operands
[2]) == CONST_INT
)
3692 switch (INTVAL (operands
[2]))
3696 int reg0
= true_regnum (operands
[0]);
3697 int reg1
= true_regnum (operands
[1]);
3700 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3701 AS2 (mov
,%B0
,%C1
) CR_TAB
3702 AS2 (mov
,%C0
,%D1
) CR_TAB
3703 AS1 (clr
,%D0
) CR_TAB
3704 AS2 (sbrc
,%C0
,7) CR_TAB
3707 return (AS1 (clr
,%D0
) CR_TAB
3708 AS2 (sbrc
,%D1
,7) CR_TAB
3709 AS1 (dec
,%D0
) CR_TAB
3710 AS2 (mov
,%C0
,%D1
) CR_TAB
3711 AS2 (mov
,%B0
,%C1
) CR_TAB
3717 int reg0
= true_regnum (operands
[0]);
3718 int reg1
= true_regnum (operands
[1]);
3720 if (reg0
== reg1
+ 2)
3721 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3722 AS2 (sbrc
,%B0
,7) CR_TAB
3723 AS1 (com
,%D0
) CR_TAB
3726 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3727 AS1 (clr
,%D0
) CR_TAB
3728 AS2 (sbrc
,%B0
,7) CR_TAB
3729 AS1 (com
,%D0
) CR_TAB
3732 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3733 AS2 (mov
,%A0
,%C1
) CR_TAB
3734 AS1 (clr
,%D0
) CR_TAB
3735 AS2 (sbrc
,%B0
,7) CR_TAB
3736 AS1 (com
,%D0
) CR_TAB
3741 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3742 AS1 (clr
,%D0
) CR_TAB
3743 AS2 (sbrc
,%A0
,7) CR_TAB
3744 AS1 (com
,%D0
) CR_TAB
3745 AS2 (mov
,%B0
,%D0
) CR_TAB
3749 if (INTVAL (operands
[2]) < 32)
3756 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3757 AS2 (sbc
,%A0
,%A0
) CR_TAB
3758 AS2 (mov
,%B0
,%A0
) CR_TAB
3759 AS2 (movw
,%C0
,%A0
));
3761 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3762 AS2 (sbc
,%A0
,%A0
) CR_TAB
3763 AS2 (mov
,%B0
,%A0
) CR_TAB
3764 AS2 (mov
,%C0
,%A0
) CR_TAB
3769 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3770 AS1 (ror
,%C0
) CR_TAB
3771 AS1 (ror
,%B0
) CR_TAB
3773 insn
, operands
, len
, 4);
3777 /* 8bit logic shift right ((unsigned char)x >> i) */
3780 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3782 if (GET_CODE (operands
[2]) == CONST_INT
)
3789 switch (INTVAL (operands
[2]))
3792 if (INTVAL (operands
[2]) < 8)
3796 return AS1 (clr
,%0);
3800 return AS1 (lsr
,%0);
3804 return (AS1 (lsr
,%0) CR_TAB
3808 return (AS1 (lsr
,%0) CR_TAB
3813 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3816 return (AS1 (swap
,%0) CR_TAB
3817 AS2 (andi
,%0,0x0f));
3820 return (AS1 (lsr
,%0) CR_TAB
3826 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3829 return (AS1 (swap
,%0) CR_TAB
3834 return (AS1 (lsr
,%0) CR_TAB
3841 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3844 return (AS1 (swap
,%0) CR_TAB
3850 return (AS1 (lsr
,%0) CR_TAB
3859 return (AS1 (rol
,%0) CR_TAB
3864 else if (CONSTANT_P (operands
[2]))
3865 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3867 out_shift_with_cnt (AS1 (lsr
,%0),
3868 insn
, operands
, len
, 1);
3872 /* 16bit logic shift right ((unsigned short)x >> i) */
3875 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3877 if (GET_CODE (operands
[2]) == CONST_INT
)
3879 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3880 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3887 switch (INTVAL (operands
[2]))
3890 if (INTVAL (operands
[2]) < 16)
3894 return (AS1 (clr
,%B0
) CR_TAB
3898 if (optimize_size
&& scratch
)
3903 return (AS1 (swap
,%B0
) CR_TAB
3904 AS1 (swap
,%A0
) CR_TAB
3905 AS2 (andi
,%A0
,0x0f) CR_TAB
3906 AS2 (eor
,%A0
,%B0
) CR_TAB
3907 AS2 (andi
,%B0
,0x0f) CR_TAB
3913 return (AS1 (swap
,%B0
) CR_TAB
3914 AS1 (swap
,%A0
) CR_TAB
3915 AS2 (ldi
,%3,0x0f) CR_TAB
3917 AS2 (eor
,%A0
,%B0
) CR_TAB
3921 break; /* optimize_size ? 6 : 8 */
3925 break; /* scratch ? 5 : 6 */
3929 return (AS1 (lsr
,%B0
) CR_TAB
3930 AS1 (ror
,%A0
) CR_TAB
3931 AS1 (swap
,%B0
) CR_TAB
3932 AS1 (swap
,%A0
) CR_TAB
3933 AS2 (andi
,%A0
,0x0f) CR_TAB
3934 AS2 (eor
,%A0
,%B0
) CR_TAB
3935 AS2 (andi
,%B0
,0x0f) CR_TAB
3941 return (AS1 (lsr
,%B0
) CR_TAB
3942 AS1 (ror
,%A0
) CR_TAB
3943 AS1 (swap
,%B0
) CR_TAB
3944 AS1 (swap
,%A0
) CR_TAB
3945 AS2 (ldi
,%3,0x0f) CR_TAB
3947 AS2 (eor
,%A0
,%B0
) CR_TAB
3955 break; /* scratch ? 5 : 6 */
3957 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3958 AS1 (lsl
,%A0
) CR_TAB
3959 AS1 (rol
,%B0
) CR_TAB
3960 AS1 (rol
,__tmp_reg__
) CR_TAB
3961 AS1 (lsl
,%A0
) CR_TAB
3962 AS1 (rol
,%B0
) CR_TAB
3963 AS1 (rol
,__tmp_reg__
) CR_TAB
3964 AS2 (mov
,%A0
,%B0
) CR_TAB
3965 AS2 (mov
,%B0
,__tmp_reg__
));
3969 return (AS1 (lsl
,%A0
) CR_TAB
3970 AS2 (mov
,%A0
,%B0
) CR_TAB
3971 AS1 (rol
,%A0
) CR_TAB
3972 AS2 (sbc
,%B0
,%B0
) CR_TAB
3976 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
3981 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3982 AS1 (clr
,%B0
) CR_TAB
3987 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3988 AS1 (clr
,%B0
) CR_TAB
3989 AS1 (lsr
,%A0
) CR_TAB
3994 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3995 AS1 (clr
,%B0
) CR_TAB
3996 AS1 (lsr
,%A0
) CR_TAB
3997 AS1 (lsr
,%A0
) CR_TAB
4004 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4005 AS1 (clr
,%B0
) CR_TAB
4006 AS1 (swap
,%A0
) CR_TAB
4007 AS2 (andi
,%A0
,0x0f));
4012 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4013 AS1 (clr
,%B0
) CR_TAB
4014 AS1 (swap
,%A0
) CR_TAB
4015 AS2 (ldi
,%3,0x0f) CR_TAB
4019 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4020 AS1 (clr
,%B0
) CR_TAB
4021 AS1 (lsr
,%A0
) CR_TAB
4022 AS1 (lsr
,%A0
) CR_TAB
4023 AS1 (lsr
,%A0
) CR_TAB
4030 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4031 AS1 (clr
,%B0
) CR_TAB
4032 AS1 (swap
,%A0
) CR_TAB
4033 AS1 (lsr
,%A0
) CR_TAB
4034 AS2 (andi
,%A0
,0x07));
4036 if (AVR_HAVE_MUL
&& scratch
)
4039 return (AS2 (ldi
,%3,0x08) CR_TAB
4040 AS2 (mul
,%B0
,%3) CR_TAB
4041 AS2 (mov
,%A0
,r1
) CR_TAB
4042 AS1 (clr
,%B0
) CR_TAB
4043 AS1 (clr
,__zero_reg__
));
4045 if (optimize_size
&& scratch
)
4050 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4051 AS1 (clr
,%B0
) CR_TAB
4052 AS1 (swap
,%A0
) CR_TAB
4053 AS1 (lsr
,%A0
) CR_TAB
4054 AS2 (ldi
,%3,0x07) CR_TAB
4060 return ("set" CR_TAB
4061 AS2 (bld
,r1
,3) CR_TAB
4062 AS2 (mul
,%B0
,r1
) CR_TAB
4063 AS2 (mov
,%A0
,r1
) CR_TAB
4064 AS1 (clr
,%B0
) CR_TAB
4065 AS1 (clr
,__zero_reg__
));
4068 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4069 AS1 (clr
,%B0
) CR_TAB
4070 AS1 (lsr
,%A0
) CR_TAB
4071 AS1 (lsr
,%A0
) CR_TAB
4072 AS1 (lsr
,%A0
) CR_TAB
4073 AS1 (lsr
,%A0
) CR_TAB
4077 if (AVR_HAVE_MUL
&& ldi_ok
)
4080 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4081 AS2 (mul
,%B0
,%A0
) CR_TAB
4082 AS2 (mov
,%A0
,r1
) CR_TAB
4083 AS1 (clr
,%B0
) CR_TAB
4084 AS1 (clr
,__zero_reg__
));
4086 if (AVR_HAVE_MUL
&& scratch
)
4089 return (AS2 (ldi
,%3,0x04) CR_TAB
4090 AS2 (mul
,%B0
,%3) CR_TAB
4091 AS2 (mov
,%A0
,r1
) CR_TAB
4092 AS1 (clr
,%B0
) CR_TAB
4093 AS1 (clr
,__zero_reg__
));
4095 if (optimize_size
&& ldi_ok
)
4098 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4099 AS2 (ldi
,%B0
,6) "\n1:\t"
4100 AS1 (lsr
,%A0
) CR_TAB
4101 AS1 (dec
,%B0
) CR_TAB
4104 if (optimize_size
&& scratch
)
4107 return (AS1 (clr
,%A0
) CR_TAB
4108 AS1 (lsl
,%B0
) CR_TAB
4109 AS1 (rol
,%A0
) CR_TAB
4110 AS1 (lsl
,%B0
) CR_TAB
4111 AS1 (rol
,%A0
) CR_TAB
4116 return (AS1 (clr
,%A0
) CR_TAB
4117 AS1 (lsl
,%B0
) CR_TAB
4118 AS1 (rol
,%A0
) CR_TAB
4123 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4125 insn
, operands
, len
, 2);
4129 /* 32bit logic shift right ((unsigned int)x >> i) */
4132 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4134 if (GET_CODE (operands
[2]) == CONST_INT
)
4142 switch (INTVAL (operands
[2]))
4145 if (INTVAL (operands
[2]) < 32)
4149 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4150 AS1 (clr
,%C0
) CR_TAB
4151 AS2 (movw
,%A0
,%C0
));
4153 return (AS1 (clr
,%D0
) CR_TAB
4154 AS1 (clr
,%C0
) CR_TAB
4155 AS1 (clr
,%B0
) CR_TAB
4160 int reg0
= true_regnum (operands
[0]);
4161 int reg1
= true_regnum (operands
[1]);
4164 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4165 AS2 (mov
,%B0
,%C1
) CR_TAB
4166 AS2 (mov
,%C0
,%D1
) CR_TAB
4169 return (AS1 (clr
,%D0
) CR_TAB
4170 AS2 (mov
,%C0
,%D1
) CR_TAB
4171 AS2 (mov
,%B0
,%C1
) CR_TAB
4177 int reg0
= true_regnum (operands
[0]);
4178 int reg1
= true_regnum (operands
[1]);
4180 if (reg0
== reg1
+ 2)
4181 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4184 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4185 AS1 (clr
,%C0
) CR_TAB
4188 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4189 AS2 (mov
,%A0
,%C1
) CR_TAB
4190 AS1 (clr
,%C0
) CR_TAB
4195 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4196 AS1 (clr
,%B0
) CR_TAB
4197 AS1 (clr
,%C0
) CR_TAB
4202 return (AS1 (clr
,%A0
) CR_TAB
4203 AS2 (sbrc
,%D0
,7) CR_TAB
4204 AS1 (inc
,%A0
) CR_TAB
4205 AS1 (clr
,%B0
) CR_TAB
4206 AS1 (clr
,%C0
) CR_TAB
4211 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4212 AS1 (ror
,%C0
) CR_TAB
4213 AS1 (ror
,%B0
) CR_TAB
4215 insn
, operands
, len
, 4);
4219 /* Create RTL split patterns for byte sized rotate expressions. This
4220 produces a series of move instructions and considers overlap situations.
4221 Overlapping non-HImode operands need a scratch register. */
4224 avr_rotate_bytes (rtx operands
[])
4227 enum machine_mode mode
= GET_MODE (operands
[0]);
4228 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
4229 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
4230 int num
= INTVAL (operands
[2]);
4231 rtx scratch
= operands
[3];
4232 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4233 Word move if no scratch is needed, otherwise use size of scratch. */
4234 enum machine_mode move_mode
= QImode
;
4237 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
4240 move_mode
= GET_MODE (scratch
);
4242 /* Force DI rotate to use QI moves since other DI moves are currently split
4243 into QI moves so forward propagation works better. */
4246 /* Make scratch smaller if needed. */
4247 if (GET_MODE (scratch
) == HImode
&& move_mode
== QImode
)
4248 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
4250 int move_size
= GET_MODE_SIZE (move_mode
);
4251 /* Number of bytes/words to rotate. */
4252 int offset
= (num
>> 3) / move_size
;
4253 /* Number of moves needed. */
4254 int size
= GET_MODE_SIZE (mode
) / move_size
;
4255 /* Himode byte swap is special case to avoid a scratch register. */
4256 if (mode
== HImode
&& same_reg
)
4258 /* HImode byte swap, using xor. This is as quick as using scratch. */
4260 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
4261 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
4262 if (!rtx_equal_p (dst
, src
))
4264 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4265 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
4266 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4271 /* Create linked list of moves to determine move order. */
4277 /* Generate list of subreg moves. */
4278 for (i
= 0; i
< size
; i
++)
4281 int to
= (from
+ offset
) % size
;
4282 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
4283 mode
, from
* move_size
);
4284 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
4285 mode
, to
* move_size
);
4288 /* Mark dependence where a dst of one move is the src of another move.
4289 The first move is a conflict as it must wait until second is
4290 performed. We ignore moves to self - we catch this later. */
4292 for (i
= 0; i
< size
; i
++)
4293 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
4294 for (j
= 0; j
< size
; j
++)
4295 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
4297 /* The dst of move i is the src of move j. */
4304 /* Go through move list and perform non-conflicting moves. As each
4305 non-overlapping move is made, it may remove other conflicts
4306 so the process is repeated until no conflicts remain. */
4311 /* Emit move where dst is not also a src or we have used that
4313 for (i
= 0; i
< size
; i
++)
4314 if (move
[i
].src
!= NULL_RTX
)
4315 if (move
[i
].links
== -1 || move
[move
[i
].links
].src
== NULL_RTX
)
4318 /* Ignore NOP moves to self. */
4319 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
4320 emit_move_insn (move
[i
].dst
, move
[i
].src
);
4322 /* Remove conflict from list. */
4323 move
[i
].src
= NULL_RTX
;
4328 /* Check for deadlock. This is when no moves occurred and we have
4329 at least one blocked move. */
4330 if (moves
== 0 && blocked
!= -1)
4332 /* Need to use scratch register to break deadlock.
4333 Add move to put dst of blocked move into scratch.
4334 When this move occurs, it will break chain deadlock.
4335 The scratch register is substituted for real move. */
4337 move
[size
].src
= move
[blocked
].dst
;
4338 move
[size
].dst
= scratch
;
4339 /* Scratch move is never blocked. */
4340 move
[size
].links
= -1;
4341 /* Make sure we have valid link. */
4342 gcc_assert (move
[blocked
].links
!= -1);
4343 /* Replace src of blocking move with scratch reg. */
4344 move
[move
[blocked
].links
].src
= scratch
;
4345 /* Make dependent on scratch move occuring. */
4346 move
[blocked
].links
= size
;
4350 while (blocked
!= -1);
4355 /* Modifies the length assigned to instruction INSN
4356 LEN is the initially computed length of the insn. */
4359 adjust_insn_length (rtx insn
, int len
)
4361 rtx patt
= PATTERN (insn
);
4364 if (GET_CODE (patt
) == SET
)
4367 op
[1] = SET_SRC (patt
);
4368 op
[0] = SET_DEST (patt
);
4369 if (general_operand (op
[1], VOIDmode
)
4370 && general_operand (op
[0], VOIDmode
))
4372 switch (GET_MODE (op
[0]))
4375 output_movqi (insn
, op
, &len
);
4378 output_movhi (insn
, op
, &len
);
4382 output_movsisf (insn
, op
, &len
);
4388 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4390 switch (GET_MODE (op
[1]))
4392 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4393 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4397 else if (GET_CODE (op
[1]) == AND
)
4399 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4401 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4402 if (GET_MODE (op
[1]) == SImode
)
4403 len
= (((mask
& 0xff) != 0xff)
4404 + ((mask
& 0xff00) != 0xff00)
4405 + ((mask
& 0xff0000L
) != 0xff0000L
)
4406 + ((mask
& 0xff000000L
) != 0xff000000L
));
4407 else if (GET_MODE (op
[1]) == HImode
)
4408 len
= (((mask
& 0xff) != 0xff)
4409 + ((mask
& 0xff00) != 0xff00));
4412 else if (GET_CODE (op
[1]) == IOR
)
4414 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4416 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4417 if (GET_MODE (op
[1]) == SImode
)
4418 len
= (((mask
& 0xff) != 0)
4419 + ((mask
& 0xff00) != 0)
4420 + ((mask
& 0xff0000L
) != 0)
4421 + ((mask
& 0xff000000L
) != 0));
4422 else if (GET_MODE (op
[1]) == HImode
)
4423 len
= (((mask
& 0xff) != 0)
4424 + ((mask
& 0xff00) != 0));
4428 set
= single_set (insn
);
4433 op
[1] = SET_SRC (set
);
4434 op
[0] = SET_DEST (set
);
4436 if (GET_CODE (patt
) == PARALLEL
4437 && general_operand (op
[1], VOIDmode
)
4438 && general_operand (op
[0], VOIDmode
))
4440 if (XVECLEN (patt
, 0) == 2)
4441 op
[2] = XVECEXP (patt
, 0, 1);
4443 switch (GET_MODE (op
[0]))
4449 output_reload_inhi (insn
, op
, &len
);
4453 output_reload_insisf (insn
, op
, &len
);
4459 else if (GET_CODE (op
[1]) == ASHIFT
4460 || GET_CODE (op
[1]) == ASHIFTRT
4461 || GET_CODE (op
[1]) == LSHIFTRT
)
4465 ops
[1] = XEXP (op
[1],0);
4466 ops
[2] = XEXP (op
[1],1);
4467 switch (GET_CODE (op
[1]))
4470 switch (GET_MODE (op
[0]))
4472 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4473 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4474 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4479 switch (GET_MODE (op
[0]))
4481 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4482 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4483 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4488 switch (GET_MODE (op
[0]))
4490 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4491 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4492 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4504 /* Return nonzero if register REG dead after INSN. */
4507 reg_unused_after (rtx insn
, rtx reg
)
4509 return (dead_or_set_p (insn
, reg
)
4510 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4513 /* Return nonzero if REG is not used after INSN.
4514 We assume REG is a reload reg, and therefore does
4515 not live past labels. It may live past calls or jumps though. */
4518 _reg_unused_after (rtx insn
, rtx reg
)
4523 /* If the reg is set by this instruction, then it is safe for our
4524 case. Disregard the case where this is a store to memory, since
4525 we are checking a register used in the store address. */
4526 set
= single_set (insn
);
4527 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4528 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4531 while ((insn
= NEXT_INSN (insn
)))
4534 code
= GET_CODE (insn
);
4537 /* If this is a label that existed before reload, then the register
4538 if dead here. However, if this is a label added by reorg, then
4539 the register may still be live here. We can't tell the difference,
4540 so we just ignore labels completely. */
4541 if (code
== CODE_LABEL
)
4549 if (code
== JUMP_INSN
)
4552 /* If this is a sequence, we must handle them all at once.
4553 We could have for instance a call that sets the target register,
4554 and an insn in a delay slot that uses the register. In this case,
4555 we must return 0. */
4556 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4561 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4563 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4564 rtx set
= single_set (this_insn
);
4566 if (GET_CODE (this_insn
) == CALL_INSN
)
4568 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4570 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4575 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4577 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4579 if (GET_CODE (SET_DEST (set
)) != MEM
)
4585 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4590 else if (code
== JUMP_INSN
)
4594 if (code
== CALL_INSN
)
4597 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4598 if (GET_CODE (XEXP (tem
, 0)) == USE
4599 && REG_P (XEXP (XEXP (tem
, 0), 0))
4600 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4602 if (call_used_regs
[REGNO (reg
)])
4606 set
= single_set (insn
);
4608 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4610 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4611 return GET_CODE (SET_DEST (set
)) != MEM
;
4612 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4618 /* Target hook for assembling integer objects. The AVR version needs
4619 special handling for references to certain labels. */
4622 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4624 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4625 && text_segment_operand (x
, VOIDmode
) )
4627 fputs ("\t.word\tgs(", asm_out_file
);
4628 output_addr_const (asm_out_file
, x
);
4629 fputs (")\n", asm_out_file
);
4632 return default_assemble_integer (x
, size
, aligned_p
);
4635 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4638 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4641 /* If the function has the 'signal' or 'interrupt' attribute, test to
4642 make sure that the name of the function is "__vector_NN" so as to
4643 catch when the user misspells the interrupt vector name. */
4645 if (cfun
->machine
->is_interrupt
)
4647 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4649 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4650 "%qs appears to be a misspelled interrupt handler",
4654 else if (cfun
->machine
->is_signal
)
4656 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4658 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4659 "%qs appears to be a misspelled signal handler",
4664 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4665 ASM_OUTPUT_LABEL (file
, name
);
4668 /* The routine used to output NUL terminated strings. We use a special
4669 version of this for most svr4 targets because doing so makes the
4670 generated assembly code more compact (and thus faster to assemble)
4671 as well as more readable, especially for targets like the i386
4672 (where the only alternative is to output character sequences as
4673 comma separated lists of numbers). */
4676 gas_output_limited_string(FILE *file
, const char *str
)
4678 const unsigned char *_limited_str
= (const unsigned char *) str
;
4680 fprintf (file
, "%s\"", STRING_ASM_OP
);
4681 for (; (ch
= *_limited_str
); _limited_str
++)
4684 switch (escape
= ESCAPES
[ch
])
4690 fprintf (file
, "\\%03o", ch
);
4694 putc (escape
, file
);
4698 fprintf (file
, "\"\n");
4701 /* The routine used to output sequences of byte values. We use a special
4702 version of this for most svr4 targets because doing so makes the
4703 generated assembly code more compact (and thus faster to assemble)
4704 as well as more readable. Note that if we find subparts of the
4705 character sequence which end with NUL (and which are shorter than
4706 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4709 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4711 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4712 const unsigned char *limit
= _ascii_bytes
+ length
;
4713 unsigned bytes_in_chunk
= 0;
4714 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4716 const unsigned char *p
;
4717 if (bytes_in_chunk
>= 60)
4719 fprintf (file
, "\"\n");
4722 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4724 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4726 if (bytes_in_chunk
> 0)
4728 fprintf (file
, "\"\n");
4731 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4738 if (bytes_in_chunk
== 0)
4739 fprintf (file
, "\t.ascii\t\"");
4740 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4747 fprintf (file
, "\\%03o", ch
);
4748 bytes_in_chunk
+= 4;
4752 putc (escape
, file
);
4753 bytes_in_chunk
+= 2;
4758 if (bytes_in_chunk
> 0)
4759 fprintf (file
, "\"\n");
4762 /* Return value is nonzero if pseudos that have been
4763 assigned to registers of class CLASS would likely be spilled
4764 because registers of CLASS are needed for spill registers. */
4767 class_likely_spilled_p (int c
)
4769 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4772 /* Valid attributes:
4773 progmem - put data to program memory;
4774 signal - make a function to be hardware interrupt. After function
4775 prologue interrupts are disabled;
4776 interrupt - make a function to be hardware interrupt. After function
4777 prologue interrupts are enabled;
4778 naked - don't generate function prologue/epilogue and `ret' command.
4780 Only `progmem' attribute valid for type. */
4782 /* Handle a "progmem" attribute; arguments as in
4783 struct attribute_spec.handler. */
4785 avr_handle_progmem_attribute (tree
*node
, tree name
,
4786 tree args ATTRIBUTE_UNUSED
,
4787 int flags ATTRIBUTE_UNUSED
,
4792 if (TREE_CODE (*node
) == TYPE_DECL
)
4794 /* This is really a decl attribute, not a type attribute,
4795 but try to handle it for GCC 3.0 backwards compatibility. */
4797 tree type
= TREE_TYPE (*node
);
4798 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4799 tree newtype
= build_type_attribute_variant (type
, attr
);
4801 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4802 TREE_TYPE (*node
) = newtype
;
4803 *no_add_attrs
= true;
4805 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4807 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4809 warning (0, "only initialized variables can be placed into "
4810 "program memory area");
4811 *no_add_attrs
= true;
4816 warning (OPT_Wattributes
, "%qE attribute ignored",
4818 *no_add_attrs
= true;
4825 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4826 struct attribute_spec.handler. */
4829 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4830 tree args ATTRIBUTE_UNUSED
,
4831 int flags ATTRIBUTE_UNUSED
,
4834 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4836 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4838 *no_add_attrs
= true;
4845 avr_handle_fntype_attribute (tree
*node
, tree name
,
4846 tree args ATTRIBUTE_UNUSED
,
4847 int flags ATTRIBUTE_UNUSED
,
4850 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4852 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
4854 *no_add_attrs
= true;
4860 /* Look for attribute `progmem' in DECL
4861 if found return 1, otherwise 0. */
4864 avr_progmem_p (tree decl
, tree attributes
)
4868 if (TREE_CODE (decl
) != VAR_DECL
)
4872 != lookup_attribute ("progmem", attributes
))
4878 while (TREE_CODE (a
) == ARRAY_TYPE
);
4880 if (a
== error_mark_node
)
4883 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4889 /* Add the section attribute if the variable is in progmem. */
4892 avr_insert_attributes (tree node
, tree
*attributes
)
4894 if (TREE_CODE (node
) == VAR_DECL
4895 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4896 && avr_progmem_p (node
, *attributes
))
4898 static const char dsec
[] = ".progmem.data";
4899 *attributes
= tree_cons (get_identifier ("section"),
4900 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4903 /* ??? This seems sketchy. Why can't the user declare the
4904 thing const in the first place? */
4905 TREE_READONLY (node
) = 1;
4909 /* A get_unnamed_section callback for switching to progmem_section. */
4912 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4914 fprintf (asm_out_file
,
4915 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4916 AVR_HAVE_JMP_CALL
? "a" : "ax");
4917 /* Should already be aligned, this is just to be safe if it isn't. */
4918 fprintf (asm_out_file
, "\t.p2align 1\n");
4921 /* Implement TARGET_ASM_INIT_SECTIONS. */
4924 avr_asm_init_sections (void)
4926 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
4927 avr_output_progmem_section_asm_op
,
4929 readonly_data_section
= data_section
;
4933 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4935 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4937 if (strncmp (name
, ".noinit", 7) == 0)
4939 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4940 && DECL_INITIAL (decl
) == NULL_TREE
)
4941 flags
|= SECTION_BSS
; /* @nobits */
4943 warning (0, "only uninitialized variables can be placed in the "
4950 /* Outputs some appropriate text to go at the start of an assembler
4954 avr_file_start (void)
4956 if (avr_current_arch
->asm_only
)
4957 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4959 default_file_start ();
4961 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4962 fputs ("__SREG__ = 0x3f\n"
4964 "__SP_L__ = 0x3d\n", asm_out_file
);
4966 fputs ("__tmp_reg__ = 0\n"
4967 "__zero_reg__ = 1\n", asm_out_file
);
4969 /* FIXME: output these only if there is anything in the .data / .bss
4970 sections - some code size could be saved by not linking in the
4971 initialization code from libgcc if one or both sections are empty. */
4972 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4973 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4976 /* Outputs to the stdio stream FILE some
4977 appropriate text to go at the end of an assembler file. */
4984 /* Choose the order in which to allocate hard registers for
4985 pseudo-registers local to a basic block.
4987 Store the desired register order in the array `reg_alloc_order'.
4988 Element 0 should be the register to allocate first; element 1, the
4989 next register; and so on. */
4992 order_regs_for_local_alloc (void)
4995 static const int order_0
[] = {
5003 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5007 static const int order_1
[] = {
5015 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5019 static const int order_2
[] = {
5028 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5033 const int *order
= (TARGET_ORDER_1
? order_1
:
5034 TARGET_ORDER_2
? order_2
:
5036 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5037 reg_alloc_order
[i
] = order
[i
];
5041 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5042 cost of an RTX operand given its context. X is the rtx of the
5043 operand, MODE is its mode, and OUTER is the rtx_code of this
5044 operand's parent operator. */
5047 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5050 enum rtx_code code
= GET_CODE (x
);
5061 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5068 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
5072 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5073 is to be calculated. Return true if the complete cost has been
5074 computed, and false if subexpressions should be scanned. In either
5075 case, *TOTAL contains the cost result. */
5078 avr_rtx_costs (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5081 enum rtx_code code
= (enum rtx_code
) codearg
;
5082 enum machine_mode mode
= GET_MODE (x
);
5089 /* Immediate constants are as cheap as registers. */
5097 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5105 *total
= COSTS_N_INSNS (1);
5109 *total
= COSTS_N_INSNS (3);
5113 *total
= COSTS_N_INSNS (7);
5119 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5127 *total
= COSTS_N_INSNS (1);
5133 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5137 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5138 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5142 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5143 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5144 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5148 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5149 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5150 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5157 *total
= COSTS_N_INSNS (1);
5158 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5159 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5163 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5165 *total
= COSTS_N_INSNS (2);
5166 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5168 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5169 *total
= COSTS_N_INSNS (1);
5171 *total
= COSTS_N_INSNS (2);
5175 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5177 *total
= COSTS_N_INSNS (4);
5178 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5180 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5181 *total
= COSTS_N_INSNS (1);
5183 *total
= COSTS_N_INSNS (4);
5189 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5195 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5196 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5197 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5198 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5202 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5203 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5204 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5212 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5214 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5221 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5223 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5231 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5232 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5240 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5243 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5244 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5251 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5252 *total
= COSTS_N_INSNS (1);
5257 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5258 *total
= COSTS_N_INSNS (3);
5263 if (CONST_INT_P (XEXP (x
, 1)))
5264 switch (INTVAL (XEXP (x
, 1)))
5268 *total
= COSTS_N_INSNS (5);
5271 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5279 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5286 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5288 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5289 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5293 val
= INTVAL (XEXP (x
, 1));
5295 *total
= COSTS_N_INSNS (3);
5296 else if (val
>= 0 && val
<= 7)
5297 *total
= COSTS_N_INSNS (val
);
5299 *total
= COSTS_N_INSNS (1);
5304 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5306 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5307 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5310 switch (INTVAL (XEXP (x
, 1)))
5317 *total
= COSTS_N_INSNS (2);
5320 *total
= COSTS_N_INSNS (3);
5326 *total
= COSTS_N_INSNS (4);
5331 *total
= COSTS_N_INSNS (5);
5334 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5337 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5340 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5343 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5344 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5349 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5351 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5352 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5355 switch (INTVAL (XEXP (x
, 1)))
5361 *total
= COSTS_N_INSNS (3);
5366 *total
= COSTS_N_INSNS (4);
5369 *total
= COSTS_N_INSNS (6);
5372 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5375 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5376 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5383 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5390 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5392 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5393 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5397 val
= INTVAL (XEXP (x
, 1));
5399 *total
= COSTS_N_INSNS (4);
5401 *total
= COSTS_N_INSNS (2);
5402 else if (val
>= 0 && val
<= 7)
5403 *total
= COSTS_N_INSNS (val
);
5405 *total
= COSTS_N_INSNS (1);
5410 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5412 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5413 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5416 switch (INTVAL (XEXP (x
, 1)))
5422 *total
= COSTS_N_INSNS (2);
5425 *total
= COSTS_N_INSNS (3);
5431 *total
= COSTS_N_INSNS (4);
5435 *total
= COSTS_N_INSNS (5);
5438 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5441 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5445 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5448 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5449 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5454 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5456 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5457 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5460 switch (INTVAL (XEXP (x
, 1)))
5466 *total
= COSTS_N_INSNS (4);
5471 *total
= COSTS_N_INSNS (6);
5474 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5477 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5480 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5481 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5488 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5495 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5497 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5498 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5502 val
= INTVAL (XEXP (x
, 1));
5504 *total
= COSTS_N_INSNS (3);
5505 else if (val
>= 0 && val
<= 7)
5506 *total
= COSTS_N_INSNS (val
);
5508 *total
= COSTS_N_INSNS (1);
5513 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5515 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5516 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5519 switch (INTVAL (XEXP (x
, 1)))
5526 *total
= COSTS_N_INSNS (2);
5529 *total
= COSTS_N_INSNS (3);
5534 *total
= COSTS_N_INSNS (4);
5538 *total
= COSTS_N_INSNS (5);
5544 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5547 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5551 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5554 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5555 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5560 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5562 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5563 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5566 switch (INTVAL (XEXP (x
, 1)))
5572 *total
= COSTS_N_INSNS (4);
5575 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5580 *total
= COSTS_N_INSNS (4);
5583 *total
= COSTS_N_INSNS (6);
5586 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5587 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5594 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5598 switch (GET_MODE (XEXP (x
, 0)))
5601 *total
= COSTS_N_INSNS (1);
5602 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5603 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5607 *total
= COSTS_N_INSNS (2);
5608 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5609 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5610 else if (INTVAL (XEXP (x
, 1)) != 0)
5611 *total
+= COSTS_N_INSNS (1);
5615 *total
= COSTS_N_INSNS (4);
5616 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5617 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5618 else if (INTVAL (XEXP (x
, 1)) != 0)
5619 *total
+= COSTS_N_INSNS (3);
5625 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5634 /* Calculate the cost of a memory address. */
5637 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
5639 if (GET_CODE (x
) == PLUS
5640 && GET_CODE (XEXP (x
,1)) == CONST_INT
5641 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5642 && INTVAL (XEXP (x
,1)) >= 61)
5644 if (CONSTANT_ADDRESS_P (x
))
5646 if (optimize
> 0 && io_address_operand (x
, QImode
))
5653 /* Test for extra memory constraint 'Q'.
5654 It's a memory address based on Y or Z pointer with valid displacement. */
5657 extra_constraint_Q (rtx x
)
5659 if (GET_CODE (XEXP (x
,0)) == PLUS
5660 && REG_P (XEXP (XEXP (x
,0), 0))
5661 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5662 && (INTVAL (XEXP (XEXP (x
,0), 1))
5663 <= MAX_LD_OFFSET (GET_MODE (x
))))
5665 rtx xx
= XEXP (XEXP (x
,0), 0);
5666 int regno
= REGNO (xx
);
5667 if (TARGET_ALL_DEBUG
)
5669 fprintf (stderr
, ("extra_constraint:\n"
5670 "reload_completed: %d\n"
5671 "reload_in_progress: %d\n"),
5672 reload_completed
, reload_in_progress
);
5675 if (regno
>= FIRST_PSEUDO_REGISTER
)
5676 return 1; /* allocate pseudos */
5677 else if (regno
== REG_Z
|| regno
== REG_Y
)
5678 return 1; /* strictly check */
5679 else if (xx
== frame_pointer_rtx
5680 || xx
== arg_pointer_rtx
)
5681 return 1; /* XXX frame & arg pointer checks */
5686 /* Convert condition code CONDITION to the valid AVR condition code. */
5689 avr_normalize_condition (RTX_CODE condition
)
5706 /* This function optimizes conditional jumps. */
5713 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5715 if (! (GET_CODE (insn
) == INSN
5716 || GET_CODE (insn
) == CALL_INSN
5717 || GET_CODE (insn
) == JUMP_INSN
)
5718 || !single_set (insn
))
5721 pattern
= PATTERN (insn
);
5723 if (GET_CODE (pattern
) == PARALLEL
)
5724 pattern
= XVECEXP (pattern
, 0, 0);
5725 if (GET_CODE (pattern
) == SET
5726 && SET_DEST (pattern
) == cc0_rtx
5727 && compare_diff_p (insn
))
5729 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5731 /* Now we work under compare insn. */
5733 pattern
= SET_SRC (pattern
);
5734 if (true_regnum (XEXP (pattern
,0)) >= 0
5735 && true_regnum (XEXP (pattern
,1)) >= 0 )
5737 rtx x
= XEXP (pattern
,0);
5738 rtx next
= next_real_insn (insn
);
5739 rtx pat
= PATTERN (next
);
5740 rtx src
= SET_SRC (pat
);
5741 rtx t
= XEXP (src
,0);
5742 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5743 XEXP (pattern
,0) = XEXP (pattern
,1);
5744 XEXP (pattern
,1) = x
;
5745 INSN_CODE (next
) = -1;
5747 else if (true_regnum (XEXP (pattern
, 0)) >= 0
5748 && XEXP (pattern
, 1) == const0_rtx
)
5750 /* This is a tst insn, we can reverse it. */
5751 rtx next
= next_real_insn (insn
);
5752 rtx pat
= PATTERN (next
);
5753 rtx src
= SET_SRC (pat
);
5754 rtx t
= XEXP (src
,0);
5756 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5757 XEXP (pattern
, 1) = XEXP (pattern
, 0);
5758 XEXP (pattern
, 0) = const0_rtx
;
5759 INSN_CODE (next
) = -1;
5760 INSN_CODE (insn
) = -1;
5762 else if (true_regnum (XEXP (pattern
,0)) >= 0
5763 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5765 rtx x
= XEXP (pattern
,1);
5766 rtx next
= next_real_insn (insn
);
5767 rtx pat
= PATTERN (next
);
5768 rtx src
= SET_SRC (pat
);
5769 rtx t
= XEXP (src
,0);
5770 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5772 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5774 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5775 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5776 INSN_CODE (next
) = -1;
5777 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.  */

/* NOTE(review): body truncated in original text; upstream avr.c returns
   r24 here — verify against upstream.  */

int
avr_ret_register (void)
{
  return 24;
}
5793 /* Create an RTX representing the place where a
5794 library function returns a value of mode MODE. */
5797 avr_libcall_value (enum machine_mode mode
)
5799 int offs
= GET_MODE_SIZE (mode
);
5802 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5805 /* Create an RTX representing the place where a
5806 function returns a value of data type VALTYPE. */
5809 avr_function_value (const_tree type
,
5810 const_tree func ATTRIBUTE_UNUSED
,
5811 bool outgoing ATTRIBUTE_UNUSED
)
5815 if (TYPE_MODE (type
) != BLKmode
)
5816 return avr_libcall_value (TYPE_MODE (type
));
5818 offs
= int_size_in_bytes (type
);
5821 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5822 offs
= GET_MODE_SIZE (SImode
);
5823 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5824 offs
= GET_MODE_SIZE (DImode
);
5826 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5829 /* Places additional restrictions on the register class to
5830 use when it is necessary to copy value X into a register
5834 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
5840 test_hard_reg_class (enum reg_class rclass
, rtx x
)
5842 int regno
= true_regnum (x
);
5846 if (TEST_HARD_REG_CLASS (rclass
, regno
))
5854 jump_over_one_insn_p (rtx insn
, rtx dest
)
5856 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5859 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5860 int dest_addr
= INSN_ADDRESSES (uid
);
5861 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5864 /* Returns 1 if a value of mode MODE can be stored starting with hard
5865 register number REGNO. On the enhanced core, anything larger than
5866 1 byte must start in even numbered register for "movw" to work
5867 (this way we don't have to check for odd registers everywhere). */
5870 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5872 /* Disallow QImode in stack pointer regs. */
5873 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5876 /* The only thing that can go into registers r28:r29 is a Pmode. */
5877 if (regno
== REG_Y
&& mode
== Pmode
)
5880 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5881 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5887 /* Modes larger than QImode occupy consecutive registers. */
5888 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5891 /* All modes larger than QImode should start in an even register. */
5892 return !(regno
& 1);
5896 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5902 if (GET_CODE (operands
[1]) == CONST_INT
)
5904 int val
= INTVAL (operands
[1]);
5905 if ((val
& 0xff) == 0)
5908 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5909 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5912 else if ((val
& 0xff00) == 0)
5915 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5916 AS2 (mov
,%A0
,%2) CR_TAB
5917 AS2 (mov
,%B0
,__zero_reg__
));
5919 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5922 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5923 AS2 (mov
,%A0
,%2) CR_TAB
5928 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5929 AS2 (mov
,%A0
,%2) CR_TAB
5930 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5936 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5938 rtx src
= operands
[1];
5939 int cnst
= (GET_CODE (src
) == CONST_INT
);
5944 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5945 + ((INTVAL (src
) & 0xff00) != 0)
5946 + ((INTVAL (src
) & 0xff0000) != 0)
5947 + ((INTVAL (src
) & 0xff000000) != 0);
5954 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5955 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5958 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5959 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
5961 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5962 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5965 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5966 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
5968 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5969 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5972 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5973 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
5975 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5976 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5979 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5980 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5986 avr_output_bld (rtx operands
[], int bit_nr
)
5988 static char s
[] = "bld %A0,0";
5990 s
[5] = 'A' + (bit_nr
>> 3);
5991 s
[8] = '0' + (bit_nr
& 7);
5992 output_asm_insn (s
, operands
);
5996 avr_output_addr_vec_elt (FILE *stream
, int value
)
5998 switch_to_section (progmem_section
);
5999 if (AVR_HAVE_JMP_CALL
)
6000 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
6002 fprintf (stream
, "\trjmp .L%d\n", value
);
6005 /* Returns true if SCRATCH are safe to be allocated as a scratch
6006 registers (for a define_peephole2) in the current function. */
6009 avr_hard_regno_scratch_ok (unsigned int regno
)
6011 /* Interrupt functions can only use registers that have already been saved
6012 by the prologue, even if they would normally be call-clobbered. */
6014 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6015 && !df_regs_ever_live_p (regno
))
6021 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6024 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
6025 unsigned int new_reg
)
6027 /* Interrupt functions can only use registers that have already been
6028 saved by the prologue, even if they would normally be
6031 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6032 && !df_regs_ever_live_p (new_reg
))
6038 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6039 or memory location in the I/O space (QImode only).
6041 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6042 Operand 1: register operand to test, or CONST_INT memory address.
6043 Operand 2: bit number.
6044 Operand 3: label to jump to if the test is true. */
6047 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6049 enum rtx_code comp
= GET_CODE (operands
[0]);
6050 int long_jump
= (get_attr_length (insn
) >= 4);
6051 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6055 else if (comp
== LT
)
6059 comp
= reverse_condition (comp
);
6061 if (GET_CODE (operands
[1]) == CONST_INT
)
6063 if (INTVAL (operands
[1]) < 0x40)
6066 output_asm_insn (AS2 (sbis
,%m1
-0x20,%2), operands
);
6068 output_asm_insn (AS2 (sbic
,%m1
-0x20,%2), operands
);
6072 output_asm_insn (AS2 (in
,__tmp_reg__
,%m1
-0x20), operands
);
6074 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6076 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6079 else /* GET_CODE (operands[1]) == REG */
6081 if (GET_MODE (operands
[1]) == QImode
)
6084 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6086 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6088 else /* HImode or SImode */
6090 static char buf
[] = "sbrc %A1,0";
6091 int bit_nr
= INTVAL (operands
[2]);
6092 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6093 buf
[6] = 'A' + (bit_nr
>> 3);
6094 buf
[9] = '0' + (bit_nr
& 7);
6095 output_asm_insn (buf
, operands
);
6100 return (AS1 (rjmp
,.+4) CR_TAB
6103 return AS1 (rjmp
,%x3
);
6107 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6110 avr_asm_out_ctor (rtx symbol
, int priority
)
6112 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6113 default_ctor_section_asm_out_constructor (symbol
, priority
);
6116 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6119 avr_asm_out_dtor (rtx symbol
, int priority
)
6121 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6122 default_dtor_section_asm_out_destructor (symbol
, priority
);
6125 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6128 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6130 if (TYPE_MODE (type
) == BLKmode
)
6132 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6133 return (size
== -1 || size
> 8);
6139 /* Worker function for CASE_VALUES_THRESHOLD. */
6141 unsigned int avr_case_values_threshold (void)
6143 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;