1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
51 /* Maximum allowed offset for an address in the LD command.  */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
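/* Annotation (not in the original source): with this definition a QImode
   access may use displacements 0..63 and an HImode access 0..62, so that
   the highest byte accessed still fits the 6-bit displacement field of
   the LDD/STD instructions.  */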
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
106 const_tree, bool);
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
108 const_tree, bool);
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111 static void avr_encode_section_info (tree, rtx, int);
113 /* Allocate registers from r25 down to r8 for function call parameters.  */
114 #define FIRST_CUM_REG 26
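/* Annotation: up to 18 bytes of arguments are passed in r25 down to r8;
   cum->regno counts down from FIRST_CUM_REG as registers are consumed,
   see avr_function_arg below.  */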
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
146 false },
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
148 false },
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
150 false },
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
152 false },
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
154 false },
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
156 false },
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_END
174 #define TARGET_ASM_FILE_END avr_file_end
176 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
177 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
178 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
179 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181 #undef TARGET_FUNCTION_VALUE
182 #define TARGET_FUNCTION_VALUE avr_function_value
183 #undef TARGET_LIBCALL_VALUE
184 #define TARGET_LIBCALL_VALUE avr_libcall_value
185 #undef TARGET_FUNCTION_VALUE_REGNO_P
186 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188 #undef TARGET_ATTRIBUTE_TABLE
189 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
192 #undef TARGET_INSERT_ATTRIBUTES
193 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
194 #undef TARGET_SECTION_TYPE_FLAGS
195 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197 #undef TARGET_ASM_NAMED_SECTION
198 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
199 #undef TARGET_ASM_INIT_SECTIONS
200 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
253 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
254 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
256 #undef TARGET_INIT_BUILTINS
257 #define TARGET_INIT_BUILTINS avr_init_builtins
259 #undef TARGET_EXPAND_BUILTIN
260 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
263 struct gcc_target targetm = TARGET_INITIALIZER;
265 static void
266 avr_option_override (void)
268 flag_delete_null_pointer_checks = 0;
270 avr_current_device = &avr_mcu_types[avr_mcu_index];
271 avr_current_arch = &avr_arch_types[avr_current_device->arch];
272 avr_extra_arch_macro = avr_current_device->macro;
274 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
275 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
277 init_machine_status = avr_init_machine_status;
280 /* Return register class from register number.  */
282 static const enum reg_class reg_class_tab[]={
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS, /* r0 - r15 */
287 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
288 LD_REGS, /* r16 - r23 */
289 ADDW_REGS,ADDW_REGS, /* r24,r25 */
290 POINTER_X_REGS,POINTER_X_REGS, /* r26,r27 */
291 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
292 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
293 STACK_REG,STACK_REG /* SPL,SPH */
296 /* Function to set up the backend function structure. */
298 static struct machine_function *
299 avr_init_machine_status (void)
301 return ggc_alloc_cleared_machine_function ();
304 /* Return register class for register R. */
306 enum reg_class
307 avr_regno_reg_class (int r)
309 if (r <= 33)
310 return reg_class_tab[r];
311 return ALL_REGS;
314 /* A helper for the function attribute predicates below that digs for
315 attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE.  */
317 static inline int
318 avr_lookup_function_attribute1 (const_tree func, const char *name)
320 if (FUNCTION_DECL == TREE_CODE (func))
322 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
324 return true;
327 func = TREE_TYPE (func);
330 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
331 || TREE_CODE (func) == METHOD_TYPE);
333 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
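/* Annotation: the attribute is looked up on the FUNCTION_DECL first and
   then on its type, so "void f (void) __attribute__((signal));" is found
   no matter where the front end attached the attribute.  */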
336 /* Return nonzero if FUNC is a naked function. */
338 static int
339 avr_naked_function_p (tree func)
341 return avr_lookup_function_attribute1 (func, "naked");
344 /* Return nonzero if FUNC is an interrupt function as specified
345 by the "interrupt" attribute. */
347 static int
348 interrupt_function_p (tree func)
350 return avr_lookup_function_attribute1 (func, "interrupt");
353 /* Return nonzero if FUNC is a signal function as specified
354 by the "signal" attribute. */
356 static int
357 signal_function_p (tree func)
359 return avr_lookup_function_attribute1 (func, "signal");
362 /* Return nonzero if FUNC is an OS_task function.  */
364 static int
365 avr_OS_task_function_p (tree func)
367 return avr_lookup_function_attribute1 (func, "OS_task");
370 /* Return nonzero if FUNC is an OS_main function.  */
372 static int
373 avr_OS_main_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "OS_main");
378 /* Return the number of hard registers to push/pop in the prologue/epilogue
379 of the current function, and optionally store these registers in SET. */
381 static int
382 avr_regs_to_save (HARD_REG_SET *set)
384 int reg, count;
385 int int_or_sig_p = (interrupt_function_p (current_function_decl)
386 || signal_function_p (current_function_decl));
388 if (set)
389 CLEAR_HARD_REG_SET (*set);
390 count = 0;
392 /* No need to save any registers if the function never returns or
393 has the "OS_task" or "OS_main" attribute.  */
394 if (TREE_THIS_VOLATILE (current_function_decl)
395 || cfun->machine->is_OS_task
396 || cfun->machine->is_OS_main)
397 return 0;
399 for (reg = 0; reg < 32; reg++)
401 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
402 any global register variables. */
403 if (fixed_regs[reg])
404 continue;
406 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
407 || (df_regs_ever_live_p (reg)
408 && (int_or_sig_p || !call_used_regs[reg])
409 && !(frame_pointer_needed
410 && (reg == REG_Y || reg == (REG_Y+1)))))
412 if (set)
413 SET_HARD_REG_BIT (*set, reg);
414 count++;
417 return count;
420 /* Return true if register FROM can be eliminated via register TO. */
422 bool
423 avr_can_eliminate (const int from, const int to)
425 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
426 || ((from == FRAME_POINTER_REGNUM
427 || from == FRAME_POINTER_REGNUM + 1)
428 && !frame_pointer_needed));
431 /* Compute offset between arg_pointer and frame_pointer. */
434 avr_initial_elimination_offset (int from, int to)
436 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
437 return 0;
438 else
440 int offset = frame_pointer_needed ? 2 : 0;
441 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
443 offset += avr_regs_to_save (NULL);
444 return get_frame_size () + (avr_pc_size) + 1 + offset;
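/* Worked example (annotation): on a device with a 2-byte PC, a needed
   frame pointer, a 4-byte frame and 3 saved registers, the offset is
   4 + 2 + 1 + (2 + 3) = 12 bytes.  */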
448 /* The actual start of the frame is virtual_stack_vars_rtx, which is
449 offset from the frame pointer by +STARTING_FRAME_OFFSET.
450 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
451 avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
453 rtx avr_builtin_setjmp_frame_value (void)
455 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
456 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
459 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
460 This is the return address of the function.  */
461 rtx
462 avr_return_addr_rtx (int count, rtx tem)
464 rtx r;
466 /* Can only return this function's return address.  Others are not supported.  */
467 if (count)
468 return NULL;
470 if (AVR_3_BYTE_PC)
472 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
473 warning (0, "'builtin_return_address' contains only 2 bytes of address");
475 else
476 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
478 r = gen_rtx_PLUS (Pmode, tem, r);
479 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
480 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
481 return r;
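/* Annotation: the ROTATE by 8 above swaps the two bytes of the HImode
   value, compensating for the byte order in which the CPU stores the
   return address on the stack.  */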
484 /* Return 1 if the function epilogue is just a single "ret". */
487 avr_simple_epilogue (void)
489 return (! frame_pointer_needed
490 && get_frame_size () == 0
491 && avr_regs_to_save (NULL) == 0
492 && ! interrupt_function_p (current_function_decl)
493 && ! signal_function_p (current_function_decl)
494 && ! avr_naked_function_p (current_function_decl)
495 && ! TREE_THIS_VOLATILE (current_function_decl));
498 /* Check the sequence of live call-saved registers; return its length if they form one unbroken run, else 0.  */
500 static int
501 sequent_regs_live (void)
503 int reg;
504 int live_seq=0;
505 int cur_seq=0;
507 for (reg = 0; reg < 18; ++reg)
509 if (!call_used_regs[reg])
511 if (df_regs_ever_live_p (reg))
513 ++live_seq;
514 ++cur_seq;
516 else
517 cur_seq = 0;
521 if (!frame_pointer_needed)
523 if (df_regs_ever_live_p (REG_Y))
525 ++live_seq;
526 ++cur_seq;
528 else
529 cur_seq = 0;
531 if (df_regs_ever_live_p (REG_Y+1))
533 ++live_seq;
534 ++cur_seq;
536 else
537 cur_seq = 0;
539 else
541 cur_seq += 2;
542 live_seq += 2;
544 return (cur_seq == live_seq) ? live_seq : 0;
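/* Example (annotation): if exactly r10..r17 are live and the frame
   pointer is needed, the run is unbroken and the result is 10; any gap
   yields 0, and -mcall-prologues then falls back to individual pushes.  */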
547 /* Obtain the total length of the sequence of insns INSNS.  */
550 get_sequence_length (rtx insns)
552 rtx insn;
553 int length;
555 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
556 length += get_attr_length (insn);
558 return length;
561 /* Implement INCOMING_RETURN_ADDR_RTX. */
564 avr_incoming_return_addr_rtx (void)
566 /* The return address is at the top of the stack. Note that the push
567 was via post-decrement, which means the actual address is off by one. */
568 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
571 /* Helper for expand_prologue. Emit a push of a byte register. */
573 static void
574 emit_push_byte (unsigned regno, bool frame_related_p)
576 rtx mem, reg, insn;
578 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
579 mem = gen_frame_mem (QImode, mem);
580 reg = gen_rtx_REG (QImode, regno);
582 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
583 if (frame_related_p)
584 RTX_FRAME_RELATED_P (insn) = 1;
586 cfun->machine->stack_usage++;
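/* Annotation: a SET of a post-decremented stack MEM like the one above
   is meant to match the push pattern in avr.md and assemble to a single
   PUSH instruction.  */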
590 /* Output function prologue. */
592 void
593 expand_prologue (void)
595 int live_seq;
596 HARD_REG_SET set;
597 int minimize;
598 HOST_WIDE_INT size = get_frame_size();
599 rtx insn;
601 /* Init cfun->machine. */
602 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
603 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
604 cfun->machine->is_signal = signal_function_p (current_function_decl);
605 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
607 cfun->machine->stack_usage = 0;
609 /* Prologue: naked. */
610 if (cfun->machine->is_naked)
612 return;
615 avr_regs_to_save (&set);
616 live_seq = sequent_regs_live ();
617 minimize = (TARGET_CALL_PROLOGUES
618 && !cfun->machine->is_interrupt
619 && !cfun->machine->is_signal
620 && !cfun->machine->is_OS_task
621 && !cfun->machine->is_OS_main
622 && live_seq);
624 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
626 /* Enable interrupts. */
627 if (cfun->machine->is_interrupt)
628 emit_insn (gen_enable_interrupt ());
630 /* Push zero reg. */
631 emit_push_byte (ZERO_REGNO, true);
633 /* Push tmp reg. */
634 emit_push_byte (TMP_REGNO, true);
636 /* Push SREG. */
637 /* ??? There's no dwarf2 column reserved for SREG. */
638 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 emit_push_byte (TMP_REGNO, false);
641 /* Push RAMPZ. */
642 /* ??? There's no dwarf2 column reserved for RAMPZ. */
643 if (AVR_HAVE_RAMPZ
644 && TEST_HARD_REG_BIT (set, REG_Z)
645 && TEST_HARD_REG_BIT (set, REG_Z + 1))
647 emit_move_insn (tmp_reg_rtx,
648 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
649 emit_push_byte (TMP_REGNO, false);
652 /* Clear zero reg. */
653 emit_move_insn (zero_reg_rtx, const0_rtx);
655 /* Prevent any attempt to delete the setting of ZERO_REG! */
656 emit_use (zero_reg_rtx);
658 if (minimize && (frame_pointer_needed
659 || (AVR_2_BYTE_PC && live_seq > 6)
660 || live_seq > 7))
662 int first_reg, reg, offset;
664 emit_move_insn (gen_rtx_REG (HImode, REG_X),
665 gen_int_mode (size, HImode));
667 insn = emit_insn (gen_call_prologue_saves
668 (gen_int_mode (live_seq, HImode),
669 gen_int_mode (size + live_seq, HImode)));
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Describe the effect of the unspec_volatile call to prologue_saves.
673 Note that this formulation assumes that add_reg_note pushes the
674 notes to the front. Thus we build them in the reverse order of
675 how we want dwarf2out to process them. */
677 /* The function always sets frame_pointer_rtx, but whether that
678 setting is permanent in the function depends on frame_pointer_needed.  */
679 add_reg_note (insn, REG_CFA_ADJUST_CFA,
680 gen_rtx_SET (VOIDmode,
681 (frame_pointer_needed
682 ? frame_pointer_rtx : stack_pointer_rtx),
683 plus_constant (stack_pointer_rtx,
684 -(size + live_seq))));
686 /* Note that live_seq always contains r28+r29, but the other
687 registers to be saved are all below 18. */
688 first_reg = 18 - (live_seq - 2);
690 for (reg = 29, offset = -live_seq + 1;
691 reg >= first_reg;
692 reg = (reg == 28 ? 17 : reg - 1), ++offset)
694 rtx m, r;
696 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
697 r = gen_rtx_REG (QImode, reg);
698 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
701 cfun->machine->stack_usage += size + live_seq;
703 else
705 int reg;
706 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
708 emit_push_byte (reg, true);
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. Always be consistent about the
715 ordering of pushes -- epilogue_restores expects the
716 register pair to be pushed low byte first. */
717 emit_push_byte (REG_Y, true);
718 emit_push_byte (REG_Y + 1, true);
721 if (!size)
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
726 else
728 /* Creating a frame can be done by direct manipulation of the
729 stack or via the frame pointer. These two methods are:
730 fp=sp
731 fp-=size
732 sp=fp
734 sp-=size
735 fp=sp
736 The optimum method depends on function type, stack and frame size.
737 To avoid complex logic, both methods are tested and the shortest
738 is selected.  */
739 rtx myfp;
740 rtx fp_plus_insns;
742 if (AVR_HAVE_8BIT_SP)
744 /* The high byte (r29) doesn't change. Prefer 'subi'
745 (1 cycle) over 'sbiw' (2 cycles, same size). */
746 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
748 else
750 /* Normal sized addition. */
751 myfp = frame_pointer_rtx;
754 /* Method 1: Adjust frame pointer.  */
755 start_sequence ();
757 /* Normally the dwarf2out frame-related-expr interpreter does
758 not expect to have the CFA change once the frame pointer is
759 set up. Thus we avoid marking the move insn below and
760 instead indicate that the entire operation is complete after
761 the frame pointer subtraction is done. */
763 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 add_reg_note (insn, REG_CFA_ADJUST_CFA,
768 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
769 plus_constant (stack_pointer_rtx,
770 -size)));
772 /* Copy to stack pointer. Note that since we've already
773 changed the CFA to the frame pointer this operation
774 need not be annotated at all. */
775 if (AVR_HAVE_8BIT_SP)
777 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
779 else if (TARGET_NO_INTERRUPTS
780 || cfun->machine->is_signal
781 || cfun->machine->is_OS_main)
783 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
784 frame_pointer_rtx));
786 else if (cfun->machine->is_interrupt)
788 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
789 frame_pointer_rtx));
791 else
793 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
796 fp_plus_insns = get_insns ();
797 end_sequence ();
799 /* Method 2: Adjust stack pointer.  */
800 if (size <= 6)
802 rtx sp_plus_insns;
804 start_sequence ();
806 insn = plus_constant (stack_pointer_rtx, -size);
807 insn = emit_move_insn (stack_pointer_rtx, insn);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
814 end_sequence ();
816 /* Use shortest method. */
817 if (get_sequence_length (sp_plus_insns)
818 < get_sequence_length (fp_plus_insns))
819 emit_insn (sp_plus_insns);
820 else
821 emit_insn (fp_plus_insns);
823 else
824 emit_insn (fp_plus_insns);
826 cfun->machine->stack_usage += size;
831 if (flag_stack_usage_info)
832 current_function_static_stack_size = cfun->machine->stack_usage;
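/* Illustrative sketch (annotation, not verbatim compiler output): for a
   plain function with frame_pointer_needed and a small frame, method 1
   produces roughly

        push r28
        push r29
        in   r28,__SP_L__
        in   r29,__SP_H__
        sbiw r28,<size>
        in   __tmp_reg__,__SREG__
        cli
        out  __SP_H__,r29
        out  __SREG__,__tmp_reg__
        out  __SP_L__,r28
*/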
835 /* Output summary at end of function prologue. */
837 static void
838 avr_asm_function_end_prologue (FILE *file)
840 if (cfun->machine->is_naked)
842 fputs ("/* prologue: naked */\n", file);
844 else
846 if (cfun->machine->is_interrupt)
848 fputs ("/* prologue: Interrupt */\n", file);
850 else if (cfun->machine->is_signal)
852 fputs ("/* prologue: Signal */\n", file);
854 else
855 fputs ("/* prologue: function */\n", file);
857 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
858 get_frame_size());
859 fprintf (file, "/* stack size = %d */\n",
860 cfun->machine->stack_usage);
861 /* Create the stack-offset symbol here so all functions have it.  Add 1 to stack
862 usage for the offset so that SP + .L__stack_usage = return address.  */
863 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
867 /* Implement EPILOGUE_USES. */
870 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
872 if (reload_completed
873 && cfun->machine
874 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
875 return 1;
876 return 0;
879 /* Helper for expand_epilogue. Emit a pop of a byte register. */
881 static void
882 emit_pop_byte (unsigned regno)
884 rtx mem, reg;
886 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
887 mem = gen_frame_mem (QImode, mem);
888 reg = gen_rtx_REG (QImode, regno);
890 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
893 /* Output RTL epilogue. */
895 void
896 expand_epilogue (bool sibcall_p)
898 int reg;
899 int live_seq;
900 HARD_REG_SET set;
901 int minimize;
902 HOST_WIDE_INT size = get_frame_size();
904 /* epilogue: naked */
905 if (cfun->machine->is_naked)
907 gcc_assert (!sibcall_p);
909 emit_jump_insn (gen_return ());
910 return;
913 avr_regs_to_save (&set);
914 live_seq = sequent_regs_live ();
915 minimize = (TARGET_CALL_PROLOGUES
916 && !cfun->machine->is_interrupt
917 && !cfun->machine->is_signal
918 && !cfun->machine->is_OS_task
919 && !cfun->machine->is_OS_main
920 && live_seq);
922 if (minimize && (frame_pointer_needed || live_seq > 4))
924 if (frame_pointer_needed)
926 /* Get rid of frame. */
927 emit_move_insn(frame_pointer_rtx,
928 gen_rtx_PLUS (HImode, frame_pointer_rtx,
929 gen_int_mode (size, HImode)));
931 else
933 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
936 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
938 else
940 if (frame_pointer_needed)
942 if (size)
944 /* Try two methods to adjust stack and select shortest. */
945 rtx myfp;
946 rtx fp_plus_insns;
948 if (AVR_HAVE_8BIT_SP)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
954 else
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1: Adjust frame pointer.  */
961 start_sequence ();
963 emit_move_insn (myfp, plus_constant (myfp, size));
965 /* Copy to stack pointer. */
966 if (AVR_HAVE_8BIT_SP)
968 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
970 else if (TARGET_NO_INTERRUPTS
971 || cfun->machine->is_signal)
973 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
974 frame_pointer_rtx));
976 else if (cfun->machine->is_interrupt)
978 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
979 frame_pointer_rtx));
981 else
983 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
986 fp_plus_insns = get_insns ();
987 end_sequence ();
989 /* Method 2: Adjust stack pointer.  */
990 if (size <= 5)
992 rtx sp_plus_insns;
994 start_sequence ();
996 emit_move_insn (stack_pointer_rtx,
997 plus_constant (stack_pointer_rtx, size));
999 sp_plus_insns = get_insns ();
1000 end_sequence ();
1002 /* Use shortest method. */
1003 if (get_sequence_length (sp_plus_insns)
1004 < get_sequence_length (fp_plus_insns))
1005 emit_insn (sp_plus_insns);
1006 else
1007 emit_insn (fp_plus_insns);
1009 else
1010 emit_insn (fp_plus_insns);
1012 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1014 /* Restore previous frame_pointer. See expand_prologue for
1015 rationale for not using pophi. */
1016 emit_pop_byte (REG_Y + 1);
1017 emit_pop_byte (REG_Y);
1021 /* Restore used registers. */
1022 for (reg = 31; reg >= 0; --reg)
1023 if (TEST_HARD_REG_BIT (set, reg))
1024 emit_pop_byte (reg);
1026 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1028 /* Restore RAMPZ using tmp reg as scratch. */
1029 if (AVR_HAVE_RAMPZ
1030 && TEST_HARD_REG_BIT (set, REG_Z)
1031 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1033 emit_pop_byte (TMP_REGNO);
1034 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1035 tmp_reg_rtx);
1038 /* Restore SREG using tmp reg as scratch. */
1039 emit_pop_byte (TMP_REGNO);
1041 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1042 tmp_reg_rtx);
1044 /* Restore tmp REG. */
1045 emit_pop_byte (TMP_REGNO);
1047 /* Restore zero REG. */
1048 emit_pop_byte (ZERO_REGNO);
1051 if (!sibcall_p)
1052 emit_jump_insn (gen_return ());
1056 /* Output summary messages at beginning of function epilogue. */
1058 static void
1059 avr_asm_function_begin_epilogue (FILE *file)
1061 fprintf (file, "/* epilogue start */\n");
1065 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1067 static bool
1068 avr_cannot_modify_jumps_p (void)
1071 /* Naked functions must not have any instructions after
1072 their epilogue; see PR42240.  */
1074 if (reload_completed
1075 && cfun->machine
1076 && cfun->machine->is_naked)
1078 return true;
1081 return false;
1085 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1086 machine for a memory operand of mode MODE. */
1088 bool
1089 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1091 enum reg_class r = NO_REGS;
1093 if (TARGET_ALL_DEBUG)
1095 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1096 GET_MODE_NAME(mode),
1097 strict ? "(strict)": "",
1098 reload_completed ? "(reload_completed)": "",
1099 reload_in_progress ? "(reload_in_progress)": "",
1100 reg_renumber ? "(reg_renumber)" : "");
1101 if (GET_CODE (x) == PLUS
1102 && REG_P (XEXP (x, 0))
1103 && GET_CODE (XEXP (x, 1)) == CONST_INT
1104 && INTVAL (XEXP (x, 1)) >= 0
1105 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1106 && reg_renumber
1108 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1109 true_regnum (XEXP (x, 0)));
1110 debug_rtx (x);
1113 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1114 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1115 r = POINTER_REGS;
1116 else if (CONSTANT_ADDRESS_P (x))
1117 r = ALL_REGS;
1118 else if (GET_CODE (x) == PLUS
1119 && REG_P (XEXP (x, 0))
1120 && GET_CODE (XEXP (x, 1)) == CONST_INT
1121 && INTVAL (XEXP (x, 1)) >= 0)
1123 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1124 if (fit)
1126 if (! strict
1127 || REGNO (XEXP (x,0)) == REG_X
1128 || REGNO (XEXP (x,0)) == REG_Y
1129 || REGNO (XEXP (x,0)) == REG_Z)
1130 r = BASE_POINTER_REGS;
1131 if (XEXP (x,0) == frame_pointer_rtx
1132 || XEXP (x,0) == arg_pointer_rtx)
1133 r = BASE_POINTER_REGS;
1135 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1136 r = POINTER_Y_REGS;
1138 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1139 && REG_P (XEXP (x, 0))
1140 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1141 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1143 r = POINTER_REGS;
1145 if (TARGET_ALL_DEBUG)
1147 fprintf (stderr, " ret = %c\n", r + '0');
1149 return r == NO_REGS ? 0 : (int)r;
1152 /* Attempt to replace X with a valid
1153 memory address for an operand of mode MODE.  */
1156 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1158 x = oldx;
1159 if (TARGET_ALL_DEBUG)
1161 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1162 debug_rtx (oldx);
1165 if (GET_CODE (oldx) == PLUS
1166 && REG_P (XEXP (oldx,0)))
1168 if (REG_P (XEXP (oldx,1)))
1169 x = force_reg (GET_MODE (oldx), oldx);
1170 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1172 int offs = INTVAL (XEXP (oldx,1));
1173 if (frame_pointer_rtx != XEXP (oldx,0))
1174 if (offs > MAX_LD_OFFSET (mode))
1176 if (TARGET_ALL_DEBUG)
1177 fprintf (stderr, "force_reg (big offset)\n");
1178 x = force_reg (GET_MODE (oldx), oldx);
1182 return x;
1186 /* Helper function to output assembler code or, alternatively, track
1187 instruction sequence lengths.
1189 If PLEN == NULL:
1190 Output assembler code from template TPL with operands supplied
1191 by OPERANDS. This is just forwarding to output_asm_insn.
1193 If PLEN != NULL:
1194 Add N_WORDS to *PLEN.
1195 Don't output anything.
1198 static void
1199 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1201 if (NULL == plen)
1203 output_asm_insn (tpl, operands);
1205 else
1207 *plen += n_words;
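/* Usage sketch (annotation): callers can share one code path for output
   and length computation, e.g.

       avr_asm_len (AS2 (mov,%A0,%A1) CR_TAB
                    AS2 (mov,%B0,%B1), operands, plen, 2);

   prints both instructions when PLEN is NULL and merely adds 2 to *PLEN
   otherwise.  */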
1212 /* Return a pointer register name as a string. */
1214 static const char *
1215 ptrreg_to_str (int regno)
1217 switch (regno)
1219 case REG_X: return "X";
1220 case REG_Y: return "Y";
1221 case REG_Z: return "Z";
1222 default:
1223 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1225 return NULL;
1228 /* Return the condition name as a string.
1229 Used when constructing conditional jumps.  */
1231 static const char *
1232 cond_string (enum rtx_code code)
1234 switch (code)
1236 case NE:
1237 return "ne";
1238 case EQ:
1239 return "eq";
1240 case GE:
1241 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1242 return "pl";
1243 else
1244 return "ge";
1245 case LT:
1246 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1247 return "mi";
1248 else
1249 return "lt";
1250 case GEU:
1251 return "sh";
1252 case LTU:
1253 return "lo";
1254 default:
1255 gcc_unreachable ();
1259 /* Output ADDR to FILE as address. */
1261 void
1262 print_operand_address (FILE *file, rtx addr)
1264 switch (GET_CODE (addr))
1266 case REG:
1267 fprintf (file, ptrreg_to_str (REGNO (addr)));
1268 break;
1270 case PRE_DEC:
1271 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1272 break;
1274 case POST_INC:
1275 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1276 break;
1278 default:
1279 if (CONSTANT_ADDRESS_P (addr)
1280 && text_segment_operand (addr, VOIDmode))
1282 rtx x = addr;
1283 if (GET_CODE (x) == CONST)
1284 x = XEXP (x, 0);
1285 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1287 /* The assembler's gs() emits a word address.  Make the offset
1288 a byte offset inside gs() for the assembler.  This is
1289 needed because the more logical (constant+gs(sym)) is not
1290 accepted by gas.  For devices with 128K flash or less this is ok.  For
1291 larger devices it will create a trampoline to offset from the symbol,
1292 which may not be what the user really wanted.  */
1293 fprintf (file, "gs(");
1294 output_addr_const (file, XEXP (x,0));
1295 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1296 if (AVR_3_BYTE_PC)
1297 if (warning (0, "pointer offset from symbol may be incorrect"))
1299 output_addr_const (stderr, addr);
1300 fprintf(stderr,"\n");
1303 else
1305 fprintf (file, "gs(");
1306 output_addr_const (file, addr);
1307 fprintf (file, ")");
1310 else
1311 output_addr_const (file, addr);
1316 /* Output X as assembler operand to file FILE. */
1318 void
1319 print_operand (FILE *file, rtx x, int code)
1321 int abcd = 0;
1323 if (code >= 'A' && code <= 'D')
1324 abcd = code - 'A';
1326 if (code == '~')
1328 if (!AVR_HAVE_JMP_CALL)
1329 fputc ('r', file);
1331 else if (code == '!')
1333 if (AVR_HAVE_EIJMP_EICALL)
1334 fputc ('e', file);
1336 else if (REG_P (x))
1338 if (x == zero_reg_rtx)
1339 fprintf (file, "__zero_reg__");
1340 else
1341 fprintf (file, reg_names[true_regnum (x) + abcd]);
1343 else if (GET_CODE (x) == CONST_INT)
1344 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1345 else if (GET_CODE (x) == MEM)
1347 rtx addr = XEXP (x,0);
1348 if (code == 'm')
1350 if (!CONSTANT_P (addr))
1351 fatal_insn ("bad address, not a constant:", addr);
1352 /* An assembler template with the 'm' code addresses data, not the progmem section.  */
1353 if (text_segment_operand (addr, VOIDmode))
1354 if (warning ( 0, "accessing data memory with program memory address"))
1356 output_addr_const (stderr, addr);
1357 fprintf(stderr,"\n");
1359 output_addr_const (file, addr);
1361 else if (code == 'o')
1363 if (GET_CODE (addr) != PLUS)
1364 fatal_insn ("bad address, not (reg+disp):", addr);
1366 print_operand (file, XEXP (addr, 1), 0);
1368 else if (code == 'p' || code == 'r')
1370 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1371 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1373 if (code == 'p')
1374 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1375 else
1376 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1378 else if (GET_CODE (addr) == PLUS)
1380 print_operand_address (file, XEXP (addr,0));
1381 if (REGNO (XEXP (addr, 0)) == REG_X)
1382 fatal_insn ("internal compiler error. Bad address:"
1383 ,addr);
1384 fputc ('+', file);
1385 print_operand (file, XEXP (addr,1), code);
1387 else
1388 print_operand_address (file, addr);
1390 else if (code == 'x')
1392 /* Constant progmem address - like used in jmp or call */
1393 if (0 == text_segment_operand (x, VOIDmode))
1394 if (warning ( 0, "accessing program memory with data memory address"))
1396 output_addr_const (stderr, x);
1397 fprintf(stderr,"\n");
1399 /* Use a normal symbol for a direct address; no linker trampoline needed.  */
1400 output_addr_const (file, x);
1402 else if (GET_CODE (x) == CONST_DOUBLE)
1404 long val;
1405 REAL_VALUE_TYPE rv;
1406 if (GET_MODE (x) != SFmode)
1407 fatal_insn ("internal compiler error. Unknown mode:", x);
1408 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1409 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1410 fprintf (file, "0x%lx", val);
1412 else if (code == 'j')
1413 fputs (cond_string (GET_CODE (x)), file);
1414 else if (code == 'k')
1415 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1416 else
1417 print_operand_address (file, x);
1420 /* Update the condition code in the INSN. */
1422 void
1423 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1425 rtx set;
1427 switch (get_attr_cc (insn))
1429 case CC_NONE:
1430 /* Insn does not affect CC at all. */
1431 break;
1433 case CC_SET_N:
1434 CC_STATUS_INIT;
1435 break;
1437 case CC_SET_ZN:
1438 set = single_set (insn);
1439 CC_STATUS_INIT;
1440 if (set)
1442 cc_status.flags |= CC_NO_OVERFLOW;
1443 cc_status.value1 = SET_DEST (set);
1445 break;
1447 case CC_SET_CZN:
1448 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1449 The V flag may or may not be known but that's ok because
1450 alter_cond will change tests to use EQ/NE. */
1451 set = single_set (insn);
1452 CC_STATUS_INIT;
1453 if (set)
1455 cc_status.value1 = SET_DEST (set);
1456 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1458 break;
1460 case CC_COMPARE:
1461 set = single_set (insn);
1462 CC_STATUS_INIT;
1463 if (set)
1464 cc_status.value1 = SET_SRC (set);
1465 break;
1467 case CC_CLOBBER:
1468 /* Insn doesn't leave CC in a usable state. */
1469 CC_STATUS_INIT;
1471 /* Correct CC for ashrqi3 with a CONST_INT shift count in 1..5.  */
1472 set = single_set (insn);
1473 if (set)
1475 rtx src = SET_SRC (set);
1477 if (GET_CODE (src) == ASHIFTRT
1478 && GET_MODE (src) == QImode)
1480 rtx x = XEXP (src, 1);
1482 if (CONST_INT_P (x)
1483 && IN_RANGE (INTVAL (x), 1, 5))
1485 cc_status.value1 = SET_DEST (set);
1486 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1490 break;
1494 /* Return maximum number of consecutive registers of
1495 class CLASS needed to hold a value of mode MODE. */
1498 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1500 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
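/* Annotation: UNITS_PER_WORD is 1 on AVR, so this is simply the byte
   size of MODE, e.g. 4 consecutive registers for SImode.  */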
1503 /* Choose mode for jump insn:
1504 1 - relative jump in range -63 <= x <= 62 ;
1505 2 - relative jump in range -2046 <= x <= 2045 ;
1506 3 - absolute jump (only for ATmega[16]03). */
1509 avr_jump_mode (rtx x, rtx insn)
1511 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1512 ? XEXP (x, 0) : x));
1513 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1514 int jump_distance = cur_addr - dest_addr;
1516 if (-63 <= jump_distance && jump_distance <= 62)
1517 return 1;
1518 else if (-2046 <= jump_distance && jump_distance <= 2045)
1519 return 2;
1520 else if (AVR_HAVE_JMP_CALL)
1521 return 3;
1523 return 2;
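/* Annotation: mode 2 is also returned for out-of-range distances on
   devices without JMP/CALL; their flash is small enough that RJMP can
   reach any address by wrap-around.  */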
1526 /* Return AVR conditional branch commands.
1527 X is a comparison RTX.
1528 LEN is a number returned by the avr_jump_mode function.
1529 If REVERSE is nonzero then the condition code in X must be reversed.  */
1531 const char *
1532 ret_cond_branch (rtx x, int len, int reverse)
1534 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1536 switch (cond)
1538 case GT:
1539 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1540 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1541 AS1 (brpl,%0)) :
1542 len == 2 ? (AS1 (breq,.+4) CR_TAB
1543 AS1 (brmi,.+2) CR_TAB
1544 AS1 (rjmp,%0)) :
1545 (AS1 (breq,.+6) CR_TAB
1546 AS1 (brmi,.+4) CR_TAB
1547 AS1 (jmp,%0)));
1549 else
1550 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1551 AS1 (brge,%0)) :
1552 len == 2 ? (AS1 (breq,.+4) CR_TAB
1553 AS1 (brlt,.+2) CR_TAB
1554 AS1 (rjmp,%0)) :
1555 (AS1 (breq,.+6) CR_TAB
1556 AS1 (brlt,.+4) CR_TAB
1557 AS1 (jmp,%0)));
1558 case GTU:
1559 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1560 AS1 (brsh,%0)) :
1561 len == 2 ? (AS1 (breq,.+4) CR_TAB
1562 AS1 (brlo,.+2) CR_TAB
1563 AS1 (rjmp,%0)) :
1564 (AS1 (breq,.+6) CR_TAB
1565 AS1 (brlo,.+4) CR_TAB
1566 AS1 (jmp,%0)));
1567 case LE:
1568 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1569 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1570 AS1 (brmi,%0)) :
1571 len == 2 ? (AS1 (breq,.+2) CR_TAB
1572 AS1 (brpl,.+2) CR_TAB
1573 AS1 (rjmp,%0)) :
1574 (AS1 (breq,.+2) CR_TAB
1575 AS1 (brpl,.+4) CR_TAB
1576 AS1 (jmp,%0)));
1577 else
1578 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1579 AS1 (brlt,%0)) :
1580 len == 2 ? (AS1 (breq,.+2) CR_TAB
1581 AS1 (brge,.+2) CR_TAB
1582 AS1 (rjmp,%0)) :
1583 (AS1 (breq,.+2) CR_TAB
1584 AS1 (brge,.+4) CR_TAB
1585 AS1 (jmp,%0)));
1586 case LEU:
1587 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1588 AS1 (brlo,%0)) :
1589 len == 2 ? (AS1 (breq,.+2) CR_TAB
1590 AS1 (brsh,.+2) CR_TAB
1591 AS1 (rjmp,%0)) :
1592 (AS1 (breq,.+2) CR_TAB
1593 AS1 (brsh,.+4) CR_TAB
1594 AS1 (jmp,%0)));
1595 default:
1596 if (reverse)
1598 switch (len)
1600 case 1:
1601 return AS1 (br%k1,%0);
1602 case 2:
1603 return (AS1 (br%j1,.+2) CR_TAB
1604 AS1 (rjmp,%0));
1605 default:
1606 return (AS1 (br%j1,.+4) CR_TAB
1607 AS1 (jmp,%0));
1610 else
1612 switch (len)
1614 case 1:
1615 return AS1 (br%j1,%0);
1616 case 2:
1617 return (AS1 (br%k1,.+2) CR_TAB
1618 AS1 (rjmp,%0));
1619 default:
1620 return (AS1 (br%k1,.+4) CR_TAB
1621 AS1 (jmp,%0));
1625 return "";
1628 /* Predicate for an immediate operand that fits into a byte (8 bits).  */
1631 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1633 return (GET_CODE (op) == CONST_INT
1634 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1637 /* Output insn cost for next insn. */
1639 void
1640 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1641 int num_operands ATTRIBUTE_UNUSED)
1643 if (TARGET_ALL_DEBUG)
1645 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1646 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1650 /* Return 0 if undefined, 1 if always true or always false. */
1653 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1655 unsigned int max = (mode == QImode ? 0xff :
1656 mode == HImode ? 0xffff :
1657 mode == SImode ? 0xffffffff : 0);
1658 if (max && op && GET_CODE (x) == CONST_INT)
1660 if (unsigned_condition (op) != op)
1661 max >>= 1;
1663 if (max != (INTVAL (x) & max)
1664 && INTVAL (x) != 0xff)
1665 return 1;
1667 return 0;
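/* Example (annotation): an unsigned QImode comparison against 0x100 is
   degenerate, because the register value can never exceed 0xff.  */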
1671 /* Returns nonzero if REGNO is the number of a hard
1672 register in which function arguments are sometimes passed. */
1675 function_arg_regno_p(int r)
1677 return (r >= 8 && r <= 25);
1680 /* Initialize the variable CUM to the state at the beginning
1681 of the argument list.  */
1683 void
1684 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1685 tree fndecl ATTRIBUTE_UNUSED)
1687 cum->nregs = 18;
1688 cum->regno = FIRST_CUM_REG;
1689 if (!libname && stdarg_p (fntype))
1690 cum->nregs = 0;
1692 /* Assume the callee may be tail-called.  */
1694 cfun->machine->sibcall_fails = 0;
1697 /* Returns the number of registers to allocate for a function argument. */
1699 static int
1700 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1702 int size;
1704 if (mode == BLKmode)
1705 size = int_size_in_bytes (type);
1706 else
1707 size = GET_MODE_SIZE (mode);
1709 /* Align all function arguments to start in even-numbered registers.
1710 Odd-sized arguments leave holes above them. */
1712 return (size + 1) & ~1;
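/* Example (annotation): a char argument occupies 2 registers and a
   3-byte aggregate occupies 4, so every argument starts in an even
   register.  */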
1715 /* Controls whether a function argument is passed
1716 in a register, and which register. */
1718 static rtx
1719 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1720 const_tree type, bool named ATTRIBUTE_UNUSED)
1722 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1723 int bytes = avr_num_arg_regs (mode, type);
1725 if (cum->nregs && bytes <= cum->nregs)
1726 return gen_rtx_REG (mode, cum->regno - bytes);
1728 return NULL_RTX;
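/* Example (annotation): for a first argument of type int, bytes == 2 and
   cum->regno == 26, yielding gen_rtx_REG (HImode, 24), i.e. r24/r25.
   Returning NULL_RTX makes the argument go on the stack instead.  */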
1731 /* Update the summarizer variable CUM to advance past an argument
1732 in the argument list. */
1734 static void
1735 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1736 const_tree type, bool named ATTRIBUTE_UNUSED)
1738 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1739 int bytes = avr_num_arg_regs (mode, type);
1741 cum->nregs -= bytes;
1742 cum->regno -= bytes;
1744 /* A parameter is being passed in a call-saved register. As the original
1745 contents of these regs have to be restored before leaving the function,
1746 a function must not pass arguments in call-saved regs in order to get
1747 tail-called. */
1749 if (cum->regno >= 8
1750 && cum->nregs >= 0
1751 && !call_used_regs[cum->regno])
1753 /* FIXME: We ship info on failing tail-call in struct machine_function.
1754 This uses internals of calls.c:expand_call() and the way args_so_far
1755 is used. targetm.function_ok_for_sibcall() needs to be extended to
1756 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1757 dependent so that such an extension is not wanted. */
1759 cfun->machine->sibcall_fails = 1;
1762 /* Test if all registers needed by the ABI are actually available. If the
1763 user has fixed a GPR needed to pass an argument, an (implicit) function
1764 call would clobber that fixed register. See PR45099 for an example. */
1766 if (cum->regno >= 8
1767 && cum->nregs >= 0)
1769 int regno;
1771 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1772 if (fixed_regs[regno])
1773 error ("Register %s is needed to pass a parameter but is fixed",
1774 reg_names[regno]);
1777 if (cum->nregs <= 0)
1779 cum->nregs = 0;
1780 cum->regno = FIRST_CUM_REG;
1784 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1785 /* Decide whether we can make a sibling call to a function. DECL is the
1786 declaration of the function being targeted by the call and EXP is the
1787 CALL_EXPR representing the call. */
1789 static bool
1790 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1792 tree fntype_callee;
1794 /* Tail-calling must fail if callee-saved regs are used to pass
1795 function args. We must not tail-call when `epilogue_restores'
1796 is used. Unfortunately, we cannot tell at this point if that
1797 actually will happen or not, and we cannot step back from
1798 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1800 if (cfun->machine->sibcall_fails
1801 || TARGET_CALL_PROLOGUES)
1803 return false;
1806 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1808 if (decl_callee)
1810 decl_callee = TREE_TYPE (decl_callee);
1812 else
1814 decl_callee = fntype_callee;
1816 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1817 && METHOD_TYPE != TREE_CODE (decl_callee))
1819 decl_callee = TREE_TYPE (decl_callee);
1823 /* Ensure that caller and callee have compatible epilogues */
1825 if (interrupt_function_p (current_function_decl)
1826 || signal_function_p (current_function_decl)
1827 || avr_naked_function_p (decl_callee)
1828 || avr_naked_function_p (current_function_decl)
1829 /* FIXME: For OS_task and OS_main, we are over-conservative.
1830 This is due to missing documentation of these attributes
1831 and what they actually should do and should not do. */
1832 || (avr_OS_task_function_p (decl_callee)
1833 != avr_OS_task_function_p (current_function_decl))
1834 || (avr_OS_main_function_p (decl_callee)
1835 != avr_OS_main_function_p (current_function_decl)))
1837 return false;
1840 return true;
1843 /***********************************************************************
1844 Functions for outputting various mov's for various modes
1845 ************************************************************************/
1846 const char *
1847 output_movqi (rtx insn, rtx operands[], int *l)
1849 int dummy;
1850 rtx dest = operands[0];
1851 rtx src = operands[1];
1852 int *real_l = l;
1854 if (!l)
1855 l = &dummy;
1857 *l = 1;
1859 if (register_operand (dest, QImode))
1861 if (register_operand (src, QImode)) /* mov r,r */
1863 if (test_hard_reg_class (STACK_REG, dest))
1864 return AS2 (out,%0,%1);
1865 else if (test_hard_reg_class (STACK_REG, src))
1866 return AS2 (in,%0,%1);
1868 return AS2 (mov,%0,%1);
1870 else if (CONSTANT_P (src))
1872 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1873 return AS2 (ldi,%0,lo8(%1));
1875 if (GET_CODE (src) == CONST_INT)
1877 if (src == const0_rtx) /* mov r,L */
1878 return AS1 (clr,%0);
1879 else if (src == const1_rtx)
1881 *l = 2;
1882 return (AS1 (clr,%0) CR_TAB
1883 AS1 (inc,%0));
1885 else if (src == constm1_rtx)
1887 /* Immediate constants -1 to any register */
1888 *l = 2;
1889 return (AS1 (clr,%0) CR_TAB
1890 AS1 (dec,%0));
1892 else
1894 int bit_nr = exact_log2 (INTVAL (src));
1896 if (bit_nr >= 0)
1898 *l = 3;
1899 if (!real_l)
1900 output_asm_insn ((AS1 (clr,%0) CR_TAB
1901 "set"), operands);
1902 if (!real_l)
1903 avr_output_bld (operands, bit_nr);
1905 return "";
1910 /* Last resort: bounce the value through r31; larger than loading from memory.  */
1911 *l = 4;
1912 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1913 AS2 (ldi,r31,lo8(%1)) CR_TAB
1914 AS2 (mov,%0,r31) CR_TAB
1915 AS2 (mov,r31,__tmp_reg__));
1917 else if (GET_CODE (src) == MEM)
1918 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1920 else if (GET_CODE (dest) == MEM)
1922 const char *templ;
1924 if (src == const0_rtx)
1925 operands[1] = zero_reg_rtx;
1927 templ = out_movqi_mr_r (insn, operands, real_l);
1929 if (!real_l)
1930 output_asm_insn (templ, operands);
1932 operands[1] = src;
1934 return "";
1938 const char *
1939 output_movhi (rtx insn, rtx operands[], int *l)
1941 int dummy;
1942 rtx dest = operands[0];
1943 rtx src = operands[1];
1944 int *real_l = l;
1946 if (!l)
1947 l = &dummy;
1949 if (register_operand (dest, HImode))
1951 if (register_operand (src, HImode)) /* mov r,r */
1953 if (test_hard_reg_class (STACK_REG, dest))
1955 if (AVR_HAVE_8BIT_SP)
1956 return *l = 1, AS2 (out,__SP_L__,%A1);
1957 /* Use a plain write to the stack pointer if no interrupts
1958 are used.  */
1959 else if (TARGET_NO_INTERRUPTS)
1960 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1961 AS2 (out,__SP_L__,%A1));
1962 *l = 5;
1963 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1964 "cli" CR_TAB
1965 AS2 (out,__SP_H__,%B1) CR_TAB
1966 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1967 AS2 (out,__SP_L__,%A1));
1969 else if (test_hard_reg_class (STACK_REG, src))
1971 *l = 2;
1972 return (AS2 (in,%A0,__SP_L__) CR_TAB
1973 AS2 (in,%B0,__SP_H__));
1976 if (AVR_HAVE_MOVW)
1978 *l = 1;
1979 return (AS2 (movw,%0,%1));
1981 else
1983 *l = 2;
1984 return (AS2 (mov,%A0,%A1) CR_TAB
1985 AS2 (mov,%B0,%B1));
1988 else if (CONSTANT_P (src))
1990 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1992 *l = 2;
1993 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1994 AS2 (ldi,%B0,hi8(%1)));
1997 if (GET_CODE (src) == CONST_INT)
1999 if (src == const0_rtx) /* mov r,L */
2001 *l = 2;
2002 return (AS1 (clr,%A0) CR_TAB
2003 AS1 (clr,%B0));
2005 else if (src == const1_rtx)
2007 *l = 3;
2008 return (AS1 (clr,%A0) CR_TAB
2009 AS1 (clr,%B0) CR_TAB
2010 AS1 (inc,%A0));
2012 else if (src == constm1_rtx)
2014 /* Immediate constants -1 to any register */
2015 *l = 3;
2016 return (AS1 (clr,%0) CR_TAB
2017 AS1 (dec,%A0) CR_TAB
2018 AS2 (mov,%B0,%A0));
2020 else
2022 int bit_nr = exact_log2 (INTVAL (src));
2024 if (bit_nr >= 0)
2026 *l = 4;
2027 if (!real_l)
2028 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2029 AS1 (clr,%B0) CR_TAB
2030 "set"), operands);
2031 if (!real_l)
2032 avr_output_bld (operands, bit_nr);
2034 return "";
2038 if ((INTVAL (src) & 0xff) == 0)
2040 *l = 5;
2041 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2042 AS1 (clr,%A0) CR_TAB
2043 AS2 (ldi,r31,hi8(%1)) CR_TAB
2044 AS2 (mov,%B0,r31) CR_TAB
2045 AS2 (mov,r31,__tmp_reg__));
2047 else if ((INTVAL (src) & 0xff00) == 0)
2049 *l = 5;
2050 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2051 AS2 (ldi,r31,lo8(%1)) CR_TAB
2052 AS2 (mov,%A0,r31) CR_TAB
2053 AS1 (clr,%B0) CR_TAB
2054 AS2 (mov,r31,__tmp_reg__));
2058 /* Last resort, equal to loading from memory. */
2059 *l = 6;
2060 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2061 AS2 (ldi,r31,lo8(%1)) CR_TAB
2062 AS2 (mov,%A0,r31) CR_TAB
2063 AS2 (ldi,r31,hi8(%1)) CR_TAB
2064 AS2 (mov,%B0,r31) CR_TAB
2065 AS2 (mov,r31,__tmp_reg__));
2067 else if (GET_CODE (src) == MEM)
2068 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2070 else if (GET_CODE (dest) == MEM)
2072 const char *templ;
2074 if (src == const0_rtx)
2075 operands[1] = zero_reg_rtx;
2077 templ = out_movhi_mr_r (insn, operands, real_l);
2079 if (!real_l)
2080 output_asm_insn (templ, operands);
2082 operands[1] = src;
2083 return "";
2085 fatal_insn ("invalid insn:", insn);
2086 return "";
2089 const char *
2090 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2092 rtx dest = op[0];
2093 rtx src = op[1];
2094 rtx x = XEXP (src, 0);
2095 int dummy;
2097 if (!l)
2098 l = &dummy;
2100 if (CONSTANT_ADDRESS_P (x))
2102 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2104 *l = 1;
2105 return AS2 (in,%0,__SREG__);
2107 if (optimize > 0 && io_address_operand (x, QImode))
2109 *l = 1;
2110 return AS2 (in,%0,%m1-0x20);
2112 *l = 2;
2113 return AS2 (lds,%0,%m1);
2115 /* memory access by reg+disp */
2116 else if (GET_CODE (x) == PLUS
2117 && REG_P (XEXP (x,0))
2118 && GET_CODE (XEXP (x,1)) == CONST_INT)
2120 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2122 int disp = INTVAL (XEXP (x,1));
2123 if (REGNO (XEXP (x,0)) != REG_Y)
2124 fatal_insn ("incorrect insn:",insn);
2126 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2127 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2128 AS2 (ldd,%0,Y+63) CR_TAB
2129 AS2 (sbiw,r28,%o1-63));
2131 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2132 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2133 AS2 (ld,%0,Y) CR_TAB
2134 AS2 (subi,r28,lo8(%o1)) CR_TAB
2135 AS2 (sbci,r29,hi8(%o1)));
2137 else if (REGNO (XEXP (x,0)) == REG_X)
2139 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
2140 it, but I have seen this situation with extreme optimization options.  */
2141 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2142 || reg_unused_after (insn, XEXP (x,0)))
2143 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2144 AS2 (ld,%0,X));
2146 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2147 AS2 (ld,%0,X) CR_TAB
2148 AS2 (sbiw,r26,%o1));
2150 *l = 1;
2151 return AS2 (ldd,%0,%1);
2153 *l = 1;
2154 return AS2 (ld,%0,%1);
2157 const char *
2158 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2160 rtx dest = op[0];
2161 rtx src = op[1];
2162 rtx base = XEXP (src, 0);
2163 int reg_dest = true_regnum (dest);
2164 int reg_base = true_regnum (base);
2165 /* "volatile" forces reading low byte first, even if less efficient,
2166 for correct operation with 16-bit I/O registers. */
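/* A sketch of why the order matters, per the AVR hardware manuals:
   reading the low byte of a 16-bit I/O register (e.g. a 16-bit timer
   count) latches the high byte into an internal temporary register,
   so a correct read is

       in %A0, <REGL>   ; low byte first; hardware latches high byte
       in %B0, <REGH>   ; high byte, taken from the latch

   where <REGL>/<REGH> are placeholders for a real register pair.  */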
2167 int mem_volatile_p = MEM_VOLATILE_P (src);
2168 int tmp;
2170 if (!l)
2171 l = &tmp;
2173 if (reg_base > 0)
2175 if (reg_dest == reg_base) /* R = (R) */
2177 *l = 3;
2178 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2179 AS2 (ld,%B0,%1) CR_TAB
2180 AS2 (mov,%A0,__tmp_reg__));
2182 else if (reg_base == REG_X) /* (R26) */
2184 if (reg_unused_after (insn, base))
2186 *l = 2;
2187 return (AS2 (ld,%A0,X+) CR_TAB
2188 AS2 (ld,%B0,X));
2190 *l = 3;
2191 return (AS2 (ld,%A0,X+) CR_TAB
2192 AS2 (ld,%B0,X) CR_TAB
2193 AS2 (sbiw,r26,1));
2195 else /* (R) */
2197 *l = 2;
2198 return (AS2 (ld,%A0,%1) CR_TAB
2199 AS2 (ldd,%B0,%1+1));
2202 else if (GET_CODE (base) == PLUS) /* (R + i) */
2204 int disp = INTVAL (XEXP (base, 1));
2205 int reg_base = true_regnum (XEXP (base, 0));
2207 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2209 if (REGNO (XEXP (base, 0)) != REG_Y)
2210 fatal_insn ("incorrect insn:",insn);
2212 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2213 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2214 AS2 (ldd,%A0,Y+62) CR_TAB
2215 AS2 (ldd,%B0,Y+63) CR_TAB
2216 AS2 (sbiw,r28,%o1-62));
2218 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2219 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2220 AS2 (ld,%A0,Y) CR_TAB
2221 AS2 (ldd,%B0,Y+1) CR_TAB
2222 AS2 (subi,r28,lo8(%o1)) CR_TAB
2223 AS2 (sbci,r29,hi8(%o1)));
2225 if (reg_base == REG_X)
2227 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2228 it, but I have seen it happen with extreme
2229 optimization options. */
2231 *l = 4;
2232 if (reg_base == reg_dest)
2233 return (AS2 (adiw,r26,%o1) CR_TAB
2234 AS2 (ld,__tmp_reg__,X+) CR_TAB
2235 AS2 (ld,%B0,X) CR_TAB
2236 AS2 (mov,%A0,__tmp_reg__));
2238 return (AS2 (adiw,r26,%o1) CR_TAB
2239 AS2 (ld,%A0,X+) CR_TAB
2240 AS2 (ld,%B0,X) CR_TAB
2241 AS2 (sbiw,r26,%o1+1));
2244 if (reg_base == reg_dest)
2246 *l = 3;
2247 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2248 AS2 (ldd,%B0,%B1) CR_TAB
2249 AS2 (mov,%A0,__tmp_reg__));
2252 *l = 2;
2253 return (AS2 (ldd,%A0,%A1) CR_TAB
2254 AS2 (ldd,%B0,%B1));
2256 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2258 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2259 fatal_insn ("incorrect insn:", insn);
2261 if (mem_volatile_p)
2263 if (REGNO (XEXP (base, 0)) == REG_X)
2265 *l = 4;
2266 return (AS2 (sbiw,r26,2) CR_TAB
2267 AS2 (ld,%A0,X+) CR_TAB
2268 AS2 (ld,%B0,X) CR_TAB
2269 AS2 (sbiw,r26,1));
2271 else
2273 *l = 3;
2274 return (AS2 (sbiw,%r1,2) CR_TAB
2275 AS2 (ld,%A0,%p1) CR_TAB
2276 AS2 (ldd,%B0,%p1+1));
2280 *l = 2;
2281 return (AS2 (ld,%B0,%1) CR_TAB
2282 AS2 (ld,%A0,%1));
2284 else if (GET_CODE (base) == POST_INC) /* (R++) */
2286 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2287 fatal_insn ("incorrect insn:", insn);
2289 *l = 2;
2290 return (AS2 (ld,%A0,%1) CR_TAB
2291 AS2 (ld,%B0,%1));
2293 else if (CONSTANT_ADDRESS_P (base))
2295 if (optimize > 0 && io_address_operand (base, HImode))
2297 *l = 2;
2298 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2299 AS2 (in,%B0,%m1+1-0x20));
2301 *l = 4;
2302 return (AS2 (lds,%A0,%m1) CR_TAB
2303 AS2 (lds,%B0,%m1+1));
2306 fatal_insn ("unknown move insn:",insn);
2307 return "";
2310 const char *
2311 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2313 rtx dest = op[0];
2314 rtx src = op[1];
2315 rtx base = XEXP (src, 0);
2316 int reg_dest = true_regnum (dest);
2317 int reg_base = true_regnum (base);
2318 int tmp;
2320 if (!l)
2321 l = &tmp;
2323 if (reg_base > 0)
2325 if (reg_base == REG_X) /* (R26) */
2327 if (reg_dest == REG_X)
2328 /* "ld r26,-X" is undefined */
2329 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2330 AS2 (ld,r29,X) CR_TAB
2331 AS2 (ld,r28,-X) CR_TAB
2332 AS2 (ld,__tmp_reg__,-X) CR_TAB
2333 AS2 (sbiw,r26,1) CR_TAB
2334 AS2 (ld,r26,X) CR_TAB
2335 AS2 (mov,r27,__tmp_reg__));
2336 else if (reg_dest == REG_X - 2)
2337 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2338 AS2 (ld,%B0,X+) CR_TAB
2339 AS2 (ld,__tmp_reg__,X+) CR_TAB
2340 AS2 (ld,%D0,X) CR_TAB
2341 AS2 (mov,%C0,__tmp_reg__));
2342 else if (reg_unused_after (insn, base))
2343 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2344 AS2 (ld,%B0,X+) CR_TAB
2345 AS2 (ld,%C0,X+) CR_TAB
2346 AS2 (ld,%D0,X));
2347 else
2348 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2349 AS2 (ld,%B0,X+) CR_TAB
2350 AS2 (ld,%C0,X+) CR_TAB
2351 AS2 (ld,%D0,X) CR_TAB
2352 AS2 (sbiw,r26,3));
2354 else
2356 if (reg_dest == reg_base)
2357 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2358 AS2 (ldd,%C0,%1+2) CR_TAB
2359 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2360 AS2 (ld,%A0,%1) CR_TAB
2361 AS2 (mov,%B0,__tmp_reg__));
2362 else if (reg_base == reg_dest + 2)
2363 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2364 AS2 (ldd,%B0,%1+1) CR_TAB
2365 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2366 AS2 (ldd,%D0,%1+3) CR_TAB
2367 AS2 (mov,%C0,__tmp_reg__));
2368 else
2369 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2370 AS2 (ldd,%B0,%1+1) CR_TAB
2371 AS2 (ldd,%C0,%1+2) CR_TAB
2372 AS2 (ldd,%D0,%1+3));
2375 else if (GET_CODE (base) == PLUS) /* (R + i) */
2377 int disp = INTVAL (XEXP (base, 1));
2379 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2381 if (REGNO (XEXP (base, 0)) != REG_Y)
2382 fatal_insn ("incorrect insn:",insn);
2384 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2385 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2386 AS2 (ldd,%A0,Y+60) CR_TAB
2387 AS2 (ldd,%B0,Y+61) CR_TAB
2388 AS2 (ldd,%C0,Y+62) CR_TAB
2389 AS2 (ldd,%D0,Y+63) CR_TAB
2390 AS2 (sbiw,r28,%o1-60));
2392 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2393 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2394 AS2 (ld,%A0,Y) CR_TAB
2395 AS2 (ldd,%B0,Y+1) CR_TAB
2396 AS2 (ldd,%C0,Y+2) CR_TAB
2397 AS2 (ldd,%D0,Y+3) CR_TAB
2398 AS2 (subi,r28,lo8(%o1)) CR_TAB
2399 AS2 (sbci,r29,hi8(%o1)));
2402 reg_base = true_regnum (XEXP (base, 0));
2403 if (reg_base == REG_X)
2405 /* R = (X + d) */
2406 if (reg_dest == REG_X)
2408 *l = 7;
2409 /* "ld r26,-X" is undefined */
2410 return (AS2 (adiw,r26,%o1+3) CR_TAB
2411 AS2 (ld,r29,X) CR_TAB
2412 AS2 (ld,r28,-X) CR_TAB
2413 AS2 (ld,__tmp_reg__,-X) CR_TAB
2414 AS2 (sbiw,r26,1) CR_TAB
2415 AS2 (ld,r26,X) CR_TAB
2416 AS2 (mov,r27,__tmp_reg__));
2418 *l = 6;
2419 if (reg_dest == REG_X - 2)
2420 return (AS2 (adiw,r26,%o1) CR_TAB
2421 AS2 (ld,r24,X+) CR_TAB
2422 AS2 (ld,r25,X+) CR_TAB
2423 AS2 (ld,__tmp_reg__,X+) CR_TAB
2424 AS2 (ld,r27,X) CR_TAB
2425 AS2 (mov,r26,__tmp_reg__));
2427 return (AS2 (adiw,r26,%o1) CR_TAB
2428 AS2 (ld,%A0,X+) CR_TAB
2429 AS2 (ld,%B0,X+) CR_TAB
2430 AS2 (ld,%C0,X+) CR_TAB
2431 AS2 (ld,%D0,X) CR_TAB
2432 AS2 (sbiw,r26,%o1+3));
2434 if (reg_dest == reg_base)
2435 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2436 AS2 (ldd,%C0,%C1) CR_TAB
2437 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2438 AS2 (ldd,%A0,%A1) CR_TAB
2439 AS2 (mov,%B0,__tmp_reg__));
2440 else if (reg_dest == reg_base - 2)
2441 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2442 AS2 (ldd,%B0,%B1) CR_TAB
2443 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2444 AS2 (ldd,%D0,%D1) CR_TAB
2445 AS2 (mov,%C0,__tmp_reg__));
2446 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2447 AS2 (ldd,%B0,%B1) CR_TAB
2448 AS2 (ldd,%C0,%C1) CR_TAB
2449 AS2 (ldd,%D0,%D1));
2451 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2452 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2453 AS2 (ld,%C0,%1) CR_TAB
2454 AS2 (ld,%B0,%1) CR_TAB
2455 AS2 (ld,%A0,%1));
2456 else if (GET_CODE (base) == POST_INC) /* (R++) */
2457 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2458 AS2 (ld,%B0,%1) CR_TAB
2459 AS2 (ld,%C0,%1) CR_TAB
2460 AS2 (ld,%D0,%1));
2461 else if (CONSTANT_ADDRESS_P (base))
2462 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2463 AS2 (lds,%B0,%m1+1) CR_TAB
2464 AS2 (lds,%C0,%m1+2) CR_TAB
2465 AS2 (lds,%D0,%m1+3));
2467 fatal_insn ("unknown move insn:",insn);
2468 return "";
2471 const char *
2472 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2474 rtx dest = op[0];
2475 rtx src = op[1];
2476 rtx base = XEXP (dest, 0);
2477 int reg_base = true_regnum (base);
2478 int reg_src = true_regnum (src);
2479 int tmp;
2481 if (!l)
2482 l = &tmp;
2484 if (CONSTANT_ADDRESS_P (base))
2485 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2486 AS2 (sts,%m0+1,%B1) CR_TAB
2487 AS2 (sts,%m0+2,%C1) CR_TAB
2488 AS2 (sts,%m0+3,%D1));
2489 if (reg_base > 0) /* (r) */
2491 if (reg_base == REG_X) /* (R26) */
2493 if (reg_src == REG_X)
2495 /* "st X+,r26" is undefined */
2496 if (reg_unused_after (insn, base))
2497 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2498 AS2 (st,X,r26) CR_TAB
2499 AS2 (adiw,r26,1) CR_TAB
2500 AS2 (st,X+,__tmp_reg__) CR_TAB
2501 AS2 (st,X+,r28) CR_TAB
2502 AS2 (st,X,r29));
2503 else
2504 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2505 AS2 (st,X,r26) CR_TAB
2506 AS2 (adiw,r26,1) CR_TAB
2507 AS2 (st,X+,__tmp_reg__) CR_TAB
2508 AS2 (st,X+,r28) CR_TAB
2509 AS2 (st,X,r29) CR_TAB
2510 AS2 (sbiw,r26,3));
2512 else if (reg_base == reg_src + 2)
2514 if (reg_unused_after (insn, base))
2515 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2516 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2517 AS2 (st,%0+,%A1) CR_TAB
2518 AS2 (st,%0+,%B1) CR_TAB
2519 AS2 (st,%0+,__zero_reg__) CR_TAB
2520 AS2 (st,%0,__tmp_reg__) CR_TAB
2521 AS1 (clr,__zero_reg__));
2522 else
2523 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2524 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2525 AS2 (st,%0+,%A1) CR_TAB
2526 AS2 (st,%0+,%B1) CR_TAB
2527 AS2 (st,%0+,__zero_reg__) CR_TAB
2528 AS2 (st,%0,__tmp_reg__) CR_TAB
2529 AS1 (clr,__zero_reg__) CR_TAB
2530 AS2 (sbiw,r26,3));
2532 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2533 AS2 (st,%0+,%B1) CR_TAB
2534 AS2 (st,%0+,%C1) CR_TAB
2535 AS2 (st,%0,%D1) CR_TAB
2536 AS2 (sbiw,r26,3));
2538 else
2539 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2540 AS2 (std,%0+1,%B1) CR_TAB
2541 AS2 (std,%0+2,%C1) CR_TAB
2542 AS2 (std,%0+3,%D1));
2544 else if (GET_CODE (base) == PLUS) /* (R + i) */
2546 int disp = INTVAL (XEXP (base, 1));
2547 reg_base = REGNO (XEXP (base, 0));
2548 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2550 if (reg_base != REG_Y)
2551 fatal_insn ("incorrect insn:",insn);
2553 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2554 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2555 AS2 (std,Y+60,%A1) CR_TAB
2556 AS2 (std,Y+61,%B1) CR_TAB
2557 AS2 (std,Y+62,%C1) CR_TAB
2558 AS2 (std,Y+63,%D1) CR_TAB
2559 AS2 (sbiw,r28,%o0-60));
2561 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2562 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2563 AS2 (st,Y,%A1) CR_TAB
2564 AS2 (std,Y+1,%B1) CR_TAB
2565 AS2 (std,Y+2,%C1) CR_TAB
2566 AS2 (std,Y+3,%D1) CR_TAB
2567 AS2 (subi,r28,lo8(%o0)) CR_TAB
2568 AS2 (sbci,r29,hi8(%o0)));
2570 if (reg_base == REG_X)
2572 /* (X + d) = R */
2573 if (reg_src == REG_X)
2575 *l = 9;
2576 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2577 AS2 (mov,__zero_reg__,r27) CR_TAB
2578 AS2 (adiw,r26,%o0) CR_TAB
2579 AS2 (st,X+,__tmp_reg__) CR_TAB
2580 AS2 (st,X+,__zero_reg__) CR_TAB
2581 AS2 (st,X+,r28) CR_TAB
2582 AS2 (st,X,r29) CR_TAB
2583 AS1 (clr,__zero_reg__) CR_TAB
2584 AS2 (sbiw,r26,%o0+3));
2586 else if (reg_src == REG_X - 2)
2588 *l = 9;
2589 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2590 AS2 (mov,__zero_reg__,r27) CR_TAB
2591 AS2 (adiw,r26,%o0) CR_TAB
2592 AS2 (st,X+,r24) CR_TAB
2593 AS2 (st,X+,r25) CR_TAB
2594 AS2 (st,X+,__tmp_reg__) CR_TAB
2595 AS2 (st,X,__zero_reg__) CR_TAB
2596 AS1 (clr,__zero_reg__) CR_TAB
2597 AS2 (sbiw,r26,%o0+3));
2599 *l = 6;
2600 return (AS2 (adiw,r26,%o0) CR_TAB
2601 AS2 (st,X+,%A1) CR_TAB
2602 AS2 (st,X+,%B1) CR_TAB
2603 AS2 (st,X+,%C1) CR_TAB
2604 AS2 (st,X,%D1) CR_TAB
2605 AS2 (sbiw,r26,%o0+3));
2607 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2608 AS2 (std,%B0,%B1) CR_TAB
2609 AS2 (std,%C0,%C1) CR_TAB
2610 AS2 (std,%D0,%D1));
2612 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2613 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2614 AS2 (st,%0,%C1) CR_TAB
2615 AS2 (st,%0,%B1) CR_TAB
2616 AS2 (st,%0,%A1));
2617 else if (GET_CODE (base) == POST_INC) /* (R++) */
2618 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2619 AS2 (st,%0,%B1) CR_TAB
2620 AS2 (st,%0,%C1) CR_TAB
2621 AS2 (st,%0,%D1));
2622 fatal_insn ("unknown move insn:",insn);
2623 return "";
2626 const char *
2627 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2629 int dummy;
2630 rtx dest = operands[0];
2631 rtx src = operands[1];
2632 int *real_l = l;
2634 if (!l)
2635 l = &dummy;
2637 if (register_operand (dest, VOIDmode))
2639 if (register_operand (src, VOIDmode)) /* mov r,r */
2641 if (true_regnum (dest) > true_regnum (src))
2643 if (AVR_HAVE_MOVW)
2645 *l = 2;
2646 return (AS2 (movw,%C0,%C1) CR_TAB
2647 AS2 (movw,%A0,%A1));
2649 *l = 4;
2650 return (AS2 (mov,%D0,%D1) CR_TAB
2651 AS2 (mov,%C0,%C1) CR_TAB
2652 AS2 (mov,%B0,%B1) CR_TAB
2653 AS2 (mov,%A0,%A1));
2655 else
2657 if (AVR_HAVE_MOVW)
2659 *l = 2;
2660 return (AS2 (movw,%A0,%A1) CR_TAB
2661 AS2 (movw,%C0,%C1));
2663 *l = 4;
2664 return (AS2 (mov,%A0,%A1) CR_TAB
2665 AS2 (mov,%B0,%B1) CR_TAB
2666 AS2 (mov,%C0,%C1) CR_TAB
2667 AS2 (mov,%D0,%D1));
2670 else if (CONST_INT_P (src)
2671 || CONST_DOUBLE_P (src))
2673 return output_reload_insisf (insn, operands, clobber_reg, real_l);
2675 else if (CONSTANT_P (src))
2677 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2679 *l = 4;
2680 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2681 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2682 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2683 AS2 (ldi,%D0,hhi8(%1)));
2685 /* Last resort, better than loading from memory. */
2686 *l = 10;
2687 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2688 AS2 (ldi,r31,lo8(%1)) CR_TAB
2689 AS2 (mov,%A0,r31) CR_TAB
2690 AS2 (ldi,r31,hi8(%1)) CR_TAB
2691 AS2 (mov,%B0,r31) CR_TAB
2692 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2693 AS2 (mov,%C0,r31) CR_TAB
2694 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2695 AS2 (mov,%D0,r31) CR_TAB
2696 AS2 (mov,r31,__tmp_reg__));
2698 else if (GET_CODE (src) == MEM)
2699 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2701 else if (GET_CODE (dest) == MEM)
2703 const char *templ;
2705 if (src == CONST0_RTX (GET_MODE (dest)))
2706 operands[1] = zero_reg_rtx;
2708 templ = out_movsi_mr_r (insn, operands, real_l);
2710 if (!real_l)
2711 output_asm_insn (templ, operands);
2713 operands[1] = src;
2714 return "";
2716 fatal_insn ("invalid insn:", insn);
2717 return "";
2720 const char *
2721 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2723 rtx dest = op[0];
2724 rtx src = op[1];
2725 rtx x = XEXP (dest, 0);
2726 int dummy;
2728 if (!l)
2729 l = &dummy;
2731 if (CONSTANT_ADDRESS_P (x))
2733 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2735 *l = 1;
2736 return AS2 (out,__SREG__,%1);
2738 if (optimize > 0 && io_address_operand (x, QImode))
2740 *l = 1;
2741 return AS2 (out,%m0-0x20,%1);
2743 *l = 2;
2744 return AS2 (sts,%m0,%1);
2746 /* memory access by reg+disp */
2747 else if (GET_CODE (x) == PLUS
2748 && REG_P (XEXP (x,0))
2749 && GET_CODE (XEXP (x,1)) == CONST_INT)
2751 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2753 int disp = INTVAL (XEXP (x,1));
2754 if (REGNO (XEXP (x,0)) != REG_Y)
2755 fatal_insn ("incorrect insn:",insn);
2757 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2758 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2759 AS2 (std,Y+63,%1) CR_TAB
2760 AS2 (sbiw,r28,%o0-63));
2762 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2763 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2764 AS2 (st,Y,%1) CR_TAB
2765 AS2 (subi,r28,lo8(%o0)) CR_TAB
2766 AS2 (sbci,r29,hi8(%o0)));
2768 else if (REGNO (XEXP (x,0)) == REG_X)
2770 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2772 if (reg_unused_after (insn, XEXP (x,0)))
2773 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2774 AS2 (adiw,r26,%o0) CR_TAB
2775 AS2 (st,X,__tmp_reg__));
2777 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2778 AS2 (adiw,r26,%o0) CR_TAB
2779 AS2 (st,X,__tmp_reg__) CR_TAB
2780 AS2 (sbiw,r26,%o0));
2782 else
2784 if (reg_unused_after (insn, XEXP (x,0)))
2785 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2786 AS2 (st,X,%1));
2788 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2789 AS2 (st,X,%1) CR_TAB
2790 AS2 (sbiw,r26,%o0));
2793 *l = 1;
2794 return AS2 (std,%0,%1);
2796 *l = 1;
2797 return AS2 (st,%0,%1);
2800 const char *
2801 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2803 rtx dest = op[0];
2804 rtx src = op[1];
2805 rtx base = XEXP (dest, 0);
2806 int reg_base = true_regnum (base);
2807 int reg_src = true_regnum (src);
2808 /* "volatile" forces writing high byte first, even if less efficient,
2809 for correct operation with 16-bit I/O registers. */
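/* A sketch of the matching write protocol: writing the high byte of a
   16-bit I/O register only fills an internal temporary latch; the
   write of the low byte then commits both bytes at once, so stores
   must go high byte first:

       out <REGH>, %B1  ; high byte into the temporary latch
       out <REGL>, %A1  ; low byte write commits the full 16 bits

   <REGH>/<REGL> being placeholders for a real register pair.  */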
2810 int mem_volatile_p = MEM_VOLATILE_P (dest);
2811 int tmp;
2813 if (!l)
2814 l = &tmp;
2815 if (CONSTANT_ADDRESS_P (base))
2817 if (optimize > 0 && io_address_operand (base, HImode))
2819 *l = 2;
2820 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2821 AS2 (out,%m0-0x20,%A1));
2823 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2824 AS2 (sts,%m0,%A1));
2826 if (reg_base > 0)
2828 if (reg_base == REG_X)
2830 if (reg_src == REG_X)
2832 /* "st X+,r26" and "st -X,r26" are undefined. */
2833 if (!mem_volatile_p && reg_unused_after (insn, src))
2834 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2835 AS2 (st,X,r26) CR_TAB
2836 AS2 (adiw,r26,1) CR_TAB
2837 AS2 (st,X,__tmp_reg__));
2838 else
2839 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2840 AS2 (adiw,r26,1) CR_TAB
2841 AS2 (st,X,__tmp_reg__) CR_TAB
2842 AS2 (sbiw,r26,1) CR_TAB
2843 AS2 (st,X,r26));
2845 else
2847 if (!mem_volatile_p && reg_unused_after (insn, base))
2848 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2849 AS2 (st,X,%B1));
2850 else
2851 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2852 AS2 (st,X,%B1) CR_TAB
2853 AS2 (st,-X,%A1));
2856 else
2857 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2858 AS2 (st,%0,%A1));
2860 else if (GET_CODE (base) == PLUS)
2862 int disp = INTVAL (XEXP (base, 1));
2863 reg_base = REGNO (XEXP (base, 0));
2864 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2866 if (reg_base != REG_Y)
2867 fatal_insn ("incorrect insn:",insn);
2869 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2870 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2871 AS2 (std,Y+63,%B1) CR_TAB
2872 AS2 (std,Y+62,%A1) CR_TAB
2873 AS2 (sbiw,r28,%o0-62));
2875 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2876 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2877 AS2 (std,Y+1,%B1) CR_TAB
2878 AS2 (st,Y,%A1) CR_TAB
2879 AS2 (subi,r28,lo8(%o0)) CR_TAB
2880 AS2 (sbci,r29,hi8(%o0)));
2882 if (reg_base == REG_X)
2884 /* (X + d) = R */
2885 if (reg_src == REG_X)
2887 *l = 7;
2888 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2889 AS2 (mov,__zero_reg__,r27) CR_TAB
2890 AS2 (adiw,r26,%o0+1) CR_TAB
2891 AS2 (st,X,__zero_reg__) CR_TAB
2892 AS2 (st,-X,__tmp_reg__) CR_TAB
2893 AS1 (clr,__zero_reg__) CR_TAB
2894 AS2 (sbiw,r26,%o0));
2896 *l = 4;
2897 return (AS2 (adiw,r26,%o0+1) CR_TAB
2898 AS2 (st,X,%B1) CR_TAB
2899 AS2 (st,-X,%A1) CR_TAB
2900 AS2 (sbiw,r26,%o0));
2902 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2903 AS2 (std,%A0,%A1));
2905 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2906 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2907 AS2 (st,%0,%A1));
2908 else if (GET_CODE (base) == POST_INC) /* (R++) */
2910 if (mem_volatile_p)
2912 if (REGNO (XEXP (base, 0)) == REG_X)
2914 *l = 4;
2915 return (AS2 (adiw,r26,1) CR_TAB
2916 AS2 (st,X,%B1) CR_TAB
2917 AS2 (st,-X,%A1) CR_TAB
2918 AS2 (adiw,r26,2));
2920 else
2922 *l = 3;
2923 return (AS2 (std,%p0+1,%B1) CR_TAB
2924 AS2 (st,%p0,%A1) CR_TAB
2925 AS2 (adiw,%r0,2));
2929 *l = 2;
2930 return (AS2 (st,%0,%A1) CR_TAB
2931 AS2 (st,%0,%B1));
2933 fatal_insn ("unknown move insn:",insn);
2934 return "";
2937 /* Return true if the current function requires a frame pointer. */
2939 bool
2940 avr_frame_pointer_required_p (void)
2942 return (cfun->calls_alloca
2943 || crtl->args.info.nregs == 0
2944 || get_frame_size () > 0);
2947 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2949 static RTX_CODE
2950 compare_condition (rtx insn)
2952 rtx next = next_real_insn (insn);
2953 RTX_CODE cond = UNKNOWN;
2954 if (next && GET_CODE (next) == JUMP_INSN)
2956 rtx pat = PATTERN (next);
2957 rtx src = SET_SRC (pat);
2958 rtx t = XEXP (src, 0);
2959 cond = GET_CODE (t);
2961 return cond;
2964 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2966 static int
2967 compare_sign_p (rtx insn)
2969 RTX_CODE cond = compare_condition (insn);
2970 return (cond == GE || cond == LT);
2973 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2974 that needs to be swapped (GT, GTU, LE, LEU). */
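/* A sketch of the rationale: AVR provides BRGE/BRLT (signed) and
   BRSH/BRLO (unsigned) branches but no direct BRGT/BRLE equivalents,
   so "a > b" has to be compiled as "b < a", i.e. with the comparison
   operands swapped and the condition reversed.  */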
2977 compare_diff_p (rtx insn)
2979 RTX_CODE cond = compare_condition (insn);
2980 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2983 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2986 compare_eq_p (rtx insn)
2988 RTX_CODE cond = compare_condition (insn);
2989 return (cond == EQ || cond == NE);
2993 /* Output test instruction for HImode. */
2995 const char *
2996 out_tsthi (rtx insn, rtx op, int *l)
2998 if (compare_sign_p (insn))
3000 if (l) *l = 1;
3001 return AS1 (tst,%B0);
3003 if (reg_unused_after (insn, op)
3004 && compare_eq_p (insn))
3006 /* Faster than sbiw if we can clobber the operand. */
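/* (A note on the trick below: "or %A0,%B0" sets Z exactly when both
   bytes are zero, works on any register pair and takes one cycle,
   while SBIW takes two and only accepts r24/r26/r28/r30.  It only
   preserves the EQ/NE information, hence the compare_eq_p guard.)  */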
3007 if (l) *l = 1;
3008 return "or %A0,%B0";
3010 if (test_hard_reg_class (ADDW_REGS, op))
3012 if (l) *l = 1;
3013 return AS2 (sbiw,%0,0);
3015 if (l) *l = 2;
3016 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3017 AS2 (cpc,%B0,__zero_reg__));
3021 /* Output test instruction for SImode. */
3023 const char *
3024 out_tstsi (rtx insn, rtx op, int *l)
3026 if (compare_sign_p (insn))
3028 if (l) *l = 1;
3029 return AS1 (tst,%D0);
3031 if (test_hard_reg_class (ADDW_REGS, op))
3033 if (l) *l = 3;
3034 return (AS2 (sbiw,%A0,0) CR_TAB
3035 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3036 AS2 (cpc,%D0,__zero_reg__));
3038 if (l) *l = 4;
3039 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3040 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3041 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3042 AS2 (cpc,%D0,__zero_reg__));
3046 /* Generate asm equivalent for various shifts.
3047 Shift count is a CONST_INT, MEM or REG.
3048 This only handles cases that are not already
3049 carefully hand-optimized in ?sh??i3_out. */
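/* A sketch of the shape emitted for a run-time shift count held in a
   register (assuming the count must first be copied to scratch %3):

           mov  %3,%2
           rjmp 2f
       1:  <templ>        ; the one-bit shift template, e.g. "lsl %0"
       2:  dec  %3
           brpl 1b

   so a count of zero falls straight through without shifting.  */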
3051 void
3052 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3053 int *len, int t_len)
3055 rtx op[10];
3056 char str[500];
3057 int second_label = 1;
3058 int saved_in_tmp = 0;
3059 int use_zero_reg = 0;
3061 op[0] = operands[0];
3062 op[1] = operands[1];
3063 op[2] = operands[2];
3064 op[3] = operands[3];
3065 str[0] = 0;
3067 if (len)
3068 *len = 1;
3070 if (GET_CODE (operands[2]) == CONST_INT)
3072 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3073 int count = INTVAL (operands[2]);
3074 int max_len = 10; /* If larger than this, always use a loop. */
3076 if (count <= 0)
3078 if (len)
3079 *len = 0;
3080 return;
3083 if (count < 8 && !scratch)
3084 use_zero_reg = 1;
3086 if (optimize_size)
3087 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3089 if (t_len * count <= max_len)
3091 /* Output shifts inline with no loop - faster. */
3092 if (len)
3093 *len = t_len * count;
3094 else
3096 while (count-- > 0)
3097 output_asm_insn (templ, op);
3100 return;
3103 if (scratch)
3105 if (!len)
3106 strcat (str, AS2 (ldi,%3,%2));
3108 else if (use_zero_reg)
3110 /* Hack to save one word: use __zero_reg__ as loop counter.
3111 Set one bit, then shift in a loop until it is 0 again. */
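/* (With bit COUNT-1 set, each "lsr %3" halves the counter, so the
   shift template runs exactly COUNT times and __zero_reg__ ends up
   as 0 again, restoring its required invariant.)  */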
3113 op[3] = zero_reg_rtx;
3114 if (len)
3115 *len = 2;
3116 else
3117 strcat (str, ("set" CR_TAB
3118 AS2 (bld,%3,%2-1)));
3120 else
3122 /* No scratch register available, use one from LD_REGS (saved in
3123 __tmp_reg__) that doesn't overlap with registers to shift. */
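/* (The formula below picks a register in r16..r31, i.e. one that LDI
   can target, chosen so that it does not overlap the registers being
   shifted; its original value is preserved in __tmp_reg__ across the
   loop.)  */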
3125 op[3] = gen_rtx_REG (QImode,
3126 ((true_regnum (operands[0]) - 1) & 15) + 16);
3127 op[4] = tmp_reg_rtx;
3128 saved_in_tmp = 1;
3130 if (len)
3131 *len = 3; /* Includes "mov %3,%4" after the loop. */
3132 else
3133 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3134 AS2 (ldi,%3,%2)));
3137 second_label = 0;
3139 else if (GET_CODE (operands[2]) == MEM)
3141 rtx op_mov[10];
3143 op[3] = op_mov[0] = tmp_reg_rtx;
3144 op_mov[1] = op[2];
3146 if (len)
3147 out_movqi_r_mr (insn, op_mov, len);
3148 else
3149 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3151 else if (register_operand (operands[2], QImode))
3153 if (reg_unused_after (insn, operands[2]))
3154 op[3] = op[2];
3155 else
3157 op[3] = tmp_reg_rtx;
3158 if (!len)
3159 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3162 else
3163 fatal_insn ("bad shift insn:", insn);
3165 if (second_label)
3167 if (len)
3168 ++*len;
3169 else
3170 strcat (str, AS1 (rjmp,2f));
3173 if (len)
3174 *len += t_len + 2; /* template + dec + brXX */
3175 else
3177 strcat (str, "\n1:\t");
3178 strcat (str, templ);
3179 strcat (str, second_label ? "\n2:\t" : "\n\t");
3180 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3181 strcat (str, CR_TAB);
3182 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3183 if (saved_in_tmp)
3184 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3185 output_asm_insn (str, op);
3190 /* 8bit shift left ((char)x << i) */
3192 const char *
3193 ashlqi3_out (rtx insn, rtx operands[], int *len)
3195 if (GET_CODE (operands[2]) == CONST_INT)
3197 int k;
3199 if (!len)
3200 len = &k;
3202 switch (INTVAL (operands[2]))
3204 default:
3205 if (INTVAL (operands[2]) < 8)
3206 break;
3208 *len = 1;
3209 return AS1 (clr,%0);
3211 case 1:
3212 *len = 1;
3213 return AS1 (lsl,%0);
3215 case 2:
3216 *len = 2;
3217 return (AS1 (lsl,%0) CR_TAB
3218 AS1 (lsl,%0));
3220 case 3:
3221 *len = 3;
3222 return (AS1 (lsl,%0) CR_TAB
3223 AS1 (lsl,%0) CR_TAB
3224 AS1 (lsl,%0));
3226 case 4:
3227 if (test_hard_reg_class (LD_REGS, operands[0]))
3229 *len = 2;
3230 return (AS1 (swap,%0) CR_TAB
3231 AS2 (andi,%0,0xf0));
3233 *len = 4;
3234 return (AS1 (lsl,%0) CR_TAB
3235 AS1 (lsl,%0) CR_TAB
3236 AS1 (lsl,%0) CR_TAB
3237 AS1 (lsl,%0));
3239 case 5:
3240 if (test_hard_reg_class (LD_REGS, operands[0]))
3242 *len = 3;
3243 return (AS1 (swap,%0) CR_TAB
3244 AS1 (lsl,%0) CR_TAB
3245 AS2 (andi,%0,0xe0));
3247 *len = 5;
3248 return (AS1 (lsl,%0) CR_TAB
3249 AS1 (lsl,%0) CR_TAB
3250 AS1 (lsl,%0) CR_TAB
3251 AS1 (lsl,%0) CR_TAB
3252 AS1 (lsl,%0));
3254 case 6:
3255 if (test_hard_reg_class (LD_REGS, operands[0]))
3257 *len = 4;
3258 return (AS1 (swap,%0) CR_TAB
3259 AS1 (lsl,%0) CR_TAB
3260 AS1 (lsl,%0) CR_TAB
3261 AS2 (andi,%0,0xc0));
3263 *len = 6;
3264 return (AS1 (lsl,%0) CR_TAB
3265 AS1 (lsl,%0) CR_TAB
3266 AS1 (lsl,%0) CR_TAB
3267 AS1 (lsl,%0) CR_TAB
3268 AS1 (lsl,%0) CR_TAB
3269 AS1 (lsl,%0));
3271 case 7:
3272 *len = 3;
3273 return (AS1 (ror,%0) CR_TAB
3274 AS1 (clr,%0) CR_TAB
3275 AS1 (ror,%0));
3278 else if (CONSTANT_P (operands[2]))
3279 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3281 out_shift_with_cnt (AS1 (lsl,%0),
3282 insn, operands, len, 1);
3283 return "";
3287 /* 16bit shift left ((short)x << i) */
3289 const char *
3290 ashlhi3_out (rtx insn, rtx operands[], int *len)
3292 if (GET_CODE (operands[2]) == CONST_INT)
3294 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3295 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3296 int k;
3297 int *t = len;
3299 if (!len)
3300 len = &k;
3302 switch (INTVAL (operands[2]))
3304 default:
3305 if (INTVAL (operands[2]) < 16)
3306 break;
3308 *len = 2;
3309 return (AS1 (clr,%B0) CR_TAB
3310 AS1 (clr,%A0));
3312 case 4:
3313 if (optimize_size && scratch)
3314 break; /* 5 */
3315 if (ldi_ok)
3317 *len = 6;
3318 return (AS1 (swap,%A0) CR_TAB
3319 AS1 (swap,%B0) CR_TAB
3320 AS2 (andi,%B0,0xf0) CR_TAB
3321 AS2 (eor,%B0,%A0) CR_TAB
3322 AS2 (andi,%A0,0xf0) CR_TAB
3323 AS2 (eor,%B0,%A0));
3325 if (scratch)
3327 *len = 7;
3328 return (AS1 (swap,%A0) CR_TAB
3329 AS1 (swap,%B0) CR_TAB
3330 AS2 (ldi,%3,0xf0) CR_TAB
3331 "and %B0,%3" CR_TAB
3332 AS2 (eor,%B0,%A0) CR_TAB
3333 "and %A0,%3" CR_TAB
3334 AS2 (eor,%B0,%A0));
3336 break; /* optimize_size ? 6 : 8 */
3338 case 5:
3339 if (optimize_size)
3340 break; /* scratch ? 5 : 6 */
3341 if (ldi_ok)
3343 *len = 8;
3344 return (AS1 (lsl,%A0) CR_TAB
3345 AS1 (rol,%B0) CR_TAB
3346 AS1 (swap,%A0) CR_TAB
3347 AS1 (swap,%B0) CR_TAB
3348 AS2 (andi,%B0,0xf0) CR_TAB
3349 AS2 (eor,%B0,%A0) CR_TAB
3350 AS2 (andi,%A0,0xf0) CR_TAB
3351 AS2 (eor,%B0,%A0));
3353 if (scratch)
3355 *len = 9;
3356 return (AS1 (lsl,%A0) CR_TAB
3357 AS1 (rol,%B0) CR_TAB
3358 AS1 (swap,%A0) CR_TAB
3359 AS1 (swap,%B0) CR_TAB
3360 AS2 (ldi,%3,0xf0) CR_TAB
3361 "and %B0,%3" CR_TAB
3362 AS2 (eor,%B0,%A0) CR_TAB
3363 "and %A0,%3" CR_TAB
3364 AS2 (eor,%B0,%A0));
3366 break; /* 10 */
3368 case 6:
3369 if (optimize_size)
3370 break; /* scratch ? 5 : 6 */
3371 *len = 9;
3372 return (AS1 (clr,__tmp_reg__) CR_TAB
3373 AS1 (lsr,%B0) CR_TAB
3374 AS1 (ror,%A0) CR_TAB
3375 AS1 (ror,__tmp_reg__) CR_TAB
3376 AS1 (lsr,%B0) CR_TAB
3377 AS1 (ror,%A0) CR_TAB
3378 AS1 (ror,__tmp_reg__) CR_TAB
3379 AS2 (mov,%B0,%A0) CR_TAB
3380 AS2 (mov,%A0,__tmp_reg__));
3382 case 7:
3383 *len = 5;
3384 return (AS1 (lsr,%B0) CR_TAB
3385 AS2 (mov,%B0,%A0) CR_TAB
3386 AS1 (clr,%A0) CR_TAB
3387 AS1 (ror,%B0) CR_TAB
3388 AS1 (ror,%A0));
3390 case 8:
3391 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3392 AS1 (clr,%A0));
3394 case 9:
3395 *len = 3;
3396 return (AS2 (mov,%B0,%A0) CR_TAB
3397 AS1 (clr,%A0) CR_TAB
3398 AS1 (lsl,%B0));
3400 case 10:
3401 *len = 4;
3402 return (AS2 (mov,%B0,%A0) CR_TAB
3403 AS1 (clr,%A0) CR_TAB
3404 AS1 (lsl,%B0) CR_TAB
3405 AS1 (lsl,%B0));
3407 case 11:
3408 *len = 5;
3409 return (AS2 (mov,%B0,%A0) CR_TAB
3410 AS1 (clr,%A0) CR_TAB
3411 AS1 (lsl,%B0) CR_TAB
3412 AS1 (lsl,%B0) CR_TAB
3413 AS1 (lsl,%B0));
3415 case 12:
3416 if (ldi_ok)
3418 *len = 4;
3419 return (AS2 (mov,%B0,%A0) CR_TAB
3420 AS1 (clr,%A0) CR_TAB
3421 AS1 (swap,%B0) CR_TAB
3422 AS2 (andi,%B0,0xf0));
3424 if (scratch)
3426 *len = 5;
3427 return (AS2 (mov,%B0,%A0) CR_TAB
3428 AS1 (clr,%A0) CR_TAB
3429 AS1 (swap,%B0) CR_TAB
3430 AS2 (ldi,%3,0xf0) CR_TAB
3431 "and %B0,%3");
3433 *len = 6;
3434 return (AS2 (mov,%B0,%A0) CR_TAB
3435 AS1 (clr,%A0) CR_TAB
3436 AS1 (lsl,%B0) CR_TAB
3437 AS1 (lsl,%B0) CR_TAB
3438 AS1 (lsl,%B0) CR_TAB
3439 AS1 (lsl,%B0));
3441 case 13:
3442 if (ldi_ok)
3444 *len = 5;
3445 return (AS2 (mov,%B0,%A0) CR_TAB
3446 AS1 (clr,%A0) CR_TAB
3447 AS1 (swap,%B0) CR_TAB
3448 AS1 (lsl,%B0) CR_TAB
3449 AS2 (andi,%B0,0xe0));
3451 if (AVR_HAVE_MUL && scratch)
3453 *len = 5;
3454 return (AS2 (ldi,%3,0x20) CR_TAB
3455 AS2 (mul,%A0,%3) CR_TAB
3456 AS2 (mov,%B0,r0) CR_TAB
3457 AS1 (clr,%A0) CR_TAB
3458 AS1 (clr,__zero_reg__));
3460 if (optimize_size && scratch)
3461 break; /* 5 */
3462 if (scratch)
3464 *len = 6;
3465 return (AS2 (mov,%B0,%A0) CR_TAB
3466 AS1 (clr,%A0) CR_TAB
3467 AS1 (swap,%B0) CR_TAB
3468 AS1 (lsl,%B0) CR_TAB
3469 AS2 (ldi,%3,0xe0) CR_TAB
3470 "and %B0,%3");
3472 if (AVR_HAVE_MUL)
3474 *len = 6;
3475 return ("set" CR_TAB
3476 AS2 (bld,r1,5) CR_TAB
3477 AS2 (mul,%A0,r1) CR_TAB
3478 AS2 (mov,%B0,r0) CR_TAB
3479 AS1 (clr,%A0) CR_TAB
3480 AS1 (clr,__zero_reg__));
3482 *len = 7;
3483 return (AS2 (mov,%B0,%A0) CR_TAB
3484 AS1 (clr,%A0) CR_TAB
3485 AS1 (lsl,%B0) CR_TAB
3486 AS1 (lsl,%B0) CR_TAB
3487 AS1 (lsl,%B0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
3489 AS1 (lsl,%B0));
3491 case 14:
3492 if (AVR_HAVE_MUL && ldi_ok)
3494 *len = 5;
3495 return (AS2 (ldi,%B0,0x40) CR_TAB
3496 AS2 (mul,%A0,%B0) CR_TAB
3497 AS2 (mov,%B0,r0) CR_TAB
3498 AS1 (clr,%A0) CR_TAB
3499 AS1 (clr,__zero_reg__));
3501 if (AVR_HAVE_MUL && scratch)
3503 *len = 5;
3504 return (AS2 (ldi,%3,0x40) CR_TAB
3505 AS2 (mul,%A0,%3) CR_TAB
3506 AS2 (mov,%B0,r0) CR_TAB
3507 AS1 (clr,%A0) CR_TAB
3508 AS1 (clr,__zero_reg__));
3510 if (optimize_size && ldi_ok)
3512 *len = 5;
3513 return (AS2 (mov,%B0,%A0) CR_TAB
3514 AS2 (ldi,%A0,6) "\n1:\t"
3515 AS1 (lsl,%B0) CR_TAB
3516 AS1 (dec,%A0) CR_TAB
3517 AS1 (brne,1b));
3519 if (optimize_size && scratch)
3520 break; /* 5 */
3521 *len = 6;
3522 return (AS1 (clr,%B0) CR_TAB
3523 AS1 (lsr,%A0) CR_TAB
3524 AS1 (ror,%B0) CR_TAB
3525 AS1 (lsr,%A0) CR_TAB
3526 AS1 (ror,%B0) CR_TAB
3527 AS1 (clr,%A0));
3529 case 15:
3530 *len = 4;
3531 return (AS1 (clr,%B0) CR_TAB
3532 AS1 (lsr,%A0) CR_TAB
3533 AS1 (ror,%B0) CR_TAB
3534 AS1 (clr,%A0));
3536 len = t;
3538 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3539 AS1 (rol,%B0)),
3540 insn, operands, len, 2);
3541 return "";
3545 /* 32bit shift left ((long)x << i) */
3547 const char *
3548 ashlsi3_out (rtx insn, rtx operands[], int *len)
3550 if (GET_CODE (operands[2]) == CONST_INT)
3552 int k;
3553 int *t = len;
3555 if (!len)
3556 len = &k;
3558 switch (INTVAL (operands[2]))
3560 default:
3561 if (INTVAL (operands[2]) < 32)
3562 break;
3564 if (AVR_HAVE_MOVW)
3565 return *len = 3, (AS1 (clr,%D0) CR_TAB
3566 AS1 (clr,%C0) CR_TAB
3567 AS2 (movw,%A0,%C0));
3568 *len = 4;
3569 return (AS1 (clr,%D0) CR_TAB
3570 AS1 (clr,%C0) CR_TAB
3571 AS1 (clr,%B0) CR_TAB
3572 AS1 (clr,%A0));
3574 case 8:
3576 int reg0 = true_regnum (operands[0]);
3577 int reg1 = true_regnum (operands[1]);
3578 *len = 4;
3579 if (reg0 >= reg1)
3580 return (AS2 (mov,%D0,%C1) CR_TAB
3581 AS2 (mov,%C0,%B1) CR_TAB
3582 AS2 (mov,%B0,%A1) CR_TAB
3583 AS1 (clr,%A0));
3584 else
3585 return (AS1 (clr,%A0) CR_TAB
3586 AS2 (mov,%B0,%A1) CR_TAB
3587 AS2 (mov,%C0,%B1) CR_TAB
3588 AS2 (mov,%D0,%C1));
3591 case 16:
3593 int reg0 = true_regnum (operands[0]);
3594 int reg1 = true_regnum (operands[1]);
3595 if (reg0 + 2 == reg1)
3596 return *len = 2, (AS1 (clr,%B0) CR_TAB
3597 AS1 (clr,%A0));
3598 if (AVR_HAVE_MOVW)
3599 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3600 AS1 (clr,%B0) CR_TAB
3601 AS1 (clr,%A0));
3602 else
3603 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3604 AS2 (mov,%D0,%B1) CR_TAB
3605 AS1 (clr,%B0) CR_TAB
3606 AS1 (clr,%A0));
3609 case 24:
3610 *len = 4;
3611 return (AS2 (mov,%D0,%A1) CR_TAB
3612 AS1 (clr,%C0) CR_TAB
3613 AS1 (clr,%B0) CR_TAB
3614 AS1 (clr,%A0));
3616 case 31:
3617 *len = 6;
3618 return (AS1 (clr,%D0) CR_TAB
3619 AS1 (lsr,%A0) CR_TAB
3620 AS1 (ror,%D0) CR_TAB
3621 AS1 (clr,%C0) CR_TAB
3622 AS1 (clr,%B0) CR_TAB
3623 AS1 (clr,%A0));
3625 len = t;
3627 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3628 AS1 (rol,%B0) CR_TAB
3629 AS1 (rol,%C0) CR_TAB
3630 AS1 (rol,%D0)),
3631 insn, operands, len, 4);
3632 return "";
3635 /* 8bit arithmetic shift right ((signed char)x >> i) */
3637 const char *
3638 ashrqi3_out (rtx insn, rtx operands[], int *len)
3640 if (GET_CODE (operands[2]) == CONST_INT)
3642 int k;
3644 if (!len)
3645 len = &k;
3647 switch (INTVAL (operands[2]))
3649 case 1:
3650 *len = 1;
3651 return AS1 (asr,%0);
3653 case 2:
3654 *len = 2;
3655 return (AS1 (asr,%0) CR_TAB
3656 AS1 (asr,%0));
3658 case 3:
3659 *len = 3;
3660 return (AS1 (asr,%0) CR_TAB
3661 AS1 (asr,%0) CR_TAB
3662 AS1 (asr,%0));
3664 case 4:
3665 *len = 4;
3666 return (AS1 (asr,%0) CR_TAB
3667 AS1 (asr,%0) CR_TAB
3668 AS1 (asr,%0) CR_TAB
3669 AS1 (asr,%0));
3671 case 5:
3672 *len = 5;
3673 return (AS1 (asr,%0) CR_TAB
3674 AS1 (asr,%0) CR_TAB
3675 AS1 (asr,%0) CR_TAB
3676 AS1 (asr,%0) CR_TAB
3677 AS1 (asr,%0));
3679 case 6:
3680 *len = 4;
3681 return (AS2 (bst,%0,6) CR_TAB
3682 AS1 (lsl,%0) CR_TAB
3683 AS2 (sbc,%0,%0) CR_TAB
3684 AS2 (bld,%0,0));
3686 default:
3687 if (INTVAL (operands[2]) < 8)
3688 break;
3690 /* fall through */
3692 case 7:
3693 *len = 2;
3694 return (AS1 (lsl,%0) CR_TAB
3695 AS2 (sbc,%0,%0));
3698 else if (CONSTANT_P (operands[2]))
3699 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3701 out_shift_with_cnt (AS1 (asr,%0),
3702 insn, operands, len, 1);
3703 return "";
3707 /* 16bit arithmetic shift right ((signed short)x >> i) */
3709 const char *
3710 ashrhi3_out (rtx insn, rtx operands[], int *len)
3712 if (GET_CODE (operands[2]) == CONST_INT)
3714 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3715 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3716 int k;
3717 int *t = len;
3719 if (!len)
3720 len = &k;
3722 switch (INTVAL (operands[2]))
3724 case 4:
3725 case 5:
3726 /* XXX try to optimize this too? */
3727 break;
3729 case 6:
3730 if (optimize_size)
3731 break; /* scratch ? 5 : 6 */
3732 *len = 8;
3733 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3734 AS2 (mov,%A0,%B0) CR_TAB
3735 AS1 (lsl,__tmp_reg__) CR_TAB
3736 AS1 (rol,%A0) CR_TAB
3737 AS2 (sbc,%B0,%B0) CR_TAB
3738 AS1 (lsl,__tmp_reg__) CR_TAB
3739 AS1 (rol,%A0) CR_TAB
3740 AS1 (rol,%B0));
3742 case 7:
3743 *len = 4;
3744 return (AS1 (lsl,%A0) CR_TAB
3745 AS2 (mov,%A0,%B0) CR_TAB
3746 AS1 (rol,%A0) CR_TAB
3747 AS2 (sbc,%B0,%B0));
3749 case 8:
3751 int reg0 = true_regnum (operands[0]);
3752 int reg1 = true_regnum (operands[1]);
3754 if (reg0 == reg1)
3755 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3756 AS1 (lsl,%B0) CR_TAB
3757 AS2 (sbc,%B0,%B0));
3758 else
3759 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3760 AS1 (clr,%B0) CR_TAB
3761 AS2 (sbrc,%A0,7) CR_TAB
3762 AS1 (dec,%B0));
3765 case 9:
3766 *len = 4;
3767 return (AS2 (mov,%A0,%B0) CR_TAB
3768 AS1 (lsl,%B0) CR_TAB
3769 AS2 (sbc,%B0,%B0) CR_TAB
3770 AS1 (asr,%A0));
3772 case 10:
3773 *len = 5;
3774 return (AS2 (mov,%A0,%B0) CR_TAB
3775 AS1 (lsl,%B0) CR_TAB
3776 AS2 (sbc,%B0,%B0) CR_TAB
3777 AS1 (asr,%A0) CR_TAB
3778 AS1 (asr,%A0));
3780 case 11:
3781 if (AVR_HAVE_MUL && ldi_ok)
3783 *len = 5;
3784 return (AS2 (ldi,%A0,0x20) CR_TAB
3785 AS2 (muls,%B0,%A0) CR_TAB
3786 AS2 (mov,%A0,r1) CR_TAB
3787 AS2 (sbc,%B0,%B0) CR_TAB
3788 AS1 (clr,__zero_reg__));
3790 if (optimize_size && scratch)
3791 break; /* 5 */
3792 *len = 6;
3793 return (AS2 (mov,%A0,%B0) CR_TAB
3794 AS1 (lsl,%B0) CR_TAB
3795 AS2 (sbc,%B0,%B0) CR_TAB
3796 AS1 (asr,%A0) CR_TAB
3797 AS1 (asr,%A0) CR_TAB
3798 AS1 (asr,%A0));
3800 case 12:
3801 if (AVR_HAVE_MUL && ldi_ok)
3803 *len = 5;
3804 return (AS2 (ldi,%A0,0x10) CR_TAB
3805 AS2 (muls,%B0,%A0) CR_TAB
3806 AS2 (mov,%A0,r1) CR_TAB
3807 AS2 (sbc,%B0,%B0) CR_TAB
3808 AS1 (clr,__zero_reg__));
3810 if (optimize_size && scratch)
3811 break; /* 5 */
3812 *len = 7;
3813 return (AS2 (mov,%A0,%B0) CR_TAB
3814 AS1 (lsl,%B0) CR_TAB
3815 AS2 (sbc,%B0,%B0) CR_TAB
3816 AS1 (asr,%A0) CR_TAB
3817 AS1 (asr,%A0) CR_TAB
3818 AS1 (asr,%A0) CR_TAB
3819 AS1 (asr,%A0));
3821 case 13:
3822 if (AVR_HAVE_MUL && ldi_ok)
3824 *len = 5;
3825 return (AS2 (ldi,%A0,0x08) CR_TAB
3826 AS2 (muls,%B0,%A0) CR_TAB
3827 AS2 (mov,%A0,r1) CR_TAB
3828 AS2 (sbc,%B0,%B0) CR_TAB
3829 AS1 (clr,__zero_reg__));
3831 if (optimize_size)
3832 break; /* scratch ? 5 : 7 */
3833 *len = 8;
3834 return (AS2 (mov,%A0,%B0) CR_TAB
3835 AS1 (lsl,%B0) CR_TAB
3836 AS2 (sbc,%B0,%B0) CR_TAB
3837 AS1 (asr,%A0) CR_TAB
3838 AS1 (asr,%A0) CR_TAB
3839 AS1 (asr,%A0) CR_TAB
3840 AS1 (asr,%A0) CR_TAB
3841 AS1 (asr,%A0));
3843 case 14:
3844 *len = 5;
3845 return (AS1 (lsl,%B0) CR_TAB
3846 AS2 (sbc,%A0,%A0) CR_TAB
3847 AS1 (lsl,%B0) CR_TAB
3848 AS2 (mov,%B0,%A0) CR_TAB
3849 AS1 (rol,%A0));
3851 default:
3852 if (INTVAL (operands[2]) < 16)
3853 break;
3855 /* fall through */
3857 case 15:
3858 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3859 AS2 (sbc,%A0,%A0) CR_TAB
3860 AS2 (mov,%B0,%A0));
3862 len = t;
3864 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3865 AS1 (ror,%A0)),
3866 insn, operands, len, 2);
3867 return "";
3871 /* 32bit arithmetic shift right ((signed long)x >> i) */
3873 const char *
3874 ashrsi3_out (rtx insn, rtx operands[], int *len)
3876 if (GET_CODE (operands[2]) == CONST_INT)
3878 int k;
3879 int *t = len;
3881 if (!len)
3882 len = &k;
3884 switch (INTVAL (operands[2]))
3886 case 8:
3888 int reg0 = true_regnum (operands[0]);
3889 int reg1 = true_regnum (operands[1]);
3890 *len=6;
3891 if (reg0 <= reg1)
3892 return (AS2 (mov,%A0,%B1) CR_TAB
3893 AS2 (mov,%B0,%C1) CR_TAB
3894 AS2 (mov,%C0,%D1) CR_TAB
3895 AS1 (clr,%D0) CR_TAB
3896 AS2 (sbrc,%C0,7) CR_TAB
3897 AS1 (dec,%D0));
3898 else
3899 return (AS1 (clr,%D0) CR_TAB
3900 AS2 (sbrc,%D1,7) CR_TAB
3901 AS1 (dec,%D0) CR_TAB
3902 AS2 (mov,%C0,%D1) CR_TAB
3903 AS2 (mov,%B0,%C1) CR_TAB
3904 AS2 (mov,%A0,%B1));
3907 case 16:
3909 int reg0 = true_regnum (operands[0]);
3910 int reg1 = true_regnum (operands[1]);
3912 if (reg0 == reg1 + 2)
3913 return *len = 4, (AS1 (clr,%D0) CR_TAB
3914 AS2 (sbrc,%B0,7) CR_TAB
3915 AS1 (com,%D0) CR_TAB
3916 AS2 (mov,%C0,%D0));
3917 if (AVR_HAVE_MOVW)
3918 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3919 AS1 (clr,%D0) CR_TAB
3920 AS2 (sbrc,%B0,7) CR_TAB
3921 AS1 (com,%D0) CR_TAB
3922 AS2 (mov,%C0,%D0));
3923 else
3924 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3925 AS2 (mov,%A0,%C1) CR_TAB
3926 AS1 (clr,%D0) CR_TAB
3927 AS2 (sbrc,%B0,7) CR_TAB
3928 AS1 (com,%D0) CR_TAB
3929 AS2 (mov,%C0,%D0));
3932 case 24:
3933 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3934 AS1 (clr,%D0) CR_TAB
3935 AS2 (sbrc,%A0,7) CR_TAB
3936 AS1 (com,%D0) CR_TAB
3937 AS2 (mov,%B0,%D0) CR_TAB
3938 AS2 (mov,%C0,%D0));
3940 default:
3941 if (INTVAL (operands[2]) < 32)
3942 break;
3944 /* fall through */
3946 case 31:
3947 if (AVR_HAVE_MOVW)
3948 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3949 AS2 (sbc,%A0,%A0) CR_TAB
3950 AS2 (mov,%B0,%A0) CR_TAB
3951 AS2 (movw,%C0,%A0));
3952 else
3953 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3954 AS2 (sbc,%A0,%A0) CR_TAB
3955 AS2 (mov,%B0,%A0) CR_TAB
3956 AS2 (mov,%C0,%A0) CR_TAB
3957 AS2 (mov,%D0,%A0));
3959 len = t;
3961 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3962 AS1 (ror,%C0) CR_TAB
3963 AS1 (ror,%B0) CR_TAB
3964 AS1 (ror,%A0)),
3965 insn, operands, len, 4);
3966 return "";
3969 /* 8bit logic shift right ((unsigned char)x >> i) */
3971 const char *
3972 lshrqi3_out (rtx insn, rtx operands[], int *len)
3974 if (GET_CODE (operands[2]) == CONST_INT)
3976 int k;
3978 if (!len)
3979 len = &k;
3981 switch (INTVAL (operands[2]))
3983 default:
3984 if (INTVAL (operands[2]) < 8)
3985 break;
3987 *len = 1;
3988 return AS1 (clr,%0);
3990 case 1:
3991 *len = 1;
3992 return AS1 (lsr,%0);
3994 case 2:
3995 *len = 2;
3996 return (AS1 (lsr,%0) CR_TAB
3997 AS1 (lsr,%0));
3998 case 3:
3999 *len = 3;
4000 return (AS1 (lsr,%0) CR_TAB
4001 AS1 (lsr,%0) CR_TAB
4002 AS1 (lsr,%0));
4004 case 4:
4005 if (test_hard_reg_class (LD_REGS, operands[0]))
4007 *len=2;
4008 return (AS1 (swap,%0) CR_TAB
4009 AS2 (andi,%0,0x0f));
4011 *len = 4;
4012 return (AS1 (lsr,%0) CR_TAB
4013 AS1 (lsr,%0) CR_TAB
4014 AS1 (lsr,%0) CR_TAB
4015 AS1 (lsr,%0));
4017 case 5:
4018 if (test_hard_reg_class (LD_REGS, operands[0]))
4020 *len = 3;
4021 return (AS1 (swap,%0) CR_TAB
4022 AS1 (lsr,%0) CR_TAB
4023 AS2 (andi,%0,0x7));
4025 *len = 5;
4026 return (AS1 (lsr,%0) CR_TAB
4027 AS1 (lsr,%0) CR_TAB
4028 AS1 (lsr,%0) CR_TAB
4029 AS1 (lsr,%0) CR_TAB
4030 AS1 (lsr,%0));
4032 case 6:
4033 if (test_hard_reg_class (LD_REGS, operands[0]))
4035 *len = 4;
4036 return (AS1 (swap,%0) CR_TAB
4037 AS1 (lsr,%0) CR_TAB
4038 AS1 (lsr,%0) CR_TAB
4039 AS2 (andi,%0,0x3));
4041 *len = 6;
4042 return (AS1 (lsr,%0) CR_TAB
4043 AS1 (lsr,%0) CR_TAB
4044 AS1 (lsr,%0) CR_TAB
4045 AS1 (lsr,%0) CR_TAB
4046 AS1 (lsr,%0) CR_TAB
4047 AS1 (lsr,%0));
4049 case 7:
4050 *len = 3;
4051 return (AS1 (rol,%0) CR_TAB
4052 AS1 (clr,%0) CR_TAB
4053 AS1 (rol,%0));
4056 else if (CONSTANT_P (operands[2]))
4057 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4059 out_shift_with_cnt (AS1 (lsr,%0),
4060 insn, operands, len, 1);
4061 return "";
4064 /* 16bit logical shift right ((unsigned short)x >> i) */
4066 const char *
4067 lshrhi3_out (rtx insn, rtx operands[], int *len)
4069 if (GET_CODE (operands[2]) == CONST_INT)
4071 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4072 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4073 int k;
4074 int *t = len;
4076 if (!len)
4077 len = &k;
4079 switch (INTVAL (operands[2]))
4081 default:
4082 if (INTVAL (operands[2]) < 16)
4083 break;
4085 *len = 2;
4086 return (AS1 (clr,%B0) CR_TAB
4087 AS1 (clr,%A0));
4089 case 4:
4090 if (optimize_size && scratch)
4091 break; /* 5 */
4092 if (ldi_ok)
4094 *len = 6;
4095 return (AS1 (swap,%B0) CR_TAB
4096 AS1 (swap,%A0) CR_TAB
4097 AS2 (andi,%A0,0x0f) CR_TAB
4098 AS2 (eor,%A0,%B0) CR_TAB
4099 AS2 (andi,%B0,0x0f) CR_TAB
4100 AS2 (eor,%A0,%B0));
4102 if (scratch)
4104 *len = 7;
4105 return (AS1 (swap,%B0) CR_TAB
4106 AS1 (swap,%A0) CR_TAB
4107 AS2 (ldi,%3,0x0f) CR_TAB
4108 "and %A0,%3" CR_TAB
4109 AS2 (eor,%A0,%B0) CR_TAB
4110 "and %B0,%3" CR_TAB
4111 AS2 (eor,%A0,%B0));
4113 break; /* optimize_size ? 6 : 8 */
4115 case 5:
4116 if (optimize_size)
4117 break; /* scratch ? 5 : 6 */
4118 if (ldi_ok)
4120 *len = 8;
4121 return (AS1 (lsr,%B0) CR_TAB
4122 AS1 (ror,%A0) CR_TAB
4123 AS1 (swap,%B0) CR_TAB
4124 AS1 (swap,%A0) CR_TAB
4125 AS2 (andi,%A0,0x0f) CR_TAB
4126 AS2 (eor,%A0,%B0) CR_TAB
4127 AS2 (andi,%B0,0x0f) CR_TAB
4128 AS2 (eor,%A0,%B0));
4130 if (scratch)
4132 *len = 9;
4133 return (AS1 (lsr,%B0) CR_TAB
4134 AS1 (ror,%A0) CR_TAB
4135 AS1 (swap,%B0) CR_TAB
4136 AS1 (swap,%A0) CR_TAB
4137 AS2 (ldi,%3,0x0f) CR_TAB
4138 "and %A0,%3" CR_TAB
4139 AS2 (eor,%A0,%B0) CR_TAB
4140 "and %B0,%3" CR_TAB
4141 AS2 (eor,%A0,%B0));
4143 break; /* 10 */
4145 case 6:
4146 if (optimize_size)
4147 break; /* scratch ? 5 : 6 */
4148 *len = 9;
4149 return (AS1 (clr,__tmp_reg__) CR_TAB
4150 AS1 (lsl,%A0) CR_TAB
4151 AS1 (rol,%B0) CR_TAB
4152 AS1 (rol,__tmp_reg__) CR_TAB
4153 AS1 (lsl,%A0) CR_TAB
4154 AS1 (rol,%B0) CR_TAB
4155 AS1 (rol,__tmp_reg__) CR_TAB
4156 AS2 (mov,%A0,%B0) CR_TAB
4157 AS2 (mov,%B0,__tmp_reg__));
4159 case 7:
4160 *len = 5;
4161 return (AS1 (lsl,%A0) CR_TAB
4162 AS2 (mov,%A0,%B0) CR_TAB
4163 AS1 (rol,%A0) CR_TAB
4164 AS2 (sbc,%B0,%B0) CR_TAB
4165 AS1 (neg,%B0));
4167 case 8:
4168 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4169 AS1 (clr,%B0));
4171 case 9:
4172 *len = 3;
4173 return (AS2 (mov,%A0,%B0) CR_TAB
4174 AS1 (clr,%B0) CR_TAB
4175 AS1 (lsr,%A0));
4177 case 10:
4178 *len = 4;
4179 return (AS2 (mov,%A0,%B0) CR_TAB
4180 AS1 (clr,%B0) CR_TAB
4181 AS1 (lsr,%A0) CR_TAB
4182 AS1 (lsr,%A0));
4184 case 11:
4185 *len = 5;
4186 return (AS2 (mov,%A0,%B0) CR_TAB
4187 AS1 (clr,%B0) CR_TAB
4188 AS1 (lsr,%A0) CR_TAB
4189 AS1 (lsr,%A0) CR_TAB
4190 AS1 (lsr,%A0));
4192 case 12:
4193 if (ldi_ok)
4195 *len = 4;
4196 return (AS2 (mov,%A0,%B0) CR_TAB
4197 AS1 (clr,%B0) CR_TAB
4198 AS1 (swap,%A0) CR_TAB
4199 AS2 (andi,%A0,0x0f));
4201 if (scratch)
4203 *len = 5;
4204 return (AS2 (mov,%A0,%B0) CR_TAB
4205 AS1 (clr,%B0) CR_TAB
4206 AS1 (swap,%A0) CR_TAB
4207 AS2 (ldi,%3,0x0f) CR_TAB
4208 "and %A0,%3");
4210 *len = 6;
4211 return (AS2 (mov,%A0,%B0) CR_TAB
4212 AS1 (clr,%B0) CR_TAB
4213 AS1 (lsr,%A0) CR_TAB
4214 AS1 (lsr,%A0) CR_TAB
4215 AS1 (lsr,%A0) CR_TAB
4216 AS1 (lsr,%A0));
4218 case 13:
4219 if (ldi_ok)
4221 *len = 5;
4222 return (AS2 (mov,%A0,%B0) CR_TAB
4223 AS1 (clr,%B0) CR_TAB
4224 AS1 (swap,%A0) CR_TAB
4225 AS1 (lsr,%A0) CR_TAB
4226 AS2 (andi,%A0,0x07));
4228 if (AVR_HAVE_MUL && scratch)
4230 *len = 5;
4231 return (AS2 (ldi,%3,0x08) CR_TAB
4232 AS2 (mul,%B0,%3) CR_TAB
4233 AS2 (mov,%A0,r1) CR_TAB
4234 AS1 (clr,%B0) CR_TAB
4235 AS1 (clr,__zero_reg__));
4237 if (optimize_size && scratch)
4238 break; /* 5 */
4239 if (scratch)
4241 *len = 6;
4242 return (AS2 (mov,%A0,%B0) CR_TAB
4243 AS1 (clr,%B0) CR_TAB
4244 AS1 (swap,%A0) CR_TAB
4245 AS1 (lsr,%A0) CR_TAB
4246 AS2 (ldi,%3,0x07) CR_TAB
4247 "and %A0,%3");
4249 if (AVR_HAVE_MUL)
4251 *len = 6;
4252 return ("set" CR_TAB
4253 AS2 (bld,r1,3) CR_TAB
4254 AS2 (mul,%B0,r1) CR_TAB
4255 AS2 (mov,%A0,r1) CR_TAB
4256 AS1 (clr,%B0) CR_TAB
4257 AS1 (clr,__zero_reg__));
4259 *len = 7;
4260 return (AS2 (mov,%A0,%B0) CR_TAB
4261 AS1 (clr,%B0) CR_TAB
4262 AS1 (lsr,%A0) CR_TAB
4263 AS1 (lsr,%A0) CR_TAB
4264 AS1 (lsr,%A0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
4266 AS1 (lsr,%A0));
4268 case 14:
4269 if (AVR_HAVE_MUL && ldi_ok)
4271 *len = 5;
4272 return (AS2 (ldi,%A0,0x04) CR_TAB
4273 AS2 (mul,%B0,%A0) CR_TAB
4274 AS2 (mov,%A0,r1) CR_TAB
4275 AS1 (clr,%B0) CR_TAB
4276 AS1 (clr,__zero_reg__));
4278 if (AVR_HAVE_MUL && scratch)
4280 *len = 5;
4281 return (AS2 (ldi,%3,0x04) CR_TAB
4282 AS2 (mul,%B0,%3) CR_TAB
4283 AS2 (mov,%A0,r1) CR_TAB
4284 AS1 (clr,%B0) CR_TAB
4285 AS1 (clr,__zero_reg__));
4287 if (optimize_size && ldi_ok)
4289 *len = 5;
4290 return (AS2 (mov,%A0,%B0) CR_TAB
4291 AS2 (ldi,%B0,6) "\n1:\t"
4292 AS1 (lsr,%A0) CR_TAB
4293 AS1 (dec,%B0) CR_TAB
4294 AS1 (brne,1b));
4296 if (optimize_size && scratch)
4297 break; /* 5 */
4298 *len = 6;
4299 return (AS1 (clr,%A0) CR_TAB
4300 AS1 (lsl,%B0) CR_TAB
4301 AS1 (rol,%A0) CR_TAB
4302 AS1 (lsl,%B0) CR_TAB
4303 AS1 (rol,%A0) CR_TAB
4304 AS1 (clr,%B0));
4306 case 15:
4307 *len = 4;
4308 return (AS1 (clr,%A0) CR_TAB
4309 AS1 (lsl,%B0) CR_TAB
4310 AS1 (rol,%A0) CR_TAB
4311 AS1 (clr,%B0));
4313 len = t;
4315 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4316 AS1 (ror,%A0)),
4317 insn, operands, len, 2);
4318 return "";
4321 /* 32bit logical shift right ((unsigned long)x >> i) */
4323 const char *
4324 lshrsi3_out (rtx insn, rtx operands[], int *len)
4326 if (GET_CODE (operands[2]) == CONST_INT)
4328 int k;
4329 int *t = len;
4331 if (!len)
4332 len = &k;
4334 switch (INTVAL (operands[2]))
4336 default:
4337 if (INTVAL (operands[2]) < 32)
4338 break;
4340 if (AVR_HAVE_MOVW)
4341 return *len = 3, (AS1 (clr,%D0) CR_TAB
4342 AS1 (clr,%C0) CR_TAB
4343 AS2 (movw,%A0,%C0));
4344 *len = 4;
4345 return (AS1 (clr,%D0) CR_TAB
4346 AS1 (clr,%C0) CR_TAB
4347 AS1 (clr,%B0) CR_TAB
4348 AS1 (clr,%A0));
4350 case 8:
4352 int reg0 = true_regnum (operands[0]);
4353 int reg1 = true_regnum (operands[1]);
4354 *len = 4;
4355 if (reg0 <= reg1)
4356 return (AS2 (mov,%A0,%B1) CR_TAB
4357 AS2 (mov,%B0,%C1) CR_TAB
4358 AS2 (mov,%C0,%D1) CR_TAB
4359 AS1 (clr,%D0));
4360 else
4361 return (AS1 (clr,%D0) CR_TAB
4362 AS2 (mov,%C0,%D1) CR_TAB
4363 AS2 (mov,%B0,%C1) CR_TAB
4364 AS2 (mov,%A0,%B1));
4367 case 16:
4369 int reg0 = true_regnum (operands[0]);
4370 int reg1 = true_regnum (operands[1]);
4372 if (reg0 == reg1 + 2)
4373 return *len = 2, (AS1 (clr,%C0) CR_TAB
4374 AS1 (clr,%D0));
4375 if (AVR_HAVE_MOVW)
4376 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4377 AS1 (clr,%C0) CR_TAB
4378 AS1 (clr,%D0));
4379 else
4380 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4381 AS2 (mov,%A0,%C1) CR_TAB
4382 AS1 (clr,%C0) CR_TAB
4383 AS1 (clr,%D0));
4386 case 24:
4387 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4388 AS1 (clr,%B0) CR_TAB
4389 AS1 (clr,%C0) CR_TAB
4390 AS1 (clr,%D0));
4392 case 31:
4393 *len = 6;
4394 return (AS1 (clr,%A0) CR_TAB
4395 AS2 (sbrc,%D0,7) CR_TAB
4396 AS1 (inc,%A0) CR_TAB
4397 AS1 (clr,%B0) CR_TAB
4398 AS1 (clr,%C0) CR_TAB
4399 AS1 (clr,%D0));
4401 len = t;
4403 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4404 AS1 (ror,%C0) CR_TAB
4405 AS1 (ror,%B0) CR_TAB
4406 AS1 (ror,%A0)),
4407 insn, operands, len, 4);
4408 return "";
4411 /* Create RTL split patterns for byte-sized rotate expressions. This
4412 produces a series of move instructions and considers overlap situations.
4413 Overlapping non-HImode operands need a scratch register. */
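/* A worked example as a sketch: rotating an SImode value by 8 bits
   decomposes into the cyclic byte moves

       byte 0 -> byte 1 -> byte 2 -> byte 3 -> byte 0

   which are emitted below as individual subreg moves, falling back
   to the scratch register whenever the cycle would deadlock.  */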
4415 bool
4416 avr_rotate_bytes (rtx operands[])
4418 int i, j;
4419 enum machine_mode mode = GET_MODE (operands[0]);
4420 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4421 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4422 int num = INTVAL (operands[2]);
4423 rtx scratch = operands[3];
4424 /* Work out whether a byte or a word move is needed: odd-byte rotates need
4425 QImode; use a word move if no scratch is needed, else the scratch's size. */
4426 enum machine_mode move_mode = QImode;
4427 int move_size, offset, size;
4429 if (num & 0xf)
4430 move_mode = QImode;
4431 else if ((mode == SImode && !same_reg) || !overlapped)
4432 move_mode = HImode;
4433 else
4434 move_mode = GET_MODE (scratch);
4436 /* Force DImode rotates to use QImode moves, since other DImode moves are
4437 currently split into QImode moves, so forward propagation works better. */
4438 if (mode == DImode)
4439 move_mode = QImode;
4440 /* Make scratch smaller if needed. */
4441 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4442 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4444 move_size = GET_MODE_SIZE (move_mode);
4445 /* Number of bytes/words to rotate. */
4446 offset = (num >> 3) / move_size;
4447 /* Number of moves needed. */
4448 size = GET_MODE_SIZE (mode) / move_size;
4449 /* HImode byte swap is a special case that avoids a scratch register. */
4450 if (mode == HImode && same_reg)
4452 /* HImode byte swap, using xor. This is as quick as using scratch. */
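/* (This is the classic three-XOR exchange, in C terms
       lo ^= hi;  hi ^= lo;  lo ^= hi;
   which swaps the two bytes without needing a temporary.)  */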
4453 rtx src, dst;
4454 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4455 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4456 if (!rtx_equal_p (dst, src))
4458 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4459 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4460 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4463 else
4465 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4466 /* Create linked list of moves to determine move order. */
4467 struct {
4468 rtx src, dst;
4469 int links;
4470 } move[MAX_SIZE + 8];
4471 int blocked, moves;
4473 gcc_assert (size <= MAX_SIZE);
4474 /* Generate list of subreg moves. */
4475 for (i = 0; i < size; i++)
4477 int from = i;
4478 int to = (from + offset) % size;
4479 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4480 mode, from * move_size);
4481 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4482 mode, to * move_size);
4483 move[i].links = -1;
4485 /* Mark a dependence where the dst of one move is the src of another move.
4486 The first move is a conflict, as it must wait until the second is
4487 performed. We ignore moves to self; we catch those later. */
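/* A sketch: for a fully overlapping rotate with moves A -> B and
   B -> A, each move's dst is the other move's src, so both get a
   link and neither can be emitted first; the deadlock handling
   further below resolves this with the scratch register.  */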
4488 if (overlapped)
4489 for (i = 0; i < size; i++)
4490 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4491 for (j = 0; j < size; j++)
4492 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4494 /* The dst of move i is the src of move j. */
4495 move[i].links = j;
4496 break;
4499 blocked = -1;
4500 moves = 0;
4501 /* Go through move list and perform non-conflicting moves. As each
4502 non-overlapping move is made, it may remove other conflicts
4503 so the process is repeated until no conflicts remain. */
4506 blocked = -1;
4507 moves = 0;
4508 /* Emit each move whose dst is not also the src of another pending move,
4509 or where that other move has already been emitted. */
4510 for (i = 0; i < size; i++)
4511 if (move[i].src != NULL_RTX)
4513 if (move[i].links == -1
4514 || move[move[i].links].src == NULL_RTX)
4516 moves++;
4517 /* Ignore NOP moves to self. */
4518 if (!rtx_equal_p (move[i].dst, move[i].src))
4519 emit_move_insn (move[i].dst, move[i].src);
4521 /* Remove conflict from list. */
4522 move[i].src = NULL_RTX;
4524 else
4525 blocked = i;
4528 /* Check for deadlock. This is when no moves occurred and we have
4529 at least one blocked move. */
4530 if (moves == 0 && blocked != -1)
4532 /* Use the scratch register to break the deadlock:
4533 add a move that copies the dst of the blocked move into the
4534 scratch. Emitting that move breaks the chain deadlock,
4535 with the scratch register standing in for the real source. */
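/* Continuing the two-move cycle sketch with moves A -> B and B -> A:
   the fix emits

       B -> scratch,   A -> B,   scratch -> A

   i.e. one extra move through the scratch register unblocks the
   whole chain.  */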
4537 move[size].src = move[blocked].dst;
4538 move[size].dst = scratch;
4539 /* The scratch move is never blocked. */
4540 move[size].links = -1;
4541 /* Make sure we have a valid link. */
4542 gcc_assert (move[blocked].links != -1);
4543 /* Replace the src of the blocking move with the scratch reg. */
4544 move[move[blocked].links].src = scratch;
4545 /* Make it dependent on the scratch move occurring. */
4546 move[blocked].links = size;
4547 size++;
4550 while (blocked != -1);
4552 return true;
4555 /* Modifies the length assigned to instruction INSN.
4556 LEN is the initially computed length of the insn. */
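/* (This is the worker behind the ADJUST_INSN_LENGTH target macro in
   avr.h: it re-runs the output functions above in length-only mode,
   i.e. with a non-NULL length pointer and no assembler output, and
   returns the corrected length.)  */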
4559 adjust_insn_length (rtx insn, int len)
4561 rtx patt = PATTERN (insn);
4562 rtx set;
4564 if (GET_CODE (patt) == SET)
4566 rtx op[10];
4567 op[1] = SET_SRC (patt);
4568 op[0] = SET_DEST (patt);
4569 if (general_operand (op[1], VOIDmode)
4570 && general_operand (op[0], VOIDmode))
4572 switch (GET_MODE (op[0]))
4574 case QImode:
4575 output_movqi (insn, op, &len);
4576 break;
4577 case HImode:
4578 output_movhi (insn, op, &len);
4579 break;
4580 case SImode:
4581 case SFmode:
4582 output_movsisf (insn, op, NULL_RTX, &len);
4583 break;
4584 default:
4585 break;
4588 else if (op[0] == cc0_rtx && REG_P (op[1]))
4590 switch (GET_MODE (op[1]))
4592 case HImode: out_tsthi (insn, op[1], &len); break;
4593 case SImode: out_tstsi (insn, op[1], &len); break;
4594 default: break;
4597 else if (GET_CODE (op[1]) == AND)
4599 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4601 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4602 if (GET_MODE (op[1]) == SImode)
4603 len = (((mask & 0xff) != 0xff)
4604 + ((mask & 0xff00) != 0xff00)
4605 + ((mask & 0xff0000L) != 0xff0000L)
4606 + ((mask & 0xff000000L) != 0xff000000L));
4607 else if (GET_MODE (op[1]) == HImode)
4608 len = (((mask & 0xff) != 0xff)
4609 + ((mask & 0xff00) != 0xff00));
4612 else if (GET_CODE (op[1]) == IOR)
4614 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4616 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4617 if (GET_MODE (op[1]) == SImode)
4618 len = (((mask & 0xff) != 0)
4619 + ((mask & 0xff00) != 0)
4620 + ((mask & 0xff0000L) != 0)
4621 + ((mask & 0xff000000L) != 0));
4622 else if (GET_MODE (op[1]) == HImode)
4623 len = (((mask & 0xff) != 0)
4624 + ((mask & 0xff00) != 0));
4628 set = single_set (insn);
4629 if (set)
4631 rtx op[10];
4633 op[1] = SET_SRC (set);
4634 op[0] = SET_DEST (set);
4636 if (GET_CODE (patt) == PARALLEL
4637 && general_operand (op[1], VOIDmode)
4638 && general_operand (op[0], VOIDmode))
4640 if (XVECLEN (patt, 0) == 2)
4641 op[2] = XVECEXP (patt, 0, 1);
4643 switch (GET_MODE (op[0]))
4645 case QImode:
4646 len = 2;
4647 break;
4648 case HImode:
4649 output_reload_inhi (insn, op, &len);
4650 break;
4651 case SImode:
4652 case SFmode:
4653 output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
4654 break;
4655 default:
4656 break;
4659 else if (GET_CODE (op[1]) == ASHIFT
4660 || GET_CODE (op[1]) == ASHIFTRT
4661 || GET_CODE (op[1]) == LSHIFTRT)
4663 rtx ops[10];
4664 ops[0] = op[0];
4665 ops[1] = XEXP (op[1],0);
4666 ops[2] = XEXP (op[1],1);
4667 switch (GET_CODE (op[1]))
4669 case ASHIFT:
4670 switch (GET_MODE (op[0]))
4672 case QImode: ashlqi3_out (insn,ops,&len); break;
4673 case HImode: ashlhi3_out (insn,ops,&len); break;
4674 case SImode: ashlsi3_out (insn,ops,&len); break;
4675 default: break;
4677 break;
4678 case ASHIFTRT:
4679 switch (GET_MODE (op[0]))
4681 case QImode: ashrqi3_out (insn,ops,&len); break;
4682 case HImode: ashrhi3_out (insn,ops,&len); break;
4683 case SImode: ashrsi3_out (insn,ops,&len); break;
4684 default: break;
4686 break;
4687 case LSHIFTRT:
4688 switch (GET_MODE (op[0]))
4690 case QImode: lshrqi3_out (insn,ops,&len); break;
4691 case HImode: lshrhi3_out (insn,ops,&len); break;
4692 case SImode: lshrsi3_out (insn,ops,&len); break;
4693 default: break;
4695 break;
4696 default:
4697 break;
4701 return len;
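/* Worked example for the mask-based lengths above (a sketch): an
   SImode AND with mask 0xFFFF00FF leaves every byte except byte 1
   untouched, so LEN collapses to 1 instruction; an HImode IOR with
   mask 0x00FF only sets bits in the low byte, so LEN is 1 as well.  */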
4704 /* Return nonzero if register REG is dead after INSN. */
4706 int
4707 reg_unused_after (rtx insn, rtx reg)
4709 return (dead_or_set_p (insn, reg)
4710 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4713 /* Return nonzero if REG is not used after INSN.
4714 We assume REG is a reload reg, and therefore does
4715 not live past labels. It may live past calls or jumps though. */
4717 int
4718 _reg_unused_after (rtx insn, rtx reg)
4720 enum rtx_code code;
4721 rtx set;
4723 /* If the reg is set by this instruction, then it is safe for our
4724 case. Disregard the case where this is a store to memory, since
4725 we are checking a register used in the store address. */
4726 set = single_set (insn);
4727 if (set && GET_CODE (SET_DEST (set)) != MEM
4728 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4729 return 1;
4731 while ((insn = NEXT_INSN (insn)))
4733 rtx set;
4734 code = GET_CODE (insn);
4736 #if 0
4737 /* If this is a label that existed before reload, then the register
4738 is dead here. However, if this is a label added by reorg, then
4739 the register may still be live here. We can't tell the difference,
4740 so we just ignore labels completely. */
4741 if (code == CODE_LABEL)
4742 return 1;
4743 /* else */
4744 #endif
4746 if (!INSN_P (insn))
4747 continue;
4749 if (code == JUMP_INSN)
4750 return 0;
4752 /* If this is a sequence, we must handle them all at once.
4753 We could have for instance a call that sets the target register,
4754 and an insn in a delay slot that uses the register. In this case,
4755 we must return 0. */
4756 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4758 int i;
4759 int retval = 0;
4761 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4763 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4764 rtx set = single_set (this_insn);
4766 if (GET_CODE (this_insn) == CALL_INSN)
4767 code = CALL_INSN;
4768 else if (GET_CODE (this_insn) == JUMP_INSN)
4770 if (INSN_ANNULLED_BRANCH_P (this_insn))
4771 return 0;
4772 code = JUMP_INSN;
4775 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4776 return 0;
4777 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4779 if (GET_CODE (SET_DEST (set)) != MEM)
4780 retval = 1;
4781 else
4782 return 0;
4784 if (set == 0
4785 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4786 return 0;
4788 if (retval == 1)
4789 return 1;
4790 else if (code == JUMP_INSN)
4791 return 0;
4794 if (code == CALL_INSN)
4796 rtx tem;
4797 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4798 if (GET_CODE (XEXP (tem, 0)) == USE
4799 && REG_P (XEXP (XEXP (tem, 0), 0))
4800 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4801 return 0;
4802 if (call_used_regs[REGNO (reg)])
4803 return 1;
4806 set = single_set (insn);
4808 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4809 return 0;
4810 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4811 return GET_CODE (SET_DEST (set)) != MEM;
4812 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4813 return 0;
4815 return 1;
4818 /* Target hook for assembling integer objects. The AVR version needs
4819 special handling for references to certain labels. */
4821 static bool
4822 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4824 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4825 && text_segment_operand (x, VOIDmode))
4827 fputs ("\t.word\tgs(", asm_out_file);
4828 output_addr_const (asm_out_file, x);
4829 fputs (")\n", asm_out_file);
4830 return true;
4832 return default_assemble_integer (x, size, aligned_p);
4835 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4837 void
4838 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4841 /* If the function has the 'signal' or 'interrupt' attribute, test to
4842 make sure that the name of the function is "__vector_NN" so as to
4843 catch when the user misspells the interrupt vector name. */
4845 if (cfun->machine->is_interrupt)
4847 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4849 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4850 "%qs appears to be a misspelled interrupt handler",
4851 name);
4854 else if (cfun->machine->is_signal)
4856 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4858 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4859 "%qs appears to be a misspelled signal handler",
4860 name);
4864 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4865 ASM_OUTPUT_LABEL (file, name);
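/* Example (illustrative; the avr-libc specifics are an assumption
   here): handlers are normally declared through avr-libc's ISR()
   macro, which expands to a __vector_N name.  A hand-written
   declaration such as

       void timer_overflow (void) __attribute__ ((signal));

   does not carry the "__vector" prefix and therefore triggers the
   misspelled-handler warning above when the function is compiled.  */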
4869 /* Return value is nonzero if pseudos that have been
4870 assigned to registers of class CLASS would likely be spilled
4871 because registers of CLASS are needed for spill registers. */
4873 static bool
4874 avr_class_likely_spilled_p (reg_class_t c)
4876 return (c != ALL_REGS && c != ADDW_REGS);
4879 /* Valid attributes:
4880 progmem - put data into program memory;
4881 signal - make the function a hardware interrupt handler; after the
4882 function prologue, interrupts remain disabled;
4883 interrupt - make the function a hardware interrupt handler; after the
4884 function prologue, interrupts are enabled;
4885 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4887 Only the `progmem' attribute is valid for a type. */
4889 /* Handle a "progmem" attribute; arguments as in
4890 struct attribute_spec.handler. */
4891 static tree
4892 avr_handle_progmem_attribute (tree *node, tree name,
4893 tree args ATTRIBUTE_UNUSED,
4894 int flags ATTRIBUTE_UNUSED,
4895 bool *no_add_attrs)
4897 if (DECL_P (*node))
4899 if (TREE_CODE (*node) == TYPE_DECL)
4901 /* This is really a decl attribute, not a type attribute,
4902 but try to handle it for GCC 3.0 backwards compatibility. */
4904 tree type = TREE_TYPE (*node);
4905 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4906 tree newtype = build_type_attribute_variant (type, attr);
4908 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4909 TREE_TYPE (*node) = newtype;
4910 *no_add_attrs = true;
4912 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4914 *no_add_attrs = false;
4916 else
4918 warning (OPT_Wattributes, "%qE attribute ignored",
4919 name);
4920 *no_add_attrs = true;
4924 return NULL_TREE;
4927 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4928 struct attribute_spec.handler. */
4930 static tree
4931 avr_handle_fndecl_attribute (tree *node, tree name,
4932 tree args ATTRIBUTE_UNUSED,
4933 int flags ATTRIBUTE_UNUSED,
4934 bool *no_add_attrs)
4936 if (TREE_CODE (*node) != FUNCTION_DECL)
4938 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4939 name);
4940 *no_add_attrs = true;
4943 return NULL_TREE;
4946 static tree
4947 avr_handle_fntype_attribute (tree *node, tree name,
4948 tree args ATTRIBUTE_UNUSED,
4949 int flags ATTRIBUTE_UNUSED,
4950 bool *no_add_attrs)
4952 if (TREE_CODE (*node) != FUNCTION_TYPE)
4954 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4955 name);
4956 *no_add_attrs = true;
4959 return NULL_TREE;
4962 /* Look for attribute `progmem' in DECL;
4963 if found, return 1, otherwise 0. */
4965 int
4966 avr_progmem_p (tree decl, tree attributes)
4968 tree a;
4970 if (TREE_CODE (decl) != VAR_DECL)
4971 return 0;
4973 if (NULL_TREE
4974 != lookup_attribute ("progmem", attributes))
4975 return 1;
4977 a = decl;
4978 do
4979 a = TREE_TYPE (a);
4980 while (TREE_CODE (a) == ARRAY_TYPE);
4982 if (a == error_mark_node)
4983 return 0;
4985 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4986 return 1;
4988 return 0;
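/* Usage sketch (illustrative only; attribute handling details differ
   across GCC versions): both declarations below are recognized by
   avr_progmem_p, the first via the DECL attribute, the second via
   the attribute that avr_handle_progmem_attribute moved onto the
   typedef'd type:

       const char msg[] __attribute__ ((progmem)) = "hello";

       typedef char prog_char __attribute__ ((progmem));
       const prog_char msg2[] = "world";  */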
4991 /* Add the section attribute if the variable is in progmem. */
4993 static void
4994 avr_insert_attributes (tree node, tree *attributes)
4996 if (TREE_CODE (node) == VAR_DECL
4997 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4998 && avr_progmem_p (node, *attributes))
5000 tree node0 = node;
5002 /* For C++, we have to peel arrays in order to determine
5003 correctly whether the data is read-only. */
5005 do
5006 node0 = TREE_TYPE (node0);
5007 while (TREE_CODE (node0) == ARRAY_TYPE);
5009 if (error_mark_node == node0)
5010 return;
5012 if (TYPE_READONLY (node0))
5014 static const char dsec[] = ".progmem.data";
5016 *attributes = tree_cons (get_identifier ("section"),
5017 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5018 *attributes);
5020 else
5022 error ("variable %q+D must be const in order to be put into"
5023 " read-only section by means of %<__attribute__((progmem))%>",
5024 node);
5029 /* A get_unnamed_section callback for switching to progmem_section. */
5031 static void
5032 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5034 fprintf (asm_out_file,
5035 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5036 AVR_HAVE_JMP_CALL ? "a" : "ax");
5037 /* Should already be aligned; this is just to be safe if it isn't. */
5038 fprintf (asm_out_file, "\t.p2align 1\n");
5042 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5043 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5044 /* Track need of __do_clear_bss. */
5046 void
5047 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5048 const char *name, unsigned HOST_WIDE_INT size,
5049 unsigned int align, bool local_p)
5051 avr_need_clear_bss_p = true;
5053 if (local_p)
5054 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5055 else
5056 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5060 /* Unnamed section callback for data_section
5061 to track need of __do_copy_data. */
5063 static void
5064 avr_output_data_section_asm_op (const void *data)
5066 avr_need_copy_data_p = true;
5068 /* Dispatch to default. */
5069 output_section_asm_op (data);
5073 /* Unnamed section callback for bss_section
5074 to track need of __do_clear_bss. */
5076 static void
5077 avr_output_bss_section_asm_op (const void *data)
5079 avr_need_clear_bss_p = true;
5081 /* Dispatch to default. */
5082 output_section_asm_op (data);
5086 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5088 static void
5089 avr_asm_init_sections (void)
5091 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5092 avr_output_progmem_section_asm_op,
5093 NULL);
5095 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5096 and `avr_need_copy_data_p'. */
5098 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5099 data_section->unnamed.callback = avr_output_data_section_asm_op;
5100 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5104 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5105 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5107 static void
5108 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5110 if (!avr_need_copy_data_p)
5111 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5112 || 0 == strncmp (name, ".rodata", 7)
5113 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5115 if (!avr_need_clear_bss_p)
5116 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5118 default_elf_asm_named_section (name, flags, decl);
5121 static unsigned int
5122 avr_section_type_flags (tree decl, const char *name, int reloc)
5124 unsigned int flags = default_section_type_flags (decl, name, reloc);
5126 if (strncmp (name, ".noinit", 7) == 0)
5128 if (decl && TREE_CODE (decl) == VAR_DECL
5129 && DECL_INITIAL (decl) == NULL_TREE)
5130 flags |= SECTION_BSS; /* @nobits */
5131 else
5132 warning (0, "only uninitialized variables can be placed in the "
5133 ".noinit section");
5136 if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
5137 flags &= ~SECTION_WRITE;
5139 return flags;
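/* Example (illustrative): a variable such as

       int boot_count __attribute__ ((section (".noinit")));

   keeps SECTION_BSS (@nobits) by the code above, so it occupies no
   space in the load image; giving it an initializer would instead
   produce the "only uninitialized variables" warning.  */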
5143 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5145 static void
5146 avr_encode_section_info (tree decl, rtx rtl,
5147 int new_decl_p)
5149 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5150 readily available, see PR34734. So we postpone the warning
5151 about uninitialized data in program memory section until here. */
5153 if (new_decl_p
5154 && decl && DECL_P (decl)
5155 && NULL_TREE == DECL_INITIAL (decl)
5156 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5158 warning (OPT_Wuninitialized,
5159 "uninitialized variable %q+D put into "
5160 "program memory area", decl);
5163 default_encode_section_info (decl, rtl, new_decl_p);
5167 /* Implement `TARGET_ASM_FILE_START'. */
5168 /* Outputs some appropriate text to go at the start of an assembler
5169 file. */
5171 static void
5172 avr_file_start (void)
5174 if (avr_current_arch->asm_only)
5175 error ("MCU %qs supported for assembler only", avr_current_device->name);
5177 default_file_start ();
5179 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5180 fputs ("__SREG__ = 0x3f\n"
5181 "__SP_H__ = 0x3e\n"
5182 "__SP_L__ = 0x3d\n", asm_out_file);
5184 fputs ("__tmp_reg__ = 0\n"
5185 "__zero_reg__ = 1\n", asm_out_file);
5189 /* Implement `TARGET_ASM_FILE_END'. */
5190 /* Outputs to the stdio stream FILE some
5191 appropriate text to go at the end of an assembler file. */
5193 static void
5194 avr_file_end (void)
5196 /* Output these only if there is anything in the
5197 .data* / .rodata* / .gnu.linkonce.* or .bss*
5198 input section(s) - some code size can be saved by not
5199 linking in the initialization code from libgcc if the
5200 respective sections are empty. */
5202 if (avr_need_copy_data_p)
5203 fputs (".global __do_copy_data\n", asm_out_file);
5205 if (avr_need_clear_bss_p)
5206 fputs (".global __do_clear_bss\n", asm_out_file);
5209 /* Choose the order in which to allocate hard registers for
5210 pseudo-registers local to a basic block.
5212 Store the desired register order in the array `reg_alloc_order'.
5213 Element 0 should be the register to allocate first; element 1, the
5214 next register; and so on. */
5216 void
5217 order_regs_for_local_alloc (void)
5219 unsigned int i;
5220 static const int order_0[] = {
5221 24,25,
5222 18,19,
5223 20,21,
5224 22,23,
5225 30,31,
5226 26,27,
5227 28,29,
5228 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5229 0,1,
5230 32,33,34,35
5232 static const int order_1[] = {
5233 18,19,
5234 20,21,
5235 22,23,
5236 24,25,
5237 30,31,
5238 26,27,
5239 28,29,
5240 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5241 0,1,
5242 32,33,34,35
5244 static const int order_2[] = {
5245 25,24,
5246 23,22,
5247 21,20,
5248 19,18,
5249 30,31,
5250 26,27,
5251 28,29,
5252 17,16,
5253 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5254 1,0,
5255 32,33,34,35
5258 const int *order = (TARGET_ORDER_1 ? order_1 :
5259 TARGET_ORDER_2 ? order_2 :
5260 order_0);
5261 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5262 reg_alloc_order[i] = order[i];
5266 /* Implement `TARGET_REGISTER_MOVE_COST' */
5268 static int
5269 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5270 reg_class_t from, reg_class_t to)
5272 return (from == STACK_REG ? 6
5273 : to == STACK_REG ? 12
5274 : 2);
5278 /* Implement `TARGET_MEMORY_MOVE_COST' */
5280 static int
5281 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5282 bool in ATTRIBUTE_UNUSED)
5284 return (mode == QImode ? 2
5285 : mode == HImode ? 4
5286 : mode == SImode ? 8
5287 : mode == SFmode ? 8
5288 : 16);
5292 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
5293 cost of an RTX operand given its context. X is the rtx of the
5294 operand, MODE is its mode, and OUTER is the rtx_code of this
5295 operand's parent operator. */
5297 static int
5298 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5299 bool speed)
5301 enum rtx_code code = GET_CODE (x);
5302 int total;
5304 switch (code)
5306 case REG:
5307 case SUBREG:
5308 return 0;
5310 case CONST_INT:
5311 case CONST_DOUBLE:
5312 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5314 default:
5315 break;
5318 total = 0;
5319 avr_rtx_costs (x, code, outer, &total, speed);
5320 return total;
5323 /* The AVR backend's rtx_cost function. X is the rtx expression whose cost
5324 is to be calculated. Return true if the complete cost has been
5325 computed, and false if subexpressions should be scanned. In either
5326 case, *TOTAL contains the cost result. */
5328 static bool
5329 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5330 bool speed)
5332 enum rtx_code code = (enum rtx_code) codearg;
5333 enum machine_mode mode = GET_MODE (x);
5334 HOST_WIDE_INT val;
5336 switch (code)
5338 case CONST_INT:
5339 case CONST_DOUBLE:
5340 /* Immediate constants are as cheap as registers. */
5341 *total = 0;
5342 return true;
5344 case MEM:
5345 case CONST:
5346 case LABEL_REF:
5347 case SYMBOL_REF:
5348 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5349 return true;
5351 case NEG:
5352 switch (mode)
5354 case QImode:
5355 case SFmode:
5356 *total = COSTS_N_INSNS (1);
5357 break;
5359 case HImode:
5360 *total = COSTS_N_INSNS (3);
5361 break;
5363 case SImode:
5364 *total = COSTS_N_INSNS (7);
5365 break;
5367 default:
5368 return false;
5370 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5371 return true;
5373 case ABS:
5374 switch (mode)
5376 case QImode:
5377 case SFmode:
5378 *total = COSTS_N_INSNS (1);
5379 break;
5381 default:
5382 return false;
5384 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5385 return true;
5387 case NOT:
5388 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5389 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5390 return true;
5392 case ZERO_EXTEND:
5393 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5394 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5395 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5396 return true;
5398 case SIGN_EXTEND:
5399 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5400 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5401 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5402 return true;
5404 case PLUS:
5405 switch (mode)
5407 case QImode:
5408 *total = COSTS_N_INSNS (1);
5409 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5410 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5411 break;
5413 case HImode:
5414 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5416 *total = COSTS_N_INSNS (2);
5417 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5419 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5420 *total = COSTS_N_INSNS (1);
5421 else
5422 *total = COSTS_N_INSNS (2);
5423 break;
5425 case SImode:
5426 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5428 *total = COSTS_N_INSNS (4);
5429 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5431 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5432 *total = COSTS_N_INSNS (1);
5433 else
5434 *total = COSTS_N_INSNS (4);
5435 break;
5437 default:
5438 return false;
5440 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5441 return true;
5443 case MINUS:
5444 case AND:
5445 case IOR:
5446 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5447 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5448 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5449 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5450 return true;
5452 case XOR:
5453 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5454 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5455 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5456 return true;
5458 case MULT:
5459 switch (mode)
5461 case QImode:
5462 if (AVR_HAVE_MUL)
5463 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5464 else if (!speed)
5465 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5466 else
5467 return false;
5468 break;
5470 case HImode:
5471 if (AVR_HAVE_MUL)
5472 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5473 else if (!speed)
5474 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5475 else
5476 return false;
5477 break;
5479 default:
5480 return false;
5482 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5483 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5484 return true;
5486 case DIV:
5487 case MOD:
5488 case UDIV:
5489 case UMOD:
5490 if (!speed)
5491 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5492 else
5493 return false;
5494 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5495 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5496 return true;
5498 case ROTATE:
5499 switch (mode)
5501 case QImode:
5502 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5503 *total = COSTS_N_INSNS (1);
5505 break;
5507 case HImode:
5508 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5509 *total = COSTS_N_INSNS (3);
5511 break;
5513 case SImode:
5514 if (CONST_INT_P (XEXP (x, 1)))
5515 switch (INTVAL (XEXP (x, 1)))
5517 case 8:
5518 case 24:
5519 *total = COSTS_N_INSNS (5);
5520 break;
5521 case 16:
5522 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5523 break;
5525 break;
5527 default:
5528 return false;
5530 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5531 return true;
5533 case ASHIFT:
5534 switch (mode)
5536 case QImode:
5537 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5539 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5540 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5542 else
5544 val = INTVAL (XEXP (x, 1));
5545 if (val == 7)
5546 *total = COSTS_N_INSNS (3);
5547 else if (val >= 0 && val <= 7)
5548 *total = COSTS_N_INSNS (val);
5549 else
5550 *total = COSTS_N_INSNS (1);
5552 break;
5554 case HImode:
5555 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5557 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5558 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5560 else
5561 switch (INTVAL (XEXP (x, 1)))
5563 case 0:
5564 *total = 0;
5565 break;
5566 case 1:
5567 case 8:
5568 *total = COSTS_N_INSNS (2);
5569 break;
5570 case 9:
5571 *total = COSTS_N_INSNS (3);
5572 break;
5573 case 2:
5574 case 3:
5575 case 10:
5576 case 15:
5577 *total = COSTS_N_INSNS (4);
5578 break;
5579 case 7:
5580 case 11:
5581 case 12:
5582 *total = COSTS_N_INSNS (5);
5583 break;
5584 case 4:
5585 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5586 break;
5587 case 6:
5588 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5589 break;
5590 case 5:
5591 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5592 break;
5593 default:
5594 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5595 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5597 break;
5599 case SImode:
5600 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5602 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5603 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5605 else
5606 switch (INTVAL (XEXP (x, 1)))
5608 case 0:
5609 *total = 0;
5610 break;
5611 case 24:
5612 *total = COSTS_N_INSNS (3);
5613 break;
5614 case 1:
5615 case 8:
5616 case 16:
5617 *total = COSTS_N_INSNS (4);
5618 break;
5619 case 31:
5620 *total = COSTS_N_INSNS (6);
5621 break;
5622 case 2:
5623 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5624 break;
5625 default:
5626 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5629 break;
5631 default:
5632 return false;
5634 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5635 return true;
5637 case ASHIFTRT:
5638 switch (mode)
5640 case QImode:
5641 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5643 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5644 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5646 else
5648 val = INTVAL (XEXP (x, 1));
5649 if (val == 6)
5650 *total = COSTS_N_INSNS (4);
5651 else if (val == 7)
5652 *total = COSTS_N_INSNS (2);
5653 else if (val >= 0 && val <= 7)
5654 *total = COSTS_N_INSNS (val);
5655 else
5656 *total = COSTS_N_INSNS (1);
5658 break;
5660 case HImode:
5661 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5663 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5664 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5666 else
5667 switch (INTVAL (XEXP (x, 1)))
5669 case 0:
5670 *total = 0;
5671 break;
5672 case 1:
5673 *total = COSTS_N_INSNS (2);
5674 break;
5675 case 15:
5676 *total = COSTS_N_INSNS (3);
5677 break;
5678 case 2:
5679 case 7:
5680 case 8:
5681 case 9:
5682 *total = COSTS_N_INSNS (4);
5683 break;
5684 case 10:
5685 case 14:
5686 *total = COSTS_N_INSNS (5);
5687 break;
5688 case 11:
5689 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5690 break;
5691 case 12:
5692 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5693 break;
5694 case 6:
5695 case 13:
5696 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5697 break;
5698 default:
5699 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5700 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5702 break;
5704 case SImode:
5705 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5707 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5708 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5710 else
5711 switch (INTVAL (XEXP (x, 1)))
5713 case 0:
5714 *total = 0;
5715 break;
5716 case 1:
5717 *total = COSTS_N_INSNS (4);
5718 break;
5719 case 8:
5720 case 16:
5721 case 24:
5722 *total = COSTS_N_INSNS (6);
5723 break;
5724 case 2:
5725 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5726 break;
5727 case 31:
5728 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5729 break;
5730 default:
5731 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5732 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5734 break;
5736 default:
5737 return false;
5739 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5740 return true;
5742 case LSHIFTRT:
5743 switch (mode)
5745 case QImode:
5746 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5748 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5749 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5751 else
5753 val = INTVAL (XEXP (x, 1));
5754 if (val == 7)
5755 *total = COSTS_N_INSNS (3);
5756 else if (val >= 0 && val <= 7)
5757 *total = COSTS_N_INSNS (val);
5758 else
5759 *total = COSTS_N_INSNS (1);
5761 break;
5763 case HImode:
5764 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5766 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5767 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5769 else
5770 switch (INTVAL (XEXP (x, 1)))
5772 case 0:
5773 *total = 0;
5774 break;
5775 case 1:
5776 case 8:
5777 *total = COSTS_N_INSNS (2);
5778 break;
5779 case 9:
5780 *total = COSTS_N_INSNS (3);
5781 break;
5782 case 2:
5783 case 10:
5784 case 15:
5785 *total = COSTS_N_INSNS (4);
5786 break;
5787 case 7:
5788 case 11:
5789 *total = COSTS_N_INSNS (5);
5790 break;
5791 case 3:
5792 case 12:
5793 case 13:
5794 case 14:
5795 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5796 break;
5797 case 4:
5798 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5799 break;
5800 case 5:
5801 case 6:
5802 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5803 break;
5804 default:
5805 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5806 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5808 break;
5810 case SImode:
5811 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5813 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5814 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5816 else
5817 switch (INTVAL (XEXP (x, 1)))
5819 case 0:
5820 *total = 0;
5821 break;
5822 case 1:
5823 *total = COSTS_N_INSNS (4);
5824 break;
5825 case 2:
5826 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5827 break;
5828 case 8:
5829 case 16:
5830 case 24:
5831 *total = COSTS_N_INSNS (4);
5832 break;
5833 case 31:
5834 *total = COSTS_N_INSNS (6);
5835 break;
5836 default:
5837 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5838 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5840 break;
5842 default:
5843 return false;
5845 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5846 return true;
5848 case COMPARE:
5849 switch (GET_MODE (XEXP (x, 0)))
5851 case QImode:
5852 *total = COSTS_N_INSNS (1);
5853 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5854 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5855 break;
5857 case HImode:
5858 *total = COSTS_N_INSNS (2);
5859 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5860 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5861 else if (INTVAL (XEXP (x, 1)) != 0)
5862 *total += COSTS_N_INSNS (1);
5863 break;
5865 case SImode:
5866 *total = COSTS_N_INSNS (4);
5867 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5868 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5869 else if (INTVAL (XEXP (x, 1)) != 0)
5870 *total += COSTS_N_INSNS (3);
5871 break;
5873 default:
5874 return false;
5876 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5877 return true;
5879 default:
5880 break;
5882 return false;
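/* Worked example for the cost model above (a sketch): for
   (plus:HI (reg) (const_int 10)) the constant lies in [-63, 63], so
   the PLUS costs COSTS_N_INSNS (1) -- a single ADIW/SBIW -- whereas
   (plus:HI (reg) (reg)) costs COSTS_N_INSNS (2) plus the cost of the
   second operand, which is 0 for a plain REG.  */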
5885 /* Calculate the cost of a memory address. */
5887 static int
5888 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5890 if (GET_CODE (x) == PLUS
5891 && GET_CODE (XEXP (x,1)) == CONST_INT
5892 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5893 && INTVAL (XEXP (x,1)) >= 61)
5894 return 18;
5895 if (CONSTANT_ADDRESS_P (x))
5897 if (optimize > 0 && io_address_operand (x, QImode))
5898 return 2;
5899 return 4;
5901 return 4;
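/* Worked example (illustrative): (plus (reg Y) (const_int 62)) costs
   18 because the displacement is >= 61, while (plus (reg Y)
   (const_int 10)) falls through to the default cost of 4.  A
   constant address recognized by io_address_operand costs only 2
   when optimizing, as it can be accessed with IN/OUT.  */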
5904 /* Test for extra memory constraint 'Q'.
5905 It's a memory address based on the Y or Z pointer with a valid displacement. */
5907 int
5908 extra_constraint_Q (rtx x)
5910 if (GET_CODE (XEXP (x,0)) == PLUS
5911 && REG_P (XEXP (XEXP (x,0), 0))
5912 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5913 && (INTVAL (XEXP (XEXP (x,0), 1))
5914 <= MAX_LD_OFFSET (GET_MODE (x))))
5916 rtx xx = XEXP (XEXP (x,0), 0);
5917 int regno = REGNO (xx);
5918 if (TARGET_ALL_DEBUG)
5920 fprintf (stderr, ("extra_constraint:\n"
5921 "reload_completed: %d\n"
5922 "reload_in_progress: %d\n"),
5923 reload_completed, reload_in_progress);
5924 debug_rtx (x);
5926 if (regno >= FIRST_PSEUDO_REGISTER)
5927 return 1; /* allocate pseudos */
5928 else if (regno == REG_Z || regno == REG_Y)
5929 return 1; /* strictly check */
5930 else if (xx == frame_pointer_rtx
5931 || xx == arg_pointer_rtx)
5932 return 1; /* XXX frame & arg pointer checks */
5934 return 0;
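/* Example (illustrative): (mem:HI (plus (reg REG_Y) (const_int 10)))
   satisfies 'Q' because the displacement does not exceed
   MAX_LD_OFFSET for HImode, i.e. it fits the LDD/STD "Y+q"
   addressing mode.  */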
5937 /* Convert condition code CONDITION to the valid AVR condition code. */
5939 RTX_CODE
5940 avr_normalize_condition (RTX_CODE condition)
5942 switch (condition)
5944 case GT:
5945 return GE;
5946 case GTU:
5947 return GEU;
5948 case LE:
5949 return LT;
5950 case LEU:
5951 return LTU;
5952 default:
5953 gcc_unreachable ();
5957 /* This function optimizes conditional jumps. */
5959 static void
5960 avr_reorg (void)
5962 rtx insn, pattern;
5964 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5966 if (! (GET_CODE (insn) == INSN
5967 || GET_CODE (insn) == CALL_INSN
5968 || GET_CODE (insn) == JUMP_INSN)
5969 || !single_set (insn))
5970 continue;
5972 pattern = PATTERN (insn);
5974 if (GET_CODE (pattern) == PARALLEL)
5975 pattern = XVECEXP (pattern, 0, 0);
5976 if (GET_CODE (pattern) == SET
5977 && SET_DEST (pattern) == cc0_rtx
5978 && compare_diff_p (insn))
5980 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5982 /* Now we are working on the compare insn. */
5984 pattern = SET_SRC (pattern);
5985 if (true_regnum (XEXP (pattern,0)) >= 0
5986 && true_regnum (XEXP (pattern,1)) >= 0 )
5988 rtx x = XEXP (pattern,0);
5989 rtx next = next_real_insn (insn);
5990 rtx pat = PATTERN (next);
5991 rtx src = SET_SRC (pat);
5992 rtx t = XEXP (src,0);
5993 PUT_CODE (t, swap_condition (GET_CODE (t)));
5994 XEXP (pattern,0) = XEXP (pattern,1);
5995 XEXP (pattern,1) = x;
5996 INSN_CODE (next) = -1;
5998 else if (true_regnum (XEXP (pattern, 0)) >= 0
5999 && XEXP (pattern, 1) == const0_rtx)
6001 /* This is a tst insn, we can reverse it. */
6002 rtx next = next_real_insn (insn);
6003 rtx pat = PATTERN (next);
6004 rtx src = SET_SRC (pat);
6005 rtx t = XEXP (src,0);
6007 PUT_CODE (t, swap_condition (GET_CODE (t)));
6008 XEXP (pattern, 1) = XEXP (pattern, 0);
6009 XEXP (pattern, 0) = const0_rtx;
6010 INSN_CODE (next) = -1;
6011 INSN_CODE (insn) = -1;
6013 else if (true_regnum (XEXP (pattern,0)) >= 0
6014 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6016 rtx x = XEXP (pattern,1);
6017 rtx next = next_real_insn (insn);
6018 rtx pat = PATTERN (next);
6019 rtx src = SET_SRC (pat);
6020 rtx t = XEXP (src,0);
6021 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6023 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6025 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6026 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6027 INSN_CODE (next) = -1;
6028 INSN_CODE (insn) = -1;
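/* Example for the last transformation above (a sketch): given

       cc0 = compare (reg:QI r24, const_int 5)   ;; followed by
       if (gt) goto label

   and avr_simplify_comparison_p agreeing, the constant is bumped to
   6 and GT is normalized to GE, a condition the hardware can branch
   on directly.  */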
6036 /* Returns the register number for the function return value. */
6038 static inline unsigned int
6039 avr_ret_register (void)
6041 return 24;
6044 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6046 static bool
6047 avr_function_value_regno_p (const unsigned int regno)
6049 return (regno == avr_ret_register ());
6052 /* Create an RTX representing the place where a
6053 library function returns a value of mode MODE. */
6055 static rtx
6056 avr_libcall_value (enum machine_mode mode,
6057 const_rtx func ATTRIBUTE_UNUSED)
6059 int offs = GET_MODE_SIZE (mode);
6060 if (offs < 2)
6061 offs = 2;
6062 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6065 /* Create an RTX representing the place where a
6066 function returns a value of data type VALTYPE. */
6068 static rtx
6069 avr_function_value (const_tree type,
6070 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6071 bool outgoing ATTRIBUTE_UNUSED)
6073 unsigned int offs;
6075 if (TYPE_MODE (type) != BLKmode)
6076 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6078 offs = int_size_in_bytes (type);
6079 if (offs < 2)
6080 offs = 2;
6081 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6082 offs = GET_MODE_SIZE (SImode);
6083 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6084 offs = GET_MODE_SIZE (DImode);
6086 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
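/* Worked example (illustrative): avr_ret_register () is 24 and
   return values end at R25.  So an HImode value is returned in
   R24/R25, an SImode value in R22..R25, and a 3-byte aggregate is
   rounded up to SImode size and also placed in R22..R25.  */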
6089 int
6090 test_hard_reg_class (enum reg_class rclass, rtx x)
6092 int regno = true_regnum (x);
6093 if (regno < 0)
6094 return 0;
6096 if (TEST_HARD_REG_CLASS (rclass, regno))
6097 return 1;
6099 return 0;
6103 int
6104 jump_over_one_insn_p (rtx insn, rtx dest)
6106 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6107 ? XEXP (dest, 0)
6108 : dest);
6109 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6110 int dest_addr = INSN_ADDRESSES (uid);
6111 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6114 /* Returns 1 if a value of mode MODE can be stored starting with hard
6115 register number REGNO. On the enhanced core, anything larger than
6116 one byte must start in an even-numbered register for "movw" to work
6117 (this way we don't have to check for odd registers everywhere). */
6119 int
6120 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6122 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6123 Disallowing QI et al. in these regs might lead to code like
6124 (set (subreg:QI (reg:HI 28) n) ...)
6125 which will result in wrong code because reload does not
6126 handle SUBREGs of hard registers like this.
6127 This could be fixed in reload. However, it appears
6128 that fixing reload is not wanted by reload people. */
6130 /* Any GENERAL_REGS register can hold 8-bit values. */
6132 if (GET_MODE_SIZE (mode) == 1)
6133 return 1;
6135 /* FIXME: Ideally, the following test is not needed.
6136 However, it turned out that it can reduce the number
6137 of spill fails. AVR and its poor endowment with
6138 address registers is an extreme stress test for reload. */
6140 if (GET_MODE_SIZE (mode) >= 4
6141 && regno >= REG_X)
6142 return 0;
6144 /* All modes larger than 8 bits should start in an even register. */
6146 return !(regno & 1);
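/* Examples (illustrative): QImode is allowed in any register;
   HImode is allowed in R24 (even) but not in R25 (odd); SImode
   starting at REG_X (R26) is refused by the spill-pressure test
   above even though R26 is an even register.  */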
6149 const char *
6150 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6152 int tmp;
6153 if (!len)
6154 len = &tmp;
6156 if (GET_CODE (operands[1]) == CONST_INT)
6158 int val = INTVAL (operands[1]);
6159 if ((val & 0xff) == 0)
6161 *len = 3;
6162 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6163 AS2 (ldi,%2,hi8(%1)) CR_TAB
6164 AS2 (mov,%B0,%2));
6166 else if ((val & 0xff00) == 0)
6168 *len = 3;
6169 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6170 AS2 (mov,%A0,%2) CR_TAB
6171 AS2 (mov,%B0,__zero_reg__));
6173 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6175 *len = 3;
6176 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6177 AS2 (mov,%A0,%2) CR_TAB
6178 AS2 (mov,%B0,%2));
6181 *len = 4;
6182 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6183 AS2 (mov,%A0,%2) CR_TAB
6184 AS2 (ldi,%2,hi8(%1)) CR_TAB
6185 AS2 (mov,%B0,%2));
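/* Worked example (illustrative register choice): loading 0x1200 into
   the NO_LD_REGS pair r5:r4 with r30 as the QI clobber reg %2 takes
   the "(val & 0xff) == 0" path above and emits

       mov r4,__zero_reg__
       ldi r30,hi8(0x1200)
       mov r5,r30  */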
6189 /* Reload a SI or SF compile-time constant (OP[1]) into a GPR (OP[0]).
6190 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
6191 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6192 clobber reg or have to cook one up.
6194 LEN == NULL: Output instructions.
6196 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6197 by the insns printed.
6199 Return "". */
6201 const char *
6202 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6203 rtx *op, rtx clobber_reg, int *len)
6205 rtx src = op[1];
6206 rtx dest = op[0];
6207 rtx xval, xdest[4];
6208 int ival[4];
6209 int clobber_val = 1234;
6210 bool cooked_clobber_p = false;
6211 bool set_p = false;
6212 unsigned int n;
6213 enum machine_mode mode = GET_MODE (dest);
6215 gcc_assert (REG_P (dest));
6217 if (len)
6218 *len = 0;
6220 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6221 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6223 if (14 == REGNO (dest))
6225 clobber_reg = gen_rtx_REG (QImode, 17);
6228 /* We might need a clobber reg but don't have one. Look at the value
6229 to be loaded more closely. A clobber is only needed if it contains
6230 a byte that is neither 0, -1 or a power of 2. */
6232 if (NULL_RTX == clobber_reg
6233 && !test_hard_reg_class (LD_REGS, dest))
6235 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6237 xval = simplify_gen_subreg (QImode, src, mode, n);
6239 if (!(const0_rtx == xval
6240 || constm1_rtx == xval
6241 || single_one_operand (xval, QImode)))
6243 /* We have no clobber reg but need one. Cook one up.
6244 That's cheaper than loading from constant pool. */
6246 cooked_clobber_p = true;
6247 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6248 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6249 break;
6254 /* Now start filling DEST from LSB to MSB. */
6256 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6258 bool done_byte = false;
6259 unsigned int j;
6260 rtx xop[3];
6262 /* Crop the n-th sub-byte. */
6264 xval = simplify_gen_subreg (QImode, src, mode, n);
6265 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6266 ival[n] = INTVAL (xval);
6268 /* Look if we can reuse the low word by means of MOVW. */
6270 if (n == 2
6271 && AVR_HAVE_MOVW)
6273 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6274 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6276 if (INTVAL (lo16) == INTVAL (hi16))
6278 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6279 break;
6283 /* Use CLR to zero a value so that cc0 is set as expected
6284 for zero. */
6286 if (ival[n] == 0)
6288 avr_asm_len ("clr %0", &xdest[n], len, 1);
6289 continue;
6292 if (clobber_val == ival[n]
6293 && REGNO (clobber_reg) == REGNO (xdest[n]))
6295 continue;
6298 /* LD_REGS can use LDI to move a constant value */
6300 if (test_hard_reg_class (LD_REGS, xdest[n]))
6302 xop[0] = xdest[n];
6303 xop[1] = xval;
6304 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6305 continue;
6308 /* Try to reuse value already loaded in some lower byte. */
6310 for (j = 0; j < n; j++)
6311 if (ival[j] == ival[n])
6313 xop[0] = xdest[n];
6314 xop[1] = xdest[j];
6316 avr_asm_len ("mov %0,%1", xop, len, 1);
6317 done_byte = true;
6318 break;
6321 if (done_byte)
6322 continue;
6324 /* Need no clobber reg for -1: Use CLR/DEC */
6326 if (-1 == ival[n])
6328 avr_asm_len ("clr %0" CR_TAB
6329 "dec %0", &xdest[n], len, 2);
6330 continue;
6333 /* Use T flag or INC to manage powers of 2 if we have
6334 no clobber reg. */
6336 if (NULL_RTX == clobber_reg
6337 && single_one_operand (xval, QImode))
6339 if (1 == ival[n])
6341 avr_asm_len ("clr %0" CR_TAB
6342 "inc %0", &xdest[n], len, 2);
6343 continue;
6346 xop[0] = xdest[n];
6347 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6349 gcc_assert (constm1_rtx != xop[1]);
6351 if (!set_p)
6353 set_p = true;
6354 avr_asm_len ("set", xop, len, 1);
6357 avr_asm_len ("clr %0" CR_TAB
6358 "bld %0,%1", xop, len, 2);
6359 continue;
6362 /* We actually need the LD_REGS clobber reg. */
6364 gcc_assert (NULL_RTX != clobber_reg);
6366 xop[0] = xdest[n];
6367 xop[1] = xval;
6368 xop[2] = clobber_reg;
6369 clobber_val = ival[n];
6371 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6372 "mov %0,%2", xop, len, 2);
6375 /* If we cooked up a clobber reg above, restore it. */
6377 if (cooked_clobber_p)
6379 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
6382 return "";
6385 void
6386 avr_output_bld (rtx operands[], int bit_nr)
6388 static char s[] = "bld %A0,0";
6390 s[5] = 'A' + (bit_nr >> 3);
6391 s[8] = '0' + (bit_nr & 7);
6392 output_asm_insn (s, operands);
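/* Example (illustrative): avr_output_bld (operands, 10) rewrites the
   template to "bld %B0,2" -- bit 10 lives in byte 'A' + (10 >> 3) =
   'B' at position 10 & 7 = 2.  */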
6395 void
6396 avr_output_addr_vec_elt (FILE *stream, int value)
6398 switch_to_section (progmem_section);
6399 if (AVR_HAVE_JMP_CALL)
6400 fprintf (stream, "\t.word gs(.L%d)\n", value);
6401 else
6402 fprintf (stream, "\trjmp .L%d\n", value);
6405 /* Returns true if register REGNO is safe to allocate as a scratch
6406 register (for a define_peephole2) in the current function. */
6408 bool
6409 avr_hard_regno_scratch_ok (unsigned int regno)
6411 /* Interrupt functions can only use registers that have already been saved
6412 by the prologue, even if they would normally be call-clobbered. */
6414 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6415 && !df_regs_ever_live_p (regno))
6416 return false;
6418 /* Don't allow hard registers that might be part of the frame pointer.
6419 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6420 and don't care for a frame pointer that spans more than one register. */
6422 if ((!reload_completed || frame_pointer_needed)
6423 && (regno == REG_Y || regno == REG_Y + 1))
6425 return false;
6428 return true;
6431 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6433 int
6434 avr_hard_regno_rename_ok (unsigned int old_reg,
6435 unsigned int new_reg)
6437 /* Interrupt functions can only use registers that have already been
6438 saved by the prologue, even if they would normally be
6439 call-clobbered. */
6441 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6442 && !df_regs_ever_live_p (new_reg))
6443 return 0;
6445 /* Don't allow hard registers that might be part of the frame pointer.
6446 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6447 and don't care for a frame pointer that spans more than one register. */
6449 if ((!reload_completed || frame_pointer_needed)
6450 && (old_reg == REG_Y || old_reg == REG_Y + 1
6451 || new_reg == REG_Y || new_reg == REG_Y + 1))
6453 return 0;
6456 return 1;
6459 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6460 or memory location in the I/O space (QImode only).
6462 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6463 Operand 1: register operand to test, or CONST_INT memory address.
6464 Operand 2: bit number.
6465 Operand 3: label to jump to if the test is true. */
6467 const char *
6468 avr_out_sbxx_branch (rtx insn, rtx operands[])
6470 enum rtx_code comp = GET_CODE (operands[0]);
6471 int long_jump = (get_attr_length (insn) >= 4);
6472 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6474 if (comp == GE)
6475 comp = EQ;
6476 else if (comp == LT)
6477 comp = NE;
6479 if (reverse)
6480 comp = reverse_condition (comp);
6482 if (GET_CODE (operands[1]) == CONST_INT)
6484 if (INTVAL (operands[1]) < 0x40)
6486 if (comp == EQ)
6487 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6488 else
6489 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6491 else
6493 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6494 if (comp == EQ)
6495 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6496 else
6497 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6500 else /* GET_CODE (operands[1]) == REG */
6502 if (GET_MODE (operands[1]) == QImode)
6504 if (comp == EQ)
6505 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6506 else
6507 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6509 else /* HImode or SImode */
6511 static char buf[] = "sbrc %A1,0";
6512 int bit_nr = INTVAL (operands[2]);
6513 buf[3] = (comp == EQ) ? 's' : 'c';
6514 buf[6] = 'A' + (bit_nr >> 3);
6515 buf[9] = '0' + (bit_nr & 7);
6516 output_asm_insn (buf, operands);
6520 if (long_jump)
6521 return (AS1 (rjmp,.+4) CR_TAB
6522 AS1 (jmp,%x3));
6523 if (!reverse)
6524 return AS1 (rjmp,%x3);
6525 return "";
6528 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6530 static void
6531 avr_asm_out_ctor (rtx symbol, int priority)
6533 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6534 default_ctor_section_asm_out_constructor (symbol, priority);
6537 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6539 static void
6540 avr_asm_out_dtor (rtx symbol, int priority)
6542 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6543 default_dtor_section_asm_out_destructor (symbol, priority);
6546 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6548 static bool
6549 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6551 if (TYPE_MODE (type) == BLKmode)
6553 HOST_WIDE_INT size = int_size_in_bytes (type);
6554 return (size == -1 || size > 8);
6556 else
6557 return false;
6560 /* Worker function for CASE_VALUES_THRESHOLD. */
6562 unsigned int avr_case_values_threshold (void)
6564 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6567 /* Helper for __builtin_avr_delay_cycles */
6569 static void
6570 avr_expand_delay_cycles (rtx operands0)
6572 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6573 unsigned HOST_WIDE_INT cycles_used;
6574 unsigned HOST_WIDE_INT loop_count;
6576 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6578 loop_count = ((cycles - 9) / 6) + 1;
6579 cycles_used = ((loop_count - 1) * 6) + 9;
6580 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6581 cycles -= cycles_used;
6584 if (IN_RANGE (cycles, 262145, 83886081))
6586 loop_count = ((cycles - 7) / 5) + 1;
6587 if (loop_count > 0xFFFFFF)
6588 loop_count = 0xFFFFFF;
6589 cycles_used = ((loop_count - 1) * 5) + 7;
6590 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6591 cycles -= cycles_used;
6594 if (IN_RANGE (cycles, 768, 262144))
6596 loop_count = ((cycles - 5) / 4) + 1;
6597 if (loop_count > 0xFFFF)
6598 loop_count = 0xFFFF;
6599 cycles_used = ((loop_count - 1) * 4) + 5;
6600 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6601 cycles -= cycles_used;
6604 if (IN_RANGE (cycles, 6, 767))
6606 loop_count = cycles / 3;
6607 if (loop_count > 255)
6608 loop_count = 255;
6609 cycles_used = loop_count * 3;
6610 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6611 cycles -= cycles_used;
6614 while (cycles >= 2)
6616 emit_insn (gen_nopv (GEN_INT(2)));
6617 cycles -= 2;
6620 if (cycles == 1)
6622 emit_insn (gen_nopv (GEN_INT(1)));
6623 cycles--;
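/* Worked example (a sketch): __builtin_avr_delay_cycles (100) hits
   the 6..767 range above: loop_count = 100 / 3 = 33 consumes
   33 * 3 = 99 cycles via delay_cycles_1, and the one remaining
   cycle is padded with a single NOP.  */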
6627 /* IDs for all the AVR builtins. */
6629 enum avr_builtin_id
6631 AVR_BUILTIN_NOP,
6632 AVR_BUILTIN_SEI,
6633 AVR_BUILTIN_CLI,
6634 AVR_BUILTIN_WDR,
6635 AVR_BUILTIN_SLEEP,
6636 AVR_BUILTIN_SWAP,
6637 AVR_BUILTIN_FMUL,
6638 AVR_BUILTIN_FMULS,
6639 AVR_BUILTIN_FMULSU,
6640 AVR_BUILTIN_DELAY_CYCLES
6643 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6644 do \
6646 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6647 NULL, NULL_TREE); \
6648 } while (0)
6651 /* Implement `TARGET_INIT_BUILTINS' */
6652 /* Set up all builtin functions for this target. */
6654 static void
6655 avr_init_builtins (void)
6657 tree void_ftype_void
6658 = build_function_type_list (void_type_node, NULL_TREE);
6659 tree uchar_ftype_uchar
6660 = build_function_type_list (unsigned_char_type_node,
6661 unsigned_char_type_node,
6662 NULL_TREE);
6663 tree uint_ftype_uchar_uchar
6664 = build_function_type_list (unsigned_type_node,
6665 unsigned_char_type_node,
6666 unsigned_char_type_node,
6667 NULL_TREE);
6668 tree int_ftype_char_char
6669 = build_function_type_list (integer_type_node,
6670 char_type_node,
6671 char_type_node,
6672 NULL_TREE);
6673 tree int_ftype_char_uchar
6674 = build_function_type_list (integer_type_node,
6675 char_type_node,
6676 unsigned_char_type_node,
6677 NULL_TREE);
6678 tree void_ftype_ulong
6679 = build_function_type_list (void_type_node,
6680 long_unsigned_type_node,
6681 NULL_TREE);
6683 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6684 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6685 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6686 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6687 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6688 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6689 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6690 AVR_BUILTIN_DELAY_CYCLES);
6692 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6693 AVR_BUILTIN_FMUL);
6694 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6695 AVR_BUILTIN_FMULS);
6696 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6697 AVR_BUILTIN_FMULSU);
6700 #undef DEF_BUILTIN
6702 struct avr_builtin_description
6704 const enum insn_code icode;
6705 const char *const name;
6706 const enum avr_builtin_id id;
6709 static const struct avr_builtin_description
6710 bdesc_1arg[] =
6712 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
6715 static const struct avr_builtin_description
6716 bdesc_2arg[] =
6718 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6719 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6720 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6723 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6725 static rtx
6726 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6727 rtx target)
6729 rtx pat;
6730 tree arg0 = CALL_EXPR_ARG (exp, 0);
6731 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6732 enum machine_mode op0mode = GET_MODE (op0);
6733 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6734 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6736 if (! target
6737 || GET_MODE (target) != tmode
6738 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6740 target = gen_reg_rtx (tmode);
6743 if (op0mode == SImode && mode0 == HImode)
6745 op0mode = HImode;
6746 op0 = gen_lowpart (HImode, op0);
6749 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6751 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6752 op0 = copy_to_mode_reg (mode0, op0);
6754 pat = GEN_FCN (icode) (target, op0);
6755 if (! pat)
6756 return 0;
6758 emit_insn (pat);
6760 return target;
6764 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6766 static rtx
6767 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6769 rtx pat;
6770 tree arg0 = CALL_EXPR_ARG (exp, 0);
6771 tree arg1 = CALL_EXPR_ARG (exp, 1);
6772 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6773 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6774 enum machine_mode op0mode = GET_MODE (op0);
6775 enum machine_mode op1mode = GET_MODE (op1);
6776 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6777 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6778 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6780 if (! target
6781 || GET_MODE (target) != tmode
6782 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6784 target = gen_reg_rtx (tmode);
6787 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6789 op0mode = HImode;
6790 op0 = gen_lowpart (HImode, op0);
6793 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6795 op1mode = HImode;
6796 op1 = gen_lowpart (HImode, op1);
6799 /* In case the insn wants input operands in modes different from
6800 the result, abort. */
6802 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6803 && (op1mode == mode1 || op1mode == VOIDmode));
6805 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6806 op0 = copy_to_mode_reg (mode0, op0);
6808 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6809 op1 = copy_to_mode_reg (mode1, op1);
6811 pat = GEN_FCN (icode) (target, op0, op1);
6813 if (! pat)
6814 return 0;
6816 emit_insn (pat);
6817 return target;
6821 /* Expand an expression EXP that calls a built-in function,
6822 with result going to TARGET if that's convenient
6823 (and in mode MODE if that's convenient).
6824 SUBTARGET may be used as the target for computing one of EXP's operands.
6825 IGNORE is nonzero if the value is to be ignored. */
6827 static rtx
6828 avr_expand_builtin (tree exp, rtx target,
6829 rtx subtarget ATTRIBUTE_UNUSED,
6830 enum machine_mode mode ATTRIBUTE_UNUSED,
6831 int ignore ATTRIBUTE_UNUSED)
6833 size_t i;
6834 const struct avr_builtin_description *d;
6835 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6836 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6837 tree arg0;
6838 rtx op0;
6840 switch (id)
6842 case AVR_BUILTIN_NOP:
6843 emit_insn (gen_nopv (GEN_INT(1)));
6844 return 0;
6846 case AVR_BUILTIN_SEI:
6847 emit_insn (gen_enable_interrupt ());
6848 return 0;
6850 case AVR_BUILTIN_CLI:
6851 emit_insn (gen_disable_interrupt ());
6852 return 0;
6854 case AVR_BUILTIN_WDR:
6855 emit_insn (gen_wdr ());
6856 return 0;
6858 case AVR_BUILTIN_SLEEP:
6859 emit_insn (gen_sleep ());
6860 return 0;
6862 case AVR_BUILTIN_DELAY_CYCLES:
6864 arg0 = CALL_EXPR_ARG (exp, 0);
6865 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6867 if (! CONST_INT_P (op0))
6868 error ("__builtin_avr_delay_cycles expects a compile-time integer constant");
6870 avr_expand_delay_cycles (op0);
6871 return 0;
6875 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6876 if (d->id == id)
6877 return avr_expand_unop_builtin (d->icode, exp, target);
6879 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6880 if (d->id == id)
6881 return avr_expand_binop_builtin (d->icode, exp, target);
6883 gcc_unreachable ();
6887 #include "gt-avr.h"