1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "diagnostic-core.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
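/* For illustration, values derived from the definition above:
   MAX_LD_OFFSET (QImode) == 63, MAX_LD_OFFSET (HImode) == 62 and
   MAX_LD_OFFSET (SImode) == 60, i.e. the largest displacement for which
   the last byte of the access still falls within the 0..63 range of the
   ldd/std displacement field.  */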
52 static void avr_option_override (void);
53 static int avr_naked_function_p (tree);
54 static int interrupt_function_p (tree);
55 static int signal_function_p (tree);
56 static int avr_OS_task_function_p (tree);
57 static int avr_OS_main_function_p (tree);
58 static int avr_regs_to_save (HARD_REG_SET *);
59 static int get_sequence_length (rtx insns);
60 static int sequent_regs_live (void);
61 static const char *ptrreg_to_str (int);
62 static const char *cond_string (enum rtx_code);
63 static int avr_num_arg_regs (enum machine_mode, tree);
65 static RTX_CODE compare_condition (rtx insn);
66 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
67 static int compare_sign_p (rtx insn);
68 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
70 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
71 static bool avr_assemble_integer (rtx, unsigned int, int);
72 static void avr_file_start (void);
73 static void avr_file_end (void);
74 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
75 static void avr_asm_function_end_prologue (FILE *);
76 static void avr_asm_function_begin_epilogue (FILE *);
77 static rtx avr_function_value (const_tree, const_tree, bool);
78 static void avr_insert_attributes (tree, tree *);
79 static void avr_asm_init_sections (void);
80 static unsigned int avr_section_type_flags (tree, const char *, int);
82 static void avr_reorg (void);
83 static void avr_asm_out_ctor (rtx, int);
84 static void avr_asm_out_dtor (rtx, int);
85 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
86 static bool avr_rtx_costs (rtx, int, int, int *, bool);
87 static int avr_address_cost (rtx, bool);
88 static bool avr_return_in_memory (const_tree, const_tree);
89 static struct machine_function * avr_init_machine_status (void);
90 static rtx avr_builtin_setjmp_frame_value (void);
91 static bool avr_hard_regno_scratch_ok (unsigned int);
92 static unsigned int avr_case_values_threshold (void);
93 static bool avr_frame_pointer_required_p (void);
94 static bool avr_can_eliminate (const int, const int);
95 static bool avr_class_likely_spilled_p (reg_class_t c);
96 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
97 const_tree, bool);
98 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
99 const_tree, bool);
101 /* Allocate registers from r25 to r8 for parameters for function calls. */
102 #define FIRST_CUM_REG 26
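/* Note (a reading of avr_function_arg below, not part of the ABI text):
   cum->regno starts at 26 and is decremented by the rounded argument size
   before use, so the first 2-byte argument ends up in r24/r25 and
   subsequent arguments walk down towards r8.  */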
104 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
105 static GTY(()) rtx tmp_reg_rtx;
107 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
108 static GTY(()) rtx zero_reg_rtx;
110 /* AVR register names {"r0", "r1", ..., "r31"} */
111 static const char *const avr_regnames[] = REGISTER_NAMES;
113 /* Preprocessor macros to define depending on MCU type. */
114 const char *avr_extra_arch_macro;
116 /* Current architecture. */
117 const struct base_arch_s *avr_current_arch;
119 /* Current device. */
120 const struct mcu_type_s *avr_current_device;
122 section *progmem_section;
124 /* AVR attributes. */
 125 static const struct attribute_spec avr_attribute_table[] =
 126 {
 127   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
 128   { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
 129   { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
 130   { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
 131   { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
 132   { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
 133   { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
 134   { NULL, 0, 0, false, false, false, NULL }
 135 };
 137 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
 138 static const struct default_options avr_option_optimization_table[] =
 139   {
 140     { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
 141     { OPT_LEVELS_NONE, 0, NULL, 0 }
 142   };
144 /* Initialize the GCC target structure. */
145 #undef TARGET_ASM_ALIGNED_HI_OP
146 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
147 #undef TARGET_ASM_ALIGNED_SI_OP
148 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
149 #undef TARGET_ASM_UNALIGNED_HI_OP
150 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
151 #undef TARGET_ASM_UNALIGNED_SI_OP
152 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
153 #undef TARGET_ASM_INTEGER
154 #define TARGET_ASM_INTEGER avr_assemble_integer
155 #undef TARGET_ASM_FILE_START
156 #define TARGET_ASM_FILE_START avr_file_start
157 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
158 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
159 #undef TARGET_ASM_FILE_END
160 #define TARGET_ASM_FILE_END avr_file_end
162 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
163 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
164 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
165 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
166 #undef TARGET_FUNCTION_VALUE
167 #define TARGET_FUNCTION_VALUE avr_function_value
168 #undef TARGET_ATTRIBUTE_TABLE
169 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
170 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
171 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
172 #undef TARGET_INSERT_ATTRIBUTES
173 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
174 #undef TARGET_SECTION_TYPE_FLAGS
175 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
176 #undef TARGET_RTX_COSTS
177 #define TARGET_RTX_COSTS avr_rtx_costs
178 #undef TARGET_ADDRESS_COST
179 #define TARGET_ADDRESS_COST avr_address_cost
180 #undef TARGET_MACHINE_DEPENDENT_REORG
181 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
182 #undef TARGET_FUNCTION_ARG
183 #define TARGET_FUNCTION_ARG avr_function_arg
184 #undef TARGET_FUNCTION_ARG_ADVANCE
185 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
187 #undef TARGET_LEGITIMIZE_ADDRESS
188 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
190 #undef TARGET_RETURN_IN_MEMORY
191 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
193 #undef TARGET_STRICT_ARGUMENT_NAMING
194 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
196 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
197 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
199 #undef TARGET_HARD_REGNO_SCRATCH_OK
200 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
201 #undef TARGET_CASE_VALUES_THRESHOLD
202 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
204 #undef TARGET_LEGITIMATE_ADDRESS_P
205 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
207 #undef TARGET_FRAME_POINTER_REQUIRED
208 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
209 #undef TARGET_CAN_ELIMINATE
210 #define TARGET_CAN_ELIMINATE avr_can_eliminate
212 #undef TARGET_CLASS_LIKELY_SPILLED_P
213 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
215 #undef TARGET_OPTION_OVERRIDE
216 #define TARGET_OPTION_OVERRIDE avr_option_override
218 #undef TARGET_OPTION_OPTIMIZATION_TABLE
219 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
221 struct gcc_target targetm = TARGET_INITIALIZER;
223 static void
224 avr_option_override (void)
226 const struct mcu_type_s *t;
228 flag_delete_null_pointer_checks = 0;
230 for (t = avr_mcu_types; t->name; t++)
231 if (strcmp (t->name, avr_mcu_name) == 0)
232 break;
234 if (!t->name)
236 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
237 avr_mcu_name);
238 for (t = avr_mcu_types; t->name; t++)
239 fprintf (stderr," %s\n", t->name);
242 avr_current_device = t;
243 avr_current_arch = &avr_arch_types[avr_current_device->arch];
244 avr_extra_arch_macro = avr_current_device->macro;
246 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
247 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
249 init_machine_status = avr_init_machine_status;
 252 /* Return the register class from the register number.  */
254 static const enum reg_class reg_class_tab[]={
255 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
256 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
257 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
258 GENERAL_REGS, /* r0 - r15 */
259 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
260 LD_REGS, /* r16 - 23 */
261 ADDW_REGS,ADDW_REGS, /* r24,r25 */
262 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
263 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
264 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
265 STACK_REG,STACK_REG /* SPL,SPH */
268 /* Function to set up the backend function structure. */
270 static struct machine_function *
271 avr_init_machine_status (void)
273 return ggc_alloc_cleared_machine_function ();
276 /* Return register class for register R. */
278 enum reg_class
279 avr_regno_reg_class (int r)
281 if (r <= 33)
282 return reg_class_tab[r];
283 return ALL_REGS;
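/* Examples read off the table above: avr_regno_reg_class (0) is
   GENERAL_REGS, (16) is LD_REGS, (24) is ADDW_REGS, (26) is
   POINTER_X_REGS, (30) is POINTER_Z_REGS and (32) is STACK_REG (SPL).  */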
286 /* Return nonzero if FUNC is a naked function. */
288 static int
289 avr_naked_function_p (tree func)
291 tree a;
293 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
295 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
296 return a != NULL_TREE;
299 /* Return nonzero if FUNC is an interrupt function as specified
300 by the "interrupt" attribute. */
302 static int
303 interrupt_function_p (tree func)
305 tree a;
307 if (TREE_CODE (func) != FUNCTION_DECL)
308 return 0;
310 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
311 return a != NULL_TREE;
314 /* Return nonzero if FUNC is a signal function as specified
315 by the "signal" attribute. */
317 static int
318 signal_function_p (tree func)
320 tree a;
322 if (TREE_CODE (func) != FUNCTION_DECL)
323 return 0;
325 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
326 return a != NULL_TREE;
 329 /* Return nonzero if FUNC is an OS_task function.  */
331 static int
332 avr_OS_task_function_p (tree func)
334 tree a;
336 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
338 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
339 return a != NULL_TREE;
 342 /* Return nonzero if FUNC is an OS_main function.  */
344 static int
345 avr_OS_main_function_p (tree func)
347 tree a;
349 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
351 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
352 return a != NULL_TREE;
355 /* Return the number of hard registers to push/pop in the prologue/epilogue
356 of the current function, and optionally store these registers in SET. */
358 static int
359 avr_regs_to_save (HARD_REG_SET *set)
361 int reg, count;
362 int int_or_sig_p = (interrupt_function_p (current_function_decl)
363 || signal_function_p (current_function_decl));
365 if (set)
366 CLEAR_HARD_REG_SET (*set);
367 count = 0;
369 /* No need to save any registers if the function never returns or
 370 has the "OS_task" or "OS_main" attribute. */
371 if (TREE_THIS_VOLATILE (current_function_decl)
372 || cfun->machine->is_OS_task
373 || cfun->machine->is_OS_main)
374 return 0;
376 for (reg = 0; reg < 32; reg++)
378 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
379 any global register variables. */
380 if (fixed_regs[reg])
381 continue;
383 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
384 || (df_regs_ever_live_p (reg)
385 && (int_or_sig_p || !call_used_regs[reg])
386 && !(frame_pointer_needed
387 && (reg == REG_Y || reg == (REG_Y+1)))))
389 if (set)
390 SET_HARD_REG_BIT (*set, reg);
391 count++;
394 return count;
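/* Informally (a reading of the loop above): a normal function saves only
   its live call-saved registers; an "interrupt"/"signal" function also
   saves every call-used register when it is not a leaf, plus any live
   register at all.  Fixed registers are never counted, nor are r28/r29
   when the frame pointer is set up separately.  */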
397 /* Return true if register FROM can be eliminated via register TO. */
399 bool
400 avr_can_eliminate (const int from, const int to)
402 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
403 || ((from == FRAME_POINTER_REGNUM
404 || from == FRAME_POINTER_REGNUM + 1)
405 && !frame_pointer_needed));
408 /* Compute offset between arg_pointer and frame_pointer. */
 410 int
 411 avr_initial_elimination_offset (int from, int to)
413 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
414 return 0;
415 else
417 int offset = frame_pointer_needed ? 2 : 0;
418 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
420 offset += avr_regs_to_save (NULL);
421 return get_frame_size () + (avr_pc_size) + 1 + offset;
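/* Worked example of the formula above: with a 2-byte PC, a needed frame
   pointer, no other saved registers and a 4-byte frame, the elimination
   offset is 4 + 2 + 1 + 2 = 9 bytes.  */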
 425 /* The actual start of the frame is virtual_stack_vars_rtx, which is
 426    offset from the frame pointer by +STARTING_FRAME_OFFSET.
 427    Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
 428    avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
430 rtx avr_builtin_setjmp_frame_value (void)
432 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
433 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
436 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
 437 This is the return address of the function. */
438 rtx
439 avr_return_addr_rtx (int count, const_rtx tem)
441 rtx r;
 443 /* Can only return this function's return address. Others are not supported. */
444 if (count)
445 return NULL;
447 if (AVR_3_BYTE_PC)
449 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
450 warning (0, "'builtin_return_address' contains only 2 bytes of address");
452 else
453 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
455 r = gen_rtx_PLUS (Pmode, tem, r);
456 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
457 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
458 return r;
461 /* Return 1 if the function epilogue is just a single "ret". */
 463 int
 464 avr_simple_epilogue (void)
466 return (! frame_pointer_needed
467 && get_frame_size () == 0
468 && avr_regs_to_save (NULL) == 0
469 && ! interrupt_function_p (current_function_decl)
470 && ! signal_function_p (current_function_decl)
471 && ! avr_naked_function_p (current_function_decl)
472 && ! TREE_THIS_VOLATILE (current_function_decl));
 475 /* This function checks the sequence of live registers.  */
477 static int
478 sequent_regs_live (void)
480 int reg;
481 int live_seq=0;
482 int cur_seq=0;
484 for (reg = 0; reg < 18; ++reg)
486 if (!call_used_regs[reg])
488 if (df_regs_ever_live_p (reg))
490 ++live_seq;
491 ++cur_seq;
493 else
494 cur_seq = 0;
498 if (!frame_pointer_needed)
500 if (df_regs_ever_live_p (REG_Y))
502 ++live_seq;
503 ++cur_seq;
505 else
506 cur_seq = 0;
508 if (df_regs_ever_live_p (REG_Y+1))
510 ++live_seq;
511 ++cur_seq;
513 else
514 cur_seq = 0;
516 else
518 cur_seq += 2;
519 live_seq += 2;
521 return (cur_seq == live_seq) ? live_seq : 0;
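/* Roughly (a reading of the code above, not a specification): LIVE_SEQ
   counts the live registers relevant for the call-prologues helpers, and
   the value is only returned when those registers form a single
   contiguous run (CUR_SEQ == LIVE_SEQ); otherwise 0 disables the minimal
   call-prologues path.  */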
 524 /* Obtain the length of a sequence of insns.  */
527 get_sequence_length (rtx insns)
529 rtx insn;
530 int length;
532 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
533 length += get_attr_length (insn);
535 return length;
538 /* Output function prologue. */
540 void
541 expand_prologue (void)
543 int live_seq;
544 HARD_REG_SET set;
545 int minimize;
546 HOST_WIDE_INT size = get_frame_size();
547 /* Define templates for push instructions. */
548 rtx pushbyte = gen_rtx_MEM (QImode,
549 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
550 rtx pushword = gen_rtx_MEM (HImode,
551 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
552 rtx insn;
554 /* Init cfun->machine. */
555 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
556 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
557 cfun->machine->is_signal = signal_function_p (current_function_decl);
558 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
559 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
560 cfun->machine->stack_usage = 0;
562 /* Prologue: naked. */
563 if (cfun->machine->is_naked)
565 return;
568 avr_regs_to_save (&set);
569 live_seq = sequent_regs_live ();
570 minimize = (TARGET_CALL_PROLOGUES
571 && !cfun->machine->is_interrupt
572 && !cfun->machine->is_signal
573 && !cfun->machine->is_OS_task
574 && !cfun->machine->is_OS_main
575 && live_seq);
577 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
579 if (cfun->machine->is_interrupt)
581 /* Enable interrupts. */
582 insn = emit_insn (gen_enable_interrupt ());
583 RTX_FRAME_RELATED_P (insn) = 1;
586 /* Push zero reg. */
587 insn = emit_move_insn (pushbyte, zero_reg_rtx);
588 RTX_FRAME_RELATED_P (insn) = 1;
589 cfun->machine->stack_usage++;
591 /* Push tmp reg. */
592 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
593 RTX_FRAME_RELATED_P (insn) = 1;
594 cfun->machine->stack_usage++;
596 /* Push SREG. */
597 insn = emit_move_insn (tmp_reg_rtx,
598 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
599 RTX_FRAME_RELATED_P (insn) = 1;
600 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
601 RTX_FRAME_RELATED_P (insn) = 1;
602 cfun->machine->stack_usage++;
604 /* Push RAMPZ. */
605 if(AVR_HAVE_RAMPZ
606 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
608 insn = emit_move_insn (tmp_reg_rtx,
609 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
610 RTX_FRAME_RELATED_P (insn) = 1;
611 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
612 RTX_FRAME_RELATED_P (insn) = 1;
613 cfun->machine->stack_usage++;
616 /* Clear zero reg. */
617 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
618 RTX_FRAME_RELATED_P (insn) = 1;
620 /* Prevent any attempt to delete the setting of ZERO_REG! */
621 emit_use (zero_reg_rtx);
623 if (minimize && (frame_pointer_needed
624 || (AVR_2_BYTE_PC && live_seq > 6)
625 || live_seq > 7))
627 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
628 gen_int_mode (size, HImode));
629 RTX_FRAME_RELATED_P (insn) = 1;
631 insn =
632 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
633 gen_int_mode (size + live_seq, HImode)));
634 RTX_FRAME_RELATED_P (insn) = 1;
635 cfun->machine->stack_usage += size + live_seq;
637 else
639 int reg;
640 for (reg = 0; reg < 32; ++reg)
642 if (TEST_HARD_REG_BIT (set, reg))
644 /* Emit push of register to save. */
645 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
646 RTX_FRAME_RELATED_P (insn) = 1;
647 cfun->machine->stack_usage++;
650 if (frame_pointer_needed)
652 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
654 /* Push frame pointer. */
655 insn = emit_move_insn (pushword, frame_pointer_rtx);
656 RTX_FRAME_RELATED_P (insn) = 1;
657 cfun->machine->stack_usage += 2;
660 if (!size)
662 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
663 RTX_FRAME_RELATED_P (insn) = 1;
665 else
667 /* Creating a frame can be done by direct manipulation of the
668 stack or via the frame pointer. These two methods are:
669 fp=sp
670 fp-=size
671 sp=fp
 672 OR
 673 sp-=size
 674 fp=sp
 675 the optimum method depends on the function type and on the stack and
 676 frame size.  To avoid complex logic, both methods are tested and the
 677 shorter sequence is selected. */
678 rtx myfp;
679 rtx fp_plus_insns;
680 rtx sp_plus_insns = NULL_RTX;
682 if (AVR_HAVE_8BIT_SP)
684 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
685 over 'sbiw' (2 cycles, same size). */
686 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
688 else
690 /* Normal sized addition. */
691 myfp = frame_pointer_rtx;
694 /* Method 1-Adjust frame pointer. */
695 start_sequence ();
697 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
698 RTX_FRAME_RELATED_P (insn) = 1;
700 insn =
701 emit_move_insn (myfp,
702 gen_rtx_PLUS (GET_MODE(myfp), myfp,
703 gen_int_mode (-size,
704 GET_MODE(myfp))));
705 RTX_FRAME_RELATED_P (insn) = 1;
707 /* Copy to stack pointer. */
708 if (AVR_HAVE_8BIT_SP)
710 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
711 RTX_FRAME_RELATED_P (insn) = 1;
713 else if (TARGET_NO_INTERRUPTS
714 || cfun->machine->is_signal
715 || cfun->machine->is_OS_main)
717 insn =
718 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
719 frame_pointer_rtx));
720 RTX_FRAME_RELATED_P (insn) = 1;
722 else if (cfun->machine->is_interrupt)
724 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
725 frame_pointer_rtx));
726 RTX_FRAME_RELATED_P (insn) = 1;
728 else
730 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
731 RTX_FRAME_RELATED_P (insn) = 1;
734 fp_plus_insns = get_insns ();
735 end_sequence ();
737 /* Method 2-Adjust Stack pointer. */
738 if (size <= 6)
740 start_sequence ();
742 insn =
743 emit_move_insn (stack_pointer_rtx,
744 gen_rtx_PLUS (HImode,
745 stack_pointer_rtx,
746 gen_int_mode (-size,
747 HImode)));
748 RTX_FRAME_RELATED_P (insn) = 1;
750 insn =
751 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
752 RTX_FRAME_RELATED_P (insn) = 1;
754 sp_plus_insns = get_insns ();
755 end_sequence ();
758 /* Use shortest method. */
759 if (size <= 6 && (get_sequence_length (sp_plus_insns)
760 < get_sequence_length (fp_plus_insns)))
761 emit_insn (sp_plus_insns);
762 else
763 emit_insn (fp_plus_insns);
764 cfun->machine->stack_usage += size;
769 if (flag_stack_usage)
770 current_function_static_stack_size = cfun->machine->stack_usage;
773 /* Output summary at end of function prologue. */
775 static void
776 avr_asm_function_end_prologue (FILE *file)
778 if (cfun->machine->is_naked)
780 fputs ("/* prologue: naked */\n", file);
782 else
784 if (cfun->machine->is_interrupt)
786 fputs ("/* prologue: Interrupt */\n", file);
788 else if (cfun->machine->is_signal)
790 fputs ("/* prologue: Signal */\n", file);
792 else
793 fputs ("/* prologue: function */\n", file);
795 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
796 get_frame_size());
797 fprintf (file, "/* stack size = %d */\n",
798 cfun->machine->stack_usage);
799 /* Create symbol stack offset here so all functions have it. Add 1 to stack
800 usage for offset so that SP + .L__stack_offset = return address. */
801 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
805 /* Implement EPILOGUE_USES. */
 807 int
 808 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
810 if (reload_completed
811 && cfun->machine
812 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
813 return 1;
814 return 0;
817 /* Output RTL epilogue. */
819 void
820 expand_epilogue (void)
822 int reg;
823 int live_seq;
824 HARD_REG_SET set;
825 int minimize;
826 HOST_WIDE_INT size = get_frame_size();
828 /* epilogue: naked */
829 if (cfun->machine->is_naked)
831 emit_jump_insn (gen_return ());
832 return;
835 avr_regs_to_save (&set);
836 live_seq = sequent_regs_live ();
837 minimize = (TARGET_CALL_PROLOGUES
838 && !cfun->machine->is_interrupt
839 && !cfun->machine->is_signal
840 && !cfun->machine->is_OS_task
841 && !cfun->machine->is_OS_main
842 && live_seq);
844 if (minimize && (frame_pointer_needed || live_seq > 4))
846 if (frame_pointer_needed)
848 /* Get rid of frame. */
849 emit_move_insn(frame_pointer_rtx,
850 gen_rtx_PLUS (HImode, frame_pointer_rtx,
851 gen_int_mode (size, HImode)));
853 else
855 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
858 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
860 else
862 if (frame_pointer_needed)
864 if (size)
866 /* Try two methods to adjust stack and select shortest. */
867 rtx myfp;
868 rtx fp_plus_insns;
869 rtx sp_plus_insns = NULL_RTX;
871 if (AVR_HAVE_8BIT_SP)
873 /* The high byte (r29) doesn't change - prefer 'subi'
874 (1 cycle) over 'sbiw' (2 cycles, same size). */
875 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
877 else
879 /* Normal sized addition. */
880 myfp = frame_pointer_rtx;
883 /* Method 1-Adjust frame pointer. */
884 start_sequence ();
886 emit_move_insn (myfp,
887 gen_rtx_PLUS (GET_MODE (myfp), myfp,
888 gen_int_mode (size,
889 GET_MODE(myfp))));
891 /* Copy to stack pointer. */
892 if (AVR_HAVE_8BIT_SP)
894 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
896 else if (TARGET_NO_INTERRUPTS
897 || cfun->machine->is_signal)
899 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
900 frame_pointer_rtx));
902 else if (cfun->machine->is_interrupt)
904 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
905 frame_pointer_rtx));
907 else
909 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
912 fp_plus_insns = get_insns ();
913 end_sequence ();
915 /* Method 2-Adjust Stack pointer. */
916 if (size <= 5)
918 start_sequence ();
920 emit_move_insn (stack_pointer_rtx,
921 gen_rtx_PLUS (HImode, stack_pointer_rtx,
922 gen_int_mode (size,
923 HImode)));
925 sp_plus_insns = get_insns ();
926 end_sequence ();
929 /* Use shortest method. */
930 if (size <= 5 && (get_sequence_length (sp_plus_insns)
931 < get_sequence_length (fp_plus_insns)))
932 emit_insn (sp_plus_insns);
933 else
934 emit_insn (fp_plus_insns);
936 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
938 /* Restore previous frame_pointer. */
939 emit_insn (gen_pophi (frame_pointer_rtx));
942 /* Restore used registers. */
943 for (reg = 31; reg >= 0; --reg)
945 if (TEST_HARD_REG_BIT (set, reg))
946 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
948 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
950 /* Restore RAMPZ using tmp reg as scratch. */
951 if(AVR_HAVE_RAMPZ
952 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
954 emit_insn (gen_popqi (tmp_reg_rtx));
955 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
956 tmp_reg_rtx);
959 /* Restore SREG using tmp reg as scratch. */
960 emit_insn (gen_popqi (tmp_reg_rtx));
962 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
963 tmp_reg_rtx);
965 /* Restore tmp REG. */
966 emit_insn (gen_popqi (tmp_reg_rtx));
968 /* Restore zero REG. */
969 emit_insn (gen_popqi (zero_reg_rtx));
972 emit_jump_insn (gen_return ());
976 /* Output summary messages at beginning of function epilogue. */
978 static void
979 avr_asm_function_begin_epilogue (FILE *file)
981 fprintf (file, "/* epilogue start */\n");
984 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
985 machine for a memory operand of mode MODE. */
987 bool
988 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
990 enum reg_class r = NO_REGS;
992 if (TARGET_ALL_DEBUG)
994 fprintf (stderr, "mode: (%s) %s %s %s %s:",
995 GET_MODE_NAME(mode),
996 strict ? "(strict)": "",
997 reload_completed ? "(reload_completed)": "",
998 reload_in_progress ? "(reload_in_progress)": "",
999 reg_renumber ? "(reg_renumber)" : "");
1000 if (GET_CODE (x) == PLUS
1001 && REG_P (XEXP (x, 0))
1002 && GET_CODE (XEXP (x, 1)) == CONST_INT
1003 && INTVAL (XEXP (x, 1)) >= 0
1004 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1005 && reg_renumber
1007 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1008 true_regnum (XEXP (x, 0)));
1009 debug_rtx (x);
1011 if (!strict && GET_CODE (x) == SUBREG)
1012 x = SUBREG_REG (x);
1013 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1014 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1015 r = POINTER_REGS;
1016 else if (CONSTANT_ADDRESS_P (x))
1017 r = ALL_REGS;
1018 else if (GET_CODE (x) == PLUS
1019 && REG_P (XEXP (x, 0))
1020 && GET_CODE (XEXP (x, 1)) == CONST_INT
1021 && INTVAL (XEXP (x, 1)) >= 0)
1023 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1024 if (fit)
1026 if (! strict
1027 || REGNO (XEXP (x,0)) == REG_X
1028 || REGNO (XEXP (x,0)) == REG_Y
1029 || REGNO (XEXP (x,0)) == REG_Z)
1030 r = BASE_POINTER_REGS;
1031 if (XEXP (x,0) == frame_pointer_rtx
1032 || XEXP (x,0) == arg_pointer_rtx)
1033 r = BASE_POINTER_REGS;
1035 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1036 r = POINTER_Y_REGS;
1038 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1039 && REG_P (XEXP (x, 0))
1040 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1041 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1043 r = POINTER_REGS;
1045 if (TARGET_ALL_DEBUG)
1047 fprintf (stderr, " ret = %c\n", r + '0');
1049 return r == NO_REGS ? 0 : (int)r;
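/* Informally (a reading of the checks above, not a spec): a bare pointer
   register, a constant address, reg+const with a non-negative constant
   that fits MAX_LD_OFFSET (or any such offset from the frame pointer
   when one is in use), and pre-decrement/post-increment of a pointer
   register are accepted; everything else must be legitimized or
   reloaded.  */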
1052 /* Attempts to replace X with a valid
1053 memory address for an operand of mode MODE */
1056 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1058 x = oldx;
1059 if (TARGET_ALL_DEBUG)
1061 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1062 debug_rtx (oldx);
1065 if (GET_CODE (oldx) == PLUS
1066 && REG_P (XEXP (oldx,0)))
1068 if (REG_P (XEXP (oldx,1)))
1069 x = force_reg (GET_MODE (oldx), oldx);
1070 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1072 int offs = INTVAL (XEXP (oldx,1));
1073 if (frame_pointer_rtx != XEXP (oldx,0))
1074 if (offs > MAX_LD_OFFSET (mode))
1076 if (TARGET_ALL_DEBUG)
1077 fprintf (stderr, "force_reg (big offset)\n");
1078 x = force_reg (GET_MODE (oldx), oldx);
1082 return x;
1086 /* Return a pointer register name as a string. */
1088 static const char *
1089 ptrreg_to_str (int regno)
1091 switch (regno)
1093 case REG_X: return "X";
1094 case REG_Y: return "Y";
1095 case REG_Z: return "Z";
1096 default:
1097 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1099 return NULL;
1102 /* Return the condition name as a string.
 1103 Used when constructing conditional jumps.  */
1105 static const char *
1106 cond_string (enum rtx_code code)
1108 switch (code)
1110 case NE:
1111 return "ne";
1112 case EQ:
1113 return "eq";
1114 case GE:
1115 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1116 return "pl";
1117 else
1118 return "ge";
1119 case LT:
1120 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1121 return "mi";
1122 else
1123 return "lt";
1124 case GEU:
1125 return "sh";
1126 case LTU:
1127 return "lo";
1128 default:
1129 gcc_unreachable ();
1133 /* Output ADDR to FILE as address. */
1135 void
1136 print_operand_address (FILE *file, rtx addr)
1138 switch (GET_CODE (addr))
1140 case REG:
1141 fprintf (file, ptrreg_to_str (REGNO (addr)));
1142 break;
1144 case PRE_DEC:
1145 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1146 break;
1148 case POST_INC:
1149 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1150 break;
1152 default:
1153 if (CONSTANT_ADDRESS_P (addr)
1154 && text_segment_operand (addr, VOIDmode))
1156 rtx x = XEXP (addr,0);
1157 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
 1159 /* The assembler's gs() operator emits a word address.  Make the
 1160 offset a byte offset inside gs() for the assembler.  This is
 1161 needed because the more logical (constant + gs(sym)) is not
 1162 accepted by gas.  For devices with 128K or less this is OK.  For
 1163 larger devices it will create a trampoline offset from the symbol,
 1164 which may not be what the user really wanted. */
1165 fprintf (file, "gs(");
1166 output_addr_const (file, XEXP (x,0));
1167 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1168 if (AVR_3_BYTE_PC)
 1169 if (warning ( 0, "Pointer offset from symbol may be incorrect."))
1171 output_addr_const (stderr, addr);
1172 fprintf(stderr,"\n");
1175 else
1177 fprintf (file, "gs(");
1178 output_addr_const (file, addr);
1179 fprintf (file, ")");
1182 else
1183 output_addr_const (file, addr);
1188 /* Output X as assembler operand to file FILE. */
1190 void
1191 print_operand (FILE *file, rtx x, int code)
1193 int abcd = 0;
1195 if (code >= 'A' && code <= 'D')
1196 abcd = code - 'A';
1198 if (code == '~')
1200 if (!AVR_HAVE_JMP_CALL)
1201 fputc ('r', file);
1203 else if (code == '!')
1205 if (AVR_HAVE_EIJMP_EICALL)
1206 fputc ('e', file);
1208 else if (REG_P (x))
1210 if (x == zero_reg_rtx)
1211 fprintf (file, "__zero_reg__");
1212 else
1213 fprintf (file, reg_names[true_regnum (x) + abcd]);
1215 else if (GET_CODE (x) == CONST_INT)
1216 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1217 else if (GET_CODE (x) == MEM)
1219 rtx addr = XEXP (x,0);
1220 if (code == 'm')
1222 if (!CONSTANT_P (addr))
1223 fatal_insn ("bad address, not a constant):", addr);
1224 /* Assembler template with m-code is data - not progmem section */
1225 if (text_segment_operand (addr, VOIDmode))
1226 if (warning ( 0, "accessing data memory with program memory address"))
1228 output_addr_const (stderr, addr);
1229 fprintf(stderr,"\n");
1231 output_addr_const (file, addr);
1233 else if (code == 'o')
1235 if (GET_CODE (addr) != PLUS)
1236 fatal_insn ("bad address, not (reg+disp):", addr);
1238 print_operand (file, XEXP (addr, 1), 0);
1240 else if (code == 'p' || code == 'r')
1242 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1243 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1245 if (code == 'p')
1246 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1247 else
1248 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1250 else if (GET_CODE (addr) == PLUS)
1252 print_operand_address (file, XEXP (addr,0));
1253 if (REGNO (XEXP (addr, 0)) == REG_X)
1254 fatal_insn ("internal compiler error. Bad address:"
1255 ,addr);
1256 fputc ('+', file);
1257 print_operand (file, XEXP (addr,1), code);
1259 else
1260 print_operand_address (file, addr);
1262 else if (code == 'x')
 1264 /* Constant progmem address - as used in jmp or call. */
1265 if (0 == text_segment_operand (x, VOIDmode))
1266 if (warning ( 0, "accessing program memory with data memory address"))
1268 output_addr_const (stderr, x);
1269 fprintf(stderr,"\n");
 1271 /* Use a normal symbol for a direct address; no linker trampoline is needed. */
1272 output_addr_const (file, x);
1274 else if (GET_CODE (x) == CONST_DOUBLE)
1276 long val;
1277 REAL_VALUE_TYPE rv;
1278 if (GET_MODE (x) != SFmode)
1279 fatal_insn ("internal compiler error. Unknown mode:", x);
1280 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1281 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1282 fprintf (file, "0x%lx", val);
1284 else if (code == 'j')
1285 fputs (cond_string (GET_CODE (x)), file);
1286 else if (code == 'k')
1287 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1288 else
1289 print_operand_address (file, x);
1292 /* Update the condition code in the INSN. */
1294 void
1295 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1297 rtx set;
1299 switch (get_attr_cc (insn))
1301 case CC_NONE:
1302 /* Insn does not affect CC at all. */
1303 break;
1305 case CC_SET_N:
1306 CC_STATUS_INIT;
1307 break;
1309 case CC_SET_ZN:
1310 set = single_set (insn);
1311 CC_STATUS_INIT;
1312 if (set)
1314 cc_status.flags |= CC_NO_OVERFLOW;
1315 cc_status.value1 = SET_DEST (set);
1317 break;
1319 case CC_SET_CZN:
1320 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1321 The V flag may or may not be known but that's ok because
1322 alter_cond will change tests to use EQ/NE. */
1323 set = single_set (insn);
1324 CC_STATUS_INIT;
1325 if (set)
1327 cc_status.value1 = SET_DEST (set);
1328 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1330 break;
1332 case CC_COMPARE:
1333 set = single_set (insn);
1334 CC_STATUS_INIT;
1335 if (set)
1336 cc_status.value1 = SET_SRC (set);
1337 break;
1339 case CC_CLOBBER:
1340 /* Insn doesn't leave CC in a usable state. */
1341 CC_STATUS_INIT;
 1343 /* Correct CC for ashrqi3 when the shift count is a CONST_INT other than 6. */
1344 set = single_set (insn);
1345 if (set)
1347 rtx src = SET_SRC (set);
1349 if (GET_CODE (src) == ASHIFTRT
1350 && GET_MODE (src) == QImode)
1352 rtx x = XEXP (src, 1);
1354 if (GET_CODE (x) == CONST_INT
1355 && INTVAL (x) > 0
1356 && INTVAL (x) != 6)
1358 cc_status.value1 = SET_DEST (set);
1359 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1363 break;
1367 /* Return maximum number of consecutive registers of
1368 class CLASS needed to hold a value of mode MODE. */
 1370 int
 1371 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1373 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1376 /* Choose mode for jump insn:
1377 1 - relative jump in range -63 <= x <= 62 ;
1378 2 - relative jump in range -2046 <= x <= 2045 ;
1379 3 - absolute jump (only for ATmega[16]03). */
 1381 int
 1382 avr_jump_mode (rtx x, rtx insn)
1384 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1385 ? XEXP (x, 0) : x));
1386 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1387 int jump_distance = cur_addr - dest_addr;
1389 if (-63 <= jump_distance && jump_distance <= 62)
1390 return 1;
1391 else if (-2046 <= jump_distance && jump_distance <= 2045)
1392 return 2;
1393 else if (AVR_HAVE_JMP_CALL)
1394 return 3;
1396 return 2;
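/* Example (derived from the ranges above): a distance of 40 gives
   mode 1 (short branch), 1000 gives mode 2 (rjmp), and 3000 gives
   mode 3 on devices with jmp/call, falling back to mode 2 otherwise.  */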
 1399 /* Return an AVR conditional branch command.
 1400    X is a comparison RTX.
 1401    LEN is a number returned by the avr_jump_mode function.
 1402    If REVERSE is nonzero then the condition code in X must be reversed. */
1404 const char *
1405 ret_cond_branch (rtx x, int len, int reverse)
1407 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1409 switch (cond)
1411 case GT:
1412 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1413 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1414 AS1 (brpl,%0)) :
1415 len == 2 ? (AS1 (breq,.+4) CR_TAB
1416 AS1 (brmi,.+2) CR_TAB
1417 AS1 (rjmp,%0)) :
1418 (AS1 (breq,.+6) CR_TAB
1419 AS1 (brmi,.+4) CR_TAB
1420 AS1 (jmp,%0)));
1422 else
1423 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1424 AS1 (brge,%0)) :
1425 len == 2 ? (AS1 (breq,.+4) CR_TAB
1426 AS1 (brlt,.+2) CR_TAB
1427 AS1 (rjmp,%0)) :
1428 (AS1 (breq,.+6) CR_TAB
1429 AS1 (brlt,.+4) CR_TAB
1430 AS1 (jmp,%0)));
1431 case GTU:
1432 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1433 AS1 (brsh,%0)) :
1434 len == 2 ? (AS1 (breq,.+4) CR_TAB
1435 AS1 (brlo,.+2) CR_TAB
1436 AS1 (rjmp,%0)) :
1437 (AS1 (breq,.+6) CR_TAB
1438 AS1 (brlo,.+4) CR_TAB
1439 AS1 (jmp,%0)));
1440 case LE:
1441 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1442 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1443 AS1 (brmi,%0)) :
1444 len == 2 ? (AS1 (breq,.+2) CR_TAB
1445 AS1 (brpl,.+2) CR_TAB
1446 AS1 (rjmp,%0)) :
1447 (AS1 (breq,.+2) CR_TAB
1448 AS1 (brpl,.+4) CR_TAB
1449 AS1 (jmp,%0)));
1450 else
1451 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1452 AS1 (brlt,%0)) :
1453 len == 2 ? (AS1 (breq,.+2) CR_TAB
1454 AS1 (brge,.+2) CR_TAB
1455 AS1 (rjmp,%0)) :
1456 (AS1 (breq,.+2) CR_TAB
1457 AS1 (brge,.+4) CR_TAB
1458 AS1 (jmp,%0)));
1459 case LEU:
1460 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1461 AS1 (brlo,%0)) :
1462 len == 2 ? (AS1 (breq,.+2) CR_TAB
1463 AS1 (brsh,.+2) CR_TAB
1464 AS1 (rjmp,%0)) :
1465 (AS1 (breq,.+2) CR_TAB
1466 AS1 (brsh,.+4) CR_TAB
1467 AS1 (jmp,%0)));
1468 default:
1469 if (reverse)
1471 switch (len)
1473 case 1:
1474 return AS1 (br%k1,%0);
1475 case 2:
1476 return (AS1 (br%j1,.+2) CR_TAB
1477 AS1 (rjmp,%0));
1478 default:
1479 return (AS1 (br%j1,.+4) CR_TAB
1480 AS1 (jmp,%0));
1483 else
1485 switch (len)
1487 case 1:
1488 return AS1 (br%j1,%0);
1489 case 2:
1490 return (AS1 (br%k1,.+2) CR_TAB
1491 AS1 (rjmp,%0));
1492 default:
1493 return (AS1 (br%k1,.+4) CR_TAB
1494 AS1 (jmp,%0));
1498 return "";
 1501 /* Predicate for an immediate operand which fits in a byte (8 bits). */
 1503 int
 1504 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1506 return (GET_CODE (op) == CONST_INT
1507 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1510 /* Output insn cost for next insn. */
1512 void
1513 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1514 int num_operands ATTRIBUTE_UNUSED)
1516 if (TARGET_ALL_DEBUG)
1518 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1519 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1523 /* Return 0 if undefined, 1 if always true or always false. */
 1525 int
 1526 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1528 unsigned int max = (mode == QImode ? 0xff :
1529 mode == HImode ? 0xffff :
1530 mode == SImode ? 0xffffffff : 0);
1531 if (max && op && GET_CODE (x) == CONST_INT)
1533 if (unsigned_condition (op) != op)
1534 max >>= 1;
1536 if (max != (INTVAL (x) & max)
1537 && INTVAL (x) != 0xff)
1538 return 1;
1540 return 0;
1544 /* Returns nonzero if REGNO is the number of a hard
1545 register in which function arguments are sometimes passed. */
 1547 int
 1548 function_arg_regno_p(int r)
1550 return (r >= 8 && r <= 25);
 1553 /* Initialize the variable CUM for the state at the beginning
1554 of the argument list. */
1556 void
1557 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1558 tree fndecl ATTRIBUTE_UNUSED)
1560 cum->nregs = 18;
1561 cum->regno = FIRST_CUM_REG;
1562 if (!libname && stdarg_p (fntype))
1563 cum->nregs = 0;
1566 /* Returns the number of registers to allocate for a function argument. */
1568 static int
1569 avr_num_arg_regs (enum machine_mode mode, tree type)
1571 int size;
1573 if (mode == BLKmode)
1574 size = int_size_in_bytes (type);
1575 else
1576 size = GET_MODE_SIZE (mode);
1578 /* Align all function arguments to start in even-numbered registers.
1579 Odd-sized arguments leave holes above them. */
1581 return (size + 1) & ~1;
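/* For illustration: a 1-byte QImode argument is rounded up to 2, a
   2-byte HImode argument stays at 2, and a 3-byte BLKmode argument is
   rounded up to 4 - which is what makes every argument start in an
   even-numbered register.  */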
1584 /* Controls whether a function argument is passed
1585 in a register, and which register. */
1587 static rtx
1588 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1589 const_tree type, bool named ATTRIBUTE_UNUSED)
1591 int bytes = avr_num_arg_regs (mode, type);
1593 if (cum->nregs && bytes <= cum->nregs)
1594 return gen_rtx_REG (mode, cum->regno - bytes);
1596 return NULL_RTX;
1599 /* Update the summarizer variable CUM to advance past an argument
1600 in the argument list. */
1602 static void
1603 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1604 const_tree type, bool named ATTRIBUTE_UNUSED)
1606 int bytes = avr_num_arg_regs (mode, type);
1608 cum->nregs -= bytes;
1609 cum->regno -= bytes;
1611 if (cum->nregs <= 0)
1613 cum->nregs = 0;
1614 cum->regno = FIRST_CUM_REG;
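/* Illustration (a reading of the two hooks above; the prototype is
   hypothetical):

       int f (int a, long b);

   the 2-byte 'a' is passed in r24/r25 (regno 26 - 2), the 4-byte 'b'
   in r20..r23 (regno 24 - 4), and cum->nregs drops from 18 to 12, so
   further arguments keep walking down towards r8 until the register
   area is exhausted.  */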
1618 /***********************************************************************
 1619 Functions for outputting various mov's for various modes
1620 ************************************************************************/
1621 const char *
1622 output_movqi (rtx insn, rtx operands[], int *l)
1624 int dummy;
1625 rtx dest = operands[0];
1626 rtx src = operands[1];
1627 int *real_l = l;
1629 if (!l)
1630 l = &dummy;
1632 *l = 1;
1634 if (register_operand (dest, QImode))
1636 if (register_operand (src, QImode)) /* mov r,r */
1638 if (test_hard_reg_class (STACK_REG, dest))
1639 return AS2 (out,%0,%1);
1640 else if (test_hard_reg_class (STACK_REG, src))
1641 return AS2 (in,%0,%1);
1643 return AS2 (mov,%0,%1);
1645 else if (CONSTANT_P (src))
1647 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1648 return AS2 (ldi,%0,lo8(%1));
1650 if (GET_CODE (src) == CONST_INT)
1652 if (src == const0_rtx) /* mov r,L */
1653 return AS1 (clr,%0);
1654 else if (src == const1_rtx)
1656 *l = 2;
1657 return (AS1 (clr,%0) CR_TAB
1658 AS1 (inc,%0));
1660 else if (src == constm1_rtx)
 1662 /* Load the immediate constant -1 into any register. */
1663 *l = 2;
1664 return (AS1 (clr,%0) CR_TAB
1665 AS1 (dec,%0));
1667 else
1669 int bit_nr = exact_log2 (INTVAL (src));
1671 if (bit_nr >= 0)
1673 *l = 3;
1674 if (!real_l)
1675 output_asm_insn ((AS1 (clr,%0) CR_TAB
1676 "set"), operands);
1677 if (!real_l)
1678 avr_output_bld (operands, bit_nr);
1680 return "";
1685 /* Last resort, larger than loading from memory. */
1686 *l = 4;
1687 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1688 AS2 (ldi,r31,lo8(%1)) CR_TAB
1689 AS2 (mov,%0,r31) CR_TAB
1690 AS2 (mov,r31,__tmp_reg__));
1692 else if (GET_CODE (src) == MEM)
1693 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1695 else if (GET_CODE (dest) == MEM)
1697 const char *templ;
1699 if (src == const0_rtx)
1700 operands[1] = zero_reg_rtx;
1702 templ = out_movqi_mr_r (insn, operands, real_l);
1704 if (!real_l)
1705 output_asm_insn (templ, operands);
1707 operands[1] = src;
1709 return "";
1713 const char *
1714 output_movhi (rtx insn, rtx operands[], int *l)
1716 int dummy;
1717 rtx dest = operands[0];
1718 rtx src = operands[1];
1719 int *real_l = l;
1721 if (!l)
1722 l = &dummy;
1724 if (register_operand (dest, HImode))
1726 if (register_operand (src, HImode)) /* mov r,r */
1728 if (test_hard_reg_class (STACK_REG, dest))
1730 if (AVR_HAVE_8BIT_SP)
1731 return *l = 1, AS2 (out,__SP_L__,%A1);
1732 /* Use simple load of stack pointer if no interrupts are
1733 used. */
1734 else if (TARGET_NO_INTERRUPTS)
1735 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1736 AS2 (out,__SP_L__,%A1));
1737 *l = 5;
1738 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1739 "cli" CR_TAB
1740 AS2 (out,__SP_H__,%B1) CR_TAB
1741 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1742 AS2 (out,__SP_L__,%A1));
1744 else if (test_hard_reg_class (STACK_REG, src))
1746 *l = 2;
1747 return (AS2 (in,%A0,__SP_L__) CR_TAB
1748 AS2 (in,%B0,__SP_H__));
1751 if (AVR_HAVE_MOVW)
1753 *l = 1;
1754 return (AS2 (movw,%0,%1));
1756 else
1758 *l = 2;
1759 return (AS2 (mov,%A0,%A1) CR_TAB
1760 AS2 (mov,%B0,%B1));
1763 else if (CONSTANT_P (src))
1765 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1767 *l = 2;
1768 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1769 AS2 (ldi,%B0,hi8(%1)));
1772 if (GET_CODE (src) == CONST_INT)
1774 if (src == const0_rtx) /* mov r,L */
1776 *l = 2;
1777 return (AS1 (clr,%A0) CR_TAB
1778 AS1 (clr,%B0));
1780 else if (src == const1_rtx)
1782 *l = 3;
1783 return (AS1 (clr,%A0) CR_TAB
1784 AS1 (clr,%B0) CR_TAB
1785 AS1 (inc,%A0));
1787 else if (src == constm1_rtx)
 1789 /* Load the immediate constant -1 into any register. */
1790 *l = 3;
1791 return (AS1 (clr,%0) CR_TAB
1792 AS1 (dec,%A0) CR_TAB
1793 AS2 (mov,%B0,%A0));
1795 else
1797 int bit_nr = exact_log2 (INTVAL (src));
1799 if (bit_nr >= 0)
1801 *l = 4;
1802 if (!real_l)
1803 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1804 AS1 (clr,%B0) CR_TAB
1805 "set"), operands);
1806 if (!real_l)
1807 avr_output_bld (operands, bit_nr);
1809 return "";
1813 if ((INTVAL (src) & 0xff) == 0)
1815 *l = 5;
1816 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1817 AS1 (clr,%A0) CR_TAB
1818 AS2 (ldi,r31,hi8(%1)) CR_TAB
1819 AS2 (mov,%B0,r31) CR_TAB
1820 AS2 (mov,r31,__tmp_reg__));
1822 else if ((INTVAL (src) & 0xff00) == 0)
1824 *l = 5;
1825 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1826 AS2 (ldi,r31,lo8(%1)) CR_TAB
1827 AS2 (mov,%A0,r31) CR_TAB
1828 AS1 (clr,%B0) CR_TAB
1829 AS2 (mov,r31,__tmp_reg__));
1833 /* Last resort, equal to loading from memory. */
1834 *l = 6;
1835 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1836 AS2 (ldi,r31,lo8(%1)) CR_TAB
1837 AS2 (mov,%A0,r31) CR_TAB
1838 AS2 (ldi,r31,hi8(%1)) CR_TAB
1839 AS2 (mov,%B0,r31) CR_TAB
1840 AS2 (mov,r31,__tmp_reg__));
1842 else if (GET_CODE (src) == MEM)
1843 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1845 else if (GET_CODE (dest) == MEM)
1847 const char *templ;
1849 if (src == const0_rtx)
1850 operands[1] = zero_reg_rtx;
1852 templ = out_movhi_mr_r (insn, operands, real_l);
1854 if (!real_l)
1855 output_asm_insn (templ, operands);
1857 operands[1] = src;
1858 return "";
1860 fatal_insn ("invalid insn:", insn);
1861 return "";
1864 const char *
1865 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1867 rtx dest = op[0];
1868 rtx src = op[1];
1869 rtx x = XEXP (src, 0);
1870 int dummy;
1872 if (!l)
1873 l = &dummy;
1875 if (CONSTANT_ADDRESS_P (x))
1877 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1879 *l = 1;
1880 return AS2 (in,%0,__SREG__);
1882 if (optimize > 0 && io_address_operand (x, QImode))
1884 *l = 1;
1885 return AS2 (in,%0,%m1-0x20);
1887 *l = 2;
1888 return AS2 (lds,%0,%m1);
1890 /* memory access by reg+disp */
1891 else if (GET_CODE (x) == PLUS
1892 && REG_P (XEXP (x,0))
1893 && GET_CODE (XEXP (x,1)) == CONST_INT)
1895 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1897 int disp = INTVAL (XEXP (x,1));
1898 if (REGNO (XEXP (x,0)) != REG_Y)
1899 fatal_insn ("incorrect insn:",insn);
1901 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1902 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1903 AS2 (ldd,%0,Y+63) CR_TAB
1904 AS2 (sbiw,r28,%o1-63));
1906 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1907 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1908 AS2 (ld,%0,Y) CR_TAB
1909 AS2 (subi,r28,lo8(%o1)) CR_TAB
1910 AS2 (sbci,r29,hi8(%o1)));
1912 else if (REGNO (XEXP (x,0)) == REG_X)
 1914 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
 1915    it, but this situation can occur with extreme optimizing options. */
1916 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1917 || reg_unused_after (insn, XEXP (x,0)))
1918 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1919 AS2 (ld,%0,X));
1921 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1922 AS2 (ld,%0,X) CR_TAB
1923 AS2 (sbiw,r26,%o1));
1925 *l = 1;
1926 return AS2 (ldd,%0,%1);
1928 *l = 1;
1929 return AS2 (ld,%0,%1);
1932 const char *
1933 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1935 rtx dest = op[0];
1936 rtx src = op[1];
1937 rtx base = XEXP (src, 0);
1938 int reg_dest = true_regnum (dest);
1939 int reg_base = true_regnum (base);
1940 /* "volatile" forces reading low byte first, even if less efficient,
1941 for correct operation with 16-bit I/O registers. */
1942 int mem_volatile_p = MEM_VOLATILE_P (src);
1943 int tmp;
1945 if (!l)
1946 l = &tmp;
1948 if (reg_base > 0)
1950 if (reg_dest == reg_base) /* R = (R) */
1952 *l = 3;
1953 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1954 AS2 (ld,%B0,%1) CR_TAB
1955 AS2 (mov,%A0,__tmp_reg__));
1957 else if (reg_base == REG_X) /* (R26) */
1959 if (reg_unused_after (insn, base))
1961 *l = 2;
1962 return (AS2 (ld,%A0,X+) CR_TAB
1963 AS2 (ld,%B0,X));
1965 *l = 3;
1966 return (AS2 (ld,%A0,X+) CR_TAB
1967 AS2 (ld,%B0,X) CR_TAB
1968 AS2 (sbiw,r26,1));
1970 else /* (R) */
1972 *l = 2;
1973 return (AS2 (ld,%A0,%1) CR_TAB
1974 AS2 (ldd,%B0,%1+1));
1977 else if (GET_CODE (base) == PLUS) /* (R + i) */
1979 int disp = INTVAL (XEXP (base, 1));
1980 int reg_base = true_regnum (XEXP (base, 0));
1982 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1984 if (REGNO (XEXP (base, 0)) != REG_Y)
1985 fatal_insn ("incorrect insn:",insn);
1987 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1988 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1989 AS2 (ldd,%A0,Y+62) CR_TAB
1990 AS2 (ldd,%B0,Y+63) CR_TAB
1991 AS2 (sbiw,r28,%o1-62));
1993 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1994 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1995 AS2 (ld,%A0,Y) CR_TAB
1996 AS2 (ldd,%B0,Y+1) CR_TAB
1997 AS2 (subi,r28,lo8(%o1)) CR_TAB
1998 AS2 (sbci,r29,hi8(%o1)));
2000 if (reg_base == REG_X)
 2002 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
 2003    it, but this situation can occur with extreme
 2004    optimization options. */
2006 *l = 4;
2007 if (reg_base == reg_dest)
2008 return (AS2 (adiw,r26,%o1) CR_TAB
2009 AS2 (ld,__tmp_reg__,X+) CR_TAB
2010 AS2 (ld,%B0,X) CR_TAB
2011 AS2 (mov,%A0,__tmp_reg__));
2013 return (AS2 (adiw,r26,%o1) CR_TAB
2014 AS2 (ld,%A0,X+) CR_TAB
2015 AS2 (ld,%B0,X) CR_TAB
2016 AS2 (sbiw,r26,%o1+1));
2019 if (reg_base == reg_dest)
2021 *l = 3;
2022 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2023 AS2 (ldd,%B0,%B1) CR_TAB
2024 AS2 (mov,%A0,__tmp_reg__));
2027 *l = 2;
2028 return (AS2 (ldd,%A0,%A1) CR_TAB
2029 AS2 (ldd,%B0,%B1));
2031 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2033 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2034 fatal_insn ("incorrect insn:", insn);
2036 if (mem_volatile_p)
2038 if (REGNO (XEXP (base, 0)) == REG_X)
2040 *l = 4;
2041 return (AS2 (sbiw,r26,2) CR_TAB
2042 AS2 (ld,%A0,X+) CR_TAB
2043 AS2 (ld,%B0,X) CR_TAB
2044 AS2 (sbiw,r26,1));
2046 else
2048 *l = 3;
2049 return (AS2 (sbiw,%r1,2) CR_TAB
2050 AS2 (ld,%A0,%p1) CR_TAB
2051 AS2 (ldd,%B0,%p1+1));
2055 *l = 2;
2056 return (AS2 (ld,%B0,%1) CR_TAB
2057 AS2 (ld,%A0,%1));
2059 else if (GET_CODE (base) == POST_INC) /* (R++) */
2061 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2062 fatal_insn ("incorrect insn:", insn);
2064 *l = 2;
2065 return (AS2 (ld,%A0,%1) CR_TAB
2066 AS2 (ld,%B0,%1));
2068 else if (CONSTANT_ADDRESS_P (base))
2070 if (optimize > 0 && io_address_operand (base, HImode))
2072 *l = 2;
2073 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2074 AS2 (in,%B0,%m1+1-0x20));
2076 *l = 4;
2077 return (AS2 (lds,%A0,%m1) CR_TAB
2078 AS2 (lds,%B0,%m1+1));
2081 fatal_insn ("unknown move insn:",insn);
2082 return "";
2085 const char *
2086 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2088 rtx dest = op[0];
2089 rtx src = op[1];
2090 rtx base = XEXP (src, 0);
2091 int reg_dest = true_regnum (dest);
2092 int reg_base = true_regnum (base);
2093 int tmp;
2095 if (!l)
2096 l = &tmp;
2098 if (reg_base > 0)
2100 if (reg_base == REG_X) /* (R26) */
2102 if (reg_dest == REG_X)
2103 /* "ld r26,-X" is undefined */
2104 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2105 AS2 (ld,r29,X) CR_TAB
2106 AS2 (ld,r28,-X) CR_TAB
2107 AS2 (ld,__tmp_reg__,-X) CR_TAB
2108 AS2 (sbiw,r26,1) CR_TAB
2109 AS2 (ld,r26,X) CR_TAB
2110 AS2 (mov,r27,__tmp_reg__));
2111 else if (reg_dest == REG_X - 2)
2112 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2113 AS2 (ld,%B0,X+) CR_TAB
2114 AS2 (ld,__tmp_reg__,X+) CR_TAB
2115 AS2 (ld,%D0,X) CR_TAB
2116 AS2 (mov,%C0,__tmp_reg__));
2117 else if (reg_unused_after (insn, base))
2118 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2119 AS2 (ld,%B0,X+) CR_TAB
2120 AS2 (ld,%C0,X+) CR_TAB
2121 AS2 (ld,%D0,X));
2122 else
2123 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2124 AS2 (ld,%B0,X+) CR_TAB
2125 AS2 (ld,%C0,X+) CR_TAB
2126 AS2 (ld,%D0,X) CR_TAB
2127 AS2 (sbiw,r26,3));
2129 else
2131 if (reg_dest == reg_base)
2132 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2133 AS2 (ldd,%C0,%1+2) CR_TAB
2134 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2135 AS2 (ld,%A0,%1) CR_TAB
2136 AS2 (mov,%B0,__tmp_reg__));
2137 else if (reg_base == reg_dest + 2)
2138 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2139 AS2 (ldd,%B0,%1+1) CR_TAB
2140 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2141 AS2 (ldd,%D0,%1+3) CR_TAB
2142 AS2 (mov,%C0,__tmp_reg__));
2143 else
2144 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2145 AS2 (ldd,%B0,%1+1) CR_TAB
2146 AS2 (ldd,%C0,%1+2) CR_TAB
2147 AS2 (ldd,%D0,%1+3));
2150 else if (GET_CODE (base) == PLUS) /* (R + i) */
2152 int disp = INTVAL (XEXP (base, 1));
2154 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2156 if (REGNO (XEXP (base, 0)) != REG_Y)
2157 fatal_insn ("incorrect insn:",insn);
2159 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2160 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2161 AS2 (ldd,%A0,Y+60) CR_TAB
2162 AS2 (ldd,%B0,Y+61) CR_TAB
2163 AS2 (ldd,%C0,Y+62) CR_TAB
2164 AS2 (ldd,%D0,Y+63) CR_TAB
2165 AS2 (sbiw,r28,%o1-60));
2167 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2168 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2169 AS2 (ld,%A0,Y) CR_TAB
2170 AS2 (ldd,%B0,Y+1) CR_TAB
2171 AS2 (ldd,%C0,Y+2) CR_TAB
2172 AS2 (ldd,%D0,Y+3) CR_TAB
2173 AS2 (subi,r28,lo8(%o1)) CR_TAB
2174 AS2 (sbci,r29,hi8(%o1)));
2177 reg_base = true_regnum (XEXP (base, 0));
2178 if (reg_base == REG_X)
2180 /* R = (X + d) */
2181 if (reg_dest == REG_X)
2183 *l = 7;
2184 /* "ld r26,-X" is undefined */
2185 return (AS2 (adiw,r26,%o1+3) CR_TAB
2186 AS2 (ld,r29,X) CR_TAB
2187 AS2 (ld,r28,-X) CR_TAB
2188 AS2 (ld,__tmp_reg__,-X) CR_TAB
2189 AS2 (sbiw,r26,1) CR_TAB
2190 AS2 (ld,r26,X) CR_TAB
2191 AS2 (mov,r27,__tmp_reg__));
2193 *l = 6;
2194 if (reg_dest == REG_X - 2)
2195 return (AS2 (adiw,r26,%o1) CR_TAB
2196 AS2 (ld,r24,X+) CR_TAB
2197 AS2 (ld,r25,X+) CR_TAB
2198 AS2 (ld,__tmp_reg__,X+) CR_TAB
2199 AS2 (ld,r27,X) CR_TAB
2200 AS2 (mov,r26,__tmp_reg__));
2202 return (AS2 (adiw,r26,%o1) CR_TAB
2203 AS2 (ld,%A0,X+) CR_TAB
2204 AS2 (ld,%B0,X+) CR_TAB
2205 AS2 (ld,%C0,X+) CR_TAB
2206 AS2 (ld,%D0,X) CR_TAB
2207 AS2 (sbiw,r26,%o1+3));
2209 if (reg_dest == reg_base)
2210 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2211 AS2 (ldd,%C0,%C1) CR_TAB
2212 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2213 AS2 (ldd,%A0,%A1) CR_TAB
2214 AS2 (mov,%B0,__tmp_reg__));
2215 else if (reg_dest == reg_base - 2)
2216 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2217 AS2 (ldd,%B0,%B1) CR_TAB
2218 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2219 AS2 (ldd,%D0,%D1) CR_TAB
2220 AS2 (mov,%C0,__tmp_reg__));
2221 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2222 AS2 (ldd,%B0,%B1) CR_TAB
2223 AS2 (ldd,%C0,%C1) CR_TAB
2224 AS2 (ldd,%D0,%D1));
2226 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2227 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2228 AS2 (ld,%C0,%1) CR_TAB
2229 AS2 (ld,%B0,%1) CR_TAB
2230 AS2 (ld,%A0,%1));
2231 else if (GET_CODE (base) == POST_INC) /* (R++) */
2232 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2233 AS2 (ld,%B0,%1) CR_TAB
2234 AS2 (ld,%C0,%1) CR_TAB
2235 AS2 (ld,%D0,%1));
2236 else if (CONSTANT_ADDRESS_P (base))
2237 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2238 AS2 (lds,%B0,%m1+1) CR_TAB
2239 AS2 (lds,%C0,%m1+2) CR_TAB
2240 AS2 (lds,%D0,%m1+3));
2242 fatal_insn ("unknown move insn:",insn);
2243 return "";
2246 const char *
2247 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2249 rtx dest = op[0];
2250 rtx src = op[1];
2251 rtx base = XEXP (dest, 0);
2252 int reg_base = true_regnum (base);
2253 int reg_src = true_regnum (src);
2254 int tmp;
2256 if (!l)
2257 l = &tmp;
2259 if (CONSTANT_ADDRESS_P (base))
2260 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2261 AS2 (sts,%m0+1,%B1) CR_TAB
2262 AS2 (sts,%m0+2,%C1) CR_TAB
2263 AS2 (sts,%m0+3,%D1));
2264 if (reg_base > 0) /* (r) */
2266 if (reg_base == REG_X) /* (R26) */
2268 if (reg_src == REG_X)
2270 /* "st X+,r26" is undefined */
2271 if (reg_unused_after (insn, base))
2272 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2273 AS2 (st,X,r26) CR_TAB
2274 AS2 (adiw,r26,1) CR_TAB
2275 AS2 (st,X+,__tmp_reg__) CR_TAB
2276 AS2 (st,X+,r28) CR_TAB
2277 AS2 (st,X,r29));
2278 else
2279 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2280 AS2 (st,X,r26) CR_TAB
2281 AS2 (adiw,r26,1) CR_TAB
2282 AS2 (st,X+,__tmp_reg__) CR_TAB
2283 AS2 (st,X+,r28) CR_TAB
2284 AS2 (st,X,r29) CR_TAB
2285 AS2 (sbiw,r26,3));
2287 else if (reg_base == reg_src + 2)
2289 if (reg_unused_after (insn, base))
2290 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2291 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2292 AS2 (st,%0+,%A1) CR_TAB
2293 AS2 (st,%0+,%B1) CR_TAB
2294 AS2 (st,%0+,__zero_reg__) CR_TAB
2295 AS2 (st,%0,__tmp_reg__) CR_TAB
2296 AS1 (clr,__zero_reg__));
2297 else
2298 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2299 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2300 AS2 (st,%0+,%A1) CR_TAB
2301 AS2 (st,%0+,%B1) CR_TAB
2302 AS2 (st,%0+,__zero_reg__) CR_TAB
2303 AS2 (st,%0,__tmp_reg__) CR_TAB
2304 AS1 (clr,__zero_reg__) CR_TAB
2305 AS2 (sbiw,r26,3));
2307 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2308 AS2 (st,%0+,%B1) CR_TAB
2309 AS2 (st,%0+,%C1) CR_TAB
2310 AS2 (st,%0,%D1) CR_TAB
2311 AS2 (sbiw,r26,3));
2313 else
2314 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2315 AS2 (std,%0+1,%B1) CR_TAB
2316 AS2 (std,%0+2,%C1) CR_TAB
2317 AS2 (std,%0+3,%D1));
2319 else if (GET_CODE (base) == PLUS) /* (R + i) */
2321 int disp = INTVAL (XEXP (base, 1));
2322 reg_base = REGNO (XEXP (base, 0));
2323 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2325 if (reg_base != REG_Y)
2326 fatal_insn ("incorrect insn:",insn);
2328 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2329 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2330 AS2 (std,Y+60,%A1) CR_TAB
2331 AS2 (std,Y+61,%B1) CR_TAB
2332 AS2 (std,Y+62,%C1) CR_TAB
2333 AS2 (std,Y+63,%D1) CR_TAB
2334 AS2 (sbiw,r28,%o0-60));
2336 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2337 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2338 AS2 (st,Y,%A1) CR_TAB
2339 AS2 (std,Y+1,%B1) CR_TAB
2340 AS2 (std,Y+2,%C1) CR_TAB
2341 AS2 (std,Y+3,%D1) CR_TAB
2342 AS2 (subi,r28,lo8(%o0)) CR_TAB
2343 AS2 (sbci,r29,hi8(%o0)));
2345 if (reg_base == REG_X)
2347 /* (X + d) = R */
2348 if (reg_src == REG_X)
2350 *l = 9;
2351 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2352 AS2 (mov,__zero_reg__,r27) CR_TAB
2353 AS2 (adiw,r26,%o0) CR_TAB
2354 AS2 (st,X+,__tmp_reg__) CR_TAB
2355 AS2 (st,X+,__zero_reg__) CR_TAB
2356 AS2 (st,X+,r28) CR_TAB
2357 AS2 (st,X,r29) CR_TAB
2358 AS1 (clr,__zero_reg__) CR_TAB
2359 AS2 (sbiw,r26,%o0+3));
2361 else if (reg_src == REG_X - 2)
2363 *l = 9;
2364 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2365 AS2 (mov,__zero_reg__,r27) CR_TAB
2366 AS2 (adiw,r26,%o0) CR_TAB
2367 AS2 (st,X+,r24) CR_TAB
2368 AS2 (st,X+,r25) CR_TAB
2369 AS2 (st,X+,__tmp_reg__) CR_TAB
2370 AS2 (st,X,__zero_reg__) CR_TAB
2371 AS1 (clr,__zero_reg__) CR_TAB
2372 AS2 (sbiw,r26,%o0+3));
2374 *l = 6;
2375 return (AS2 (adiw,r26,%o0) CR_TAB
2376 AS2 (st,X+,%A1) CR_TAB
2377 AS2 (st,X+,%B1) CR_TAB
2378 AS2 (st,X+,%C1) CR_TAB
2379 AS2 (st,X,%D1) CR_TAB
2380 AS2 (sbiw,r26,%o0+3));
2382 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2383 AS2 (std,%B0,%B1) CR_TAB
2384 AS2 (std,%C0,%C1) CR_TAB
2385 AS2 (std,%D0,%D1));
2387 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2388 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2389 AS2 (st,%0,%C1) CR_TAB
2390 AS2 (st,%0,%B1) CR_TAB
2391 AS2 (st,%0,%A1));
2392 else if (GET_CODE (base) == POST_INC) /* (R++) */
2393 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2394 AS2 (st,%0,%B1) CR_TAB
2395 AS2 (st,%0,%C1) CR_TAB
2396 AS2 (st,%0,%D1));
2397 fatal_insn ("unknown move insn:",insn);
2398 return "";
2401 const char *
2402 output_movsisf(rtx insn, rtx operands[], int *l)
2404 int dummy;
2405 rtx dest = operands[0];
2406 rtx src = operands[1];
2407 int *real_l = l;
2409 if (!l)
2410 l = &dummy;
2412 if (register_operand (dest, VOIDmode))
2414 if (register_operand (src, VOIDmode)) /* mov r,r */
2416 if (true_regnum (dest) > true_regnum (src))
2418 if (AVR_HAVE_MOVW)
2420 *l = 2;
2421 return (AS2 (movw,%C0,%C1) CR_TAB
2422 AS2 (movw,%A0,%A1));
2424 *l = 4;
2425 return (AS2 (mov,%D0,%D1) CR_TAB
2426 AS2 (mov,%C0,%C1) CR_TAB
2427 AS2 (mov,%B0,%B1) CR_TAB
2428 AS2 (mov,%A0,%A1));
2430 else
2432 if (AVR_HAVE_MOVW)
2434 *l = 2;
2435 return (AS2 (movw,%A0,%A1) CR_TAB
2436 AS2 (movw,%C0,%C1));
2438 *l = 4;
2439 return (AS2 (mov,%A0,%A1) CR_TAB
2440 AS2 (mov,%B0,%B1) CR_TAB
2441 AS2 (mov,%C0,%C1) CR_TAB
2442 AS2 (mov,%D0,%D1));
2445 else if (CONSTANT_P (src))
2447 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2449 *l = 4;
2450 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2451 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2452 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2453 AS2 (ldi,%D0,hhi8(%1)));
2456 if (GET_CODE (src) == CONST_INT)
2458 const char *const clr_op0 =
2459 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2460 AS1 (clr,%B0) CR_TAB
2461 AS2 (movw,%C0,%A0))
2462 : (AS1 (clr,%A0) CR_TAB
2463 AS1 (clr,%B0) CR_TAB
2464 AS1 (clr,%C0) CR_TAB
2465 AS1 (clr,%D0));
2467 if (src == const0_rtx) /* mov r,L */
2469 *l = AVR_HAVE_MOVW ? 3 : 4;
2470 return clr_op0;
2472 else if (src == const1_rtx)
2474 if (!real_l)
2475 output_asm_insn (clr_op0, operands);
2476 *l = AVR_HAVE_MOVW ? 4 : 5;
2477 return AS1 (inc,%A0);
2479 else if (src == constm1_rtx)
2481 /* Immediate constant -1 into any register.  */
2482 if (AVR_HAVE_MOVW)
2484 *l = 4;
2485 return (AS1 (clr,%A0) CR_TAB
2486 AS1 (dec,%A0) CR_TAB
2487 AS2 (mov,%B0,%A0) CR_TAB
2488 AS2 (movw,%C0,%A0));
2490 *l = 5;
2491 return (AS1 (clr,%A0) CR_TAB
2492 AS1 (dec,%A0) CR_TAB
2493 AS2 (mov,%B0,%A0) CR_TAB
2494 AS2 (mov,%C0,%A0) CR_TAB
2495 AS2 (mov,%D0,%A0));
2497 else
2499 int bit_nr = exact_log2 (INTVAL (src));
2501 if (bit_nr >= 0)
2503 *l = AVR_HAVE_MOVW ? 5 : 6;
2504 if (!real_l)
2506 output_asm_insn (clr_op0, operands);
2507 output_asm_insn ("set", operands);
2509 if (!real_l)
2510 avr_output_bld (operands, bit_nr);
2512 return "";
2517 /* Last resort, better than loading from memory. */
2518 *l = 10;
2519 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2520 AS2 (ldi,r31,lo8(%1)) CR_TAB
2521 AS2 (mov,%A0,r31) CR_TAB
2522 AS2 (ldi,r31,hi8(%1)) CR_TAB
2523 AS2 (mov,%B0,r31) CR_TAB
2524 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2525 AS2 (mov,%C0,r31) CR_TAB
2526 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2527 AS2 (mov,%D0,r31) CR_TAB
2528 AS2 (mov,r31,__tmp_reg__));
2530 else if (GET_CODE (src) == MEM)
2531 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2533 else if (GET_CODE (dest) == MEM)
2535 const char *templ;
2537 if (src == const0_rtx)
2538 operands[1] = zero_reg_rtx;
2540 templ = out_movsi_mr_r (insn, operands, real_l);
2542 if (!real_l)
2543 output_asm_insn (templ, operands);
2545 operands[1] = src;
2546 return "";
2548 fatal_insn ("invalid insn:", insn);
2549 return "";
2552 const char *
2553 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2555 rtx dest = op[0];
2556 rtx src = op[1];
2557 rtx x = XEXP (dest, 0);
2558 int dummy;
2560 if (!l)
2561 l = &dummy;
2563 if (CONSTANT_ADDRESS_P (x))
2565 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2567 *l = 1;
2568 return AS2 (out,__SREG__,%1);
2570 if (optimize > 0 && io_address_operand (x, QImode))
2572 *l = 1;
2573 return AS2 (out,%m0-0x20,%1);
2575 *l = 2;
2576 return AS2 (sts,%m0,%1);
2578 /* memory access by reg+disp */
2579 else if (GET_CODE (x) == PLUS
2580 && REG_P (XEXP (x,0))
2581 && GET_CODE (XEXP (x,1)) == CONST_INT)
2583 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2585 int disp = INTVAL (XEXP (x,1));
2586 if (REGNO (XEXP (x,0)) != REG_Y)
2587 fatal_insn ("incorrect insn:",insn);
2589 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2590 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2591 AS2 (std,Y+63,%1) CR_TAB
2592 AS2 (sbiw,r28,%o0-63));
2594 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2595 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2596 AS2 (st,Y,%1) CR_TAB
2597 AS2 (subi,r28,lo8(%o0)) CR_TAB
2598 AS2 (sbci,r29,hi8(%o0)));
2600 else if (REGNO (XEXP (x,0)) == REG_X)
2602 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2604 if (reg_unused_after (insn, XEXP (x,0)))
2605 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2606 AS2 (adiw,r26,%o0) CR_TAB
2607 AS2 (st,X,__tmp_reg__));
2609 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2610 AS2 (adiw,r26,%o0) CR_TAB
2611 AS2 (st,X,__tmp_reg__) CR_TAB
2612 AS2 (sbiw,r26,%o0));
2614 else
2616 if (reg_unused_after (insn, XEXP (x,0)))
2617 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2618 AS2 (st,X,%1));
2620 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2621 AS2 (st,X,%1) CR_TAB
2622 AS2 (sbiw,r26,%o0));
2625 *l = 1;
2626 return AS2 (std,%0,%1);
2628 *l = 1;
2629 return AS2 (st,%0,%1);
2632 const char *
2633 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2635 rtx dest = op[0];
2636 rtx src = op[1];
2637 rtx base = XEXP (dest, 0);
2638 int reg_base = true_regnum (base);
2639 int reg_src = true_regnum (src);
2640 /* "volatile" forces writing high byte first, even if less efficient,
2641 for correct operation with 16-bit I/O registers. */
2642 int mem_volatile_p = MEM_VOLATILE_P (dest);
2643 int tmp;
2645 if (!l)
2646 l = &tmp;
2647 if (CONSTANT_ADDRESS_P (base))
2649 if (optimize > 0 && io_address_operand (base, HImode))
2651 *l = 2;
2652 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2653 AS2 (out,%m0-0x20,%A1));
2655 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2656 AS2 (sts,%m0,%A1));
2658 if (reg_base > 0)
2660 if (reg_base == REG_X)
2662 if (reg_src == REG_X)
2664 /* "st X+,r26" and "st -X,r26" are undefined. */
2665 if (!mem_volatile_p && reg_unused_after (insn, src))
2666 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2667 AS2 (st,X,r26) CR_TAB
2668 AS2 (adiw,r26,1) CR_TAB
2669 AS2 (st,X,__tmp_reg__));
2670 else
2671 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2672 AS2 (adiw,r26,1) CR_TAB
2673 AS2 (st,X,__tmp_reg__) CR_TAB
2674 AS2 (sbiw,r26,1) CR_TAB
2675 AS2 (st,X,r26));
2677 else
2679 if (!mem_volatile_p && reg_unused_after (insn, base))
2680 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2681 AS2 (st,X,%B1));
2682 else
2683 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2684 AS2 (st,X,%B1) CR_TAB
2685 AS2 (st,-X,%A1));
2688 else
2689 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2690 AS2 (st,%0,%A1));
2692 else if (GET_CODE (base) == PLUS)
2694 int disp = INTVAL (XEXP (base, 1));
2695 reg_base = REGNO (XEXP (base, 0));
2696 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2698 if (reg_base != REG_Y)
2699 fatal_insn ("incorrect insn:",insn);
2701 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2702 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2703 AS2 (std,Y+63,%B1) CR_TAB
2704 AS2 (std,Y+62,%A1) CR_TAB
2705 AS2 (sbiw,r28,%o0-62));
2707 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2708 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2709 AS2 (std,Y+1,%B1) CR_TAB
2710 AS2 (st,Y,%A1) CR_TAB
2711 AS2 (subi,r28,lo8(%o0)) CR_TAB
2712 AS2 (sbci,r29,hi8(%o0)));
2714 if (reg_base == REG_X)
2716 /* (X + d) = R */
2717 if (reg_src == REG_X)
2719 *l = 7;
2720 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2721 AS2 (mov,__zero_reg__,r27) CR_TAB
2722 AS2 (adiw,r26,%o0+1) CR_TAB
2723 AS2 (st,X,__zero_reg__) CR_TAB
2724 AS2 (st,-X,__tmp_reg__) CR_TAB
2725 AS1 (clr,__zero_reg__) CR_TAB
2726 AS2 (sbiw,r26,%o0));
2728 *l = 4;
2729 return (AS2 (adiw,r26,%o0+1) CR_TAB
2730 AS2 (st,X,%B1) CR_TAB
2731 AS2 (st,-X,%A1) CR_TAB
2732 AS2 (sbiw,r26,%o0));
2734 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2735 AS2 (std,%A0,%A1));
2737 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2738 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2739 AS2 (st,%0,%A1));
2740 else if (GET_CODE (base) == POST_INC) /* (R++) */
2742 if (mem_volatile_p)
2744 if (REGNO (XEXP (base, 0)) == REG_X)
2746 *l = 4;
2747 return (AS2 (adiw,r26,1) CR_TAB
2748 AS2 (st,X,%B1) CR_TAB
2749 AS2 (st,-X,%A1) CR_TAB
2750 AS2 (adiw,r26,2));
2752 else
2754 *l = 3;
2755 return (AS2 (std,%p0+1,%B1) CR_TAB
2756 AS2 (st,%p0,%A1) CR_TAB
2757 AS2 (adiw,%r0,2));
2761 *l = 2;
2762 return (AS2 (st,%0,%A1) CR_TAB
2763 AS2 (st,%0,%B1));
2765 fatal_insn ("unknown move insn:",insn);
2766 return "";
2769 /* Return 1 if the frame pointer is required for the current function. */
2771 bool
2772 avr_frame_pointer_required_p (void)
2774 return (cfun->calls_alloca
2775 || crtl->args.info.nregs == 0
2776 || get_frame_size () > 0);
2779 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2781 static RTX_CODE
2782 compare_condition (rtx insn)
2784 rtx next = next_real_insn (insn);
2785 RTX_CODE cond = UNKNOWN;
2786 if (next && GET_CODE (next) == JUMP_INSN)
2788 rtx pat = PATTERN (next);
2789 rtx src = SET_SRC (pat);
2790 rtx t = XEXP (src, 0);
2791 cond = GET_CODE (t);
2793 return cond;
2796 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2798 static int
2799 compare_sign_p (rtx insn)
2801 RTX_CODE cond = compare_condition (insn);
2802 return (cond == GE || cond == LT);
2805 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2806 that needs to be swapped (GT, GTU, LE, LEU). */
2809 compare_diff_p (rtx insn)
2811 RTX_CODE cond = compare_condition (insn);
2812 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2815 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2818 compare_eq_p (rtx insn)
2820 RTX_CODE cond = compare_condition (insn);
2821 return (cond == EQ || cond == NE);
2825 /* Output test instruction for HImode. */
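/* For illustration: a sign-only test is a single "tst %B0"; when the
   operand can be clobbered an equality test becomes "or %A0,%B0"; an
   ADDW register gets "sbiw %0,0"; otherwise both bytes are compared
   against __zero_reg__ with cp/cpc.  */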
2827 const char *
2828 out_tsthi (rtx insn, rtx op, int *l)
2830 if (compare_sign_p (insn))
2832 if (l) *l = 1;
2833 return AS1 (tst,%B0);
2835 if (reg_unused_after (insn, op)
2836 && compare_eq_p (insn))
2838 /* Faster than sbiw if we can clobber the operand. */
2839 if (l) *l = 1;
2840 return "or %A0,%B0";
2842 if (test_hard_reg_class (ADDW_REGS, op))
2844 if (l) *l = 1;
2845 return AS2 (sbiw,%0,0);
2847 if (l) *l = 2;
2848 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2849 AS2 (cpc,%B0,__zero_reg__));
2853 /* Output test instruction for SImode. */
2855 const char *
2856 out_tstsi (rtx insn, rtx op, int *l)
2858 if (compare_sign_p (insn))
2860 if (l) *l = 1;
2861 return AS1 (tst,%D0);
2863 if (test_hard_reg_class (ADDW_REGS, op))
2865 if (l) *l = 3;
2866 return (AS2 (sbiw,%A0,0) CR_TAB
2867 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2868 AS2 (cpc,%D0,__zero_reg__));
2870 if (l) *l = 4;
2871 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2872 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2873 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2874 AS2 (cpc,%D0,__zero_reg__));
2878 /* Generate asm equivalent for various shifts.
2879 Shift count is a CONST_INT, MEM or REG.
2880 This only handles cases that are not already
2881 carefully hand-optimized in ?sh??i3_out. */
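/* For illustration, with a constant count and a scratch register the
   emitted sequence has roughly this shape:

       ldi  %3,%2          ; scratch %3 counts the iterations
   1:  <one copy of TEMPL>
       dec  %3
       brne 1b                                                     */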
2883 void
2884 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2885 int *len, int t_len)
2887 rtx op[10];
2888 char str[500];
2889 int second_label = 1;
2890 int saved_in_tmp = 0;
2891 int use_zero_reg = 0;
2893 op[0] = operands[0];
2894 op[1] = operands[1];
2895 op[2] = operands[2];
2896 op[3] = operands[3];
2897 str[0] = 0;
2899 if (len)
2900 *len = 1;
2902 if (GET_CODE (operands[2]) == CONST_INT)
2904 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2905 int count = INTVAL (operands[2]);
2906 int max_len = 10; /* If larger than this, always use a loop. */
2908 if (count <= 0)
2910 if (len)
2911 *len = 0;
2912 return;
2915 if (count < 8 && !scratch)
2916 use_zero_reg = 1;
2918 if (optimize_size)
2919 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2921 if (t_len * count <= max_len)
2923 /* Output shifts inline with no loop - faster. */
2924 if (len)
2925 *len = t_len * count;
2926 else
2928 while (count-- > 0)
2929 output_asm_insn (templ, op);
2932 return;
2935 if (scratch)
2937 if (!len)
2938 strcat (str, AS2 (ldi,%3,%2));
2940 else if (use_zero_reg)
2942 /* Hack to save one word: use __zero_reg__ as loop counter.
2943 Set one bit, then shift in a loop until it is 0 again. */
2945 op[3] = zero_reg_rtx;
2946 if (len)
2947 *len = 2;
2948 else
2949 strcat (str, ("set" CR_TAB
2950 AS2 (bld,%3,%2-1)));
2952 else
2954 /* No scratch register available, use one from LD_REGS (saved in
2955 __tmp_reg__) that doesn't overlap with registers to shift. */
2957 op[3] = gen_rtx_REG (QImode,
2958 ((true_regnum (operands[0]) - 1) & 15) + 16);
2959 op[4] = tmp_reg_rtx;
2960 saved_in_tmp = 1;
2962 if (len)
2963 *len = 3; /* Includes "mov %3,%4" after the loop. */
2964 else
2965 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2966 AS2 (ldi,%3,%2)));
2969 second_label = 0;
2971 else if (GET_CODE (operands[2]) == MEM)
2973 rtx op_mov[10];
2975 op[3] = op_mov[0] = tmp_reg_rtx;
2976 op_mov[1] = op[2];
2978 if (len)
2979 out_movqi_r_mr (insn, op_mov, len);
2980 else
2981 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2983 else if (register_operand (operands[2], QImode))
2985 if (reg_unused_after (insn, operands[2]))
2986 op[3] = op[2];
2987 else
2989 op[3] = tmp_reg_rtx;
2990 if (!len)
2991 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2994 else
2995 fatal_insn ("bad shift insn:", insn);
2997 if (second_label)
2999 if (len)
3000 ++*len;
3001 else
3002 strcat (str, AS1 (rjmp,2f));
3005 if (len)
3006 *len += t_len + 2; /* template + dec + brXX */
3007 else
3009 strcat (str, "\n1:\t");
3010 strcat (str, templ);
3011 strcat (str, second_label ? "\n2:\t" : "\n\t");
3012 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3013 strcat (str, CR_TAB);
3014 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3015 if (saved_in_tmp)
3016 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3017 output_asm_insn (str, op);
3022 /* 8bit shift left ((char)x << i) */
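/* For illustration: a constant shift by 4 on an upper (LD) register is
   emitted as "swap %0 / andi %0,0xf0" instead of four single-bit lsl's.  */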
3024 const char *
3025 ashlqi3_out (rtx insn, rtx operands[], int *len)
3027 if (GET_CODE (operands[2]) == CONST_INT)
3029 int k;
3031 if (!len)
3032 len = &k;
3034 switch (INTVAL (operands[2]))
3036 default:
3037 if (INTVAL (operands[2]) < 8)
3038 break;
3040 *len = 1;
3041 return AS1 (clr,%0);
3043 case 1:
3044 *len = 1;
3045 return AS1 (lsl,%0);
3047 case 2:
3048 *len = 2;
3049 return (AS1 (lsl,%0) CR_TAB
3050 AS1 (lsl,%0));
3052 case 3:
3053 *len = 3;
3054 return (AS1 (lsl,%0) CR_TAB
3055 AS1 (lsl,%0) CR_TAB
3056 AS1 (lsl,%0));
3058 case 4:
3059 if (test_hard_reg_class (LD_REGS, operands[0]))
3061 *len = 2;
3062 return (AS1 (swap,%0) CR_TAB
3063 AS2 (andi,%0,0xf0));
3065 *len = 4;
3066 return (AS1 (lsl,%0) CR_TAB
3067 AS1 (lsl,%0) CR_TAB
3068 AS1 (lsl,%0) CR_TAB
3069 AS1 (lsl,%0));
3071 case 5:
3072 if (test_hard_reg_class (LD_REGS, operands[0]))
3074 *len = 3;
3075 return (AS1 (swap,%0) CR_TAB
3076 AS1 (lsl,%0) CR_TAB
3077 AS2 (andi,%0,0xe0));
3079 *len = 5;
3080 return (AS1 (lsl,%0) CR_TAB
3081 AS1 (lsl,%0) CR_TAB
3082 AS1 (lsl,%0) CR_TAB
3083 AS1 (lsl,%0) CR_TAB
3084 AS1 (lsl,%0));
3086 case 6:
3087 if (test_hard_reg_class (LD_REGS, operands[0]))
3089 *len = 4;
3090 return (AS1 (swap,%0) CR_TAB
3091 AS1 (lsl,%0) CR_TAB
3092 AS1 (lsl,%0) CR_TAB
3093 AS2 (andi,%0,0xc0));
3095 *len = 6;
3096 return (AS1 (lsl,%0) CR_TAB
3097 AS1 (lsl,%0) CR_TAB
3098 AS1 (lsl,%0) CR_TAB
3099 AS1 (lsl,%0) CR_TAB
3100 AS1 (lsl,%0) CR_TAB
3101 AS1 (lsl,%0));
3103 case 7:
3104 *len = 3;
3105 return (AS1 (ror,%0) CR_TAB
3106 AS1 (clr,%0) CR_TAB
3107 AS1 (ror,%0));
3110 else if (CONSTANT_P (operands[2]))
3111 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3113 out_shift_with_cnt (AS1 (lsl,%0),
3114 insn, operands, len, 1);
3115 return "";
3119 /* 16bit shift left ((short)x << i) */
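/* For illustration: a constant shift by 8 just moves the low byte up
   and clears it, "mov %B0,%A1 / clr %A0".  */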
3121 const char *
3122 ashlhi3_out (rtx insn, rtx operands[], int *len)
3124 if (GET_CODE (operands[2]) == CONST_INT)
3126 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3127 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3128 int k;
3129 int *t = len;
3131 if (!len)
3132 len = &k;
3134 switch (INTVAL (operands[2]))
3136 default:
3137 if (INTVAL (operands[2]) < 16)
3138 break;
3140 *len = 2;
3141 return (AS1 (clr,%B0) CR_TAB
3142 AS1 (clr,%A0));
3144 case 4:
3145 if (optimize_size && scratch)
3146 break; /* 5 */
3147 if (ldi_ok)
3149 *len = 6;
3150 return (AS1 (swap,%A0) CR_TAB
3151 AS1 (swap,%B0) CR_TAB
3152 AS2 (andi,%B0,0xf0) CR_TAB
3153 AS2 (eor,%B0,%A0) CR_TAB
3154 AS2 (andi,%A0,0xf0) CR_TAB
3155 AS2 (eor,%B0,%A0));
3157 if (scratch)
3159 *len = 7;
3160 return (AS1 (swap,%A0) CR_TAB
3161 AS1 (swap,%B0) CR_TAB
3162 AS2 (ldi,%3,0xf0) CR_TAB
3163 "and %B0,%3" CR_TAB
3164 AS2 (eor,%B0,%A0) CR_TAB
3165 "and %A0,%3" CR_TAB
3166 AS2 (eor,%B0,%A0));
3168 break; /* optimize_size ? 6 : 8 */
3170 case 5:
3171 if (optimize_size)
3172 break; /* scratch ? 5 : 6 */
3173 if (ldi_ok)
3175 *len = 8;
3176 return (AS1 (lsl,%A0) CR_TAB
3177 AS1 (rol,%B0) CR_TAB
3178 AS1 (swap,%A0) CR_TAB
3179 AS1 (swap,%B0) CR_TAB
3180 AS2 (andi,%B0,0xf0) CR_TAB
3181 AS2 (eor,%B0,%A0) CR_TAB
3182 AS2 (andi,%A0,0xf0) CR_TAB
3183 AS2 (eor,%B0,%A0));
3185 if (scratch)
3187 *len = 9;
3188 return (AS1 (lsl,%A0) CR_TAB
3189 AS1 (rol,%B0) CR_TAB
3190 AS1 (swap,%A0) CR_TAB
3191 AS1 (swap,%B0) CR_TAB
3192 AS2 (ldi,%3,0xf0) CR_TAB
3193 "and %B0,%3" CR_TAB
3194 AS2 (eor,%B0,%A0) CR_TAB
3195 "and %A0,%3" CR_TAB
3196 AS2 (eor,%B0,%A0));
3198 break; /* 10 */
3200 case 6:
3201 if (optimize_size)
3202 break; /* scratch ? 5 : 6 */
3203 *len = 9;
3204 return (AS1 (clr,__tmp_reg__) CR_TAB
3205 AS1 (lsr,%B0) CR_TAB
3206 AS1 (ror,%A0) CR_TAB
3207 AS1 (ror,__tmp_reg__) CR_TAB
3208 AS1 (lsr,%B0) CR_TAB
3209 AS1 (ror,%A0) CR_TAB
3210 AS1 (ror,__tmp_reg__) CR_TAB
3211 AS2 (mov,%B0,%A0) CR_TAB
3212 AS2 (mov,%A0,__tmp_reg__));
3214 case 7:
3215 *len = 5;
3216 return (AS1 (lsr,%B0) CR_TAB
3217 AS2 (mov,%B0,%A0) CR_TAB
3218 AS1 (clr,%A0) CR_TAB
3219 AS1 (ror,%B0) CR_TAB
3220 AS1 (ror,%A0));
3222 case 8:
3223 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3224 AS1 (clr,%A0));
3226 case 9:
3227 *len = 3;
3228 return (AS2 (mov,%B0,%A0) CR_TAB
3229 AS1 (clr,%A0) CR_TAB
3230 AS1 (lsl,%B0));
3232 case 10:
3233 *len = 4;
3234 return (AS2 (mov,%B0,%A0) CR_TAB
3235 AS1 (clr,%A0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3237 AS1 (lsl,%B0));
3239 case 11:
3240 *len = 5;
3241 return (AS2 (mov,%B0,%A0) CR_TAB
3242 AS1 (clr,%A0) CR_TAB
3243 AS1 (lsl,%B0) CR_TAB
3244 AS1 (lsl,%B0) CR_TAB
3245 AS1 (lsl,%B0));
3247 case 12:
3248 if (ldi_ok)
3250 *len = 4;
3251 return (AS2 (mov,%B0,%A0) CR_TAB
3252 AS1 (clr,%A0) CR_TAB
3253 AS1 (swap,%B0) CR_TAB
3254 AS2 (andi,%B0,0xf0));
3256 if (scratch)
3258 *len = 5;
3259 return (AS2 (mov,%B0,%A0) CR_TAB
3260 AS1 (clr,%A0) CR_TAB
3261 AS1 (swap,%B0) CR_TAB
3262 AS2 (ldi,%3,0xf0) CR_TAB
3263 "and %B0,%3");
3265 *len = 6;
3266 return (AS2 (mov,%B0,%A0) CR_TAB
3267 AS1 (clr,%A0) CR_TAB
3268 AS1 (lsl,%B0) CR_TAB
3269 AS1 (lsl,%B0) CR_TAB
3270 AS1 (lsl,%B0) CR_TAB
3271 AS1 (lsl,%B0));
3273 case 13:
3274 if (ldi_ok)
3276 *len = 5;
3277 return (AS2 (mov,%B0,%A0) CR_TAB
3278 AS1 (clr,%A0) CR_TAB
3279 AS1 (swap,%B0) CR_TAB
3280 AS1 (lsl,%B0) CR_TAB
3281 AS2 (andi,%B0,0xe0));
3283 if (AVR_HAVE_MUL && scratch)
3285 *len = 5;
3286 return (AS2 (ldi,%3,0x20) CR_TAB
3287 AS2 (mul,%A0,%3) CR_TAB
3288 AS2 (mov,%B0,r0) CR_TAB
3289 AS1 (clr,%A0) CR_TAB
3290 AS1 (clr,__zero_reg__));
3292 if (optimize_size && scratch)
3293 break; /* 5 */
3294 if (scratch)
3296 *len = 6;
3297 return (AS2 (mov,%B0,%A0) CR_TAB
3298 AS1 (clr,%A0) CR_TAB
3299 AS1 (swap,%B0) CR_TAB
3300 AS1 (lsl,%B0) CR_TAB
3301 AS2 (ldi,%3,0xe0) CR_TAB
3302 "and %B0,%3");
3304 if (AVR_HAVE_MUL)
3306 *len = 6;
3307 return ("set" CR_TAB
3308 AS2 (bld,r1,5) CR_TAB
3309 AS2 (mul,%A0,r1) CR_TAB
3310 AS2 (mov,%B0,r0) CR_TAB
3311 AS1 (clr,%A0) CR_TAB
3312 AS1 (clr,__zero_reg__));
3314 *len = 7;
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS1 (lsl,%B0) CR_TAB
3320 AS1 (lsl,%B0) CR_TAB
3321 AS1 (lsl,%B0));
3323 case 14:
3324 if (AVR_HAVE_MUL && ldi_ok)
3326 *len = 5;
3327 return (AS2 (ldi,%B0,0x40) CR_TAB
3328 AS2 (mul,%A0,%B0) CR_TAB
3329 AS2 (mov,%B0,r0) CR_TAB
3330 AS1 (clr,%A0) CR_TAB
3331 AS1 (clr,__zero_reg__));
3333 if (AVR_HAVE_MUL && scratch)
3335 *len = 5;
3336 return (AS2 (ldi,%3,0x40) CR_TAB
3337 AS2 (mul,%A0,%3) CR_TAB
3338 AS2 (mov,%B0,r0) CR_TAB
3339 AS1 (clr,%A0) CR_TAB
3340 AS1 (clr,__zero_reg__));
3342 if (optimize_size && ldi_ok)
3344 *len = 5;
3345 return (AS2 (mov,%B0,%A0) CR_TAB
3346 AS2 (ldi,%A0,6) "\n1:\t"
3347 AS1 (lsl,%B0) CR_TAB
3348 AS1 (dec,%A0) CR_TAB
3349 AS1 (brne,1b));
3351 if (optimize_size && scratch)
3352 break; /* 5 */
3353 *len = 6;
3354 return (AS1 (clr,%B0) CR_TAB
3355 AS1 (lsr,%A0) CR_TAB
3356 AS1 (ror,%B0) CR_TAB
3357 AS1 (lsr,%A0) CR_TAB
3358 AS1 (ror,%B0) CR_TAB
3359 AS1 (clr,%A0));
3361 case 15:
3362 *len = 4;
3363 return (AS1 (clr,%B0) CR_TAB
3364 AS1 (lsr,%A0) CR_TAB
3365 AS1 (ror,%B0) CR_TAB
3366 AS1 (clr,%A0));
3368 len = t;
3370 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3371 AS1 (rol,%B0)),
3372 insn, operands, len, 2);
3373 return "";
3377 /* 32bit shift left ((long)x << i) */
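/* For illustration: with MOVW available, a constant shift by 16 is
   emitted as "movw %C0,%A1 / clr %B0 / clr %A0".  */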
3379 const char *
3380 ashlsi3_out (rtx insn, rtx operands[], int *len)
3382 if (GET_CODE (operands[2]) == CONST_INT)
3384 int k;
3385 int *t = len;
3387 if (!len)
3388 len = &k;
3390 switch (INTVAL (operands[2]))
3392 default:
3393 if (INTVAL (operands[2]) < 32)
3394 break;
3396 if (AVR_HAVE_MOVW)
3397 return *len = 3, (AS1 (clr,%D0) CR_TAB
3398 AS1 (clr,%C0) CR_TAB
3399 AS2 (movw,%A0,%C0));
3400 *len = 4;
3401 return (AS1 (clr,%D0) CR_TAB
3402 AS1 (clr,%C0) CR_TAB
3403 AS1 (clr,%B0) CR_TAB
3404 AS1 (clr,%A0));
3406 case 8:
3408 int reg0 = true_regnum (operands[0]);
3409 int reg1 = true_regnum (operands[1]);
3410 *len = 4;
3411 if (reg0 >= reg1)
3412 return (AS2 (mov,%D0,%C1) CR_TAB
3413 AS2 (mov,%C0,%B1) CR_TAB
3414 AS2 (mov,%B0,%A1) CR_TAB
3415 AS1 (clr,%A0));
3416 else
3417 return (AS1 (clr,%A0) CR_TAB
3418 AS2 (mov,%B0,%A1) CR_TAB
3419 AS2 (mov,%C0,%B1) CR_TAB
3420 AS2 (mov,%D0,%C1));
3423 case 16:
3425 int reg0 = true_regnum (operands[0]);
3426 int reg1 = true_regnum (operands[1]);
3427 if (reg0 + 2 == reg1)
3428 return *len = 2, (AS1 (clr,%B0) CR_TAB
3429 AS1 (clr,%A0));
3430 if (AVR_HAVE_MOVW)
3431 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3432 AS1 (clr,%B0) CR_TAB
3433 AS1 (clr,%A0));
3434 else
3435 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3436 AS2 (mov,%D0,%B1) CR_TAB
3437 AS1 (clr,%B0) CR_TAB
3438 AS1 (clr,%A0));
3441 case 24:
3442 *len = 4;
3443 return (AS2 (mov,%D0,%A1) CR_TAB
3444 AS1 (clr,%C0) CR_TAB
3445 AS1 (clr,%B0) CR_TAB
3446 AS1 (clr,%A0));
3448 case 31:
3449 *len = 6;
3450 return (AS1 (clr,%D0) CR_TAB
3451 AS1 (lsr,%A0) CR_TAB
3452 AS1 (ror,%D0) CR_TAB
3453 AS1 (clr,%C0) CR_TAB
3454 AS1 (clr,%B0) CR_TAB
3455 AS1 (clr,%A0));
3457 len = t;
3459 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3460 AS1 (rol,%B0) CR_TAB
3461 AS1 (rol,%C0) CR_TAB
3462 AS1 (rol,%D0)),
3463 insn, operands, len, 4);
3464 return "";
3467 /* 8bit arithmetic shift right ((signed char)x >> i) */
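/* For illustration: a shift by 7 (or more) only has to replicate the
   sign bit, "lsl %0 / sbc %0,%0".  */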
3469 const char *
3470 ashrqi3_out (rtx insn, rtx operands[], int *len)
3472 if (GET_CODE (operands[2]) == CONST_INT)
3474 int k;
3476 if (!len)
3477 len = &k;
3479 switch (INTVAL (operands[2]))
3481 case 1:
3482 *len = 1;
3483 return AS1 (asr,%0);
3485 case 2:
3486 *len = 2;
3487 return (AS1 (asr,%0) CR_TAB
3488 AS1 (asr,%0));
3490 case 3:
3491 *len = 3;
3492 return (AS1 (asr,%0) CR_TAB
3493 AS1 (asr,%0) CR_TAB
3494 AS1 (asr,%0));
3496 case 4:
3497 *len = 4;
3498 return (AS1 (asr,%0) CR_TAB
3499 AS1 (asr,%0) CR_TAB
3500 AS1 (asr,%0) CR_TAB
3501 AS1 (asr,%0));
3503 case 5:
3504 *len = 5;
3505 return (AS1 (asr,%0) CR_TAB
3506 AS1 (asr,%0) CR_TAB
3507 AS1 (asr,%0) CR_TAB
3508 AS1 (asr,%0) CR_TAB
3509 AS1 (asr,%0));
3511 case 6:
3512 *len = 4;
3513 return (AS2 (bst,%0,6) CR_TAB
3514 AS1 (lsl,%0) CR_TAB
3515 AS2 (sbc,%0,%0) CR_TAB
3516 AS2 (bld,%0,0));
3518 default:
3519 if (INTVAL (operands[2]) < 8)
3520 break;
3522 /* fall through */
3524 case 7:
3525 *len = 2;
3526 return (AS1 (lsl,%0) CR_TAB
3527 AS2 (sbc,%0,%0));
3530 else if (CONSTANT_P (operands[2]))
3531 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3533 out_shift_with_cnt (AS1 (asr,%0),
3534 insn, operands, len, 1);
3535 return "";
3539 /* 16bit arithmetic shift right ((signed short)x >> i) */
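/* For illustration: a shift by 15 reduces to spreading the sign bit,
   "lsl %B0 / sbc %A0,%A0 / mov %B0,%A0".  */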
3541 const char *
3542 ashrhi3_out (rtx insn, rtx operands[], int *len)
3544 if (GET_CODE (operands[2]) == CONST_INT)
3546 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3547 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3548 int k;
3549 int *t = len;
3551 if (!len)
3552 len = &k;
3554 switch (INTVAL (operands[2]))
3556 case 4:
3557 case 5:
3558 /* XXX try to optimize this too? */
3559 break;
3561 case 6:
3562 if (optimize_size)
3563 break; /* scratch ? 5 : 6 */
3564 *len = 8;
3565 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3566 AS2 (mov,%A0,%B0) CR_TAB
3567 AS1 (lsl,__tmp_reg__) CR_TAB
3568 AS1 (rol,%A0) CR_TAB
3569 AS2 (sbc,%B0,%B0) CR_TAB
3570 AS1 (lsl,__tmp_reg__) CR_TAB
3571 AS1 (rol,%A0) CR_TAB
3572 AS1 (rol,%B0));
3574 case 7:
3575 *len = 4;
3576 return (AS1 (lsl,%A0) CR_TAB
3577 AS2 (mov,%A0,%B0) CR_TAB
3578 AS1 (rol,%A0) CR_TAB
3579 AS2 (sbc,%B0,%B0));
3581 case 8:
3583 int reg0 = true_regnum (operands[0]);
3584 int reg1 = true_regnum (operands[1]);
3586 if (reg0 == reg1)
3587 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3588 AS1 (lsl,%B0) CR_TAB
3589 AS2 (sbc,%B0,%B0));
3590 else
3591 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3592 AS1 (clr,%B0) CR_TAB
3593 AS2 (sbrc,%A0,7) CR_TAB
3594 AS1 (dec,%B0));
3597 case 9:
3598 *len = 4;
3599 return (AS2 (mov,%A0,%B0) CR_TAB
3600 AS1 (lsl,%B0) CR_TAB
3601 AS2 (sbc,%B0,%B0) CR_TAB
3602 AS1 (asr,%A0));
3604 case 10:
3605 *len = 5;
3606 return (AS2 (mov,%A0,%B0) CR_TAB
3607 AS1 (lsl,%B0) CR_TAB
3608 AS2 (sbc,%B0,%B0) CR_TAB
3609 AS1 (asr,%A0) CR_TAB
3610 AS1 (asr,%A0));
3612 case 11:
3613 if (AVR_HAVE_MUL && ldi_ok)
3615 *len = 5;
3616 return (AS2 (ldi,%A0,0x20) CR_TAB
3617 AS2 (muls,%B0,%A0) CR_TAB
3618 AS2 (mov,%A0,r1) CR_TAB
3619 AS2 (sbc,%B0,%B0) CR_TAB
3620 AS1 (clr,__zero_reg__));
3622 if (optimize_size && scratch)
3623 break; /* 5 */
3624 *len = 6;
3625 return (AS2 (mov,%A0,%B0) CR_TAB
3626 AS1 (lsl,%B0) CR_TAB
3627 AS2 (sbc,%B0,%B0) CR_TAB
3628 AS1 (asr,%A0) CR_TAB
3629 AS1 (asr,%A0) CR_TAB
3630 AS1 (asr,%A0));
3632 case 12:
3633 if (AVR_HAVE_MUL && ldi_ok)
3635 *len = 5;
3636 return (AS2 (ldi,%A0,0x10) CR_TAB
3637 AS2 (muls,%B0,%A0) CR_TAB
3638 AS2 (mov,%A0,r1) CR_TAB
3639 AS2 (sbc,%B0,%B0) CR_TAB
3640 AS1 (clr,__zero_reg__));
3642 if (optimize_size && scratch)
3643 break; /* 5 */
3644 *len = 7;
3645 return (AS2 (mov,%A0,%B0) CR_TAB
3646 AS1 (lsl,%B0) CR_TAB
3647 AS2 (sbc,%B0,%B0) CR_TAB
3648 AS1 (asr,%A0) CR_TAB
3649 AS1 (asr,%A0) CR_TAB
3650 AS1 (asr,%A0) CR_TAB
3651 AS1 (asr,%A0));
3653 case 13:
3654 if (AVR_HAVE_MUL && ldi_ok)
3656 *len = 5;
3657 return (AS2 (ldi,%A0,0x08) CR_TAB
3658 AS2 (muls,%B0,%A0) CR_TAB
3659 AS2 (mov,%A0,r1) CR_TAB
3660 AS2 (sbc,%B0,%B0) CR_TAB
3661 AS1 (clr,__zero_reg__));
3663 if (optimize_size)
3664 break; /* scratch ? 5 : 7 */
3665 *len = 8;
3666 return (AS2 (mov,%A0,%B0) CR_TAB
3667 AS1 (lsl,%B0) CR_TAB
3668 AS2 (sbc,%B0,%B0) CR_TAB
3669 AS1 (asr,%A0) CR_TAB
3670 AS1 (asr,%A0) CR_TAB
3671 AS1 (asr,%A0) CR_TAB
3672 AS1 (asr,%A0) CR_TAB
3673 AS1 (asr,%A0));
3675 case 14:
3676 *len = 5;
3677 return (AS1 (lsl,%B0) CR_TAB
3678 AS2 (sbc,%A0,%A0) CR_TAB
3679 AS1 (lsl,%B0) CR_TAB
3680 AS2 (mov,%B0,%A0) CR_TAB
3681 AS1 (rol,%A0));
3683 default:
3684 if (INTVAL (operands[2]) < 16)
3685 break;
3687 /* fall through */
3689 case 15:
3690 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3691 AS2 (sbc,%A0,%A0) CR_TAB
3692 AS2 (mov,%B0,%A0));
3694 len = t;
3696 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3697 AS1 (ror,%A0)),
3698 insn, operands, len, 2);
3699 return "";
3703 /* 32bit arithmetic shift right ((signed long)x >> i) */
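/* For illustration: with MOVW available, a shift by 31 becomes
   "lsl %D0 / sbc %A0,%A0 / mov %B0,%A0 / movw %C0,%A0".  */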
3705 const char *
3706 ashrsi3_out (rtx insn, rtx operands[], int *len)
3708 if (GET_CODE (operands[2]) == CONST_INT)
3710 int k;
3711 int *t = len;
3713 if (!len)
3714 len = &k;
3716 switch (INTVAL (operands[2]))
3718 case 8:
3720 int reg0 = true_regnum (operands[0]);
3721 int reg1 = true_regnum (operands[1]);
3722 *len=6;
3723 if (reg0 <= reg1)
3724 return (AS2 (mov,%A0,%B1) CR_TAB
3725 AS2 (mov,%B0,%C1) CR_TAB
3726 AS2 (mov,%C0,%D1) CR_TAB
3727 AS1 (clr,%D0) CR_TAB
3728 AS2 (sbrc,%C0,7) CR_TAB
3729 AS1 (dec,%D0));
3730 else
3731 return (AS1 (clr,%D0) CR_TAB
3732 AS2 (sbrc,%D1,7) CR_TAB
3733 AS1 (dec,%D0) CR_TAB
3734 AS2 (mov,%C0,%D1) CR_TAB
3735 AS2 (mov,%B0,%C1) CR_TAB
3736 AS2 (mov,%A0,%B1));
3739 case 16:
3741 int reg0 = true_regnum (operands[0]);
3742 int reg1 = true_regnum (operands[1]);
3744 if (reg0 == reg1 + 2)
3745 return *len = 4, (AS1 (clr,%D0) CR_TAB
3746 AS2 (sbrc,%B0,7) CR_TAB
3747 AS1 (com,%D0) CR_TAB
3748 AS2 (mov,%C0,%D0));
3749 if (AVR_HAVE_MOVW)
3750 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3751 AS1 (clr,%D0) CR_TAB
3752 AS2 (sbrc,%B0,7) CR_TAB
3753 AS1 (com,%D0) CR_TAB
3754 AS2 (mov,%C0,%D0));
3755 else
3756 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3757 AS2 (mov,%A0,%C1) CR_TAB
3758 AS1 (clr,%D0) CR_TAB
3759 AS2 (sbrc,%B0,7) CR_TAB
3760 AS1 (com,%D0) CR_TAB
3761 AS2 (mov,%C0,%D0));
3764 case 24:
3765 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3766 AS1 (clr,%D0) CR_TAB
3767 AS2 (sbrc,%A0,7) CR_TAB
3768 AS1 (com,%D0) CR_TAB
3769 AS2 (mov,%B0,%D0) CR_TAB
3770 AS2 (mov,%C0,%D0));
3772 default:
3773 if (INTVAL (operands[2]) < 32)
3774 break;
3776 /* fall through */
3778 case 31:
3779 if (AVR_HAVE_MOVW)
3780 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3781 AS2 (sbc,%A0,%A0) CR_TAB
3782 AS2 (mov,%B0,%A0) CR_TAB
3783 AS2 (movw,%C0,%A0));
3784 else
3785 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3786 AS2 (sbc,%A0,%A0) CR_TAB
3787 AS2 (mov,%B0,%A0) CR_TAB
3788 AS2 (mov,%C0,%A0) CR_TAB
3789 AS2 (mov,%D0,%A0));
3791 len = t;
3793 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3794 AS1 (ror,%C0) CR_TAB
3795 AS1 (ror,%B0) CR_TAB
3796 AS1 (ror,%A0)),
3797 insn, operands, len, 4);
3798 return "";
3801 /* 8bit logic shift right ((unsigned char)x >> i) */
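/* For illustration: a constant shift by 4 on an upper (LD) register is
   emitted as "swap %0 / andi %0,0x0f" instead of four single-bit lsr's.  */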
3803 const char *
3804 lshrqi3_out (rtx insn, rtx operands[], int *len)
3806 if (GET_CODE (operands[2]) == CONST_INT)
3808 int k;
3810 if (!len)
3811 len = &k;
3813 switch (INTVAL (operands[2]))
3815 default:
3816 if (INTVAL (operands[2]) < 8)
3817 break;
3819 *len = 1;
3820 return AS1 (clr,%0);
3822 case 1:
3823 *len = 1;
3824 return AS1 (lsr,%0);
3826 case 2:
3827 *len = 2;
3828 return (AS1 (lsr,%0) CR_TAB
3829 AS1 (lsr,%0));
3830 case 3:
3831 *len = 3;
3832 return (AS1 (lsr,%0) CR_TAB
3833 AS1 (lsr,%0) CR_TAB
3834 AS1 (lsr,%0));
3836 case 4:
3837 if (test_hard_reg_class (LD_REGS, operands[0]))
3839 *len=2;
3840 return (AS1 (swap,%0) CR_TAB
3841 AS2 (andi,%0,0x0f));
3843 *len = 4;
3844 return (AS1 (lsr,%0) CR_TAB
3845 AS1 (lsr,%0) CR_TAB
3846 AS1 (lsr,%0) CR_TAB
3847 AS1 (lsr,%0));
3849 case 5:
3850 if (test_hard_reg_class (LD_REGS, operands[0]))
3852 *len = 3;
3853 return (AS1 (swap,%0) CR_TAB
3854 AS1 (lsr,%0) CR_TAB
3855 AS2 (andi,%0,0x7));
3857 *len = 5;
3858 return (AS1 (lsr,%0) CR_TAB
3859 AS1 (lsr,%0) CR_TAB
3860 AS1 (lsr,%0) CR_TAB
3861 AS1 (lsr,%0) CR_TAB
3862 AS1 (lsr,%0));
3864 case 6:
3865 if (test_hard_reg_class (LD_REGS, operands[0]))
3867 *len = 4;
3868 return (AS1 (swap,%0) CR_TAB
3869 AS1 (lsr,%0) CR_TAB
3870 AS1 (lsr,%0) CR_TAB
3871 AS2 (andi,%0,0x3));
3873 *len = 6;
3874 return (AS1 (lsr,%0) CR_TAB
3875 AS1 (lsr,%0) CR_TAB
3876 AS1 (lsr,%0) CR_TAB
3877 AS1 (lsr,%0) CR_TAB
3878 AS1 (lsr,%0) CR_TAB
3879 AS1 (lsr,%0));
3881 case 7:
3882 *len = 3;
3883 return (AS1 (rol,%0) CR_TAB
3884 AS1 (clr,%0) CR_TAB
3885 AS1 (rol,%0));
3888 else if (CONSTANT_P (operands[2]))
3889 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3891 out_shift_with_cnt (AS1 (lsr,%0),
3892 insn, operands, len, 1);
3893 return "";
3896 /* 16bit logic shift right ((unsigned short)x >> i) */
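/* For illustration: a constant shift by 8 is just a byte move plus a
   clear, "mov %A0,%B1 / clr %B0".  */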
3898 const char *
3899 lshrhi3_out (rtx insn, rtx operands[], int *len)
3901 if (GET_CODE (operands[2]) == CONST_INT)
3903 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3904 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3905 int k;
3906 int *t = len;
3908 if (!len)
3909 len = &k;
3911 switch (INTVAL (operands[2]))
3913 default:
3914 if (INTVAL (operands[2]) < 16)
3915 break;
3917 *len = 2;
3918 return (AS1 (clr,%B0) CR_TAB
3919 AS1 (clr,%A0));
3921 case 4:
3922 if (optimize_size && scratch)
3923 break; /* 5 */
3924 if (ldi_ok)
3926 *len = 6;
3927 return (AS1 (swap,%B0) CR_TAB
3928 AS1 (swap,%A0) CR_TAB
3929 AS2 (andi,%A0,0x0f) CR_TAB
3930 AS2 (eor,%A0,%B0) CR_TAB
3931 AS2 (andi,%B0,0x0f) CR_TAB
3932 AS2 (eor,%A0,%B0));
3934 if (scratch)
3936 *len = 7;
3937 return (AS1 (swap,%B0) CR_TAB
3938 AS1 (swap,%A0) CR_TAB
3939 AS2 (ldi,%3,0x0f) CR_TAB
3940 "and %A0,%3" CR_TAB
3941 AS2 (eor,%A0,%B0) CR_TAB
3942 "and %B0,%3" CR_TAB
3943 AS2 (eor,%A0,%B0));
3945 break; /* optimize_size ? 6 : 8 */
3947 case 5:
3948 if (optimize_size)
3949 break; /* scratch ? 5 : 6 */
3950 if (ldi_ok)
3952 *len = 8;
3953 return (AS1 (lsr,%B0) CR_TAB
3954 AS1 (ror,%A0) CR_TAB
3955 AS1 (swap,%B0) CR_TAB
3956 AS1 (swap,%A0) CR_TAB
3957 AS2 (andi,%A0,0x0f) CR_TAB
3958 AS2 (eor,%A0,%B0) CR_TAB
3959 AS2 (andi,%B0,0x0f) CR_TAB
3960 AS2 (eor,%A0,%B0));
3962 if (scratch)
3964 *len = 9;
3965 return (AS1 (lsr,%B0) CR_TAB
3966 AS1 (ror,%A0) CR_TAB
3967 AS1 (swap,%B0) CR_TAB
3968 AS1 (swap,%A0) CR_TAB
3969 AS2 (ldi,%3,0x0f) CR_TAB
3970 "and %A0,%3" CR_TAB
3971 AS2 (eor,%A0,%B0) CR_TAB
3972 "and %B0,%3" CR_TAB
3973 AS2 (eor,%A0,%B0));
3975 break; /* 10 */
3977 case 6:
3978 if (optimize_size)
3979 break; /* scratch ? 5 : 6 */
3980 *len = 9;
3981 return (AS1 (clr,__tmp_reg__) CR_TAB
3982 AS1 (lsl,%A0) CR_TAB
3983 AS1 (rol,%B0) CR_TAB
3984 AS1 (rol,__tmp_reg__) CR_TAB
3985 AS1 (lsl,%A0) CR_TAB
3986 AS1 (rol,%B0) CR_TAB
3987 AS1 (rol,__tmp_reg__) CR_TAB
3988 AS2 (mov,%A0,%B0) CR_TAB
3989 AS2 (mov,%B0,__tmp_reg__));
3991 case 7:
3992 *len = 5;
3993 return (AS1 (lsl,%A0) CR_TAB
3994 AS2 (mov,%A0,%B0) CR_TAB
3995 AS1 (rol,%A0) CR_TAB
3996 AS2 (sbc,%B0,%B0) CR_TAB
3997 AS1 (neg,%B0));
3999 case 8:
4000 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4001 AS1 (clr,%B0));
4003 case 9:
4004 *len = 3;
4005 return (AS2 (mov,%A0,%B0) CR_TAB
4006 AS1 (clr,%B0) CR_TAB
4007 AS1 (lsr,%A0));
4009 case 10:
4010 *len = 4;
4011 return (AS2 (mov,%A0,%B0) CR_TAB
4012 AS1 (clr,%B0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4014 AS1 (lsr,%A0));
4016 case 11:
4017 *len = 5;
4018 return (AS2 (mov,%A0,%B0) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (lsr,%A0) CR_TAB
4021 AS1 (lsr,%A0) CR_TAB
4022 AS1 (lsr,%A0));
4024 case 12:
4025 if (ldi_ok)
4027 *len = 4;
4028 return (AS2 (mov,%A0,%B0) CR_TAB
4029 AS1 (clr,%B0) CR_TAB
4030 AS1 (swap,%A0) CR_TAB
4031 AS2 (andi,%A0,0x0f));
4033 if (scratch)
4035 *len = 5;
4036 return (AS2 (mov,%A0,%B0) CR_TAB
4037 AS1 (clr,%B0) CR_TAB
4038 AS1 (swap,%A0) CR_TAB
4039 AS2 (ldi,%3,0x0f) CR_TAB
4040 "and %A0,%3");
4042 *len = 6;
4043 return (AS2 (mov,%A0,%B0) CR_TAB
4044 AS1 (clr,%B0) CR_TAB
4045 AS1 (lsr,%A0) CR_TAB
4046 AS1 (lsr,%A0) CR_TAB
4047 AS1 (lsr,%A0) CR_TAB
4048 AS1 (lsr,%A0));
4050 case 13:
4051 if (ldi_ok)
4053 *len = 5;
4054 return (AS2 (mov,%A0,%B0) CR_TAB
4055 AS1 (clr,%B0) CR_TAB
4056 AS1 (swap,%A0) CR_TAB
4057 AS1 (lsr,%A0) CR_TAB
4058 AS2 (andi,%A0,0x07));
4060 if (AVR_HAVE_MUL && scratch)
4062 *len = 5;
4063 return (AS2 (ldi,%3,0x08) CR_TAB
4064 AS2 (mul,%B0,%3) CR_TAB
4065 AS2 (mov,%A0,r1) CR_TAB
4066 AS1 (clr,%B0) CR_TAB
4067 AS1 (clr,__zero_reg__));
4069 if (optimize_size && scratch)
4070 break; /* 5 */
4071 if (scratch)
4073 *len = 6;
4074 return (AS2 (mov,%A0,%B0) CR_TAB
4075 AS1 (clr,%B0) CR_TAB
4076 AS1 (swap,%A0) CR_TAB
4077 AS1 (lsr,%A0) CR_TAB
4078 AS2 (ldi,%3,0x07) CR_TAB
4079 "and %A0,%3");
4081 if (AVR_HAVE_MUL)
4083 *len = 6;
4084 return ("set" CR_TAB
4085 AS2 (bld,r1,3) CR_TAB
4086 AS2 (mul,%B0,r1) CR_TAB
4087 AS2 (mov,%A0,r1) CR_TAB
4088 AS1 (clr,%B0) CR_TAB
4089 AS1 (clr,__zero_reg__));
4091 *len = 7;
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS1 (lsr,%A0) CR_TAB
4097 AS1 (lsr,%A0) CR_TAB
4098 AS1 (lsr,%A0));
4100 case 14:
4101 if (AVR_HAVE_MUL && ldi_ok)
4103 *len = 5;
4104 return (AS2 (ldi,%A0,0x04) CR_TAB
4105 AS2 (mul,%B0,%A0) CR_TAB
4106 AS2 (mov,%A0,r1) CR_TAB
4107 AS1 (clr,%B0) CR_TAB
4108 AS1 (clr,__zero_reg__));
4110 if (AVR_HAVE_MUL && scratch)
4112 *len = 5;
4113 return (AS2 (ldi,%3,0x04) CR_TAB
4114 AS2 (mul,%B0,%3) CR_TAB
4115 AS2 (mov,%A0,r1) CR_TAB
4116 AS1 (clr,%B0) CR_TAB
4117 AS1 (clr,__zero_reg__));
4119 if (optimize_size && ldi_ok)
4121 *len = 5;
4122 return (AS2 (mov,%A0,%B0) CR_TAB
4123 AS2 (ldi,%B0,6) "\n1:\t"
4124 AS1 (lsr,%A0) CR_TAB
4125 AS1 (dec,%B0) CR_TAB
4126 AS1 (brne,1b));
4128 if (optimize_size && scratch)
4129 break; /* 5 */
4130 *len = 6;
4131 return (AS1 (clr,%A0) CR_TAB
4132 AS1 (lsl,%B0) CR_TAB
4133 AS1 (rol,%A0) CR_TAB
4134 AS1 (lsl,%B0) CR_TAB
4135 AS1 (rol,%A0) CR_TAB
4136 AS1 (clr,%B0));
4138 case 15:
4139 *len = 4;
4140 return (AS1 (clr,%A0) CR_TAB
4141 AS1 (lsl,%B0) CR_TAB
4142 AS1 (rol,%A0) CR_TAB
4143 AS1 (clr,%B0));
4145 len = t;
4147 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4148 AS1 (ror,%A0)),
4149 insn, operands, len, 2);
4150 return "";
4153 /* 32bit logic shift right ((unsigned long)x >> i) */
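/* For illustration: a constant shift by 24 moves the top byte down and
   clears the rest, "mov %A0,%D1 / clr %B0 / clr %C0 / clr %D0".  */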
4155 const char *
4156 lshrsi3_out (rtx insn, rtx operands[], int *len)
4158 if (GET_CODE (operands[2]) == CONST_INT)
4160 int k;
4161 int *t = len;
4163 if (!len)
4164 len = &k;
4166 switch (INTVAL (operands[2]))
4168 default:
4169 if (INTVAL (operands[2]) < 32)
4170 break;
4172 if (AVR_HAVE_MOVW)
4173 return *len = 3, (AS1 (clr,%D0) CR_TAB
4174 AS1 (clr,%C0) CR_TAB
4175 AS2 (movw,%A0,%C0));
4176 *len = 4;
4177 return (AS1 (clr,%D0) CR_TAB
4178 AS1 (clr,%C0) CR_TAB
4179 AS1 (clr,%B0) CR_TAB
4180 AS1 (clr,%A0));
4182 case 8:
4184 int reg0 = true_regnum (operands[0]);
4185 int reg1 = true_regnum (operands[1]);
4186 *len = 4;
4187 if (reg0 <= reg1)
4188 return (AS2 (mov,%A0,%B1) CR_TAB
4189 AS2 (mov,%B0,%C1) CR_TAB
4190 AS2 (mov,%C0,%D1) CR_TAB
4191 AS1 (clr,%D0));
4192 else
4193 return (AS1 (clr,%D0) CR_TAB
4194 AS2 (mov,%C0,%D1) CR_TAB
4195 AS2 (mov,%B0,%C1) CR_TAB
4196 AS2 (mov,%A0,%B1));
4199 case 16:
4201 int reg0 = true_regnum (operands[0]);
4202 int reg1 = true_regnum (operands[1]);
4204 if (reg0 == reg1 + 2)
4205 return *len = 2, (AS1 (clr,%C0) CR_TAB
4206 AS1 (clr,%D0));
4207 if (AVR_HAVE_MOVW)
4208 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4209 AS1 (clr,%C0) CR_TAB
4210 AS1 (clr,%D0));
4211 else
4212 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4213 AS2 (mov,%A0,%C1) CR_TAB
4214 AS1 (clr,%C0) CR_TAB
4215 AS1 (clr,%D0));
4218 case 24:
4219 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4220 AS1 (clr,%B0) CR_TAB
4221 AS1 (clr,%C0) CR_TAB
4222 AS1 (clr,%D0));
4224 case 31:
4225 *len = 6;
4226 return (AS1 (clr,%A0) CR_TAB
4227 AS2 (sbrc,%D0,7) CR_TAB
4228 AS1 (inc,%A0) CR_TAB
4229 AS1 (clr,%B0) CR_TAB
4230 AS1 (clr,%C0) CR_TAB
4231 AS1 (clr,%D0));
4233 len = t;
4235 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4236 AS1 (ror,%C0) CR_TAB
4237 AS1 (ror,%B0) CR_TAB
4238 AS1 (ror,%A0)),
4239 insn, operands, len, 4);
4240 return "";
4243 /* Create RTL split patterns for byte sized rotate expressions. This
4244 produces a series of move instructions and considers overlap situations.
4245 Overlapping non-HImode operands need a scratch register. */
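/* For illustration: an SImode rotate by 16 between non-overlapping
   registers is emitted as two HImode subreg moves that swap the word
   halves; only chains of conflicting moves fall back to the scratch.  */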
4247 bool
4248 avr_rotate_bytes (rtx operands[])
4250 int i, j;
4251 enum machine_mode mode = GET_MODE (operands[0]);
4252 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4253 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4254 int num = INTVAL (operands[2]);
4255 rtx scratch = operands[3];
4256 /* Work out whether a byte or word move is needed.  Odd byte rotates need QImode.
4257 Use a word move if no scratch is needed, otherwise use the size of the scratch. */
4258 enum machine_mode move_mode = QImode;
4259 if (num & 0xf)
4260 move_mode = QImode;
4261 else if ((mode == SImode && !same_reg) || !overlapped)
4262 move_mode = HImode;
4263 else
4264 move_mode = GET_MODE (scratch);
4266 /* Force DI rotate to use QI moves since other DI moves are currently split
4267 into QI moves so forward propagation works better. */
4268 if (mode == DImode)
4269 move_mode = QImode;
4270 /* Make scratch smaller if needed. */
4271 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4272 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4274 int move_size = GET_MODE_SIZE (move_mode);
4275 /* Number of bytes/words to rotate. */
4276 int offset = (num >> 3) / move_size;
4277 /* Number of moves needed. */
4278 int size = GET_MODE_SIZE (mode) / move_size;
4279 /* HImode byte swap is a special case to avoid a scratch register. */
4280 if (mode == HImode && same_reg)
4282 /* HImode byte swap, using xor. This is as quick as using scratch. */
4283 rtx src, dst;
4284 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4285 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4286 if (!rtx_equal_p (dst, src))
4288 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4289 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4290 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4293 else
4295 /* Create linked list of moves to determine move order. */
4296 struct {
4297 rtx src, dst;
4298 int links;
4299 } move[size + 8];
4301 /* Generate list of subreg moves. */
4302 for (i = 0; i < size; i++)
4304 int from = i;
4305 int to = (from + offset) % size;
4306 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4307 mode, from * move_size);
4308 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4309 mode, to * move_size);
4310 move[i].links = -1;
4312 /* Mark dependence where the dst of one move is the src of another move.
4313 The first move is a conflict, as it must wait until the second is
4314 performed.  We ignore moves to self - those are caught later. */
4315 if (overlapped)
4316 for (i = 0; i < size; i++)
4317 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4318 for (j = 0; j < size; j++)
4319 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4321 /* The dst of move i is the src of move j. */
4322 move[i].links = j;
4323 break;
4326 int blocked = -1;
4327 int moves = 0;
4328 /* Go through move list and perform non-conflicting moves. As each
4329 non-overlapping move is made, it may remove other conflicts
4330 so the process is repeated until no conflicts remain. */
4333 blocked = -1;
4334 moves = 0;
4335 /* Emit move where dst is not also a src or we have used that
4336 src already. */
4337 for (i = 0; i < size; i++)
4338 if (move[i].src != NULL_RTX)
4339 if (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4341 moves++;
4342 /* Ignore NOP moves to self. */
4343 if (!rtx_equal_p (move[i].dst, move[i].src))
4344 emit_move_insn (move[i].dst, move[i].src);
4346 /* Remove conflict from list. */
4347 move[i].src = NULL_RTX;
4349 else
4350 blocked = i;
4352 /* Check for deadlock. This is when no moves occurred and we have
4353 at least one blocked move. */
4354 if (moves == 0 && blocked != -1)
4356 /* Need to use the scratch register to break the deadlock.
4357 Add a move that puts the dst of the blocked move into the scratch.
4358 When this move occurs, it breaks the chain deadlock; the scratch
4359 register is then substituted as the source of the blocking move. */
4361 move[size].src = move[blocked].dst;
4362 move[size].dst = scratch;
4363 /* Scratch move is never blocked. */
4364 move[size].links = -1;
4365 /* Make sure we have valid link. */
4366 gcc_assert (move[blocked].links != -1);
4367 /* Replace src of blocking move with scratch reg. */
4368 move[move[blocked].links].src = scratch;
4369 /* Make dependent on the scratch move occurring. */
4370 move[blocked].links = size;
4371 size++;
4374 while (blocked != -1);
4376 return true;
4379 /* Modifies the length assigned to instruction INSN.
4380 LEN is the initially computed length of the insn. */
4383 adjust_insn_length (rtx insn, int len)
4385 rtx patt = PATTERN (insn);
4386 rtx set;
4388 if (GET_CODE (patt) == SET)
4390 rtx op[10];
4391 op[1] = SET_SRC (patt);
4392 op[0] = SET_DEST (patt);
4393 if (general_operand (op[1], VOIDmode)
4394 && general_operand (op[0], VOIDmode))
4396 switch (GET_MODE (op[0]))
4398 case QImode:
4399 output_movqi (insn, op, &len);
4400 break;
4401 case HImode:
4402 output_movhi (insn, op, &len);
4403 break;
4404 case SImode:
4405 case SFmode:
4406 output_movsisf (insn, op, &len);
4407 break;
4408 default:
4409 break;
4412 else if (op[0] == cc0_rtx && REG_P (op[1]))
4414 switch (GET_MODE (op[1]))
4416 case HImode: out_tsthi (insn, op[1], &len); break;
4417 case SImode: out_tstsi (insn, op[1], &len); break;
4418 default: break;
4421 else if (GET_CODE (op[1]) == AND)
4423 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4425 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4426 if (GET_MODE (op[1]) == SImode)
4427 len = (((mask & 0xff) != 0xff)
4428 + ((mask & 0xff00) != 0xff00)
4429 + ((mask & 0xff0000L) != 0xff0000L)
4430 + ((mask & 0xff000000L) != 0xff000000L));
4431 else if (GET_MODE (op[1]) == HImode)
4432 len = (((mask & 0xff) != 0xff)
4433 + ((mask & 0xff00) != 0xff00));
4436 else if (GET_CODE (op[1]) == IOR)
4438 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4440 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4441 if (GET_MODE (op[1]) == SImode)
4442 len = (((mask & 0xff) != 0)
4443 + ((mask & 0xff00) != 0)
4444 + ((mask & 0xff0000L) != 0)
4445 + ((mask & 0xff000000L) != 0));
4446 else if (GET_MODE (op[1]) == HImode)
4447 len = (((mask & 0xff) != 0)
4448 + ((mask & 0xff00) != 0));
4452 set = single_set (insn);
4453 if (set)
4455 rtx op[10];
4457 op[1] = SET_SRC (set);
4458 op[0] = SET_DEST (set);
4460 if (GET_CODE (patt) == PARALLEL
4461 && general_operand (op[1], VOIDmode)
4462 && general_operand (op[0], VOIDmode))
4464 if (XVECLEN (patt, 0) == 2)
4465 op[2] = XVECEXP (patt, 0, 1);
4467 switch (GET_MODE (op[0]))
4469 case QImode:
4470 len = 2;
4471 break;
4472 case HImode:
4473 output_reload_inhi (insn, op, &len);
4474 break;
4475 case SImode:
4476 case SFmode:
4477 output_reload_insisf (insn, op, &len);
4478 break;
4479 default:
4480 break;
4483 else if (GET_CODE (op[1]) == ASHIFT
4484 || GET_CODE (op[1]) == ASHIFTRT
4485 || GET_CODE (op[1]) == LSHIFTRT)
4487 rtx ops[10];
4488 ops[0] = op[0];
4489 ops[1] = XEXP (op[1],0);
4490 ops[2] = XEXP (op[1],1);
4491 switch (GET_CODE (op[1]))
4493 case ASHIFT:
4494 switch (GET_MODE (op[0]))
4496 case QImode: ashlqi3_out (insn,ops,&len); break;
4497 case HImode: ashlhi3_out (insn,ops,&len); break;
4498 case SImode: ashlsi3_out (insn,ops,&len); break;
4499 default: break;
4501 break;
4502 case ASHIFTRT:
4503 switch (GET_MODE (op[0]))
4505 case QImode: ashrqi3_out (insn,ops,&len); break;
4506 case HImode: ashrhi3_out (insn,ops,&len); break;
4507 case SImode: ashrsi3_out (insn,ops,&len); break;
4508 default: break;
4510 break;
4511 case LSHIFTRT:
4512 switch (GET_MODE (op[0]))
4514 case QImode: lshrqi3_out (insn,ops,&len); break;
4515 case HImode: lshrhi3_out (insn,ops,&len); break;
4516 case SImode: lshrsi3_out (insn,ops,&len); break;
4517 default: break;
4519 break;
4520 default:
4521 break;
4525 return len;
4528 /* Return nonzero if register REG is dead after INSN. */
4531 reg_unused_after (rtx insn, rtx reg)
4533 return (dead_or_set_p (insn, reg)
4534 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4537 /* Return nonzero if REG is not used after INSN.
4538 We assume REG is a reload reg, and therefore does
4539 not live past labels. It may live past calls or jumps though. */
4542 _reg_unused_after (rtx insn, rtx reg)
4544 enum rtx_code code;
4545 rtx set;
4547 /* If the reg is set by this instruction, then it is safe for our
4548 case. Disregard the case where this is a store to memory, since
4549 we are checking a register used in the store address. */
4550 set = single_set (insn);
4551 if (set && GET_CODE (SET_DEST (set)) != MEM
4552 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4553 return 1;
4555 while ((insn = NEXT_INSN (insn)))
4557 rtx set;
4558 code = GET_CODE (insn);
4560 #if 0
4561 /* If this is a label that existed before reload, then the register
4562 is dead here. However, if this is a label added by reorg, then
4563 the register may still be live here. We can't tell the difference,
4564 so we just ignore labels completely. */
4565 if (code == CODE_LABEL)
4566 return 1;
4567 /* else */
4568 #endif
4570 if (!INSN_P (insn))
4571 continue;
4573 if (code == JUMP_INSN)
4574 return 0;
4576 /* If this is a sequence, we must handle them all at once.
4577 We could have for instance a call that sets the target register,
4578 and an insn in a delay slot that uses the register. In this case,
4579 we must return 0. */
4580 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4582 int i;
4583 int retval = 0;
4585 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4587 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4588 rtx set = single_set (this_insn);
4590 if (GET_CODE (this_insn) == CALL_INSN)
4591 code = CALL_INSN;
4592 else if (GET_CODE (this_insn) == JUMP_INSN)
4594 if (INSN_ANNULLED_BRANCH_P (this_insn))
4595 return 0;
4596 code = JUMP_INSN;
4599 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4600 return 0;
4601 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4603 if (GET_CODE (SET_DEST (set)) != MEM)
4604 retval = 1;
4605 else
4606 return 0;
4608 if (set == 0
4609 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4610 return 0;
4612 if (retval == 1)
4613 return 1;
4614 else if (code == JUMP_INSN)
4615 return 0;
4618 if (code == CALL_INSN)
4620 rtx tem;
4621 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4622 if (GET_CODE (XEXP (tem, 0)) == USE
4623 && REG_P (XEXP (XEXP (tem, 0), 0))
4624 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4625 return 0;
4626 if (call_used_regs[REGNO (reg)])
4627 return 1;
4630 set = single_set (insn);
4632 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4633 return 0;
4634 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4635 return GET_CODE (SET_DEST (set)) != MEM;
4636 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4637 return 0;
4639 return 1;
4642 /* Target hook for assembling integer objects. The AVR version needs
4643 special handling for references to certain labels. */
4645 static bool
4646 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4648 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4649 && text_segment_operand (x, VOIDmode) )
4651 fputs ("\t.word\tgs(", asm_out_file);
4652 output_addr_const (asm_out_file, x);
4653 fputs (")\n", asm_out_file);
4654 return true;
4656 return default_assemble_integer (x, size, aligned_p);
4659 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4661 void
4662 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4665 /* If the function has the 'signal' or 'interrupt' attribute, test to
4666 make sure that the name of the function is "__vector_NN" so as to
4667 catch when the user misspells the interrupt vector name. */
4669 if (cfun->machine->is_interrupt)
4671 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4673 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4674 "%qs appears to be a misspelled interrupt handler",
4675 name);
4678 else if (cfun->machine->is_signal)
4680 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4682 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4683 "%qs appears to be a misspelled signal handler",
4684 name);
4688 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4689 ASM_OUTPUT_LABEL (file, name);
4692 /* The routine used to output NUL terminated strings. We use a special
4693 version of this for most svr4 targets because doing so makes the
4694 generated assembly code more compact (and thus faster to assemble)
4695 as well as more readable, especially for targets like the i386
4696 (where the only alternative is to output character sequences as
4697 comma separated lists of numbers). */
4699 void
4700 gas_output_limited_string(FILE *file, const char *str)
4702 const unsigned char *_limited_str = (const unsigned char *) str;
4703 unsigned ch;
4704 fprintf (file, "%s\"", STRING_ASM_OP);
4705 for (; (ch = *_limited_str); _limited_str++)
4707 int escape;
4708 switch (escape = ESCAPES[ch])
4710 case 0:
4711 putc (ch, file);
4712 break;
4713 case 1:
4714 fprintf (file, "\\%03o", ch);
4715 break;
4716 default:
4717 putc ('\\', file);
4718 putc (escape, file);
4719 break;
4722 fprintf (file, "\"\n");
4725 /* The routine used to output sequences of byte values. We use a special
4726 version of this for most svr4 targets because doing so makes the
4727 generated assembly code more compact (and thus faster to assemble)
4728 as well as more readable. Note that if we find subparts of the
4729 character sequence which end with NUL (and which are shorter than
4730 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4732 void
4733 gas_output_ascii (FILE *file, const char *str, size_t length)
4735 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4736 const unsigned char *limit = _ascii_bytes + length;
4737 unsigned bytes_in_chunk = 0;
4738 for (; _ascii_bytes < limit; _ascii_bytes++)
4740 const unsigned char *p;
4741 if (bytes_in_chunk >= 60)
4743 fprintf (file, "\"\n");
4744 bytes_in_chunk = 0;
4746 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4747 continue;
4748 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4750 if (bytes_in_chunk > 0)
4752 fprintf (file, "\"\n");
4753 bytes_in_chunk = 0;
4755 gas_output_limited_string (file, (const char*)_ascii_bytes);
4756 _ascii_bytes = p;
4758 else
4760 int escape;
4761 unsigned ch;
4762 if (bytes_in_chunk == 0)
4763 fprintf (file, "\t.ascii\t\"");
4764 switch (escape = ESCAPES[ch = *_ascii_bytes])
4766 case 0:
4767 putc (ch, file);
4768 bytes_in_chunk++;
4769 break;
4770 case 1:
4771 fprintf (file, "\\%03o", ch);
4772 bytes_in_chunk += 4;
4773 break;
4774 default:
4775 putc ('\\', file);
4776 putc (escape, file);
4777 bytes_in_chunk += 2;
4778 break;
4782 if (bytes_in_chunk > 0)
4783 fprintf (file, "\"\n");
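/* Worked example of the chunking above (illustrative; assumes STRING_ASM_OP
   expands to "\t.string\t" and STRING_LIMIT is large enough): the bytes
   { 'O', 'K', 0 } end in a NUL and are handed to gas_output_limited_string,
   which emits
       .string "OK"
   whereas a run of bytes with no NUL before the limit stays in an
       .ascii "..."
   chunk built by the loop above, with non-printable bytes escaped.  */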
4786 /* Return value is nonzero if pseudos that have been
4787 assigned to registers of class CLASS would likely be spilled
4788 because registers of CLASS are needed for spill registers. */
4790 static bool
4791 avr_class_likely_spilled_p (reg_class_t c)
4793 return (c != ALL_REGS && c != ADDW_REGS);
4796 /* Valid attributes:
4797 progmem - place data in program memory;
4798 signal - make a function a hardware interrupt handler; interrupts
4799 stay disabled after the function prologue;
4800 interrupt - make a function a hardware interrupt handler; interrupts
4801 are re-enabled after the function prologue;
4802 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4804 Only the `progmem' attribute is valid for a type. */
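/* Hedged usage sketch for the attributes listed above (user code, not part
   of the port; the vector numbers are purely illustrative):  */

const char greeting[] __attribute__ ((progmem)) = "hi";  /* data in flash */

void __vector_2 (void) __attribute__ ((interrupt));  /* sei after prologue */
void __vector_3 (void) __attribute__ ((signal));     /* interrupts stay off */

void startup_stub (void) __attribute__ ((naked));    /* no prologue/epilogue/ret */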
4806 /* Handle a "progmem" attribute; arguments as in
4807 struct attribute_spec.handler. */
4808 static tree
4809 avr_handle_progmem_attribute (tree *node, tree name,
4810 tree args ATTRIBUTE_UNUSED,
4811 int flags ATTRIBUTE_UNUSED,
4812 bool *no_add_attrs)
4814 if (DECL_P (*node))
4816 if (TREE_CODE (*node) == TYPE_DECL)
4818 /* This is really a decl attribute, not a type attribute,
4819 but try to handle it for GCC 3.0 backwards compatibility. */
4821 tree type = TREE_TYPE (*node);
4822 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4823 tree newtype = build_type_attribute_variant (type, attr);
4825 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4826 TREE_TYPE (*node) = newtype;
4827 *no_add_attrs = true;
4829 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4831 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4833 warning (0, "only initialized variables can be placed into "
4834 "program memory area");
4835 *no_add_attrs = true;
4838 else
4840 warning (OPT_Wattributes, "%qE attribute ignored",
4841 name);
4842 *no_add_attrs = true;
4846 return NULL_TREE;
4849 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4850 struct attribute_spec.handler. */
4852 static tree
4853 avr_handle_fndecl_attribute (tree *node, tree name,
4854 tree args ATTRIBUTE_UNUSED,
4855 int flags ATTRIBUTE_UNUSED,
4856 bool *no_add_attrs)
4858 if (TREE_CODE (*node) != FUNCTION_DECL)
4860 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4861 name);
4862 *no_add_attrs = true;
4865 return NULL_TREE;
4868 static tree
4869 avr_handle_fntype_attribute (tree *node, tree name,
4870 tree args ATTRIBUTE_UNUSED,
4871 int flags ATTRIBUTE_UNUSED,
4872 bool *no_add_attrs)
4874 if (TREE_CODE (*node) != FUNCTION_TYPE)
4876 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4877 name);
4878 *no_add_attrs = true;
4881 return NULL_TREE;
4884 /* Look for the attribute `progmem' in DECL;
4885 if found return 1, otherwise 0. */
4887 int
4888 avr_progmem_p (tree decl, tree attributes)
4890 tree a;
4892 if (TREE_CODE (decl) != VAR_DECL)
4893 return 0;
4895 if (NULL_TREE
4896 != lookup_attribute ("progmem", attributes))
4897 return 1;
4899 a = decl;
4900 do
4901 a = TREE_TYPE (a);
4902 while (TREE_CODE (a) == ARRAY_TYPE);
4904 if (a == error_mark_node)
4905 return 0;
4907 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4908 return 1;
4910 return 0;
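/* Two declarations (assumed user code) that the predicate above accepts:
   the attribute may sit on the variable itself or, via the ARRAY_TYPE walk,
   on the element type of an array.  The typedef name is illustrative.  */

const char msg[] __attribute__ ((progmem)) = "hello";

typedef char flash_char __attribute__ ((progmem));
const flash_char table[3] = { 1, 2, 3 };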
4913 /* Add the section attribute if the variable is in progmem. */
4915 static void
4916 avr_insert_attributes (tree node, tree *attributes)
4918 if (TREE_CODE (node) == VAR_DECL
4919 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4920 && avr_progmem_p (node, *attributes))
4922 static const char dsec[] = ".progmem.data";
4923 *attributes = tree_cons (get_identifier ("section"),
4924 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4925 *attributes);
4927 /* ??? This seems sketchy. Why can't the user declare the
4928 thing const in the first place? */
4929 TREE_READONLY (node) = 1;
4933 /* A get_unnamed_section callback for switching to progmem_section. */
4935 static void
4936 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4938 fprintf (asm_out_file,
4939 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4940 AVR_HAVE_JMP_CALL ? "a" : "ax");
4941 /* Should already be aligned; this is just to be safe if it isn't. */
4942 fprintf (asm_out_file, "\t.p2align 1\n");
4945 /* Implement TARGET_ASM_INIT_SECTIONS. */
4947 static void
4948 avr_asm_init_sections (void)
4950 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4951 avr_output_progmem_section_asm_op,
4952 NULL);
4953 readonly_data_section = data_section;
4956 static unsigned int
4957 avr_section_type_flags (tree decl, const char *name, int reloc)
4959 unsigned int flags = default_section_type_flags (decl, name, reloc);
4961 if (strncmp (name, ".noinit", 7) == 0)
4963 if (decl && TREE_CODE (decl) == VAR_DECL
4964 && DECL_INITIAL (decl) == NULL_TREE)
4965 flags |= SECTION_BSS; /* @nobits */
4966 else
4967 warning (0, "only uninitialized variables can be placed in the "
4968 ".noinit section");
4971 return flags;
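/* Hedged user-level example of the .noinit handling above: the variable must
   have no initializer, otherwise the warning in this hook fires.  */

unsigned char reset_count __attribute__ ((section (".noinit")));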
4974 /* Outputs some appropriate text to go at the start of an assembler
4975 file. */
4977 static void
4978 avr_file_start (void)
4980 if (avr_current_arch->asm_only)
4981 error ("MCU %qs supported for assembler only", avr_mcu_name);
4983 default_file_start ();
4985 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4986 fputs ("__SREG__ = 0x3f\n"
4987 "__SP_H__ = 0x3e\n"
4988 "__SP_L__ = 0x3d\n", asm_out_file);
4990 fputs ("__tmp_reg__ = 0\n"
4991 "__zero_reg__ = 1\n", asm_out_file);
4993 /* FIXME: output these only if there is anything in the .data / .bss
4994 sections - some code size could be saved by not linking in the
4995 initialization code from libgcc if one or both sections are empty. */
4996 fputs ("\t.global __do_copy_data\n", asm_out_file);
4997 fputs ("\t.global __do_clear_bss\n", asm_out_file);
5000 /* Outputs to the stdio stream FILE some
5001 appropriate text to go at the end of an assembler file. */
5003 static void
5004 avr_file_end (void)
5008 /* Choose the order in which to allocate hard registers for
5009 pseudo-registers local to a basic block.
5011 Store the desired register order in the array `reg_alloc_order'.
5012 Element 0 should be the register to allocate first; element 1, the
5013 next register; and so on. */
5015 void
5016 order_regs_for_local_alloc (void)
5018 unsigned int i;
5019 static const int order_0[] = {
5020 24,25,
5021 18,19,
5022 20,21,
5023 22,23,
5024 30,31,
5025 26,27,
5026 28,29,
5027 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5028 0,1,
5029 32,33,34,35
5031 static const int order_1[] = {
5032 18,19,
5033 20,21,
5034 22,23,
5035 24,25,
5036 30,31,
5037 26,27,
5038 28,29,
5039 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5040 0,1,
5041 32,33,34,35
5043 static const int order_2[] = {
5044 25,24,
5045 23,22,
5046 21,20,
5047 19,18,
5048 30,31,
5049 26,27,
5050 28,29,
5051 17,16,
5052 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5053 1,0,
5054 32,33,34,35
5057 const int *order = (TARGET_ORDER_1 ? order_1 :
5058 TARGET_ORDER_2 ? order_2 :
5059 order_0);
5060 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5061 reg_alloc_order[i] = order[i];
5065 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
5066 cost of an RTX operand given its context. X is the rtx of the
5067 operand, MODE is its mode, and OUTER is the rtx_code of this
5068 operand's parent operator. */
5070 static int
5071 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5072 bool speed)
5074 enum rtx_code code = GET_CODE (x);
5075 int total;
5077 switch (code)
5079 case REG:
5080 case SUBREG:
5081 return 0;
5083 case CONST_INT:
5084 case CONST_DOUBLE:
5085 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5087 default:
5088 break;
5091 total = 0;
5092 avr_rtx_costs (x, code, outer, &total, speed);
5093 return total;
5096 /* The AVR backend's rtx_cost function. X is the rtx expression whose cost
5097 is to be calculated. Return true if the complete cost has been
5098 computed, and false if subexpressions should be scanned. In either
5099 case, *TOTAL contains the cost result. */
5101 static bool
5102 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5103 bool speed)
5105 enum rtx_code code = (enum rtx_code) codearg;
5106 enum machine_mode mode = GET_MODE (x);
5107 HOST_WIDE_INT val;
5109 switch (code)
5111 case CONST_INT:
5112 case CONST_DOUBLE:
5113 /* Immediate constants are as cheap as registers. */
5114 *total = 0;
5115 return true;
5117 case MEM:
5118 case CONST:
5119 case LABEL_REF:
5120 case SYMBOL_REF:
5121 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5122 return true;
5124 case NEG:
5125 switch (mode)
5127 case QImode:
5128 case SFmode:
5129 *total = COSTS_N_INSNS (1);
5130 break;
5132 case HImode:
5133 *total = COSTS_N_INSNS (3);
5134 break;
5136 case SImode:
5137 *total = COSTS_N_INSNS (7);
5138 break;
5140 default:
5141 return false;
5143 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5144 return true;
5146 case ABS:
5147 switch (mode)
5149 case QImode:
5150 case SFmode:
5151 *total = COSTS_N_INSNS (1);
5152 break;
5154 default:
5155 return false;
5157 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5158 return true;
5160 case NOT:
5161 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5162 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5163 return true;
5165 case ZERO_EXTEND:
5166 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5167 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5168 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5169 return true;
5171 case SIGN_EXTEND:
5172 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5173 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5174 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5175 return true;
5177 case PLUS:
5178 switch (mode)
5180 case QImode:
5181 *total = COSTS_N_INSNS (1);
5182 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5183 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5184 break;
5186 case HImode:
5187 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5189 *total = COSTS_N_INSNS (2);
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5192 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5193 *total = COSTS_N_INSNS (1);
5194 else
5195 *total = COSTS_N_INSNS (2);
5196 break;
5198 case SImode:
5199 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5201 *total = COSTS_N_INSNS (4);
5202 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5204 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5205 *total = COSTS_N_INSNS (1);
5206 else
5207 *total = COSTS_N_INSNS (4);
5208 break;
5210 default:
5211 return false;
5213 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5214 return true;
5216 case MINUS:
5217 case AND:
5218 case IOR:
5219 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5220 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5221 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5222 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5223 return true;
5225 case XOR:
5226 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5227 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5228 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5229 return true;
5231 case MULT:
5232 switch (mode)
5234 case QImode:
5235 if (AVR_HAVE_MUL)
5236 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5237 else if (!speed)
5238 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5239 else
5240 return false;
5241 break;
5243 case HImode:
5244 if (AVR_HAVE_MUL)
5245 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5246 else if (!speed)
5247 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5248 else
5249 return false;
5250 break;
5252 default:
5253 return false;
5255 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5256 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5257 return true;
5259 case DIV:
5260 case MOD:
5261 case UDIV:
5262 case UMOD:
5263 if (!speed)
5264 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5265 else
5266 return false;
5267 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5268 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5269 return true;
5271 case ROTATE:
5272 switch (mode)
5274 case QImode:
5275 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5276 *total = COSTS_N_INSNS (1);
5278 break;
5280 case HImode:
5281 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5282 *total = COSTS_N_INSNS (3);
5284 break;
5286 case SImode:
5287 if (CONST_INT_P (XEXP (x, 1)))
5288 switch (INTVAL (XEXP (x, 1)))
5290 case 8:
5291 case 24:
5292 *total = COSTS_N_INSNS (5);
5293 break;
5294 case 16:
5295 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5296 break;
5298 break;
5300 default:
5301 return false;
5303 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5304 return true;
5306 case ASHIFT:
5307 switch (mode)
5309 case QImode:
5310 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5312 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5313 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5315 else
5317 val = INTVAL (XEXP (x, 1));
5318 if (val == 7)
5319 *total = COSTS_N_INSNS (3);
5320 else if (val >= 0 && val <= 7)
5321 *total = COSTS_N_INSNS (val);
5322 else
5323 *total = COSTS_N_INSNS (1);
5325 break;
5327 case HImode:
5328 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5330 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5331 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5333 else
5334 switch (INTVAL (XEXP (x, 1)))
5336 case 0:
5337 *total = 0;
5338 break;
5339 case 1:
5340 case 8:
5341 *total = COSTS_N_INSNS (2);
5342 break;
5343 case 9:
5344 *total = COSTS_N_INSNS (3);
5345 break;
5346 case 2:
5347 case 3:
5348 case 10:
5349 case 15:
5350 *total = COSTS_N_INSNS (4);
5351 break;
5352 case 7:
5353 case 11:
5354 case 12:
5355 *total = COSTS_N_INSNS (5);
5356 break;
5357 case 4:
5358 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5359 break;
5360 case 6:
5361 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5362 break;
5363 case 5:
5364 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5365 break;
5366 default:
5367 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5368 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5370 break;
5372 case SImode:
5373 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5375 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5378 else
5379 switch (INTVAL (XEXP (x, 1)))
5381 case 0:
5382 *total = 0;
5383 break;
5384 case 24:
5385 *total = COSTS_N_INSNS (3);
5386 break;
5387 case 1:
5388 case 8:
5389 case 16:
5390 *total = COSTS_N_INSNS (4);
5391 break;
5392 case 31:
5393 *total = COSTS_N_INSNS (6);
5394 break;
5395 case 2:
5396 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5397 break;
5398 default:
5399 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5402 break;
5404 default:
5405 return false;
5407 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5408 return true;
5410 case ASHIFTRT:
5411 switch (mode)
5413 case QImode:
5414 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5416 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5417 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5419 else
5421 val = INTVAL (XEXP (x, 1));
5422 if (val == 6)
5423 *total = COSTS_N_INSNS (4);
5424 else if (val == 7)
5425 *total = COSTS_N_INSNS (2);
5426 else if (val >= 0 && val <= 7)
5427 *total = COSTS_N_INSNS (val);
5428 else
5429 *total = COSTS_N_INSNS (1);
5431 break;
5433 case HImode:
5434 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5436 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5437 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5439 else
5440 switch (INTVAL (XEXP (x, 1)))
5442 case 0:
5443 *total = 0;
5444 break;
5445 case 1:
5446 *total = COSTS_N_INSNS (2);
5447 break;
5448 case 15:
5449 *total = COSTS_N_INSNS (3);
5450 break;
5451 case 2:
5452 case 7:
5453 case 8:
5454 case 9:
5455 *total = COSTS_N_INSNS (4);
5456 break;
5457 case 10:
5458 case 14:
5459 *total = COSTS_N_INSNS (5);
5460 break;
5461 case 11:
5462 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5463 break;
5464 case 12:
5465 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5466 break;
5467 case 6:
5468 case 13:
5469 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5470 break;
5471 default:
5472 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5473 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5475 break;
5477 case SImode:
5478 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5480 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5481 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5483 else
5484 switch (INTVAL (XEXP (x, 1)))
5486 case 0:
5487 *total = 0;
5488 break;
5489 case 1:
5490 *total = COSTS_N_INSNS (4);
5491 break;
5492 case 8:
5493 case 16:
5494 case 24:
5495 *total = COSTS_N_INSNS (6);
5496 break;
5497 case 2:
5498 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5499 break;
5500 case 31:
5501 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5502 break;
5503 default:
5504 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5505 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5507 break;
5509 default:
5510 return false;
5512 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5513 return true;
5515 case LSHIFTRT:
5516 switch (mode)
5518 case QImode:
5519 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5521 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5522 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5524 else
5526 val = INTVAL (XEXP (x, 1));
5527 if (val == 7)
5528 *total = COSTS_N_INSNS (3);
5529 else if (val >= 0 && val <= 7)
5530 *total = COSTS_N_INSNS (val);
5531 else
5532 *total = COSTS_N_INSNS (1);
5534 break;
5536 case HImode:
5537 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5539 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5540 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5542 else
5543 switch (INTVAL (XEXP (x, 1)))
5545 case 0:
5546 *total = 0;
5547 break;
5548 case 1:
5549 case 8:
5550 *total = COSTS_N_INSNS (2);
5551 break;
5552 case 9:
5553 *total = COSTS_N_INSNS (3);
5554 break;
5555 case 2:
5556 case 10:
5557 case 15:
5558 *total = COSTS_N_INSNS (4);
5559 break;
5560 case 7:
5561 case 11:
5562 *total = COSTS_N_INSNS (5);
5563 break;
5564 case 3:
5565 case 12:
5566 case 13:
5567 case 14:
5568 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5569 break;
5570 case 4:
5571 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5572 break;
5573 case 5:
5574 case 6:
5575 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5576 break;
5577 default:
5578 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5579 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5581 break;
5583 case SImode:
5584 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5586 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5587 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5589 else
5590 switch (INTVAL (XEXP (x, 1)))
5592 case 0:
5593 *total = 0;
5594 break;
5595 case 1:
5596 *total = COSTS_N_INSNS (4);
5597 break;
5598 case 2:
5599 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5600 break;
5601 case 8:
5602 case 16:
5603 case 24:
5604 *total = COSTS_N_INSNS (4);
5605 break;
5606 case 31:
5607 *total = COSTS_N_INSNS (6);
5608 break;
5609 default:
5610 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5611 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5613 break;
5615 default:
5616 return false;
5618 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5619 return true;
5621 case COMPARE:
5622 switch (GET_MODE (XEXP (x, 0)))
5624 case QImode:
5625 *total = COSTS_N_INSNS (1);
5626 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5628 break;
5630 case HImode:
5631 *total = COSTS_N_INSNS (2);
5632 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5633 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5634 else if (INTVAL (XEXP (x, 1)) != 0)
5635 *total += COSTS_N_INSNS (1);
5636 break;
5638 case SImode:
5639 *total = COSTS_N_INSNS (4);
5640 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5641 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5642 else if (INTVAL (XEXP (x, 1)) != 0)
5643 *total += COSTS_N_INSNS (3);
5644 break;
5646 default:
5647 return false;
5649 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5650 return true;
5652 default:
5653 break;
5655 return false;
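/* A self-contained sketch (not used by the compiler) of how the HImode PLUS
   case above prices a register-plus-constant addition; COSTS_N_INSNS is
   assumed to scale by 4 as in GCC's rtl.h.  */

#define SKETCH_COSTS_N_INSNS(N) ((N) * 4)

static int
sketch_plus_hi_const_cost (long addend)
{
  /* A constant in -63..63 fits a single adiw/sbiw-style insn; anything else
     needs a two-insn subi/sbci sequence, mirroring the switch above.  */
  return (addend >= -63 && addend <= 63)
         ? SKETCH_COSTS_N_INSNS (1)
         : SKETCH_COSTS_N_INSNS (2);
}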
5658 /* Calculate the cost of a memory address. */
5660 static int
5661 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5663 if (GET_CODE (x) == PLUS
5664 && GET_CODE (XEXP (x,1)) == CONST_INT
5665 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5666 && INTVAL (XEXP (x,1)) >= 61)
5667 return 18;
5668 if (CONSTANT_ADDRESS_P (x))
5670 if (optimize > 0 && io_address_operand (x, QImode))
5671 return 2;
5672 return 4;
5674 return 4;
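/* Worked example (illustrative): the address (Y + 62) exceeds the
   displacement threshold above and is costed at 18; (Y + 10) falls through
   to the default cost of 4; a constant I/O address is costed at 2 when
   optimizing.  */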
5677 /* Test for extra memory constraint 'Q'.
5678 It's a memory address based on the Y or Z pointer with a valid displacement. */
5680 int
5681 extra_constraint_Q (rtx x)
5683 if (GET_CODE (XEXP (x,0)) == PLUS
5684 && REG_P (XEXP (XEXP (x,0), 0))
5685 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5686 && (INTVAL (XEXP (XEXP (x,0), 1))
5687 <= MAX_LD_OFFSET (GET_MODE (x))))
5689 rtx xx = XEXP (XEXP (x,0), 0);
5690 int regno = REGNO (xx);
5691 if (TARGET_ALL_DEBUG)
5693 fprintf (stderr, ("extra_constraint:\n"
5694 "reload_completed: %d\n"
5695 "reload_in_progress: %d\n"),
5696 reload_completed, reload_in_progress);
5697 debug_rtx (x);
5699 if (regno >= FIRST_PSEUDO_REGISTER)
5700 return 1; /* allocate pseudos */
5701 else if (regno == REG_Z || regno == REG_Y)
5702 return 1; /* strictly check */
5703 else if (xx == frame_pointer_rtx
5704 || xx == arg_pointer_rtx)
5705 return 1; /* XXX frame & arg pointer checks */
5707 return 0;
5710 /* Convert condition code CONDITION to the valid AVR condition code. */
5712 RTX_CODE
5713 avr_normalize_condition (RTX_CODE condition)
5715 switch (condition)
5717 case GT:
5718 return GE;
5719 case GTU:
5720 return GEU;
5721 case LE:
5722 return LT;
5723 case LEU:
5724 return LTU;
5725 default:
5726 gcc_unreachable ();
5730 /* This function optimizes conditional jumps. */
5732 static void
5733 avr_reorg (void)
5735 rtx insn, pattern;
5737 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5739 if (! (GET_CODE (insn) == INSN
5740 || GET_CODE (insn) == CALL_INSN
5741 || GET_CODE (insn) == JUMP_INSN)
5742 || !single_set (insn))
5743 continue;
5745 pattern = PATTERN (insn);
5747 if (GET_CODE (pattern) == PARALLEL)
5748 pattern = XVECEXP (pattern, 0, 0);
5749 if (GET_CODE (pattern) == SET
5750 && SET_DEST (pattern) == cc0_rtx
5751 && compare_diff_p (insn))
5753 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5755 /* Now we are working on the compare insn itself. */
5757 pattern = SET_SRC (pattern);
5758 if (true_regnum (XEXP (pattern,0)) >= 0
5759 && true_regnum (XEXP (pattern,1)) >= 0 )
5761 rtx x = XEXP (pattern,0);
5762 rtx next = next_real_insn (insn);
5763 rtx pat = PATTERN (next);
5764 rtx src = SET_SRC (pat);
5765 rtx t = XEXP (src,0);
5766 PUT_CODE (t, swap_condition (GET_CODE (t)));
5767 XEXP (pattern,0) = XEXP (pattern,1);
5768 XEXP (pattern,1) = x;
5769 INSN_CODE (next) = -1;
5771 else if (true_regnum (XEXP (pattern, 0)) >= 0
5772 && XEXP (pattern, 1) == const0_rtx)
5774 /* This is a tst insn; we can reverse it. */
5775 rtx next = next_real_insn (insn);
5776 rtx pat = PATTERN (next);
5777 rtx src = SET_SRC (pat);
5778 rtx t = XEXP (src,0);
5780 PUT_CODE (t, swap_condition (GET_CODE (t)));
5781 XEXP (pattern, 1) = XEXP (pattern, 0);
5782 XEXP (pattern, 0) = const0_rtx;
5783 INSN_CODE (next) = -1;
5784 INSN_CODE (insn) = -1;
5786 else if (true_regnum (XEXP (pattern,0)) >= 0
5787 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5789 rtx x = XEXP (pattern,1);
5790 rtx next = next_real_insn (insn);
5791 rtx pat = PATTERN (next);
5792 rtx src = SET_SRC (pat);
5793 rtx t = XEXP (src,0);
5794 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5796 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5798 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5799 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5800 INSN_CODE (next) = -1;
5801 INSN_CODE (insn) = -1;
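/* Worked example of the last transformation above (illustrative): a compare
   of a register against the constant 5 followed by a GT branch is rewritten
   into a compare against 6 with a GE branch, because avr_normalize_condition
   maps GT to GE and the constant is bumped by one; GE/GEU and LT/LTU are the
   codes the AVR conditional branches can test directly.  */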
5809 /* Return the register number used for the function return value. */
5811 int
5812 avr_ret_register (void)
5814 return 24;
5817 /* Create an RTX representing the place where a
5818 library function returns a value of mode MODE. */
5820 rtx
5821 avr_libcall_value (enum machine_mode mode)
5823 int offs = GET_MODE_SIZE (mode);
5824 if (offs < 2)
5825 offs = 2;
5826 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5829 /* Create an RTX representing the place where a
5830 function returns a value of data type VALTYPE. */
5832 static rtx
5833 avr_function_value (const_tree type,
5834 const_tree func ATTRIBUTE_UNUSED,
5835 bool outgoing ATTRIBUTE_UNUSED)
5837 unsigned int offs;
5839 if (TYPE_MODE (type) != BLKmode)
5840 return avr_libcall_value (TYPE_MODE (type));
5842 offs = int_size_in_bytes (type);
5843 if (offs < 2)
5844 offs = 2;
5845 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5846 offs = GET_MODE_SIZE (SImode);
5847 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5848 offs = GET_MODE_SIZE (DImode);
5850 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
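/* Minimal sketch of the register mapping implemented above, assuming
   RET_REGISTER is 24 (the value avr_ret_register returns): a 1- or 2-byte
   value comes back in r24/r25, a 4-byte value in r22..r25 and an 8-byte
   value in r18..r25.  */

static int
sketch_return_regno (int size_in_bytes)
{
  int offs = size_in_bytes < 2 ? 2 : size_in_bytes;
  return 24 + 2 - offs;  /* lowest register holding the return value */
}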
5853 int
5854 test_hard_reg_class (enum reg_class rclass, rtx x)
5856 int regno = true_regnum (x);
5857 if (regno < 0)
5858 return 0;
5860 if (TEST_HARD_REG_CLASS (rclass, regno))
5861 return 1;
5863 return 0;
5867 int
5868 jump_over_one_insn_p (rtx insn, rtx dest)
5870 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5871 ? XEXP (dest, 0)
5872 : dest);
5873 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5874 int dest_addr = INSN_ADDRESSES (uid);
5875 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5878 /* Return 1 if a value of mode MODE can be stored starting with hard
5879 register number REGNO. On the enhanced core, anything larger than
5880 1 byte must start in an even-numbered register for "movw" to work
5881 (this way we don't have to check for odd registers everywhere). */
5883 int
5884 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5886 /* Disallow QImode in stack pointer regs. */
5887 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5888 return 0;
5890 /* The only thing that can go into registers r28:r29 is a Pmode. */
5891 if (regno == REG_Y && mode == Pmode)
5892 return 1;
5894 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5895 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5896 return 0;
5898 if (mode == QImode)
5899 return 1;
5901 /* Modes larger than QImode occupy consecutive registers. */
5902 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5903 return 0;
5905 /* All modes larger than QImode should start in an even register. */
5906 return !(regno & 1);
5909 const char *
5910 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5912 int tmp;
5913 if (!len)
5914 len = &tmp;
5916 if (GET_CODE (operands[1]) == CONST_INT)
5918 int val = INTVAL (operands[1]);
5919 if ((val & 0xff) == 0)
5921 *len = 3;
5922 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5923 AS2 (ldi,%2,hi8(%1)) CR_TAB
5924 AS2 (mov,%B0,%2));
5926 else if ((val & 0xff00) == 0)
5928 *len = 3;
5929 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5930 AS2 (mov,%A0,%2) CR_TAB
5931 AS2 (mov,%B0,__zero_reg__));
5933 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5935 *len = 3;
5936 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5937 AS2 (mov,%A0,%2) CR_TAB
5938 AS2 (mov,%B0,%2));
5941 *len = 4;
5942 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5943 AS2 (mov,%A0,%2) CR_TAB
5944 AS2 (ldi,%2,hi8(%1)) CR_TAB
5945 AS2 (mov,%B0,%2));
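/* Example of the first branch above (illustrative): reloading the constant
   0x3400, whose low byte is zero, through scratch register %2 emits
       mov  %A0,__zero_reg__
       ldi  %2,hi8(0x3400)
       mov  %B0,%2
   i.e. the three-insn sequence reported through *len.  */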
5949 const char *
5950 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5952 rtx src = operands[1];
5953 int cnst = (GET_CODE (src) == CONST_INT);
5955 if (len)
5957 if (cnst)
5958 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5959 + ((INTVAL (src) & 0xff00) != 0)
5960 + ((INTVAL (src) & 0xff0000) != 0)
5961 + ((INTVAL (src) & 0xff000000) != 0);
5962 else
5963 *len = 8;
5965 return "";
5968 if (cnst && ((INTVAL (src) & 0xff) == 0))
5969 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5970 else
5972 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5973 output_asm_insn (AS2 (mov, %A0, %2), operands);
5975 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5976 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5977 else
5979 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5980 output_asm_insn (AS2 (mov, %B0, %2), operands);
5982 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5983 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5984 else
5986 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5987 output_asm_insn (AS2 (mov, %C0, %2), operands);
5989 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5990 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5991 else
5993 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5994 output_asm_insn (AS2 (mov, %D0, %2), operands);
5996 return "";
5999 void
6000 avr_output_bld (rtx operands[], int bit_nr)
6002 static char s[] = "bld %A0,0";
6004 s[5] = 'A' + (bit_nr >> 3);
6005 s[8] = '0' + (bit_nr & 7);
6006 output_asm_insn (s, operands);
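/* Example (illustrative): bit_nr == 10 patches the template above into
   "bld %B0,2", i.e. bit 2 of the second byte of operand 0.  */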
6009 void
6010 avr_output_addr_vec_elt (FILE *stream, int value)
6012 switch_to_section (progmem_section);
6013 if (AVR_HAVE_JMP_CALL)
6014 fprintf (stream, "\t.word gs(.L%d)\n", value);
6015 else
6016 fprintf (stream, "\trjmp .L%d\n", value);
6019 /* Return true if register REGNO is safe to allocate as a scratch
6020 register (for a define_peephole2) in the current function. */
6022 bool
6023 avr_hard_regno_scratch_ok (unsigned int regno)
6025 /* Interrupt functions can only use registers that have already been saved
6026 by the prologue, even if they would normally be call-clobbered. */
6028 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6029 && !df_regs_ever_live_p (regno))
6030 return false;
6032 return true;
6035 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6037 int
6038 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6039 unsigned int new_reg)
6041 /* Interrupt functions can only use registers that have already been
6042 saved by the prologue, even if they would normally be
6043 call-clobbered. */
6045 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6046 && !df_regs_ever_live_p (new_reg))
6047 return 0;
6049 return 1;
6052 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6053 or memory location in the I/O space (QImode only).
6055 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6056 Operand 1: register operand to test, or CONST_INT memory address.
6057 Operand 2: bit number.
6058 Operand 3: label to jump to if the test is true. */
6060 const char *
6061 avr_out_sbxx_branch (rtx insn, rtx operands[])
6063 enum rtx_code comp = GET_CODE (operands[0]);
6064 int long_jump = (get_attr_length (insn) >= 4);
6065 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6067 if (comp == GE)
6068 comp = EQ;
6069 else if (comp == LT)
6070 comp = NE;
6072 if (reverse)
6073 comp = reverse_condition (comp);
6075 if (GET_CODE (operands[1]) == CONST_INT)
6077 if (INTVAL (operands[1]) < 0x40)
6079 if (comp == EQ)
6080 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6081 else
6082 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6084 else
6086 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6087 if (comp == EQ)
6088 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6089 else
6090 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6093 else /* GET_CODE (operands[1]) == REG */
6095 if (GET_MODE (operands[1]) == QImode)
6097 if (comp == EQ)
6098 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6099 else
6100 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6102 else /* HImode or SImode */
6104 static char buf[] = "sbrc %A1,0";
6105 int bit_nr = INTVAL (operands[2]);
6106 buf[3] = (comp == EQ) ? 's' : 'c';
6107 buf[6] = 'A' + (bit_nr >> 3);
6108 buf[9] = '0' + (bit_nr & 7);
6109 output_asm_insn (buf, operands);
6113 if (long_jump)
6114 return (AS1 (rjmp,.+4) CR_TAB
6115 AS1 (jmp,%x3));
6116 if (!reverse)
6117 return AS1 (rjmp,%x3);
6118 return "";
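/* Example of the I/O-space path above (illustrative): for a CONST_INT
   address below 0x40 and an EQ test of bit 3, an "sbis <io-addr>,3"
   (skip if bit set) is emitted, followed for a short non-reversed branch
   by "rjmp %x3" -- so the jump to the label is taken only when the bit
   is clear.  */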
6121 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6123 static void
6124 avr_asm_out_ctor (rtx symbol, int priority)
6126 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6127 default_ctor_section_asm_out_constructor (symbol, priority);
6130 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6132 static void
6133 avr_asm_out_dtor (rtx symbol, int priority)
6135 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6136 default_dtor_section_asm_out_destructor (symbol, priority);
6139 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6141 static bool
6142 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6144 if (TYPE_MODE (type) == BLKmode)
6146 HOST_WIDE_INT size = int_size_in_bytes (type);
6147 return (size == -1 || size > 8);
6149 else
6150 return false;
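/* Hedged example of the 8-byte rule above (user code):  */

struct in_regs { char b[8]; };   /* returned in registers */
struct in_mem  { char b[10]; };  /* returned via a hidden pointer, in memory */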
6153 /* Worker function for CASE_VALUES_THRESHOLD. */
6155 unsigned int avr_case_values_threshold (void)
6157 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6160 #include "gt-avr.h"