Merged trunk at revision 161680 into branch.
[official-gcc.git] / gcc / config / avr / avr.c
blob531a4128bf31da100dc43f3637196eddd1bf4761
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "obstack.h"
39 #include "function.h"
40 #include "recog.h"
41 #include "ggc.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "params.h"
46 #include "df.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
90 static unsigned int avr_case_values_threshold (void);
91 static bool avr_frame_pointer_required_p (void);
92 static bool avr_can_eliminate (const int, const int);
94 /* Allocate registers from r25 to r8 for parameters for function calls. */
95 #define FIRST_CUM_REG 26
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
98 static GTY(()) rtx tmp_reg_rtx;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
101 static GTY(()) rtx zero_reg_rtx;
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames[] = REGISTER_NAMES;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 /* Current device. */
113 const struct mcu_type_s *avr_current_device;
115 section *progmem_section;
117 /* AVR attributes.  */
/* Machine attribute table registered via TARGET_ATTRIBUTE_TABLE below.
   decl_req/type_req/fn_type_req select which tree the handler checks:
   "signal"/"interrupt" attach to the decl; "naked"/"OS_task"/"OS_main"
   attach to the function type.  */
118 static const struct attribute_spec avr_attribute_table[] =
120 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
121 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
122 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
123 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
124 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
125 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
126 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* NULL-name sentinel terminates the table.  */
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Initialize the GCC target structure. */
131 #undef TARGET_ASM_ALIGNED_HI_OP
132 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
133 #undef TARGET_ASM_ALIGNED_SI_OP
134 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
135 #undef TARGET_ASM_UNALIGNED_HI_OP
136 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
137 #undef TARGET_ASM_UNALIGNED_SI_OP
138 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
139 #undef TARGET_ASM_INTEGER
140 #define TARGET_ASM_INTEGER avr_assemble_integer
141 #undef TARGET_ASM_FILE_START
142 #define TARGET_ASM_FILE_START avr_file_start
143 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
144 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
145 #undef TARGET_ASM_FILE_END
146 #define TARGET_ASM_FILE_END avr_file_end
148 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
149 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
150 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
151 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
152 #undef TARGET_FUNCTION_VALUE
153 #define TARGET_FUNCTION_VALUE avr_function_value
154 #undef TARGET_ATTRIBUTE_TABLE
155 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
156 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
157 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
158 #undef TARGET_INSERT_ATTRIBUTES
159 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
160 #undef TARGET_SECTION_TYPE_FLAGS
161 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
162 #undef TARGET_RTX_COSTS
163 #define TARGET_RTX_COSTS avr_rtx_costs
164 #undef TARGET_ADDRESS_COST
165 #define TARGET_ADDRESS_COST avr_address_cost
166 #undef TARGET_MACHINE_DEPENDENT_REORG
167 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
169 #undef TARGET_LEGITIMIZE_ADDRESS
170 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
175 #undef TARGET_STRICT_ARGUMENT_NAMING
176 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
178 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
179 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
181 #undef TARGET_HARD_REGNO_SCRATCH_OK
182 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
183 #undef TARGET_CASE_VALUES_THRESHOLD
184 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
186 #undef TARGET_LEGITIMATE_ADDRESS_P
187 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
189 #undef TARGET_FRAME_POINTER_REQUIRED
190 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
191 #undef TARGET_CAN_ELIMINATE
192 #define TARGET_CAN_ELIMINATE avr_can_eliminate
194 struct gcc_target targetm = TARGET_INITIALIZER;
196 void
197 avr_override_options (void)
199 const struct mcu_type_s *t;
201 flag_delete_null_pointer_checks = 0;
203 for (t = avr_mcu_types; t->name; t++)
204 if (strcmp (t->name, avr_mcu_name) == 0)
205 break;
207 if (!t->name)
209 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
210 avr_mcu_name);
211 for (t = avr_mcu_types; t->name; t++)
212 fprintf (stderr," %s\n", t->name);
215 avr_current_device = t;
216 avr_current_arch = &avr_arch_types[avr_current_device->arch];
217 avr_extra_arch_macro = avr_current_device->macro;
219 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
220 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
222 init_machine_status = avr_init_machine_status;
225 /* return register class from register number.  */
/* 34 entries: hard registers r0..r31 followed by SPL/SPH (regnos 32,33).
   Indexed directly by regno in avr_regno_reg_class.  */
227 static const enum reg_class reg_class_tab[]={
228 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
229 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
230 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
231 GENERAL_REGS, /* r0 - r15 */
232 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
233 LD_REGS, /* r16 - 23 */
234 ADDW_REGS,ADDW_REGS, /* r24,r25 */
235 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
236 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
237 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
238 STACK_REG,STACK_REG /* SPL,SPH */
241 /* Function to set up the backend function structure.  */
/* Installed as init_machine_status; GC-allocates a zeroed
   machine_function, so all per-function flags start out false/0.  */
243 static struct machine_function *
244 avr_init_machine_status (void)
246 return ggc_alloc_cleared_machine_function ();
249 /* Return register class for register R. */
251 enum reg_class
252 avr_regno_reg_class (int r)
254 if (r <= 33)
255 return reg_class_tab[r];
256 return ALL_REGS;
259 /* Return nonzero if FUNC is a naked function. */
261 static int
262 avr_naked_function_p (tree func)
264 tree a;
266 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
268 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
269 return a != NULL_TREE;
272 /* Return nonzero if FUNC is an interrupt function as specified
273 by the "interrupt" attribute. */
275 static int
276 interrupt_function_p (tree func)
278 tree a;
280 if (TREE_CODE (func) != FUNCTION_DECL)
281 return 0;
283 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
284 return a != NULL_TREE;
287 /* Return nonzero if FUNC is a signal function as specified
288 by the "signal" attribute. */
290 static int
291 signal_function_p (tree func)
293 tree a;
295 if (TREE_CODE (func) != FUNCTION_DECL)
296 return 0;
298 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
299 return a != NULL_TREE;
302 /* Return nonzero if FUNC is a OS_task function. */
304 static int
305 avr_OS_task_function_p (tree func)
307 tree a;
309 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
311 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
312 return a != NULL_TREE;
315 /* Return nonzero if FUNC is a OS_main function. */
317 static int
318 avr_OS_main_function_p (tree func)
320 tree a;
322 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
324 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
325 return a != NULL_TREE;
328 /* Return the number of hard registers to push/pop in the prologue/epilogue
329 of the current function, and optionally store these registers in SET.  */
331 static int
332 avr_regs_to_save (HARD_REG_SET *set)
334 int reg, count;
/* ISRs got no save from their "caller", so they must also preserve
   call-used registers they clobber.  */
335 int int_or_sig_p = (interrupt_function_p (current_function_decl)
336 || signal_function_p (current_function_decl));
338 if (set)
339 CLEAR_HARD_REG_SET (*set);
340 count = 0;
342 /* No need to save any registers if the function never returns or
343 has the "OS_task" or "OS_main" attribute.  */
344 if (TREE_THIS_VOLATILE (current_function_decl)
345 || cfun->machine->is_OS_task
346 || cfun->machine->is_OS_main)
347 return 0;
349 for (reg = 0; reg < 32; reg++)
351 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
352 any global register variables.  */
353 if (fixed_regs[reg])
354 continue;
/* Save REG when: it is call-used and live across calls in an ISR,
   or it is live and callee-saved (or we are in an ISR) — except the
   Y pair when it serves as frame pointer, which the prologue
   handles separately.  */
356 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
357 || (df_regs_ever_live_p (reg)
358 && (int_or_sig_p || !call_used_regs[reg])
359 && !(frame_pointer_needed
360 && (reg == REG_Y || reg == (REG_Y+1)))))
362 if (set)
363 SET_HARD_REG_BIT (*set, reg);
364 count++;
367 return count;
370 /* Return true if register FROM can be eliminated via register TO. */
372 bool
373 avr_can_eliminate (const int from, const int to)
375 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
376 || ((from == FRAME_POINTER_REGNUM
377 || from == FRAME_POINTER_REGNUM + 1)
378 && !frame_pointer_needed));
381 /* Compute offset between arg_pointer and frame_pointer. */
384 avr_initial_elimination_offset (int from, int to)
386 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
387 return 0;
388 else
390 int offset = frame_pointer_needed ? 2 : 0;
391 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
393 offset += avr_regs_to_save (NULL);
394 return get_frame_size () + (avr_pc_size) + 1 + offset;
398 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
399 frame pointer by +STARTING_FRAME_OFFSET.
400 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
401 avoids creating add/sub of offset in nonlocal goto and setjmp. */
403 rtx avr_builtin_setjmp_frame_value (void)
405 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
406 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
409 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
410 This is return address of function.  */
411 rtx
412 avr_return_addr_rtx (int count, const_rtx tem)
414 rtx r;
416 /* Can only return this functions return address. Others not supported.  */
417 if (count)
418 return NULL;
/* .L__stack_usage is the per-function symbol emitted by
   avr_asm_function_end_prologue; +1/+2 skips to the return address
   on the stack.  On 3-byte-PC devices only the low 2 bytes are
   retrievable, hence the warning.  */
420 if (AVR_3_BYTE_PC)
422 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
423 warning (0, "'builtin_return_address' contains only 2 bytes of address");
425 else
426 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
428 r = gen_rtx_PLUS (Pmode, tem, r);
429 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* NOTE(review): ROTATE by 8 on HImode swaps the two bytes —
   presumably the return address is pushed high-byte-first; confirm
   against the call insn patterns.  */
430 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
431 return r;
434 /* Return 1 if the function epilogue is just a single "ret". */
437 avr_simple_epilogue (void)
439 return (! frame_pointer_needed
440 && get_frame_size () == 0
441 && avr_regs_to_save (NULL) == 0
442 && ! interrupt_function_p (current_function_decl)
443 && ! signal_function_p (current_function_decl)
444 && ! avr_naked_function_p (current_function_decl)
445 && ! TREE_THIS_VOLATILE (current_function_decl));
448 /* This function checks sequence of live registers.  */
/* Counts how many of r0..r17 (plus the Y pair) are live: live_seq is
   the total, cur_seq is the length of the trailing contiguous run.
   A nonzero result (cur_seq == live_seq) means all live callee-saved
   registers form one run ending at Y, which is what the
   call_prologue_saves/epilogue_restores helpers require.  */
450 static int
451 sequent_regs_live (void)
453 int reg;
454 int live_seq=0;
455 int cur_seq=0;
457 for (reg = 0; reg < 18; ++reg)
459 if (!call_used_regs[reg])
461 if (df_regs_ever_live_p (reg))
463 ++live_seq;
464 ++cur_seq;
466 else
467 cur_seq = 0;
/* With a frame pointer, Y (r28/r29) is always saved; otherwise it
   counts only when actually live.  */
471 if (!frame_pointer_needed)
473 if (df_regs_ever_live_p (REG_Y))
475 ++live_seq;
476 ++cur_seq;
478 else
479 cur_seq = 0;
481 if (df_regs_ever_live_p (REG_Y+1))
483 ++live_seq;
484 ++cur_seq;
486 else
487 cur_seq = 0;
489 else
491 cur_seq += 2;
492 live_seq += 2;
494 return (cur_seq == live_seq) ? live_seq : 0;
497 /* Obtain the length sequence of insns. */
500 get_sequence_length (rtx insns)
502 rtx insn;
503 int length;
505 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
506 length += get_attr_length (insn);
508 return length;
511 /* Output function prologue.  */
/* Emits the RTL prologue: ISR preamble (SREG/RAMPZ/tmp/zero saves),
   register pushes (or the out-of-line call_prologue_saves helper when
   -mcall-prologues applies), and frame setup, picking the shorter of
   two frame-allocation strategies.  Also initializes cfun->machine.  */
513 void
514 expand_prologue (void)
516 int live_seq;
517 HARD_REG_SET set;
518 int minimize;
519 HOST_WIDE_INT size = get_frame_size();
520 /* Define templates for push instructions.  */
521 rtx pushbyte = gen_rtx_MEM (QImode,
522 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
523 rtx pushword = gen_rtx_MEM (HImode,
524 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
525 rtx insn;
527 /* Init cfun->machine.  */
528 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
529 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
530 cfun->machine->is_signal = signal_function_p (current_function_decl);
531 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
532 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
533 cfun->machine->stack_usage = 0;
535 /* Prologue: naked.  */
536 if (cfun->machine->is_naked)
538 return;
541 avr_regs_to_save (&set);
542 live_seq = sequent_regs_live ();
/* -mcall-prologues replaces inline pushes with a library call, but
   only for plain functions with a usable live-register run.  */
543 minimize = (TARGET_CALL_PROLOGUES
544 && !cfun->machine->is_interrupt
545 && !cfun->machine->is_signal
546 && !cfun->machine->is_OS_task
547 && !cfun->machine->is_OS_main
548 && live_seq);
550 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
552 if (cfun->machine->is_interrupt)
554 /* Enable interrupts.  */
555 insn = emit_insn (gen_enable_interrupt ());
556 RTX_FRAME_RELATED_P (insn) = 1;
559 /* Push zero reg.  */
560 insn = emit_move_insn (pushbyte, zero_reg_rtx);
561 RTX_FRAME_RELATED_P (insn) = 1;
562 cfun->machine->stack_usage++;
564 /* Push tmp reg.  */
565 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
566 RTX_FRAME_RELATED_P (insn) = 1;
567 cfun->machine->stack_usage++;
569 /* Push SREG.  */
570 insn = emit_move_insn (tmp_reg_rtx,
571 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)))
572 RTX_FRAME_RELATED_P (insn) = 1;
573 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
574 RTX_FRAME_RELATED_P (insn) = 1;
575 cfun->machine->stack_usage++;
/* RAMPZ only needs saving when the Z pair is clobbered (ELPM use).  */
577 /* Push RAMPZ.  */
578 if(AVR_HAVE_RAMPZ
579 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
581 insn = emit_move_insn (tmp_reg_rtx,
582 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
583 RTX_FRAME_RELATED_P (insn) = 1;
584 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
585 RTX_FRAME_RELATED_P (insn) = 1;
586 cfun->machine->stack_usage++;
589 /* Clear zero reg.  */
590 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
591 RTX_FRAME_RELATED_P (insn) = 1;
593 /* Prevent any attempt to delete the setting of ZERO_REG!  */
594 emit_use (zero_reg_rtx);
596 if (minimize && (frame_pointer_needed
597 || (AVR_2_BYTE_PC && live_seq > 6)
598 || live_seq > 7))
600 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
601 gen_int_mode (size, HImode));
602 RTX_FRAME_RELATED_P (insn) = 1;
604 insn =
605 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
606 gen_int_mode (size + live_seq, HImode)));
607 RTX_FRAME_RELATED_P (insn) = 1;
608 cfun->machine->stack_usage += size + live_seq;
610 else
612 int reg;
613 for (reg = 0; reg < 32; ++reg)
615 if (TEST_HARD_REG_BIT (set, reg))
617 /* Emit push of register to save.  */
618 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
619 RTX_FRAME_RELATED_P (insn) = 1;
620 cfun->machine->stack_usage++;
623 if (frame_pointer_needed)
625 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
627 /* Push frame pointer.  */
628 insn = emit_move_insn (pushword, frame_pointer_rtx);
629 RTX_FRAME_RELATED_P (insn) = 1;
630 cfun->machine->stack_usage += 2;
633 if (!size)
635 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
636 RTX_FRAME_RELATED_P (insn) = 1;
638 else
640 /* Creating a frame can be done by direct manipulation of the
641 stack or via the frame pointer. These two methods are:
642 fp=sp
643 fp-=size
644 sp=fp
646 sp-=size
647 fp=sp
648 the optimum method depends on function type, stack and frame size.
649 To avoid a complex logic, both methods are tested and shortest
650 is selected.  */
651 rtx myfp;
652 rtx fp_plus_insns;
653 rtx sp_plus_insns = NULL_RTX;
655 if (AVR_HAVE_8BIT_SP)
657 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
658 over 'sbiw' (2 cycles, same size).  */
659 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
661 else
663 /* Normal sized addition.  */
664 myfp = frame_pointer_rtx;
667 /* Method 1-Adjust frame pointer.  */
668 start_sequence ();
670 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
671 RTX_FRAME_RELATED_P (insn) = 1;
673 insn =
674 emit_move_insn (myfp,
675 gen_rtx_PLUS (GET_MODE(myfp), myfp,
676 gen_int_mode (-size,
677 GET_MODE(myfp))));
678 RTX_FRAME_RELATED_P (insn) = 1;
/* Writing SP is not atomic on 16-bit-SP devices: the irq_off/irq_on
   variants guard against an interrupt observing a torn SP.  */
680 /* Copy to stack pointer.  */
681 if (AVR_HAVE_8BIT_SP)
683 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
684 RTX_FRAME_RELATED_P (insn) = 1;
686 else if (TARGET_NO_INTERRUPTS
687 || cfun->machine->is_signal
688 || cfun->machine->is_OS_main)
690 insn =
691 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
692 frame_pointer_rtx));
693 RTX_FRAME_RELATED_P (insn) = 1;
695 else if (cfun->machine->is_interrupt)
697 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
698 frame_pointer_rtx));
699 RTX_FRAME_RELATED_P (insn) = 1;
701 else
703 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
704 RTX_FRAME_RELATED_P (insn) = 1;
707 fp_plus_insns = get_insns ();
708 end_sequence ();
710 /* Method 2-Adjust Stack pointer.  */
711 if (size <= 6)
713 start_sequence ();
715 insn =
716 emit_move_insn (stack_pointer_rtx,
717 gen_rtx_PLUS (HImode,
718 stack_pointer_rtx,
719 gen_int_mode (-size,
720 HImode)));
721 RTX_FRAME_RELATED_P (insn) = 1;
723 insn =
724 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
725 RTX_FRAME_RELATED_P (insn) = 1;
727 sp_plus_insns = get_insns ();
728 end_sequence ();
731 /* Use shortest method.  */
732 if (size <= 6 && (get_sequence_length (sp_plus_insns)
733 < get_sequence_length (fp_plus_insns)))
734 emit_insn (sp_plus_insns);
735 else
736 emit_insn (fp_plus_insns);
737 cfun->machine->stack_usage += size;
743 /* Output summary at end of function prologue.  */
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: emits human-readable
   comments (function kind, frame and stack size) plus the
   .L__stack_usage symbol that avr_return_addr_rtx relies on.  */
745 static void
746 avr_asm_function_end_prologue (FILE *file)
748 if (cfun->machine->is_naked)
750 fputs ("/* prologue: naked */\n", file);
752 else
754 if (cfun->machine->is_interrupt)
756 fputs ("/* prologue: Interrupt */\n", file);
758 else if (cfun->machine->is_signal)
760 fputs ("/* prologue: Signal */\n", file);
762 else
763 fputs ("/* prologue: function */\n", file);
765 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
766 get_frame_size());
767 fprintf (file, "/* stack size = %d */\n",
768 cfun->machine->stack_usage);
769 /* Create symbol stack offset here so all functions have it. Add 1 to stack
770 usage for offset so that SP + .L__stack_offset = return address.  */
771 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
775 /* Implement EPILOGUE_USES. */
778 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
780 if (reload_completed
781 && cfun->machine
782 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
783 return 1;
784 return 0;
787 /* Output RTL epilogue.  */
/* Mirror of expand_prologue: tears down the frame (again choosing the
   shorter of FP-based vs SP-based adjustment), pops saved registers,
   restores SREG/RAMPZ/tmp/zero for ISRs, and emits the return.  */
789 void
790 expand_epilogue (void)
792 int reg;
793 int live_seq;
794 HARD_REG_SET set;
795 int minimize;
796 HOST_WIDE_INT size = get_frame_size();
798 /* epilogue: naked  */
799 if (cfun->machine->is_naked)
801 emit_jump_insn (gen_return ());
802 return;
805 avr_regs_to_save (&set);
806 live_seq = sequent_regs_live ();
807 minimize = (TARGET_CALL_PROLOGUES
808 && !cfun->machine->is_interrupt
809 && !cfun->machine->is_signal
810 && !cfun->machine->is_OS_task
811 && !cfun->machine->is_OS_main
812 && live_seq);
813 
/* Out-of-line restore path matching gen_call_prologue_saves.  */
814 if (minimize && (frame_pointer_needed || live_seq > 4))
816 if (frame_pointer_needed)
818 /* Get rid of frame.  */
819 emit_move_insn(frame_pointer_rtx,
820 gen_rtx_PLUS (HImode, frame_pointer_rtx,
821 gen_int_mode (size, HImode)));
823 else
825 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
828 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
830 else
832 if (frame_pointer_needed)
834 if (size)
836 /* Try two methods to adjust stack and select shortest.  */
837 rtx myfp;
838 rtx fp_plus_insns;
839 rtx sp_plus_insns = NULL_RTX;
841 if (AVR_HAVE_8BIT_SP)
843 /* The high byte (r29) doesn't change - prefer 'subi'
844 (1 cycle) over 'sbiw' (2 cycles, same size).  */
845 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
847 else
849 /* Normal sized addition.  */
850 myfp = frame_pointer_rtx;
853 /* Method 1-Adjust frame pointer.  */
854 start_sequence ();
856 emit_move_insn (myfp,
857 gen_rtx_PLUS (GET_MODE (myfp), myfp,
858 gen_int_mode (size,
859 GET_MODE(myfp))));
/* SP writes are not atomic on 16-bit-SP parts; the irq_off/irq_on
   variants keep an interrupt from seeing a torn SP.  */
861 /* Copy to stack pointer.  */
862 if (AVR_HAVE_8BIT_SP)
864 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
866 else if (TARGET_NO_INTERRUPTS
867 || cfun->machine->is_signal)
869 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
870 frame_pointer_rtx));
872 else if (cfun->machine->is_interrupt)
874 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
875 frame_pointer_rtx));
877 else
879 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
882 fp_plus_insns = get_insns ();
883 end_sequence ();
885 /* Method 2-Adjust Stack pointer.  */
886 if (size <= 5)
888 start_sequence ();
890 emit_move_insn (stack_pointer_rtx,
891 gen_rtx_PLUS (HImode, stack_pointer_rtx,
892 gen_int_mode (size,
893 HImode)));
895 sp_plus_insns = get_insns ();
896 end_sequence ();
899 /* Use shortest method.  */
900 if (size <= 5 && (get_sequence_length (sp_plus_insns)
901 < get_sequence_length (fp_plus_insns)))
902 emit_insn (sp_plus_insns);
903 else
904 emit_insn (fp_plus_insns);
906 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
908 /* Restore previous frame_pointer.  */
909 emit_insn (gen_pophi (frame_pointer_rtx));
/* Pops run in exact reverse order of the prologue pushes.  */
912 /* Restore used registers.  */
913 for (reg = 31; reg >= 0; --reg)
915 if (TEST_HARD_REG_BIT (set, reg))
916 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
918 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
920 /* Restore RAMPZ using tmp reg as scratch.  */
921 if(AVR_HAVE_RAMPZ
922 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
924 emit_insn (gen_popqi (tmp_reg_rtx));
925 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
926 tmp_reg_rtx);
929 /* Restore SREG using tmp reg as scratch.  */
930 emit_insn (gen_popqi (tmp_reg_rtx));
932 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
933 tmp_reg_rtx);
935 /* Restore tmp REG.  */
936 emit_insn (gen_popqi (tmp_reg_rtx));
938 /* Restore zero REG.  */
939 emit_insn (gen_popqi (zero_reg_rtx));
942 emit_jump_insn (gen_return ());
/* Emit the marker comment that precedes every function epilogue
   in the generated assembly.  */
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
954 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
955 machine for a memory operand of mode MODE.  */
/* TARGET_LEGITIMATE_ADDRESS_P hook.  NOTE(review): the forward
   declaration earlier in the file says `static`; this definition
   omits it (linkage is internal either way).  Accepted forms:
   plain base register, constant address, base+offset within
   MAX_LD_OFFSET, and pre-dec/post-inc.  */
957 bool
958 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
960 enum reg_class r = NO_REGS;
962 if (TARGET_ALL_DEBUG)
964 fprintf (stderr, "mode: (%s) %s %s %s %s:",
965 GET_MODE_NAME(mode),
966 strict ? "(strict)": "",
967 reload_completed ? "(reload_completed)": "",
968 reload_in_progress ? "(reload_in_progress)": "",
969 reg_renumber ? "(reg_renumber)" : "");
970 if (GET_CODE (x) == PLUS
971 && REG_P (XEXP (x, 0))
972 && GET_CODE (XEXP (x, 1)) == CONST_INT
973 && INTVAL (XEXP (x, 1)) >= 0
974 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
975 && reg_renumber
977 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
978 true_regnum (XEXP (x, 0)));
979 debug_rtx (x);
/* Outside strict mode, look through a SUBREG to judge the inner reg.  */
981 if (!strict && GET_CODE (x) == SUBREG)
982 x = SUBREG_REG (x);
983 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
984 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
985 r = POINTER_REGS;
986 else if (CONSTANT_ADDRESS_P (x))
987 r = ALL_REGS;
988 else if (GET_CODE (x) == PLUS
989 && REG_P (XEXP (x, 0))
990 && GET_CODE (XEXP (x, 1)) == CONST_INT
991 && INTVAL (XEXP (x, 1)) >= 0)
993 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
994 if (fit)
996 if (! strict
997 || REGNO (XEXP (x,0)) == REG_X
998 || REGNO (XEXP (x,0)) == REG_Y
999 || REGNO (XEXP (x,0)) == REG_Z)
1000 r = BASE_POINTER_REGS;
1001 if (XEXP (x,0) == frame_pointer_rtx
1002 || XEXP (x,0) == arg_pointer_rtx)
1003 r = BASE_POINTER_REGS;
/* Offset too large: only Y with frame pointer is salvageable.  */
1005 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1006 r = POINTER_Y_REGS;
1008 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1009 && REG_P (XEXP (x, 0))
1010 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1011 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1013 r = POINTER_REGS;
1015 if (TARGET_ALL_DEBUG)
1017 fprintf (stderr, " ret = %c\n", r + '0');
1019 return r == NO_REGS ? 0 : (int)r;
1022 /* Attempts to replace X with a valid
1023 memory address for an operand of mode MODE  */
/* TARGET_LEGITIMIZE_ADDRESS hook: reg+reg and reg+bigoffset forms
   are forced into a register; everything else is returned as-is.  */
1026 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1028 x = oldx;
1029 if (TARGET_ALL_DEBUG)
1031 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1032 debug_rtx (oldx);
1035 if (GET_CODE (oldx) == PLUS
1036 && REG_P (XEXP (oldx,0)))
1038 if (REG_P (XEXP (oldx,1)))
1039 x = force_reg (GET_MODE (oldx), oldx);
1040 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1042 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer offsets are left alone; elimination handles them.  */
1043 if (frame_pointer_rtx != XEXP (oldx,0))
1044 if (offs > MAX_LD_OFFSET (mode))
1046 if (TARGET_ALL_DEBUG)
1047 fprintf (stderr, "force_reg (big offset)\n");
1048 x = force_reg (GET_MODE (oldx), oldx);
1052 return x;
1056 /* Return a pointer register name as a string. */
1058 static const char *
1059 ptrreg_to_str (int regno)
1061 switch (regno)
1063 case REG_X: return "X";
1064 case REG_Y: return "Y";
1065 case REG_Z: return "Z";
1066 default:
1067 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1069 return NULL;
1072 /* Return the condition name as a string.
1073 Used in conditional jump constructing */
1075 static const char *
1076 cond_string (enum rtx_code code)
1078 switch (code)
1080 case NE:
1081 return "ne";
1082 case EQ:
1083 return "eq";
1084 case GE:
1085 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1086 return "pl";
1087 else
1088 return "ge";
1089 case LT:
1090 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1091 return "mi";
1092 else
1093 return "lt";
1094 case GEU:
1095 return "sh";
1096 case LTU:
1097 return "lo";
1098 default:
1099 gcc_unreachable ();
1103 /* Output ADDR to FILE as address. */
1105 void
1106 print_operand_address (FILE *file, rtx addr)
1108 switch (GET_CODE (addr))
1110 case REG:
1111 fprintf (file, ptrreg_to_str (REGNO (addr)));
1112 break;
1114 case PRE_DEC:
1115 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1116 break;
1118 case POST_INC:
1119 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1120 break;
1122 default:
1123 if (CONSTANT_ADDRESS_P (addr)
1124 && text_segment_operand (addr, VOIDmode))
1126 rtx x = XEXP (addr,0);
1127 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1129 /* Assembler gs() will implant word address. Make offset
1130 a byte offset inside gs() for assembler. This is
1131 needed because the more logical (constant+gs(sym)) is not
1132 accepted by gas. For 128K and lower devices this is ok. For
1133 large devices it will create a Trampoline to offset from symbol
1134 which may not be what the user really wanted. */
1135 fprintf (file, "gs(");
1136 output_addr_const (file, XEXP (x,0));
1137 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1138 if (AVR_3_BYTE_PC)
1139 if (warning ( 0, "Pointer offset from symbol maybe incorrect."))
1141 output_addr_const (stderr, addr);
1142 fprintf(stderr,"\n");
1145 else
1147 fprintf (file, "gs(");
1148 output_addr_const (file, addr);
1149 fprintf (file, ")");
1152 else
1153 output_addr_const (file, addr);
1158 /* Output X as assembler operand to file FILE. */
1160 void
1161 print_operand (FILE *file, rtx x, int code)
1163 int abcd = 0;
1165 if (code >= 'A' && code <= 'D')
1166 abcd = code - 'A';
1168 if (code == '~')
1170 if (!AVR_HAVE_JMP_CALL)
1171 fputc ('r', file);
1173 else if (code == '!')
1175 if (AVR_HAVE_EIJMP_EICALL)
1176 fputc ('e', file);
1178 else if (REG_P (x))
1180 if (x == zero_reg_rtx)
1181 fprintf (file, "__zero_reg__");
1182 else
1183 fprintf (file, reg_names[true_regnum (x) + abcd]);
1185 else if (GET_CODE (x) == CONST_INT)
1186 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1187 else if (GET_CODE (x) == MEM)
1189 rtx addr = XEXP (x,0);
1190 if (code == 'm')
1192 if (!CONSTANT_P (addr))
1193 fatal_insn ("bad address, not a constant):", addr);
1194 /* Assembler template with m-code is data - not progmem section */
1195 if (text_segment_operand (addr, VOIDmode))
1196 if (warning ( 0, "accessing data memory with program memory address"))
1198 output_addr_const (stderr, addr);
1199 fprintf(stderr,"\n");
1201 output_addr_const (file, addr);
1203 else if (code == 'o')
1205 if (GET_CODE (addr) != PLUS)
1206 fatal_insn ("bad address, not (reg+disp):", addr);
1208 print_operand (file, XEXP (addr, 1), 0);
1210 else if (code == 'p' || code == 'r')
1212 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1213 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1215 if (code == 'p')
1216 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1217 else
1218 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1220 else if (GET_CODE (addr) == PLUS)
1222 print_operand_address (file, XEXP (addr,0));
1223 if (REGNO (XEXP (addr, 0)) == REG_X)
1224 fatal_insn ("internal compiler error. Bad address:"
1225 ,addr);
1226 fputc ('+', file);
1227 print_operand (file, XEXP (addr,1), code);
1229 else
1230 print_operand_address (file, addr);
1232 else if (code == 'x')
1234 /* Constant progmem address - like used in jmp or call */
1235 if (0 == text_segment_operand (x, VOIDmode))
1236 if (warning ( 0, "accessing program memory with data memory address"))
1238 output_addr_const (stderr, x);
1239 fprintf(stderr,"\n");
1241 /* Use normal symbol for direct address no linker trampoline needed */
1242 output_addr_const (file, x);
1244 else if (GET_CODE (x) == CONST_DOUBLE)
1246 long val;
1247 REAL_VALUE_TYPE rv;
1248 if (GET_MODE (x) != SFmode)
1249 fatal_insn ("internal compiler error. Unknown mode:", x);
1250 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1251 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1252 fprintf (file, "0x%lx", val);
1254 else if (code == 'j')
1255 fputs (cond_string (GET_CODE (x)), file);
1256 else if (code == 'k')
1257 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1258 else
1259 print_operand_address (file, x);
1262 /* Update the condition code in the INSN. */
1264 void
1265 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1267 rtx set;
1269 switch (get_attr_cc (insn))
1271 case CC_NONE:
1272 /* Insn does not affect CC at all. */
1273 break;
1275 case CC_SET_N:
1276 CC_STATUS_INIT;
1277 break;
1279 case CC_SET_ZN:
1280 set = single_set (insn);
1281 CC_STATUS_INIT;
1282 if (set)
1284 cc_status.flags |= CC_NO_OVERFLOW;
1285 cc_status.value1 = SET_DEST (set);
1287 break;
1289 case CC_SET_CZN:
1290 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1291 The V flag may or may not be known but that's ok because
1292 alter_cond will change tests to use EQ/NE. */
1293 set = single_set (insn);
1294 CC_STATUS_INIT;
1295 if (set)
1297 cc_status.value1 = SET_DEST (set);
1298 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1300 break;
1302 case CC_COMPARE:
1303 set = single_set (insn);
1304 CC_STATUS_INIT;
1305 if (set)
1306 cc_status.value1 = SET_SRC (set);
1307 break;
1309 case CC_CLOBBER:
1310 /* Insn doesn't leave CC in a usable state. */
1311 CC_STATUS_INIT;
1313 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1314 set = single_set (insn);
1315 if (set)
1317 rtx src = SET_SRC (set);
1319 if (GET_CODE (src) == ASHIFTRT
1320 && GET_MODE (src) == QImode)
1322 rtx x = XEXP (src, 1);
1324 if (GET_CODE (x) == CONST_INT
1325 && INTVAL (x) > 0
1326 && INTVAL (x) != 6)
1328 cc_status.value1 = SET_DEST (set);
1329 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1333 break;
1337 /* Return maximum number of consecutive registers of
1338 class CLASS needed to hold a value of mode MODE. */
1341 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1343 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1346 /* Choose mode for jump insn:
1347 1 - relative jump in range -63 <= x <= 62 ;
1348 2 - relative jump in range -2046 <= x <= 2045 ;
1349 3 - absolute jump (only for ATmega[16]03). */
1352 avr_jump_mode (rtx x, rtx insn)
1354 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1355 ? XEXP (x, 0) : x));
1356 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1357 int jump_distance = cur_addr - dest_addr;
1359 if (-63 <= jump_distance && jump_distance <= 62)
1360 return 1;
1361 else if (-2046 <= jump_distance && jump_distance <= 2045)
1362 return 2;
1363 else if (AVR_HAVE_JMP_CALL)
1364 return 3;
1366 return 2;
1369 /* return an AVR condition jump commands.
1370 X is a comparison RTX.
1371 LEN is a number returned by avr_jump_mode function.
1372 if REVERSE nonzero then condition code in X must be reversed. */
1374 const char *
1375 ret_cond_branch (rtx x, int len, int reverse)
1377 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1379 switch (cond)
1381 case GT:
1382 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1383 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1384 AS1 (brpl,%0)) :
1385 len == 2 ? (AS1 (breq,.+4) CR_TAB
1386 AS1 (brmi,.+2) CR_TAB
1387 AS1 (rjmp,%0)) :
1388 (AS1 (breq,.+6) CR_TAB
1389 AS1 (brmi,.+4) CR_TAB
1390 AS1 (jmp,%0)));
1392 else
1393 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1394 AS1 (brge,%0)) :
1395 len == 2 ? (AS1 (breq,.+4) CR_TAB
1396 AS1 (brlt,.+2) CR_TAB
1397 AS1 (rjmp,%0)) :
1398 (AS1 (breq,.+6) CR_TAB
1399 AS1 (brlt,.+4) CR_TAB
1400 AS1 (jmp,%0)));
1401 case GTU:
1402 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1403 AS1 (brsh,%0)) :
1404 len == 2 ? (AS1 (breq,.+4) CR_TAB
1405 AS1 (brlo,.+2) CR_TAB
1406 AS1 (rjmp,%0)) :
1407 (AS1 (breq,.+6) CR_TAB
1408 AS1 (brlo,.+4) CR_TAB
1409 AS1 (jmp,%0)));
1410 case LE:
1411 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1412 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1413 AS1 (brmi,%0)) :
1414 len == 2 ? (AS1 (breq,.+2) CR_TAB
1415 AS1 (brpl,.+2) CR_TAB
1416 AS1 (rjmp,%0)) :
1417 (AS1 (breq,.+2) CR_TAB
1418 AS1 (brpl,.+4) CR_TAB
1419 AS1 (jmp,%0)));
1420 else
1421 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1422 AS1 (brlt,%0)) :
1423 len == 2 ? (AS1 (breq,.+2) CR_TAB
1424 AS1 (brge,.+2) CR_TAB
1425 AS1 (rjmp,%0)) :
1426 (AS1 (breq,.+2) CR_TAB
1427 AS1 (brge,.+4) CR_TAB
1428 AS1 (jmp,%0)));
1429 case LEU:
1430 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1431 AS1 (brlo,%0)) :
1432 len == 2 ? (AS1 (breq,.+2) CR_TAB
1433 AS1 (brsh,.+2) CR_TAB
1434 AS1 (rjmp,%0)) :
1435 (AS1 (breq,.+2) CR_TAB
1436 AS1 (brsh,.+4) CR_TAB
1437 AS1 (jmp,%0)));
1438 default:
1439 if (reverse)
1441 switch (len)
1443 case 1:
1444 return AS1 (br%k1,%0);
1445 case 2:
1446 return (AS1 (br%j1,.+2) CR_TAB
1447 AS1 (rjmp,%0));
1448 default:
1449 return (AS1 (br%j1,.+4) CR_TAB
1450 AS1 (jmp,%0));
1453 else
1455 switch (len)
1457 case 1:
1458 return AS1 (br%j1,%0);
1459 case 2:
1460 return (AS1 (br%k1,.+2) CR_TAB
1461 AS1 (rjmp,%0));
1462 default:
1463 return (AS1 (br%k1,.+4) CR_TAB
1464 AS1 (jmp,%0));
1468 return "";
1471 /* Predicate function for immediate operand which fits to byte (8bit) */
1474 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1476 return (GET_CODE (op) == CONST_INT
1477 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1480 /* Output insn cost for next insn. */
1482 void
1483 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1484 int num_operands ATTRIBUTE_UNUSED)
1486 if (TARGET_ALL_DEBUG)
1488 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1489 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1493 /* Return 0 if undefined, 1 if always true or always false. */
1496 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1498 unsigned int max = (mode == QImode ? 0xff :
1499 mode == HImode ? 0xffff :
1500 mode == SImode ? 0xffffffff : 0);
1501 if (max && op && GET_CODE (x) == CONST_INT)
1503 if (unsigned_condition (op) != op)
1504 max >>= 1;
1506 if (max != (INTVAL (x) & max)
1507 && INTVAL (x) != 0xff)
1508 return 1;
1510 return 0;
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  /* The AVR ABI passes arguments in r8 .. r25.  */
  return 8 <= r && r <= 25;
}
1523 /* Initializing the variable cum for the state at the beginning
1524 of the argument list. */
1526 void
1527 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1528 tree fndecl ATTRIBUTE_UNUSED)
1530 cum->nregs = 18;
1531 cum->regno = FIRST_CUM_REG;
1532 if (!libname && fntype)
1534 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1535 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1536 != void_type_node));
1537 if (stdarg)
1538 cum->nregs = 0;
1542 /* Returns the number of registers to allocate for a function argument. */
1544 static int
1545 avr_num_arg_regs (enum machine_mode mode, tree type)
1547 int size;
1549 if (mode == BLKmode)
1550 size = int_size_in_bytes (type);
1551 else
1552 size = GET_MODE_SIZE (mode);
1554 /* Align all function arguments to start in even-numbered registers.
1555 Odd-sized arguments leave holes above them. */
1557 return (size + 1) & ~1;
1560 /* Controls whether a function argument is passed
1561 in a register, and which register. */
1564 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1565 int named ATTRIBUTE_UNUSED)
1567 int bytes = avr_num_arg_regs (mode, type);
1569 if (cum->nregs && bytes <= cum->nregs)
1570 return gen_rtx_REG (mode, cum->regno - bytes);
1572 return NULL_RTX;
1575 /* Update the summarizer variable CUM to advance past an argument
1576 in the argument list. */
1578 void
1579 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1580 int named ATTRIBUTE_UNUSED)
1582 int bytes = avr_num_arg_regs (mode, type);
1584 cum->nregs -= bytes;
1585 cum->regno -= bytes;
1587 if (cum->nregs <= 0)
1589 cum->nregs = 0;
1590 cum->regno = FIRST_CUM_REG;
1594 /***********************************************************************
1595 Functions for outputting various mov's for a various modes
1596 ************************************************************************/
1597 const char *
1598 output_movqi (rtx insn, rtx operands[], int *l)
1600 int dummy;
1601 rtx dest = operands[0];
1602 rtx src = operands[1];
1603 int *real_l = l;
1605 if (!l)
1606 l = &dummy;
1608 *l = 1;
1610 if (register_operand (dest, QImode))
1612 if (register_operand (src, QImode)) /* mov r,r */
1614 if (test_hard_reg_class (STACK_REG, dest))
1615 return AS2 (out,%0,%1);
1616 else if (test_hard_reg_class (STACK_REG, src))
1617 return AS2 (in,%0,%1);
1619 return AS2 (mov,%0,%1);
1621 else if (CONSTANT_P (src))
1623 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1624 return AS2 (ldi,%0,lo8(%1));
1626 if (GET_CODE (src) == CONST_INT)
1628 if (src == const0_rtx) /* mov r,L */
1629 return AS1 (clr,%0);
1630 else if (src == const1_rtx)
1632 *l = 2;
1633 return (AS1 (clr,%0) CR_TAB
1634 AS1 (inc,%0));
1636 else if (src == constm1_rtx)
1638 /* Immediate constants -1 to any register */
1639 *l = 2;
1640 return (AS1 (clr,%0) CR_TAB
1641 AS1 (dec,%0));
1643 else
1645 int bit_nr = exact_log2 (INTVAL (src));
1647 if (bit_nr >= 0)
1649 *l = 3;
1650 if (!real_l)
1651 output_asm_insn ((AS1 (clr,%0) CR_TAB
1652 "set"), operands);
1653 if (!real_l)
1654 avr_output_bld (operands, bit_nr);
1656 return "";
1661 /* Last resort, larger than loading from memory. */
1662 *l = 4;
1663 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1664 AS2 (ldi,r31,lo8(%1)) CR_TAB
1665 AS2 (mov,%0,r31) CR_TAB
1666 AS2 (mov,r31,__tmp_reg__));
1668 else if (GET_CODE (src) == MEM)
1669 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1671 else if (GET_CODE (dest) == MEM)
1673 const char *templ;
1675 if (src == const0_rtx)
1676 operands[1] = zero_reg_rtx;
1678 templ = out_movqi_mr_r (insn, operands, real_l);
1680 if (!real_l)
1681 output_asm_insn (templ, operands);
1683 operands[1] = src;
1685 return "";
1689 const char *
1690 output_movhi (rtx insn, rtx operands[], int *l)
1692 int dummy;
1693 rtx dest = operands[0];
1694 rtx src = operands[1];
1695 int *real_l = l;
1697 if (!l)
1698 l = &dummy;
1700 if (register_operand (dest, HImode))
1702 if (register_operand (src, HImode)) /* mov r,r */
1704 if (test_hard_reg_class (STACK_REG, dest))
1706 if (AVR_HAVE_8BIT_SP)
1707 return *l = 1, AS2 (out,__SP_L__,%A1);
1708 /* Use simple load of stack pointer if no interrupts are
1709 used. */
1710 else if (TARGET_NO_INTERRUPTS)
1711 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1712 AS2 (out,__SP_L__,%A1));
1713 *l = 5;
1714 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1715 "cli" CR_TAB
1716 AS2 (out,__SP_H__,%B1) CR_TAB
1717 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1718 AS2 (out,__SP_L__,%A1));
1720 else if (test_hard_reg_class (STACK_REG, src))
1722 *l = 2;
1723 return (AS2 (in,%A0,__SP_L__) CR_TAB
1724 AS2 (in,%B0,__SP_H__));
1727 if (AVR_HAVE_MOVW)
1729 *l = 1;
1730 return (AS2 (movw,%0,%1));
1732 else
1734 *l = 2;
1735 return (AS2 (mov,%A0,%A1) CR_TAB
1736 AS2 (mov,%B0,%B1));
1739 else if (CONSTANT_P (src))
1741 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1743 *l = 2;
1744 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1745 AS2 (ldi,%B0,hi8(%1)));
1748 if (GET_CODE (src) == CONST_INT)
1750 if (src == const0_rtx) /* mov r,L */
1752 *l = 2;
1753 return (AS1 (clr,%A0) CR_TAB
1754 AS1 (clr,%B0));
1756 else if (src == const1_rtx)
1758 *l = 3;
1759 return (AS1 (clr,%A0) CR_TAB
1760 AS1 (clr,%B0) CR_TAB
1761 AS1 (inc,%A0));
1763 else if (src == constm1_rtx)
1765 /* Immediate constants -1 to any register */
1766 *l = 3;
1767 return (AS1 (clr,%0) CR_TAB
1768 AS1 (dec,%A0) CR_TAB
1769 AS2 (mov,%B0,%A0));
1771 else
1773 int bit_nr = exact_log2 (INTVAL (src));
1775 if (bit_nr >= 0)
1777 *l = 4;
1778 if (!real_l)
1779 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1780 AS1 (clr,%B0) CR_TAB
1781 "set"), operands);
1782 if (!real_l)
1783 avr_output_bld (operands, bit_nr);
1785 return "";
1789 if ((INTVAL (src) & 0xff) == 0)
1791 *l = 5;
1792 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1793 AS1 (clr,%A0) CR_TAB
1794 AS2 (ldi,r31,hi8(%1)) CR_TAB
1795 AS2 (mov,%B0,r31) CR_TAB
1796 AS2 (mov,r31,__tmp_reg__));
1798 else if ((INTVAL (src) & 0xff00) == 0)
1800 *l = 5;
1801 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1802 AS2 (ldi,r31,lo8(%1)) CR_TAB
1803 AS2 (mov,%A0,r31) CR_TAB
1804 AS1 (clr,%B0) CR_TAB
1805 AS2 (mov,r31,__tmp_reg__));
1809 /* Last resort, equal to loading from memory. */
1810 *l = 6;
1811 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1812 AS2 (ldi,r31,lo8(%1)) CR_TAB
1813 AS2 (mov,%A0,r31) CR_TAB
1814 AS2 (ldi,r31,hi8(%1)) CR_TAB
1815 AS2 (mov,%B0,r31) CR_TAB
1816 AS2 (mov,r31,__tmp_reg__));
1818 else if (GET_CODE (src) == MEM)
1819 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1821 else if (GET_CODE (dest) == MEM)
1823 const char *templ;
1825 if (src == const0_rtx)
1826 operands[1] = zero_reg_rtx;
1828 templ = out_movhi_mr_r (insn, operands, real_l);
1830 if (!real_l)
1831 output_asm_insn (templ, operands);
1833 operands[1] = src;
1834 return "";
1836 fatal_insn ("invalid insn:", insn);
1837 return "";
1840 const char *
1841 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1843 rtx dest = op[0];
1844 rtx src = op[1];
1845 rtx x = XEXP (src, 0);
1846 int dummy;
1848 if (!l)
1849 l = &dummy;
1851 if (CONSTANT_ADDRESS_P (x))
1853 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1855 *l = 1;
1856 return AS2 (in,%0,__SREG__);
1858 if (optimize > 0 && io_address_operand (x, QImode))
1860 *l = 1;
1861 return AS2 (in,%0,%m1-0x20);
1863 *l = 2;
1864 return AS2 (lds,%0,%m1);
1866 /* memory access by reg+disp */
1867 else if (GET_CODE (x) == PLUS
1868 && REG_P (XEXP (x,0))
1869 && GET_CODE (XEXP (x,1)) == CONST_INT)
1871 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1873 int disp = INTVAL (XEXP (x,1));
1874 if (REGNO (XEXP (x,0)) != REG_Y)
1875 fatal_insn ("incorrect insn:",insn);
1877 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1878 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1879 AS2 (ldd,%0,Y+63) CR_TAB
1880 AS2 (sbiw,r28,%o1-63));
1882 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1883 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1884 AS2 (ld,%0,Y) CR_TAB
1885 AS2 (subi,r28,lo8(%o1)) CR_TAB
1886 AS2 (sbci,r29,hi8(%o1)));
1888 else if (REGNO (XEXP (x,0)) == REG_X)
1890 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1891 it but I have this situation with extremal optimizing options. */
1892 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1893 || reg_unused_after (insn, XEXP (x,0)))
1894 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1895 AS2 (ld,%0,X));
1897 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1898 AS2 (ld,%0,X) CR_TAB
1899 AS2 (sbiw,r26,%o1));
1901 *l = 1;
1902 return AS2 (ldd,%0,%1);
1904 *l = 1;
1905 return AS2 (ld,%0,%1);
1908 const char *
1909 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1911 rtx dest = op[0];
1912 rtx src = op[1];
1913 rtx base = XEXP (src, 0);
1914 int reg_dest = true_regnum (dest);
1915 int reg_base = true_regnum (base);
1916 /* "volatile" forces reading low byte first, even if less efficient,
1917 for correct operation with 16-bit I/O registers. */
1918 int mem_volatile_p = MEM_VOLATILE_P (src);
1919 int tmp;
1921 if (!l)
1922 l = &tmp;
1924 if (reg_base > 0)
1926 if (reg_dest == reg_base) /* R = (R) */
1928 *l = 3;
1929 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1930 AS2 (ld,%B0,%1) CR_TAB
1931 AS2 (mov,%A0,__tmp_reg__));
1933 else if (reg_base == REG_X) /* (R26) */
1935 if (reg_unused_after (insn, base))
1937 *l = 2;
1938 return (AS2 (ld,%A0,X+) CR_TAB
1939 AS2 (ld,%B0,X));
1941 *l = 3;
1942 return (AS2 (ld,%A0,X+) CR_TAB
1943 AS2 (ld,%B0,X) CR_TAB
1944 AS2 (sbiw,r26,1));
1946 else /* (R) */
1948 *l = 2;
1949 return (AS2 (ld,%A0,%1) CR_TAB
1950 AS2 (ldd,%B0,%1+1));
1953 else if (GET_CODE (base) == PLUS) /* (R + i) */
1955 int disp = INTVAL (XEXP (base, 1));
1956 int reg_base = true_regnum (XEXP (base, 0));
1958 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1960 if (REGNO (XEXP (base, 0)) != REG_Y)
1961 fatal_insn ("incorrect insn:",insn);
1963 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1964 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1965 AS2 (ldd,%A0,Y+62) CR_TAB
1966 AS2 (ldd,%B0,Y+63) CR_TAB
1967 AS2 (sbiw,r28,%o1-62));
1969 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1970 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1971 AS2 (ld,%A0,Y) CR_TAB
1972 AS2 (ldd,%B0,Y+1) CR_TAB
1973 AS2 (subi,r28,lo8(%o1)) CR_TAB
1974 AS2 (sbci,r29,hi8(%o1)));
1976 if (reg_base == REG_X)
1978 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1979 it but I have this situation with extremal
1980 optimization options. */
1982 *l = 4;
1983 if (reg_base == reg_dest)
1984 return (AS2 (adiw,r26,%o1) CR_TAB
1985 AS2 (ld,__tmp_reg__,X+) CR_TAB
1986 AS2 (ld,%B0,X) CR_TAB
1987 AS2 (mov,%A0,__tmp_reg__));
1989 return (AS2 (adiw,r26,%o1) CR_TAB
1990 AS2 (ld,%A0,X+) CR_TAB
1991 AS2 (ld,%B0,X) CR_TAB
1992 AS2 (sbiw,r26,%o1+1));
1995 if (reg_base == reg_dest)
1997 *l = 3;
1998 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1999 AS2 (ldd,%B0,%B1) CR_TAB
2000 AS2 (mov,%A0,__tmp_reg__));
2003 *l = 2;
2004 return (AS2 (ldd,%A0,%A1) CR_TAB
2005 AS2 (ldd,%B0,%B1));
2007 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2009 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2010 fatal_insn ("incorrect insn:", insn);
2012 if (mem_volatile_p)
2014 if (REGNO (XEXP (base, 0)) == REG_X)
2016 *l = 4;
2017 return (AS2 (sbiw,r26,2) CR_TAB
2018 AS2 (ld,%A0,X+) CR_TAB
2019 AS2 (ld,%B0,X) CR_TAB
2020 AS2 (sbiw,r26,1));
2022 else
2024 *l = 3;
2025 return (AS2 (sbiw,%r1,2) CR_TAB
2026 AS2 (ld,%A0,%p1) CR_TAB
2027 AS2 (ldd,%B0,%p1+1));
2031 *l = 2;
2032 return (AS2 (ld,%B0,%1) CR_TAB
2033 AS2 (ld,%A0,%1));
2035 else if (GET_CODE (base) == POST_INC) /* (R++) */
2037 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2038 fatal_insn ("incorrect insn:", insn);
2040 *l = 2;
2041 return (AS2 (ld,%A0,%1) CR_TAB
2042 AS2 (ld,%B0,%1));
2044 else if (CONSTANT_ADDRESS_P (base))
2046 if (optimize > 0 && io_address_operand (base, HImode))
2048 *l = 2;
2049 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2050 AS2 (in,%B0,%m1+1-0x20));
2052 *l = 4;
2053 return (AS2 (lds,%A0,%m1) CR_TAB
2054 AS2 (lds,%B0,%m1+1));
2057 fatal_insn ("unknown move insn:",insn);
2058 return "";
2061 const char *
2062 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2064 rtx dest = op[0];
2065 rtx src = op[1];
2066 rtx base = XEXP (src, 0);
2067 int reg_dest = true_regnum (dest);
2068 int reg_base = true_regnum (base);
2069 int tmp;
2071 if (!l)
2072 l = &tmp;
2074 if (reg_base > 0)
2076 if (reg_base == REG_X) /* (R26) */
2078 if (reg_dest == REG_X)
2079 /* "ld r26,-X" is undefined */
2080 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2081 AS2 (ld,r29,X) CR_TAB
2082 AS2 (ld,r28,-X) CR_TAB
2083 AS2 (ld,__tmp_reg__,-X) CR_TAB
2084 AS2 (sbiw,r26,1) CR_TAB
2085 AS2 (ld,r26,X) CR_TAB
2086 AS2 (mov,r27,__tmp_reg__));
2087 else if (reg_dest == REG_X - 2)
2088 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2089 AS2 (ld,%B0,X+) CR_TAB
2090 AS2 (ld,__tmp_reg__,X+) CR_TAB
2091 AS2 (ld,%D0,X) CR_TAB
2092 AS2 (mov,%C0,__tmp_reg__));
2093 else if (reg_unused_after (insn, base))
2094 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2095 AS2 (ld,%B0,X+) CR_TAB
2096 AS2 (ld,%C0,X+) CR_TAB
2097 AS2 (ld,%D0,X));
2098 else
2099 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2100 AS2 (ld,%B0,X+) CR_TAB
2101 AS2 (ld,%C0,X+) CR_TAB
2102 AS2 (ld,%D0,X) CR_TAB
2103 AS2 (sbiw,r26,3));
2105 else
2107 if (reg_dest == reg_base)
2108 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2109 AS2 (ldd,%C0,%1+2) CR_TAB
2110 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2111 AS2 (ld,%A0,%1) CR_TAB
2112 AS2 (mov,%B0,__tmp_reg__));
2113 else if (reg_base == reg_dest + 2)
2114 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2115 AS2 (ldd,%B0,%1+1) CR_TAB
2116 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2117 AS2 (ldd,%D0,%1+3) CR_TAB
2118 AS2 (mov,%C0,__tmp_reg__));
2119 else
2120 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2121 AS2 (ldd,%B0,%1+1) CR_TAB
2122 AS2 (ldd,%C0,%1+2) CR_TAB
2123 AS2 (ldd,%D0,%1+3));
2126 else if (GET_CODE (base) == PLUS) /* (R + i) */
2128 int disp = INTVAL (XEXP (base, 1));
2130 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2132 if (REGNO (XEXP (base, 0)) != REG_Y)
2133 fatal_insn ("incorrect insn:",insn);
2135 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2136 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2137 AS2 (ldd,%A0,Y+60) CR_TAB
2138 AS2 (ldd,%B0,Y+61) CR_TAB
2139 AS2 (ldd,%C0,Y+62) CR_TAB
2140 AS2 (ldd,%D0,Y+63) CR_TAB
2141 AS2 (sbiw,r28,%o1-60));
2143 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2144 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2145 AS2 (ld,%A0,Y) CR_TAB
2146 AS2 (ldd,%B0,Y+1) CR_TAB
2147 AS2 (ldd,%C0,Y+2) CR_TAB
2148 AS2 (ldd,%D0,Y+3) CR_TAB
2149 AS2 (subi,r28,lo8(%o1)) CR_TAB
2150 AS2 (sbci,r29,hi8(%o1)));
2153 reg_base = true_regnum (XEXP (base, 0));
2154 if (reg_base == REG_X)
2156 /* R = (X + d) */
2157 if (reg_dest == REG_X)
2159 *l = 7;
2160 /* "ld r26,-X" is undefined */
2161 return (AS2 (adiw,r26,%o1+3) CR_TAB
2162 AS2 (ld,r29,X) CR_TAB
2163 AS2 (ld,r28,-X) CR_TAB
2164 AS2 (ld,__tmp_reg__,-X) CR_TAB
2165 AS2 (sbiw,r26,1) CR_TAB
2166 AS2 (ld,r26,X) CR_TAB
2167 AS2 (mov,r27,__tmp_reg__));
2169 *l = 6;
2170 if (reg_dest == REG_X - 2)
2171 return (AS2 (adiw,r26,%o1) CR_TAB
2172 AS2 (ld,r24,X+) CR_TAB
2173 AS2 (ld,r25,X+) CR_TAB
2174 AS2 (ld,__tmp_reg__,X+) CR_TAB
2175 AS2 (ld,r27,X) CR_TAB
2176 AS2 (mov,r26,__tmp_reg__));
2178 return (AS2 (adiw,r26,%o1) CR_TAB
2179 AS2 (ld,%A0,X+) CR_TAB
2180 AS2 (ld,%B0,X+) CR_TAB
2181 AS2 (ld,%C0,X+) CR_TAB
2182 AS2 (ld,%D0,X) CR_TAB
2183 AS2 (sbiw,r26,%o1+3));
2185 if (reg_dest == reg_base)
2186 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2187 AS2 (ldd,%C0,%C1) CR_TAB
2188 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2189 AS2 (ldd,%A0,%A1) CR_TAB
2190 AS2 (mov,%B0,__tmp_reg__));
2191 else if (reg_dest == reg_base - 2)
2192 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2193 AS2 (ldd,%B0,%B1) CR_TAB
2194 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2195 AS2 (ldd,%D0,%D1) CR_TAB
2196 AS2 (mov,%C0,__tmp_reg__));
2197 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2198 AS2 (ldd,%B0,%B1) CR_TAB
2199 AS2 (ldd,%C0,%C1) CR_TAB
2200 AS2 (ldd,%D0,%D1));
2202 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2203 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2204 AS2 (ld,%C0,%1) CR_TAB
2205 AS2 (ld,%B0,%1) CR_TAB
2206 AS2 (ld,%A0,%1));
2207 else if (GET_CODE (base) == POST_INC) /* (R++) */
2208 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2209 AS2 (ld,%B0,%1) CR_TAB
2210 AS2 (ld,%C0,%1) CR_TAB
2211 AS2 (ld,%D0,%1));
2212 else if (CONSTANT_ADDRESS_P (base))
2213 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2214 AS2 (lds,%B0,%m1+1) CR_TAB
2215 AS2 (lds,%C0,%m1+2) CR_TAB
2216 AS2 (lds,%D0,%m1+3));
2218 fatal_insn ("unknown move insn:",insn);
2219 return "";
2222 const char *
2223 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2225 rtx dest = op[0];
2226 rtx src = op[1];
2227 rtx base = XEXP (dest, 0);
2228 int reg_base = true_regnum (base);
2229 int reg_src = true_regnum (src);
2230 int tmp;
2232 if (!l)
2233 l = &tmp;
2235 if (CONSTANT_ADDRESS_P (base))
2236 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2237 AS2 (sts,%m0+1,%B1) CR_TAB
2238 AS2 (sts,%m0+2,%C1) CR_TAB
2239 AS2 (sts,%m0+3,%D1));
2240 if (reg_base > 0) /* (r) */
2242 if (reg_base == REG_X) /* (R26) */
2244 if (reg_src == REG_X)
2246 /* "st X+,r26" is undefined */
2247 if (reg_unused_after (insn, base))
2248 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2249 AS2 (st,X,r26) CR_TAB
2250 AS2 (adiw,r26,1) CR_TAB
2251 AS2 (st,X+,__tmp_reg__) CR_TAB
2252 AS2 (st,X+,r28) CR_TAB
2253 AS2 (st,X,r29));
2254 else
2255 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2256 AS2 (st,X,r26) CR_TAB
2257 AS2 (adiw,r26,1) CR_TAB
2258 AS2 (st,X+,__tmp_reg__) CR_TAB
2259 AS2 (st,X+,r28) CR_TAB
2260 AS2 (st,X,r29) CR_TAB
2261 AS2 (sbiw,r26,3));
2263 else if (reg_base == reg_src + 2)
2265 if (reg_unused_after (insn, base))
2266 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2267 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2268 AS2 (st,%0+,%A1) CR_TAB
2269 AS2 (st,%0+,%B1) CR_TAB
2270 AS2 (st,%0+,__zero_reg__) CR_TAB
2271 AS2 (st,%0,__tmp_reg__) CR_TAB
2272 AS1 (clr,__zero_reg__));
2273 else
2274 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2275 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2276 AS2 (st,%0+,%A1) CR_TAB
2277 AS2 (st,%0+,%B1) CR_TAB
2278 AS2 (st,%0+,__zero_reg__) CR_TAB
2279 AS2 (st,%0,__tmp_reg__) CR_TAB
2280 AS1 (clr,__zero_reg__) CR_TAB
2281 AS2 (sbiw,r26,3));
2283 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2284 AS2 (st,%0+,%B1) CR_TAB
2285 AS2 (st,%0+,%C1) CR_TAB
2286 AS2 (st,%0,%D1) CR_TAB
2287 AS2 (sbiw,r26,3));
2289 else
2290 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2291 AS2 (std,%0+1,%B1) CR_TAB
2292 AS2 (std,%0+2,%C1) CR_TAB
2293 AS2 (std,%0+3,%D1));
2295 else if (GET_CODE (base) == PLUS) /* (R + i) */
2297 int disp = INTVAL (XEXP (base, 1));
2298 reg_base = REGNO (XEXP (base, 0));
2299 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2301 if (reg_base != REG_Y)
2302 fatal_insn ("incorrect insn:",insn);
2304 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2305 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2306 AS2 (std,Y+60,%A1) CR_TAB
2307 AS2 (std,Y+61,%B1) CR_TAB
2308 AS2 (std,Y+62,%C1) CR_TAB
2309 AS2 (std,Y+63,%D1) CR_TAB
2310 AS2 (sbiw,r28,%o0-60));
2312 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2313 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2314 AS2 (st,Y,%A1) CR_TAB
2315 AS2 (std,Y+1,%B1) CR_TAB
2316 AS2 (std,Y+2,%C1) CR_TAB
2317 AS2 (std,Y+3,%D1) CR_TAB
2318 AS2 (subi,r28,lo8(%o0)) CR_TAB
2319 AS2 (sbci,r29,hi8(%o0)));
2321 if (reg_base == REG_X)
2323 /* (X + d) = R */
2324 if (reg_src == REG_X)
2326 *l = 9;
2327 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2328 AS2 (mov,__zero_reg__,r27) CR_TAB
2329 AS2 (adiw,r26,%o0) CR_TAB
2330 AS2 (st,X+,__tmp_reg__) CR_TAB
2331 AS2 (st,X+,__zero_reg__) CR_TAB
2332 AS2 (st,X+,r28) CR_TAB
2333 AS2 (st,X,r29) CR_TAB
2334 AS1 (clr,__zero_reg__) CR_TAB
2335 AS2 (sbiw,r26,%o0+3));
2337 else if (reg_src == REG_X - 2)
2339 *l = 9;
2340 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2341 AS2 (mov,__zero_reg__,r27) CR_TAB
2342 AS2 (adiw,r26,%o0) CR_TAB
2343 AS2 (st,X+,r24) CR_TAB
2344 AS2 (st,X+,r25) CR_TAB
2345 AS2 (st,X+,__tmp_reg__) CR_TAB
2346 AS2 (st,X,__zero_reg__) CR_TAB
2347 AS1 (clr,__zero_reg__) CR_TAB
2348 AS2 (sbiw,r26,%o0+3));
2350 *l = 6;
2351 return (AS2 (adiw,r26,%o0) CR_TAB
2352 AS2 (st,X+,%A1) CR_TAB
2353 AS2 (st,X+,%B1) CR_TAB
2354 AS2 (st,X+,%C1) CR_TAB
2355 AS2 (st,X,%D1) CR_TAB
2356 AS2 (sbiw,r26,%o0+3));
2358 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2359 AS2 (std,%B0,%B1) CR_TAB
2360 AS2 (std,%C0,%C1) CR_TAB
2361 AS2 (std,%D0,%D1));
2363 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2364 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2365 AS2 (st,%0,%C1) CR_TAB
2366 AS2 (st,%0,%B1) CR_TAB
2367 AS2 (st,%0,%A1));
2368 else if (GET_CODE (base) == POST_INC) /* (R++) */
2369 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2370 AS2 (st,%0,%B1) CR_TAB
2371 AS2 (st,%0,%C1) CR_TAB
2372 AS2 (st,%0,%D1));
2373 fatal_insn ("unknown move insn:",insn);
2374 return "";
2377 const char *
2378 output_movsisf(rtx insn, rtx operands[], int *l)
2380 int dummy;
2381 rtx dest = operands[0];
2382 rtx src = operands[1];
2383 int *real_l = l;
2385 if (!l)
2386 l = &dummy;
2388 if (register_operand (dest, VOIDmode))
2390 if (register_operand (src, VOIDmode)) /* mov r,r */
2392 if (true_regnum (dest) > true_regnum (src))
2394 if (AVR_HAVE_MOVW)
2396 *l = 2;
2397 return (AS2 (movw,%C0,%C1) CR_TAB
2398 AS2 (movw,%A0,%A1));
2400 *l = 4;
2401 return (AS2 (mov,%D0,%D1) CR_TAB
2402 AS2 (mov,%C0,%C1) CR_TAB
2403 AS2 (mov,%B0,%B1) CR_TAB
2404 AS2 (mov,%A0,%A1));
2406 else
2408 if (AVR_HAVE_MOVW)
2410 *l = 2;
2411 return (AS2 (movw,%A0,%A1) CR_TAB
2412 AS2 (movw,%C0,%C1));
2414 *l = 4;
2415 return (AS2 (mov,%A0,%A1) CR_TAB
2416 AS2 (mov,%B0,%B1) CR_TAB
2417 AS2 (mov,%C0,%C1) CR_TAB
2418 AS2 (mov,%D0,%D1));
2421 else if (CONSTANT_P (src))
2423 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2425 *l = 4;
2426 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2427 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2428 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2429 AS2 (ldi,%D0,hhi8(%1)));
2432 if (GET_CODE (src) == CONST_INT)
2434 const char *const clr_op0 =
2435 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2436 AS1 (clr,%B0) CR_TAB
2437 AS2 (movw,%C0,%A0))
2438 : (AS1 (clr,%A0) CR_TAB
2439 AS1 (clr,%B0) CR_TAB
2440 AS1 (clr,%C0) CR_TAB
2441 AS1 (clr,%D0));
2443 if (src == const0_rtx) /* mov r,L */
2445 *l = AVR_HAVE_MOVW ? 3 : 4;
2446 return clr_op0;
2448 else if (src == const1_rtx)
2450 if (!real_l)
2451 output_asm_insn (clr_op0, operands);
2452 *l = AVR_HAVE_MOVW ? 4 : 5;
2453 return AS1 (inc,%A0);
2455 else if (src == constm1_rtx)
2457 /* Immediate constants -1 to any register */
2458 if (AVR_HAVE_MOVW)
2460 *l = 4;
2461 return (AS1 (clr,%A0) CR_TAB
2462 AS1 (dec,%A0) CR_TAB
2463 AS2 (mov,%B0,%A0) CR_TAB
2464 AS2 (movw,%C0,%A0));
2466 *l = 5;
2467 return (AS1 (clr,%A0) CR_TAB
2468 AS1 (dec,%A0) CR_TAB
2469 AS2 (mov,%B0,%A0) CR_TAB
2470 AS2 (mov,%C0,%A0) CR_TAB
2471 AS2 (mov,%D0,%A0));
2473 else
2475 int bit_nr = exact_log2 (INTVAL (src));
2477 if (bit_nr >= 0)
2479 *l = AVR_HAVE_MOVW ? 5 : 6;
2480 if (!real_l)
2482 output_asm_insn (clr_op0, operands);
2483 output_asm_insn ("set", operands);
2485 if (!real_l)
2486 avr_output_bld (operands, bit_nr);
2488 return "";
2493 /* Last resort, better than loading from memory. */
2494 *l = 10;
2495 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2496 AS2 (ldi,r31,lo8(%1)) CR_TAB
2497 AS2 (mov,%A0,r31) CR_TAB
2498 AS2 (ldi,r31,hi8(%1)) CR_TAB
2499 AS2 (mov,%B0,r31) CR_TAB
2500 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2501 AS2 (mov,%C0,r31) CR_TAB
2502 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2503 AS2 (mov,%D0,r31) CR_TAB
2504 AS2 (mov,r31,__tmp_reg__));
2506 else if (GET_CODE (src) == MEM)
2507 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2509 else if (GET_CODE (dest) == MEM)
2511 const char *templ;
2513 if (src == const0_rtx)
2514 operands[1] = zero_reg_rtx;
2516 templ = out_movsi_mr_r (insn, operands, real_l);
2518 if (!real_l)
2519 output_asm_insn (templ, operands);
2521 operands[1] = src;
2522 return "";
2524 fatal_insn ("invalid insn:", insn);
2525 return "";
/* Output the assembler template for storing the QImode register %1 into
   the memory destination OP[0].  Returns the template string; if L is
   non-NULL, *L receives the number of output instructions.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* The status register gets a dedicated "out" instruction.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (out,__SREG__,%1);
        }
      /* Addresses in the I/O range can use "out" (1 word) instead of
         "sts" (2 words); %m0-0x20 converts the data address to an I/O
         port number.  */
      if (optimize > 0 && io_address_operand (x, QImode))
        {
          *l = 1;
          return AS2 (out,%m0-0x20,%1);
        }
      *l = 2;
      return AS2 (sts,%m0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* Displacement too large for a single "std" (limit 63 with size
         adjustment): adjust the base pointer around the store.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
        {
          int disp = INTVAL (XEXP (x,1));
          /* Only Y (r28/r29) is expected as base here.  */
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly out of range: adiw/sbiw around a Y+63 store.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
                            AS2 (std,Y+63,%1) CR_TAB
                            AS2 (sbiw,r28,%o0-63));

          /* Far out of range: full 16-bit add/subtract of the offset.  */
          return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (st,Y,%1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X (r26/r27) has no displacement addressing; adjust X itself.
             If the source overlaps X, save it in __tmp_reg__ first.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                                AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,__tmp_reg__));

              return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                              AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
          else
            {
              /* X may stay clobbered if it is dead after this insn.  */
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,%1));

              return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,%1) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
        }
      /* In-range displacement from Y or Z: single "std".  */
      *l = 1;
      return AS2 (std,%0,%1);
    }
  /* Plain register-indirect (and auto-inc/dec) store.  */
  *l = 1;
  return AS2 (st,%0,%1);
}
/* Output the assembler template for storing the HImode register pair
   %A1/%B1 into the memory destination OP[0].  Returns the template
   string; if L is non-NULL, *L receives the instruction count.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O range: two "out" instructions, high byte first.  */
      if (optimize > 0 && io_address_operand (base, HImode))
        {
          *l = 2;
          return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
                  AS2 (out,%m0-0x20,%A1));
        }
      return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
                      AS2 (sts,%m0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" and "st -X,r26" are undefined.  */
              if (!mem_volatile_p && reg_unused_after (insn, src))
                return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__));
              else
                return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,1) CR_TAB
                              AS2 (st,X,r26));
            }
          else
            {
              /* X is dead and not volatile: post-increment low-to-high;
                 otherwise store high byte first via adiw/st -X.  */
              if (!mem_volatile_p && reg_unused_after (insn, base))
                return *l=2, (AS2 (st,X+,%A1) CR_TAB
                              AS2 (st,X,%B1));
              else
                return *l=3, (AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,%B1) CR_TAB
                              AS2 (st,-X,%A1));
            }
        }
      else
        /* Y or Z base: std with displacement 1, high byte first.  */
        return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Out-of-range displacement: adjust Y around the stores.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
                            AS2 (std,Y+63,%B1) CR_TAB
                            AS2 (std,Y+62,%A1) CR_TAB
                            AS2 (sbiw,r28,%o0-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (std,Y+1,%B1) CR_TAB
                          AS2 (st,Y,%A1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source is X itself: park r26/r27 in __tmp_reg__ and
                 __zero_reg__, then restore __zero_reg__ to 0.  */
              *l = 7;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0+1) CR_TAB
                      AS2 (st,X,__zero_reg__) CR_TAB
                      AS2 (st,-X,__tmp_reg__) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0));
            }
          *l = 4;
          return (AS2 (adiw,r26,%o0+1) CR_TAB
                  AS2 (st,X,%B1) CR_TAB
                  AS2 (st,-X,%A1) CR_TAB
                  AS2 (sbiw,r26,%o0));
        }
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
        {
          /* Volatile requires the high byte written first, so the
             post-increment has to be emulated.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (adiw,r26,1) CR_TAB
                      AS2 (st,X,%B1) CR_TAB
                      AS2 (st,-X,%A1) CR_TAB
                      AS2 (adiw,r26,2));
            }
          else
            {
              *l = 3;
              return (AS2 (std,%p0+1,%B1) CR_TAB
                      AS2 (st,%p0,%A1) CR_TAB
                      AS2 (adiw,%r0,2));
            }
        }

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
              AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2745 /* Return 1 if frame pointer for current function required. */
2747 bool
2748 avr_frame_pointer_required_p (void)
2750 return (cfun->calls_alloca
2751 || crtl->args.info.nregs == 0
2752 || get_frame_size () > 0);
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

static RTX_CODE
compare_condition (rtx insn)
{
  /* Look at the jump that consumes the compare result; assumes the
     next real insn, if a jump, is (set (pc) (if_then_else (cond ...)))
     so that XEXP (SET_SRC, 0) is the condition rtx.  */
  rtx next = next_real_insn (insn);
  RTX_CODE cond = UNKNOWN;
  if (next && GET_CODE (next) == JUMP_INSN)
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);
      rtx t = XEXP (src, 0);
      cond = GET_CODE (t);
    }
  return cond;
}
2772 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2774 static int
2775 compare_sign_p (rtx insn)
2777 RTX_CODE cond = compare_condition (insn);
2778 return (cond == GE || cond == LT);
/* Returns nonzero if the next insn is a JUMP_INSN with a condition
   that needs to be swapped (GT, GTU, LE, LEU).  Returns the condition
   code itself in that case, 0 otherwise.  */

int
compare_diff_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
}
/* Returns nonzero if INSN is a compare insn with the EQ or NE condition.  */

int
compare_eq_p (rtx insn)
{
  RTX_CODE cond = compare_condition (insn);
  return (cond == EQ || cond == NE);
}
/* Output test instruction for HImode.  OP is the 16-bit register being
   tested; if L is non-NULL, *L receives the instruction count.  */

const char *
out_tsthi (rtx insn, rtx op, int *l)
{
  /* Only the sign is needed: testing the high byte suffices.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%B0);
    }
  if (reg_unused_after (insn, op)
      && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      if (l) *l = 1;
      return "or %A0,%B0";
    }
  /* sbiw reg,0 works only on the upper register pairs (ADDW_REGS).  */
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 1;
      return AS2 (sbiw,%0,0);
    }
  /* General case: compare both bytes against __zero_reg__.  */
  if (l) *l = 2;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__));
}
/* Output test instruction for SImode.  OP is the 32-bit register being
   tested; if L is non-NULL, *L receives the instruction count.  */

const char *
out_tstsi (rtx insn, rtx op, int *l)
{
  /* Only the sign is needed: testing the top byte suffices.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%D0);
    }
  /* sbiw handles the low word in one instruction on ADDW_REGS.  */
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 3;
      return (AS2 (sbiw,%A0,0) CR_TAB
              AS2 (cpc,%C0,__zero_reg__) CR_TAB
              AS2 (cpc,%D0,__zero_reg__));
    }
  /* General case: compare all four bytes against __zero_reg__.  */
  if (l) *l = 4;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__) CR_TAB
          AS2 (cpc,%C0,__zero_reg__) CR_TAB
          AS2 (cpc,%D0,__zero_reg__));
}
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.

   TEMPL is the one-step shift template for the mode (T_LEN words).
   If LEN is non-NULL, only count instructions into *LEN; otherwise the
   code is emitted via output_asm_insn.  Both modes must agree on the
   instruction count.  %3 is the loop counter, %2 the shift count.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;
  int saved_in_tmp = 0;
  int use_zero_reg = 0;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register %3 is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        {
          /* Shift by zero (or negative): nothing to emit.  */
          if (len)
            *len = 0;
          return;
        }

      if (count < 8 && !scratch)
        use_zero_reg = 1;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */
          if (len)
            *len = t_len * count;
          else
            {
              while (count-- > 0)
                output_asm_insn (templ, op);
            }

          return;
        }

      if (scratch)
        {
          if (!len)
            strcat (str, AS2 (ldi,%3,%2));
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;
          if (len)
            *len = 2;
          else
            strcat (str, ("set" CR_TAB
                          AS2 (bld,%3,%2-1)));
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = gen_rtx_REG (QImode,
                           ((true_regnum (operands[0]) - 1) & 15) + 16);
          op[4] = tmp_reg_rtx;
          saved_in_tmp = 1;

          if (len)
            *len = 3;  /* Includes "mov %3,%4" after the loop.  */
          else
            strcat (str, (AS2 (mov,%4,%3) CR_TAB
                          AS2 (ldi,%3,%2)));
        }

      /* Constant count is known nonzero, so no pre-loop test needed.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      /* Load the count from memory into __tmp_reg__ first.  */
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
        out_movqi_r_mr (insn, op_mov, len);
      else
        output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      /* Count in a register: use it directly if dead, else copy to
         __tmp_reg__ so the loop's "dec" doesn't clobber a live value.  */
      if (reg_unused_after (insn, operands[2]))
        op[3] = op[2];
      else
        {
          op[3] = tmp_reg_rtx;
          if (!len)
            strcat (str, (AS2 (mov,%3,%2) CR_TAB));
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      /* Runtime count may be zero: jump to the loop test first.  */
      if (len)
        ++*len;
      else
        strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      /* Loop: 1: shift; 2: decrement (or lsr of the one-bit counter)
         and branch back while nonzero/positive.  */
      strcat (str, "\n1:\t");
      strcat (str, templ);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
        strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
/* 8bit shift left ((char)x << i)

   Returns the asm template for the shift; if LEN is non-NULL, *LEN
   receives the instruction count.  Constant counts get hand-tuned
   sequences; everything else falls through to out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8 shifts everything out: result is 0.  */
          *len = 1;
          return AS1 (clr,%0);

        case 1:
          *len = 1;
          return AS1 (lsl,%0);

        case 2:
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 3:
          *len = 3;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 4:
          /* swap exchanges nibbles; mask keeps the high nibble.
             andi needs an LD_REGS (r16-r31) operand.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return (AS1 (swap,%0) CR_TAB
                      AS2 (andi,%0,0xf0));
            }
          *len = 4;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0) CR_TAB
                      AS2 (andi,%0,0xe0));
            }
          *len = 5;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0) CR_TAB
                      AS1 (lsl,%0) CR_TAB
                      AS2 (andi,%0,0xc0));
            }
          *len = 6;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 7:
          /* Rotate bit 0 through carry into bit 7 of a cleared byte.  */
          *len = 3;
          return (AS1 (ror,%0) CR_TAB
                  AS1 (clr,%0) CR_TAB
                  AS1 (ror,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsl,%0),
                      insn, operands, len, 1);
  return "";
}
/* 16bit shift left ((short)x << i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  Hand-tuned sequences for constant counts;
   "break" falls through to the generic loop in out_shift_with_cnt.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* PARALLEL pattern means a scratch register %3 is available;
         ldi_ok means andi/ldi may target %0 directly (LD_REGS).  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16: result is 0.  */
          *len = 2;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then move the cross-byte
                 nibble with the eor/andi/eor masking trick.  */
              *len = 6;
              return (AS1 (swap,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 7;
              return (AS1 (swap,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* Shift by 1, then the nibble trick for the other 4.  */
              *len = 8;
              return (AS1 (lsl,%A0) CR_TAB
                      AS1 (rol,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 9;
              return (AS1 (lsl,%A0) CR_TAB
                      AS1 (rol,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* <<6 computed as >>2 into __tmp_reg__, then byte shuffle.  */
          *len = 9;
          return (AS1 (clr,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0) CR_TAB
                  AS1 (ror,%A0) CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0) CR_TAB
                  AS1 (ror,%A0) CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));

        case 7:
          *len = 5;
          return (AS1 (lsr,%B0) CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (ror,%A0));

        case 8:
          /* Whole-byte move.  */
          return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
                            AS1 (clr,%A0));

        case 9:
          *len = 3;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0));

        case 10:
          *len = 4;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0));

        case 11:
          *len = 5;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0));

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (andi,%B0,0xf0));
            }
          if (scratch)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0));

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (lsl,%B0) CR_TAB
                      AS2 (andi,%B0,0xe0));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 == <<5; r0 holds the high product.  */
              *len = 5;
              return (AS2 (ldi,%3,0x20) CR_TAB
                      AS2 (mul,%A0,%3) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (lsl,%B0) CR_TAB
                      AS2 (ldi,%3,0xe0) CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in r1 via set/bld.  */
              *len = 6;
              return ("set" CR_TAB
                      AS2 (bld,r1,5) CR_TAB
                      AS2 (mul,%A0,r1) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          *len = 7;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (lsl,%B0));

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%B0,0x40) CR_TAB
                      AS2 (mul,%A0,%B0) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return (AS2 (ldi,%3,0x40) CR_TAB
                      AS2 (mul,%A0,%3) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: 6 single-byte shifts of the high byte.  */
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS2 (ldi,%A0,6) "\n1:\t"
                      AS1 (lsl,%B0) CR_TAB
                      AS1 (dec,%A0) CR_TAB
                      AS1 (brne,1b));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* <<14 as two right shifts of A0 rotated into B0.  */
          *len = 6;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 15:
          /* Only bit 0 survives, rotated into bit 7 of the high byte.  */
          *len = 4;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0)),
                      insn, operands, len, 2);
  return "";
}
/* 32bit shift left ((long)x << i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  Whole-byte shift counts get move sequences;
   other counts fall through to the generic loop.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: result is 0; movw clears two bytes at once.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, (AS1 (clr,%D0) CR_TAB
                              AS1 (clr,%C0) CR_TAB
                              AS2 (movw,%A0,%C0));
          *len = 4;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 8:
          {
            /* Move bytes up by one; order depends on overlap direction.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 >= reg1)
              return (AS2 (mov,%D0,%C1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS1 (clr,%A0));
            else
              return (AS1 (clr,%A0) CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%D0,%C1));
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* Source low word already sits in the destination high word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, (AS1 (clr,%B0) CR_TAB
                                AS1 (clr,%A0));
            if (AVR_HAVE_MOVW)
              return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
                                AS1 (clr,%B0) CR_TAB
                                AS1 (clr,%A0));
            else
              return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
                                AS2 (mov,%D0,%B1) CR_TAB
                                AS1 (clr,%B0) CR_TAB
                                AS1 (clr,%A0));
          }

        case 24:
          *len = 4;
          return (AS2 (mov,%D0,%A1) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 31:
          /* Only bit 0 survives, rotated into bit 31.  */
          *len = 6;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0) CR_TAB
                       AS1 (rol,%C0) CR_TAB
                       AS1 (rol,%D0)),
                      insn, operands, len, 4);
  return "";
}
/* 8bit arithmetic shift right ((signed char)x >> i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return AS1 (asr,%0);

        case 2:
          *len = 2;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 3:
          *len = 3;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 4:
          *len = 4;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 5:
          *len = 5;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 6:
          /* Save bit 6 in T, sign-extend bit 7 over the byte with
             lsl/sbc, then deposit bit 6 into bit 0.  */
          *len = 4;
          return (AS2 (bst,%0,6) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS2 (sbc,%0,%0) CR_TAB
                  AS2 (bld,%0,0));

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Result is all sign bits: lsl puts the sign into carry,
             sbc r,r yields 0x00 or 0xff.  */
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS2 (sbc,%0,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (asr,%0),
                      insn, operands, len, 1);
  return "";
}
/* 16bit arithmetic shift right ((signed short)x >> i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  The lsl/sbc idiom used throughout turns the
   sign bit into a 0x00/0xff fill byte.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* >>6 done as <<2 of a widened copy, keeping the sign.  */
          *len = 8;
          return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,__tmp_reg__) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (lsl,__tmp_reg__) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (rol,%B0));

        case 7:
          *len = 4;
          return (AS1 (lsl,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0));

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
                                AS1 (lsl,%B0) CR_TAB
                                AS2 (sbc,%B0,%B0));
            else
              /* Distinct registers: sign-extend via sbrc/dec.  */
              return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
                                AS1 (clr,%B0) CR_TAB
                                AS2 (sbrc,%A0,7) CR_TAB
                                AS1 (dec,%B0));
          }

        case 9:
          *len = 4;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0));

        case 10:
          *len = 5;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 0x20 == <<5; r1 holds the signed
                 high product, restored to zero afterwards.  */
              *len = 5;
              return (AS2 (ldi,%A0,0x20) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x10) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x08) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 14:
          *len = 5;
          return (AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%A0,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (rol,%A0));

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Result is all sign bits.  */
          return *len = 3, (AS1 (lsl,%B0) CR_TAB
                            AS2 (sbc,%A0,%A0) CR_TAB
                            AS2 (mov,%B0,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 2);
  return "";
}
/* 32bit arithmetic shift right ((signed long)x >> i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            /* Byte-move down by one; ordering chosen by overlap
               direction; top byte sign-extended with sbrc/dec.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            if (reg0 <= reg1)
              return (AS2 (mov,%A0,%B1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS1 (clr,%D0) CR_TAB
                      AS2 (sbrc,%C0,7) CR_TAB
                      AS1 (dec,%D0));
            else
              return (AS1 (clr,%D0) CR_TAB
                      AS2 (sbrc,%D1,7) CR_TAB
                      AS1 (dec,%D0) CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%A0,%B1));
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Source high word already sits in the destination low word.  */
            if (reg0 == reg1 + 2)
              return *len = 4, (AS1 (clr,%D0) CR_TAB
                                AS2 (sbrc,%B0,7) CR_TAB
                                AS1 (com,%D0) CR_TAB
                                AS2 (mov,%C0,%D0));
            if (AVR_HAVE_MOVW)
              return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
                                AS1 (clr,%D0) CR_TAB
                                AS2 (sbrc,%B0,7) CR_TAB
                                AS1 (com,%D0) CR_TAB
                                AS2 (mov,%C0,%D0));
            else
              return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
                                AS2 (mov,%A0,%C1) CR_TAB
                                AS1 (clr,%D0) CR_TAB
                                AS2 (sbrc,%B0,7) CR_TAB
                                AS1 (com,%D0) CR_TAB
                                AS2 (mov,%C0,%D0));
          }

        case 24:
          return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
                            AS1 (clr,%D0) CR_TAB
                            AS2 (sbrc,%A0,7) CR_TAB
                            AS1 (com,%D0) CR_TAB
                            AS2 (mov,%B0,%D0) CR_TAB
                            AS2 (mov,%C0,%D0));

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Result is all sign bits: lsl/sbc fill, then copy.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, (AS1 (lsl,%D0) CR_TAB
                              AS2 (sbc,%A0,%A0) CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (movw,%C0,%A0));
          else
            return *len = 5, (AS1 (lsl,%D0) CR_TAB
                              AS2 (sbc,%A0,%A0) CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (mov,%C0,%A0) CR_TAB
                              AS2 (mov,%D0,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
                       AS1 (ror,%C0) CR_TAB
                       AS1 (ror,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 4);
  return "";
}
/* 8bit logic shift right ((unsigned char)x >> i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8: result is 0.  */
          *len = 1;
          return AS1 (clr,%0);

        case 1:
          *len = 1;
          return AS1 (lsr,%0);

        case 2:
          *len = 2;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));
        case 3:
          *len = 3;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 4:
          /* Nibble swap + mask; andi needs LD_REGS (r16-r31).  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return (AS1 (swap,%0) CR_TAB
                      AS2 (andi,%0,0x0f));
            }
          *len = 4;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS2 (andi,%0,0x7));
            }
          *len = 5;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS2 (andi,%0,0x3));
            }
          *len = 6;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 7:
          /* Rotate bit 7 through carry into bit 0 of a cleared byte.  */
          *len = 3;
          return (AS1 (rol,%0) CR_TAB
                  AS1 (clr,%0) CR_TAB
                  AS1 (rol,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsr,%0),
                      insn, operands, len, 1);
  return "";
}
/* 16bit logic shift right ((unsigned short)x >> i)

   Returns the asm template; if LEN is non-NULL, *LEN receives the
   instruction count.  Mirror image of ashlhi3_out for the zero-fill
   direction.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16: result is 0.  */
          *len = 2;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble swap of both bytes plus the eor/andi/eor trick
                 to move the cross-byte nibble.  */
              *len = 6;
              return (AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      AS2 (andi,%B0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          if (scratch)
            {
              *len = 7;
              return (AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return (AS1 (lsr,%B0) CR_TAB
                      AS1 (ror,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      AS2 (andi,%B0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          if (scratch)
            {
              *len = 9;
              return (AS1 (lsr,%B0) CR_TAB
                      AS1 (ror,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* >>6 computed as <<2 into __tmp_reg__, then byte shuffle.  */
          *len = 9;
          return (AS1 (clr,__tmp_reg__) CR_TAB
                  AS1 (lsl,%A0) CR_TAB
                  AS1 (rol,%B0) CR_TAB
                  AS1 (rol,__tmp_reg__) CR_TAB
                  AS1 (lsl,%A0) CR_TAB
                  AS1 (rol,%B0) CR_TAB
                  AS1 (rol,__tmp_reg__) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS2 (mov,%B0,__tmp_reg__));

        case 7:
          /* lsl/sbc/neg turns the old bit 15 into bit 8.  */
          *len = 5;
          return (AS1 (lsl,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (neg,%B0));

        case 8:
          /* Whole-byte move.  */
          return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
                            AS1 (clr,%B0));

        case 9:
          *len = 3;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0));

        case 10:
          *len = 4;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 11:
          *len = 5;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f));
            }
          if (scratch)
            {
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (lsr,%A0) CR_TAB
                      AS2 (andi,%A0,0x07));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x08 == <<3; r1 holds the high product,
                 restored to zero afterwards.  */
              *len = 5;
              return (AS2 (ldi,%3,0x08) CR_TAB
                      AS2 (mul,%B0,%3) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (lsr,%A0) CR_TAB
                      AS2 (ldi,%3,0x07) CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x08 multiplier in r1 via set/bld.  */
              *len = 6;
              return ("set" CR_TAB
                      AS2 (bld,r1,3) CR_TAB
                      AS2 (mul,%B0,r1) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          *len = 7;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x04) CR_TAB
                      AS2 (mul,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return (AS2 (ldi,%3,0x04) CR_TAB
                      AS2 (mul,%B0,%3) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: 6 single-byte shifts.  */
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS2 (ldi,%B0,6) "\n1:\t"
                      AS1 (lsr,%A0) CR_TAB
                      AS1 (dec,%B0) CR_TAB
                      AS1 (brne,1b));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* >>14 as two left shifts of B0 rotated into A0.  */
          *len = 6;
          return (AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (clr,%B0));

        case 15:
          /* Only bit 15 survives, rotated into bit 0.  */
          *len = 4;
          return (AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (clr,%B0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 2);
  return "";
}
4129 /* 32-bit logical shift right ((unsigned long)x >> i) */
/* Output assembler for a 32-bit logical shift right.  OPERANDS[0] is
   the destination, OPERANDS[1] the source, OPERANDS[2] the shift
   count and OPERANDS[3] an optional scratch register.  If LEN is
   non-NULL the instruction count is stored through it instead of
   (in addition to) selecting the template; non-constant counts fall
   through to out_shift_with_cnt, which emits a generic shift loop.  */
4131 const char *
4132 lshrsi3_out (rtx insn, rtx operands[], int *len)
4134 if (GET_CODE (operands[2]) == CONST_INT)
/* K is a dummy target so *len stores below are always valid; T keeps
   the caller's pointer for the fall-through at the bottom.  */
4136 int k;
4137 int *t = len;
4139 if (!len)
4140 len = &k;
4142 switch (INTVAL (operands[2]))
/* Counts >= 32 zero the whole result; smaller counts without a
   special case break out to the generic loop below.  */
4144 default:
4145 if (INTVAL (operands[2]) < 32)
4146 break;
4148 if (AVR_HAVE_MOVW)
4149 return *len = 3, (AS1 (clr,%D0) CR_TAB
4150 AS1 (clr,%C0) CR_TAB
4151 AS2 (movw,%A0,%C0));
4152 *len = 4;
4153 return (AS1 (clr,%D0) CR_TAB
4154 AS1 (clr,%C0) CR_TAB
4155 AS1 (clr,%B0) CR_TAB
4156 AS1 (clr,%A0));
/* Byte-aligned shift: move each byte down one position.  The move
   direction depends on the relative placement of the hard registers
   so no live byte is clobbered before it is copied.  */
4158 case 8:
4160 int reg0 = true_regnum (operands[0]);
4161 int reg1 = true_regnum (operands[1]);
4162 *len = 4;
4163 if (reg0 <= reg1)
4164 return (AS2 (mov,%A0,%B1) CR_TAB
4165 AS2 (mov,%B0,%C1) CR_TAB
4166 AS2 (mov,%C0,%D1) CR_TAB
4167 AS1 (clr,%D0));
4168 else
4169 return (AS1 (clr,%D0) CR_TAB
4170 AS2 (mov,%C0,%D1) CR_TAB
4171 AS2 (mov,%B0,%C1) CR_TAB
4172 AS2 (mov,%A0,%B1));
/* Word shift: high word moves to low word, high word cleared.  Free
   when the destination already aliases the source's high word.  */
4175 case 16:
4177 int reg0 = true_regnum (operands[0]);
4178 int reg1 = true_regnum (operands[1]);
4180 if (reg0 == reg1 + 2)
4181 return *len = 2, (AS1 (clr,%C0) CR_TAB
4182 AS1 (clr,%D0));
4183 if (AVR_HAVE_MOVW)
4184 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4185 AS1 (clr,%C0) CR_TAB
4186 AS1 (clr,%D0));
4187 else
4188 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4189 AS2 (mov,%A0,%C1) CR_TAB
4190 AS1 (clr,%C0) CR_TAB
4191 AS1 (clr,%D0));
/* Only the top byte of the source survives a shift by 24.  */
4194 case 24:
4195 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4196 AS1 (clr,%B0) CR_TAB
4197 AS1 (clr,%C0) CR_TAB
4198 AS1 (clr,%D0));
/* Shift by 31: result is 0 or 1 depending on the source sign bit,
   extracted with sbrc/inc.  */
4200 case 31:
4201 *len = 6;
4202 return (AS1 (clr,%A0) CR_TAB
4203 AS2 (sbrc,%D0,7) CR_TAB
4204 AS1 (inc,%A0) CR_TAB
4205 AS1 (clr,%B0) CR_TAB
4206 AS1 (clr,%C0) CR_TAB
4207 AS1 (clr,%D0));
/* Restore the caller's LEN and emit a generic bit-at-a-time shift
   (out_shift_with_cnt decides between a loop and unrolled code).  */
4209 len = t;
4211 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4212 AS1 (ror,%C0) CR_TAB
4213 AS1 (ror,%B0) CR_TAB
4214 AS1 (ror,%A0)),
4215 insn, operands, len, 4);
4216 return "";
4219 /* Create RTL split patterns for byte sized rotate expressions. This
4220 produces a series of move instructions and considers overlap situations.
4221 Overlapping non-HImode operands need a scratch register. */
/* Expand a byte-aligned rotate of OPERANDS[1] by OPERANDS[2] bits into
   OPERANDS[0] as a series of moves, using OPERANDS[3] as scratch when a
   move cycle must be broken.  Returns true on success.  */
4223 bool
4224 avr_rotate_bytes (rtx operands[])
4226 int i, j;
4227 enum machine_mode mode = GET_MODE (operands[0]);
4228 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4229 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4230 int num = INTVAL (operands[2]);
/* NOTE(review): operands[3] is used unconditionally below via
   GET_MODE (scratch); presumably the expander always supplies a
   register here -- confirm against the insn patterns.  */
4231 rtx scratch = operands[3];
4232 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4233 Word move if no scratch is needed, otherwise use size of scratch. */
4234 enum machine_mode move_mode = QImode;
4235 if (num & 0xf)
4236 move_mode = QImode;
4237 else if ((mode == SImode && !same_reg) || !overlapped)
4238 move_mode = HImode;
4239 else
4240 move_mode = GET_MODE (scratch);
4242 /* Force DI rotate to use QI moves since other DI moves are currently split
4243 into QI moves so forward propagation works better. */
4244 if (mode == DImode)
4245 move_mode = QImode;
4246 /* Make scratch smaller if needed. */
4247 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4248 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4250 int move_size = GET_MODE_SIZE (move_mode);
4251 /* Number of bytes/words to rotate. */
4252 int offset = (num >> 3) / move_size;
4253 /* Number of moves needed. */
4254 int size = GET_MODE_SIZE (mode) / move_size;
4255 /* HImode byte swap is special case to avoid a scratch register. */
4256 if (mode == HImode && same_reg)
4258 /* HImode byte swap, using xor. This is as quick as using scratch. */
4259 rtx src, dst;
4260 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4261 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4262 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes; no temporary needed.  */
4264 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4265 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4266 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4269 else
4271 /* Create linked list of moves to determine move order. */
4272 struct {
4273 rtx src, dst;
4274 int links;
/* The +8 slack leaves room for the scratch moves added in the
   deadlock-breaking code below (size is incremented there).  */
4275 } move[size + 8];
4277 /* Generate list of subreg moves. */
4278 for (i = 0; i < size; i++)
4280 int from = i;
4281 int to = (from + offset) % size;
4282 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4283 mode, from * move_size);
4284 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4285 mode, to * move_size);
4286 move[i].links = -1;
4288 /* Mark dependence where a dst of one move is the src of another move.
4289 The first move is a conflict as it must wait until second is
4290 performed. We ignore moves to self - we catch this later. */
4291 if (overlapped)
4292 for (i = 0; i < size; i++)
4293 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4294 for (j = 0; j < size; j++)
4295 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4297 /* The dst of move i is the src of move j. */
4298 move[i].links = j;
4299 break;
4302 int blocked = -1;
4303 int moves = 0;
4304 /* Go through move list and perform non-conflicting moves. As each
4305 non-overlapping move is made, it may remove other conflicts
4306 so the process is repeated until no conflicts remain. */
4309 blocked = -1;
4310 moves = 0;
4311 /* Emit move where dst is not also a src or we have used that
4312 src already. */
4313 for (i = 0; i < size; i++)
4314 if (move[i].src != NULL_RTX)
4315 if (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4317 moves++;
4318 /* Ignore NOP moves to self. */
4319 if (!rtx_equal_p (move[i].dst, move[i].src))
4320 emit_move_insn (move[i].dst, move[i].src);
4322 /* Remove conflict from list. */
4323 move[i].src = NULL_RTX;
4325 else
4326 blocked = i;
4328 /* Check for deadlock. This is when no moves occurred and we have
4329 at least one blocked move. */
4330 if (moves == 0 && blocked != -1)
4332 /* Need to use scratch register to break deadlock.
4333 Add move to put dst of blocked move into scratch.
4334 When this move occurs, it will break chain deadlock.
4335 The scratch register is substituted for real move. */
4337 move[size].src = move[blocked].dst;
4338 move[size].dst = scratch;
4339 /* Scratch move is never blocked. */
4340 move[size].links = -1;
4341 /* Make sure we have valid link. */
4342 gcc_assert (move[blocked].links != -1);
4343 /* Replace src of blocking move with scratch reg. */
4344 move[move[blocked].links].src = scratch;
4345 /* Make dependent on scratch move occurring. */
4346 move[blocked].links = size;
4347 size=size+1;
4350 while (blocked != -1);
4352 return true;
4355 /* Modifies the length assigned to instruction INSN.
4356 LEN is the initially computed length of the insn.  Returns the
   corrected length.  The real length is recomputed by re-running the
   same output functions (output_movqi etc.) in length-only mode.  */
4359 adjust_insn_length (rtx insn, int len)
4361 rtx patt = PATTERN (insn);
4362 rtx set;
/* Plain SET patterns: moves, cc0 tests, and AND/IOR with constant
   masks whose per-byte cost can be computed directly.  */
4364 if (GET_CODE (patt) == SET)
4366 rtx op[10];
4367 op[1] = SET_SRC (patt);
4368 op[0] = SET_DEST (patt);
4369 if (general_operand (op[1], VOIDmode)
4370 && general_operand (op[0], VOIDmode))
4372 switch (GET_MODE (op[0]))
4374 case QImode:
4375 output_movqi (insn, op, &len);
4376 break;
4377 case HImode:
4378 output_movhi (insn, op, &len);
4379 break;
4380 case SImode:
4381 case SFmode:
4382 output_movsisf (insn, op, &len);
4383 break;
4384 default:
4385 break;
/* Compare-with-zero of a register sets cc0.  */
4388 else if (op[0] == cc0_rtx && REG_P (op[1]))
4390 switch (GET_MODE (op[1]))
4392 case HImode: out_tsthi (insn, op[1], &len); break;
4393 case SImode: out_tstsi (insn, op[1], &len); break;
4394 default: break;
/* AND with a constant mask: one insn per byte that is not all-ones
   (all-ones bytes need no instruction).  */
4397 else if (GET_CODE (op[1]) == AND)
4399 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4401 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4402 if (GET_MODE (op[1]) == SImode)
4403 len = (((mask & 0xff) != 0xff)
4404 + ((mask & 0xff00) != 0xff00)
4405 + ((mask & 0xff0000L) != 0xff0000L)
4406 + ((mask & 0xff000000L) != 0xff000000L));
4407 else if (GET_MODE (op[1]) == HImode)
4408 len = (((mask & 0xff) != 0xff)
4409 + ((mask & 0xff00) != 0xff00));
/* IOR with a constant mask: one insn per nonzero byte.  */
4412 else if (GET_CODE (op[1]) == IOR)
4414 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4416 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4417 if (GET_MODE (op[1]) == SImode)
4418 len = (((mask & 0xff) != 0)
4419 + ((mask & 0xff00) != 0)
4420 + ((mask & 0xff0000L) != 0)
4421 + ((mask & 0xff000000L) != 0));
4422 else if (GET_MODE (op[1]) == HImode)
4423 len = (((mask & 0xff) != 0)
4424 + ((mask & 0xff00) != 0));
/* PARALLEL patterns (e.g. with a clobbered scratch): reloads and
   shifts.  single_set extracts the one real SET.  */
4428 set = single_set (insn);
4429 if (set)
4431 rtx op[10];
4433 op[1] = SET_SRC (set);
4434 op[0] = SET_DEST (set);
4436 if (GET_CODE (patt) == PARALLEL
4437 && general_operand (op[1], VOIDmode)
4438 && general_operand (op[0], VOIDmode))
/* The second vector element, if any, is the scratch operand.  */
4440 if (XVECLEN (patt, 0) == 2)
4441 op[2] = XVECEXP (patt, 0, 1);
4443 switch (GET_MODE (op[0]))
4445 case QImode:
4446 len = 2;
4447 break;
4448 case HImode:
4449 output_reload_inhi (insn, op, &len);
4450 break;
4451 case SImode:
4452 case SFmode:
4453 output_reload_insisf (insn, op, &len);
4454 break;
4455 default:
4456 break;
/* Shift insns: dispatch to the mode- and direction-specific output
   function in length-only mode.  */
4459 else if (GET_CODE (op[1]) == ASHIFT
4460 || GET_CODE (op[1]) == ASHIFTRT
4461 || GET_CODE (op[1]) == LSHIFTRT)
4463 rtx ops[10];
4464 ops[0] = op[0];
4465 ops[1] = XEXP (op[1],0);
4466 ops[2] = XEXP (op[1],1);
4467 switch (GET_CODE (op[1]))
4469 case ASHIFT:
4470 switch (GET_MODE (op[0]))
4472 case QImode: ashlqi3_out (insn,ops,&len); break;
4473 case HImode: ashlhi3_out (insn,ops,&len); break;
4474 case SImode: ashlsi3_out (insn,ops,&len); break;
4475 default: break;
4477 break;
4478 case ASHIFTRT:
4479 switch (GET_MODE (op[0]))
4481 case QImode: ashrqi3_out (insn,ops,&len); break;
4482 case HImode: ashrhi3_out (insn,ops,&len); break;
4483 case SImode: ashrsi3_out (insn,ops,&len); break;
4484 default: break;
4486 break;
4487 case LSHIFTRT:
4488 switch (GET_MODE (op[0]))
4490 case QImode: lshrqi3_out (insn,ops,&len); break;
4491 case HImode: lshrhi3_out (insn,ops,&len); break;
4492 case SImode: lshrsi3_out (insn,ops,&len); break;
4493 default: break;
4495 break;
4496 default:
4497 break;
4501 return len;
4504 /* Return nonzero if register REG is dead after INSN:  either it is
   set or clobbered by INSN itself, or the scan in _reg_unused_after
   finds no later use.  The REG_P guard restricts the scan to hard
   registers / pseudos (dead_or_set_p also handles other operands).  */
4507 reg_unused_after (rtx insn, rtx reg)
4509 return (dead_or_set_p (insn, reg)
4510 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4513 /* Return nonzero if REG is not used after INSN.
4514 We assume REG is a reload reg, and therefore does
4515 not live past labels. It may live past calls or jumps though.
   Scans forward from INSN, handling delay-slot SEQUENCEs and calls
   specially; returns 0 as soon as a use of REG is found.  */
4518 _reg_unused_after (rtx insn, rtx reg)
4520 enum rtx_code code;
4521 rtx set;
4523 /* If the reg is set by this instruction, then it is safe for our
4524 case. Disregard the case where this is a store to memory, since
4525 we are checking a register used in the store address. */
4526 set = single_set (insn);
4527 if (set && GET_CODE (SET_DEST (set)) != MEM
4528 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4529 return 1;
/* Walk forward through the insn stream.  */
4531 while ((insn = NEXT_INSN (insn)))
4533 rtx set;
4534 code = GET_CODE (insn);
4536 #if 0
4537 /* If this is a label that existed before reload, then the register
4538 is dead here. However, if this is a label added by reorg, then
4539 the register may still be live here. We can't tell the difference,
4540 so we just ignore labels completely. */
4541 if (code == CODE_LABEL)
4542 return 1;
4543 /* else */
4544 #endif
4546 if (!INSN_P (insn))
4547 continue;
/* A jump could transfer control anywhere; give up conservatively.  */
4549 if (code == JUMP_INSN)
4550 return 0;
4552 /* If this is a sequence, we must handle them all at once.
4553 We could have for instance a call that sets the target register,
4554 and an insn in a delay slot that uses the register. In this case,
4555 we must return 0. */
4556 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4558 int i;
4559 int retval = 0;
4561 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4563 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4564 rtx set = single_set (this_insn);
4566 if (GET_CODE (this_insn) == CALL_INSN)
4567 code = CALL_INSN;
4568 else if (GET_CODE (this_insn) == JUMP_INSN)
/* An annulled branch may or may not execute its delay slot;
   be conservative.  */
4570 if (INSN_ANNULLED_BRANCH_P (this_insn))
4571 return 0;
4572 code = JUMP_INSN;
4575 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4576 return 0;
4577 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* REG is overwritten here (unless the dest is memory, in which
   case REG is used in the address).  */
4579 if (GET_CODE (SET_DEST (set)) != MEM)
4580 retval = 1;
4581 else
4582 return 0;
4584 if (set == 0
4585 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4586 return 0;
4588 if (retval == 1)
4589 return 1;
4590 else if (code == JUMP_INSN)
4591 return 0;
/* Calls: REG is used if it feeds an argument, dead if it is
   call-clobbered; otherwise keep scanning.  */
4594 if (code == CALL_INSN)
4596 rtx tem;
4597 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4598 if (GET_CODE (XEXP (tem, 0)) == USE
4599 && REG_P (XEXP (XEXP (tem, 0), 0))
4600 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4601 return 0;
4602 if (call_used_regs[REGNO (reg)])
4603 return 1;
4606 set = single_set (insn);
4608 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4609 return 0;
4610 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4611 return GET_CODE (SET_DEST (set)) != MEM;
4612 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4613 return 0;
/* End of the insn stream reached without finding a use.  */
4615 return 1;
4618 /* Target hook for assembling integer objects. The AVR version needs
4619 special handling for references to certain labels. */
4621 static bool
4622 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4624 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4625 && text_segment_operand (x, VOIDmode) )
4627 fputs ("\t.word\tgs(", asm_out_file);
4628 output_addr_const (asm_out_file, x);
4629 fputs (")\n", asm_out_file);
4630 return true;
4632 return default_assemble_integer (x, size, aligned_p);
4635 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4637 void
4638 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4641 /* If the function has the 'signal' or 'interrupt' attribute, test to
4642 make sure that the name of the function is "__vector_NN" so as to
4643 catch when the user misspells the interrupt vector name. */
4645 if (cfun->machine->is_interrupt)
4647 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4649 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4650 "%qs appears to be a misspelled interrupt handler",
4651 name);
4654 else if (cfun->machine->is_signal)
4656 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4658 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4659 "%qs appears to be a misspelled signal handler",
4660 name);
4664 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4665 ASM_OUTPUT_LABEL (file, name);
4668 /* The routine used to output NUL terminated strings. We use a special
4669 version of this for most svr4 targets because doing so makes the
4670 generated assembly code more compact (and thus faster to assemble)
4671 as well as more readable, especially for targets like the i386
4672 (where the only alternative is to output character sequences as
4673 comma separated lists of numbers). */
4675 void
4676 gas_output_limited_string(FILE *file, const char *str)
4678 const unsigned char *_limited_str = (const unsigned char *) str;
4679 unsigned ch;
4680 fprintf (file, "%s\"", STRING_ASM_OP);
4681 for (; (ch = *_limited_str); _limited_str++)
4683 int escape;
4684 switch (escape = ESCAPES[ch])
4686 case 0:
4687 putc (ch, file);
4688 break;
4689 case 1:
4690 fprintf (file, "\\%03o", ch);
4691 break;
4692 default:
4693 putc ('\\', file);
4694 putc (escape, file);
4695 break;
4698 fprintf (file, "\"\n");
4701 /* The routine used to output sequences of byte values. We use a special
4702 version of this for most svr4 targets because doing so makes the
4703 generated assembly code more compact (and thus faster to assemble)
4704 as well as more readable. Note that if we find subparts of the
4705 character sequence which end with NUL (and which are shorter than
4706 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4708 void
4709 gas_output_ascii(FILE *file, const char *str, size_t length)
4711 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4712 const unsigned char *limit = _ascii_bytes + length;
/* Number of output characters emitted on the current .ascii line.  */
4713 unsigned bytes_in_chunk = 0;
4714 for (; _ascii_bytes < limit; _ascii_bytes++)
4716 const unsigned char *p;
/* Keep assembler lines a manageable length: close the current
   .ascii string once roughly 60 characters have been emitted.  */
4717 if (bytes_in_chunk >= 60)
4719 fprintf (file, "\"\n");
4720 bytes_in_chunk = 0;
/* Find the end of the run starting here (next NUL or LIMIT).  */
4722 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4723 continue;
/* A short NUL-terminated run is emitted as a single string constant;
   _ascii_bytes is advanced to P so the loop increment then skips the
   NUL itself (it is included by gas_output_limited_string's format). */
4724 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4726 if (bytes_in_chunk > 0)
4728 fprintf (file, "\"\n");
4729 bytes_in_chunk = 0;
4731 gas_output_limited_string (file, (const char*)_ascii_bytes);
4732 _ascii_bytes = p;
4734 else
/* Otherwise emit the byte into a .ascii directive, escaping via the
   ESCAPES table (0 = literal, 1 = octal, else short escape).  */
4736 int escape;
4737 unsigned ch;
4738 if (bytes_in_chunk == 0)
4739 fprintf (file, "\t.ascii\t\"");
4740 switch (escape = ESCAPES[ch = *_ascii_bytes])
4742 case 0:
4743 putc (ch, file);
4744 bytes_in_chunk++;
4745 break;
4746 case 1:
/* "\ooo" is four output characters.  */
4747 fprintf (file, "\\%03o", ch);
4748 bytes_in_chunk += 4;
4749 break;
4750 default:
4751 putc ('\\', file);
4752 putc (escape, file);
4753 bytes_in_chunk += 2;
4754 break;
4758 if (bytes_in_chunk > 0)
4759 fprintf (file, "\"\n");
4762 /* Return value is nonzero if pseudos that have been
4763 assigned to registers of class CLASS would likely be spilled
4764 because registers of CLASS are needed for spill registers. */
4766 bool
4767 class_likely_spilled_p (int c)
4769 return (c != ALL_REGS && c != ADDW_REGS);
4772 /* Valid attributes:
4773 progmem - put data into program memory;
4774 signal - make a function a hardware interrupt handler. After the
4775 function prologue, interrupts remain disabled;
4776 interrupt - make a function a hardware interrupt handler. After the
4777 function prologue, interrupts are enabled;
4778 naked - don't generate function prologue/epilogue and `ret' command.
4780 Only the `progmem' attribute is valid for a type. */
4782 /* Handle a "progmem" attribute; arguments as in
4783 struct attribute_spec.handler.  NODE is the decl (or type decl) the
   attribute is attached to; on rejection *NO_ADD_ATTRS is set so the
   attribute is dropped.  Always returns NULL_TREE.  */
4784 static tree
4785 avr_handle_progmem_attribute (tree *node, tree name,
4786 tree args ATTRIBUTE_UNUSED,
4787 int flags ATTRIBUTE_UNUSED,
4788 bool *no_add_attrs)
4790 if (DECL_P (*node))
4792 if (TREE_CODE (*node) == TYPE_DECL)
4794 /* This is really a decl attribute, not a type attribute,
4795 but try to handle it for GCC 3.0 backwards compatibility.  */
/* Move the attribute onto a new variant of the underlying type.  */
4797 tree type = TREE_TYPE (*node);
4798 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4799 tree newtype = build_type_attribute_variant (type, attr);
4801 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4802 TREE_TYPE (*node) = newtype;
4803 *no_add_attrs = true;
/* Only static-storage variables can live in flash.  */
4805 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Data in program memory cannot be (re)initialized at run time, so
   an uninitialized non-extern definition makes no sense.  */
4807 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4809 warning (0, "only initialized variables can be placed into "
4810 "program memory area");
4811 *no_add_attrs = true;
4814 else
4816 warning (OPT_Wattributes, "%qE attribute ignored",
4817 name);
4818 *no_add_attrs = true;
4822 return NULL_TREE;
4825 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4826 struct attribute_spec.handler. */
4828 static tree
4829 avr_handle_fndecl_attribute (tree *node, tree name,
4830 tree args ATTRIBUTE_UNUSED,
4831 int flags ATTRIBUTE_UNUSED,
4832 bool *no_add_attrs)
4834 if (TREE_CODE (*node) != FUNCTION_DECL)
4836 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4837 name);
4838 *no_add_attrs = true;
4841 return NULL_TREE;
4844 static tree
4845 avr_handle_fntype_attribute (tree *node, tree name,
4846 tree args ATTRIBUTE_UNUSED,
4847 int flags ATTRIBUTE_UNUSED,
4848 bool *no_add_attrs)
4850 if (TREE_CODE (*node) != FUNCTION_TYPE)
4852 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4853 name);
4854 *no_add_attrs = true;
4857 return NULL_TREE;
4860 /* Look for attribute `progmem' in DECL
4861 if found return 1, otherwise 0.  Checks both the decl's own
   attribute list (ATTRIBUTES) and the attributes of its type, looking
   through any levels of array type.  */
4864 avr_progmem_p (tree decl, tree attributes)
4866 tree a;
/* Only variables can live in program memory.  */
4868 if (TREE_CODE (decl) != VAR_DECL)
4869 return 0;
4871 if (NULL_TREE
4872 != lookup_attribute ("progmem", attributes))
4873 return 1;
/* Strip array types to reach the element type, whose attributes
   carry progmem for arrays-of-progmem.  */
4875 a=decl;
4877 a = TREE_TYPE(a);
4878 while (TREE_CODE (a) == ARRAY_TYPE);
4880 if (a == error_mark_node)
4881 return 0;
4883 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4884 return 1;
4886 return 0;
4889 /* Add the section attribute if the variable is in progmem:  such
   variables are forced into the ".progmem.data" section and marked
   read-only so no run-time initialization is generated for them.  */
4891 static void
4892 avr_insert_attributes (tree node, tree *attributes)
4894 if (TREE_CODE (node) == VAR_DECL
4895 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4896 && avr_progmem_p (node, *attributes))
4898 static const char dsec[] = ".progmem.data";
/* Prepend section(".progmem.data") to the decl's attribute list.  */
4899 *attributes = tree_cons (get_identifier ("section"),
4900 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4901 *attributes);
4903 /* ??? This seems sketchy. Why can't the user declare the
4904 thing const in the first place? */
4905 TREE_READONLY (node) = 1;
4909 /* A get_unnamed_section callback for switching to progmem_section. */
4911 static void
4912 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4914 fprintf (asm_out_file,
4915 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4916 AVR_HAVE_JMP_CALL ? "a" : "ax");
4917 /* Should already be aligned, this is just to be safe if it isn't. */
4918 fprintf (asm_out_file, "\t.p2align 1\n");
4921 /* Implement TARGET_ASM_INIT_SECTIONS. */
4923 static void
4924 avr_asm_init_sections (void)
4926 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4927 avr_output_progmem_section_asm_op,
4928 NULL);
4929 readonly_data_section = data_section;
4932 static unsigned int
4933 avr_section_type_flags (tree decl, const char *name, int reloc)
4935 unsigned int flags = default_section_type_flags (decl, name, reloc);
4937 if (strncmp (name, ".noinit", 7) == 0)
4939 if (decl && TREE_CODE (decl) == VAR_DECL
4940 && DECL_INITIAL (decl) == NULL_TREE)
4941 flags |= SECTION_BSS; /* @nobits */
4942 else
4943 warning (0, "only uninitialized variables can be placed in the "
4944 ".noinit section");
4947 return flags;
4950 /* Outputs some appropriate text to go at the start of an assembler
4951 file. */
4953 static void
4954 avr_file_start (void)
4956 if (avr_current_arch->asm_only)
4957 error ("MCU %qs supported for assembler only", avr_mcu_name);
4959 default_file_start ();
4961 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4962 fputs ("__SREG__ = 0x3f\n"
4963 "__SP_H__ = 0x3e\n"
4964 "__SP_L__ = 0x3d\n", asm_out_file);
4966 fputs ("__tmp_reg__ = 0\n"
4967 "__zero_reg__ = 1\n", asm_out_file);
4969 /* FIXME: output these only if there is anything in the .data / .bss
4970 sections - some code size could be saved by not linking in the
4971 initialization code from libgcc if one or both sections are empty. */
4972 fputs ("\t.global __do_copy_data\n", asm_out_file);
4973 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4976 /* Implement TARGET_ASM_FILE_END.  Nothing needs to be emitted at the
4977 end of an AVR assembler file; the body appears empty in this revision. */
4979 static void
4980 avr_file_end (void)
4984 /* Choose the order in which to allocate hard registers for
4985 pseudo-registers local to a basic block.
4987 Store the desired register order in the array `reg_alloc_order'.
4988 Element 0 should be the register to allocate first; element 1, the
4989 next register; and so on.  Three fixed orders are selectable via the
   -morder1 / -morder2 options; registers 32-35 are the non-GPR entries
   (stack pointer etc.) and always come last.  */
4991 void
4992 order_regs_for_local_alloc (void)
4994 unsigned int i;
/* Default order: prefer r24/r25, then the other call-clobbered pairs,
   then pointer registers Z, X, Y, then call-saved registers.  */
4995 static const int order_0[] = {
4996 24,25,
4997 18,19,
4998 20,21,
4999 22,23,
5000 30,31,
5001 26,27,
5002 28,29,
5003 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5004 0,1,
5005 32,33,34,35
/* -morder1: like order_0 but r24/r25 demoted behind the other pairs.  */
5007 static const int order_1[] = {
5008 18,19,
5009 20,21,
5010 22,23,
5011 24,25,
5012 30,31,
5013 26,27,
5014 28,29,
5015 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5016 0,1,
5017 32,33,34,35
/* -morder2: call-clobbered pairs high byte first, descending.  */
5019 static const int order_2[] = {
5020 25,24,
5021 23,22,
5022 21,20,
5023 19,18,
5024 30,31,
5025 26,27,
5026 28,29,
5027 17,16,
5028 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5029 1,0,
5030 32,33,34,35
5033 const int *order = (TARGET_ORDER_1 ? order_1 :
5034 TARGET_ORDER_2 ? order_2 :
5035 order_0);
/* All three tables have the same number of entries.  */
5036 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5037 reg_alloc_order[i] = order[i];
5041 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5042 cost of an RTX operand given its context. X is the rtx of the
5043 operand, MODE is its mode, and OUTER is the rtx_code of this
5044 operand's parent operator. */
5046 static int
5047 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5048 bool speed)
5050 enum rtx_code code = GET_CODE (x);
5051 int total;
5053 switch (code)
5055 case REG:
5056 case SUBREG:
5057 return 0;
5059 case CONST_INT:
5060 case CONST_DOUBLE:
5061 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5063 default:
5064 break;
5067 total = 0;
5068 avr_rtx_costs (x, code, outer, &total, speed);
5069 return total;
5072 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5073 is to be calculated. Return true if the complete cost has been
5074 computed, and false if subexpressions should be scanned. In either
5075 case, *TOTAL contains the cost result. */
5077 static bool
5078 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5079 bool speed)
5081 enum rtx_code code = (enum rtx_code) codearg;
5082 enum machine_mode mode = GET_MODE (x);
5083 HOST_WIDE_INT val;
5085 switch (code)
5087 case CONST_INT:
5088 case CONST_DOUBLE:
5089 /* Immediate constants are as cheap as registers. */
5090 *total = 0;
5091 return true;
5093 case MEM:
5094 case CONST:
5095 case LABEL_REF:
5096 case SYMBOL_REF:
5097 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5098 return true;
5100 case NEG:
5101 switch (mode)
5103 case QImode:
5104 case SFmode:
5105 *total = COSTS_N_INSNS (1);
5106 break;
5108 case HImode:
5109 *total = COSTS_N_INSNS (3);
5110 break;
5112 case SImode:
5113 *total = COSTS_N_INSNS (7);
5114 break;
5116 default:
5117 return false;
5119 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5120 return true;
5122 case ABS:
5123 switch (mode)
5125 case QImode:
5126 case SFmode:
5127 *total = COSTS_N_INSNS (1);
5128 break;
5130 default:
5131 return false;
5133 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5134 return true;
5136 case NOT:
5137 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5138 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5139 return true;
5141 case ZERO_EXTEND:
5142 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5143 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5144 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5145 return true;
5147 case SIGN_EXTEND:
5148 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5149 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5150 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5151 return true;
5153 case PLUS:
5154 switch (mode)
5156 case QImode:
5157 *total = COSTS_N_INSNS (1);
5158 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5159 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5160 break;
5162 case HImode:
5163 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5165 *total = COSTS_N_INSNS (2);
5166 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5168 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5169 *total = COSTS_N_INSNS (1);
5170 else
5171 *total = COSTS_N_INSNS (2);
5172 break;
5174 case SImode:
5175 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5177 *total = COSTS_N_INSNS (4);
5178 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5180 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5181 *total = COSTS_N_INSNS (1);
5182 else
5183 *total = COSTS_N_INSNS (4);
5184 break;
5186 default:
5187 return false;
5189 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5190 return true;
5192 case MINUS:
5193 case AND:
5194 case IOR:
5195 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5196 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5197 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5198 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5199 return true;
5201 case XOR:
5202 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5203 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5204 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5205 return true;
5207 case MULT:
5208 switch (mode)
5210 case QImode:
5211 if (AVR_HAVE_MUL)
5212 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5213 else if (!speed)
5214 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5215 else
5216 return false;
5217 break;
5219 case HImode:
5220 if (AVR_HAVE_MUL)
5221 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5222 else if (!speed)
5223 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5224 else
5225 return false;
5226 break;
5228 default:
5229 return false;
5231 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5233 return true;
5235 case DIV:
5236 case MOD:
5237 case UDIV:
5238 case UMOD:
5239 if (!speed)
5240 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5241 else
5242 return false;
5243 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5244 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5245 return true;
5247 case ROTATE:
5248 switch (mode)
5250 case QImode:
5251 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5252 *total = COSTS_N_INSNS (1);
5254 break;
5256 case HImode:
5257 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5258 *total = COSTS_N_INSNS (3);
5260 break;
5262 case SImode:
5263 if (CONST_INT_P (XEXP (x, 1)))
5264 switch (INTVAL (XEXP (x, 1)))
5266 case 8:
5267 case 24:
5268 *total = COSTS_N_INSNS (5);
5269 break;
5270 case 16:
5271 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5272 break;
5274 break;
5276 default:
5277 return false;
5279 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5280 return true;
5282 case ASHIFT:
5283 switch (mode)
5285 case QImode:
5286 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5288 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5291 else
5293 val = INTVAL (XEXP (x, 1));
5294 if (val == 7)
5295 *total = COSTS_N_INSNS (3);
5296 else if (val >= 0 && val <= 7)
5297 *total = COSTS_N_INSNS (val);
5298 else
5299 *total = COSTS_N_INSNS (1);
5301 break;
5303 case HImode:
5304 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5306 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5307 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5309 else
5310 switch (INTVAL (XEXP (x, 1)))
5312 case 0:
5313 *total = 0;
5314 break;
5315 case 1:
5316 case 8:
5317 *total = COSTS_N_INSNS (2);
5318 break;
5319 case 9:
5320 *total = COSTS_N_INSNS (3);
5321 break;
5322 case 2:
5323 case 3:
5324 case 10:
5325 case 15:
5326 *total = COSTS_N_INSNS (4);
5327 break;
5328 case 7:
5329 case 11:
5330 case 12:
5331 *total = COSTS_N_INSNS (5);
5332 break;
5333 case 4:
5334 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5335 break;
5336 case 6:
5337 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5338 break;
5339 case 5:
5340 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5341 break;
5342 default:
5343 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5344 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5346 break;
5348 case SImode:
5349 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5351 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5352 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5354 else
5355 switch (INTVAL (XEXP (x, 1)))
5357 case 0:
5358 *total = 0;
5359 break;
5360 case 24:
5361 *total = COSTS_N_INSNS (3);
5362 break;
5363 case 1:
5364 case 8:
5365 case 16:
5366 *total = COSTS_N_INSNS (4);
5367 break;
5368 case 31:
5369 *total = COSTS_N_INSNS (6);
5370 break;
5371 case 2:
5372 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5373 break;
5374 default:
5375 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5378 break;
5380 default:
5381 return false;
5383 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5384 return true;
5386 case ASHIFTRT:
5387 switch (mode)
5389 case QImode:
5390 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5392 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5393 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5395 else
5397 val = INTVAL (XEXP (x, 1));
5398 if (val == 6)
5399 *total = COSTS_N_INSNS (4);
5400 else if (val == 7)
5401 *total = COSTS_N_INSNS (2);
5402 else if (val >= 0 && val <= 7)
5403 *total = COSTS_N_INSNS (val);
5404 else
5405 *total = COSTS_N_INSNS (1);
5407 break;
5409 case HImode:
5410 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5412 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5413 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5415 else
5416 switch (INTVAL (XEXP (x, 1)))
5418 case 0:
5419 *total = 0;
5420 break;
5421 case 1:
5422 *total = COSTS_N_INSNS (2);
5423 break;
5424 case 15:
5425 *total = COSTS_N_INSNS (3);
5426 break;
5427 case 2:
5428 case 7:
5429 case 8:
5430 case 9:
5431 *total = COSTS_N_INSNS (4);
5432 break;
5433 case 10:
5434 case 14:
5435 *total = COSTS_N_INSNS (5);
5436 break;
5437 case 11:
5438 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5439 break;
5440 case 12:
5441 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5442 break;
5443 case 6:
5444 case 13:
5445 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5446 break;
5447 default:
5448 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5449 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5451 break;
5453 case SImode:
5454 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5456 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5457 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5459 else
5460 switch (INTVAL (XEXP (x, 1)))
5462 case 0:
5463 *total = 0;
5464 break;
5465 case 1:
5466 *total = COSTS_N_INSNS (4);
5467 break;
5468 case 8:
5469 case 16:
5470 case 24:
5471 *total = COSTS_N_INSNS (6);
5472 break;
5473 case 2:
5474 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5475 break;
5476 case 31:
5477 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5478 break;
5479 default:
5480 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5481 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5483 break;
5485 default:
5486 return false;
5488 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5489 return true;
5491 case LSHIFTRT:
5492 switch (mode)
5494 case QImode:
5495 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5497 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5498 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5500 else
5502 val = INTVAL (XEXP (x, 1));
5503 if (val == 7)
5504 *total = COSTS_N_INSNS (3);
5505 else if (val >= 0 && val <= 7)
5506 *total = COSTS_N_INSNS (val);
5507 else
5508 *total = COSTS_N_INSNS (1);
5510 break;
5512 case HImode:
5513 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5515 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5516 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5518 else
5519 switch (INTVAL (XEXP (x, 1)))
5521 case 0:
5522 *total = 0;
5523 break;
5524 case 1:
5525 case 8:
5526 *total = COSTS_N_INSNS (2);
5527 break;
5528 case 9:
5529 *total = COSTS_N_INSNS (3);
5530 break;
5531 case 2:
5532 case 10:
5533 case 15:
5534 *total = COSTS_N_INSNS (4);
5535 break;
5536 case 7:
5537 case 11:
5538 *total = COSTS_N_INSNS (5);
5539 break;
5540 case 3:
5541 case 12:
5542 case 13:
5543 case 14:
5544 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5545 break;
5546 case 4:
5547 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5548 break;
5549 case 5:
5550 case 6:
5551 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5552 break;
5553 default:
5554 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5555 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5557 break;
5559 case SImode:
5560 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5562 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5563 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5565 else
5566 switch (INTVAL (XEXP (x, 1)))
5568 case 0:
5569 *total = 0;
5570 break;
5571 case 1:
5572 *total = COSTS_N_INSNS (4);
5573 break;
5574 case 2:
5575 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5576 break;
5577 case 8:
5578 case 16:
5579 case 24:
5580 *total = COSTS_N_INSNS (4);
5581 break;
5582 case 31:
5583 *total = COSTS_N_INSNS (6);
5584 break;
5585 default:
5586 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5587 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5589 break;
5591 default:
5592 return false;
5594 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5595 return true;
5597 case COMPARE:
5598 switch (GET_MODE (XEXP (x, 0)))
5600 case QImode:
5601 *total = COSTS_N_INSNS (1);
5602 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5603 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5604 break;
5606 case HImode:
5607 *total = COSTS_N_INSNS (2);
5608 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5609 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5610 else if (INTVAL (XEXP (x, 1)) != 0)
5611 *total += COSTS_N_INSNS (1);
5612 break;
5614 case SImode:
5615 *total = COSTS_N_INSNS (4);
5616 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5617 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5618 else if (INTVAL (XEXP (x, 1)) != 0)
5619 *total += COSTS_N_INSNS (3);
5620 break;
5622 default:
5623 return false;
5625 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5626 return true;
5628 default:
5629 break;
5631 return false;
5634 /* Calculate the cost of a memory address. */
5636 static int
5637 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5639 if (GET_CODE (x) == PLUS
5640 && GET_CODE (XEXP (x,1)) == CONST_INT
5641 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5642 && INTVAL (XEXP (x,1)) >= 61)
5643 return 18;
5644 if (CONSTANT_ADDRESS_P (x))
5646 if (optimize > 0 && io_address_operand (x, QImode))
5647 return 2;
5648 return 4;
5650 return 4;
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  /* X is a MEM; look at its address: accept (PLUS reg const_int) where
     the displacement fits the LDD/STD offset range for X's mode.  */
  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
	  <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);

      if (TARGET_ALL_DEBUG)
	{
	  fprintf (stderr, ("extra_constraint:\n"
			    "reload_completed: %d\n"
			    "reload_in_progress: %d\n"),
		   reload_completed, reload_in_progress);
	  debug_rtx (x);
	}

      /* Before register allocation any pseudo is acceptable; after,
	 only the Y/Z pointer registers support displacements.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	return 1;		/* allocate pseudos */
      else if (regno == REG_Z || regno == REG_Y)
	return 1;		/* strictly check */
      else if (xx == frame_pointer_rtx
	       || xx == arg_pointer_rtx)
	return 1;		/* XXX frame & arg pointer checks */
    }

  return 0;
}
5686 /* Convert condition code CONDITION to the valid AVR condition code. */
5688 RTX_CODE
5689 avr_normalize_condition (RTX_CODE condition)
5691 switch (condition)
5693 case GT:
5694 return GE;
5695 case GTU:
5696 return GEU;
5697 case LE:
5698 return LT;
5699 case LEU:
5700 return LTU;
5701 default:
5702 gcc_unreachable ();
/* This function optimizes conditional jumps.

   It scans for cc0-setting compare insns and rewrites them in place:
   - reg-reg compares get their operands swapped (and the following
     branch condition swapped) so the canonical operand order is used;
   - tst-style compares (reg vs 0) are reversed to 0 vs reg;
   - reg vs CONST_INT compares are strengthened via
     avr_simplify_comparison_p / avr_normalize_condition, bumping the
     constant by one and normalizing GT/LE-style conditions.  */

static void
avr_reorg (void)
{
  rtx insn, pattern;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Only look at real insns that are a single SET.  */
      if (! (GET_CODE (insn) == INSN
	     || GET_CODE (insn) == CALL_INSN
	     || GET_CODE (insn) == JUMP_INSN)
	  || !single_set (insn))
	continue;

      pattern = PATTERN (insn);

      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
	  && SET_DEST (pattern) == cc0_rtx
	  && compare_diff_p (insn))
	{
	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
	    {
	      /* Now we work under compare insn.  */

	      pattern = SET_SRC (pattern);
	      if (true_regnum (XEXP (pattern,0)) >= 0
		  && true_regnum (XEXP (pattern,1)) >= 0 )
		{
		  /* Reg-reg compare: swap the operands and the
		     condition of the following branch.
		     NOTE(review): assumes next_real_insn never returns
		     NULL here, i.e. a compare is always followed by a
		     conditional branch -- confirm against the cc0
		     expansion patterns.  */
		  rtx x = XEXP (pattern,0);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern,0) = XEXP (pattern,1);
		  XEXP (pattern,1) = x;
		  INSN_CODE (next) = -1;
		}
	      else if (true_regnum (XEXP (pattern, 0)) >= 0
		       && XEXP (pattern, 1) == const0_rtx)
		{
		  /* This is a tst insn, we can reverse it.  */
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);

		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern, 1) = XEXP (pattern, 0);
		  XEXP (pattern, 0) = const0_rtx;
		  INSN_CODE (next) = -1;
		  INSN_CODE (insn) = -1;
		}
	      else if (true_regnum (XEXP (pattern,0)) >= 0
		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
		{
		  /* Reg-constant compare: if the strict condition can
		     be simplified, replace K by K+1 and normalize the
		     branch condition (e.g. GT x,K -> GE x,K+1).  */
		  rtx x = XEXP (pattern,1);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		    {
		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		      INSN_CODE (next) = -1;
		      INSN_CODE (insn) = -1;
		    }
		}
	    }
	}
    }
}
/* Returns register number for function return value.
   On AVR, scalar return values end with their low byte in r24.  */

int
avr_ret_register (void)
{
  return 24;
}
5793 /* Create an RTX representing the place where a
5794 library function returns a value of mode MODE. */
5797 avr_libcall_value (enum machine_mode mode)
5799 int offs = GET_MODE_SIZE (mode);
5800 if (offs < 2)
5801 offs = 2;
5802 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

rtx
avr_function_value (const_tree type,
                    const_tree func ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  /* Non-BLKmode values follow the libcall return convention.  */
  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type));

  /* BLKmode: round the size up to 2, 4 or 8 bytes so the value ends
     at RET_REGISTER + 1.
     NOTE(review): int_size_in_bytes may return -1 for variable-sized
     types; presumably those are forced into memory by
     avr_return_in_memory before reaching here -- confirm.  */
  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
}
/* Places additional restrictions on the register class to
   use when it is necessary to copy value X into a register
   in class CLASS.  */

enum reg_class
preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
{
  /* No additional restrictions on AVR: the requested class is fine.  */
  return rclass;
}
5840 test_hard_reg_class (enum reg_class rclass, rtx x)
5842 int regno = true_regnum (x);
5843 if (regno < 0)
5844 return 0;
5846 if (TEST_HARD_REG_CLASS (rclass, regno))
5847 return 1;
5849 return 0;
5854 jump_over_one_insn_p (rtx insn, rtx dest)
5856 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5857 ? XEXP (dest, 0)
5858 : dest);
5859 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5860 int dest_addr = INSN_ADDRESSES (uid);
5861 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5864 /* Returns 1 if a value of mode MODE can be stored starting with hard
5865 register number REGNO. On the enhanced core, anything larger than
5866 1 byte must start in even numbered register for "movw" to work
5867 (this way we don't have to check for odd registers everywhere). */
5870 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5872 /* Disallow QImode in stack pointer regs. */
5873 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5874 return 0;
5876 /* The only thing that can go into registers r28:r29 is a Pmode. */
5877 if (regno == REG_Y && mode == Pmode)
5878 return 1;
5880 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5881 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5882 return 0;
5884 if (mode == QImode)
5885 return 1;
5887 /* Modes larger than QImode occupy consecutive registers. */
5888 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5889 return 0;
5891 /* All modes larger than QImode should start in an even register. */
5892 return !(regno & 1);
/* Output assembler to load a 16-bit constant OPERANDS[1] into the
   register pair OPERANDS[0], using the scratch register OPERANDS[2]
   for LDI (the destination may not be an upper register).  If LEN is
   non-NULL store the instruction count there; TMP makes *LEN always
   writable.  Returns the asm template string.  */

const char *
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  int tmp;
  if (!len)
    len = &tmp;

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      int val = INTVAL (operands[1]);

      /* Low byte zero: only the high byte needs LDI.  */
      if ((val & 0xff) == 0)
	{
	  *len = 3;
	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
		  AS2 (ldi,%2,hi8(%1)) CR_TAB
		  AS2 (mov,%B0,%2));
	}

      /* High byte zero: only the low byte needs LDI.  */
      else if ((val & 0xff00) == 0)
	{
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
		  AS2 (mov,%B0,__zero_reg__));
	}

      /* Both bytes identical: load once, copy twice.  */
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
	{
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
		  AS2 (mov,%B0,%2));
	}
    }

  /* General case: LDI + MOV for each byte.  */
  *len = 4;
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
	  AS2 (mov,%A0,%2) CR_TAB
	  AS2 (ldi,%2,hi8(%1)) CR_TAB
	  AS2 (mov,%B0,%2));
}
/* Output assembler to load a 32-bit (SI/SF) value OPERANDS[1] into
   OPERANDS[0], byte by byte, via the scratch register OPERANDS[2].
   Zero constant bytes are copied from __zero_reg__ instead of using
   LDI+MOV.  If LEN is non-NULL, only compute the instruction count
   (4 MOVs plus one LDI per nonzero byte, 8 for a non-constant) and
   emit nothing.  */

const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      if (cnst)
	*len = 4 + ((INTVAL (src) & 0xff) != 0)
	  + ((INTVAL (src) & 0xff00) != 0)
	  + ((INTVAL (src) & 0xff0000) != 0)
	  + ((INTVAL (src) & 0xff000000) != 0);
      else
	*len = 8;

      return "";
    }

  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
5985 void
5986 avr_output_bld (rtx operands[], int bit_nr)
5988 static char s[] = "bld %A0,0";
5990 s[5] = 'A' + (bit_nr >> 3);
5991 s[8] = '0' + (bit_nr & 7);
5992 output_asm_insn (s, operands);
5995 void
5996 avr_output_addr_vec_elt (FILE *stream, int value)
5998 switch_to_section (progmem_section);
5999 if (AVR_HAVE_JMP_CALL)
6000 fprintf (stream, "\t.word gs(.L%d)\n", value);
6001 else
6002 fprintf (stream, "\trjmp .L%d\n", value);
6005 /* Returns true if SCRATCH are safe to be allocated as a scratch
6006 registers (for a define_peephole2) in the current function. */
6008 bool
6009 avr_hard_regno_scratch_ok (unsigned int regno)
6011 /* Interrupt functions can only use registers that have already been saved
6012 by the prologue, even if they would normally be call-clobbered. */
6014 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6015 && !df_regs_ever_live_p (regno))
6016 return false;
6018 return true;
6021 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6024 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6025 unsigned int new_reg)
6027 /* Interrupt functions can only use registers that have already been
6028 saved by the prologue, even if they would normally be
6029 call-clobbered. */
6031 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6032 && !df_regs_ever_live_p (new_reg))
6033 return 0;
6035 return 1;
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  /* A skip instruction can only jump over one word; for longer
     distances the condition is reversed and a real jump emitted.  */
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit degenerate to EQ/NE on that bit.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* Bit test on an I/O address.  Low I/O space (< 0x40) supports
	 SBIS/SBIC directly; higher addresses go through __tmp_reg__.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
	}
      else
	{
	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Patch the skip sense, byte selector and bit digit into
	     the template in place.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = INTVAL (operands[2]);
	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  /* Long form: skip over an RJMP that hops past the JMP.  */
  if (long_jump)
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%x3));
  if (!reverse)
    return AS1 (rjmp,%x3);
  return "";
}
/* Worker function for TARGET_ASM_CONSTRUCTOR.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  /* Reference __do_global_ctors so the linker pulls in the startup
     code that walks the constructor table.  */
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
/* Worker function for TARGET_ASM_DESTRUCTOR.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  /* Reference __do_global_dtors so the linker pulls in the exit
     code that walks the destructor table.  */
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
6125 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6127 static bool
6128 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6130 if (TYPE_MODE (type) == BLKmode)
6132 HOST_WIDE_INT size = int_size_in_bytes (type);
6133 return (size == -1 || size > 8);
6135 else
6136 return false;
6139 /* Worker function for CASE_VALUES_THRESHOLD. */
6141 unsigned int avr_case_values_threshold (void)
6143 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6146 #include "gt-avr.h"