1 /* Subroutines for insn-output.cc for ATMEL AVR micro controllers
2 Copyright (C) 1998-2023 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #define IN_TARGET_CODE 1
26 #include "coretypes.h"
31 #include "stringpool.h"
34 #include "c-family/c-common.h"
43 #include "conditions.h"
44 #include "insn-attr.h"
48 #include "stor-layout.h"
52 #include "langhooks.h"
56 #include "tree-pass.h"
57 #include "print-rtl.h"
60 /* This file should be included last. */
61 #include "target-def.h"
63 /* Maximal allowed offset for an address in the LD command */
64 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
66 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
67 address space where data is to be located.
68 As the only non-generic address spaces are all located in flash,
69 this can be used to test if data shall go into some .progmem* section.
70 This must be the rightmost field of machine dependent section flags. */
71 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
73 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
74 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
76 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
77 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
78 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
80 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
81 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
84 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
85 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
86 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
87 / SYMBOL_FLAG_MACH_DEP)
89 /* (AVR_TINY only): Symbol has attribute progmem */
90 #define AVR_SYMBOL_FLAG_TINY_PM \
91 (SYMBOL_FLAG_MACH_DEP << 7)
93 /* (AVR_TINY only): Symbol has attribute absdata */
94 #define AVR_SYMBOL_FLAG_TINY_ABSDATA \
95 (SYMBOL_FLAG_MACH_DEP << 8)
97 #define TINY_ADIW(REG1, REG2, I) \
98 "subi " #REG1 ",lo8(-(" #I "))" CR_TAB \
99 "sbci " #REG2 ",hi8(-(" #I "))"
101 #define TINY_SBIW(REG1, REG2, I) \
102 "subi " #REG1 ",lo8((" #I "))" CR_TAB \
103 "sbci " #REG2 ",hi8((" #I "))"
105 #define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
106 #define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
108 /* Known address spaces. The order must be the same as in the respective
109 enum from avr.h (or designated initializers must be used). */
110 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
112 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
113 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
114 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
115 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
116 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
117 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
118 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
119 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
123 /* Holding RAM addresses of some SFRs used by the compiler and that
124 are unique over all devices in an architecture like 'avr4'. */
128 /* SREG: The processor status */
131 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
137 /* RAMPZ: The high byte of 24-bit address used with ELPM */
140 /* SP: The stack pointer and its low and high byte */
145 static avr_addr_t avr_addr
;
148 /* Prototypes for local helper functions. */
150 static const char* out_movqi_r_mr (rtx_insn
*, rtx
[], int*);
151 static const char* out_movhi_r_mr (rtx_insn
*, rtx
[], int*);
152 static const char* out_movsi_r_mr (rtx_insn
*, rtx
[], int*);
153 static const char* out_movqi_mr_r (rtx_insn
*, rtx
[], int*);
154 static const char* out_movhi_mr_r (rtx_insn
*, rtx
[], int*);
155 static const char* out_movsi_mr_r (rtx_insn
*, rtx
[], int*);
157 static int get_sequence_length (rtx_insn
*insns
);
158 static int sequent_regs_live (void);
159 static const char *ptrreg_to_str (int);
160 static const char *cond_string (enum rtx_code
);
161 static int avr_num_arg_regs (machine_mode
, const_tree
);
162 static int avr_operand_rtx_cost (rtx
, machine_mode
, enum rtx_code
,
164 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
165 static struct machine_function
* avr_init_machine_status (void);
168 /* Prototypes for hook implementors if needed before their implementation. */
170 static bool avr_rtx_costs (rtx
, machine_mode
, int, int, int*, bool);
173 /* Allocate registers from r25 to r8 for parameters for function calls. */
174 #define FIRST_CUM_REG 26
176 /* Last call saved register */
177 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
179 /* Implicit target register of LPM instruction (R0) */
180 extern GTY(()) rtx lpm_reg_rtx
;
183 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
184 extern GTY(()) rtx lpm_addr_reg_rtx
;
185 rtx lpm_addr_reg_rtx
;
187 /* Temporary register RTX (reg:QI TMP_REGNO) */
188 extern GTY(()) rtx tmp_reg_rtx
;
191 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
192 extern GTY(()) rtx zero_reg_rtx
;
195 /* Condition Code register RTX (reg:CC REG_CC) */
196 extern GTY(()) rtx cc_reg_rtx
;
199 /* RTXs for all general purpose registers as QImode */
200 extern GTY(()) rtx all_regs_rtx
[32];
201 rtx all_regs_rtx
[32];
203 /* SREG, the processor status */
204 extern GTY(()) rtx sreg_rtx
;
207 /* RAMP* special function registers */
208 extern GTY(()) rtx rampd_rtx
;
209 extern GTY(()) rtx rampx_rtx
;
210 extern GTY(()) rtx rampy_rtx
;
211 extern GTY(()) rtx rampz_rtx
;
217 /* RTX containing the strings "" and "e", respectively */
218 static GTY(()) rtx xstring_empty
;
219 static GTY(()) rtx xstring_e
;
221 /* Current architecture. */
222 const avr_arch_t
*avr_arch
;
224 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
225 or to address space __flash* or __memx. Only used as singletons inside
226 avr_asm_select_section, but it must not be local there because of GTY. */
227 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
229 /* Condition for insns/expanders from avr-dimode.md. */
230 bool avr_have_dimode
= true;
232 /* To track if code will use .bss and/or .data. */
233 bool avr_need_clear_bss_p
= false;
234 bool avr_need_copy_data_p
= false;
237 /* Transform UP into lowercase and write the result to LO.
238 You must provide enough space for LO. Return LO. */
241 avr_tolower (char *lo
, const char *up
)
245 for (; *up
; up
++, lo
++)
254 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
255 Return true if the least significant N_BYTES bytes of XVAL all have a
256 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
257 of integers which contains an integer N iff bit N of POP_MASK is set. */
260 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
262 machine_mode mode
= GET_MODE (xval
);
264 if (VOIDmode
== mode
)
267 for (int i
= 0; i
< n_bytes
; i
++)
269 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
270 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
272 if ((pop_mask
& (1 << popcount_hwi (val8
))) == 0)
280 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
281 the bit representation of X by "casting" it to CONST_INT. */
284 avr_to_int_mode (rtx x
)
286 machine_mode mode
= GET_MODE (x
);
288 return VOIDmode
== mode
290 : simplify_gen_subreg (int_mode_for_mode (mode
).require (), x
, mode
, 0);
295 static const pass_data avr_pass_data_recompute_notes
=
298 "", // name (will be patched)
299 OPTGROUP_NONE
, // optinfo_flags
301 0, // properties_required
302 0, // properties_provided
303 0, // properties_destroyed
304 0, // todo_flags_start
305 TODO_df_finish
| TODO_df_verify
// todo_flags_finish
309 class avr_pass_recompute_notes
: public rtl_opt_pass
312 avr_pass_recompute_notes (gcc::context
*ctxt
, const char *name
)
313 : rtl_opt_pass (avr_pass_data_recompute_notes
, ctxt
)
318 virtual unsigned int execute (function
*)
320 df_note_add_problem ();
325 }; // avr_pass_recompute_notes
327 static const pass_data avr_pass_data_casesi
=
330 "", // name (will be patched)
331 OPTGROUP_NONE
, // optinfo_flags
333 0, // properties_required
334 0, // properties_provided
335 0, // properties_destroyed
336 0, // todo_flags_start
337 0 // todo_flags_finish
341 class avr_pass_casesi
: public rtl_opt_pass
344 avr_pass_casesi (gcc::context
*ctxt
, const char *name
)
345 : rtl_opt_pass (avr_pass_data_casesi
, ctxt
)
350 void avr_rest_of_handle_casesi (function
*);
352 virtual bool gate (function
*) { return optimize
> 0; }
354 virtual unsigned int execute (function
*func
)
356 avr_rest_of_handle_casesi (func
);
360 }; // avr_pass_casesi
363 static const pass_data avr_pass_data_ifelse
=
366 "", // name (will be patched)
367 OPTGROUP_NONE
, // optinfo_flags
369 0, // properties_required
370 0, // properties_provided
371 0, // properties_destroyed
372 0, // todo_flags_start
373 TODO_df_finish
| TODO_df_verify
// todo_flags_finish
376 class avr_pass_ifelse
: public rtl_opt_pass
379 avr_pass_ifelse (gcc::context
*ctxt
, const char *name
)
380 : rtl_opt_pass (avr_pass_data_ifelse
, ctxt
)
385 void avr_rest_of_handle_ifelse (function
*);
387 virtual bool gate (function
*) { return optimize
> 0; }
389 virtual unsigned int execute (function
*func
)
391 avr_rest_of_handle_ifelse (func
);
395 }; // avr_pass_ifelse
400 make_avr_pass_recompute_notes (gcc::context
*ctxt
)
402 return new avr_pass_recompute_notes (ctxt
, "avr-notes-free-cfg");
406 make_avr_pass_casesi (gcc::context
*ctxt
)
408 return new avr_pass_casesi (ctxt
, "avr-casesi");
412 make_avr_pass_ifelse (gcc::context
*ctxt
)
414 return new avr_pass_ifelse (ctxt
, "avr-ifelse");
418 /* Make one parallel insn with all the patterns from insns i[0]..i[4]. */
421 avr_parallel_insn_from_insns (rtx_insn
*i
[5])
423 rtvec vec
= gen_rtvec (5, PATTERN (i
[0]), PATTERN (i
[1]), PATTERN (i
[2]),
424 PATTERN (i
[3]), PATTERN (i
[4]));
426 emit (gen_rtx_PARALLEL (VOIDmode
, vec
));
427 rtx_insn
*insn
= get_insns();
434 /* Return true if we see an insn stream generated by casesi expander together
435 with an extension to SImode of the switch value.
437 If this is the case, fill in the insns from casesi to INSNS[1..5] and
438 the SImode extension to INSNS[0]. Moreover, extract the operands of
439 pattern casesi_<mode>_sequence forged from the sequence to recog_data. */
442 avr_is_casesi_sequence (basic_block bb
, rtx_insn
*insn
, rtx_insn
*insns
[5])
446 /* A first and quick test for a casesi sequence. As a side effect of
447 the test, harvest respective insns to INSNS[0..4]. */
449 if (!(JUMP_P (insns
[4] = insn
)
450 // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
451 // hence the following test ensures that we are actually dealing
452 // with code from casesi.
453 && (set_4
= single_set (insns
[4]))
454 && UNSPEC
== GET_CODE (SET_SRC (set_4
))
455 && UNSPEC_INDEX_JMP
== XINT (SET_SRC (set_4
), 1)
457 && (insns
[3] = prev_real_insn (insns
[4]))
458 && (insns
[2] = prev_real_insn (insns
[3]))
459 && (insns
[1] = prev_real_insn (insns
[2]))
461 // Insn prior to casesi.
462 && (insns
[0] = prev_real_insn (insns
[1]))
463 && (set_0
= single_set (insns
[0]))
464 && extend_operator (SET_SRC (set_0
), SImode
)))
471 fprintf (dump_file
, ";; Sequence from casesi in "
472 "[bb %d]:\n\n", bb
->index
);
473 for (int i
= 0; i
< 5; i
++)
474 print_rtl_single (dump_file
, insns
[i
]);
477 /* We have to deal with quite some operands. Extracting them by hand
478 would be tedious, therefore wrap the insn patterns into a parallel,
479 run recog against it and then use insn extract to get the operands. */
481 rtx_insn
*xinsn
= avr_parallel_insn_from_insns (insns
);
483 INSN_CODE (xinsn
) = recog (PATTERN (xinsn
), xinsn
, NULL
/* num_clobbers */);
485 /* Failing to recognize means that someone changed the casesi expander or
486 that some passes prior to this one performed some unexpected changes.
487 Gracefully drop such situations instead of aborting. */
489 if (INSN_CODE (xinsn
) < 0)
492 fprintf (dump_file
, ";; Sequence not recognized, giving up.\n\n");
497 gcc_assert (CODE_FOR_casesi_qi_sequence
== INSN_CODE (xinsn
)
498 || CODE_FOR_casesi_hi_sequence
== INSN_CODE (xinsn
));
500 extract_insn (xinsn
);
502 // Assert on the anatomy of xinsn's operands we are going to work with.
504 gcc_assert (recog_data
.n_operands
== 11);
505 gcc_assert (recog_data
.n_dups
== 4);
509 fprintf (dump_file
, ";; Operands extracted:\n");
510 for (int i
= 0; i
< recog_data
.n_operands
; i
++)
511 avr_fdump (dump_file
, ";; $%d = %r\n", i
, recog_data
.operand
[i
]);
512 fprintf (dump_file
, "\n");
519 /* Perform some extra checks on operands of casesi_<mode>_sequence.
520 Not all operand dependencies can be described by means of predicates.
521 This function performs left over checks and should always return true.
522 Returning false means that someone changed the casesi expander but did
523 not adjust casesi_<mode>_sequence. */
526 avr_casei_sequence_check_operands (rtx
*xop
)
528 rtx sub_5
= NULL_RTX
;
530 if (AVR_HAVE_EIJMP_EICALL
531 // The last clobber op of the tablejump.
532 && xop
[8] == all_regs_rtx
[24])
534 // $6 is: (subreg:SI ($5) 0)
538 if (!AVR_HAVE_EIJMP_EICALL
539 // $6 is: (plus:HI (subreg:SI ($5) 0)
541 && PLUS
== GET_CODE (xop
[6])
542 && LABEL_REF
== GET_CODE (XEXP (xop
[6], 1))
543 && rtx_equal_p (xop
[3], XEXP (XEXP (xop
[6], 1), 0))
544 // The last clobber op of the tablejump.
545 && xop
[8] == const0_rtx
)
547 sub_5
= XEXP (xop
[6], 0);
552 && SUBREG_BYTE (sub_5
) == 0
553 && rtx_equal_p (xop
[5], SUBREG_REG (sub_5
)))
557 fprintf (dump_file
, "\n;; Failed condition for casesi_<mode>_sequence\n\n");
563 /* INSNS[1..4] is a sequence as generated by casesi and INSNS[0] is an
564 extension of an 8-bit or 16-bit integer to SImode. XOP contains the
565 operands of INSNS as extracted by insn_extract from pattern
566 casesi_<mode>_sequence:
568 $0: SImode reg switch value as result of $9.
569 $1: Negative of smallest index in switch.
570 $2: Number of entries in switch.
572 $4: Label if out-of-bounds.
574 $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
575 2-byte PC: subreg:HI ($5)
576 $7: HI reg index into table (Z or pseudo)
577 $8: R24 or const0_rtx (to be clobbered)
578 $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
579 $10: QImode or HImode register input of $9.
581 Try to optimize this sequence, i.e. use the original HImode / QImode
582 switch value instead of SImode. */
585 avr_optimize_casesi (rtx_insn
*insns
[5], rtx
*xop
)
587 // Original mode of the switch value; this is QImode or HImode.
588 machine_mode mode
= GET_MODE (xop
[10]);
590 // How the original switch value was extended to SImode; this is
591 // SIGN_EXTEND or ZERO_EXTEND.
592 enum rtx_code code
= GET_CODE (xop
[9]);
594 // Lower index, upper index (plus one) and range of case values.
595 HOST_WIDE_INT low_idx
= -INTVAL (xop
[1]);
596 HOST_WIDE_INT num_idx
= INTVAL (xop
[2]);
597 HOST_WIDE_INT hig_idx
= low_idx
+ num_idx
;
599 // Maximum ranges of (un)signed QImode resp. HImode.
600 unsigned umax
= QImode
== mode
? 0xff : 0xffff;
601 int imax
= QImode
== mode
? 0x7f : 0x7fff;
602 int imin
= -imax
- 1;
604 // Testing the case range and whether it fits into the range of the
605 // (un)signed mode. This test should actually always pass because it
606 // makes no sense to have case values outside the mode range. Notice
607 // that case labels which are unreachable because they are outside the
608 // mode of the switch value (e.g. "case -1" for uint8_t) have already
609 // been thrown away by the middle-end.
611 if (SIGN_EXTEND
== code
617 else if (ZERO_EXTEND
== code
619 && (unsigned) hig_idx
<= umax
)
626 fprintf (dump_file
, ";; Case ranges too big, giving up.\n\n");
630 // Do normalization of switch value $10 and out-of-bound check in its
631 // original mode instead of in SImode. Use a newly created pseudo.
632 // This will replace insns[1..2].
636 rtx_insn
*seq1
, *seq2
, *last1
, *last2
;
638 rtx reg
= copy_to_mode_reg (mode
, xop
[10]);
640 rtx (*gen_add
)(rtx
,rtx
,rtx
) = QImode
== mode
? gen_addqi3
: gen_addhi3
;
641 rtx (*gen_cbranch
)(rtx
,rtx
,rtx
,rtx
)
642 = QImode
== mode
? gen_cbranchqi4
: gen_cbranchhi4
;
644 emit_insn (gen_add (reg
, reg
, gen_int_mode (-low_idx
, mode
)));
645 rtx op0
= reg
; rtx op1
= gen_int_mode (num_idx
, mode
);
646 rtx labelref
= copy_rtx (xop
[4]);
647 rtx xbranch
= gen_cbranch (gen_rtx_fmt_ee (GTU
, VOIDmode
, op0
, op1
),
649 rtx_insn
*cbranch
= emit_jump_insn (xbranch
);
650 JUMP_LABEL (cbranch
) = xop
[4];
651 ++LABEL_NUSES (xop
[4]);
654 last1
= get_last_insn();
657 emit_insn_after (seq1
, insns
[2]);
659 // After the out-of-bounds test and corresponding branch, use a
660 // 16-bit index. If QImode is used, extend it to HImode first.
661 // This will replace insns[4].
666 reg
= force_reg (HImode
, gen_rtx_fmt_e (code
, HImode
, reg
));
668 rtx pat_4
= AVR_3_BYTE_PC
669 ? gen_movhi (xop
[7], reg
)
670 : gen_addhi3 (xop
[7], reg
, gen_rtx_LABEL_REF (VOIDmode
, xop
[3]));
675 last2
= get_last_insn();
678 emit_insn_after (seq2
, insns
[3]);
682 fprintf (dump_file
, ";; New insns: ");
684 for (rtx_insn
*insn
= seq1
; ; insn
= NEXT_INSN (insn
))
686 fprintf (dump_file
, "%d, ", INSN_UID (insn
));
690 for (rtx_insn
*insn
= seq2
; ; insn
= NEXT_INSN (insn
))
692 fprintf (dump_file
, "%d%s", INSN_UID (insn
),
693 insn
== last2
? ".\n\n" : ", ");
698 fprintf (dump_file
, ";; Deleting insns: %d, %d, %d.\n\n",
699 INSN_UID (insns
[1]), INSN_UID (insns
[2]), INSN_UID (insns
[3]));
702 // Pseudodelete the SImode and subreg of SImode insns. We don't care
703 // about the extension insns[0]: Its result is now unused and other
704 // passes will clean it up.
706 SET_INSN_DELETED (insns
[1]);
707 SET_INSN_DELETED (insns
[2]);
708 SET_INSN_DELETED (insns
[3]);
713 avr_pass_casesi::avr_rest_of_handle_casesi (function
*func
)
717 FOR_EACH_BB_FN (bb
, func
)
719 rtx_insn
*insn
, *insns
[5];
721 FOR_BB_INSNS (bb
, insn
)
723 if (avr_is_casesi_sequence (bb
, insn
, insns
))
725 avr_optimize_casesi (insns
, recog_data
.operand
);
732 /* A helper for the next method. Suppose we have two conditional branches
734 if (reg <cond1> xval1) goto label1;
735 if (reg <cond2> xval2) goto label2;
737 If the second comparison is redundant and there is a code <cond> such
738 that the sequence can be performed as
740 REG_CC = compare (reg, xval1);
741 if (REG_CC <cond1> 0) goto label1;
742 if (REG_CC <cond> 0) goto label2;
744 then return <cond>. Otherwise, return UNKNOWN.
745 xval1 and xval2 are CONST_INT, and mode is the scalar int mode in which
746 the comparison will be carried out. reverse_cond1 can be set to reverse
747 condition cond1. This is useful if the second comparison does not follow
748 the first one, but is located after label1 like in:
750 if (reg <cond1> xval1) goto label1;
753 if (reg <cond2> xval2) goto label2; */
756 avr_redundant_compare (enum rtx_code cond1
, rtx xval1
,
757 enum rtx_code cond2
, rtx xval2
,
758 machine_mode mode
, bool reverse_cond1
)
760 HOST_WIDE_INT ival1
= INTVAL (xval1
);
761 HOST_WIDE_INT ival2
= INTVAL (xval2
);
763 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
764 unsigned HOST_WIDE_INT uval1
= mask
& UINTVAL (xval1
);
765 unsigned HOST_WIDE_INT uval2
= mask
& UINTVAL (xval2
);
768 cond1
= reverse_condition (cond1
);
772 ////////////////////////////////////////////////
774 // if (reg == val) goto label1;
775 // if (reg > val) goto label2;
776 // can be re-written using the same, simple comparison like in:
777 // REG_CC = compare (reg, val)
778 // if (REG_CC == 0) goto label1;
779 // if (REG_CC >= 0) goto label2;
781 && (cond2
== GT
|| cond2
== GTU
))
782 return avr_normalize_condition (cond2
);
784 // Similar, but the input sequence is like
785 // if (reg == val) goto label1;
786 // if (reg >= val) goto label2;
788 && (cond2
== GE
|| cond2
== GEU
))
791 // Similar, but the input sequence is like
792 // if (reg == val) goto label1;
793 // if (reg >= val + 1) goto label2;
794 if ((cond2
== GE
&& ival2
== 1 + ival1
)
795 || (cond2
== GEU
&& uval2
== 1 + uval1
))
798 // Similar, but the input sequence is like
799 // if (reg == val) goto label1;
800 // if (reg > val - 1) goto label2;
801 if ((cond2
== GT
&& ival2
== ival1
- 1)
802 || (cond2
== GTU
&& uval2
== uval1
- 1))
803 return avr_normalize_condition (cond2
);
805 /////////////////////////////////////////////////////////
807 // if (reg == val) goto label1;
808 // if (reg < 1 + val) goto label2;
809 // can be re-written as
810 // REG_CC = compare (reg, val)
811 // if (REG_CC == 0) goto label1;
812 // if (REG_CC < 0) goto label2;
813 if ((cond2
== LT
&& ival2
== 1 + ival1
)
814 || (cond2
== LTU
&& uval2
== 1 + uval1
))
817 // Similar, but with an input sequence like
818 // if (reg == val) goto label1;
819 // if (reg <= val) goto label2;
821 && (cond2
== LE
|| cond2
== LEU
))
822 return avr_normalize_condition (cond2
);
824 // Similar, but with an input sequence like
825 // if (reg == val) goto label1;
826 // if (reg < val) goto label2;
828 && (cond2
== LT
|| cond2
== LTU
))
831 // Similar, but with an input sequence like
832 // if (reg == val) goto label1;
833 // if (reg <= val - 1) goto label2;
834 if ((cond2
== LE
&& ival2
== ival1
- 1)
835 || (cond2
== LEU
&& uval2
== uval1
- 1))
836 return avr_normalize_condition (cond2
);
844 /* If-else decision trees generated for switch / case may produce sequences
847 SREG = compare (reg, val);
848 if (SREG == 0) goto label1;
849 SREG = compare (reg, 1 + val);
850 if (SREG >= 0) goto label2;
852 which can be optimized to
854 SREG = compare (reg, val);
855 if (SREG == 0) goto label1;
856 if (SREG >= 0) goto label2;
858 The optimal place for such a pass would be directly after expand, but
859 it's not possible for a jump insn to target more than one code label.
860 Hence, run a mini pass right before split2 which introduces REG_CC. */
863 avr_pass_ifelse::avr_rest_of_handle_ifelse (function
*)
867 for (rtx_insn
*insn
= get_insns(); insn
; insn
= next_insn
)
869 next_insn
= next_nonnote_nondebug_insn (insn
);
874 // Search for two cbranch insns. The first one is a cbranch.
875 // Filter for "cbranch<mode>4_insn" with mode in QI, HI, PSI, SI.
880 int icode1
= recog_memoized (insn
);
882 if (icode1
!= CODE_FOR_cbranchqi4_insn
883 && icode1
!= CODE_FOR_cbranchhi4_insn
884 && icode1
!= CODE_FOR_cbranchpsi4_insn
885 && icode1
!= CODE_FOR_cbranchsi4_insn
)
888 rtx_jump_insn
*insn1
= as_a
<rtx_jump_insn
*> (insn
);
889 rtx_jump_insn
*insn2
= nullptr;
890 bool follow_label1
= false;
892 // Extract the operands of the first insn:
893 // $0 = comparison operator ($1, $2)
895 // $2 = reg or const_int
897 // $4 = optional SCRATCH for HI, PSI, SI cases.
899 const auto &op
= recog_data
.operand
;
901 extract_insn (insn1
);
902 rtx xop1
[5] = { op
[0], op
[1], op
[2], op
[3], op
[4] };
903 int n_operands
= recog_data
.n_operands
;
905 // For now, we can optimize cbranches that follow an EQ cbranch,
906 // and cbranches that follow the label of a NE cbranch.
908 if (GET_CODE (xop1
[0]) == EQ
909 && JUMP_P (next_insn
)
910 && recog_memoized (next_insn
) == icode1
)
912 // The 2nd cbranch insn follows insn1, i.e. is located in the
913 // fallthrough path of insn1.
915 insn2
= as_a
<rtx_jump_insn
*> (next_insn
);
917 else if (GET_CODE (xop1
[0]) == NE
)
919 // insn1 might branch to a label followed by a cbranch.
921 rtx target1
= JUMP_LABEL (insn1
);
922 rtx_insn
*code_label1
= JUMP_LABEL_AS_INSN (insn1
);
923 rtx_insn
*next
= next_nonnote_nondebug_insn (code_label1
);
924 rtx_insn
*barrier
= prev_nonnote_nondebug_insn (code_label1
);
926 if (// Target label of insn1 is used exactly once and
927 // is not a fallthru, i.e. is preceded by a barrier.
928 LABEL_NUSES (target1
) == 1
930 && BARRIER_P (barrier
)
931 // Following the target label is a cbranch of the same kind.
934 && recog_memoized (next
) == icode1
)
936 follow_label1
= true;
937 insn2
= as_a
<rtx_jump_insn
*> (next
);
944 // Also extract operands of insn2, and filter for REG + CONST_INT
945 comparisons against the same register.
947 extract_insn (insn2
);
948 rtx xop2
[5] = { op
[0], op
[1], op
[2], op
[3], op
[4] };
950 if (! rtx_equal_p (xop1
[1], xop2
[1])
951 || ! CONST_INT_P (xop1
[2])
952 || ! CONST_INT_P (xop2
[2]))
955 machine_mode mode
= GET_MODE (xop1
[1]);
956 enum rtx_code code1
= GET_CODE (xop1
[0]);
957 enum rtx_code code2
= GET_CODE (xop2
[0]);
959 code2
= avr_redundant_compare (code1
, xop1
[2], code2
, xop2
[2],
960 mode
, follow_label1
);
961 if (code2
== UNKNOWN
)
964 //////////////////////////////////////////////////////
965 // Found a replacement.
969 fprintf (dump_file
, "\n;; Found chain of jump_insn %d and"
970 " jump_insn %d, follow_label1=%d:\n",
971 INSN_UID (insn1
), INSN_UID (insn2
), follow_label1
);
972 print_rtl_single (dump_file
, PATTERN (insn1
));
973 print_rtl_single (dump_file
, PATTERN (insn2
));
977 next_insn
= next_nonnote_nondebug_insn (insn2
);
979 // Pop the new branch conditions and the new comparison.
980 // Prematurely split into compare + branch so that we can drop
981 // the 2nd comparison. The following pass, split2, splits all
982 // insns for REG_CC, and it should still work as usual even when
983 // there are already some REG_CC insns around.
985 rtx xcond1
= gen_rtx_fmt_ee (code1
, VOIDmode
, cc_reg_rtx
, const0_rtx
);
986 rtx xcond2
= gen_rtx_fmt_ee (code2
, VOIDmode
, cc_reg_rtx
, const0_rtx
);
987 rtx xpat1
= gen_branch (xop1
[3], xcond1
);
988 rtx xpat2
= gen_branch (xop2
[3], xcond2
);
989 rtx xcompare
= NULL_RTX
;
993 gcc_assert (n_operands
== 4);
994 xcompare
= gen_cmpqi3 (xop1
[1], xop1
[2]);
998 gcc_assert (n_operands
== 5);
999 rtx (*gen_cmp
)(rtx
,rtx
,rtx
)
1000 = mode
== HImode
? gen_gen_comparehi
1001 : mode
== PSImode
? gen_gen_comparepsi
1002 : gen_gen_comparesi
; // SImode
1003 xcompare
= gen_cmp (xop1
[1], xop1
[2], xop1
[4]);
1008 rtx_insn
*cmp
= emit_insn_before (xcompare
, insn1
);
1009 rtx_jump_insn
*branch1
= emit_jump_insn_before (xpat1
, insn1
);
1010 rtx_jump_insn
*branch2
= emit_jump_insn_before (xpat2
, insn2
);
1012 JUMP_LABEL (branch1
) = xop1
[3];
1013 JUMP_LABEL (branch2
) = xop2
[3];
1014 // delete_insn() decrements LABEL_NUSES when deleting a JUMP_INSN, but
1015 // when we pop a new JUMP_INSN, do it by hand.
1016 ++LABEL_NUSES (xop1
[3]);
1017 ++LABEL_NUSES (xop2
[3]);
1019 delete_insn (insn1
);
1020 delete_insn (insn2
);
1022 // As a side effect, also recog the new insns.
1023 gcc_assert (valid_insn_p (cmp
));
1024 gcc_assert (valid_insn_p (branch1
));
1025 gcc_assert (valid_insn_p (branch2
));
1030 /* Set `avr_arch' as specified by `-mmcu='.
1031 Return true on success. */
1034 avr_set_core_architecture (void)
1036 /* Search for mcu core architecture. */
1039 avr_mmcu
= AVR_MMCU_DEFAULT
;
1041 avr_arch
= &avr_arch_types
[0];
1043 for (const avr_mcu_t
*mcu
= avr_mcu_types
; ; mcu
++)
1045 if (mcu
->name
== NULL
)
1047 /* Reached the end of `avr_mcu_types'. This should actually never
1048 happen as options are provided by device-specs. It could be a
1049 typo in a device-specs or calling the compiler proper directly
1050 with -mmcu=<device>. */
1052 error ("unknown core architecture %qs specified with %qs",
1053 avr_mmcu
, "-mmcu=");
1054 avr_inform_core_architectures ();
1057 else if (strcmp (mcu
->name
, avr_mmcu
) == 0
1058 // Is this a proper architecture ?
1059 && mcu
->macro
== NULL
)
1061 avr_arch
= &avr_arch_types
[mcu
->arch_id
];
1062 if (avr_n_flash
< 0)
1063 avr_n_flash
= 1 + (mcu
->flash_size
- 1) / 0x10000;
1073 /* Implement `TARGET_OPTION_OVERRIDE'. */
1076 avr_option_override (void)
1078 /* caller-save.cc looks for call-clobbered hard registers that are assigned
1079 to pseudos that cross calls and tries so save-restore them around calls
1080 in order to reduce the number of stack slots needed.
1082 This might lead to situations where reload is no more able to cope
1083 with the challenge of AVR's very few address registers and fails to
1084 perform the requested spills. */
1087 flag_caller_saves
= 0;
1089 /* Unwind tables currently require a frame pointer for correctness,
1090 see toplev.cc:process_options(). */
1092 if ((flag_unwind_tables
1093 || flag_non_call_exceptions
1094 || flag_asynchronous_unwind_tables
)
1095 && !ACCUMULATE_OUTGOING_ARGS
)
1097 flag_omit_frame_pointer
= 0;
1100 /* Disable flag_delete_null_pointer_checks if zero is a valid address. */
1101 if (targetm
.addr_space
.zero_address_valid (ADDR_SPACE_GENERIC
))
1102 flag_delete_null_pointer_checks
= 0;
1105 warning (OPT_fpic
, "%<-fpic%> is not supported");
1107 warning (OPT_fPIC
, "%<-fPIC%> is not supported");
1109 warning (OPT_fpie
, "%<-fpie%> is not supported");
1111 warning (OPT_fPIE
, "%<-fPIE%> is not supported");
1113 #if !defined (HAVE_AS_AVR_MGCCISR_OPTION)
1114 avr_gasisr_prologues
= 0;
1117 if (!avr_set_core_architecture())
1120 /* Should be set by avr-common.cc */
1121 gcc_assert (avr_long_double
>= avr_double
&& avr_double
>= 32);
1123 /* RAM addresses of some SFRs common to all devices in respective arch. */
1125 /* SREG: Status Register containing flags like I (global IRQ) */
1126 avr_addr
.sreg
= 0x3F + avr_arch
->sfr_offset
;
1128 /* RAMPZ: Address' high part when loading via ELPM */
1129 avr_addr
.rampz
= 0x3B + avr_arch
->sfr_offset
;
1131 avr_addr
.rampy
= 0x3A + avr_arch
->sfr_offset
;
1132 avr_addr
.rampx
= 0x39 + avr_arch
->sfr_offset
;
1133 avr_addr
.rampd
= 0x38 + avr_arch
->sfr_offset
;
1134 avr_addr
.ccp
= (AVR_TINY
? 0x3C : 0x34) + avr_arch
->sfr_offset
;
1136 /* SP: Stack Pointer (SP_H:SP_L) */
1137 avr_addr
.sp_l
= 0x3D + avr_arch
->sfr_offset
;
1138 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
1140 init_machine_status
= avr_init_machine_status
;
1142 avr_log_set_avr_log();
1145 /* Function to set up the backend function structure. */
1147 static struct machine_function
*
1148 avr_init_machine_status (void)
1150 return ggc_cleared_alloc
<machine_function
> ();
1154 /* Implement `INIT_EXPANDERS'. */
1155 /* The function works like a singleton. */
1158 avr_init_expanders (void)
1160 for (int regno
= 0; regno
< 32; regno
++)
1161 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
1163 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
1164 tmp_reg_rtx
= all_regs_rtx
[AVR_TMP_REGNO
];
1165 zero_reg_rtx
= all_regs_rtx
[AVR_ZERO_REGNO
];
1167 cc_reg_rtx
= gen_rtx_REG (CCmode
, REG_CC
);
1169 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
1171 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
1172 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
1173 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
1174 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
1175 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
1177 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
1178 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
1180 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
1183 avr_have_dimode
= false;
1187 /* Implement `REGNO_REG_CLASS'. */
1188 /* Return register class for register R. */
1191 avr_regno_reg_class (int r
)
1193 static const enum reg_class reg_class_tab
[] =
1197 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
1198 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
1199 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
1200 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
1202 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
1203 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
1205 ADDW_REGS
, ADDW_REGS
,
1207 POINTER_X_REGS
, POINTER_X_REGS
,
1209 POINTER_Y_REGS
, POINTER_Y_REGS
,
1211 POINTER_Z_REGS
, POINTER_Z_REGS
,
1213 STACK_REG
, STACK_REG
1217 return reg_class_tab
[r
];
1226 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
1229 avr_scalar_mode_supported_p (scalar_mode mode
)
1231 if (ALL_FIXED_POINT_MODE_P (mode
))
1234 if (PSImode
== mode
)
1237 return default_scalar_mode_supported_p (mode
);
1241 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
1244 avr_decl_flash_p (tree decl
)
1246 if (TREE_CODE (decl
) != VAR_DECL
1247 || TREE_TYPE (decl
) == error_mark_node
)
1252 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
1256 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
1257 address space and FALSE, otherwise. */
1260 avr_decl_memx_p (tree decl
)
1262 if (TREE_CODE (decl
) != VAR_DECL
1263 || TREE_TYPE (decl
) == error_mark_node
)
1268 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
1272 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
1275 avr_mem_flash_p (rtx x
)
1278 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
1282 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
1283 address space and FALSE, otherwise. */
1286 avr_mem_memx_p (rtx x
)
1289 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
1293 /* A helper for the subsequent function attribute used to dig for
1294 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
1297 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
1299 if (FUNCTION_DECL
== TREE_CODE (func
))
1301 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
1306 func
= TREE_TYPE (func
);
1309 gcc_assert (FUNC_OR_METHOD_TYPE_P (func
));
1311 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
1314 /* Return nonzero if FUNC is a naked function. */
1317 avr_naked_function_p (tree func
)
1319 return avr_lookup_function_attribute1 (func
, "naked");
1322 /* Return nonzero if FUNC is an interrupt function as specified
1323 by the "interrupt" attribute. */
1326 avr_interrupt_function_p (tree func
)
1328 return avr_lookup_function_attribute1 (func
, "interrupt");
1331 /* Return nonzero if FUNC is a signal function as specified
1332 by the "signal" attribute. */
1335 avr_signal_function_p (tree func
)
1337 return avr_lookup_function_attribute1 (func
, "signal");
1340 /* Return nonzero if FUNC is an OS_task function. */
1343 avr_OS_task_function_p (tree func
)
1345 return avr_lookup_function_attribute1 (func
, "OS_task");
1348 /* Return nonzero if FUNC is an OS_main function. */
1351 avr_OS_main_function_p (tree func
)
1353 return avr_lookup_function_attribute1 (func
, "OS_main");
1357 /* Return nonzero if FUNC is a no_gccisr function as specified
1358 by the "no_gccisr" attribute. */
1361 avr_no_gccisr_function_p (tree func
)
1363 return avr_lookup_function_attribute1 (func
, "no_gccisr");
1367 /* Implement `TARGET_CAN_INLINE_P'. */
1368 /* Some options like -mgas_isr_prologues depend on optimization level,
1369 and the inliner might think that due to different options, inlining
1370 is not permitted; see PR104327. */
1373 avr_can_inline_p (tree
/* caller */, tree
/* callee */)
1375 // No restrictions whatsoever.
1379 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
1380 /* Sanity cheching for above function attributes. */
1383 avr_set_current_function (tree decl
)
1385 if (decl
== NULL_TREE
1386 || current_function_decl
== NULL_TREE
1387 || current_function_decl
== error_mark_node
1389 || cfun
->machine
->attributes_checked_p
)
1392 location_t loc
= DECL_SOURCE_LOCATION (decl
);
1394 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
1395 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
1396 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
1397 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
1398 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
1399 cfun
->machine
->is_no_gccisr
= avr_no_gccisr_function_p (decl
);
1401 const char *isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
1403 /* Too much attributes make no sense as they request conflicting features. */
1405 if (cfun
->machine
->is_OS_task
1406 && (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
))
1407 error_at (loc
, "function attributes %qs and %qs are mutually exclusive",
1410 if (cfun
->machine
->is_OS_main
1411 && (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
))
1412 error_at (loc
, "function attributes %qs and %qs are mutually exclusive",
1415 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1417 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
1418 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
1421 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
1422 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
1423 : IDENTIFIER_POINTER (DECL_NAME (decl
));
1425 /* Skip a leading '*' that might still prefix the assembler name,
1426 e.g. in non-LTO runs. */
1428 name
= default_strip_name_encoding (name
);
1430 /* Interrupt handlers must be void __vector (void) functions. */
1432 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
1433 error_at (loc
, "%qs function cannot have arguments", isr
);
1435 if (TREE_CODE (ret
) != VOID_TYPE
)
1436 error_at (loc
, "%qs function cannot return a value", isr
);
1438 #if defined WITH_AVRLIBC
1439 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
1440 using this when it switched from SIGNAL and INTERRUPT to ISR. */
1442 if (cfun
->machine
->is_interrupt
)
1443 cfun
->machine
->is_signal
= 0;
1445 /* If the function has the 'signal' or 'interrupt' attribute, ensure
1446 that the name of the function is "__vector_NN" so as to catch
1447 when the user misspells the vector name. */
1449 if (!startswith (name
, "__vector"))
1450 warning_at (loc
, OPT_Wmisspelled_isr
, "%qs appears to be a misspelled "
1451 "%qs handler, missing %<__vector%> prefix", name
, isr
);
1452 #endif // AVR-LibC naming conventions
1455 #if defined WITH_AVRLIBC
1456 // Common problem is using "ISR" without first including avr/interrupt.h.
1457 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
1458 name
= default_strip_name_encoding (name
);
1459 if (strcmp ("ISR", name
) == 0
1460 || strcmp ("INTERRUPT", name
) == 0
1461 || strcmp ("SIGNAL", name
) == 0)
1463 warning_at (loc
, OPT_Wmisspelled_isr
, "%qs is a reserved identifier"
1464 " in AVR-LibC. Consider %<#include <avr/interrupt.h>%>"
1465 " before using the %qs macro", name
, name
);
1467 #endif // AVR-LibC naming conventions
1469 /* Don't print the above diagnostics more than once. */
1471 cfun
->machine
->attributes_checked_p
= 1;
1475 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
1478 avr_accumulate_outgoing_args (void)
1481 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
1483 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1484 what offset is correct. In some cases it is relative to
1485 virtual_outgoing_args_rtx and in others it is relative to
1486 virtual_stack_vars_rtx. For example code see
1487 gcc.c-torture/execute/built-in-setjmp.c
1488 gcc.c-torture/execute/builtins/sprintf-chk.c */
1490 return (TARGET_ACCUMULATE_OUTGOING_ARGS
1491 && !(cfun
->calls_setjmp
1492 || cfun
->has_nonlocal_label
));
1496 /* Report contribution of accumulated outgoing arguments to stack size. */
1499 avr_outgoing_args_size (void)
1501 return (ACCUMULATE_OUTGOING_ARGS
1502 ? (HOST_WIDE_INT
) crtl
->outgoing_args_size
: 0);
1506 /* Implement TARGET_STARTING_FRAME_OFFSET. */
1507 /* This is the offset from the frame pointer register to the first stack slot
1508 that contains a variable living in the frame. */
1510 static HOST_WIDE_INT
1511 avr_starting_frame_offset (void)
1513 return 1 + avr_outgoing_args_size ();
1517 /* Return the number of hard registers to push/pop in the prologue/epilogue
1518 of the current function, and optionally store these registers in SET. */
1521 avr_regs_to_save (HARD_REG_SET
*set
)
1524 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1527 CLEAR_HARD_REG_SET (*set
);
1530 /* No need to save any registers if the function never returns or
1531 has the "OS_task" or "OS_main" attribute. */
1533 if (TREE_THIS_VOLATILE (current_function_decl
)
1534 || cfun
->machine
->is_OS_task
1535 || cfun
->machine
->is_OS_main
)
1538 for (int reg
= 0; reg
< 32; reg
++)
1540 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
1541 any global register variables. */
1543 if (fixed_regs
[reg
])
1546 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_or_fixed_reg_p (reg
))
1547 || (df_regs_ever_live_p (reg
)
1548 && (int_or_sig_p
|| !call_used_or_fixed_reg_p (reg
))
1549 /* Don't record frame pointer registers here. They are treated
1550 indivitually in prologue. */
1551 && !(frame_pointer_needed
1552 && (reg
== REG_Y
|| reg
== REG_Y
+ 1))))
1555 SET_HARD_REG_BIT (*set
, reg
);
1563 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1566 avr_allocate_stack_slots_for_args (void)
1568 return !cfun
->machine
->is_naked
;
1572 /* Return true if register FROM can be eliminated via register TO. */
1575 avr_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1577 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
1578 || !frame_pointer_needed
);
1582 /* Implement `TARGET_WARN_FUNC_RETURN'. */
1585 avr_warn_func_return (tree decl
)
1587 /* Naked functions are implemented entirely in assembly, including the
1588 return sequence, so suppress warnings about this. */
1590 return !avr_naked_function_p (decl
);
1593 /* Compute offset between arg_pointer and frame_pointer. */
1596 avr_initial_elimination_offset (int from
, int to
)
1598 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1602 int offset
= frame_pointer_needed
? 2 : 0;
1603 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
1605 // If FROM is ARG_POINTER_REGNUM, we are not in an ISR as ISRs
1606 // might not have arguments. Hence the following is not affected
1607 // by gasisr prologues.
1608 offset
+= avr_regs_to_save (NULL
);
1609 return (get_frame_size () + avr_outgoing_args_size()
1610 + avr_pc_size
+ 1 + offset
);
1615 /* Helper for the function below. */
1618 avr_adjust_type_node (tree
*node
, machine_mode mode
, int sat_p
)
1620 *node
= make_node (FIXED_POINT_TYPE
);
1621 TYPE_SATURATING (*node
) = sat_p
;
1622 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
1623 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
1624 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
1625 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
1626 SET_TYPE_ALIGN (*node
, 8);
1627 SET_TYPE_MODE (*node
, mode
);
1629 layout_type (*node
);
1633 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
1636 avr_build_builtin_va_list (void)
1638 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
1639 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
1640 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
1641 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
1642 to the long long accum modes instead of the desired [U]TAmode.
1644 Fix this now, right after node setup in tree.cc:build_common_tree_nodes().
1645 This must run before c-cppbuiltin.cc:builtin_define_fixed_point_constants()
1646 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
1647 libgcc to detect IBIT and FBIT. */
1649 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
1650 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
1651 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
1652 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
1654 unsigned_long_long_accum_type_node
= uta_type_node
;
1655 long_long_accum_type_node
= ta_type_node
;
1656 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
1657 sat_long_long_accum_type_node
= sat_ta_type_node
;
1659 /* Dispatch to the default handler. */
1661 return std_build_builtin_va_list ();
1665 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
1666 This is return address of function. */
1669 avr_return_addr_rtx (int count
, rtx tem
)
1673 /* Can only return this function's return address. Others not supported. */
1679 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
1680 warning (0, "%<builtin_return_address%> contains only 2 bytes"
1684 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
1686 cfun
->machine
->use_L__stack_usage
= 1;
1688 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
1689 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
1690 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
1694 /* Return 1 if the function epilogue is just a single "ret". */
1697 avr_simple_epilogue (void)
1699 return (! frame_pointer_needed
1700 && get_frame_size () == 0
1701 && avr_outgoing_args_size() == 0
1702 && avr_regs_to_save (NULL
) == 0
1703 && ! cfun
->machine
->is_interrupt
1704 && ! cfun
->machine
->is_signal
1705 && ! cfun
->machine
->is_naked
1706 && ! TREE_THIS_VOLATILE (current_function_decl
));
1709 /* This function checks sequence of live registers. */
1712 sequent_regs_live (void)
1717 for (int reg
= 0; reg
<= LAST_CALLEE_SAVED_REG
; ++reg
)
1719 if (fixed_regs
[reg
])
1721 /* Don't recognize sequences that contain global register
1730 if (!call_used_or_fixed_reg_p (reg
))
1732 if (df_regs_ever_live_p (reg
))
1742 if (!frame_pointer_needed
)
1744 if (df_regs_ever_live_p (REG_Y
))
1752 if (df_regs_ever_live_p (REG_Y
+ 1))
1765 return (cur_seq
== live_seq
) ? live_seq
: 0;
1769 static const pass_data avr_pass_data_pre_proep
=
1772 "", // name (will be patched)
1773 OPTGROUP_NONE
, // optinfo_flags
1774 TV_DF_SCAN
, // tv_id
1775 0, // properties_required
1776 0, // properties_provided
1777 0, // properties_destroyed
1778 0, // todo_flags_start
1779 0 // todo_flags_finish
1783 class avr_pass_pre_proep
: public rtl_opt_pass
1786 avr_pass_pre_proep (gcc::context
*ctxt
, const char *name
)
1787 : rtl_opt_pass (avr_pass_data_pre_proep
, ctxt
)
1792 void compute_maybe_gasisr (function
*);
1794 virtual unsigned int execute (function
*fun
)
1796 if (avr_gasisr_prologues
1797 // Whether this function is an ISR worth scanning at all.
1798 && !fun
->machine
->is_no_gccisr
1799 && (fun
->machine
->is_interrupt
1800 || fun
->machine
->is_signal
)
1801 && !cfun
->machine
->is_naked
1802 // Paranoia: Non-local gotos and labels that might escape.
1803 && !cfun
->calls_setjmp
1804 && !cfun
->has_nonlocal_label
1805 && !cfun
->has_forced_label_in_static
)
1807 compute_maybe_gasisr (fun
);
1813 }; // avr_pass_pre_proep
1818 make_avr_pass_pre_proep (gcc::context
*ctxt
)
1820 return new avr_pass_pre_proep (ctxt
, "avr-pre-proep");
1824 /* Set fun->machine->gasisr.maybe provided we don't find anything that
1825 prohibits GAS generating parts of ISR prologues / epilogues for us. */
1828 avr_pass_pre_proep::compute_maybe_gasisr (function
*fun
)
1830 // Don't use BB iterators so that we see JUMP_TABLE_DATA.
1832 for (rtx_insn
*insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1834 // Transparent calls always use [R]CALL and are filtered out by GAS.
1835 // ISRs don't use -mcall-prologues, hence what remains to be filtered
1836 // out are open coded (tail) calls.
1841 // __tablejump2__ clobbers something and is targeted by JMP so
1842 // that GAS won't see its usage.
1844 if (AVR_HAVE_JMP_CALL
1845 && JUMP_TABLE_DATA_P (insn
))
1848 // Non-local gotos not seen in *FUN.
1851 && find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
1855 fun
->machine
->gasisr
.maybe
= 1;
1859 /* Obtain the length sequence of insns. */
1862 get_sequence_length (rtx_insn
*insns
)
1866 for (rtx_insn
*insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1867 length
+= get_attr_length (insn
);
1873 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1876 avr_incoming_return_addr_rtx (void)
1878 /* The return address is at the top of the stack. Note that the push
1879 was via post-decrement, which means the actual address is off by one. */
1880 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
1884 /* Unset a bit in *SET. If successful, return the respective bit number.
1885 Otherwise, return -1 and *SET is unaltered. */
1888 avr_hregs_split_reg (HARD_REG_SET
*set
)
1890 for (int regno
= 0; regno
< 32; regno
++)
1891 if (TEST_HARD_REG_BIT (*set
, regno
))
1893 // Don't remove a register from *SET which might indicate that
1894 // some RAMP* register might need ISR prologue / epilogue treatment.
1897 && (REG_X
== regno
|| REG_X
+ 1 == regno
)
1898 && TEST_HARD_REG_BIT (*set
, REG_X
)
1899 && TEST_HARD_REG_BIT (*set
, REG_X
+ 1))
1903 && !frame_pointer_needed
1904 && (REG_Y
== regno
|| REG_Y
+ 1 == regno
)
1905 && TEST_HARD_REG_BIT (*set
, REG_Y
)
1906 && TEST_HARD_REG_BIT (*set
, REG_Y
+ 1))
1910 && (REG_Z
== regno
|| REG_Z
+ 1 == regno
)
1911 && TEST_HARD_REG_BIT (*set
, REG_Z
)
1912 && TEST_HARD_REG_BIT (*set
, REG_Z
+ 1))
1915 CLEAR_HARD_REG_BIT (*set
, regno
);
1923 /* Helper for expand_prologue. Emit a push of a byte register. */
1926 emit_push_byte (unsigned regno
, bool frame_related_p
)
1931 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
1932 mem
= gen_frame_mem (QImode
, mem
);
1933 reg
= gen_rtx_REG (QImode
, regno
);
1935 insn
= emit_insn (gen_rtx_SET (mem
, reg
));
1936 if (frame_related_p
)
1937 RTX_FRAME_RELATED_P (insn
) = 1;
1939 cfun
->machine
->stack_usage
++;
1943 /* Helper for expand_prologue. Emit a push of a SFR via register TREG.
1944 SFR is a MEM representing the memory location of the SFR.
1945 If CLR_P then clear the SFR after the push using zero_reg. */
1948 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
, int treg
)
1952 gcc_assert (MEM_P (sfr
));
1954 /* IN treg, IO(SFR) */
1955 insn
= emit_move_insn (all_regs_rtx
[treg
], sfr
);
1956 if (frame_related_p
)
1957 RTX_FRAME_RELATED_P (insn
) = 1;
1960 emit_push_byte (treg
, frame_related_p
);
1964 /* OUT IO(SFR), __zero_reg__ */
1965 insn
= emit_move_insn (sfr
, const0_rtx
);
1966 if (frame_related_p
)
1967 RTX_FRAME_RELATED_P (insn
) = 1;
1972 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1975 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1976 int live_seq
= sequent_regs_live ();
1978 HOST_WIDE_INT size_max
1979 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1981 bool minimize
= (TARGET_CALL_PROLOGUES
1985 && !cfun
->machine
->is_OS_task
1986 && !cfun
->machine
->is_OS_main
1990 && (frame_pointer_needed
1991 || avr_outgoing_args_size() > 8
1992 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1996 int first_reg
, reg
, offset
;
1998 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1999 gen_int_mode (size
, HImode
));
2001 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
2002 gen_int_mode (live_seq
+size
, HImode
));
2003 insn
= emit_insn (pattern
);
2004 RTX_FRAME_RELATED_P (insn
) = 1;
2006 /* Describe the effect of the unspec_volatile call to prologue_saves.
2007 Note that this formulation assumes that add_reg_note pushes the
2008 notes to the front. Thus we build them in the reverse order of
2009 how we want dwarf2out to process them. */
2011 /* The function does always set frame_pointer_rtx, but whether that
2012 is going to be permanent in the function is frame_pointer_needed. */
2014 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
2015 gen_rtx_SET ((frame_pointer_needed
2017 : stack_pointer_rtx
),
2018 plus_constant (Pmode
, stack_pointer_rtx
,
2019 -(size
+ live_seq
))));
2021 /* Note that live_seq always contains r28+r29, but the other
2022 registers to be saved are all below 18. */
2024 first_reg
= (LAST_CALLEE_SAVED_REG
+ 1) - (live_seq
- 2);
2026 for (reg
= 29, offset
= -live_seq
+ 1;
2028 reg
= (reg
== 28 ? LAST_CALLEE_SAVED_REG
: reg
- 1), ++offset
)
2032 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
2034 r
= gen_rtx_REG (QImode
, reg
);
2035 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (m
, r
));
2038 cfun
->machine
->stack_usage
+= size
+ live_seq
;
2040 else /* !minimize */
2042 for (int reg
= 0; reg
< 32; ++reg
)
2043 if (TEST_HARD_REG_BIT (set
, reg
))
2044 emit_push_byte (reg
, true);
2046 if (frame_pointer_needed
2047 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
2049 /* Push frame pointer. Always be consistent about the
2050 ordering of pushes -- epilogue_restores expects the
2051 register pair to be pushed low byte first. */
2053 emit_push_byte (REG_Y
, true);
2054 emit_push_byte (REG_Y
+ 1, true);
2057 if (frame_pointer_needed
2060 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
2061 RTX_FRAME_RELATED_P (insn
) = 1;
2066 /* Creating a frame can be done by direct manipulation of the
2067 stack or via the frame pointer. These two methods are:
2074 the optimum method depends on function type, stack and
2075 frame size. To avoid a complex logic, both methods are
2076 tested and shortest is selected.
2078 There is also the case where SIZE != 0 and no frame pointer is
2079 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
2080 In that case, insn (*) is not needed in that case.
2081 We use the X register as scratch. This is save because in X
2083 In an interrupt routine, the case of SIZE != 0 together with
2084 !frame_pointer_needed can only occur if the function is not a
2085 leaf function and thus X has already been saved. */
2088 HOST_WIDE_INT size_cfa
= size
, neg_size
;
2089 rtx_insn
*fp_plus_insns
;
2092 gcc_assert (frame_pointer_needed
2096 fp
= my_fp
= (frame_pointer_needed
2098 : gen_rtx_REG (Pmode
, REG_X
));
2100 if (AVR_HAVE_8BIT_SP
)
2102 /* The high byte (r29) does not change:
2103 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
2105 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
2108 /* Cut down size and avoid size = 0 so that we don't run
2109 into ICE like PR52488 in the remainder. */
2111 if (size
> size_max
)
2113 /* Don't error so that insane code from newlib still compiles
2114 and does not break building newlib. As PR51345 is implemented
2115 now, there are multilib variants with -msp8.
2117 If user wants sanity checks he can use -Wstack-usage=
2120 For CFA we emit the original, non-saturated size so that
2121 the generic machinery is aware of the real stack usage and
2122 will print the above diagnostic as expected. */
2127 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
2128 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
2130 /************ Method 1: Adjust frame pointer ************/
2134 /* Normally, the dwarf2out frame-related-expr interpreter does
2135 not expect to have the CFA change once the frame pointer is
2136 set up. Thus, we avoid marking the move insn below and
2137 instead indicate that the entire operation is complete after
2138 the frame pointer subtraction is done. */
2140 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
2141 if (frame_pointer_needed
)
2143 RTX_FRAME_RELATED_P (insn
) = 1;
2144 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
2145 gen_rtx_SET (fp
, stack_pointer_rtx
));
2148 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
2151 if (frame_pointer_needed
)
2153 RTX_FRAME_RELATED_P (insn
) = 1;
2154 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
2155 gen_rtx_SET (fp
, plus_constant (Pmode
, fp
,
2159 /* Copy to stack pointer. Note that since we've already
2160 changed the CFA to the frame pointer this operation
2161 need not be annotated if frame pointer is needed.
2162 Always move through unspec, see PR50063.
2163 For meaning of irq_state see movhi_sp_r insn. */
2165 if (cfun
->machine
->is_interrupt
)
2168 if (TARGET_NO_INTERRUPTS
2169 || cfun
->machine
->is_signal
2170 || cfun
->machine
->is_OS_main
)
2173 if (AVR_HAVE_8BIT_SP
)
2176 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
2177 fp
, GEN_INT (irq_state
)));
2178 if (!frame_pointer_needed
)
2180 RTX_FRAME_RELATED_P (insn
) = 1;
2181 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
2182 gen_rtx_SET (stack_pointer_rtx
,
2183 plus_constant (Pmode
,
2188 fp_plus_insns
= get_insns ();
2191 /************ Method 2: Adjust Stack pointer ************/
2193 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
2194 can only handle specific offsets. */
2196 int n_rcall
= size
/ (AVR_3_BYTE_PC
? 3 : 2);
2198 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
)
2199 // Don't use more than 3 RCALLs.
2202 rtx_insn
*sp_plus_insns
;
2206 insn
= emit_move_insn (stack_pointer_rtx
,
2207 plus_constant (Pmode
, stack_pointer_rtx
,
2209 RTX_FRAME_RELATED_P (insn
) = 1;
2210 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
2211 gen_rtx_SET (stack_pointer_rtx
,
2212 plus_constant (Pmode
,
2215 if (frame_pointer_needed
)
2217 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
2218 RTX_FRAME_RELATED_P (insn
) = 1;
2221 sp_plus_insns
= get_insns ();
2224 /************ Use shortest method ************/
2226 emit_insn (get_sequence_length (sp_plus_insns
)
2227 < get_sequence_length (fp_plus_insns
)
2233 emit_insn (fp_plus_insns
);
2236 cfun
->machine
->stack_usage
+= size_cfa
;
2237 } /* !minimize && size != 0 */
2242 /* Output function prologue. */
2245 avr_expand_prologue (void)
2250 size
= get_frame_size() + avr_outgoing_args_size();
2252 cfun
->machine
->stack_usage
= 0;
2254 /* Prologue: naked. */
2255 if (cfun
->machine
->is_naked
)
2260 avr_regs_to_save (&set
);
2262 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
2264 int treg
= AVR_TMP_REGNO
;
2265 /* Enable interrupts. */
2266 if (cfun
->machine
->is_interrupt
)
2267 emit_insn (gen_enable_interrupt ());
2269 if (cfun
->machine
->gasisr
.maybe
)
2271 /* Let GAS PR21472 emit prologue preamble for us which handles SREG,
2272 ZERO_REG and TMP_REG and one additional, optional register for
2273 us in an optimal way. This even scans through inline asm. */
2275 cfun
->machine
->gasisr
.yes
= 1;
2277 // The optional reg or TMP_REG if we don't need one. If we need one,
2278 // remove that reg from SET so that it's not puhed / popped twice.
2279 // We also use it below instead of TMP_REG in some places.
2281 treg
= avr_hregs_split_reg (&set
);
2283 treg
= AVR_TMP_REGNO
;
2284 cfun
->machine
->gasisr
.regno
= treg
;
2286 // The worst case of pushes. The exact number can be inferred
2287 // at assembly time by magic expression __gcc_isr.n_pushed.
2288 cfun
->machine
->stack_usage
+= 3 + (treg
!= AVR_TMP_REGNO
);
2290 // Emit a Prologue chunk. Epilogue chunk(s) might follow.
2291 // The final Done chunk is emit by final postscan.
2292 emit_insn (gen_gasisr (GEN_INT (GASISR_Prologue
), GEN_INT (treg
)));
2294 else // !TARGET_GASISR_PROLOGUES: Classic, dumb prologue preamble.
2296 /* Push zero reg. */
2297 emit_push_byte (AVR_ZERO_REGNO
, true);
2300 emit_push_byte (AVR_TMP_REGNO
, true);
2303 /* ??? There's no dwarf2 column reserved for SREG. */
2304 emit_push_sfr (sreg_rtx
, false, false /* clr */, AVR_TMP_REGNO
);
2306 /* Clear zero reg. */
2307 emit_move_insn (zero_reg_rtx
, const0_rtx
);
2309 /* Prevent any attempt to delete the setting of ZERO_REG! */
2310 emit_use (zero_reg_rtx
);
2313 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
2314 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
2317 emit_push_sfr (rampd_rtx
, false /* frame */, true /* clr */, treg
);
2320 && TEST_HARD_REG_BIT (set
, REG_X
)
2321 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
2323 emit_push_sfr (rampx_rtx
, false /* frame */, true /* clr */, treg
);
2327 && (frame_pointer_needed
2328 || (TEST_HARD_REG_BIT (set
, REG_Y
)
2329 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
2331 emit_push_sfr (rampy_rtx
, false /* frame */, true /* clr */, treg
);
2335 && TEST_HARD_REG_BIT (set
, REG_Z
)
2336 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
2338 emit_push_sfr (rampz_rtx
, false /* frame */, AVR_HAVE_RAMPD
, treg
);
2340 } /* is_interrupt is_signal */
2342 avr_prologue_setup_frame (size
, set
);
2344 if (flag_stack_usage_info
)
2345 current_function_static_stack_size
2346 = cfun
->machine
->stack_usage
+ INCOMING_FRAME_SP_OFFSET
;
2350 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
2351 /* Output summary at end of function prologue. */
2354 avr_asm_function_end_prologue (FILE *file
)
2356 if (cfun
->machine
->is_naked
)
2358 fputs ("/* prologue: naked */\n", file
);
2362 if (cfun
->machine
->is_interrupt
)
2364 fputs ("/* prologue: Interrupt */\n", file
);
2366 else if (cfun
->machine
->is_signal
)
2368 fputs ("/* prologue: Signal */\n", file
);
2371 fputs ("/* prologue: function */\n", file
);
2374 if (ACCUMULATE_OUTGOING_ARGS
)
2375 fprintf (file
, "/* outgoing args size = %d */\n",
2376 avr_outgoing_args_size());
2378 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
2379 (HOST_WIDE_INT
) get_frame_size());
2381 if (!cfun
->machine
->gasisr
.yes
)
2383 fprintf (file
, "/* stack size = %d */\n", cfun
->machine
->stack_usage
);
2384 // Create symbol stack offset so all functions have it. Add 1 to stack
2385 // usage for offset so that SP + .L__stack_offset = return address.
2386 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
2390 int used_by_gasisr
= 3 + (cfun
->machine
->gasisr
.regno
!= AVR_TMP_REGNO
);
2391 int to
= cfun
->machine
->stack_usage
;
2392 int from
= to
- used_by_gasisr
;
2393 // Number of pushed regs is only known at assembly-time.
2394 fprintf (file
, "/* stack size = %d...%d */\n", from
, to
);
2395 fprintf (file
, ".L__stack_usage = %d + __gcc_isr.n_pushed\n", from
);
2400 /* Implement `EPILOGUE_USES'. */
2403 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
2405 if (reload_completed
2407 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
2412 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
2415 emit_pop_byte (unsigned regno
)
2419 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
2420 mem
= gen_frame_mem (QImode
, mem
);
2421 reg
= gen_rtx_REG (QImode
, regno
);
2423 emit_insn (gen_rtx_SET (reg
, mem
));
2426 /* Output RTL epilogue. */
2429 avr_expand_epilogue (bool sibcall_p
)
2435 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
2437 size
= get_frame_size() + avr_outgoing_args_size();
2439 /* epilogue: naked */
2440 if (cfun
->machine
->is_naked
)
2442 gcc_assert (!sibcall_p
);
2444 emit_jump_insn (gen_return ());
2448 avr_regs_to_save (&set
);
2449 live_seq
= sequent_regs_live ();
2451 minimize
= (TARGET_CALL_PROLOGUES
2454 && !cfun
->machine
->is_OS_task
2455 && !cfun
->machine
->is_OS_main
2460 || frame_pointer_needed
2463 /* Get rid of frame. */
2465 if (!frame_pointer_needed
)
2467 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
2472 emit_move_insn (frame_pointer_rtx
,
2473 plus_constant (Pmode
, frame_pointer_rtx
, size
));
2476 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
2482 /* Try two methods to adjust stack and select shortest. */
2486 rtx_insn
*fp_plus_insns
;
2487 HOST_WIDE_INT size_max
;
2489 gcc_assert (frame_pointer_needed
2493 fp
= my_fp
= (frame_pointer_needed
2495 : gen_rtx_REG (Pmode
, REG_X
));
2497 if (AVR_HAVE_8BIT_SP
)
2499 /* The high byte (r29) does not change:
2500 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
2502 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
2505 /* For rationale see comment in prologue generation. */
2507 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
2508 if (size
> size_max
)
2510 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
2512 /********** Method 1: Adjust fp register **********/
2516 if (!frame_pointer_needed
)
2517 emit_move_insn (fp
, stack_pointer_rtx
);
2519 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
2521 /* Copy to stack pointer. */
2523 if (TARGET_NO_INTERRUPTS
)
2526 if (AVR_HAVE_8BIT_SP
)
2529 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
2530 GEN_INT (irq_state
)));
2532 fp_plus_insns
= get_insns ();
2535 /********** Method 2: Adjust Stack pointer **********/
2537 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
2539 rtx_insn
*sp_plus_insns
;
2543 emit_move_insn (stack_pointer_rtx
,
2544 plus_constant (Pmode
, stack_pointer_rtx
, size
));
2546 sp_plus_insns
= get_insns ();
2549 /************ Use shortest method ************/
2551 emit_insn (get_sequence_length (sp_plus_insns
)
2552 < get_sequence_length (fp_plus_insns
)
2557 emit_insn (fp_plus_insns
);
2560 if (frame_pointer_needed
2561 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
2563 /* Restore previous frame_pointer. See avr_expand_prologue for
2564 rationale for not using pophi. */
2566 emit_pop_byte (REG_Y
+ 1);
2567 emit_pop_byte (REG_Y
);
2570 /* Restore used registers. */
2572 int treg
= AVR_TMP_REGNO
;
2575 && cfun
->machine
->gasisr
.yes
)
2577 treg
= cfun
->machine
->gasisr
.regno
;
2578 CLEAR_HARD_REG_BIT (set
, treg
);
2581 for (int reg
= 31; reg
>= 0; --reg
)
2582 if (TEST_HARD_REG_BIT (set
, reg
))
2583 emit_pop_byte (reg
);
2587 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
2588      The conditions to restore them must be the same as in prologue.  */
2591 && TEST_HARD_REG_BIT (set
, REG_Z
)
2592 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
2594 emit_pop_byte (treg
);
2595 emit_move_insn (rampz_rtx
, all_regs_rtx
[treg
]);
2599 && (frame_pointer_needed
2600 || (TEST_HARD_REG_BIT (set
, REG_Y
)
2601 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
2603 emit_pop_byte (treg
);
2604 emit_move_insn (rampy_rtx
, all_regs_rtx
[treg
]);
2608 && TEST_HARD_REG_BIT (set
, REG_X
)
2609 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
2611 emit_pop_byte (treg
);
2612 emit_move_insn (rampx_rtx
, all_regs_rtx
[treg
]);
2617 emit_pop_byte (treg
);
2618 emit_move_insn (rampd_rtx
, all_regs_rtx
[treg
]);
2621 if (cfun
->machine
->gasisr
.yes
)
2623 // Emit an Epilogue chunk.
2624 emit_insn (gen_gasisr (GEN_INT (GASISR_Epilogue
),
2625 GEN_INT (cfun
->machine
->gasisr
.regno
)));
2627 else // !TARGET_GASISR_PROLOGUES
2629 /* Restore SREG using tmp_reg as scratch. */
2631 emit_pop_byte (AVR_TMP_REGNO
);
2632 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
2634 /* Restore tmp REG. */
2635 emit_pop_byte (AVR_TMP_REGNO
);
2637 /* Restore zero REG. */
2638 emit_pop_byte (AVR_ZERO_REGNO
);
2643 emit_jump_insn (gen_return ());
2647 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
2650 avr_asm_function_begin_epilogue (FILE *file
)
2653 fprintf (file
, "/* epilogue start */\n");
2657 /* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
2660 avr_cannot_modify_jumps_p (void)
2662 /* Naked Functions must not have any instructions after
2663 their epilogue, see PR42240 */
2665 if (reload_completed
2667 && cfun
->machine
->is_naked
)
2676 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
2679 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
2681 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
2682 This hook just serves to hack around PR rtl-optimization/52543 by
2683 claiming that non-generic addresses were mode-dependent so that
2684 lower-subreg.cc will skip these addresses. lower-subreg.cc sets up fake
2685 RTXes to probe SET and MEM costs and assumes that MEM is always in the
2686 generic address space which is not true. */
2688 return !ADDR_SPACE_GENERIC_P (as
);
2692 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2693 address with the `absdata' variable attribute, i.e. respective
2694 data can be read / written by LDS / STS instruction.
2695 This is used only for AVR_TINY. */
2698 avr_address_tiny_absdata_p (rtx x
, machine_mode mode
)
2700 if (CONST
== GET_CODE (x
))
2701 x
= XEXP (XEXP (x
, 0), 0);
2703 if (SYMBOL_REF_P (x
))
2704 return SYMBOL_REF_FLAGS (x
) & AVR_SYMBOL_FLAG_TINY_ABSDATA
;
2707 && IN_RANGE (INTVAL (x
), 0, 0xc0 - GET_MODE_SIZE (mode
)))
2714 /* Helper function for `avr_legitimate_address_p'. */
2717 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
2718 RTX_CODE outer_code
, bool strict
)
2721 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
2722 as
, outer_code
, UNKNOWN
)
2724 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
2728 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
2729 machine for a memory operand of mode MODE. */
2732 avr_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2734 bool ok
= CONSTANT_ADDRESS_P (x
);
2736 switch (GET_CODE (x
))
2739 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
2743 && GET_MODE_SIZE (mode
) > 4
2744 && REG_X
== REGNO (x
))
2752 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
2753 GET_CODE (x
), strict
);
2758 rtx reg
= XEXP (x
, 0);
2759 rtx op1
= XEXP (x
, 1);
2762 && CONST_INT_P (op1
)
2763 && INTVAL (op1
) >= 0)
2765 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
2770 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
2773 if (reg
== frame_pointer_rtx
2774 || reg
== arg_pointer_rtx
)
2779 else if (frame_pointer_needed
2780 && reg
== frame_pointer_rtx
)
2793 && CONSTANT_ADDRESS_P (x
))
2795 /* avrtiny's load / store instructions only cover addresses 0..0xbf:
2796 IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf. */
2798 ok
= avr_address_tiny_absdata_p (x
, mode
);
2801 if (avr_log
.legitimate_address_p
)
2803 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
2804 "reload_completed=%d reload_in_progress=%d %s:",
2805 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
2806 reg_renumber
? "(reg_renumber)" : "");
2808 if (GET_CODE (x
) == PLUS
2809 && REG_P (XEXP (x
, 0))
2810 && CONST_INT_P (XEXP (x
, 1))
2811 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
2814 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
2815 true_regnum (XEXP (x
, 0)));
2818 avr_edump ("\n%r\n", x
);
2825 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
2826 now only a helper for avr_addr_space_legitimize_address. */
2827 /* Attempts to replace X with a valid
2828 memory address for an operand of mode MODE */
2831 avr_legitimize_address (rtx x
, rtx oldx
, machine_mode mode
)
2833 bool big_offset_p
= false;
2839 if (CONSTANT_ADDRESS_P (x
)
2840 && ! avr_address_tiny_absdata_p (x
, mode
))
2842 x
= force_reg (Pmode
, x
);
2846 if (GET_CODE (oldx
) == PLUS
2847 && REG_P (XEXP (oldx
, 0)))
2849 if (REG_P (XEXP (oldx
, 1)))
2850 x
= force_reg (GET_MODE (oldx
), oldx
);
2851 else if (CONST_INT_P (XEXP (oldx
, 1)))
2853 int offs
= INTVAL (XEXP (oldx
, 1));
2854 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
2855 && offs
> MAX_LD_OFFSET (mode
))
2857 big_offset_p
= true;
2858 x
= force_reg (GET_MODE (oldx
), oldx
);
2863 if (avr_log
.legitimize_address
)
2865 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
2868 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
2875 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
2876 /* This will allow register R26/27 to be used where it is no worse than normal
2877 base pointers R28/29 or R30/31. For example, if base offset is greater
2878 than 63 bytes or for R++ or --R addressing. */
2881 avr_legitimize_reload_address (rtx
*px
, machine_mode mode
,
2882 int opnum
, int type
, int addr_type
,
2883 int ind_levels ATTRIBUTE_UNUSED
,
2884 rtx (*mk_memloc
)(rtx
,int))
2888 if (avr_log
.legitimize_reload_address
)
2889 avr_edump ("\n%?:%m %r\n", mode
, x
);
2891 if (1 && (GET_CODE (x
) == POST_INC
2892 || GET_CODE (x
) == PRE_DEC
))
2894 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
2895 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
2896 opnum
, RELOAD_OTHER
);
2898 if (avr_log
.legitimize_reload_address
)
2899 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2900 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
2905 if (GET_CODE (x
) == PLUS
2906 && REG_P (XEXP (x
, 0))
2907 && reg_equiv_constant (REGNO (XEXP (x
, 0))) == 0
2908 && CONST_INT_P (XEXP (x
, 1))
2909 && INTVAL (XEXP (x
, 1)) >= 1)
2911 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
2915 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
2917 int regno
= REGNO (XEXP (x
, 0));
2918 rtx mem
= mk_memloc (x
, regno
);
2920 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
2921 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
2922 1, (enum reload_type
) addr_type
);
2924 if (avr_log
.legitimize_reload_address
)
2925 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2926 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
2928 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
2929 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2930 opnum
, (enum reload_type
) type
);
2932 if (avr_log
.legitimize_reload_address
)
2933 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2934 BASE_POINTER_REGS
, mem
, NULL_RTX
);
2939 else if (! (frame_pointer_needed
2940 && XEXP (x
, 0) == frame_pointer_rtx
))
2942 push_reload (x
, NULL_RTX
, px
, NULL
,
2943 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2944 opnum
, (enum reload_type
) type
);
2946 if (avr_log
.legitimize_reload_address
)
2947 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2948 POINTER_REGS
, x
, NULL_RTX
);
2958 /* Helper function to print assembler resp. track instruction
2959 sequence lengths. Always return "".
2962 Output assembler code from template TPL with operands supplied
2963 by OPERANDS. This is just forwarding to output_asm_insn.
2966 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2967 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2968 Don't output anything.
2972 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
2975 output_asm_insn (tpl
, operands
);
2988 /* Return a pointer register name as a string. */
2991 ptrreg_to_str (int regno
)
2995 case REG_X
: return "X";
2996 case REG_Y
: return "Y";
2997 case REG_Z
: return "Z";
2999 output_operand_lossage ("address operand requires constraint for"
3000 " X, Y, or Z register");
3005 /* Return the condition name as a string.
3006 Used in conditional jump constructing */
3009 cond_string (enum rtx_code code
)
3011 bool cc_overflow_unusable
= false;
3020 if (cc_overflow_unusable
)
3025 if (cc_overflow_unusable
)
3041 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
3042 This must be used for AVR_TINY only because on other cores
3043 the flash memory is not visible in the RAM address range and
3044 cannot be read by, say, LD instruction. */
3047 avr_address_tiny_pm_p (rtx x
)
3049 if (CONST
== GET_CODE (x
))
3050 x
= XEXP (XEXP (x
, 0), 0);
3052 if (SYMBOL_REF_P (x
))
3053 return SYMBOL_REF_FLAGS (x
) & AVR_SYMBOL_FLAG_TINY_PM
;
3058 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
3059 /* Output ADDR to FILE as address. */
3062 avr_print_operand_address (FILE *file
, machine_mode
/*mode*/, rtx addr
)
3065 && avr_address_tiny_pm_p (addr
))
3067 addr
= plus_constant (Pmode
, addr
, avr_arch
->flash_pm_offset
);
3070 switch (GET_CODE (addr
))
3073 fprintf (file
, "%s", ptrreg_to_str (REGNO (addr
)));
3077 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
3081 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
3085 if (CONSTANT_ADDRESS_P (addr
)
3086 && text_segment_operand (addr
, VOIDmode
))
3089 if (GET_CODE (x
) == CONST
)
3091 if (GET_CODE (x
) == PLUS
&& CONST_INT_P (XEXP (x
, 1)))
3093 /* Assembler gs() will implant word address. Make offset
3094 a byte offset inside gs() for assembler. This is
3095 needed because the more logical (constant+gs(sym)) is not
3096 accepted by gas. For 128K and smaller devices this is ok.
3097 For large devices it will create a trampoline to offset
3098 from symbol which may not be what the user really wanted. */
3100 fprintf (file
, "gs(");
3101 output_addr_const (file
, XEXP (x
, 0));
3102 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
3103 2 * INTVAL (XEXP (x
, 1)));
3105 if (warning (0, "pointer offset from symbol maybe incorrect"))
3107 output_addr_const (stderr
, addr
);
3108 fprintf (stderr
, "\n");
3113 fprintf (file
, "gs(");
3114 output_addr_const (file
, addr
);
3115 fprintf (file
, ")");
3119 output_addr_const (file
, addr
);
3124 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
3127 avr_print_operand_punct_valid_p (unsigned char code
)
3129 return code
== '~' || code
== '!';
3133 /* Implement `TARGET_PRINT_OPERAND'. */
3134 /* Output X as assembler operand to file FILE.
3135 For a description of supported %-codes, see top of avr.md. */
3138 avr_print_operand (FILE *file
, rtx x
, int code
)
3140 int abcd
= 0, ef
= 0, ij
= 0;
3142 if (code
>= 'A' && code
<= 'D')
3144 else if (code
== 'E' || code
== 'F')
3146 else if (code
== 'I' || code
== 'J')
3151 if (!AVR_HAVE_JMP_CALL
)
3154 else if (code
== '!')
3156 if (AVR_HAVE_EIJMP_EICALL
)
3159 else if (code
== 't'
3162 static int t_regno
= -1;
3163 static int t_nbits
= -1;
3165 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
3167 t_regno
= REGNO (x
);
3168 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
3170 else if (CONST_INT_P (x
) && t_regno
>= 0
3171 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
3173 int bpos
= INTVAL (x
);
3175 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
3177 fprintf (file
, ",%d", bpos
% 8);
3182 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
3184 else if (code
== 'E' || code
== 'F')
3186 rtx op
= XEXP (x
, 0);
3187 fprintf (file
, "%s", reg_names
[REGNO (op
) + ef
]);
3189 else if (code
== 'I' || code
== 'J')
3191 rtx op
= XEXP (XEXP (x
, 0), 0);
3192 fprintf (file
, "%s", reg_names
[REGNO (op
) + ij
]);
3196 if (x
== zero_reg_rtx
)
3197 fprintf (file
, "__zero_reg__");
3198 else if (code
== 'r' && REGNO (x
) < 32)
3199 fprintf (file
, "%d", (int) REGNO (x
));
3201 fprintf (file
, "%s", reg_names
[REGNO (x
) + abcd
]);
3203 else if (CONST_INT_P (x
))
3205 HOST_WIDE_INT ival
= INTVAL (x
);
3208 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
3209 else if (low_io_address_operand (x
, VOIDmode
)
3210 || high_io_address_operand (x
, VOIDmode
))
3212 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
3213 fprintf (file
, "__RAMPZ__");
3214 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
3215 fprintf (file
, "__RAMPY__");
3216 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
3217 fprintf (file
, "__RAMPX__");
3218 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
3219 fprintf (file
, "__RAMPD__");
3220 else if ((AVR_XMEGA
|| AVR_TINY
) && ival
== avr_addr
.ccp
)
3221 fprintf (file
, "__CCP__");
3222 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
3223 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
3224 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
3227 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
3228 ival
- avr_arch
->sfr_offset
);
3232 fatal_insn ("bad address, not an I/O address:", x
);
3236 rtx addr
= XEXP (x
, 0);
3240 if (!CONSTANT_P (addr
))
3241 fatal_insn ("bad address, not a constant:", addr
);
3242 /* Assembler template with m-code is data - not progmem section */
3243 if (text_segment_operand (addr
, VOIDmode
))
3244 if (warning (0, "accessing data memory with"
3245 " program memory address"))
3247 output_addr_const (stderr
, addr
);
3248 fprintf(stderr
,"\n");
3250 output_addr_const (file
, addr
);
3252 else if (code
== 'i')
3254 avr_print_operand (file
, addr
, 'i');
3256 else if (code
== 'o')
3258 if (GET_CODE (addr
) != PLUS
)
3259 fatal_insn ("bad address, not (reg+disp):", addr
);
3261 avr_print_operand (file
, XEXP (addr
, 1), 0);
3263 else if (code
== 'b')
3265 if (GET_CODE (addr
) != PLUS
)
3266 fatal_insn ("bad address, not (reg+disp):", addr
);
3268 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
, 0));
3270 else if (code
== 'p' || code
== 'r')
3272 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
3273 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
3277 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
, 0));
3279 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
3281 else if (GET_CODE (addr
) == PLUS
)
3283 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
, 0));
3284 if (REGNO (XEXP (addr
, 0)) == REG_X
)
3285 fatal_insn ("internal compiler error. Bad address:"
3288 avr_print_operand (file
, XEXP (addr
, 1), code
);
3291 avr_print_operand_address (file
, VOIDmode
, addr
);
3293 else if (code
== 'i')
3295 if (SYMBOL_REF_P (x
) && (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
))
3296 avr_print_operand_address
3297 (file
, VOIDmode
, plus_constant (HImode
, x
, -avr_arch
->sfr_offset
));
3299 fatal_insn ("bad address, not an I/O address:", x
);
3301 else if (code
== 'x')
3303 /* Constant progmem address - like used in jmp or call */
3304 if (text_segment_operand (x
, VOIDmode
) == 0)
3305 if (warning (0, "accessing program memory"
3306 " with data memory address"))
3308 output_addr_const (stderr
, x
);
3309 fprintf(stderr
,"\n");
3311 /* Use normal symbol for direct address no linker trampoline needed */
3312 output_addr_const (file
, x
);
3314 else if (CONST_FIXED_P (x
))
3316 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
3318 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
3320 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
3322 else if (CONST_DOUBLE_P (x
))
3325 if (GET_MODE (x
) != SFmode
)
3326 fatal_insn ("internal compiler error. Unknown mode:", x
);
3327 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), val
);
3328 fprintf (file
, "0x%lx", val
);
3330 else if (GET_CODE (x
) == CONST_STRING
)
3331 fputs (XSTR (x
, 0), file
);
3332 else if (code
== 'j')
3333 fputs (cond_string (GET_CODE (x
)), file
);
3334 else if (code
== 'k')
3335 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
3337 avr_print_operand_address (file
, VOIDmode
, x
);
3341 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
3343 /* Prefer sequence of loads/stores for moves of size up to
3344 two - two pairs of load/store instructions are always better
3345 than the 5 instruction sequence for a loop (1 instruction
3346 for loop counter setup, and 4 for the body of the loop). */
3349 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size
,
3350 unsigned int align ATTRIBUTE_UNUSED
,
3351 enum by_pieces_operation op
,
3354 if (op
!= MOVE_BY_PIECES
3355 || (speed_p
&& size
> MOVE_MAX_PIECES
))
3356 return default_use_by_pieces_infrastructure_p (size
, align
, op
, speed_p
);
3358 return size
<= MOVE_MAX_PIECES
;
3361 /* Choose mode for jump insn:
3362 1 - relative jump in range -63 <= x <= 62 ;
3363 2 - relative jump in range -2046 <= x <= 2045 ;
3364 3 - absolute jump (only for ATmega[16]03). */
3367 avr_jump_mode (rtx x
, rtx_insn
*insn
)
3369 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
3370 ? XEXP (x
, 0) : x
));
3371 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
3372 int jump_distance
= cur_addr
- dest_addr
;
3374 if (IN_RANGE (jump_distance
, -63, 62))
3376 else if (IN_RANGE (jump_distance
, -2046, 2045))
3378 else if (AVR_HAVE_JMP_CALL
)
3384 /* Return an AVR condition jump commands.
3385 X is a comparison RTX.
3386 LEN is a number returned by avr_jump_mode function.
3387 If REVERSE nonzero then condition code in X must be reversed. */
3390 ret_cond_branch (rtx x
, int len
, int reverse
)
3392 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
3393 bool cc_overflow_unusable
= false;
3398 if (cc_overflow_unusable
)
3399 return (len
== 1 ? ("breq .+2" CR_TAB
3401 len
== 2 ? ("breq .+4" CR_TAB
3409 return (len
== 1 ? ("breq .+2" CR_TAB
3411 len
== 2 ? ("breq .+4" CR_TAB
3418 return (len
== 1 ? ("breq .+2" CR_TAB
3420 len
== 2 ? ("breq .+4" CR_TAB
3427 if (cc_overflow_unusable
)
3428 return (len
== 1 ? ("breq %0" CR_TAB
3430 len
== 2 ? ("breq .+2" CR_TAB
3437 return (len
== 1 ? ("breq %0" CR_TAB
3439 len
== 2 ? ("breq .+2" CR_TAB
3446 return (len
== 1 ? ("breq %0" CR_TAB
3448 len
== 2 ? ("breq .+2" CR_TAB
3462 return ("br%j1 .+2" CR_TAB
3465 return ("br%j1 .+4" CR_TAB
3476 return ("br%k1 .+2" CR_TAB
3479 return ("br%k1 .+4" CR_TAB
3488 /* Worker function for `FINAL_PRESCAN_INSN'. */
3489 /* Output insn cost for next insn. */
3492 avr_final_prescan_insn (rtx_insn
*insn
, rtx
*operand ATTRIBUTE_UNUSED
,
3493 int num_operands ATTRIBUTE_UNUSED
)
3495 if (avr_log
.rtx_costs
)
3497 rtx set
= single_set (insn
);
3500 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
3501 set_src_cost (SET_SRC (set
), GET_MODE (SET_DEST (set
)),
3502 optimize_insn_for_speed_p ()));
3504 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
3505 rtx_cost (PATTERN (insn
), VOIDmode
, INSN
, 0,
3506 optimize_insn_for_speed_p()));
3509 if (avr_log
.insn_addresses
)
3510 fprintf (asm_out_file
, ";; ADDR = %d\n",
3511 (int) INSN_ADDRESSES (INSN_UID (insn
)));
3515 /* Implement `TARGET_ASM_FINAL_POSTSCAN_INSN'. */
3516 /* When GAS generates (parts of) ISR prologue / epilogue for us, we must
3517    hint GAS about the end of the code to scan.  There might be code located
3518 after the last epilogue. */
3521 avr_asm_final_postscan_insn (FILE *stream
, rtx_insn
*insn
, rtx
*, int)
3523 if (cfun
->machine
->gasisr
.yes
3524 && !next_real_insn (insn
))
3527 fprintf (stream
, "\t__gcc_isr %d,r%d\n", GASISR_Done
,
3528 cfun
->machine
->gasisr
.regno
);
3533 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
3534 /* Returns nonzero if REGNO is the number of a hard
3535 register in which function arguments are sometimes passed. */
3538 avr_function_arg_regno_p (int r
)
3540 return AVR_TINY
? IN_RANGE (r
, 20, 25) : IN_RANGE (r
, 8, 25);
3544 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
3545 /* Initializing the variable cum for the state at the beginning
3546 of the argument list. */
3549 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
3550 tree fndecl ATTRIBUTE_UNUSED
)
3552 cum
->nregs
= AVR_TINY
? 6 : 18;
3553 cum
->regno
= FIRST_CUM_REG
;
3554 if (!libname
&& stdarg_p (fntype
))
3557   /* Assume the callee may be tail called */
3559 cfun
->machine
->sibcall_fails
= 0;
3562 /* Returns the number of registers to allocate for a function argument. */
3565 avr_num_arg_regs (machine_mode mode
, const_tree type
)
3569 if (mode
== BLKmode
)
3570 size
= int_size_in_bytes (type
);
3572 size
= GET_MODE_SIZE (mode
);
3574 /* Align all function arguments to start in even-numbered registers.
3575 Odd-sized arguments leave holes above them. */
3577 return (size
+ 1) & ~1;
3581 /* Implement `TARGET_FUNCTION_ARG'. */
3582 /* Controls whether a function argument is passed
3583 in a register, and which register. */
3586 avr_function_arg (cumulative_args_t cum_v
, const function_arg_info
&arg
)
3588 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3589 int bytes
= avr_num_arg_regs (arg
.mode
, arg
.type
);
3591 if (cum
->nregs
&& bytes
<= cum
->nregs
)
3592 return gen_rtx_REG (arg
.mode
, cum
->regno
- bytes
);
3598 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
3599 /* Update the summarizer variable CUM to advance past an argument
3600 in the argument list. */
3603 avr_function_arg_advance (cumulative_args_t cum_v
,
3604 const function_arg_info
&arg
)
3606 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3607 int bytes
= avr_num_arg_regs (arg
.mode
, arg
.type
);
3609 cum
->nregs
-= bytes
;
3610 cum
->regno
-= bytes
;
3612 /* A parameter is being passed in a call-saved register. As the original
3613 contents of these regs has to be restored before leaving the function,
3614 a function must not pass arguments in call-saved regs in order to get
3619 && !call_used_or_fixed_reg_p (cum
->regno
))
3621 /* FIXME: We ship info on failing tail-call in struct machine_function.
3622 This uses internals of calls.cc:expand_call() and the way args_so_far
3623 is used. targetm.function_ok_for_sibcall() needs to be extended to
3624 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
3625 dependent so that such an extension is not wanted. */
3627 cfun
->machine
->sibcall_fails
= 1;
3630 /* Test if all registers needed by the ABI are actually available. If the
3631 user has fixed a GPR needed to pass an argument, an (implicit) function
3632 call will clobber that fixed register. See PR45099 for an example. */
3637 for (int regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
3638 if (fixed_regs
[regno
])
3639 warning (0, "fixed register %s used to pass parameter to function",
3643 if (cum
->nregs
<= 0)
3646 cum
->regno
= FIRST_CUM_REG
;
3650 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3651 /* Decide whether we can make a sibling call to a function. DECL is the
3652 declaration of the function being targeted by the call and EXP is the
3653 CALL_EXPR representing the call. */
3656 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
3660 /* Tail-calling must fail if callee-saved regs are used to pass
3661 function args. We must not tail-call when `epilogue_restores'
3662 is used. Unfortunately, we cannot tell at this point if that
3663 actually will happen or not, and we cannot step back from
3664 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
3666 if (cfun
->machine
->sibcall_fails
3667 || TARGET_CALL_PROLOGUES
)
3672 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
3676 decl_callee
= TREE_TYPE (decl_callee
);
3680 decl_callee
= fntype_callee
;
3682 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
3683 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
3685 decl_callee
= TREE_TYPE (decl_callee
);
3689 /* Ensure that caller and callee have compatible epilogues */
3691 if (cfun
->machine
->is_interrupt
3692 || cfun
->machine
->is_signal
3693 || cfun
->machine
->is_naked
3694 || avr_naked_function_p (decl_callee
))
3702 /***********************************************************************
3703 Functions for outputting various mov's for a various modes
3704 ************************************************************************/
3706 /* Return true if a value of mode MODE is read from flash by
3707 __load_* function from libgcc. */
3710 avr_load_libgcc_p (rtx op
)
3712 machine_mode mode
= GET_MODE (op
);
3713 int n_bytes
= GET_MODE_SIZE (mode
);
3717 && avr_mem_flash_p (op
));
3720 /* Return true if a value of mode MODE is read by __xload_* function. */
3723 avr_xload_libgcc_p (machine_mode mode
)
3725 int n_bytes
= GET_MODE_SIZE (mode
);
3728 || avr_n_flash
> 1);
3732 /* Fixme: This is a hack because secondary reloads don't work as expected.
3734 Find an unused d-register to be used as scratch in INSN.
3735 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3736 is a register, skip all possible return values that overlap EXCLUDE.
3737 The policy for the returned register is similar to that of
3738 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3741 Return a QImode d-register or NULL_RTX if nothing found. */
3744 avr_find_unused_d_reg (rtx_insn
*insn
, rtx exclude
)
3746 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
3747 || avr_signal_function_p (current_function_decl
));
3749 for (int regno
= 16; regno
< 32; regno
++)
3751 rtx reg
= all_regs_rtx
[regno
];
3754 && reg_overlap_mentioned_p (exclude
, reg
))
3755 || fixed_regs
[regno
])
3760 /* Try non-live register */
3762 if (!df_regs_ever_live_p (regno
)
3763 && (TREE_THIS_VOLATILE (current_function_decl
)
3764 || cfun
->machine
->is_OS_task
3765 || cfun
->machine
->is_OS_main
3766 || (!isr_p
&& call_used_or_fixed_reg_p (regno
))))
3771 /* Any live register can be used if it is unused after.
3772 Prologue/epilogue will care for it as needed. */
3774 if (df_regs_ever_live_p (regno
)
3775 && reg_unused_after (insn
, reg
))
3785 /* Helper function for the next function in the case where only restricted
3786 version of LPM instruction is available. */
3789 avr_out_lpm_no_lpmx (rtx_insn
*insn
, rtx
*xop
, int *plen
)
3793 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
3796 regno_dest
= REGNO (dest
);
3798 /* The implicit target register of LPM. */
3799 xop
[3] = lpm_reg_rtx
;
3801 switch (GET_CODE (addr
))
3808 gcc_assert (REG_Z
== REGNO (addr
));
3816 avr_asm_len ("%4lpm", xop
, plen
, 1);
3818 if (regno_dest
!= LPM_REGNO
)
3819 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3824 if (REGNO (dest
) == REG_Z
)
3825 return avr_asm_len ("%4lpm" CR_TAB
3830 "pop %A0", xop
, plen
, 6);
3832 avr_asm_len ("%4lpm" CR_TAB
3836 "mov %B0,%3", xop
, plen
, 5);
3838 if (!reg_unused_after (insn
, addr
))
3839 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3848 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3851 if (regno_dest
== LPM_REGNO
)
3852 avr_asm_len ("%4lpm" CR_TAB
3853 "adiw %2,1", xop
, plen
, 2);
3855 avr_asm_len ("%4lpm" CR_TAB
3857 "adiw %2,1", xop
, plen
, 3);
3860 avr_asm_len ("%4lpm" CR_TAB
3862 "adiw %2,1", xop
, plen
, 3);
3865 avr_asm_len ("%4lpm" CR_TAB
3867 "adiw %2,1", xop
, plen
, 3);
3870 avr_asm_len ("%4lpm" CR_TAB
3872 "adiw %2,1", xop
, plen
, 3);
3874 break; /* POST_INC */
3876 } /* switch CODE (addr) */
3882 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
3883 OP[1] in AS1 to register OP[0].
3884 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3888 avr_out_lpm (rtx_insn
*insn
, rtx
*op
, int *plen
)
3892 rtx src
= SET_SRC (single_set (insn
));
3894 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
3897 addr_space_t as
= MEM_ADDR_SPACE (src
);
3904 warning (0, "writing to address space %qs not supported",
3905 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
3910 addr
= XEXP (src
, 0);
3911 code
= GET_CODE (addr
);
3913 gcc_assert (REG_P (dest
));
3914 gcc_assert (REG
== code
|| POST_INC
== code
);
3918 xop
[2] = lpm_addr_reg_rtx
;
3919 xop
[4] = xstring_empty
;
3920 xop
[5] = tmp_reg_rtx
;
3921 xop
[6] = XEXP (rampz_rtx
, 0);
3923 segment
= avr_addrspace
[as
].segment
;
3925 /* Set RAMPZ as needed. */
3929 xop
[4] = GEN_INT (segment
);
3930 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
3932 if (xop
[3] != NULL_RTX
)
3934 avr_asm_len ("ldi %3,%4" CR_TAB
3935 "out %i6,%3", xop
, plen
, 2);
3937 else if (segment
== 1)
3939 avr_asm_len ("clr %5" CR_TAB
3941 "out %i6,%5", xop
, plen
, 3);
3945 avr_asm_len ("mov %5,%2" CR_TAB
3948 "mov %2,%5", xop
, plen
, 4);
3953 if (!AVR_HAVE_ELPMX
)
3954 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3956 else if (!AVR_HAVE_LPMX
)
3958 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3961 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3963 switch (GET_CODE (addr
))
3970 gcc_assert (REG_Z
== REGNO (addr
));
3978 avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3982 if (REGNO (dest
) == REG_Z
)
3983 avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3984 "%4lpm %B0,%a2" CR_TAB
3985 "mov %A0,%5", xop
, plen
, 3);
3988 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3989 "%4lpm %B0,%a2", xop
, plen
, 2);
3991 if (!reg_unused_after (insn
, addr
))
3992 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3999 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
4000 "%4lpm %B0,%a2+" CR_TAB
4001 "%4lpm %C0,%a2", xop
, plen
, 3);
4003 if (!reg_unused_after (insn
, addr
))
4004 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
4010 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
4011 "%4lpm %B0,%a2+", xop
, plen
, 2);
4013 if (REGNO (dest
) == REG_Z
- 2)
4014 avr_asm_len ("%4lpm %5,%a2+" CR_TAB
4015 "%4lpm %C0,%a2" CR_TAB
4016 "mov %D0,%5", xop
, plen
, 3);
4019 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
4020 "%4lpm %D0,%a2", xop
, plen
, 2);
4022 if (!reg_unused_after (insn
, addr
))
4023 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
4033 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
4036 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
4037 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
4038 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
4039 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
4041 break; /* POST_INC */
4043 } /* switch CODE (addr) */
4045 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
4047 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
4049 xop
[0] = zero_reg_rtx
;
4050 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
4057 /* Worker function for xload_8 insn. */
4060 avr_out_xload (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
4066 xop
[2] = lpm_addr_reg_rtx
;
4067 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
4069 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
4071 avr_asm_len ("sbrc %1,7" CR_TAB
4072 "ld %3,%a2", xop
, plen
, 2);
4074 if (REGNO (xop
[0]) != REGNO (xop
[3]))
4075 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
4082 output_movqi (rtx_insn
*insn
, rtx operands
[], int *plen
)
4084 rtx dest
= operands
[0];
4085 rtx src
= operands
[1];
4087 if (avr_mem_flash_p (src
)
4088 || avr_mem_flash_p (dest
))
4090 return avr_out_lpm (insn
, operands
, plen
);
4093 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 1);
4097 if (REG_P (src
)) /* mov r,r */
4099 if (test_hard_reg_class (STACK_REG
, dest
))
4100 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
4101 else if (test_hard_reg_class (STACK_REG
, src
))
4102 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
4104 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
4106 else if (CONSTANT_P (src
))
4108 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
4111 else if (MEM_P (src
))
4112 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
4114 else if (MEM_P (dest
))
4119 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4121 return out_movqi_mr_r (insn
, xop
, plen
);
4129 output_movhi (rtx_insn
*insn
, rtx xop
[], int *plen
)
4134 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
4136 if (avr_mem_flash_p (src
)
4137 || avr_mem_flash_p (dest
))
4139 return avr_out_lpm (insn
, xop
, plen
);
4144 if (REG_P (src
)) /* mov r,r */
4146 if (test_hard_reg_class (STACK_REG
, dest
))
4148 if (AVR_HAVE_8BIT_SP
)
4149 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
4152 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
4153 "out __SP_H__,%B1", xop
, plen
, -2);
4155 /* Use simple load of SP if no interrupts are used. */
4157 return TARGET_NO_INTERRUPTS
4158 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
4159 "out __SP_L__,%A1", xop
, plen
, -2)
4160 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
4162 "out __SP_H__,%B1" CR_TAB
4163 "out __SREG__,__tmp_reg__" CR_TAB
4164 "out __SP_L__,%A1", xop
, plen
, -5);
4166 else if (test_hard_reg_class (STACK_REG
, src
))
4168 return !AVR_HAVE_SPH
4169 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
4170 "clr %B0", xop
, plen
, -2)
4172 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
4173 "in %B0,__SP_H__", xop
, plen
, -2);
4176 return AVR_HAVE_MOVW
4177 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
4179 : avr_asm_len ("mov %A0,%A1" CR_TAB
4180 "mov %B0,%B1", xop
, plen
, -2);
4182 else if (CONSTANT_P (src
))
4184 return output_reload_inhi (xop
, NULL
, plen
);
4186 else if (MEM_P (src
))
4188 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
4191 else if (MEM_P (dest
))
4196 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4198 return out_movhi_mr_r (insn
, xop
, plen
);
4201 fatal_insn ("invalid insn:", insn
);
4207 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
4210 avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4214 rtx x
= XEXP (src
, 0);
4216 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4217 "ld %0,%b1" , op
, plen
, -3);
4219 if (!reg_overlap_mentioned_p (dest
, XEXP (x
, 0))
4220 && !reg_unused_after (insn
, XEXP (x
, 0)))
4221 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
), op
, plen
, 2);
4227 out_movqi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
4231 rtx x
= XEXP (src
, 0);
4233 if (CONSTANT_ADDRESS_P (x
))
4235 int n_words
= AVR_TINY
? 1 : 2;
4236 return io_address_operand (x
, QImode
)
4237 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
4238 : avr_asm_len ("lds %0,%m1", op
, plen
, -n_words
);
4241 if (GET_CODE (x
) == PLUS
4242 && REG_P (XEXP (x
, 0))
4243 && CONST_INT_P (XEXP (x
, 1)))
4245 /* memory access by reg+disp */
4247 int disp
= INTVAL (XEXP (x
, 1));
4250 return avr_out_movqi_r_mr_reg_disp_tiny (insn
, op
, plen
);
4252 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
4254 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4255 fatal_insn ("incorrect insn:",insn
);
4257 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
4258 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
4259 "ldd %0,Y+63" CR_TAB
4260 "sbiw r28,%o1-63", op
, plen
, -3);
4262 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4263 "sbci r29,hi8(-%o1)" CR_TAB
4265 "subi r28,lo8(%o1)" CR_TAB
4266 "sbci r29,hi8(%o1)", op
, plen
, -5);
4268 else if (REGNO (XEXP (x
, 0)) == REG_X
)
4270 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
4271 it but I have this situation with extremal optimizing options. */
4273 avr_asm_len ("adiw r26,%o1" CR_TAB
4274 "ld %0,X", op
, plen
, -2);
4276 if (!reg_overlap_mentioned_p (dest
, XEXP (x
, 0))
4277 && !reg_unused_after (insn
, XEXP (x
, 0)))
4279 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
4285 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
4288 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
4292 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4295 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4299 rtx base
= XEXP (src
, 0);
4301 int reg_dest
= true_regnum (dest
);
4302 int reg_base
= true_regnum (base
);
4304 if (reg_dest
== reg_base
) /* R = (R) */
4305 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
4307 "mov %A0,__tmp_reg__", op
, plen
, -3);
4309 avr_asm_len ("ld %A0,%1+" CR_TAB
4310 "ld %B0,%1", op
, plen
, -2);
4312 if (!reg_unused_after (insn
, base
))
4313 avr_asm_len (TINY_SBIW (%E1
, %F1
, 1), op
, plen
, 2);
4319 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4322 avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4326 rtx base
= XEXP (src
, 0);
4328 int reg_dest
= true_regnum (dest
);
4329 int reg_base
= true_regnum (XEXP (base
, 0));
4331 if (reg_base
== reg_dest
)
4333 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4334 "ld __tmp_reg__,%b1+" CR_TAB
4336 "mov %A0,__tmp_reg__", op
, plen
, -5);
4340 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4341 "ld %A0,%b1+" CR_TAB
4342 "ld %B0,%b1", op
, plen
, -4);
4344 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4345 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
+1), op
, plen
, 2);
4352 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4355 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4357 int mem_volatile_p
= 0;
4360 rtx base
= XEXP (src
, 0);
4362 /* "volatile" forces reading low byte first, even if less efficient,
4363 for correct operation with 16-bit I/O registers. */
4364 mem_volatile_p
= MEM_VOLATILE_P (src
);
4366 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
4367 fatal_insn ("incorrect insn:", insn
);
4369 if (!mem_volatile_p
)
4370 return avr_asm_len ("ld %B0,%1" CR_TAB
4371 "ld %A0,%1", op
, plen
, -2);
4373 return avr_asm_len (TINY_SBIW (%I1
, %J1
, 2) CR_TAB
4374 "ld %A0,%p1+" CR_TAB
4376 TINY_SBIW (%I1
, %J1
, 1), op
, plen
, -6);
4381 out_movhi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
4385 rtx base
= XEXP (src
, 0);
4386 int reg_dest
= true_regnum (dest
);
4387 int reg_base
= true_regnum (base
);
4388 /* "volatile" forces reading low byte first, even if less efficient,
4389 for correct operation with 16-bit I/O registers. */
4390 int mem_volatile_p
= MEM_VOLATILE_P (src
);
4395 return avr_out_movhi_r_mr_reg_no_disp_tiny (insn
, op
, plen
);
4397 if (reg_dest
== reg_base
) /* R = (R) */
4398 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
4400 "mov %A0,__tmp_reg__", op
, plen
, -3);
4402 if (reg_base
!= REG_X
)
4403 return avr_asm_len ("ld %A0,%1" CR_TAB
4404 "ldd %B0,%1+1", op
, plen
, -2);
4406 avr_asm_len ("ld %A0,X+" CR_TAB
4407 "ld %B0,X", op
, plen
, -2);
4409 if (!reg_unused_after (insn
, base
))
4410 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4414 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4416 int disp
= INTVAL (XEXP (base
, 1));
4417 int reg_base
= true_regnum (XEXP (base
, 0));
4420 return avr_out_movhi_r_mr_reg_disp_tiny (insn
, op
, plen
);
4422 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4424 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4425 fatal_insn ("incorrect insn:",insn
);
4427 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
4428 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
4429 "ldd %A0,Y+62" CR_TAB
4430 "ldd %B0,Y+63" CR_TAB
4431 "sbiw r28,%o1-62", op
, plen
, -4)
4433 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4434 "sbci r29,hi8(-%o1)" CR_TAB
4436 "ldd %B0,Y+1" CR_TAB
4437 "subi r28,lo8(%o1)" CR_TAB
4438 "sbci r29,hi8(%o1)", op
, plen
, -6);
4441 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
4442 it but I have this situation with extremal
4443 optimization options. */
4445 if (reg_base
== REG_X
)
4447 if (reg_base
== reg_dest
)
4448 return avr_asm_len ("adiw r26,%o1" CR_TAB
4449 "ld __tmp_reg__,X+" CR_TAB
4451 "mov %A0,__tmp_reg__", op
, plen
, -4);
4453 avr_asm_len ("adiw r26,%o1" CR_TAB
4455 "ld %B0,X", op
, plen
, -3);
4457 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4458 avr_asm_len ("sbiw r26,%o1+1", op
, plen
, 1);
4463 return reg_base
== reg_dest
4464 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
4465 "ldd %B0,%B1" CR_TAB
4466 "mov %A0,__tmp_reg__", op
, plen
, -3)
4468 : avr_asm_len ("ldd %A0,%A1" CR_TAB
4469 "ldd %B0,%B1", op
, plen
, -2);
4471 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4474 return avr_out_movhi_r_mr_pre_dec_tiny (insn
, op
, plen
);
4476 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
4477 fatal_insn ("incorrect insn:", insn
);
4479 if (!mem_volatile_p
)
4480 return avr_asm_len ("ld %B0,%1" CR_TAB
4481 "ld %A0,%1", op
, plen
, -2);
4483 return REGNO (XEXP (base
, 0)) == REG_X
4484 ? avr_asm_len ("sbiw r26,2" CR_TAB
4487 "sbiw r26,1", op
, plen
, -4)
4489 : avr_asm_len ("sbiw %r1,2" CR_TAB
4491 "ldd %B0,%p1+1", op
, plen
, -3);
4493 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4495 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
4496 fatal_insn ("incorrect insn:", insn
);
4498 return avr_asm_len ("ld %A0,%1" CR_TAB
4499 "ld %B0,%1", op
, plen
, -2);
4501 else if (CONSTANT_ADDRESS_P (base
))
4503 int n_words
= AVR_TINY
? 2 : 4;
4504 return io_address_operand (base
, HImode
)
4505 ? avr_asm_len ("in %A0,%i1" CR_TAB
4506 "in %B0,%i1+1", op
, plen
, -2)
4508 : avr_asm_len ("lds %A0,%m1" CR_TAB
4509 "lds %B0,%m1+1", op
, plen
, -n_words
);
4512 fatal_insn ("unknown move insn:",insn
);
4517 avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4521 rtx base
= XEXP (src
, 0);
4522 int reg_dest
= true_regnum (dest
);
4523 int reg_base
= true_regnum (base
);
4525 if (reg_dest
== reg_base
)
4527 /* "ld r26,-X" is undefined */
4528 return *l
= 9, (TINY_ADIW (%E1
, %F1
, 3) CR_TAB
4531 "ld __tmp_reg__,-%1" CR_TAB
4532 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
4534 "mov %B0,__tmp_reg__");
4536 else if (reg_dest
== reg_base
- 2)
4538 return *l
= 5, ("ld %A0,%1+" CR_TAB
4540 "ld __tmp_reg__,%1+" CR_TAB
4542 "mov %C0,__tmp_reg__");
4544 else if (reg_unused_after (insn
, base
))
4546 return *l
= 4, ("ld %A0,%1+" CR_TAB
4553 return *l
= 6, ("ld %A0,%1+" CR_TAB
4557 TINY_SBIW (%E1
, %F1
, 3));
4563 avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4567 rtx base
= XEXP (src
, 0);
4568 int reg_dest
= true_regnum (dest
);
4569 int reg_base
= true_regnum (XEXP (base
, 0));
4571 if (reg_dest
== reg_base
)
4573 /* "ld r26,-X" is undefined */
4574 return *l
= 9, (TINY_ADIW (%I1
, %J1
, %o1
+3) CR_TAB
4576 "ld %C0,-%b1" CR_TAB
4577 "ld __tmp_reg__,-%b1" CR_TAB
4578 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
4580 "mov %B0,__tmp_reg__");
4582 else if (reg_dest
== reg_base
- 2)
4584 return *l
= 7, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4585 "ld %A0,%b1+" CR_TAB
4586 "ld %B0,%b1+" CR_TAB
4587 "ld __tmp_reg__,%b1+" CR_TAB
4589 "mov %C0,__tmp_reg__");
4591 else if (reg_unused_after (insn
, XEXP (base
, 0)))
4593 return *l
= 6, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4594 "ld %A0,%b1+" CR_TAB
4595 "ld %B0,%b1+" CR_TAB
4596 "ld %C0,%b1+" CR_TAB
4601 return *l
= 8, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4602 "ld %A0,%b1+" CR_TAB
4603 "ld %B0,%b1+" CR_TAB
4604 "ld %C0,%b1+" CR_TAB
4606 TINY_SBIW (%I1
, %J1
, %o1
+3));
4611 out_movsi_r_mr (rtx_insn
*insn
, rtx op
[], int *l
)
4615 rtx base
= XEXP (src
, 0);
4616 int reg_dest
= true_regnum (dest
);
4617 int reg_base
= true_regnum (base
);
4626 return avr_out_movsi_r_mr_reg_no_disp_tiny (insn
, op
, l
);
4628 if (reg_base
== REG_X
) /* (R26) */
4630 if (reg_dest
== REG_X
)
4631 /* "ld r26,-X" is undefined */
4632 return *l
=7, ("adiw r26,3" CR_TAB
4635 "ld __tmp_reg__,-X" CR_TAB
4638 "mov r27,__tmp_reg__");
4639 else if (reg_dest
== REG_X
- 2)
4640 return *l
=5, ("ld %A0,X+" CR_TAB
4642 "ld __tmp_reg__,X+" CR_TAB
4644 "mov %C0,__tmp_reg__");
4645 else if (reg_unused_after (insn
, base
))
4646 return *l
=4, ("ld %A0,X+" CR_TAB
4651 return *l
=5, ("ld %A0,X+" CR_TAB
4659 if (reg_dest
== reg_base
)
4660 return *l
=5, ("ldd %D0,%1+3" CR_TAB
4661 "ldd %C0,%1+2" CR_TAB
4662 "ldd __tmp_reg__,%1+1" CR_TAB
4664 "mov %B0,__tmp_reg__");
4665 else if (reg_base
== reg_dest
+ 2)
4666 return *l
=5, ("ld %A0,%1" CR_TAB
4667 "ldd %B0,%1+1" CR_TAB
4668 "ldd __tmp_reg__,%1+2" CR_TAB
4669 "ldd %D0,%1+3" CR_TAB
4670 "mov %C0,__tmp_reg__");
4672 return *l
=4, ("ld %A0,%1" CR_TAB
4673 "ldd %B0,%1+1" CR_TAB
4674 "ldd %C0,%1+2" CR_TAB
4678 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4680 int disp
= INTVAL (XEXP (base
, 1));
4683 return avr_out_movsi_r_mr_reg_disp_tiny (insn
, op
, l
);
4685 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4687 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4688 fatal_insn ("incorrect insn:",insn
);
4690 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
4691 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
4692 "ldd %A0,Y+60" CR_TAB
4693 "ldd %B0,Y+61" CR_TAB
4694 "ldd %C0,Y+62" CR_TAB
4695 "ldd %D0,Y+63" CR_TAB
4698 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
4699 "sbci r29,hi8(-%o1)" CR_TAB
4701 "ldd %B0,Y+1" CR_TAB
4702 "ldd %C0,Y+2" CR_TAB
4703 "ldd %D0,Y+3" CR_TAB
4704 "subi r28,lo8(%o1)" CR_TAB
4705 "sbci r29,hi8(%o1)");
4708 reg_base
= true_regnum (XEXP (base
, 0));
4709 if (reg_base
== REG_X
)
4712 if (reg_dest
== REG_X
)
4715 /* "ld r26,-X" is undefined */
4716 return ("adiw r26,%o1+3" CR_TAB
4719 "ld __tmp_reg__,-X" CR_TAB
4722 "mov r27,__tmp_reg__");
4725 if (reg_dest
== REG_X
- 2)
4726 return ("adiw r26,%o1" CR_TAB
4729 "ld __tmp_reg__,X+" CR_TAB
4731 "mov r26,__tmp_reg__");
4733 return ("adiw r26,%o1" CR_TAB
4740 if (reg_dest
== reg_base
)
4741 return *l
=5, ("ldd %D0,%D1" CR_TAB
4742 "ldd %C0,%C1" CR_TAB
4743 "ldd __tmp_reg__,%B1" CR_TAB
4744 "ldd %A0,%A1" CR_TAB
4745 "mov %B0,__tmp_reg__");
4746 else if (reg_dest
== reg_base
- 2)
4747 return *l
=5, ("ldd %A0,%A1" CR_TAB
4748 "ldd %B0,%B1" CR_TAB
4749 "ldd __tmp_reg__,%C1" CR_TAB
4750 "ldd %D0,%D1" CR_TAB
4751 "mov %C0,__tmp_reg__");
4752 return *l
=4, ("ldd %A0,%A1" CR_TAB
4753 "ldd %B0,%B1" CR_TAB
4754 "ldd %C0,%C1" CR_TAB
4757 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4758 return *l
=4, ("ld %D0,%1" CR_TAB
4762 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4763 return *l
=4, ("ld %A0,%1" CR_TAB
4767 else if (CONSTANT_ADDRESS_P (base
))
4769 if (io_address_operand (base
, SImode
))
4772 return ("in %A0,%i1" CR_TAB
4773 "in %B0,%i1+1" CR_TAB
4774 "in %C0,%i1+2" CR_TAB
4779 *l
= AVR_TINY
? 4 : 8;
4780 return ("lds %A0,%m1" CR_TAB
4781 "lds %B0,%m1+1" CR_TAB
4782 "lds %C0,%m1+2" CR_TAB
4787 fatal_insn ("unknown move insn:",insn
);
4792 avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4796 rtx base
= XEXP (dest
, 0);
4797 int reg_base
= true_regnum (base
);
4798 int reg_src
= true_regnum (src
);
4800 if (reg_base
== reg_src
)
4802 /* "ld r26,-X" is undefined */
4803 if (reg_unused_after (insn
, base
))
4805 return *l
= 7, ("mov __tmp_reg__, %B1" CR_TAB
4807 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4808 "st %0+,__tmp_reg__" CR_TAB
4814 return *l
= 9, ("mov __tmp_reg__, %B1" CR_TAB
4816 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4817 "st %0+,__tmp_reg__" CR_TAB
4820 TINY_SBIW (%E0
, %F0
, 3));
4823 else if (reg_base
== reg_src
+ 2)
4825 if (reg_unused_after (insn
, base
))
4826 return *l
= 7, ("mov __zero_reg__,%C1" CR_TAB
4827 "mov __tmp_reg__,%D1" CR_TAB
4830 "st %0+,__zero_reg__" CR_TAB
4831 "st %0,__tmp_reg__" CR_TAB
4832 "clr __zero_reg__");
4834 return *l
= 9, ("mov __zero_reg__,%C1" CR_TAB
4835 "mov __tmp_reg__,%D1" CR_TAB
4838 "st %0+,__zero_reg__" CR_TAB
4839 "st %0,__tmp_reg__" CR_TAB
4840 "clr __zero_reg__" CR_TAB
4841 TINY_SBIW (%E0
, %F0
, 3));
4844 return *l
= 6, ("st %0+,%A1" CR_TAB
4848 TINY_SBIW (%E0
, %F0
, 3));
4852 avr_out_movsi_mr_r_reg_disp_tiny (rtx op
[], int *l
)
4856 rtx base
= XEXP (dest
, 0);
4857 int reg_base
= REGNO (XEXP (base
, 0));
4858 int reg_src
=true_regnum (src
);
4860 if (reg_base
== reg_src
)
4863 return ("mov __tmp_reg__,%A2" CR_TAB
4864 "mov __zero_reg__,%B2" CR_TAB
4865 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4866 "st %b0+,__tmp_reg__" CR_TAB
4867 "st %b0+,__zero_reg__" CR_TAB
4868 "st %b0+,%C2" CR_TAB
4870 "clr __zero_reg__" CR_TAB
4871 TINY_SBIW (%I0
, %J0
, %o0
+3));
4873 else if (reg_src
== reg_base
- 2)
4876 return ("mov __tmp_reg__,%C2" CR_TAB
4877 "mov __zero_reg__,%D2" CR_TAB
4878 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4879 "st %b0+,%A0" CR_TAB
4880 "st %b0+,%B0" CR_TAB
4881 "st %b0+,__tmp_reg__" CR_TAB
4882 "st %b0,__zero_reg__" CR_TAB
4883 "clr __zero_reg__" CR_TAB
4884 TINY_SBIW (%I0
, %J0
, %o0
+3));
4887 return (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4888 "st %b0+,%A1" CR_TAB
4889 "st %b0+,%B1" CR_TAB
4890 "st %b0+,%C1" CR_TAB
4892 TINY_SBIW (%I0
, %J0
, %o0
+3));
4896 out_movsi_mr_r (rtx_insn
*insn
, rtx op
[], int *l
)
4900 rtx base
= XEXP (dest
, 0);
4901 int reg_base
= true_regnum (base
);
4902 int reg_src
= true_regnum (src
);
4908 if (CONSTANT_ADDRESS_P (base
))
4910 if (io_address_operand (base
, SImode
))
4912 return *l
=4,("out %i0, %A1" CR_TAB
4913 "out %i0+1,%B1" CR_TAB
4914 "out %i0+2,%C1" CR_TAB
4919 *l
= AVR_TINY
? 4 : 8;
4920 return ("sts %m0,%A1" CR_TAB
4921 "sts %m0+1,%B1" CR_TAB
4922 "sts %m0+2,%C1" CR_TAB
4927 if (reg_base
> 0) /* (r) */
4930 return avr_out_movsi_mr_r_reg_no_disp_tiny (insn
, op
, l
);
4932 if (reg_base
== REG_X
) /* (R26) */
4934 if (reg_src
== REG_X
)
4936 /* "st X+,r26" is undefined */
4937 if (reg_unused_after (insn
, base
))
4938 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
4941 "st X+,__tmp_reg__" CR_TAB
4945 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
4948 "st X+,__tmp_reg__" CR_TAB
4953 else if (reg_base
== reg_src
+ 2)
4955 if (reg_unused_after (insn
, base
))
4956 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
4957 "mov __tmp_reg__,%D1" CR_TAB
4960 "st %0+,__zero_reg__" CR_TAB
4961 "st %0,__tmp_reg__" CR_TAB
4962 "clr __zero_reg__");
4964 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
4965 "mov __tmp_reg__,%D1" CR_TAB
4968 "st %0+,__zero_reg__" CR_TAB
4969 "st %0,__tmp_reg__" CR_TAB
4970 "clr __zero_reg__" CR_TAB
4973 return *l
=5, ("st %0+,%A1" CR_TAB
4980 return *l
=4, ("st %0,%A1" CR_TAB
4981 "std %0+1,%B1" CR_TAB
4982 "std %0+2,%C1" CR_TAB
4985 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4987 int disp
= INTVAL (XEXP (base
, 1));
4990 return avr_out_movsi_mr_r_reg_disp_tiny (op
, l
);
4992 reg_base
= REGNO (XEXP (base
, 0));
4993 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4995 if (reg_base
!= REG_Y
)
4996 fatal_insn ("incorrect insn:",insn
);
4998 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4999 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
5000 "std Y+60,%A1" CR_TAB
5001 "std Y+61,%B1" CR_TAB
5002 "std Y+62,%C1" CR_TAB
5003 "std Y+63,%D1" CR_TAB
5006 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
5007 "sbci r29,hi8(-%o0)" CR_TAB
5009 "std Y+1,%B1" CR_TAB
5010 "std Y+2,%C1" CR_TAB
5011 "std Y+3,%D1" CR_TAB
5012 "subi r28,lo8(%o0)" CR_TAB
5013 "sbci r29,hi8(%o0)");
5015 if (reg_base
== REG_X
)
5018 if (reg_src
== REG_X
)
5021 return ("mov __tmp_reg__,r26" CR_TAB
5022 "mov __zero_reg__,r27" CR_TAB
5023 "adiw r26,%o0" CR_TAB
5024 "st X+,__tmp_reg__" CR_TAB
5025 "st X+,__zero_reg__" CR_TAB
5028 "clr __zero_reg__" CR_TAB
5031 else if (reg_src
== REG_X
- 2)
5034 return ("mov __tmp_reg__,r26" CR_TAB
5035 "mov __zero_reg__,r27" CR_TAB
5036 "adiw r26,%o0" CR_TAB
5039 "st X+,__tmp_reg__" CR_TAB
5040 "st X,__zero_reg__" CR_TAB
5041 "clr __zero_reg__" CR_TAB
5045 return ("adiw r26,%o0" CR_TAB
5052 return *l
=4, ("std %A0,%A1" CR_TAB
5053 "std %B0,%B1" CR_TAB
5054 "std %C0,%C1" CR_TAB
5057 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5058 return *l
=4, ("st %0,%D1" CR_TAB
5062 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5063 return *l
=4, ("st %0,%A1" CR_TAB
5067 fatal_insn ("unknown move insn:",insn
);
5072 output_movsisf (rtx_insn
*insn
, rtx operands
[], int *l
)
5075 rtx dest
= operands
[0];
5076 rtx src
= operands
[1];
5079 if (avr_mem_flash_p (src
)
5080 || avr_mem_flash_p (dest
))
5082 return avr_out_lpm (insn
, operands
, real_l
);
5088 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 4);
5092 if (REG_P (src
)) /* mov r,r */
5094 if (true_regnum (dest
) > true_regnum (src
))
5099 return ("movw %C0,%C1" CR_TAB
5103 return ("mov %D0,%D1" CR_TAB
5104 "mov %C0,%C1" CR_TAB
5105 "mov %B0,%B1" CR_TAB
5113 return ("movw %A0,%A1" CR_TAB
5117 return ("mov %A0,%A1" CR_TAB
5118 "mov %B0,%B1" CR_TAB
5119 "mov %C0,%C1" CR_TAB
5123 else if (CONSTANT_P (src
))
5125 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
5127 else if (MEM_P (src
))
5128 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
5130 else if (MEM_P (dest
))
5134 if (src
== CONST0_RTX (GET_MODE (dest
)))
5135 operands
[1] = zero_reg_rtx
;
5137 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
5140 output_asm_insn (templ
, operands
);
5145 fatal_insn ("invalid insn:", insn
);
5150 /* Handle loads of 24-bit types from memory to register. */
5153 avr_out_load_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
5157 rtx base
= XEXP (src
, 0);
5158 int reg_dest
= true_regnum (dest
);
5159 int reg_base
= true_regnum (base
);
5161 if (reg_base
== reg_dest
)
5163 return avr_asm_len (TINY_ADIW (%E1
, %F1
, 2) CR_TAB
5165 "ld __tmp_reg__,-%1" CR_TAB
5166 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
5168 "mov %B0,__tmp_reg__", op
, plen
, -8);
5172 avr_asm_len ("ld %A0,%1+" CR_TAB
5174 "ld %C0,%1", op
, plen
, -3);
5176 if (reg_dest
!= reg_base
- 2
5177 && !reg_unused_after (insn
, base
))
5179 avr_asm_len (TINY_SBIW (%E1
, %F1
, 2), op
, plen
, 2);
5186 avr_out_load_psi_reg_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
5190 rtx base
= XEXP (src
, 0);
5191 int reg_dest
= true_regnum (dest
);
5192 int reg_base
= true_regnum (base
);
5194 reg_base
= true_regnum (XEXP (base
, 0));
5195 if (reg_base
== reg_dest
)
5197 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
+2) CR_TAB
5199 "ld __tmp_reg__,-%b1" CR_TAB
5200 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
5202 "mov %B0,__tmp_reg__", op
, plen
, -8);
5206 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
5207 "ld %A0,%b1+" CR_TAB
5208 "ld %B0,%b1+" CR_TAB
5209 "ld %C0,%b1", op
, plen
, -5);
5211 if (reg_dest
!= reg_base
- 2
5212 && !reg_unused_after (insn
, XEXP (base
, 0)))
5213 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
+2), op
, plen
, 2);
5220 avr_out_load_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5224 rtx base
= XEXP (src
, 0);
5225 int reg_dest
= true_regnum (dest
);
5226 int reg_base
= true_regnum (base
);
5231 return avr_out_load_psi_reg_no_disp_tiny (insn
, op
, plen
);
5233 if (reg_base
== REG_X
) /* (R26) */
5235 if (reg_dest
== REG_X
)
5236 /* "ld r26,-X" is undefined */
5237 return avr_asm_len ("adiw r26,2" CR_TAB
5239 "ld __tmp_reg__,-X" CR_TAB
5242 "mov r27,__tmp_reg__", op
, plen
, -6);
5245 avr_asm_len ("ld %A0,X+" CR_TAB
5247 "ld %C0,X", op
, plen
, -3);
5249 if (reg_dest
!= REG_X
- 2
5250 && !reg_unused_after (insn
, base
))
5252 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
5258 else /* reg_base != REG_X */
5260 if (reg_dest
== reg_base
)
5261 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
5262 "ldd __tmp_reg__,%1+1" CR_TAB
5264 "mov %B0,__tmp_reg__", op
, plen
, -4);
5266 return avr_asm_len ("ld %A0,%1" CR_TAB
5267 "ldd %B0,%1+1" CR_TAB
5268 "ldd %C0,%1+2", op
, plen
, -3);
5271 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
5273 int disp
= INTVAL (XEXP (base
, 1));
5276 return avr_out_load_psi_reg_disp_tiny (insn
, op
, plen
);
5278 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
5280 if (REGNO (XEXP (base
, 0)) != REG_Y
)
5281 fatal_insn ("incorrect insn:",insn
);
5283 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
5284 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
5285 "ldd %A0,Y+61" CR_TAB
5286 "ldd %B0,Y+62" CR_TAB
5287 "ldd %C0,Y+63" CR_TAB
5288 "sbiw r28,%o1-61", op
, plen
, -5);
5290 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
5291 "sbci r29,hi8(-%o1)" CR_TAB
5293 "ldd %B0,Y+1" CR_TAB
5294 "ldd %C0,Y+2" CR_TAB
5295 "subi r28,lo8(%o1)" CR_TAB
5296 "sbci r29,hi8(%o1)", op
, plen
, -7);
5299 reg_base
= true_regnum (XEXP (base
, 0));
5300 if (reg_base
== REG_X
)
5303 if (reg_dest
== REG_X
)
5305 /* "ld r26,-X" is undefined */
5306 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
5308 "ld __tmp_reg__,-X" CR_TAB
5311 "mov r27,__tmp_reg__", op
, plen
, -6);
5314 avr_asm_len ("adiw r26,%o1" CR_TAB
5317 "ld %C0,X", op
, plen
, -4);
5319 if (reg_dest
!= REG_W
5320 && !reg_unused_after (insn
, XEXP (base
, 0)))
5321 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
5326 if (reg_dest
== reg_base
)
5327 return avr_asm_len ("ldd %C0,%C1" CR_TAB
5328 "ldd __tmp_reg__,%B1" CR_TAB
5329 "ldd %A0,%A1" CR_TAB
5330 "mov %B0,__tmp_reg__", op
, plen
, -4);
5332 return avr_asm_len ("ldd %A0,%A1" CR_TAB
5333 "ldd %B0,%B1" CR_TAB
5334 "ldd %C0,%C1", op
, plen
, -3);
5336 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5337 return avr_asm_len ("ld %C0,%1" CR_TAB
5339 "ld %A0,%1", op
, plen
, -3);
5340 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5341 return avr_asm_len ("ld %A0,%1" CR_TAB
5343 "ld %C0,%1", op
, plen
, -3);
5345 else if (CONSTANT_ADDRESS_P (base
))
5347 int n_words
= AVR_TINY
? 3 : 6;
5348 return avr_asm_len ("lds %A0,%m1" CR_TAB
5349 "lds %B0,%m1+1" CR_TAB
5350 "lds %C0,%m1+2", op
, plen
, -n_words
);
5353 fatal_insn ("unknown move insn:",insn
);
5359 avr_out_store_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
5363 rtx base
= XEXP (dest
, 0);
5364 int reg_base
= true_regnum (base
);
5365 int reg_src
= true_regnum (src
);
5367 if (reg_base
== reg_src
)
5369 avr_asm_len ("st %0,%A1" CR_TAB
5370 "mov __tmp_reg__,%B1" CR_TAB
5371 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
/* st X+, r27 is undefined */
5372 "st %0+,__tmp_reg__" CR_TAB
5373 "st %0,%C1", op
, plen
, -6);
5376 else if (reg_src
== reg_base
- 2)
5378 avr_asm_len ("st %0,%A1" CR_TAB
5379 "mov __tmp_reg__,%C1" CR_TAB
5380 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5382 "st %0,__tmp_reg__", op
, plen
, 6);
5386 avr_asm_len ("st %0+,%A1" CR_TAB
5388 "st %0,%C1", op
, plen
, -3);
5391 if (!reg_unused_after (insn
, base
))
5392 avr_asm_len (TINY_SBIW (%E0
, %F0
, 2), op
, plen
, 2);
5398 avr_out_store_psi_reg_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
5402 rtx base
= XEXP (dest
, 0);
5403 int reg_base
= REGNO (XEXP (base
, 0));
5404 int reg_src
= true_regnum (src
);
5406 if (reg_src
== reg_base
)
5407 avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5408 "mov __zero_reg__,%B1" CR_TAB
5409 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5410 "st %b0+,__tmp_reg__" CR_TAB
5411 "st %b0+,__zero_reg__" CR_TAB
5413 "clr __zero_reg__", op
, plen
, -8);
5414 else if (reg_src
== reg_base
- 2)
5415 avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
5416 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5417 "st %b0+,%A1" CR_TAB
5418 "st %b0+,%B1" CR_TAB
5419 "st %b0,__tmp_reg__", op
, plen
, -6);
5421 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5422 "st %b0+,%A1" CR_TAB
5423 "st %b0+,%B1" CR_TAB
5424 "st %b0,%C1", op
, plen
, -5);
5426 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5427 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
+2), op
, plen
, 2);
5432 /* Handle store of 24-bit type from register or zero to memory. */
5435 avr_out_store_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5439 rtx base
= XEXP (dest
, 0);
5440 int reg_base
= true_regnum (base
);
5442 if (CONSTANT_ADDRESS_P (base
))
5444 int n_words
= AVR_TINY
? 3 : 6;
5445 return avr_asm_len ("sts %m0,%A1" CR_TAB
5446 "sts %m0+1,%B1" CR_TAB
5447 "sts %m0+2,%C1", op
, plen
, -n_words
);
5450 if (reg_base
> 0) /* (r) */
5453 return avr_out_store_psi_reg_no_disp_tiny (insn
, op
, plen
);
5455 if (reg_base
== REG_X
) /* (R26) */
5457 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
5459 avr_asm_len ("st %0+,%A1" CR_TAB
5461 "st %0,%C1", op
, plen
, -3);
5463 if (!reg_unused_after (insn
, base
))
5464 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
5469 return avr_asm_len ("st %0,%A1" CR_TAB
5470 "std %0+1,%B1" CR_TAB
5471 "std %0+2,%C1", op
, plen
, -3);
5473 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
5475 int disp
= INTVAL (XEXP (base
, 1));
5478 return avr_out_store_psi_reg_disp_tiny (insn
, op
, plen
);
5480 reg_base
= REGNO (XEXP (base
, 0));
5482 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5484 if (reg_base
!= REG_Y
)
5485 fatal_insn ("incorrect insn:",insn
);
5487 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
5488 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
5489 "std Y+61,%A1" CR_TAB
5490 "std Y+62,%B1" CR_TAB
5491 "std Y+63,%C1" CR_TAB
5492 "sbiw r28,%o0-61", op
, plen
, -5);
5494 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5495 "sbci r29,hi8(-%o0)" CR_TAB
5497 "std Y+1,%B1" CR_TAB
5498 "std Y+2,%C1" CR_TAB
5499 "subi r28,lo8(%o0)" CR_TAB
5500 "sbci r29,hi8(%o0)", op
, plen
, -7);
5502 if (reg_base
== REG_X
)
5505 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
5507 avr_asm_len ("adiw r26,%o0" CR_TAB
5510 "st X,%C1", op
, plen
, -4);
5512 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5513 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
5518 return avr_asm_len ("std %A0,%A1" CR_TAB
5519 "std %B0,%B1" CR_TAB
5520 "std %C0,%C1", op
, plen
, -3);
5522 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5523 return avr_asm_len ("st %0,%C1" CR_TAB
5525 "st %0,%A1", op
, plen
, -3);
5526 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5527 return avr_asm_len ("st %0,%A1" CR_TAB
5529 "st %0,%C1", op
, plen
, -3);
5531 fatal_insn ("unknown move insn:",insn
);
5536 /* Move around 24-bit stuff. */
5539 avr_out_movpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5544 if (avr_mem_flash_p (src
)
5545 || avr_mem_flash_p (dest
))
5547 return avr_out_lpm (insn
, op
, plen
);
5550 if (register_operand (dest
, VOIDmode
))
5552 if (register_operand (src
, VOIDmode
)) /* mov r,r */
5554 if (true_regnum (dest
) > true_regnum (src
))
5556 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
5559 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
5561 return avr_asm_len ("mov %B0,%B1" CR_TAB
5562 "mov %A0,%A1", op
, plen
, 2);
5567 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
5569 avr_asm_len ("mov %A0,%A1" CR_TAB
5570 "mov %B0,%B1", op
, plen
, -2);
5572 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
5575 else if (CONSTANT_P (src
))
5577 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
5579 else if (MEM_P (src
))
5580 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
5582 else if (MEM_P (dest
))
5587 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
5589 return avr_out_store_psi (insn
, xop
, plen
);
5592 fatal_insn ("invalid insn:", insn
);
5597 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5601 rtx x
= XEXP (dest
, 0);
5603 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
5605 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
5606 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5607 "st %b0,__tmp_reg__", op
, plen
, -4);
5611 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5612 "st %b0,%1", op
, plen
, -3);
5615 if (!reg_unused_after (insn
, XEXP (x
, 0)))
5616 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, 2);
/* Output asm for a 1-byte store to memory: constant addresses use OUT/STS,
   reg+disp addresses use Y (adjusting r28/r29 for large offsets) or X
   (ADIW/SBIW on r26/r27), otherwise plain STD/ST.  PLEN counts length.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
5622 out_movqi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
5626 rtx x
= XEXP (dest
, 0);
5628 if (CONSTANT_ADDRESS_P (x
))
/* STS is 1 word on AVR_TINY, 2 words otherwise.  */
5630 int n_words
= AVR_TINY
? 1 : 2;
5631 return io_address_operand (x
, QImode
)
5632 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
5633 : avr_asm_len ("sts %m0,%1", op
, plen
, -n_words
);
5635 else if (GET_CODE (x
) == PLUS
5636 && REG_P (XEXP (x
, 0))
5637 && CONST_INT_P (XEXP (x
, 1)))
5639 /* memory access by reg+disp */
5641 int disp
= INTVAL (XEXP (x
, 1));
5644 return avr_out_movqi_mr_r_reg_disp_tiny (insn
, op
, plen
);
/* Displacement exceeds the 0..63 STD range: only Y can be adjusted.  */
5646 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
5648 if (REGNO (XEXP (x
, 0)) != REG_Y
)
5649 fatal_insn ("incorrect insn:",insn
);
5651 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
5652 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
5653 "std Y+63,%1" CR_TAB
5654 "sbiw r28,%o0-63", op
, plen
, -3);
/* Very large displacement: add/subtract the full 16-bit offset to Y.  */
5656 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5657 "sbci r29,hi8(-%o0)" CR_TAB
5659 "subi r28,lo8(%o0)" CR_TAB
5660 "sbci r29,hi8(%o0)", op
, plen
, -5);
/* X has no displacement addressing; adjust X with ADIW/SBIW instead.  */
5662 else if (REGNO (XEXP (x
, 0)) == REG_X
)
5664 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
5666 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
5667 "adiw r26,%o0" CR_TAB
5668 "st X,__tmp_reg__", op
, plen
, -3);
5672 avr_asm_len ("adiw r26,%o0" CR_TAB
5673 "st X,%1", op
, plen
, -2);
5676 if (!reg_unused_after (insn
, XEXP (x
, 0)))
5677 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
5682 return avr_asm_len ("std %0,%1", op
, plen
, -1);
5685 return avr_asm_len ("st %0,%1", op
, plen
, -1);
5689 /* Helper for the next function for XMEGA. It does the same
5690 but with low byte first. */
/* XMEGA variant of the HImode register->memory store: writes the LOW byte
   first (see comment at orig. 5701-5702), unlike the classic cores.
   Handles constant addresses, plain register bases, reg+disp, PRE_DEC and
   POST_INC; anything else is a fatal_insn.
   NOTE(review): extraction-mangled text (e.g. one '?:' arm at orig. 5732
   is missing); code left byte-identical.  */
5693 avr_out_movhi_mr_r_xmega (rtx_insn
*insn
, rtx op
[], int *plen
)
5697 rtx base
= XEXP (dest
, 0);
5698 int reg_base
= true_regnum (base
);
5699 int reg_src
= true_regnum (src
);
5701 /* "volatile" forces writing low byte first, even if less efficient,
5702 for correct operation with 16-bit I/O registers like SP. */
5703 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
5705 if (CONSTANT_ADDRESS_P (base
))
5707 return io_address_operand (base
, HImode
)
5708 ? avr_asm_len ("out %i0,%A1" CR_TAB
5709 "out %i0+1,%B1", op
, plen
, -2)
5711 : avr_asm_len ("sts %m0,%A1" CR_TAB
5712 "sts %m0+1,%B1", op
, plen
, -4);
5717 if (reg_base
!= REG_X
)
5718 return avr_asm_len ("st %0,%A1" CR_TAB
5719 "std %0+1,%B1", op
, plen
, -2);
5721 if (reg_src
== REG_X
)
5722 /* "st X+,r26" and "st -X,r26" are undefined. */
5723 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5726 "st X,__tmp_reg__", op
, plen
, -4);
5728 avr_asm_len ("st X+,%A1" CR_TAB
5729 "st X,%B1", op
, plen
, -2);
/* Restore X (SBIW r26,1) when it is still live after INSN.  */
5731 return reg_unused_after (insn
, base
)
5733 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
5735 else if (GET_CODE (base
) == PLUS
)
5737 int disp
= INTVAL (XEXP (base
, 1));
5738 reg_base
= REGNO (XEXP (base
, 0));
5739 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5741 if (reg_base
!= REG_Y
)
5742 fatal_insn ("incorrect insn:",insn
);
5744 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
5745 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5746 "std Y+62,%A1" CR_TAB
5747 "std Y+63,%B1" CR_TAB
5748 "sbiw r28,%o0-62", op
, plen
, -4)
5750 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5751 "sbci r29,hi8(-%o0)" CR_TAB
5753 "std Y+1,%B1" CR_TAB
5754 "subi r28,lo8(%o0)" CR_TAB
5755 "sbci r29,hi8(%o0)", op
, plen
, -6);
5758 if (reg_base
!= REG_X
)
5759 return avr_asm_len ("std %A0,%A1" CR_TAB
5760 "std %B0,%B1", op
, plen
, -2);
/* X base with displacement: when the source IS X, go through
   __tmp_reg__/__zero_reg__ and clear __zero_reg__ afterwards.  */
5762 return reg_src
== REG_X
5763 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5764 "mov __zero_reg__,r27" CR_TAB
5765 "adiw r26,%o0" CR_TAB
5766 "st X+,__tmp_reg__" CR_TAB
5767 "st X,__zero_reg__" CR_TAB
5768 "clr __zero_reg__" CR_TAB
5769 "sbiw r26,%o0+1", op
, plen
, -7)
5771 : avr_asm_len ("adiw r26,%o0" CR_TAB
5774 "sbiw r26,%o0+1", op
, plen
, -4);
5776 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5778 if (!mem_volatile_p
)
5779 return avr_asm_len ("st %0,%B1" CR_TAB
5780 "st %0,%A1", op
, plen
, -2);
5782 return REGNO (XEXP (base
, 0)) == REG_X
5783 ? avr_asm_len ("sbiw r26,2" CR_TAB
5786 "sbiw r26,1", op
, plen
, -4)
5788 : avr_asm_len ("sbiw %r0,2" CR_TAB
5790 "std %p0+1,%B1", op
, plen
, -3);
5792 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5794 return avr_asm_len ("st %0,%A1" CR_TAB
5795 "st %0,%B1", op
, plen
, -2);
5798 fatal_insn ("unknown move insn:",insn
);
/* AVR_TINY: HImode store through a plain register base (no displacement).
   Special-cases source == base (value must be saved before the base is
   advanced with TINY_ADIW); the cheap "st %0+" form is only used for
   non-volatile MEMs whose base dies after INSN.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
5803 avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5807 rtx base
= XEXP (dest
, 0);
5808 int reg_base
= true_regnum (base
);
5809 int reg_src
= true_regnum (src
);
5810 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
5812 if (reg_base
== reg_src
)
5814 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
5815 ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5817 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5818 "st %0,__tmp_reg__", op
, plen
, -5)
5819 : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5820 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5821 "st %0,__tmp_reg__" CR_TAB
5822 TINY_SBIW (%E0
, %F0
, 1) CR_TAB
5823 "st %0, %A1", op
, plen
, -7);
5826 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5827 ? avr_asm_len ("st %0+,%A1" CR_TAB
5828 "st %0,%B1", op
, plen
, -2)
5829 : avr_asm_len (TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5831 "st -%0,%A1", op
, plen
, -4);
/* AVR_TINY: HImode store to reg+displacement.  Advances the base by
   disp+1 with TINY_ADIW, stores high byte then low byte via "st -%b0",
   and restores the base with TINY_SBIW when it is still live.  The
   source == base case goes through __tmp_reg__/__zero_reg__.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
5835 avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5839 rtx base
= XEXP (dest
, 0);
5840 int reg_base
= REGNO (XEXP (base
, 0));
5841 int reg_src
= true_regnum (src
);
5843 if (reg_src
== reg_base
)
5844 avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5845 "mov __zero_reg__,%B1" CR_TAB
5846 TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5847 "st %b0,__zero_reg__" CR_TAB
5848 "st -%b0,__tmp_reg__" CR_TAB
5849 "clr __zero_reg__", op
, plen
, -7);
5851 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5853 "st -%b0,%A1", op
, plen
, -4);
5855 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5856 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, 2);
/* AVR_TINY: HImode store to a POST_INC address, high byte first.
   Bumps the base by 1, stores down with "st -", then advances by 2 so the
   base ends up past the stored word.  OP[] are the operands, PLEN the
   length counter.  NOTE(review): extraction-mangled text; code left
   byte-identical.  */
5862 avr_out_movhi_mr_r_post_inc_tiny (rtx op
[], int *plen
)
5864 return avr_asm_len (TINY_ADIW (%I0
, %J0
, 1) CR_TAB
5866 "st -%p0,%A1" CR_TAB
5867 TINY_ADIW (%I0
, %J0
, 2), op
, plen
, -6);
/* Output asm for a 2-byte register->memory store on classic cores:
   high byte is written first (see comment at orig. 5880-5882).  Delegates
   to the xmega variant and to the AVR_TINY helpers where applicable;
   otherwise handles constant addresses, register bases (with the X
   special cases), reg+disp, PRE_DEC and POST_INC.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
5871 out_movhi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
5875 rtx base
= XEXP (dest
, 0);
5876 int reg_base
= true_regnum (base
);
5877 int reg_src
= true_regnum (src
);
5880 /* "volatile" forces writing high-byte first (no-xmega) resp.
5881 low-byte first (xmega) even if less efficient, for correct
5882 operation with 16-bit I/O registers like. */
5885 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
5887 mem_volatile_p
= MEM_VOLATILE_P (dest
);
5889 if (CONSTANT_ADDRESS_P (base
))
/* STS pair is 2 words on AVR_TINY, 4 words otherwise.  */
5891 int n_words
= AVR_TINY
? 2 : 4;
5892 return io_address_operand (base
, HImode
)
5893 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
5894 "out %i0,%A1", op
, plen
, -2)
5896 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
5897 "sts %m0,%A1", op
, plen
, -n_words
);
5903 return avr_out_movhi_mr_r_reg_no_disp_tiny (insn
, op
, plen
);
5905 if (reg_base
!= REG_X
)
5906 return avr_asm_len ("std %0+1,%B1" CR_TAB
5907 "st %0,%A1", op
, plen
, -2);
5909 if (reg_src
== REG_X
)
5910 /* "st X+,r26" and "st -X,r26" are undefined. */
5911 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
5912 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5915 "st X,__tmp_reg__", op
, plen
, -4)
5917 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5919 "st X,__tmp_reg__" CR_TAB
5921 "st X,r26", op
, plen
, -5);
5923 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5924 ? avr_asm_len ("st X+,%A1" CR_TAB
5925 "st X,%B1", op
, plen
, -2)
5926 : avr_asm_len ("adiw r26,1" CR_TAB
5928 "st -X,%A1", op
, plen
, -3);
5930 else if (GET_CODE (base
) == PLUS
)
5932 int disp
= INTVAL (XEXP (base
, 1));
5935 return avr_out_movhi_mr_r_reg_disp_tiny (insn
, op
, plen
);
5937 reg_base
= REGNO (XEXP (base
, 0));
5938 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5940 if (reg_base
!= REG_Y
)
5941 fatal_insn ("incorrect insn:",insn
);
5943 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
5944 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5945 "std Y+63,%B1" CR_TAB
5946 "std Y+62,%A1" CR_TAB
5947 "sbiw r28,%o0-62", op
, plen
, -4)
5949 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5950 "sbci r29,hi8(-%o0)" CR_TAB
5951 "std Y+1,%B1" CR_TAB
5953 "subi r28,lo8(%o0)" CR_TAB
5954 "sbci r29,hi8(%o0)", op
, plen
, -6);
5957 if (reg_base
!= REG_X
)
5958 return avr_asm_len ("std %B0,%B1" CR_TAB
5959 "std %A0,%A1", op
, plen
, -2);
/* X base with displacement; source == X needs the tmp/zero-reg dance.  */
5961 return reg_src
== REG_X
5962 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5963 "mov __zero_reg__,r27" CR_TAB
5964 "adiw r26,%o0+1" CR_TAB
5965 "st X,__zero_reg__" CR_TAB
5966 "st -X,__tmp_reg__" CR_TAB
5967 "clr __zero_reg__" CR_TAB
5968 "sbiw r26,%o0", op
, plen
, -7)
5970 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
5973 "sbiw r26,%o0", op
, plen
, -4);
5975 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5977 return avr_asm_len ("st %0,%B1" CR_TAB
5978 "st %0,%A1", op
, plen
, -2);
5980 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5982 if (!mem_volatile_p
)
5983 return avr_asm_len ("st %0,%A1" CR_TAB
5984 "st %0,%B1", op
, plen
, -2);
5987 return avr_out_movhi_mr_r_post_inc_tiny (op
, plen
);
/* Volatile POST_INC on classic cores: write high byte first by
   adjusting the pointer around the stores.  */
5989 return REGNO (XEXP (base
, 0)) == REG_X
5990 ? avr_asm_len ("adiw r26,1" CR_TAB
5993 "adiw r26,2", op
, plen
, -4)
5995 : avr_asm_len ("std %p0+1,%B1" CR_TAB
5997 "adiw %r0,2", op
, plen
, -3);
5999 fatal_insn ("unknown move insn:",insn
);
6003 /* Return 1 if frame pointer for current function required. */
/* Implement `TARGET_FRAME_POINTER_REQUIRED'.  A frame pointer is needed
   when the function calls alloca or setjmp, has nonlocal labels, takes
   no register arguments (crtl->args.info.nregs == 0, i.e. incoming args
   live on the stack), or has a nonzero frame.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6006 avr_frame_pointer_required_p (void)
6008 return (cfun
->calls_alloca
6009 || cfun
->calls_setjmp
6010 || cfun
->has_nonlocal_label
6011 || crtl
->args
.info
.nregs
== 0
6012 || get_frame_size () > 0);
6016 /* Returns the condition of the branch following INSN, where INSN is some
6017 comparison. If the next insn is not a branch or the condition code set
6018 by INSN might be used by more insns than the next one, return UNKNOWN.
6019 For now, just look at the next insn, which misses some opportunities like
/* Return the RTX code of the condition used by the branch that follows
   comparison INSN, provided the next real insn is a single-set branch
   (if_then_else of pc) in which REG_CC dies.  The fallback return for
   the non-matching case is on a line missing from this extraction.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6023 compare_condition (rtx_insn
*insn
)
6026 rtx_insn
*next
= next_real_nondebug_insn (insn
);
6030 // If SREG does not die in the next insn, it is used in more than one
6031 // branch. This can happen due to pass .avr-ifelse optimizations.
6032 && dead_or_set_regno_p (next
, REG_CC
)
6033 // Branches are (set (pc) (if_then_else (COND (...)))).
6034 && (set
= single_set (next
))
6035 && GET_CODE (SET_SRC (set
)) == IF_THEN_ELSE
)
6037 return GET_CODE (XEXP (SET_SRC (set
), 0));
6044 /* Returns true if INSN is a tst insn that only tests the sign. */
/* True iff the branch following comparison INSN uses GE or LT, i.e. the
   comparison only tests the sign bit.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6047 compare_sign_p (rtx_insn
*insn
)
6049 RTX_CODE cond
= compare_condition (insn
);
6050 return (cond
== GE
|| cond
== LT
);
6054 /* Returns true if INSN is a compare insn with the EQ or NE condition. */
/* True iff the branch following comparison INSN uses EQ or NE.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6057 compare_eq_p (rtx_insn
*insn
)
6059 RTX_CODE cond
= compare_condition (insn
);
6060 return (cond
== EQ
|| cond
== NE
);
6064 /* Implement `TARGET_CANONICALIZE_COMPARISON'. */
6065 /* Basically tries to convert "difficult" comparisons like GT[U]
6066 and LE[U] to simple ones. Some asymmetric comparisons can be
6067 transformed to EQ or NE against zero. */
/* Implement `TARGET_CANONICALIZE_COMPARISON' (see comment at orig. 6065).
   Rewrites GT/LE/GTU/LEU ("difficult" codes) into GE/LT/GEU/LTU by
   swapping reg-reg operands or by incrementing a constant RHS; also maps
   some comparisons against 1 to EQ/NE against 0.  ICODE is updated in
   place; OP0_FIXED presumably forbids swapping *op0/*op1 -- confirm.
   NOTE(review): extraction-mangled text (several condition lines and the
   early-return bodies are missing); code left byte-identical.  */
6070 avr_canonicalize_comparison (int *icode
, rtx
*op0
, rtx
*op1
, bool op0_fixed
)
6072 enum rtx_code code
= (enum rtx_code
) *icode
;
6073 machine_mode mode
= GET_MODE (*op0
);
6075 bool signed_p
= code
== GT
|| code
== LE
;
6076 bool unsigned_p
= code
== GTU
|| code
== LEU
;
6077 bool difficult_p
= signed_p
|| unsigned_p
;
6079 if (// Only do integers and fixed-points.
6080 (! SCALAR_INT_MODE_P (mode
)
6081 && ! ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
6082 // Only do comparisons against a register.
6083 || ! register_operand (*op0
, mode
))
6086 // Canonicalize "difficult" reg-reg comparisons.
6090 && register_operand (*op1
, mode
))
6092 std::swap (*op0
, *op1
);
6093 *icode
= (int) swap_condition (code
);
6097 // Canonicalize comparisons against compile-time constants.
6099 if (CONST_INT_P (*op1
)
6100 || CONST_FIXED_P (*op1
))
6102 // INT_MODE of the same size.
6103 scalar_int_mode imode
= int_mode_for_mode (mode
).require ();
6105 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (imode
);
/* For signed codes the largest value is mask >> 1 (MSB is the sign);
   incrementing past it would overflow, hence the != maxval guard.  */
6106 unsigned HOST_WIDE_INT maxval
= signed_p
? mask
>> 1 : mask
;
6108 // Convert value *op1 to imode.
6109 rtx xval
= simplify_gen_subreg (imode
, *op1
, mode
, 0);
6111 // Canonicalize difficult comparisons against const.
6113 && (UINTVAL (xval
) & mask
) != maxval
)
6115 // Convert *op0 > *op1 to *op0 >= 1 + *op1.
6116 // Convert *op0 <= *op1 to *op0 < 1 + *op1.
6117 xval
= simplify_binary_operation (PLUS
, imode
, xval
, const1_rtx
);
6119 // Convert value back to its original mode.
6120 *op1
= simplify_gen_subreg (mode
, xval
, imode
, 0);
6122 // Map > to >= and <= to <.
6123 *icode
= (int) avr_normalize_condition (code
);
6128 // Some asymmetric comparisons can be turned into EQ or NE.
6129 if (code
== LTU
&& xval
== const1_rtx
)
6132 *op1
= CONST0_RTX (mode
);
6136 if (code
== GEU
&& xval
== const1_rtx
)
6139 *op1
= CONST0_RTX (mode
);
6146 /* Output compare instruction
6148 compare (XOP[0], XOP[1])
6150 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
6151 XOP[2] is an 8-bit scratch register as needed.
6153 PLEN == NULL: Output instructions.
6154 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
6155 Don't output anything. */
/* Output a compare of register XOP[0] against constant XOP[1] (see header
   comment at orig. 6146-6155): tries OR/AND tricks for ==+-1 tests,
   CPI/CPC shortcuts for 0xabab-style constants in d-regs, SBIW/ADIW for
   small word constants, then falls back to byte-wise CP/CPC/CPI/SBCI,
   loading immediates into scratch XOP[2] when needed.
   NOTE(review): extraction-mangled text (declarations of xreg/xval/mode
   and several branch lines are missing); code left byte-identical.  */
6158 avr_out_compare (rtx_insn
*insn
, rtx
*xop
, int *plen
)
6160 /* Register to compare and value to compare against. */
6164 /* MODE of the comparison. */
6167 /* Number of bytes to operate on. */
6168 int n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
6170 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
6171 int clobber_val
= -1;
6173 /* Map fixed mode operands to integer operands with the same binary
6174 representation. They are easier to handle in the remainder. */
6176 if (CONST_FIXED_P (xval
))
6178 xreg
= avr_to_int_mode (xop
[0]);
6179 xval
= avr_to_int_mode (xop
[1]);
6182 mode
= GET_MODE (xreg
);
6184 gcc_assert (REG_P (xreg
));
6185 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
6186 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
6191 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
6192 against 0 by ORing the bytes. This is one instruction shorter.
6193 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
6194 and therefore don't use this. */
6196 if (!test_hard_reg_class (LD_REGS
, xreg
)
6197 && compare_eq_p (insn
)
6198 && reg_unused_after (insn
, xreg
))
6200 if (xval
== const1_rtx
)
6202 avr_asm_len ("dec %A0" CR_TAB
6203 "or %A0,%B0", xop
, plen
, 2);
6206 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
6209 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
6213 else if (xval
== constm1_rtx
)
6216 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
6219 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
6221 return avr_asm_len ("and %A0,%B0" CR_TAB
6222 "com %A0", xop
, plen
, 2);
6226 /* Comparisons == -1 and != -1 of a d-register that's used after the
6227 comparison. (If it's unused after we use CPI / SBCI or ADIW sequence
6228 from below.) Instead of CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx we can
6229 use CPI Rlo,-1 / CPC Rhi,Rlo which is 1 instruction shorter:
6230 If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
6231 when CPC'ing the high part. If CPI is false then CPC cannot render
6232 the result to true. This also works for the more generic case where
6233 the constant is of the form 0xabab. */
6236 && xval
!= const0_rtx
6237 && test_hard_reg_class (LD_REGS
, xreg
)
6238 && compare_eq_p (insn
)
6239 && !reg_unused_after (insn
, xreg
))
6241 rtx xlo8
= simplify_gen_subreg (QImode
, xval
, mode
, 0);
6242 rtx xhi8
= simplify_gen_subreg (QImode
, xval
, mode
, 1);
6244 if (INTVAL (xlo8
) == INTVAL (xhi8
))
6249 return avr_asm_len ("cpi %A0,%1" CR_TAB
6250 "cpc %B0,%A0", xop
, plen
, 2);
/* General case: compare byte by byte, low to high.  */
6254 for (int i
= 0; i
< n_bytes
; i
++)
6256 /* We compare byte-wise. */
6257 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
6258 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
6260 /* 8-bit value to compare with this byte. */
6261 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6263 /* Registers R16..R31 can operate with immediate. */
6264 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6267 xop
[1] = gen_int_mode (val8
, QImode
);
6269 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
6272 && test_hard_reg_class (ADDW_REGS
, reg8
))
6274 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
6276 if (IN_RANGE (val16
, 0, 63)
6278 || reg_unused_after (insn
, xreg
)))
6281 avr_asm_len (TINY_SBIW (%A0
, %B0
, %1), xop
, plen
, 2);
6283 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
/* EQ/NE against -63..-1 and dead reg: ADIW of the negated value.  */
6290 && IN_RANGE (val16
, -63, -1)
6291 && compare_eq_p (insn
)
6292 && reg_unused_after (insn
, xreg
))
6295 ? avr_asm_len (TINY_ADIW (%A0
, %B0
, %n1
), xop
, plen
, 2)
6296 : avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
6300 /* Comparing against 0 is easy. */
6305 ? "cp %0,__zero_reg__"
6306 : "cpc %0,__zero_reg__", xop
, plen
, 1);
6310 /* Upper registers can compare and subtract-with-carry immediates.
6311 Notice that compare instructions do the same as respective subtract
6312 instruction; the only difference is that comparisons don't write
6313 the result back to the target register. */
6319 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
6322 else if (reg_unused_after (insn
, xreg
))
6324 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
6329 /* Must load the value into the scratch register. */
6331 gcc_assert (REG_P (xop
[2]));
/* Reuse the scratch if it already holds this byte value.  */
6333 if (clobber_val
!= (int) val8
)
6334 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
6335 clobber_val
= (int) val8
;
6339 : "cpc %0,%2", xop
, plen
, 1);
6346 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
/* Prepare operands of compare_const_di2 for avr_out_compare: the 64-bit
   accumulator is hard-wired to reg:DI 18 (ACC_A), then delegate.
   NOTE(review): extraction-mangled text (xop[] declaration and xop[1]
   setup are on missing lines); code left byte-identical.  */
6349 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6353 xop
[0] = gen_rtx_REG (DImode
, 18);
6357 return avr_out_compare (insn
, xop
, plen
);
6360 /* Output test instruction for HImode. */
/* Output a HImode test against zero: TST of the high byte when only the
   sign is needed, OR of the bytes when the operand may be clobbered and
   the test is EQ/NE, else a full avr_out_compare.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6363 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
6365 if (compare_sign_p (insn
))
6367 avr_asm_len ("tst %B0", op
, plen
, -1);
6369 else if (reg_unused_after (insn
, op
[0])
6370 && compare_eq_p (insn
))
6372 /* Faster than sbiw if we can clobber the operand. */
6373 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
6377 avr_out_compare (insn
, op
, plen
);
6384 /* Output test instruction for PSImode. */
/* Output a PSImode (24-bit) test against zero; same strategy as the
   HImode variant but over three bytes.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6387 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
6389 if (compare_sign_p (insn
))
6391 avr_asm_len ("tst %C0", op
, plen
, -1);
6393 else if (reg_unused_after (insn
, op
[0])
6394 && compare_eq_p (insn
))
6396 /* Faster than sbiw if we can clobber the operand. */
6397 avr_asm_len ("or %A0,%B0" CR_TAB
6398 "or %A0,%C0", op
, plen
, -2);
6402 avr_out_compare (insn
, op
, plen
);
6409 /* Output test instruction for SImode. */
/* Output an SImode test against zero; same strategy as the HImode
   variant but over four bytes.
   NOTE(review): extraction-mangled text; code left byte-identical.  */
6412 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
6414 if (compare_sign_p (insn
))
6416 avr_asm_len ("tst %D0", op
, plen
, -1);
6418 else if (reg_unused_after (insn
, op
[0])
6419 && compare_eq_p (insn
))
6421 /* Faster than sbiw if we can clobber the operand. */
6422 avr_asm_len ("or %A0,%B0" CR_TAB
6424 "or %A0,%D0", op
, plen
, -3);
6428 avr_out_compare (insn
, op
, plen
);
6435 /* Output a comparison of a zero- or sign-extended register against a
6436 plain register. CODE is SIGN_EXTEND or ZERO_EXTEND. Return "".
6438 PLEN != 0: Set *PLEN to the code length in words. Don't output anything.
6439 PLEN == 0: Print instructions. */
/* Compare a zero-/sign-extended register against a plain register (see
   header comment at orig. 6435-6439).  Materializes the extension bytes
   in __tmp_reg__ (SIGN_EXTEND) or __zero_reg__/0 (ZERO_EXTEND, on a
   missing line), emits CP for the low byte, then CPC per further byte.
   NOTE(review): extraction-mangled text ('zreg'/'zex'/'regs' setup is on
   missing lines); code left byte-identical.  */
6442 avr_out_cmp_ext (rtx xop
[], enum rtx_code code
, int *plen
)
6444 // The smaller reg is the one that's to be extended. Get its index as z.
6445 int z
= GET_MODE_SIZE (GET_MODE (xop
[1])) < GET_MODE_SIZE (GET_MODE (xop
[0]));
6447 rtx reg
= xop
[1 - z
];
6448 machine_mode mode
= GET_MODE (reg
);
6449 machine_mode zmode
= GET_MODE (zreg
);
6455 // zex holds the extended bytes above zreg. This is 0 for ZERO_EXTEND,
6456 // and 0 or -1 for SIGN_EXTEND.
6458 if (code
== SIGN_EXTEND
)
6460 // Sign-extend the high-byte of zreg to tmp_reg.
6461 int zmsb
= GET_MODE_SIZE (zmode
) - 1;
6462 rtx xzmsb
= simplify_gen_subreg (QImode
, zreg
, zmode
, zmsb
);
6464 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
6465 "rol __tmp_reg__" CR_TAB
6466 "sbc __tmp_reg__,__tmp_reg__", &xzmsb
, plen
, 3);
6469 else if (code
== ZERO_EXTEND
)
6476 // Now output n_bytes bytes of the very comparison.
6478 int n_bytes
= GET_MODE_SIZE (mode
);
6480 avr_asm_len ("cp %0,%1", xop
, plen
, 1);
6482 for (int b
= 1; b
< n_bytes
; ++b
)
6485 regs
[1 - z
] = simplify_gen_subreg (QImode
, reg
, mode
, b
);
6486 regs
[z
] = (b
< GET_MODE_SIZE (zmode
)
6487 ? simplify_gen_subreg (QImode
, zreg
, zmode
, b
)
6490 avr_asm_len ("cpc %0,%1", regs
, plen
, 1);
6497 /* Generate asm equivalent for various shifts. This only handles cases
6498 that are not already carefully hand-optimized in ?sh??i3_out.
6500 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
6501 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
6502 OPERANDS[3] is a QImode scratch register from LD regs if
6503 available and SCRATCH, otherwise (no scratch available)
6505 TEMPL is an assembler template that shifts by one position.
6506 T_LEN is the length of this template. */
/* Emit a shift of OPERANDS[0] by OPERANDS[2] using TEMPL, a one-position
   shift template of T_LEN words (see header comment at orig. 6497-6506).
   Small constant counts are unrolled inline; otherwise a counted loop is
   emitted, with the counter in a scratch reg, in __zero_reg__ (one-bit
   trick), or in an LD reg saved through __tmp_reg__.
   NOTE(review): extraction-mangled text (op[]/op_mov[] declarations and
   some branches are missing); code left byte-identical.  */
6509 out_shift_with_cnt (const char *templ
, rtx_insn
*insn
, rtx operands
[],
6510 int *plen
, int t_len
)
6512 bool second_label
= true;
6513 bool saved_in_tmp
= false;
6514 bool use_zero_reg
= false;
6517 op
[0] = operands
[0];
6518 op
[1] = operands
[1];
6519 op
[2] = operands
[2];
6520 op
[3] = operands
[3];
6525 if (CONST_INT_P (operands
[2]))
6527 /* Operand 3 is a scratch register if this is a
6528 parallel with three elements i.e. a set,
6529 a clobber of a scratch, and clobber of REG_CC.
6530 If a scratch reg is not available, then the parallel
6531 will contain only a set and clobber of REG_CC. */
6532 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
6533 && XVECLEN (PATTERN (insn
), 0) == 3
6534 && REG_P (operands
[3]));
6535 int count
= INTVAL (operands
[2]);
6536 int max_len
= 10; /* If larger than this, always use a loop. */
6541 if (count
< 8 && !scratch
)
6542 use_zero_reg
= true;
/* Loop overhead: 3 words with scratch, 4 with zero-reg trick, 5 else.  */
6545 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
6547 if (t_len
* count
<= max_len
)
6549 /* Output shifts inline with no loop - faster. */
6552 avr_asm_len (templ
, op
, plen
, t_len
);
6559 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
6561 else if (use_zero_reg
)
6563 /* Hack to save one word: use __zero_reg__ as loop counter.
6564 Set one bit, then shift in a loop until it is 0 again. */
6566 op
[3] = zero_reg_rtx
;
6568 avr_asm_len ("set" CR_TAB
6569 "bld %3,%2-1", op
, plen
, 2);
6573 /* No scratch register available, use one from LD_REGS (saved in
6574 __tmp_reg__) that doesn't overlap with registers to shift. */
6576 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
6577 op
[4] = tmp_reg_rtx
;
6578 saved_in_tmp
= true;
6580 avr_asm_len ("mov %4,%3" CR_TAB
6581 "ldi %3,%2", op
, plen
, 2);
6584 second_label
= false;
6586 else if (MEM_P (op
[2]))
/* Variable count from memory: load it into __tmp_reg__ first.  */
6590 op_mov
[0] = op
[3] = tmp_reg_rtx
;
6593 out_movqi_r_mr (insn
, op_mov
, plen
);
6595 else if (register_operand (op
[2], QImode
))
6599 if (!reg_unused_after (insn
, op
[2])
6600 || reg_overlap_mentioned_p (op
[0], op
[2]))
6602 op
[3] = tmp_reg_rtx
;
6603 avr_asm_len ("mov %3,%2", op
, plen
, 1);
6607 fatal_insn ("bad shift insn:", insn
);
/* Loop skeleton: jump to the test first so a zero count shifts nothing.  */
6610 avr_asm_len ("rjmp 2f", op
, plen
, 1);
6612 avr_asm_len ("1:", op
, plen
, 0);
6613 avr_asm_len (templ
, op
, plen
, t_len
);
6616 avr_asm_len ("2:", op
, plen
, 0);
6618 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
6619 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
/* Restore the LD reg we borrowed as counter.  */
6622 avr_asm_len ("mov %3,%4", op
, plen
, 1);
6626 /* 8bit shift left ((char)x << i) */
/* 8-bit shift left: hand-optimized sequences per constant count (SWAP
   tricks for counts >= 4 on LD regs, ROR for 7), loop fallback through
   out_shift_with_cnt for variable counts.
   NOTE(review): extraction-mangled text (case labels and most template
   continuation lines are missing); code left byte-identical.  */
6629 ashlqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6631 if (CONST_INT_P (operands
[2]))
6638 switch (INTVAL (operands
[2]))
6641 if (INTVAL (operands
[2]) < 8)
6653 return ("lsl %0" CR_TAB
6658 return ("lsl %0" CR_TAB
6663 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6666 return ("swap %0" CR_TAB
6670 return ("lsl %0" CR_TAB
6676 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6679 return ("swap %0" CR_TAB
6684 return ("lsl %0" CR_TAB
6691 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6694 return ("swap %0" CR_TAB
6700 return ("lsl %0" CR_TAB
6709 return ("ror %0" CR_TAB
6714 else if (CONSTANT_P (operands
[2]))
6715 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6717 out_shift_with_cnt ("lsl %0",
6718 insn
, operands
, len
, 1);
6723 /* 16bit shift left ((short)x << i) */
/* 16-bit shift left: a large table of hand-optimized sequences keyed on
   the constant count, selected by LD-reg availability (ldi_ok), scratch
   availability, AVR_HAVE_MUL and optimize_size; loop fallback via
   out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and many template
   lines are missing); code left byte-identical.  */
6726 ashlhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6728 if (CONST_INT_P (operands
[2]))
6730 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
6731 && XVECLEN (PATTERN (insn
), 0) == 3
6732 && REG_P (operands
[3]));
6733 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6740 switch (INTVAL (operands
[2]))
6743 if (INTVAL (operands
[2]) < 16)
6747 return ("clr %B0" CR_TAB
6751 if (optimize_size
&& scratch
)
6756 return ("swap %A0" CR_TAB
6758 "andi %B0,0xf0" CR_TAB
6759 "eor %B0,%A0" CR_TAB
6760 "andi %A0,0xf0" CR_TAB
6766 return ("swap %A0" CR_TAB
6768 "ldi %3,0xf0" CR_TAB
6770 "eor %B0,%A0" CR_TAB
6774 break; /* optimize_size ? 6 : 8 */
6778 break; /* scratch ? 5 : 6 */
6782 return ("lsl %A0" CR_TAB
6786 "andi %B0,0xf0" CR_TAB
6787 "eor %B0,%A0" CR_TAB
6788 "andi %A0,0xf0" CR_TAB
6794 return ("lsl %A0" CR_TAB
6798 "ldi %3,0xf0" CR_TAB
6800 "eor %B0,%A0" CR_TAB
6808 break; /* scratch ? 5 : 6 */
6810 return ("clr __tmp_reg__" CR_TAB
6813 "ror __tmp_reg__" CR_TAB
6816 "ror __tmp_reg__" CR_TAB
6817 "mov %B0,%A0" CR_TAB
6818 "mov %A0,__tmp_reg__");
6822 return ("lsr %B0" CR_TAB
6823 "mov %B0,%A0" CR_TAB
6829 return *len
= 2, ("mov %B0,%A1" CR_TAB
6834 return ("mov %B0,%A0" CR_TAB
6840 return ("mov %B0,%A0" CR_TAB
6847 return ("mov %B0,%A0" CR_TAB
6857 return ("mov %B0,%A0" CR_TAB
6865 return ("mov %B0,%A0" CR_TAB
6868 "ldi %3,0xf0" CR_TAB
6872 return ("mov %B0,%A0" CR_TAB
6883 return ("mov %B0,%A0" CR_TAB
6889 if (AVR_HAVE_MUL
&& scratch
)
6892 return ("ldi %3,0x20" CR_TAB
6896 "clr __zero_reg__");
6898 if (optimize_size
&& scratch
)
6903 return ("mov %B0,%A0" CR_TAB
6907 "ldi %3,0xe0" CR_TAB
6913 return ("set" CR_TAB
6918 "clr __zero_reg__");
6921 return ("mov %B0,%A0" CR_TAB
6930 if (AVR_HAVE_MUL
&& ldi_ok
)
6933 return ("ldi %B0,0x40" CR_TAB
6934 "mul %A0,%B0" CR_TAB
6937 "clr __zero_reg__");
6939 if (AVR_HAVE_MUL
&& scratch
)
6942 return ("ldi %3,0x40" CR_TAB
6946 "clr __zero_reg__");
6948 if (optimize_size
&& ldi_ok
)
6951 return ("mov %B0,%A0" CR_TAB
6952 "ldi %A0,6" "\n1:\t"
6957 if (optimize_size
&& scratch
)
6960 return ("clr %B0" CR_TAB
6969 return ("clr %B0" CR_TAB
6976 out_shift_with_cnt ("lsl %A0" CR_TAB
6977 "rol %B0", insn
, operands
, len
, 2);
6982 /* 24-bit shift left */
/* 24-bit shift left: byte-move sequences for counts that are multiples
   of 8, register-number-aware ordering for overlapping operands, loop
   fallback via out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and some template
   lines are missing); code left byte-identical.  */
6985 avr_out_ashlpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6990 if (CONST_INT_P (op
[2]))
6992 switch (INTVAL (op
[2]))
6995 if (INTVAL (op
[2]) < 24)
6998 return avr_asm_len ("clr %A0" CR_TAB
7000 "clr %C0", op
, plen
, 3);
7004 int reg0
= REGNO (op
[0]);
7005 int reg1
= REGNO (op
[1]);
/* Pick the move order that does not clobber not-yet-read source bytes.  */
7008 return avr_asm_len ("mov %C0,%B1" CR_TAB
7009 "mov %B0,%A1" CR_TAB
7010 "clr %A0", op
, plen
, 3);
7012 return avr_asm_len ("clr %A0" CR_TAB
7013 "mov %B0,%A1" CR_TAB
7014 "mov %C0,%B1", op
, plen
, 3);
7019 int reg0
= REGNO (op
[0]);
7020 int reg1
= REGNO (op
[1]);
7022 if (reg0
+ 2 != reg1
)
7023 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
7025 return avr_asm_len ("clr %B0" CR_TAB
7026 "clr %A0", op
, plen
, 2);
7030 return avr_asm_len ("clr %C0" CR_TAB
7034 "clr %A0", op
, plen
, 5);
7038 out_shift_with_cnt ("lsl %A0" CR_TAB
7040 "rol %C0", insn
, op
, plen
, 3);
7045 /* 32bit shift left ((long)x << i) */
/* 32-bit shift left: byte/word-move sequences for counts that are
   multiples of 8 (using MOVW where register layout allows), loop
   fallback via out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and some template
   lines are missing); code left byte-identical.  */
7048 ashlsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7050 if (CONST_INT_P (operands
[2]))
7058 switch (INTVAL (operands
[2]))
7061 if (INTVAL (operands
[2]) < 32)
7065 return *len
= 3, ("clr %D0" CR_TAB
7069 return ("clr %D0" CR_TAB
7076 int reg0
= true_regnum (operands
[0]);
7077 int reg1
= true_regnum (operands
[1]);
7080 return ("mov %D0,%C1" CR_TAB
7081 "mov %C0,%B1" CR_TAB
7082 "mov %B0,%A1" CR_TAB
7085 return ("clr %A0" CR_TAB
7086 "mov %B0,%A1" CR_TAB
7087 "mov %C0,%B1" CR_TAB
7093 int reg0
= true_regnum (operands
[0]);
7094 int reg1
= true_regnum (operands
[1]);
7095 if (reg0
+ 2 == reg1
)
7096 return *len
= 2, ("clr %B0" CR_TAB
7099 return *len
= 3, ("movw %C0,%A1" CR_TAB
7103 return *len
= 4, ("mov %C0,%A1" CR_TAB
7104 "mov %D0,%B1" CR_TAB
7111 return ("mov %D0,%A1" CR_TAB
7118 return ("clr %D0" CR_TAB
7127 out_shift_with_cnt ("lsl %A0" CR_TAB
7130 "rol %D0", insn
, operands
, len
, 4);
7134 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* 8-bit arithmetic shift right: unrolled ASR chains for small counts,
   a BST-based trick for count 6, loop fallback via out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and template
   continuation lines are missing); code left byte-identical.  */
7137 ashrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7139 if (CONST_INT_P (operands
[2]))
7146 switch (INTVAL (operands
[2]))
7154 return ("asr %0" CR_TAB
7159 return ("asr %0" CR_TAB
7165 return ("asr %0" CR_TAB
7172 return ("asr %0" CR_TAB
7180 return ("bst %0,6" CR_TAB
7186 if (INTVAL (operands
[2]) < 8)
7193 return ("lsl %0" CR_TAB
7197 else if (CONSTANT_P (operands
[2]))
7198 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
7200 out_shift_with_cnt ("asr %0",
7201 insn
, operands
, len
, 1);
7206 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* 16-bit arithmetic shift right: hand-optimized per-count sequences
   (sign replication via LSL/SBC, MULS-based shifts when AVR_HAVE_MUL),
   selected by ldi_ok/scratch/optimize_size; loop fallback via
   out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and many template
   lines are missing); code left byte-identical.  */
7209 ashrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7211 if (CONST_INT_P (operands
[2]))
7213 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
7214 && XVECLEN (PATTERN (insn
), 0) == 3
7215 && REG_P (operands
[3]));
7216 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
7223 switch (INTVAL (operands
[2]))
7227 /* XXX try to optimize this too? */
7232 break; /* scratch ? 5 : 6 */
7234 return ("mov __tmp_reg__,%A0" CR_TAB
7235 "mov %A0,%B0" CR_TAB
7236 "lsl __tmp_reg__" CR_TAB
7238 "sbc %B0,%B0" CR_TAB
7239 "lsl __tmp_reg__" CR_TAB
7245 return ("lsl %A0" CR_TAB
7246 "mov %A0,%B0" CR_TAB
7252 int reg0
= true_regnum (operands
[0]);
7253 int reg1
= true_regnum (operands
[1]);
7256 return *len
= 3, ("mov %A0,%B0" CR_TAB
7260 return *len
= 4, ("mov %A0,%B1" CR_TAB
7268 return ("mov %A0,%B0" CR_TAB
7270 "sbc %B0,%B0" CR_TAB
7275 return ("mov %A0,%B0" CR_TAB
7277 "sbc %B0,%B0" CR_TAB
7282 if (AVR_HAVE_MUL
&& ldi_ok
)
7285 return ("ldi %A0,0x20" CR_TAB
7286 "muls %B0,%A0" CR_TAB
7288 "sbc %B0,%B0" CR_TAB
7289 "clr __zero_reg__");
7291 if (optimize_size
&& scratch
)
7294 return ("mov %A0,%B0" CR_TAB
7296 "sbc %B0,%B0" CR_TAB
7302 if (AVR_HAVE_MUL
&& ldi_ok
)
7305 return ("ldi %A0,0x10" CR_TAB
7306 "muls %B0,%A0" CR_TAB
7308 "sbc %B0,%B0" CR_TAB
7309 "clr __zero_reg__");
7311 if (optimize_size
&& scratch
)
7314 return ("mov %A0,%B0" CR_TAB
7316 "sbc %B0,%B0" CR_TAB
7323 if (AVR_HAVE_MUL
&& ldi_ok
)
7326 return ("ldi %A0,0x08" CR_TAB
7327 "muls %B0,%A0" CR_TAB
7329 "sbc %B0,%B0" CR_TAB
7330 "clr __zero_reg__");
7333 break; /* scratch ? 5 : 7 */
7335 return ("mov %A0,%B0" CR_TAB
7337 "sbc %B0,%B0" CR_TAB
7346 return ("lsl %B0" CR_TAB
7347 "sbc %A0,%A0" CR_TAB
7349 "mov %B0,%A0" CR_TAB
7353 if (INTVAL (operands
[2]) < 16)
7359 return *len
= 3, ("lsl %B0" CR_TAB
7360 "sbc %A0,%A0" CR_TAB
7365 out_shift_with_cnt ("asr %B0" CR_TAB
7366 "ror %A0", insn
, operands
, len
, 2);
7371 /* 24-bit arithmetic shift right */
/* 24-bit arithmetic shift right: byte-move plus sign-replication
   sequences for counts that are multiples of 8 (and count 23), loop
   fallback via out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and some template
   lines are missing); code left byte-identical.  */
7374 avr_out_ashrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
7376 int dest
= REGNO (op
[0]);
7377 int src
= REGNO (op
[1]);
7379 if (CONST_INT_P (op
[2]))
7384 switch (INTVAL (op
[2]))
7388 return avr_asm_len ("mov %A0,%B1" CR_TAB
7389 "mov %B0,%C1" CR_TAB
7392 "dec %C0", op
, plen
, 5);
7394 return avr_asm_len ("clr %C0" CR_TAB
7397 "mov %B0,%C1" CR_TAB
7398 "mov %A0,%B1", op
, plen
, 5);
7401 if (dest
!= src
+ 2)
7402 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
7404 return avr_asm_len ("clr %B0" CR_TAB
7407 "mov %C0,%B0", op
, plen
, 4);
7410 if (INTVAL (op
[2]) < 24)
/* Count 23: only the sign remains; replicate it through all bytes.  */
7416 return avr_asm_len ("lsl %C0" CR_TAB
7417 "sbc %A0,%A0" CR_TAB
7418 "mov %B0,%A0" CR_TAB
7419 "mov %C0,%A0", op
, plen
, 4);
7423 out_shift_with_cnt ("asr %C0" CR_TAB
7425 "ror %A0", insn
, op
, plen
, 3);
7430 /* 32-bit arithmetic shift right ((signed long)x >> i) */
/* 32-bit arithmetic shift right: byte/word-move plus sign-replication
   sequences for counts that are multiples of 8 (and count 31), loop
   fallback via out_shift_with_cnt.
   NOTE(review): extraction-mangled text (case labels and some template
   lines are missing); code left byte-identical.  */
7433 ashrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7435 if (CONST_INT_P (operands
[2]))
7443 switch (INTVAL (operands
[2]))
7447 int reg0
= true_regnum (operands
[0]);
7448 int reg1
= true_regnum (operands
[1]);
7451 return ("mov %A0,%B1" CR_TAB
7452 "mov %B0,%C1" CR_TAB
7453 "mov %C0,%D1" CR_TAB
7458 return ("clr %D0" CR_TAB
7461 "mov %C0,%D1" CR_TAB
7462 "mov %B0,%C1" CR_TAB
7468 int reg0
= true_regnum (operands
[0]);
7469 int reg1
= true_regnum (operands
[1]);
7471 if (reg0
== reg1
+ 2)
7472 return *len
= 4, ("clr %D0" CR_TAB
7477 return *len
= 5, ("movw %A0,%C1" CR_TAB
7483 return *len
= 6, ("mov %B0,%D1" CR_TAB
7484 "mov %A0,%C1" CR_TAB
7492 return *len
= 6, ("mov %A0,%D1" CR_TAB
7496 "mov %B0,%D0" CR_TAB
7500 if (INTVAL (operands
[2]) < 32)
7507 return *len
= 4, ("lsl %D0" CR_TAB
7508 "sbc %A0,%A0" CR_TAB
7509 "mov %B0,%A0" CR_TAB
7512 return *len
= 5, ("lsl %D0" CR_TAB
7513 "sbc %A0,%A0" CR_TAB
7514 "mov %B0,%A0" CR_TAB
7515 "mov %C0,%A0" CR_TAB
7520 out_shift_with_cnt ("asr %D0" CR_TAB
7523 "ror %A0", insn
, operands
, len
, 4);
7527 /* 8-bit logic shift right ((unsigned char)x >> i) */
7530 lshrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7532 if (CONST_INT_P (operands
[2]))
7539 switch (INTVAL (operands
[2]))
7542 if (INTVAL (operands
[2]) < 8)
7554 return ("lsr %0" CR_TAB
7558 return ("lsr %0" CR_TAB
7563 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7566 return ("swap %0" CR_TAB
7570 return ("lsr %0" CR_TAB
7576 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7579 return ("swap %0" CR_TAB
7584 return ("lsr %0" CR_TAB
7591 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7594 return ("swap %0" CR_TAB
7600 return ("lsr %0" CR_TAB
7609 return ("bst %1,7" CR_TAB
7614 else if (CONSTANT_P (operands
[2]))
7615 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
7617 out_shift_with_cnt ("lsr %0",
7618 insn
, operands
, len
, 1);
7622 /* 16-bit logic shift right ((unsigned short)x >> i) */
7625 lshrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7627 if (CONST_INT_P (operands
[2]))
7629 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
7630 && XVECLEN (PATTERN (insn
), 0) == 3
7631 && REG_P (operands
[3]));
7632 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
7639 switch (INTVAL (operands
[2]))
7642 if (INTVAL (operands
[2]) < 16)
7646 return ("clr %B0" CR_TAB
7650 if (optimize_size
&& scratch
)
7655 return ("swap %B0" CR_TAB
7657 "andi %A0,0x0f" CR_TAB
7658 "eor %A0,%B0" CR_TAB
7659 "andi %B0,0x0f" CR_TAB
7665 return ("swap %B0" CR_TAB
7667 "ldi %3,0x0f" CR_TAB
7669 "eor %A0,%B0" CR_TAB
7673 break; /* optimize_size ? 6 : 8 */
7677 break; /* scratch ? 5 : 6 */
7681 return ("lsr %B0" CR_TAB
7685 "andi %A0,0x0f" CR_TAB
7686 "eor %A0,%B0" CR_TAB
7687 "andi %B0,0x0f" CR_TAB
7693 return ("lsr %B0" CR_TAB
7697 "ldi %3,0x0f" CR_TAB
7699 "eor %A0,%B0" CR_TAB
7707 break; /* scratch ? 5 : 6 */
7709 return ("clr __tmp_reg__" CR_TAB
7712 "rol __tmp_reg__" CR_TAB
7715 "rol __tmp_reg__" CR_TAB
7716 "mov %A0,%B0" CR_TAB
7717 "mov %B0,__tmp_reg__");
7721 return ("lsl %A0" CR_TAB
7722 "mov %A0,%B0" CR_TAB
7724 "sbc %B0,%B0" CR_TAB
7728 return *len
= 2, ("mov %A0,%B1" CR_TAB
7733 return ("mov %A0,%B0" CR_TAB
7739 return ("mov %A0,%B0" CR_TAB
7746 return ("mov %A0,%B0" CR_TAB
7756 return ("mov %A0,%B0" CR_TAB
7764 return ("mov %A0,%B0" CR_TAB
7767 "ldi %3,0x0f" CR_TAB
7771 return ("mov %A0,%B0" CR_TAB
7782 return ("mov %A0,%B0" CR_TAB
7788 if (AVR_HAVE_MUL
&& scratch
)
7791 return ("ldi %3,0x08" CR_TAB
7795 "clr __zero_reg__");
7797 if (optimize_size
&& scratch
)
7802 return ("mov %A0,%B0" CR_TAB
7806 "ldi %3,0x07" CR_TAB
7812 return ("set" CR_TAB
7817 "clr __zero_reg__");
7820 return ("mov %A0,%B0" CR_TAB
7829 if (AVR_HAVE_MUL
&& ldi_ok
)
7832 return ("ldi %A0,0x04" CR_TAB
7833 "mul %B0,%A0" CR_TAB
7836 "clr __zero_reg__");
7838 if (AVR_HAVE_MUL
&& scratch
)
7841 return ("ldi %3,0x04" CR_TAB
7845 "clr __zero_reg__");
7847 if (optimize_size
&& ldi_ok
)
7850 return ("mov %A0,%B0" CR_TAB
7851 "ldi %B0,6" "\n1:\t"
7856 if (optimize_size
&& scratch
)
7859 return ("clr %A0" CR_TAB
7868 return ("bst %B1,7" CR_TAB
7875 out_shift_with_cnt ("lsr %B0" CR_TAB
7876 "ror %A0", insn
, operands
, len
, 2);
7881 /* 24-bit logic shift right */
7884 avr_out_lshrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
7886 int dest
= REGNO (op
[0]);
7887 int src
= REGNO (op
[1]);
7889 if (CONST_INT_P (op
[2]))
7894 switch (INTVAL (op
[2]))
7898 return avr_asm_len ("mov %A0,%B1" CR_TAB
7899 "mov %B0,%C1" CR_TAB
7900 "clr %C0", op
, plen
, 3);
7902 return avr_asm_len ("clr %C0" CR_TAB
7903 "mov %B0,%C1" CR_TAB
7904 "mov %A0,%B1", op
, plen
, 3);
7907 if (dest
!= src
+ 2)
7908 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
7910 return avr_asm_len ("clr %B0" CR_TAB
7911 "clr %C0", op
, plen
, 2);
7914 if (INTVAL (op
[2]) < 24)
7920 return avr_asm_len ("bst %C1,7" CR_TAB
7924 "bld %A0,0", op
, plen
, 5);
7928 out_shift_with_cnt ("lsr %C0" CR_TAB
7930 "ror %A0", insn
, op
, plen
, 3);
7935 /* 32-bit logic shift right ((unsigned int)x >> i) */
7938 lshrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7940 if (CONST_INT_P (operands
[2]))
7948 switch (INTVAL (operands
[2]))
7951 if (INTVAL (operands
[2]) < 32)
7955 return *len
= 3, ("clr %D0" CR_TAB
7959 return ("clr %D0" CR_TAB
7966 int reg0
= true_regnum (operands
[0]);
7967 int reg1
= true_regnum (operands
[1]);
7970 return ("mov %A0,%B1" CR_TAB
7971 "mov %B0,%C1" CR_TAB
7972 "mov %C0,%D1" CR_TAB
7975 return ("clr %D0" CR_TAB
7976 "mov %C0,%D1" CR_TAB
7977 "mov %B0,%C1" CR_TAB
7983 int reg0
= true_regnum (operands
[0]);
7984 int reg1
= true_regnum (operands
[1]);
7986 if (reg0
== reg1
+ 2)
7987 return *len
= 2, ("clr %C0" CR_TAB
7990 return *len
= 3, ("movw %A0,%C1" CR_TAB
7994 return *len
= 4, ("mov %B0,%D1" CR_TAB
7995 "mov %A0,%C1" CR_TAB
8001 return *len
= 4, ("mov %A0,%D1" CR_TAB
8008 return *len
= 5, ("bst %D1,7" CR_TAB
8011 "movw %C0,%A0" CR_TAB
8014 return ("bst %D1,7" CR_TAB
8023 out_shift_with_cnt ("lsr %D0" CR_TAB
8026 "ror %A0", insn
, operands
, len
, 4);
8031 /* Output addition of register XOP[0] and compile time constant XOP[2].
8032 CODE == PLUS: perform addition by using ADD instructions or
8033 CODE == MINUS: perform addition by using SUB instructions:
8035 XOP[0] = XOP[0] + XOP[2]
8037 Or perform addition/subtraction with register XOP[2] depending on CODE:
8039 XOP[0] = XOP[0] +/- XOP[2]
8041 If PLEN == NULL, print assembler instructions to perform the operation;
8042 otherwise, set *PLEN to the length of the instruction sequence (in words)
8043 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
8044 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
8046 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
8047 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
8048 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
8049 the subtrahend in the original insn, provided it is a compile time constant.
8050 In all other cases, SIGN is 0.
8052 If OUT_LABEL is true, print the final 0: label which is needed for
8053 saturated addition / subtraction. The only case where OUT_LABEL = false
8054 is useful is for saturated addition / subtraction performed during
8055 fixed-point rounding, cf. `avr_out_round'. */
8058 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
8059 enum rtx_code code_sat
, int sign
, bool out_label
)
8061 /* MODE of the operation. */
8062 machine_mode mode
= GET_MODE (xop
[0]);
8064 /* INT_MODE of the same size. */
8065 scalar_int_mode imode
= int_mode_for_mode (mode
).require ();
8067 /* Number of bytes to operate on. */
8068 int n_bytes
= GET_MODE_SIZE (mode
);
8070 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
8071 int clobber_val
= -1;
8073 /* op[0]: 8-bit destination register
8074 op[1]: 8-bit const int
8075 op[2]: 8-bit scratch register */
8078 /* Started the operation? Before starting the operation we may skip
8079 adding 0. This is no more true after the operation started because
8080 carry must be taken into account. */
8081 bool started
= false;
8083 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
8086 /* Output a BRVC instruction. Only needed with saturation. */
8087 bool out_brvc
= true;
8094 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
8096 for (int i
= 0; i
< n_bytes
; i
++)
8098 /* We operate byte-wise on the destination. */
8099 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
8100 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
8103 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
8106 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
8110 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
8112 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
8121 /* Except in the case of ADIW with 16-bit register (see below)
8122 addition does not set cc0 in a usable way. */
8124 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
8126 if (CONST_FIXED_P (xval
))
8127 xval
= avr_to_int_mode (xval
);
8129 /* Adding/Subtracting zero is a no-op. */
8131 if (xval
== const0_rtx
)
8138 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
8142 if (SS_PLUS
== code_sat
&& MINUS
== code
8144 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
8145 & GET_MODE_MASK (QImode
)))
8147 /* We compute x + 0x80 by means of SUB instructions. We negated the
8148 constant subtrahend above and are left with x - (-128) so that we
8149 need something like SUBI r,128 which does not exist because SUBI sets
8150 V according to the sign of the subtrahend. Notice the only case
8151 where this must be done is when NEG overflowed in case [2s] because
8152 the V computation needs the right sign of the subtrahend. */
8154 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
- 1);
8156 avr_asm_len ("subi %0,128" CR_TAB
8157 "brmi 0f", &msb
, plen
, 2);
8163 for (int i
= 0; i
< n_bytes
; i
++)
8165 /* We operate byte-wise on the destination. */
8166 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
8167 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
8169 /* 8-bit value to operate with this byte. */
8170 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
8172 /* Registers R16..R31 can operate with immediate. */
8173 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
8176 op
[1] = gen_int_mode (val8
, QImode
);
8178 /* To get usable cc0 no low-bytes must have been skipped. */
8186 && test_hard_reg_class (ADDW_REGS
, reg8
))
8188 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
8189 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
8191 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
8192 i.e. operate word-wise. */
8199 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
8202 if (n_bytes
== 2 && PLUS
== code
)
8214 avr_asm_len (code
== PLUS
8215 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
8219 else if ((val8
== 1 || val8
== 0xff)
8220 && UNKNOWN
== code_sat
8222 && i
== n_bytes
- 1)
8224 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
8234 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
8236 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
8238 /* This belongs to the x + 0x80 corner case. The code with
8239 ADD instruction is not smaller, thus make this case
8240 expensive so that the caller won't pick it. */
8246 if (clobber_val
!= (int) val8
)
8247 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8248 clobber_val
= (int) val8
;
8250 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
8257 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
8260 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
8262 if (clobber_val
!= (int) val8
)
8263 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8264 clobber_val
= (int) val8
;
8266 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
8278 } /* for all sub-bytes */
8282 if (UNKNOWN
== code_sat
)
8285 *pcc
= (int) CC_CLOBBER
;
8287 /* Vanilla addition/subtraction is done. We are left with saturation.
8289 We have to compute A = A <op> B where A is a register and
8290 B is a register or a non-zero compile time constant CONST.
8291 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
8292 B stands for the original operand $2 in INSN. In the case of B = CONST,
8293 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
8295 CODE is the instruction flavor we use in the asm sequence to perform <op>.
8299 operation | code | sat if | b is | sat value | case
8300 -----------------+-------+----------+--------------+-----------+-------
8301 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
8302 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
8303 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
8304 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
8308 operation | code | sat if | b is | sat value | case
8309 -----------------+-------+----------+--------------+-----------+-------
8310 + as a + b | add | V == 1 | const, reg | s+ | [1s]
8311 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
8312 - as a - b | sub | V == 1 | const, reg | s- | [3s]
8313 - as a + (-b) | add | V == 1 | const | s- | [4s]
8315 s+ = b < 0 ? -0x80 : 0x7f
8316 s- = b < 0 ? 0x7f : -0x80
8318 The cases a - b actually perform a - (-(-b)) if B is CONST.
8321 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
8323 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
8326 bool need_copy
= true;
8327 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
8338 avr_asm_len ("brvc 0f", op
, plen
, 1);
8340 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
8345 avr_asm_len ("ldi %0,0x7f" CR_TAB
8346 "adc %0,__zero_reg__", op
, plen
, 2);
8348 avr_asm_len ("ldi %0,0x7f" CR_TAB
8349 "ldi %1,0xff" CR_TAB
8350 "adc %1,__zero_reg__" CR_TAB
8351 "adc %0,__zero_reg__", op
, plen
, 4);
8353 else if (sign
== 0 && PLUS
== code
)
8357 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
8360 avr_asm_len ("ldi %0,0x80" CR_TAB
8362 "dec %0", op
, plen
, 3);
8364 avr_asm_len ("ldi %0,0x80" CR_TAB
8367 "sbci %0,0", op
, plen
, 4);
8369 else if (sign
== 0 && MINUS
== code
)
8373 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
8376 avr_asm_len ("ldi %0,0x7f" CR_TAB
8378 "inc %0", op
, plen
, 3);
8380 avr_asm_len ("ldi %0,0x7f" CR_TAB
8383 "sbci %0,-1", op
, plen
, 4);
8385 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
8387 /* [1s,const,B < 0] [2s,B < 0] */
8388 /* [3s,const,B > 0] [4s,B > 0] */
8392 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
8396 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
8397 if (n_bytes
> 1 && need_copy
)
8398 avr_asm_len ("clr %1", op
, plen
, 1);
8400 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
8402 /* [1s,const,B > 0] [2s,B > 0] */
8403 /* [3s,const,B < 0] [4s,B < 0] */
8407 avr_asm_len ("sec" CR_TAB
8408 "%~call __sbc_8", op
, plen
, 1 + len_call
);
8412 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
8413 if (n_bytes
> 1 && need_copy
)
8414 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
8424 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
8429 avr_asm_len ("sec", op
, plen
, 1);
8430 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
8436 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
8437 avr_asm_len ("sec" CR_TAB
8438 "sbc %0,%0", op
, plen
, 2);
8440 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
8443 break; /* US_PLUS */
8448 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
8452 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
8456 avr_asm_len ("clr %0", op
, plen
, 1);
8461 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
8462 Now copy the right value to the LSBs. */
8464 if (need_copy
&& n_bytes
> 1)
8466 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
8468 avr_asm_len ("mov %1,%0", op
, plen
, 1);
8474 avr_asm_len ("movw %0,%1", op
, plen
, 1);
8476 avr_asm_len ("mov %A0,%1" CR_TAB
8477 "mov %B0,%1", op
, plen
, 2);
8480 else if (n_bytes
> 2)
8483 avr_asm_len ("mov %A0,%1" CR_TAB
8484 "mov %B0,%1", op
, plen
, 2);
8488 if (need_copy
&& n_bytes
== 8)
8491 avr_asm_len ("movw %r0+2,%0" CR_TAB
8492 "movw %r0+4,%0", xop
, plen
, 2);
8494 avr_asm_len ("mov %r0+2,%0" CR_TAB
8495 "mov %r0+3,%0" CR_TAB
8496 "mov %r0+4,%0" CR_TAB
8497 "mov %r0+5,%0", xop
, plen
, 4);
8501 avr_asm_len ("0:", op
, plen
, 0);
8505 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
8506 is ont a compile-time constant:
8508 XOP[0] = XOP[0] +/- XOP[2]
8510 This is a helper for the function below. The only insns that need this
8511 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
8514 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
8516 machine_mode mode
= GET_MODE (xop
[0]);
8518 /* Only pointer modes want to add symbols. */
8520 gcc_assert (mode
== HImode
|| mode
== PSImode
);
8522 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
8524 avr_asm_len (PLUS
== code
8525 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
8526 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
8529 if (PSImode
== mode
)
8530 avr_asm_len (PLUS
== code
8531 ? "sbci %C0,hlo8(-(%2))"
8532 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
8537 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
8539 INSN is a single_set insn or an insn pattern with a binary operation as
8540 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
8542 XOP are the operands of INSN. In the case of 64-bit operations with
8543 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
8544 The non-saturating insns up to 32 bits may or may not supply a "d" class
8547 If PLEN == NULL output the instructions.
8548 If PLEN != NULL set *PLEN to the length of the sequence in words.
8550 PCC is a pointer to store the instructions' effect on cc0.
8553 PLEN and PCC default to NULL.
8555 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
8560 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
8562 int cc_plus
, cc_minus
, cc_dummy
;
8563 int len_plus
, len_minus
;
8565 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
8566 rtx xdest
= SET_DEST (xpattern
);
8567 machine_mode mode
= GET_MODE (xdest
);
8568 scalar_int_mode imode
= int_mode_for_mode (mode
).require ();
8569 int n_bytes
= GET_MODE_SIZE (mode
);
8570 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
8572 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
8578 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
8580 if (PLUS
== code_sat
|| MINUS
== code_sat
)
8583 if (n_bytes
<= 4 && REG_P (xop
[2]))
8585 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
8591 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
8592 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
8593 op
[2] = avr_to_int_mode (xop
[0]);
8598 && !CONST_INT_P (xop
[2])
8599 && !CONST_FIXED_P (xop
[2]))
8601 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
8604 op
[0] = avr_to_int_mode (xop
[0]);
8605 op
[1] = avr_to_int_mode (xop
[1]);
8606 op
[2] = avr_to_int_mode (xop
[2]);
8609 /* Saturations and 64-bit operations don't have a clobber operand.
8610 For the other cases, the caller will provide a proper XOP[3]. */
8612 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
8613 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
8615 /* Saturation will need the sign of the original operand. */
8617 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
8618 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
8620 /* If we subtract and the subtrahend is a constant, then negate it
8621 so that avr_out_plus_1 can be used. */
8624 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
8626 /* Work out the shortest sequence. */
8628 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
8629 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
8633 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
8634 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
8636 else if (len_minus
<= len_plus
)
8637 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
8639 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
8645 /* Output an instruction sequence for addition of REG in XOP[0] and CONST_INT
8646 in XOP[1] in such a way that SREG.Z and SREG.N are set according to the
8647 result. XOP[2] might be a d-regs clobber register. If XOP[2] is SCRATCH,
8648 then the addition can be performed without a clobber reg. Return "".
8650 If PLEN == NULL, then output the instructions.
8651 If PLEN != NULL, then set *PLEN to the length of the sequence in words. */
8654 avr_out_plus_set_ZN (rtx
*xop
, int *plen
)
8659 // Register to compare and value to compare against.
8663 machine_mode mode
= GET_MODE (xreg
);
8665 // Number of bytes to operate on.
8666 int n_bytes
= GET_MODE_SIZE (mode
);
8670 if (INTVAL (xval
) == 1)
8671 return avr_asm_len ("inc %0", xop
, plen
, 1);
8673 if (INTVAL (xval
) == -1)
8674 return avr_asm_len ("dec %0", xop
, plen
, 1);
8678 && test_hard_reg_class (ADDW_REGS
, xreg
)
8679 && IN_RANGE (INTVAL (xval
), 1, 63))
8681 // Add 16-bit value in [1..63] to a w register.
8682 return avr_asm_len ("adiw %0, %1", xop
, plen
, 1);
8685 // Addition won't work; subtract the negative of XVAL instead.
8686 xval
= simplify_unary_operation (NEG
, mode
, xval
, mode
);
8688 // Value (0..0xff) held in clobber register xop[2] or -1 if unknown.
8689 int clobber_val
= -1;
8691 // [0] = Current sub-register.
8692 // [1] = Current partial xval.
8693 // [2] = 8-bit clobber d-register or SCRATCH.
8697 // Work byte-wise from LSB to MSB. The lower two bytes might be
8698 // SBIW'ed in one go.
8699 for (int i
= 0; i
< n_bytes
; ++i
)
8701 op
[0] = simplify_gen_subreg (QImode
, xreg
, mode
, i
);
8705 && test_hard_reg_class (ADDW_REGS
, op
[0]))
8707 op
[1] = simplify_gen_subreg (HImode
, xval
, mode
, 0);
8708 if (IN_RANGE (INTVAL (op
[1]), 0, 63))
8710 // SBIW can handle the lower 16 bits.
8711 avr_asm_len ("sbiw %0, %1", op
, plen
, 1);
8713 // Next byte has already been handled: Skip it.
8719 op
[1] = simplify_gen_subreg (QImode
, xval
, mode
, i
);
8721 if (test_hard_reg_class (LD_REGS
, op
[0]))
8723 // d-regs can subtract immediates.
8726 : "sbci %0, %1", op
, plen
, 1);
8730 int val8
= 0xff & INTVAL (op
[1]);
8733 // Any register can subtract 0.
8735 ? "sub %0, __zero_reg__"
8736 : "sbc %0, __zero_reg__", op
, plen
, 1);
8740 // Use d-register to hold partial xval.
8742 if (val8
!= clobber_val
)
8744 // Load partial xval to QI clobber reg and memoize for later.
8745 gcc_assert (REG_P (op
[2]));
8746 avr_asm_len ("ldi %2, %1", op
, plen
, 1);
8752 : "sbc %0, %2", op
, plen
, 1);
8761 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
8762 time constant XOP[2]:
8764 XOP[0] = XOP[0] <op> XOP[2]
8766 and return "". If PLEN == NULL, print assembler instructions to perform the
8767 operation; otherwise, set *PLEN to the length of the instruction sequence
8768 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
8769 register or SCRATCH if no clobber register is needed for the operation.
8770 INSN is an INSN_P or a pattern of an insn. */
8773 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
8775 /* CODE and MODE of the operation. */
8776 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
8777 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
8778 machine_mode mode
= GET_MODE (xop
[0]);
8780 /* Number of bytes to operate on. */
8781 int n_bytes
= GET_MODE_SIZE (mode
);
8783 /* Value of T-flag (0 or 1) or -1 if unknow. */
8786 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
8787 int clobber_val
= -1;
8789 /* op[0]: 8-bit destination register
8790 op[1]: 8-bit const int
8791 op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
8792 op[3]: 8-bit register containing 0xff or NULL_RTX */
8795 op
[2] = QImode
== mode
? NULL_RTX
: xop
[3];
8801 for (int i
= 0; i
< n_bytes
; i
++)
8803 /* We operate byte-wise on the destination. */
8804 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
8805 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
8807 /* 8-bit value to operate with this byte. */
8808 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
8810 /* Number of bits set in the current byte of the constant. */
8811 int pop8
= popcount_hwi (val8
);
8813 /* Registers R16..R31 can operate with immediate. */
8814 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
8817 op
[1] = GEN_INT (val8
);
8826 avr_asm_len ("ori %0,%1", op
, plen
, 1);
8830 avr_asm_len ("set", op
, plen
, 1);
8833 op
[1] = GEN_INT (exact_log2 (val8
));
8834 avr_asm_len ("bld %0,%1", op
, plen
, 1);
8838 if (op
[3] != NULL_RTX
)
8839 avr_asm_len ("mov %0,%3", op
, plen
, 1);
8841 avr_asm_len ("clr %0" CR_TAB
8842 "dec %0", op
, plen
, 2);
8848 if (clobber_val
!= (int) val8
)
8849 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8850 clobber_val
= (int) val8
;
8852 avr_asm_len ("or %0,%2", op
, plen
, 1);
8862 avr_asm_len ("clr %0", op
, plen
, 1);
8864 avr_asm_len ("andi %0,%1", op
, plen
, 1);
8868 avr_asm_len ("clt", op
, plen
, 1);
8871 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
8872 avr_asm_len ("bld %0,%1", op
, plen
, 1);
8876 if (clobber_val
!= (int) val8
)
8877 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8878 clobber_val
= (int) val8
;
8880 avr_asm_len ("and %0,%2", op
, plen
, 1);
8890 avr_asm_len ("com %0", op
, plen
, 1);
8891 else if (ld_reg_p
&& val8
== (1 << 7))
8892 avr_asm_len ("subi %0,%1", op
, plen
, 1);
8895 if (clobber_val
!= (int) val8
)
8896 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8897 clobber_val
= (int) val8
;
8899 avr_asm_len ("eor %0,%2", op
, plen
, 1);
8905 /* Unknown rtx_code */
8908 } /* for all sub-bytes */
8914 /* Output sign extension from XOP[1] to XOP[0] and return "".
8915 If PLEN == NULL, print assembler instructions to perform the operation;
8916 otherwise, set *PLEN to the length of the instruction sequence (in words)
8917 as printed with PLEN == NULL. */
8920 avr_out_sign_extend (rtx_insn
*insn
, rtx
*xop
, int *plen
)
8922 // Size in bytes of source resp. destination operand.
8923 unsigned n_src
= GET_MODE_SIZE (GET_MODE (xop
[1]));
8924 unsigned n_dest
= GET_MODE_SIZE (GET_MODE (xop
[0]));
8925 rtx r_msb
= all_regs_rtx
[REGNO (xop
[1]) + n_src
- 1];
8930 // Copy destination to source
8932 if (REGNO (xop
[0]) != REGNO (xop
[1]))
8934 gcc_assert (n_src
<= 2);
8937 avr_asm_len (AVR_HAVE_MOVW
8939 : "mov %B0,%B1", xop
, plen
, 1);
8940 if (n_src
== 1 || !AVR_HAVE_MOVW
)
8941 avr_asm_len ("mov %A0,%A1", xop
, plen
, 1);
8944 // Set Carry to the sign bit MSB.7...
8946 if (REGNO (xop
[0]) == REGNO (xop
[1])
8947 || !reg_unused_after (insn
, r_msb
))
8949 avr_asm_len ("mov __tmp_reg__,%0", &r_msb
, plen
, 1);
8950 r_msb
= tmp_reg_rtx
;
8953 avr_asm_len ("lsl %0", &r_msb
, plen
, 1);
8955 // ...and propagate it to all the new sign bits
8957 for (unsigned n
= n_src
; n
< n_dest
; n
++)
8958 avr_asm_len ("sbc %0,%0", &all_regs_rtx
[REGNO (xop
[0]) + n
], plen
, 1);
8964 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8965 PLEN != NULL: Set *PLEN to the length of that sequence.
8969 avr_out_addto_sp (rtx
*op
, int *plen
)
8971 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
8972 int addend
= INTVAL (op
[0]);
8979 if (flag_verbose_asm
|| flag_print_asm_name
)
8980 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
8982 while (addend
<= -pc_len
)
8985 avr_asm_len ("rcall .", op
, plen
, 1);
8988 while (addend
++ < 0)
8989 avr_asm_len ("push __tmp_reg__", op
, plen
, 1);
8991 else if (addend
> 0)
8993 if (flag_verbose_asm
|| flag_print_asm_name
)
8994 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
8996 while (addend
-- > 0)
8997 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
9004 /* Output instructions to insert an inverted bit into OP[0]: $0.$1 = ~$2.$3.
9005 If PLEN = NULL then output the respective instruction sequence which
9006 is a combination of BST / BLD and some instruction(s) to invert the bit.
9007 If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
9011 avr_out_insert_notbit (rtx_insn
*insn
, rtx op
[], int *plen
)
9013 if (INTVAL (op
[1]) == 7
9014 && test_hard_reg_class (LD_REGS
, op
[0]))
9016 /* If the inserted bit number is 7 and we have a d-reg, then invert
9017 the bit after the insertion by means of SUBI *,0x80. */
9019 if (INTVAL (op
[3]) == 7
9020 && REGNO (op
[0]) == REGNO (op
[2]))
9022 avr_asm_len ("subi %0,0x80", op
, plen
, -1);
9026 avr_asm_len ("bst %2,%3" CR_TAB
9028 "subi %0,0x80", op
, plen
, -3);
9031 else if (test_hard_reg_class (LD_REGS
, op
[0])
9032 && (INTVAL (op
[1]) != INTVAL (op
[3])
9033 || !reg_overlap_mentioned_p (op
[0], op
[2])))
9035 /* If the destination bit is in a d-reg we can jump depending
9036 on the source bit and use ANDI / ORI. This just applies if we
9037 have not an early-clobber situation with the bit. */
9039 avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
9041 "ori %0,1<<%1", op
, plen
, -3);
9045 /* Otherwise, invert the bit by means of COM before we store it with
9046 BST and then undo the COM if needed. */
9048 avr_asm_len ("com %2" CR_TAB
9049 "bst %2,%3", op
, plen
, -2);
9051 if (!reg_unused_after (insn
, op
[2])
9052 // A simple 'reg_unused_after' is not enough because that function
9053 // assumes that the destination register is overwritten completely
9054 // and hence is in order for our purpose. This is not the case
9055 // with BLD which just changes one bit of the destination.
9056 || reg_overlap_mentioned_p (op
[0], op
[2]))
9058 /* Undo the COM from above. */
9059 avr_asm_len ("com %2", op
, plen
, 1);
9062 avr_asm_len ("bld %0,%1", op
, plen
, 1);
9069 /* Output instructions to extract a bit to 8-bit register XOP[0].
9070 The input XOP[1] is a register or an 8-bit MEM in the lower I/O range.
9071 XOP[2] is the const_int bit position. Return "".
9073 PLEN != 0: Set *PLEN to the code length in words. Don't output anything.
9074 PLEN == 0: Output instructions. */
9077 avr_out_extr (rtx_insn
*insn
, rtx xop
[], int *plen
)
9081 int bit
= INTVAL (xop
[2]);
9083 if (GET_MODE (src
) != QImode
)
9085 src
= xop
[1] = simplify_gen_subreg (QImode
, src
, GET_MODE (src
), bit
/ 8);
9087 xop
[2] = GEN_INT (bit
);
9092 xop
[1] = XEXP (src
, 0); // address
9093 gcc_assert (low_io_address_operand (xop
[1], Pmode
));
9095 return avr_asm_len ("clr %0" CR_TAB
9096 "sbic %i1,%2" CR_TAB
9097 "inc %0", xop
, plen
, -3);
9100 gcc_assert (REG_P (src
));
9102 bool ld_dest_p
= test_hard_reg_class (LD_REGS
, dest
);
9103 bool ld_src_p
= test_hard_reg_class (LD_REGS
, src
);
9106 && REGNO (src
) == REGNO (dest
))
9109 return avr_asm_len ("andi %0,1", xop
, plen
, -1);
9111 return avr_asm_len ("lsr %0" CR_TAB
9112 "andi %0,1", xop
, plen
, -2);
9114 return avr_asm_len ("swap %0" CR_TAB
9115 "andi %0,1", xop
, plen
, -2);
9119 && REGNO (src
) != REGNO (dest
))
9122 return avr_asm_len ("mov %0,%1" CR_TAB
9123 "andi %0,1", xop
, plen
, -2);
9125 && reg_unused_after (insn
, src
))
9126 return avr_asm_len ("andi %1,1" CR_TAB
9127 "mov %0,%1", xop
, plen
, -2);
9130 return avr_asm_len ("bst %1,%2" CR_TAB
9132 "bld %0,0", xop
, plen
, -3);
9136 /* Output instructions to extract a negated bit to 8-bit register XOP[0].
9137 The input XOP[1] is an 8-bit register or MEM in the lower I/O range.
9138 XOP[2] is the const_int bit position. Return "".
9140 PLEN != 0: Set *PLEN to the code length in words. Don't output anything.
9141 PLEN == 0: Output instructions. */
9144 avr_out_extr_not (rtx_insn
* /* insn */, rtx xop
[], int *plen
)
9148 int bit
= INTVAL (xop
[2]);
9152 xop
[1] = XEXP (src
, 0); // address
9153 gcc_assert (low_io_address_operand (xop
[1], Pmode
));
9155 return avr_asm_len ("clr %0" CR_TAB
9156 "sbis %i1,%2" CR_TAB
9157 "inc %0", xop
, plen
, -3);
9160 gcc_assert (REG_P (src
));
9162 bool ld_src_p
= test_hard_reg_class (LD_REGS
, src
);
9165 && REGNO (src
) == REGNO (dest
))
9168 return avr_asm_len ("inc %0" CR_TAB
9169 "andi %0,1", xop
, plen
, -2);
9171 return avr_asm_len ("lsr %0" CR_TAB
9173 "andi %0,1", xop
, plen
, -3);
9175 return avr_asm_len ("swap %0" CR_TAB
9177 "andi %0,1", xop
, plen
, -3);
9182 return avr_asm_len ("cpi %1,0x80" CR_TAB
9184 "neg %0", xop
, plen
, -3);
9186 if (REGNO (src
) != REGNO (dest
))
9187 return avr_asm_len ("clr %0" CR_TAB
9189 "inc %0", xop
, plen
, -3);
9191 return avr_asm_len ("clr __tmp_reg__" CR_TAB
9193 "inc __tmp_reg__" CR_TAB
9194 "mov %0,__tmp_reg__", xop
, plen
, -4);
9198 /* Outputs instructions needed for fixed point type conversion.
9199 This includes converting between any fixed point type, as well
9200 as converting to any integer type. Conversion between integer
9201 types is not supported.
9203 Converting signed fractional types requires a bit shift if converting
9204 to or from any unsigned fractional type because the decimal place is
9205 shifted by 1 bit. When the destination is a signed fractional, the sign
9206 is stored in either the carry or T bit. */
9209 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
9212 RTX_CODE shift
= UNKNOWN
;
9213 bool sign_in_carry
= false;
9214 bool msb_in_carry
= false;
9215 bool lsb_in_tmp_reg
= false;
9216 bool lsb_in_carry
= false;
9217 bool frac_rounded
= false;
9218 const char *code_ashift
= "lsl %0";
9221 #define MAY_CLOBBER(RR) \
9222 /* Shorthand used below. */ \
9224 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
9225 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
9226 || (reg_unused_after (insn, all_regs_rtx[RR]) \
9227 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
9231 /* bytes : Length of operand in bytes.
9232 ibyte : Length of integral part in bytes.
9233 fbyte, fbit : Length of fractional part in bytes, bits. */
9236 unsigned fbit
, bytes
, ibyte
, fbyte
;
9237 unsigned regno
, regno_msb
;
9238 } dest
, src
, *val
[2] = { &dest
, &src
};
9243 /* Step 0: Determine information on source and destination operand we
9244 ====== will need in the remainder. */
9246 for (size_t i
= 0; i
< ARRAY_SIZE (val
); i
++)
9250 xop
[i
] = operands
[i
];
9252 mode
= GET_MODE (xop
[i
]);
9254 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
9255 val
[i
]->regno
= REGNO (xop
[i
]);
9256 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
9258 if (SCALAR_INT_MODE_P (mode
))
9260 val
[i
]->sbit
= intsigned
;
9263 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
9265 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
9266 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
9269 fatal_insn ("unsupported fixed-point conversion", insn
);
9271 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
9272 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
9275 // Byte offset of the decimal point taking into account different place
9276 // of the decimal point in input and output and different register numbers
9277 // of input and output.
9278 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
9280 // Number of destination bytes that will come from sign / zero extension.
9281 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
9283 // Number of bytes at the low end to be filled with zeros.
9284 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
9286 // Do we have a 16-Bit register that is cleared?
9287 rtx clrw
= NULL_RTX
;
9289 bool sign_extend
= src
.sbit
&& sign_bytes
;
9291 if (dest
.fbit
% 8 == 0 && src
.fbit
% 8 == 7)
9293 else if (dest
.fbit
% 8 == 7 && src
.fbit
% 8 == 0)
9295 else if (dest
.fbit
% 8 == src
.fbit
% 8)
9300 /* If we need to round the fraction part, we might need to save/round it
9301 before clobbering any of it in Step 1. Also, we might want to do
9302 the rounding now to make use of LD_REGS. */
9303 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
9304 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
9305 && !TARGET_FRACT_CONV_TRUNC
)
9309 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
9310 && dest
.regno
- offset
-1 >= dest
.regno
);
9311 unsigned s0
= dest
.regno
- offset
-1;
9312 bool use_src
= true;
9314 unsigned copied_msb
= src
.regno_msb
;
9315 bool have_carry
= false;
9317 if (src
.ibyte
> dest
.ibyte
)
9318 copied_msb
-= src
.ibyte
- dest
.ibyte
;
9320 for (sn
= s0
; sn
<= copied_msb
; sn
++)
9321 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
9322 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
9324 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
9326 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
9327 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
9331 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
9332 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
9334 avr_asm_len ("sec" CR_TAB
9335 "cpc %0,__zero_reg__",
9336 &all_regs_rtx
[sn
], plen
, 2);
9340 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
9342 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
9343 &all_regs_rtx
[s0
], plen
, 1);
9344 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
9345 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
9346 avr_asm_len ("\n0:", NULL
, plen
, 0);
9347 frac_rounded
= true;
9349 else if (use_src
&& overlap
)
9351 avr_asm_len ("clr __tmp_reg__" CR_TAB
9353 "dec __tmp_reg__", xop
, plen
, 1);
9357 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
9362 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
9365 avr_asm_len ("clt" CR_TAB
9366 "bld __tmp_reg__,7" CR_TAB
9367 "adc %0,__tmp_reg__",
9368 &all_regs_rtx
[s0
], plen
, 1);
9370 avr_asm_len ("lsr __tmp_reg" CR_TAB
9371 "add %0,__tmp_reg__",
9372 &all_regs_rtx
[s0
], plen
, 2);
9373 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
9374 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
9375 frac_rounded
= true;
9380 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
9381 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
9382 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
9383 xop
[2] = all_regs_rtx
[s0
];
9384 unsigned sn
= src
.regno
;
9385 if (!use_src
|| sn
== s0
)
9386 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
9387 /* We need to consider to-be-discarded bits
9388 if the value is negative. */
9391 avr_asm_len ("tst %0" CR_TAB
9393 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
9394 /* Test to-be-discarded bytes for any nonzero bits.
9395 ??? Could use OR or SBIW to test two registers at once. */
9397 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
9400 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
9401 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
9403 avr_asm_len ("breq 0f" CR_TAB
9405 "\n0:\t" "mov __tmp_reg__,%2",
9408 avr_asm_len ("breq 0f" CR_TAB
9410 "bld __tmp_reg__,0\n0:",
9413 lsb_in_tmp_reg
= true;
9417 /* Step 1: Clear bytes at the low end and copy payload bits from source
9418 ====== to destination. */
9420 int step
= offset
< 0 ? 1 : -1;
9421 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
9423 // We cleared at least that number of registers.
9426 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
9428 // Next regno of destination is needed for MOVW
9429 unsigned d1
= d0
+ step
;
9431 // Current and next regno of source
9432 signed s0
= d0
- offset
;
9433 signed s1
= s0
+ step
;
9435 // Must current resp. next regno be CLRed? This applies to the low
9436 // bytes of the destination that have no associated source bytes.
9437 bool clr0
= s0
< (signed) src
.regno
;
9438 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
9440 // First gather what code to emit (if any) and additional step to
9441 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
9442 // is the source rtx for the current loop iteration.
9443 const char *code
= NULL
;
9448 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
9450 xop
[2] = all_regs_rtx
[d0
& ~1];
9452 code
= "movw %2,%3";
9457 xop
[2] = all_regs_rtx
[d0
];
9462 && d0
% 2 == (step
> 0))
9464 clrw
= all_regs_rtx
[d0
& ~1];
9468 else if (offset
&& s0
<= (signed) src
.regno_msb
)
9470 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
9471 && d0
% 2 == (offset
> 0)
9472 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
9473 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
9475 xop
[2] = all_regs_rtx
[d0
& ~movw
];
9476 xop
[3] = all_regs_rtx
[s0
& ~movw
];
9477 code
= movw
? "movw %2,%3" : "mov %2,%3";
9478 stepw
= step
* movw
;
9483 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
9484 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
9486 /* We are going to override the sign bit. If we sign-extend,
9487 store the sign in the Carry flag. This is not needed if
9488 the destination will be ASHIFT in the remainder because
9489 the ASHIFT will set Carry without extra instruction. */
9491 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
9492 sign_in_carry
= true;
9495 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
9497 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
9498 && src
.ibyte
> dest
.ibyte
9499 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
9501 /* We are going to override the MSB. If we shift right,
9502 store the MSB in the Carry flag. This is only needed if
9503 we don't sign-extend because with sign-extension the MSB
9504 (the sign) will be produced by the sign extension. */
9506 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
9507 msb_in_carry
= true;
9510 unsigned src_lsb
= dest
.regno
- offset
-1;
9512 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
9514 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
9516 /* We are going to override the new LSB; store it into carry. */
9518 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
9519 code_ashift
= "rol %0";
9520 lsb_in_carry
= true;
9523 avr_asm_len (code
, xop
, plen
, 1);
9528 /* Step 2: Shift destination left by 1 bit position. This might be needed
9529 ====== for signed input and unsigned output. */
9531 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
9533 unsigned s0
= dest
.regno
- offset
-1;
9535 /* n1169 4.1.4 says:
9536 "Conversions from a fixed-point to an integer type round toward zero."
9537 Hence, converting a fract type to integer only gives a non-zero result
9539 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
9540 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
9541 && !TARGET_FRACT_CONV_TRUNC
)
9543 gcc_assert (s0
== src
.regno_msb
);
9544 /* Check if the input is -1. We do that by checking if negating
9545 the input causes an integer overflow. */
9546 unsigned sn
= src
.regno
;
9547 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
9549 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
9551 /* Overflow goes with set carry. Clear carry otherwise. */
9552 avr_asm_len ("brvs 0f" CR_TAB
9553 "clc\n0:", NULL
, plen
, 2);
9555 /* Likewise, when converting from accumulator types to integer, we
9556 need to round up negative values. */
9557 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
9558 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
9559 && !TARGET_FRACT_CONV_TRUNC
9562 bool have_carry
= false;
9564 xop
[2] = all_regs_rtx
[s0
];
9565 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
9566 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
9567 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
9568 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
9569 if (!lsb_in_tmp_reg
)
9571 unsigned sn
= src
.regno
;
9574 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
9579 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
9580 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
9582 /* Add in C and the rounding value 127. */
9583 /* If the destination msb is a sign byte, and in LD_REGS,
9584 grab it as a temporary. */
9586 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
9589 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
9590 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
9591 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
9592 : have_carry
? "adc %2,%3"
9593 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
9599 /* Fall back to use __zero_reg__ as a temporary. */
9600 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
9602 avr_asm_len ("clt" CR_TAB
9603 "bld __zero_reg__,7", NULL
, plen
, 2);
9605 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
9606 avr_asm_len (have_carry
&& lsb_in_tmp_reg
9607 ? "adc __tmp_reg__,__zero_reg__"
9608 : have_carry
? "adc %2,__zero_reg__"
9609 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
9610 : "add %2,__zero_reg__",
9612 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
9615 for (d0
= dest
.regno
+ zero_bytes
;
9616 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
9617 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
9619 avr_asm_len (lsb_in_tmp_reg
9620 ? "\n0:\t" "lsl __tmp_reg__"
9621 : "\n0:\t" "lsl %2",
9624 else if (MAY_CLOBBER (s0
))
9625 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
9627 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9628 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
9630 code_ashift
= "rol %0";
9631 lsb_in_carry
= true;
9634 if (shift
== ASHIFT
)
9636 for (d0
= dest
.regno
+ zero_bytes
;
9637 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
9639 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
9640 code_ashift
= "rol %0";
9643 lsb_in_carry
= false;
9644 sign_in_carry
= true;
9647 /* Step 4a: Store MSB in carry if we don't already have it or will produce
9648 ======= it in sign-extension below. */
9650 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
9651 && src
.ibyte
> dest
.ibyte
)
9653 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
9655 if (MAY_CLOBBER (s0
))
9656 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
9658 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9659 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
9661 msb_in_carry
= true;
9664 /* Step 3: Sign-extend or zero-extend the destination as needed.
9667 if (sign_extend
&& !sign_in_carry
)
9669 unsigned s0
= src
.regno_msb
;
9671 if (MAY_CLOBBER (s0
))
9672 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
9674 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9675 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
9677 sign_in_carry
= true;
9680 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
9682 unsigned copies
= 0;
9683 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
9685 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
9687 if (AVR_HAVE_MOVW
&& movw
9688 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
9690 xop
[2] = all_regs_rtx
[d0
];
9692 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
9697 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
9698 &all_regs_rtx
[d0
], plen
, 1);
9700 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
9701 movw
= all_regs_rtx
[d0
-1];
9706 /* Step 4: Right shift the destination. This might be needed for
9707 ====== conversions from unsigned to signed. */
9709 if (shift
== ASHIFTRT
)
9711 const char *code_ashiftrt
= "lsr %0";
9713 if (sign_extend
|| msb_in_carry
)
9714 code_ashiftrt
= "ror %0";
9716 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
9717 code_ashiftrt
= "asr %0";
9719 for (d0
= dest
.regno_msb
- sign_bytes
;
9720 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
9722 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
9723 code_ashiftrt
= "ror %0";
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */

const char *
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  scalar_mode mode = as_a <scalar_mode> (GET_MODE (xop[0]));
  scalar_int_mode imode = int_mode_for_mode (mode).require ();
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
                                         GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate 1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below (the addition may jump to "0:" on saturation).

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  // Signed fixed-point uses saturating signed plus, unsigned the
  // unsigned variant.
  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  // If the addition saturated, the masking below must be skipped.
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                                      ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   OPERANDS[0] = destination, OPERANDS[1] = source, OPERANDS[2] = rotate
   count in bits (a multiple of 8), OPERANDS[3] = scratch register or
   SCRATCH.  Returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          // Classic 3-XOR in-place swap of the two bytes.
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      // NOTE(review): struct layout reconstructed from the uses below
      // (src/dst rtx pair plus a `links' index) — confirm against upstream.
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (int i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      for (int i = 0; i < size; i++)
        if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
          for (int j = 0; j < size; j++)
            if (j != i && rtx_equal_p (move[j].src, move[i].dst))
              {
                /* The dst of move i is the src of move j.  */
                move[i].links = j;
                break;
              }

      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (int i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size = size + 1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  Returns the
   adjusted length; dispatches to the per-pattern output function,
   which stores the exact length through its int* argument.  */

int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* As we pretend jump tables in .text, fix branch offsets crossing jump
     tables.  */

  if (JUMP_TABLE_DATA_P (insn))
    return 0;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn) || recog_memoized (insn) == -1)
    return len;

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each worker writes the exact length
     into LEN via the &len out-parameter.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
    case ADJUST_LEN_EXTR_NOT: avr_out_extr_not (insn, op, &len); break;
    case ADJUST_LEN_EXTR: avr_out_extr (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_CPYMEM: avr_out_cpymem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
    case ADJUST_LEN_CMP_UEXT: avr_out_cmp_ext (op, ZERO_EXTEND, &len); break;
    case ADJUST_LEN_CMP_SEXT: avr_out_cmp_ext (op, SIGN_EXTEND, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
    case ADJUST_LEN_ADD_SET_ZN: avr_out_plus_set_ZN (op, &len); break;

    case ADJUST_LEN_INSV_NOTBIT: avr_out_insert_notbit (insn, op, &len); break;

    default:
      gcc_unreachable ();
    }

  return len;
}
/* Return nonzero if register REG is dead after INSN, i.e. it is either
   dead or set at INSN itself, or not used by any later insn (as
   determined by the forward scan in _reg_unused_after).  */

int
reg_unused_after (rtx_insn *insn, rtx reg)
{
  return (dead_or_set_p (insn, reg)
          || (REG_P (reg) && _reg_unused_after (insn, reg)));
}
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.

   Scans forward from INSN; conservatively returns 0 on any jump or
   on any use of REG that is not a plain overwrite.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && !MEM_P (SET_DEST (set))
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
         is dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)
        return 1;
      /* else */
#endif

      if (!INSN_P (insn))
        continue;

      if (code == JUMP_INSN)
        return 0;

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
          int retval = 0;

          for (int i = 0; i < seq->len (); i++)
            {
              rtx_insn *this_insn = seq->insn (i);
              rtx set = single_set (this_insn);

              if (CALL_P (this_insn))
                code = CALL_INSN;
              else if (JUMP_P (this_insn))
                {
                  if (INSN_ANNULLED_BRANCH_P (this_insn))
                    return 0;
                  code = JUMP_INSN;
                }

              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
                return 0;
              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  if (!MEM_P (SET_DEST (set)))
                    retval = 1;
                  else
                    return 0;
                }
              if (set == 0
                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
                return 0;
            }
          if (retval == 1)
            return 1;
          else if (code == JUMP_INSN)
            return 0;
        }

      if (code == CALL_INSN)
        {
          rtx tem;
          /* REG may be explicitly used by the call.  */
          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
              return 0;
          /* A call clobbers all call-used registers, so REG is dead
             afterwards if it is one of them.  */
          if (call_used_or_fixed_reg_p (REGNO (reg)))
            return 1;
        }

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
        return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return !MEM_P (SET_DEST (set));
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
        return 0;
    }
  return 1;
}
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels: code addresses are
   emitted as word addresses via gs(), and PSImode / big fixed-point
   constants need byte-wise emission.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      // Code addresses: gs() lets the linker resolve stubs/trampolines.
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      for (unsigned n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  // On reduced Tiny, flash is visible in RAM at an offset; relocate
  // progmem addresses accordingly.
  if (AVR_TINY
      && avr_address_tiny_pm_p (x))
    {
      x = plus_constant (Pmode, x, avr_arch->flash_pm_offset);
    }

  return default_assemble_integer (x, size, aligned_p);
}
/* Implement TARGET_CLASS_MAX_NREGS.  Reasons described in comments for
   avr_hard_regno_nregs.  The condition-code register holds CCmode in a
   single register; everything else needs one register per byte.  */

static unsigned char
avr_class_max_nregs (reg_class_t rclass, machine_mode mode)
{
  if (rclass == CC_REG && mode == CCmode)
    return 1;

  return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
}
/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
/* Return value is nonzero if pseudos that have been
   assigned to registers of class CLASS would likely be spilled
   because registers of CLASS are needed for spill registers.
   On reduced Tiny, every class except ALL_REGS is scarce.  */

static bool
avr_class_likely_spilled_p (reg_class_t c)
{
  return (c != ALL_REGS &&
          (AVR_TINY ? 1 : c != ADDW_REGS));
}
10248 /* Valid attributes:
10249 progmem - Put data to program memory.
10250 signal - Make a function to be hardware interrupt.
10251 After function prologue interrupts remain disabled.
10252 interrupt - Make a function to be hardware interrupt. Before function
10253 prologue interrupts are enabled by means of SEI.
10254 naked - Don't generate function prologue/epilogue and RET
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  Accepts the attribute on static-storage
   variables (and, for backwards compatibility, on TYPE_DECLs, where it is
   moved onto the type); warns and drops it otherwise.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
                              tree args ATTRIBUTE_UNUSED,
                              int flags ATTRIBUTE_UNUSED,
                              bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
        {
          /* This is really a decl attribute, not a type attribute,
             but try to handle it for GCC 3.0 backwards compatibility.  */

          tree type = TREE_TYPE (*node);
          tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
          tree newtype = build_type_attribute_variant (type, attr);

          TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
          TREE_TYPE (*node) = newtype;
          *no_add_attrs = true;
        }
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
        {
          *no_add_attrs = false;
        }
      else
        {
          warning (OPT_Wattributes, "%qE attribute ignored",
                   name);
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
10296 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
10297 struct attribute_spec.handler. */
10300 avr_handle_fndecl_attribute (tree
*node
, tree name
,
10301 tree args ATTRIBUTE_UNUSED
,
10302 int flags ATTRIBUTE_UNUSED
,
10303 bool *no_add_attrs
)
10305 if (TREE_CODE (*node
) != FUNCTION_DECL
)
10307 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
10309 *no_add_attrs
= true;
10316 avr_handle_fntype_attribute (tree
*node
, tree name
,
10317 tree args ATTRIBUTE_UNUSED
,
10318 int flags ATTRIBUTE_UNUSED
,
10319 bool *no_add_attrs
)
10321 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
10323 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
10325 *no_add_attrs
= true;
10332 avr_handle_absdata_attribute (tree
*node
, tree name
, tree
/* args */,
10333 int /* flags */, bool *no_add
)
10335 location_t loc
= DECL_SOURCE_LOCATION (*node
);
10339 if (TREE_CODE (*node
) != VAR_DECL
10340 || (!TREE_STATIC (*node
) && !DECL_EXTERNAL (*node
)))
10342 warning_at (loc
, OPT_Wattributes
, "%qE attribute only applies to"
10343 " variables in static storage", name
);
10349 warning_at (loc
, OPT_Wattributes
, "%qE attribute only supported"
10350 " for reduced Tiny cores", name
);
10358 avr_handle_addr_attribute (tree
*node
, tree name
, tree args
,
10359 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
10361 bool io_p
= startswith (IDENTIFIER_POINTER (name
), "io");
10362 location_t loc
= DECL_SOURCE_LOCATION (*node
);
10364 if (!VAR_P (*node
))
10366 warning_at (loc
, OPT_Wattributes
, "%qE attribute only applies to "
10367 "variables", name
);
10372 if (args
!= NULL_TREE
)
10374 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
10375 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
10376 tree arg
= TREE_VALUE (args
);
10377 if (TREE_CODE (arg
) != INTEGER_CST
)
10379 warning_at (loc
, OPT_Wattributes
, "%qE attribute allows only an "
10380 "integer constant argument", name
);
10384 && (!tree_fits_shwi_p (arg
)
10385 || !(strcmp (IDENTIFIER_POINTER (name
), "io_low") == 0
10386 ? low_io_address_operand
: io_address_operand
)
10387 (GEN_INT (TREE_INT_CST_LOW (arg
)), QImode
)))
10389 warning_at (loc
, OPT_Wattributes
, "%qE attribute address "
10390 "out of range", name
);
10395 tree attribs
= DECL_ATTRIBUTES (*node
);
10396 const char *names
[] = { "io", "io_low", "address", NULL
};
10397 for (const char **p
= names
; *p
; p
++)
10399 tree other
= lookup_attribute (*p
, attribs
);
10400 if (other
&& TREE_VALUE (other
))
10402 warning_at (loc
, OPT_Wattributes
,
10403 "both %s and %qE attribute provide address",
10412 if (*no_add
== false && io_p
&& !TREE_THIS_VOLATILE (*node
))
10413 warning_at (loc
, OPT_Wattributes
, "%qE attribute on non-volatile variable",
10420 avr_eval_addr_attrib (rtx x
)
10422 if (SYMBOL_REF_P (x
)
10423 && (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_ADDRESS
))
10425 tree decl
= SYMBOL_REF_DECL (x
);
10426 tree attr
= NULL_TREE
;
10428 if (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
)
10430 attr
= lookup_attribute ("io", DECL_ATTRIBUTES (decl
));
10431 if (!attr
|| !TREE_VALUE (attr
))
10432 attr
= lookup_attribute ("io_low", DECL_ATTRIBUTES (decl
));
10435 if (!attr
|| !TREE_VALUE (attr
))
10436 attr
= lookup_attribute ("address", DECL_ATTRIBUTES (decl
));
10437 gcc_assert (attr
&& TREE_VALUE (attr
) && TREE_VALUE (TREE_VALUE (attr
)));
10438 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
))));
10444 /* AVR attributes. */
10445 TARGET_GNU_ATTRIBUTES (avr_attribute_table
,
10447 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
10448 affects_type_identity, handler, exclude } */
10449 { "progmem", 0, 0, false, false, false, false,
10450 avr_handle_progmem_attribute
, NULL
},
10451 { "signal", 0, 0, true, false, false, false,
10452 avr_handle_fndecl_attribute
, NULL
},
10453 { "interrupt", 0, 0, true, false, false, false,
10454 avr_handle_fndecl_attribute
, NULL
},
10455 { "no_gccisr", 0, 0, true, false, false, false,
10456 avr_handle_fndecl_attribute
, NULL
},
10457 { "naked", 0, 0, false, true, true, false,
10458 avr_handle_fntype_attribute
, NULL
},
10459 { "OS_task", 0, 0, false, true, true, false,
10460 avr_handle_fntype_attribute
, NULL
},
10461 { "OS_main", 0, 0, false, true, true, false,
10462 avr_handle_fntype_attribute
, NULL
},
10463 { "io", 0, 1, true, false, false, false,
10464 avr_handle_addr_attribute
, NULL
},
10465 { "io_low", 0, 1, true, false, false, false,
10466 avr_handle_addr_attribute
, NULL
},
10467 { "address", 1, 1, true, false, false, false,
10468 avr_handle_addr_attribute
, NULL
},
10469 { "absdata", 0, 0, true, false, false, false,
10470 avr_handle_absdata_attribute
, NULL
}
10474 /* Return true if we support address space AS for the architecture in effect
10475 and false, otherwise. If LOC is not UNKNOWN_LOCATION then also issue
10476 a respective error. */
10479 avr_addr_space_supported_p (addr_space_t as
, location_t loc
)
10483 if (loc
!= UNKNOWN_LOCATION
)
10484 error_at (loc
, "address spaces are not supported for reduced "
10488 else if (avr_addrspace
[as
].segment
>= avr_n_flash
)
10490 if (loc
!= UNKNOWN_LOCATION
)
10491 error_at (loc
, "address space %qs not supported for devices with "
10492 "flash size up to %d KiB", avr_addrspace
[as
].name
,
10501 /* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'. */
10504 avr_addr_space_diagnose_usage (addr_space_t as
, location_t loc
)
10506 (void) avr_addr_space_supported_p (as
, loc
);
10509 /* Implement `TARGET_ADDR_SPACE_ZERO_ADDRESS_VALID. Zero is a valid
10510 address in all address spaces. Even in ADDR_SPACE_FLASH1 etc..,
10511 a zero address is valid and means 0x<RAMPZ val>0000, where RAMPZ is
10512 set to the appropriate segment value. */
10515 avr_addr_space_zero_address_valid (addr_space_t
)
10520 /* Look if DECL shall be placed in program memory space by
10521 means of attribute `progmem' or some address-space qualifier.
10522 Return non-zero if DECL is data that must end up in Flash and
10523 zero if the data lives in RAM (.bss, .data, .rodata, ...).
10525 Return 2 if DECL is located in 24-bit flash address-space
10526 Return 1 if DECL is located in 16-bit flash address-space
10527 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
10528 Return 0 otherwise */
10531 avr_progmem_p (tree decl
, tree attributes
)
10535 if (TREE_CODE (decl
) != VAR_DECL
)
10538 if (avr_decl_memx_p (decl
))
10541 if (avr_decl_flash_p (decl
))
10545 != lookup_attribute ("progmem", attributes
))
10552 while (TREE_CODE (a
) == ARRAY_TYPE
);
10554 if (a
== error_mark_node
)
10557 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
10564 /* Return true if DECL has attribute `absdata' set. This function should
10565 only be used for AVR_TINY. */
10568 avr_decl_absdata_p (tree decl
, tree attributes
)
10570 return (VAR_P (decl
)
10571 && NULL_TREE
!= lookup_attribute ("absdata", attributes
));
10575 /* Scan type TYP for pointer references to address space ASn.
10576 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
10577 the AS are also declared to be CONST.
10578 Otherwise, return the respective address space, i.e. a value != 0. */
10580 static addr_space_t
10581 avr_nonconst_pointer_addrspace (tree typ
)
10583 while (ARRAY_TYPE
== TREE_CODE (typ
))
10584 typ
= TREE_TYPE (typ
);
10586 if (POINTER_TYPE_P (typ
))
10589 tree target
= TREE_TYPE (typ
);
10591 /* Pointer to function: Test the function's return type. */
10593 if (FUNCTION_TYPE
== TREE_CODE (target
))
10594 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
10596 /* "Ordinary" pointers... */
10598 while (TREE_CODE (target
) == ARRAY_TYPE
)
10599 target
= TREE_TYPE (target
);
10601 /* Pointers to non-generic address space must be const. */
10603 as
= TYPE_ADDR_SPACE (target
);
10605 if (!ADDR_SPACE_GENERIC_P (as
)
10606 && !TYPE_READONLY (target
)
10607 && avr_addr_space_supported_p (as
))
10612 /* Scan pointer's target type. */
10614 return avr_nonconst_pointer_addrspace (target
);
10617 return ADDR_SPACE_GENERIC
;
10621 /* Sanity check NODE so that all pointers targeting non-generic address spaces
10622 go along with CONST qualifier. Writing to these address spaces should
10623 be detected and complained about as early as possible. */
10626 avr_pgm_check_var_decl (tree node
)
10628 const char *reason
= NULL
;
10630 addr_space_t as
= ADDR_SPACE_GENERIC
;
10632 gcc_assert (as
== 0);
10634 if (avr_log
.progmem
)
10635 avr_edump ("%?: %t\n", node
);
10637 switch (TREE_CODE (node
))
10643 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
10644 reason
= _("variable");
10648 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
10649 reason
= _("function parameter");
10653 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
10654 reason
= _("structure field");
10657 case FUNCTION_DECL
:
10658 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
10660 reason
= _("return type of function");
10664 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
10665 reason
= _("pointer");
10672 error ("pointer targeting address space %qs must be const in %qT",
10673 avr_addrspace
[as
].name
, node
);
10675 error ("pointer targeting address space %qs must be const"
10677 avr_addrspace
[as
].name
, reason
, node
);
10680 return reason
== NULL
;
10684 /* Implement `TARGET_INSERT_ATTRIBUTES'. */
10687 avr_insert_attributes (tree node
, tree
*attributes
)
10689 avr_pgm_check_var_decl (node
);
10691 if (TARGET_MAIN_IS_OS_TASK
10692 && TREE_CODE (node
) == FUNCTION_DECL
10693 && MAIN_NAME_P (DECL_NAME (node
))
10694 // FIXME: We'd like to also test `flag_hosted' which is only
10695 // available in the C-ish fronts, hence no such test for now.
10696 // Instead, we test the return type of "main" which is not exactly
10697 // the same but good enough.
10698 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (node
)))
10699 && NULL
== lookup_attribute ("OS_task", *attributes
))
10701 *attributes
= tree_cons (get_identifier ("OS_task"),
10702 NULL
, *attributes
);
10705 /* Add the section attribute if the variable is in progmem. */
10708 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
10709 && avr_progmem_p (node
, *attributes
))
10714 /* For C++, we have to peel arrays in order to get correct
10715 determination of readonlyness. */
10718 node0
= TREE_TYPE (node0
);
10719 while (TREE_CODE (node0
) == ARRAY_TYPE
);
10721 if (error_mark_node
== node0
)
10724 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
10726 if (!TYPE_READONLY (node0
)
10727 && !TREE_READONLY (node
))
10729 const char *reason
= "__attribute__((progmem))";
10731 if (!ADDR_SPACE_GENERIC_P (as
))
10732 reason
= avr_addrspace
[as
].name
;
10734 if (avr_log
.progmem
)
10735 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
10737 error ("variable %q+D must be const in order to be put into"
10738 " read-only section by means of %qs", node
, reason
);
10744 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
10745 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
10746 /* Track need of __do_clear_bss. */
10749 avr_asm_output_aligned_decl_common (FILE * stream
,
10752 unsigned HOST_WIDE_INT size
,
10753 unsigned int align
, bool local_p
)
10755 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
10758 if (mem
!= NULL_RTX
&& MEM_P (mem
)
10759 && SYMBOL_REF_P ((symbol
= XEXP (mem
, 0)))
10760 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
10764 fprintf (stream
, "\t.globl\t");
10765 assemble_name (stream
, name
);
10766 fprintf (stream
, "\n");
10768 if (SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
)
10770 assemble_name (stream
, name
);
10771 fprintf (stream
, " = %ld\n",
10772 (long) INTVAL (avr_eval_addr_attrib (symbol
)));
10775 error_at (DECL_SOURCE_LOCATION (decl
),
10776 "static IO declaration for %q+D needs an address", decl
);
10780 /* __gnu_lto_slim is just a marker for the linker injected by toplev.cc.
10781 There is no need to trigger __do_clear_bss code for them. */
10783 if (!startswith (name
, "__gnu_lto"))
10784 avr_need_clear_bss_p
= true;
10787 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
10789 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
10793 avr_asm_asm_output_aligned_bss (FILE *file
, tree decl
, const char *name
,
10794 unsigned HOST_WIDE_INT size
, int align
,
10795 void (*default_func
)
10796 (FILE *, tree
, const char *,
10797 unsigned HOST_WIDE_INT
, int))
10799 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
10802 if (mem
!= NULL_RTX
&& MEM_P (mem
)
10803 && SYMBOL_REF_P ((symbol
= XEXP (mem
, 0)))
10804 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
10806 if (!(SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
))
10807 error_at (DECL_SOURCE_LOCATION (decl
),
10808 "IO definition for %q+D needs an address", decl
);
10809 avr_asm_output_aligned_decl_common (file
, decl
, name
, size
, align
, false);
10812 default_func (file
, decl
, name
, size
, align
);
10816 /* Unnamed section callback for data_section
10817 to track need of __do_copy_data. */
10820 avr_output_data_section_asm_op (const char *data
)
10822 avr_need_copy_data_p
= true;
10824 /* Dispatch to default. */
10825 output_section_asm_op (data
);
10829 /* Unnamed section callback for bss_section
10830 to track need of __do_clear_bss. */
10833 avr_output_bss_section_asm_op (const char *data
)
10835 avr_need_clear_bss_p
= true;
10837 /* Dispatch to default. */
10838 output_section_asm_op (data
);
10842 /* Unnamed section callback for progmem*.data sections. */
10845 avr_output_progmem_section_asm_op (const char *data
)
10847 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n", data
);
10851 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
10854 avr_asm_init_sections (void)
10856 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
10857 resp. `avr_need_copy_data_p'. If flash is not mapped to RAM then
10858 we have also to track .rodata because it is located in RAM then. */
10860 #if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
10861 if (avr_arch
->flash_pm_offset
== 0)
10863 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
10864 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
10865 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
10869 /* Implement `TARGET_ASM_NAMED_SECTION'. */
10870 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
10873 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
10875 if (flags
& AVR_SECTION_PROGMEM
10876 // Only use section .progmem*.data if there is no attribute section.
10878 && DECL_SECTION_NAME (decl
)
10879 && symtab_node::get (decl
)
10880 && ! symtab_node::get (decl
)->implicit_section
))
10882 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
10883 const char *old_prefix
= ".rodata";
10884 const char *new_prefix
= avr_addrspace
[as
].section_name
;
10886 if (startswith (name
, old_prefix
))
10888 const char *sname
= ACONCAT ((new_prefix
,
10889 name
+ strlen (old_prefix
), NULL
));
10890 default_elf_asm_named_section (sname
, flags
, decl
);
10894 default_elf_asm_named_section (new_prefix
, flags
, decl
);
10898 if (!avr_need_copy_data_p
)
10899 avr_need_copy_data_p
= (startswith (name
, ".data")
10900 || startswith (name
, ".gnu.linkonce.d"));
10902 if (!avr_need_copy_data_p
10903 #if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
10904 && avr_arch
->flash_pm_offset
== 0
10907 avr_need_copy_data_p
= (startswith (name
, ".rodata")
10908 || startswith (name
, ".gnu.linkonce.r"));
10910 if (!avr_need_clear_bss_p
)
10911 avr_need_clear_bss_p
= startswith (name
, ".bss");
10913 default_elf_asm_named_section (name
, flags
, decl
);
10917 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
10919 static unsigned int
10920 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
10922 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
10924 if (startswith (name
, ".noinit"))
10926 if (decl
&& VAR_P (decl
)
10927 && DECL_INITIAL (decl
) == NULL_TREE
)
10928 flags
|= SECTION_BSS
; /* @nobits */
10930 warning (0, "only uninitialized variables can be placed in the "
10931 "%<.noinit%> section");
10934 if (decl
&& DECL_P (decl
)
10935 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
10937 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
10939 /* Attribute progmem puts data in generic address space.
10940 Set section flags as if it was in __flash to get the right
10941 section prefix in the remainder. */
10943 if (ADDR_SPACE_GENERIC_P (as
))
10944 as
= ADDR_SPACE_FLASH
;
10946 flags
|= as
* SECTION_MACH_DEP
;
10947 flags
&= ~SECTION_WRITE
;
10948 flags
&= ~SECTION_BSS
;
10949 flags
&= ~SECTION_NOTYPE
;
10956 /* A helper for the next function. NODE is a decl that is associated with
10957 a symbol. Return TRUE if the respective object may be accessed by LDS.
10958 There might still be other reasons for why LDS is not appropriate.
10959 This function is only appropriate for AVR_TINY. */
10962 avr_decl_maybe_lds_p (tree node
)
10965 || TREE_CODE (node
) != VAR_DECL
10966 || DECL_SECTION_NAME (node
) != NULL
)
10969 /* Don't use LDS for objects that go to .rodata. The current default
10970 linker description file still locates .rodata in RAM, but this is not
10971 a must. A better linker script would just keep .rodata in flash and
10972 add an offset of 0x4000 to the VMA. Hence avoid LDS for such data. */
10974 if (TREE_READONLY (node
))
10977 // C++ requires peeling arrays.
10980 node
= TREE_TYPE (node
);
10981 while (ARRAY_TYPE
== TREE_CODE (node
));
10983 return (node
!= error_mark_node
10984 && !TYPE_READONLY (node
));
10988 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
10991 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
10993 tree addr_attr
= NULL_TREE
;
10995 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
10996 readily available, see PR34734. So we postpone the warning
10997 about uninitialized data in program memory section until here. */
11000 && decl
&& DECL_P (decl
)
11001 && !DECL_EXTERNAL (decl
)
11002 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
11004 if (!TREE_READONLY (decl
))
11006 // This might happen with C++ if stuff needs constructing.
11007 error ("variable %q+D with dynamic initialization put "
11008 "into program memory area", decl
);
11010 else if (NULL_TREE
== DECL_INITIAL (decl
))
11012 // Don't warn for (implicit) aliases like in PR80462.
11013 tree asmname
= DECL_ASSEMBLER_NAME (decl
);
11014 varpool_node
*node
= varpool_node::get_for_asmname (asmname
);
11015 bool alias_p
= node
&& node
->alias
;
11018 warning (OPT_Wuninitialized
, "uninitialized variable %q+D put "
11019 "into program memory area", decl
);
11023 default_encode_section_info (decl
, rtl
, new_decl_p
);
11025 if (decl
&& DECL_P (decl
)
11026 && TREE_CODE (decl
) != FUNCTION_DECL
11028 && SYMBOL_REF_P (XEXP (rtl
, 0)))
11030 rtx sym
= XEXP (rtl
, 0);
11031 tree type
= TREE_TYPE (decl
);
11032 tree attr
= DECL_ATTRIBUTES (decl
);
11033 if (type
== error_mark_node
)
11036 addr_space_t as
= TYPE_ADDR_SPACE (type
);
11038 /* PSTR strings are in generic space but located in flash:
11039 patch address space. */
11041 if (!AVR_TINY
&& avr_progmem_p (decl
, attr
) == -1)
11042 as
= ADDR_SPACE_FLASH
;
11044 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
11046 tree io_low_attr
= lookup_attribute ("io_low", attr
);
11047 tree io_attr
= lookup_attribute ("io", attr
);
11050 && TREE_VALUE (io_low_attr
) && TREE_VALUE (TREE_VALUE (io_low_attr
)))
11051 addr_attr
= io_attr
;
11053 && TREE_VALUE (io_attr
) && TREE_VALUE (TREE_VALUE (io_attr
)))
11054 addr_attr
= io_attr
;
11056 addr_attr
= lookup_attribute ("address", attr
);
11058 || (io_attr
&& addr_attr
11059 && low_io_address_operand
11060 (GEN_INT (TREE_INT_CST_LOW
11061 (TREE_VALUE (TREE_VALUE (addr_attr
)))), QImode
)))
11062 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO_LOW
;
11063 if (io_attr
|| io_low_attr
)
11064 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO
;
11065 /* If we have an (io) address attribute specification, but the variable
11066 is external, treat the address as only a tentative definition
11067 to be used to determine if an io port is in the lower range, but
11068 don't use the exact value for constant propagation. */
11069 if (addr_attr
&& !DECL_EXTERNAL (decl
))
11070 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_ADDRESS
;
11077 && SYMBOL_REF_P (XEXP (rtl
, 0)))
11079 rtx sym
= XEXP (rtl
, 0);
11080 bool progmem_p
= avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)) == -1;
11084 // Tag symbols for addition of 0x4000 (avr_arch->flash_pm_offset).
11085 SYMBOL_REF_FLAGS (sym
) |= AVR_SYMBOL_FLAG_TINY_PM
;
11088 if (avr_decl_absdata_p (decl
, DECL_ATTRIBUTES (decl
))
11092 && avr_decl_maybe_lds_p (decl
))
11094 // If addr_attr is non-null, it has an argument. Peek into it.
11095 && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr
))) < 0xc0))
11097 // May be accessed by LDS / STS.
11098 SYMBOL_REF_FLAGS (sym
) |= AVR_SYMBOL_FLAG_TINY_ABSDATA
;
11102 && avr_decl_absdata_p (decl
, DECL_ATTRIBUTES (decl
)))
11104 error ("%q+D has incompatible attributes %qs and %qs",
11105 decl
, "progmem", "absdata");
11111 /* Implement `TARGET_ASM_SELECT_SECTION' */
11114 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
11116 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
11118 if (decl
&& DECL_P (decl
)
11119 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
11121 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
11123 /* __progmem__ goes in generic space but shall be allocated to
11126 if (ADDR_SPACE_GENERIC_P (as
))
11127 as
= ADDR_SPACE_FLASH
;
11129 if (sect
->common
.flags
& SECTION_NAMED
)
11131 const char * name
= sect
->named
.name
;
11132 const char * old_prefix
= ".rodata";
11133 const char * new_prefix
= avr_addrspace
[as
].section_name
;
11135 if (startswith (name
, old_prefix
))
11137 const char *sname
= ACONCAT ((new_prefix
,
11138 name
+ strlen (old_prefix
), NULL
));
11139 return get_section (sname
,
11140 sect
->common
.flags
& ~SECTION_DECLARED
,
11145 if (!progmem_section
[as
])
11147 progmem_section
[as
]
11148 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
11149 avr_addrspace
[as
].section_name
);
11152 return progmem_section
[as
];
11158 /* Implement `TARGET_ASM_FILE_START'. */
11159 /* Outputs some text at the start of each assembler file. */
11162 avr_file_start (void)
11164 int sfr_offset
= avr_arch
->sfr_offset
;
11166 if (avr_arch
->asm_only
)
11167 error ("architecture %qs supported for assembler only", avr_mmcu
);
11169 default_file_start ();
11171 /* Print I/O addresses of some SFRs used with IN and OUT. */
11174 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
11176 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
11177 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
11178 if (AVR_HAVE_RAMPZ
)
11179 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
11180 if (AVR_HAVE_RAMPY
)
11181 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
11182 if (AVR_HAVE_RAMPX
)
11183 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
11184 if (AVR_HAVE_RAMPD
)
11185 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
11186 if (AVR_XMEGA
|| AVR_TINY
)
11187 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
11188 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", AVR_TMP_REGNO
);
11189 fprintf (asm_out_file
, "__zero_reg__ = %d\n", AVR_ZERO_REGNO
);
11193 /* Implement `TARGET_ASM_FILE_END'. */
11194 /* Outputs to the stdio stream FILE some
11195 appropriate text to go at the end of an assembler file. */
11198 avr_file_end (void)
11200 /* Output these only if there is anything in the
11201 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
11202 input section(s) - some code size can be saved by not
11203 linking in the initialization code from libgcc if resp.
11204 sections are empty, see PR18145. */
11206 if (avr_need_copy_data_p
)
11207 fputs (".global __do_copy_data\n", asm_out_file
);
11209 if (avr_need_clear_bss_p
)
11210 fputs (".global __do_clear_bss\n", asm_out_file
);
11214 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
11215 /* Choose the order in which to allocate hard registers for
11216 pseudo-registers local to a basic block.
11218 Store the desired register order in the array `reg_alloc_order'.
11219 Element 0 should be the register to allocate first; element 1, the
11220 next register; and so on. */
11223 avr_adjust_reg_alloc_order (void)
11225 static const int order_0
[] =
11228 18, 19, 20, 21, 22, 23,
11231 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
11235 static const int tiny_order_0
[] = {
11245 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11247 static const int order_1
[] =
11249 18, 19, 20, 21, 22, 23, 24, 25,
11252 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
11256 static const int tiny_order_1
[] = {
11265 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11267 static const int order_2
[] =
11269 25, 24, 23, 22, 21, 20, 19, 18,
11272 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
11277 /* Select specific register allocation order.
11278 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
11279 so different allocation order should be used. */
11281 const int *order
= (TARGET_ORDER_1
? (AVR_TINY
? tiny_order_1
: order_1
)
11282 : TARGET_ORDER_2
? (AVR_TINY
? tiny_order_0
: order_2
)
11283 : (AVR_TINY
? tiny_order_0
: order_0
));
11285 for (size_t i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
11286 reg_alloc_order
[i
] = order
[i
];
11290 /* Implement `TARGET_REGISTER_MOVE_COST' */
11293 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
11294 reg_class_t from
, reg_class_t to
)
11296 return (from
== STACK_REG
? 6
11297 : to
== STACK_REG
? 12
11302 /* Implement `TARGET_MEMORY_MOVE_COST' */
11305 avr_memory_move_cost (machine_mode mode
,
11306 reg_class_t rclass ATTRIBUTE_UNUSED
,
11307 bool in ATTRIBUTE_UNUSED
)
11309 return (mode
== QImode
? 2
11310 : mode
== HImode
? 4
11311 : mode
== SImode
? 8
11312 : mode
== SFmode
? 8
11317 /* Cost for mul highpart. X is a LSHIFTRT, i.e. the outer TRUNCATE is
11318 already stripped off. */
11321 avr_mul_highpart_cost (rtx x
, int)
11324 && LSHIFTRT
== GET_CODE (x
)
11325 && MULT
== GET_CODE (XEXP (x
, 0))
11326 && CONST_INT_P (XEXP (x
, 1)))
11328 // This is the wider mode.
11329 machine_mode mode
= GET_MODE (x
);
11331 // The middle-end might still have PR81444, i.e. it is calling the cost
11332 // functions with strange modes. Fix this now by also considering
11333 // PSImode (should actually be SImode instead).
11334 if (HImode
== mode
|| PSImode
== mode
|| SImode
== mode
)
11336 return COSTS_N_INSNS (2);
11344 /* Return the expected cost of a conditional branch like
11349 where X is some comparison operator. */
11352 avr_cbranch_cost (rtx x
)
11354 bool difficult_p
= difficult_comparison_operator (x
, VOIDmode
);
11356 if (reload_completed
)
11358 // After reload, we basically just have plain branches.
11359 return COSTS_N_INSNS (1 + difficult_p
);
11362 rtx xreg
= XEXP (x
, 0);
11363 rtx xval
= XEXP (x
, 1);
11364 machine_mode mode
= GET_MODE (xreg
);
11365 if (mode
== VOIDmode
)
11366 mode
= GET_MODE (xval
);
11367 int size
= GET_MODE_SIZE (mode
);
11369 if (GET_CODE (xreg
) == ZERO_EXTEND
11370 || GET_CODE (xval
) == ZERO_EXTEND
)
11372 // *cbranch<HISI:mode>.<code><QIPSI:mode>.0/1, code = zero_extend.
11373 return COSTS_N_INSNS (size
+ 1);
11376 if (GET_CODE (xreg
) == SIGN_EXTEND
11377 || GET_CODE (xval
) == SIGN_EXTEND
)
11379 // *cbranch<HISI:mode>.<code><QIPSI:mode>.0/1, code = sign_extend.
11380 // Make it a bit cheaper than it actually is (less reg pressure).
11381 return COSTS_N_INSNS (size
+ 1 + 1);
11384 bool reg_p
= register_operand (xreg
, mode
);
11385 bool reg_or_0_p
= reg_or_0_operand (xval
, mode
);
11387 return COSTS_N_INSNS (size
11390 // Combine might propagate constants other than zero
11391 // into the 2nd operand. Make that more expensive.
11392 + 1 * (!reg_p
|| !reg_or_0_p
));
11396 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
11397 cost of an RTX operand given its context. X is the rtx of the
11398 operand, MODE is its mode, and OUTER is the rtx_code of this
11399 operand's parent operator. */
11402 avr_operand_rtx_cost (rtx x
, machine_mode mode
, enum rtx_code outer
,
11403 int opno
, bool speed
)
11405 enum rtx_code code
= GET_CODE (x
);
11417 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11424 avr_rtx_costs (x
, mode
, outer
, opno
, &total
, speed
);
11428 /* Worker function for AVR backend's rtx_cost function.
11429 X is rtx expression whose cost is to be calculated.
11430 Return true if the complete cost has been computed.
11431 Return false if subexpressions should be scanned.
11432 In either case, *TOTAL contains the cost result. */
11435 avr_rtx_costs_1 (rtx x
, machine_mode mode
, int outer_code
,
11436 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
11438 enum rtx_code code
= GET_CODE (x
);
11449 /* Immediate constants are as cheap as registers. */
11454 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11462 *total
= COSTS_N_INSNS (1);
11468 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
11474 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11482 *total
= COSTS_N_INSNS (1);
11488 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11492 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11493 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11497 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
11498 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
11499 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
11504 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
11505 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
11506 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
11515 && MULT
== GET_CODE (XEXP (x
, 0))
11516 && register_operand (XEXP (x
, 1), QImode
))
11519 *total
= COSTS_N_INSNS (speed
? 4 : 3);
11520 /* multiply-add with constant: will be split and load constant. */
11521 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
11522 *total
= COSTS_N_INSNS (1) + *total
;
11525 *total
= COSTS_N_INSNS (1);
11526 if (!CONST_INT_P (XEXP (x
, 1)))
11527 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
11532 && (MULT
== GET_CODE (XEXP (x
, 0))
11533 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
11534 && register_operand (XEXP (x
, 1), HImode
)
11535 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
11536 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
11539 *total
= COSTS_N_INSNS (speed
? 5 : 4);
11540 /* multiply-add with constant: will be split and load constant. */
11541 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
11542 *total
= COSTS_N_INSNS (1) + *total
;
11545 if (!CONST_INT_P (XEXP (x
, 1)))
11547 *total
= COSTS_N_INSNS (2);
11548 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11551 else if (IN_RANGE (INTVAL (XEXP (x
, 1)), -63, 63))
11552 *total
= COSTS_N_INSNS (1);
11554 *total
= COSTS_N_INSNS (2);
11558 if (!CONST_INT_P (XEXP (x
, 1)))
11560 *total
= COSTS_N_INSNS (3);
11561 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11564 else if (IN_RANGE (INTVAL (XEXP (x
, 1)), -63, 63))
11565 *total
= COSTS_N_INSNS (2);
11567 *total
= COSTS_N_INSNS (3);
11571 if (!CONST_INT_P (XEXP (x
, 1)))
11573 *total
= COSTS_N_INSNS (4);
11574 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11577 else if (IN_RANGE (INTVAL (XEXP (x
, 1)), -63, 63))
11578 *total
= COSTS_N_INSNS (1);
11580 *total
= COSTS_N_INSNS (4);
11586 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11592 && register_operand (XEXP (x
, 0), QImode
)
11593 && MULT
== GET_CODE (XEXP (x
, 1)))
11596 *total
= COSTS_N_INSNS (speed
? 4 : 3);
11597 /* multiply-sub with constant: will be split and load constant. */
11598 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
11599 *total
= COSTS_N_INSNS (1) + *total
;
11604 && register_operand (XEXP (x
, 0), HImode
)
11605 && (MULT
== GET_CODE (XEXP (x
, 1))
11606 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
11607 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
11608 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
11611 *total
= COSTS_N_INSNS (speed
? 5 : 4);
11612 /* multiply-sub with constant: will be split and load constant. */
11613 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
11614 *total
= COSTS_N_INSNS (1) + *total
;
11622 && ASHIFT
== GET_CODE (XEXP (x
, 0)))
11624 *total
= COSTS_N_INSNS (2);
11625 // Just a rough estimate. If we see no sign- or zero-extend,
11626 // then increase the cost a little bit.
11627 if (REG_P (XEXP (XEXP (x
, 0), 0)))
11628 *total
+= COSTS_N_INSNS (1);
11629 if (REG_P (XEXP (x
, 1)))
11630 *total
+= COSTS_N_INSNS (1);
11634 && AND
== GET_CODE (XEXP (x
, 0))
11635 && AND
== GET_CODE (XEXP (x
, 1))
11636 && single_zero_operand (XEXP (XEXP (x
, 0), 1), mode
))
11638 // Open-coded bit transfer.
11639 *total
= COSTS_N_INSNS (2);
11643 && single_one_operand (XEXP (x
, 1), mode
)
11644 && (ASHIFT
== GET_CODE (XEXP (x
, 0))
11645 || ASHIFTRT
== GET_CODE (XEXP (x
, 0))
11646 || LSHIFTRT
== GET_CODE (XEXP (x
, 0))))
11648 // "*insv.any_shift.<mode>
11649 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
));
11652 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11653 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11654 if (!CONST_INT_P (XEXP (x
, 1)))
11655 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
11659 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11660 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11661 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
11669 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
11671 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
11679 rtx op0
= XEXP (x
, 0);
11680 rtx op1
= XEXP (x
, 1);
11681 enum rtx_code code0
= GET_CODE (op0
);
11682 enum rtx_code code1
= GET_CODE (op1
);
11683 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
11684 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
11687 && (u8_operand (op1
, HImode
)
11688 || s8_operand (op1
, HImode
)))
11690 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
11694 && register_operand (op1
, HImode
))
11696 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
11699 else if (ex0
|| ex1
)
11701 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
11704 else if (register_operand (op0
, HImode
)
11705 && (u8_operand (op1
, HImode
)
11706 || s8_operand (op1
, HImode
)))
11708 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
11712 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
11715 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
11722 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
11733 /* Add some additional costs besides CALL like moves etc. */
11735 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
11739 /* Just a rough estimate. Even with -O2 we don't want bulky
11740 code expanded inline. */
11742 *total
= COSTS_N_INSNS (25);
11748 *total
= COSTS_N_INSNS (300);
11750 /* Add some additional costs besides CALL like moves etc. */
11751 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
11754 if (mode
== DImode
)
11762 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11763 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
11771 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
11773 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
11774 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11775 /* For div/mod with const-int divisor we have at least the cost of
11776 loading the divisor. */
11777 if (CONST_INT_P (XEXP (x
, 1)))
11778 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
11779 /* Add some overall penaly for clobbering and moving around registers */
11780 *total
+= COSTS_N_INSNS (2);
11787 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
11788 *total
= COSTS_N_INSNS (1);
11793 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
11794 *total
= COSTS_N_INSNS (3);
11799 if (CONST_INT_P (XEXP (x
, 1)))
11800 switch (INTVAL (XEXP (x
, 1)))
11804 *total
= COSTS_N_INSNS (5);
11807 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
11815 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11822 if (!CONST_INT_P (XEXP (x
, 1)))
11824 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
11825 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11830 val
= INTVAL (XEXP (x
, 1));
11832 *total
= COSTS_N_INSNS (3);
11833 else if (val
>= 0 && val
<= 7)
11834 *total
= COSTS_N_INSNS (val
);
11836 *total
= COSTS_N_INSNS (1);
11843 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
11844 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
11845 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
11847 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
11852 if (const1_rtx
== (XEXP (x
, 1))
11853 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
11855 *total
= COSTS_N_INSNS (2);
11859 if (!CONST_INT_P (XEXP (x
, 1)))
11861 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11862 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11866 switch (INTVAL (XEXP (x
, 1)))
11873 *total
= COSTS_N_INSNS (2);
11876 *total
= COSTS_N_INSNS (3);
11882 *total
= COSTS_N_INSNS (4);
11887 *total
= COSTS_N_INSNS (5);
11890 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
11893 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
11896 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
11899 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11900 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11906 if (!CONST_INT_P (XEXP (x
, 1)))
11908 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
11911 switch (INTVAL (XEXP (x
, 1)))
11919 *total
= COSTS_N_INSNS (3);
11922 *total
= COSTS_N_INSNS (5);
11925 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
11931 if (!CONST_INT_P (XEXP (x
, 1)))
11933 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11934 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11938 switch (INTVAL (XEXP (x
, 1)))
11944 *total
= COSTS_N_INSNS (3);
11949 *total
= COSTS_N_INSNS (4);
11952 *total
= COSTS_N_INSNS (6);
11955 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
11958 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11959 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11967 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11974 if (!CONST_INT_P (XEXP (x
, 1)))
11976 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
11977 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11982 val
= INTVAL (XEXP (x
, 1));
11984 *total
= COSTS_N_INSNS (4);
11986 *total
= COSTS_N_INSNS (2);
11987 else if (val
>= 0 && val
<= 7)
11988 *total
= COSTS_N_INSNS (val
);
11990 *total
= COSTS_N_INSNS (1);
11995 if (!CONST_INT_P (XEXP (x
, 1)))
11997 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11998 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12002 switch (INTVAL (XEXP (x
, 1)))
12008 *total
= COSTS_N_INSNS (2);
12011 *total
= COSTS_N_INSNS (3);
12017 *total
= COSTS_N_INSNS (4);
12021 *total
= COSTS_N_INSNS (5);
12024 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
12027 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
12031 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
12034 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
12035 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12041 if (!CONST_INT_P (XEXP (x
, 1)))
12043 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
12046 switch (INTVAL (XEXP (x
, 1)))
12052 *total
= COSTS_N_INSNS (3);
12056 *total
= COSTS_N_INSNS (5);
12059 *total
= COSTS_N_INSNS (4);
12062 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
12068 if (!CONST_INT_P (XEXP (x
, 1)))
12070 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
12071 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12075 switch (INTVAL (XEXP (x
, 1)))
12081 *total
= COSTS_N_INSNS (4);
12086 *total
= COSTS_N_INSNS (6);
12089 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
12092 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
12095 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
12096 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12104 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
12108 if (outer_code
== TRUNCATE
)
12110 *total
= avr_mul_highpart_cost (x
, speed
);
12117 if (!CONST_INT_P (XEXP (x
, 1)))
12119 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
12120 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12125 val
= INTVAL (XEXP (x
, 1));
12127 *total
= COSTS_N_INSNS (3);
12128 else if (val
>= 0 && val
<= 7)
12129 *total
= COSTS_N_INSNS (val
);
12131 *total
= COSTS_N_INSNS (1);
12136 if (!CONST_INT_P (XEXP (x
, 1)))
12138 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
12139 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12143 switch (INTVAL (XEXP (x
, 1)))
12150 *total
= COSTS_N_INSNS (2);
12153 *total
= COSTS_N_INSNS (3);
12158 *total
= COSTS_N_INSNS (4);
12162 *total
= COSTS_N_INSNS (5);
12168 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
12171 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
12175 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
12178 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
12179 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12185 if (!CONST_INT_P (XEXP (x
, 1)))
12187 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
12190 switch (INTVAL (XEXP (x
, 1)))
12198 *total
= COSTS_N_INSNS (3);
12201 *total
= COSTS_N_INSNS (5);
12204 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
12210 if (!CONST_INT_P (XEXP (x
, 1)))
12212 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
12213 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12217 switch (INTVAL (XEXP (x
, 1)))
12223 *total
= COSTS_N_INSNS (4);
12226 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
12231 *total
= COSTS_N_INSNS (4);
12234 *total
= COSTS_N_INSNS (6);
12237 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
12238 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
12246 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
12250 switch (GET_MODE (XEXP (x
, 0)))
12253 *total
= COSTS_N_INSNS (1);
12254 if (!CONST_INT_P (XEXP (x
, 1)))
12255 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), QImode
, code
,
12260 *total
= COSTS_N_INSNS (2);
12261 if (!CONST_INT_P (XEXP (x
, 1)))
12262 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), HImode
, code
,
12264 else if (INTVAL (XEXP (x
, 1)) != 0)
12265 *total
+= COSTS_N_INSNS (1);
12269 *total
= COSTS_N_INSNS (3);
12270 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
12271 *total
+= COSTS_N_INSNS (2);
12275 *total
= COSTS_N_INSNS (4);
12276 if (!CONST_INT_P (XEXP (x
, 1)))
12277 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), SImode
, code
,
12279 else if (INTVAL (XEXP (x
, 1)) != 0)
12280 *total
+= COSTS_N_INSNS (3);
12286 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
12291 if (LSHIFTRT
== GET_CODE (XEXP (x
, 0)))
12293 *total
= avr_mul_highpart_cost (XEXP (x
, 0), speed
);
12299 if (outer_code
== SET
12300 && XEXP (x
, 2) == pc_rtx
12301 && ordered_comparison_operator (XEXP (x
, 0), VOIDmode
))
12303 *total
= avr_cbranch_cost (XEXP (x
, 0));
12314 /* Implement `TARGET_RTX_COSTS'. */
12317 avr_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
12318 int opno
, int *total
, bool speed
)
12320 bool done
= avr_rtx_costs_1 (x
, mode
, outer_code
, opno
, total
, speed
);
12322 if (avr_log
.rtx_costs
)
12324 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
12325 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
12332 /* Implement `TARGET_INSN_COST'. */
12333 /* For some insns, it is not enough to look at the cost of the SET_SRC.
12334 In that case, have a look at the entire insn, e.g. during insn combine. */
12337 avr_insn_cost (rtx_insn
*insn
, bool speed
)
12339 const int unknown_cost
= -1;
12340 int cost
= unknown_cost
;
12342 rtx set
= single_set (insn
);
12345 && ZERO_EXTRACT
== GET_CODE (SET_DEST (set
)))
12347 // Try find anything that would flip the extracted bit.
12348 bool not_bit_p
= false;
12350 subrtx_iterator::array_type array
;
12351 FOR_EACH_SUBRTX (iter
, array
, SET_SRC (set
), NONCONST
)
12353 enum rtx_code code
= GET_CODE (*iter
);
12354 not_bit_p
|= code
== NOT
|| code
== XOR
|| code
== GE
;
12357 // Don't go too deep into the analysis. In almost all cases,
12358 // using BLD/BST is the best we can do for single-bit moves,
12359 // even considering CSE.
12360 cost
= COSTS_N_INSNS (2 + not_bit_p
);
12363 if (cost
!= unknown_cost
)
12365 if (avr_log
.rtx_costs
)
12366 avr_edump ("\n%? (%s) insn_cost=%d\n%r\n",
12367 speed
? "speed" : "size", cost
, insn
);
12371 // Resort to what rtlanal.cc::insn_cost() implements as a default
12372 // when targetm.insn_cost() is not implemented.
12374 return pattern_cost (PATTERN (insn
), speed
);
12378 /* Implement `TARGET_ADDRESS_COST'. */
12381 avr_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
12382 addr_space_t as ATTRIBUTE_UNUSED
,
12383 bool speed ATTRIBUTE_UNUSED
)
12387 if (GET_CODE (x
) == PLUS
12388 && CONST_INT_P (XEXP (x
, 1))
12389 && (REG_P (XEXP (x
, 0))
12390 || SUBREG_P (XEXP (x
, 0))))
12392 if (INTVAL (XEXP (x
, 1)) > MAX_LD_OFFSET(mode
))
12395 else if (CONSTANT_ADDRESS_P (x
))
12397 if (io_address_operand (x
, QImode
))
12401 && avr_address_tiny_absdata_p (x
, QImode
))
12405 if (avr_log
.address_cost
)
12406 avr_edump ("\n%?: %d = %r\n", cost
, x
);
12411 /* Test for extra memory constraint 'Q'.
12412 It's a memory address based on Y or Z pointer with valid displacement. */
12415 extra_constraint_Q (rtx x
)
12418 rtx plus
= XEXP (x
, 0);
12420 if (GET_CODE (plus
) == PLUS
12421 && REG_P (XEXP (plus
, 0))
12422 && CONST_INT_P (XEXP (plus
, 1))
12423 && (INTVAL (XEXP (plus
, 1))
12424 <= MAX_LD_OFFSET (GET_MODE (x
))))
12426 rtx xx
= XEXP (plus
, 0);
12427 int regno
= REGNO (xx
);
12429 ok
= (/* allocate pseudos */
12430 regno
>= FIRST_PSEUDO_REGISTER
12431 /* strictly check */
12432 || regno
== REG_Z
|| regno
== REG_Y
12433 /* XXX frame & arg pointer checks */
12434 || xx
== frame_pointer_rtx
12435 || xx
== arg_pointer_rtx
);
12437 if (avr_log
.constraints
)
12438 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
12439 ok
, reload_completed
, reload_in_progress
, x
);
12445 /* Convert condition code CONDITION to the valid AVR condition code. */
12448 avr_normalize_condition (RTX_CODE condition
)
12461 gcc_unreachable ();
/* Returns register number for function return value.  On AVR the return
   value starts in R24 (values wider than 2 bytes extend downwards from
   R25, see avr_libcall_value).  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
12475 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
12478 avr_function_value_regno_p (const unsigned int regno
)
12480 return (regno
== avr_ret_register ());
12484 /* Implement `TARGET_LIBCALL_VALUE'. */
12485 /* Create an RTX representing the place where a
12486 library function returns a value of mode MODE. */
12489 avr_libcall_value (machine_mode mode
,
12490 const_rtx func ATTRIBUTE_UNUSED
)
12492 int offs
= GET_MODE_SIZE (mode
);
12495 offs
= (offs
+ 1) & ~1;
12497 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
12501 /* Implement `TARGET_FUNCTION_VALUE'. */
12502 /* Create an RTX representing the place where a
12503 function returns a value of data type VALTYPE. */
12506 avr_function_value (const_tree type
,
12507 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
12508 bool outgoing ATTRIBUTE_UNUSED
)
12512 if (TYPE_MODE (type
) != BLKmode
)
12513 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
12515 offs
= int_size_in_bytes (type
);
12518 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
12519 offs
= GET_MODE_SIZE (SImode
);
12520 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
12521 offs
= GET_MODE_SIZE (DImode
);
12523 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
12527 test_hard_reg_class (enum reg_class rclass
, rtx x
)
12529 int regno
= true_regnum (x
);
12533 if (TEST_HARD_REG_CLASS (rclass
, regno
))
12540 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
12541 and thus is suitable to be skipped by CPSE, SBRC, etc. */
12544 avr_2word_insn_p (rtx_insn
*insn
)
12546 if (TARGET_SKIP_BUG
|| !insn
|| get_attr_length (insn
) != 2)
12551 switch (INSN_CODE (insn
))
12556 case CODE_FOR_movqi_insn
:
12557 case CODE_FOR_movuqq_insn
:
12558 case CODE_FOR_movqq_insn
:
12560 rtx set
= single_set (insn
);
12561 rtx src
= SET_SRC (set
);
12562 rtx dest
= SET_DEST (set
);
12564 /* Factor out LDS and STS from movqi_insn. */
12567 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
12569 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
12571 else if (REG_P (dest
)
12574 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
12580 case CODE_FOR_call_insn
:
12581 case CODE_FOR_call_value_insn
:
12588 jump_over_one_insn_p (rtx_insn
*insn
, rtx dest
)
12590 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
12593 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
12594 int dest_addr
= INSN_ADDRESSES (uid
);
12595 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
12597 return (jump_offset
== 1
12598 || (jump_offset
== 2
12599 && avr_2word_insn_p (next_active_insn (insn
))));
12602 /* Implement TARGET_HARD_REGNO_NREGS. CCmode is four units for historical
12603 reasons. If this hook is not defined, TARGET_HARD_REGNO_NREGS
12604 reports that CCmode requires four registers.
12605 Define this hook to allow CCmode to fit in a single REG_CC. For
12606 other modes and regs, return the number of words in mode (i.e whatever
12607 the default implementation of the hook returned). */
12609 static unsigned int
12610 avr_hard_regno_nregs (unsigned int regno
, machine_mode mode
)
12612 if (regno
== REG_CC
&& mode
== CCmode
)
12615 return CEIL (GET_MODE_SIZE (mode
), UNITS_PER_WORD
);
12619 /* Implement TARGET_HARD_REGNO_MODE_OK. On the enhanced core, anything
12620 larger than 1 byte must start in even numbered register for "movw" to
12621 work (this way we don't have to check for odd registers everywhere). */
12624 avr_hard_regno_mode_ok (unsigned int regno
, machine_mode mode
)
12626 if (regno
== REG_CC
)
12627 return mode
== CCmode
;
12629 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
12630 Disallowing QI et al. in these regs might lead to code like
12631 (set (subreg:QI (reg:HI 28) n) ...)
12632 which will result in wrong code because reload does not
12633 handle SUBREGs of hard regsisters like this.
12634 This could be fixed in reload. However, it appears
12635 that fixing reload is not wanted by reload people. */
12637 /* Any GENERAL_REGS register can hold 8-bit values. */
12639 if (GET_MODE_SIZE (mode
) == 1)
12642 /* FIXME: Ideally, the following test is not needed.
12643 However, it turned out that it can reduce the number
12644 of spill fails. AVR and it's poor endowment with
12645 address registers is extreme stress test for reload. */
12647 if (GET_MODE_SIZE (mode
) >= 4
12651 /* All modes larger than 8 bits should start in an even register. */
12653 return !(regno
& 1);
12657 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED. */
12660 avr_hard_regno_call_part_clobbered (unsigned, unsigned regno
,
12663 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
12664 represent valid hard registers like, e.g. HI:29. Returning TRUE
12665 for such registers can lead to performance degradation as mentioned
12666 in PR53595. Thus, report invalid hard registers as FALSE. */
12668 if (!avr_hard_regno_mode_ok (regno
, mode
))
12671 /* Return true if any of the following boundaries is crossed:
12672 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
12674 return ((regno
<= LAST_CALLEE_SAVED_REG
12675 && regno
+ GET_MODE_SIZE (mode
) > 1 + LAST_CALLEE_SAVED_REG
)
12676 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
12677 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
12681 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
12684 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED
,
12685 addr_space_t as
, RTX_CODE outer_code
,
12686 RTX_CODE index_code ATTRIBUTE_UNUSED
)
12688 if (!ADDR_SPACE_GENERIC_P (as
))
12690 return POINTER_Z_REGS
;
12694 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
12696 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
12700 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
12703 avr_regno_mode_code_ok_for_base_p (int regno
,
12704 machine_mode mode ATTRIBUTE_UNUSED
,
12705 addr_space_t as ATTRIBUTE_UNUSED
,
12706 RTX_CODE outer_code
,
12707 RTX_CODE index_code ATTRIBUTE_UNUSED
)
12711 if (!ADDR_SPACE_GENERIC_P (as
))
12713 if (regno
< FIRST_PSEUDO_REGISTER
12721 regno
= reg_renumber
[regno
];
12723 if (regno
== REG_Z
)
12732 if (regno
< FIRST_PSEUDO_REGISTER
12736 || regno
== ARG_POINTER_REGNUM
))
12740 else if (reg_renumber
)
12742 regno
= reg_renumber
[regno
];
12747 || regno
== ARG_POINTER_REGNUM
)
12754 && PLUS
== outer_code
12764 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
12765 /* Set 32-bit register OP[0] to compile-time constant OP[1].
12766 CLOBBER_REG is a QI clobber register or NULL_RTX.
12767 LEN == NULL: output instructions.
12768 LEN != NULL: set *LEN to the length of the instruction sequence
12769 (in words) printed with LEN = NULL.
12770 If CLEAR_P is true, OP[0] had been cleard to Zero already.
12771 If CLEAR_P is false, nothing is known about OP[0].
12773 The effect on cc0 is as follows:
12775 Load 0 to any register except ZERO_REG : NONE
12776 Load ld register with any value : NONE
12777 Anything else: : CLOBBER */
12780 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
12784 rtx xval
, xdest
[4];
12786 int clobber_val
= 1234;
12787 bool cooked_clobber_p
= false;
12788 bool set_p
= false;
12789 machine_mode mode
= GET_MODE (dest
);
12790 int n_bytes
= GET_MODE_SIZE (mode
);
12792 gcc_assert (REG_P (dest
)
12793 && CONSTANT_P (src
));
12798 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
12799 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
12801 if (REGNO (dest
) < 16
12802 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
12804 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
12807 /* We might need a clobber reg but don't have one. Look at the value to
12808 be loaded more closely. A clobber is only needed if it is a symbol
12809 or contains a byte that is neither 0, -1 or a power of 2. */
12811 if (NULL_RTX
== clobber_reg
12812 && !test_hard_reg_class (LD_REGS
, dest
)
12813 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
12814 || !avr_popcount_each_byte (src
, n_bytes
,
12815 (1 << 0) | (1 << 1) | (1 << 8))))
12817 /* We have no clobber register but need one. Cook one up.
12818 That's cheaper than loading from constant pool. */
12820 cooked_clobber_p
= true;
12821 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
12822 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
12825 /* Now start filling DEST from LSB to MSB. */
12827 for (int n
= 0; n
< n_bytes
; n
++)
12830 bool done_byte
= false;
12833 /* Crop the n-th destination byte. */
12835 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
12836 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
12838 if (!CONST_INT_P (src
)
12839 && !CONST_FIXED_P (src
)
12840 && !CONST_DOUBLE_P (src
))
12842 static const char* const asm_code
[][2] =
12844 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
12845 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
12846 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
12847 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
12852 xop
[2] = clobber_reg
;
12854 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
12859 /* Crop the n-th source byte. */
12861 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
12862 ival
[n
] = INTVAL (xval
);
12864 /* Look if we can reuse the low word by means of MOVW. */
12870 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
12871 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
12873 if (INTVAL (lo16
) == INTVAL (hi16
))
12875 if (INTVAL (lo16
) != 0 || !clear_p
)
12876 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
12882 /* Don't use CLR so that cc0 is set as expected. */
12887 avr_asm_len (ldreg_p
? "ldi %0,0"
12888 : AVR_ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
12889 : "mov %0,__zero_reg__",
12890 &xdest
[n
], len
, 1);
12894 if (clobber_val
== ival
[n
]
12895 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
12900 /* LD_REGS can use LDI to move a constant value */
12906 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
12910 /* Try to reuse value already loaded in some lower byte. */
12912 for (int j
= 0; j
< n
; j
++)
12913 if (ival
[j
] == ival
[n
])
12918 avr_asm_len ("mov %0,%1", xop
, len
, 1);
12926 /* Need no clobber reg for -1: Use CLR/DEC */
12931 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
12933 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
12936 else if (ival
[n
] == 1)
12939 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
12941 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
12945 /* Use T flag or INC to manage powers of 2 if we have
12948 if (NULL_RTX
== clobber_reg
12949 && single_one_operand (xval
, QImode
))
12952 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
12954 gcc_assert (constm1_rtx
!= xop
[1]);
12959 avr_asm_len ("set", xop
, len
, 1);
12963 avr_asm_len ("clr %0", xop
, len
, 1);
12965 avr_asm_len ("bld %0,%1", xop
, len
, 1);
12969 /* We actually need the LD_REGS clobber reg. */
12971 gcc_assert (NULL_RTX
!= clobber_reg
);
12975 xop
[2] = clobber_reg
;
12976 clobber_val
= ival
[n
];
12978 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
12979 "mov %0,%2", xop
, len
, 2);
12982 /* If we cooked up a clobber reg above, restore it. */
12984 if (cooked_clobber_p
)
12986 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
12991 /* Reload the constant OP[1] into the HI register OP[0].
12992 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12993 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12994 need a clobber reg or have to cook one up.
12996 PLEN == NULL: Output instructions.
12997 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
12998 by the insns printed.
13003 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
13005 output_reload_in_const (op
, clobber_reg
, plen
, false);
13010 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
13011 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
13012 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
13013 need a clobber reg or have to cook one up.
13015 LEN == NULL: Output instructions.
13017 LEN != NULL: Output nothing. Set *LEN to number of words occupied
13018 by the insns printed.
13023 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
13026 && !test_hard_reg_class (LD_REGS
, op
[0])
13027 && (CONST_INT_P (op
[1])
13028 || CONST_FIXED_P (op
[1])
13029 || CONST_DOUBLE_P (op
[1])))
13031 int len_clr
, len_noclr
;
13033 /* In some cases it is better to clear the destination beforehand, e.g.
13035 CLR R2 CLR R3 MOVW R4,R2 INC R2
13039 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
13041 We find it too tedious to work that out in the print function.
13042 Instead, we call the print function twice to get the lengths of
13043 both methods and use the shortest one. */
13045 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
13046 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
13048 if (len_noclr
- len_clr
== 4)
13050 /* Default needs 4 CLR instructions: clear register beforehand. */
13052 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
13053 "mov %B0,__zero_reg__" CR_TAB
13054 "movw %C0,%A0", &op
[0], len
, 3);
13056 output_reload_in_const (op
, clobber_reg
, len
, true);
13065 /* Default: destination not pre-cleared. */
13067 output_reload_in_const (op
, clobber_reg
, len
, false);
13072 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
13074 output_reload_in_const (op
, clobber_reg
, len
, false);
13079 /* Worker function for `ASM_OUTPUT_ADDR_VEC'. */
13080 /* Emit jump tables out-of-line so that branches crossing the table
13081 get shorter offsets. If we have JUMP + CALL, then put the tables
13082 in a dedicated non-.text section so that CALLs get better chance to
13083 be relaxed to RCALLs.
13085 We emit the tables by hand because `function_rodata_section' does not
13086 work as expected, cf. PR71151, and we do *NOT* want the table to be
13087 in .rodata, hence setting JUMP_TABLES_IN_TEXT_SECTION = 0 is of limited
13088 use; and setting it to 1 attributes table lengths to branch offsets...
13089 Moreover, fincal.c keeps switching section before each table entry
13090 which we find too fragile as to rely on section caching. */
13093 avr_output_addr_vec (rtx_insn
*labl
, rtx table
)
13095 FILE *stream
= asm_out_file
;
13099 // Switch to appropriate (sub)section.
13101 if (DECL_SECTION_NAME (current_function_decl
)
13102 && symtab_node::get (current_function_decl
)
13103 && ! symtab_node::get (current_function_decl
)->implicit_section
)
13105 // .subsection will emit the code after the function and in the
13106 // section as chosen by the user.
13108 switch_to_section (current_function_section ());
13109 fprintf (stream
, "\t.subsection\t1\n");
13113 // Since PR63223 there is no restriction where to put the table; it
13114 // may even reside above 128 KiB. We put it in a section as high as
13115 // possible and avoid progmem in order not to waste flash <= 64 KiB.
13117 const char *sec_name
= ".jumptables.gcc";
13119 // The table belongs to its host function, therefore use fine
13120 // grained sections so that, if that function is removed by
13121 // --gc-sections, the child table(s) may also be removed. */
13123 tree asm_name
= DECL_ASSEMBLER_NAME (current_function_decl
);
13124 const char *fname
= IDENTIFIER_POINTER (asm_name
);
13125 fname
= targetm
.strip_name_encoding (fname
);
13126 sec_name
= ACONCAT ((sec_name
, ".", fname
, NULL
));
13128 fprintf (stream
, "\t.section\t%s,\"%s\",@progbits\n", sec_name
,
13129 AVR_HAVE_JMP_CALL
? "a" : "ax");
13132 // Output the label that preceeds the table.
13134 ASM_OUTPUT_ALIGN (stream
, 1);
13135 targetm
.asm_out
.internal_label (stream
, "L", CODE_LABEL_NUMBER (labl
));
13137 // Output the table's content.
13139 int vlen
= XVECLEN (table
, 0);
13141 for (int idx
= 0; idx
< vlen
; idx
++)
13143 int value
= CODE_LABEL_NUMBER (XEXP (XVECEXP (table
, 0, idx
), 0));
13145 if (AVR_HAVE_JMP_CALL
)
13146 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
13148 fprintf (stream
, "\trjmp .L%d\n", value
);
13151 // Switch back to original section. As we clobbered the section above,
13152 // forget the current section before switching back.
13155 switch_to_section (current_function_section ());
/* NOTE(review): the text of this function is garbled — original line
   numbers are interleaved, statements are broken across lines, and some
   lines are missing entirely (e.g. the head of the allocation-order
   table and, presumably, a `fixed_regs[i] = 1;` store next to the
   `call_used_regs[i] = 1;` below — TODO confirm against history).
   It configures register usage for AVR_TINY devices.  */
13159 /* Implement `TARGET_CONDITIONAL_REGISTER_USAGE'. */
13162 avr_conditional_register_usage (void)
/* Preferred allocation order for Tiny cores; only the tail (the plain
   GPRs in descending order) is visible here.  */
13166 const int tiny_reg_alloc_order
[] = {
13175 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
13178 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
13179 - R0-R15 are not available in Tiny Core devices
13180 - R16 and R17 are fixed registers. */
13182 for (size_t i
= 0; i
<= 17; i
++)
13185 call_used_regs
[i
] = 1;
13188 /* Set R18 to R21 as callee saved registers
13189 - R18, R19, R20 and R21 are the callee saved registers in
13190 Tiny Core devices */
13192 for (size_t i
= 18; i
<= LAST_CALLEE_SAVED_REG
; i
++)
13194 call_used_regs
[i
] = 0;
13197 /* Update register allocation order for Tiny Core devices */
13199 for (size_t i
= 0; i
< ARRAY_SIZE (tiny_reg_alloc_order
); i
++)
13201 reg_alloc_order
[i
] = tiny_reg_alloc_order
[i
];
/* On AVR_TINY these register classes hold no registers, so empty them.  */
13204 CLEAR_HARD_REG_SET (reg_class_contents
[(int) ADDW_REGS
]);
13205 CLEAR_HARD_REG_SET (reg_class_contents
[(int) NO_LD_REGS
]);
/* NOTE(review): return statements and braces appear to have been elided
   by extraction; code kept byte-identical, comments only.  Rejects REGNO
   as a peephole2 scratch when (a) the function is an ISR and the register
   was never live (hence never saved by the prologue), or (b) REGNO is part
   of the Y frame pointer while a frame pointer may still be needed.  */
13209 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
13210 /* Returns true if SCRATCH are safe to be allocated as a scratch
13211 registers (for a define_peephole2) in the current function. */
13214 avr_hard_regno_scratch_ok (unsigned int regno
)
13216 /* Interrupt functions can only use registers that have already been saved
13217 by the prologue, even if they would normally be call-clobbered. */
13219 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
13220 && !df_regs_ever_live_p (regno
))
13223 /* Don't allow hard registers that might be part of the frame pointer.
13224 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
13225 and don't care for a frame pointer that spans more than one register. */
13227 if ((!reload_completed
|| frame_pointer_needed
)
13228 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
/* NOTE(review): return statements and braces appear elided by extraction;
   code kept byte-identical, comments only.  Same two restrictions as
   avr_hard_regno_scratch_ok, applied to register renaming: the rename
   target must have been saved in ISRs, and neither OLD_REG nor NEW_REG
   may be half of the Y frame pointer while one may still be needed.  */
13237 /* Worker function for `HARD_REGNO_RENAME_OK'. */
13238 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
13241 avr_hard_regno_rename_ok (unsigned int old_reg
,
13242 unsigned int new_reg
)
13244 /* Interrupt functions can only use registers that have already been
13245 saved by the prologue, even if they would normally be
13248 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
13249 && !df_regs_ever_live_p (new_reg
))
13252 /* Don't allow hard registers that might be part of the frame pointer.
13253 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
13254 and don't care for a frame pointer that spans more than one register. */
13256 if ((!reload_completed
|| frame_pointer_needed
)
13257 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
13258 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
/* NOTE(review): several case labels, branch bodies and the final return
   string appear elided by extraction; code kept byte-identical, comments
   only.  Emits SBIS/SBIC for low I/O addresses, IN + SBRS/SBRC via
   __tmp_reg__ for other I/O addresses, and SBRS/SBRC for register
   operands, followed by the (partially visible) jump sequence.  */
13266 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
13267 or memory location in the I/O space (QImode only).
13269 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
13270 Operand 1: register operand to test, or CONST_INT memory address.
13271 Operand 2: bit number.
13272 Operand 3: label to jump to if the test is true. */
13275 avr_out_sbxx_branch (rtx_insn
*insn
, rtx operands
[])
13277 enum rtx_code comp
= GET_CODE (operands
[0]);
13278 bool long_jump
= get_attr_length (insn
) >= 4;
/* Reverse the test when the branch is long or would jump over one insn.  */
13279 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
13283 else if (comp
== LT
)
13287 comp
= reverse_condition (comp
);
13289 switch (GET_CODE (operands
[1]))
13298 if (low_io_address_operand (operands
[1], QImode
))
13301 output_asm_insn ("sbis %i1,%2", operands
);
13303 output_asm_insn ("sbic %i1,%2", operands
);
13307 gcc_assert (io_address_operand (operands
[1], QImode
));
13308 output_asm_insn ("in __tmp_reg__,%i1", operands
);
13310 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
13312 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
13315 break; /* CONST_INT */
13320 output_asm_insn ("sbrs %T1%T2", operands
);
13322 output_asm_insn ("sbrc %T1%T2", operands
);
13328 return ("rjmp .+4" CR_TAB
13337 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
13340 avr_asm_out_ctor (rtx symbol
, int priority
)
13342 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
13343 default_ctor_section_asm_out_constructor (symbol
, priority
);
13347 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
13350 avr_asm_out_dtor (rtx symbol
, int priority
)
13352 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
13353 default_dtor_section_asm_out_destructor (symbol
, priority
);
13357 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
13360 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
13362 HOST_WIDE_INT size
= int_size_in_bytes (type
);
13363 HOST_WIDE_INT ret_size_limit
= AVR_TINY
? 4 : 8;
13365 /* In avr, there are 8 return registers. But, for Tiny Core
13366 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
13367 Return true if size is unknown or greater than the limit. */
13369 if (size
== -1 || size
> ret_size_limit
)
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
13396 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
13398 static scalar_int_mode
13399 avr_addr_space_address_mode (addr_space_t as
)
13401 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
13405 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
13407 static scalar_int_mode
13408 avr_addr_space_pointer_mode (addr_space_t as
)
13410 return avr_addr_space_address_mode (as
);
13414 /* Helper for following function. */
13417 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
13419 gcc_assert (REG_P (reg
));
13423 return REGNO (reg
) == REG_Z
;
13426 /* Avoid combine to propagate hard regs. */
13428 if (can_create_pseudo_p()
13429 && REGNO (reg
) < REG_Z
)
/* NOTE(review): switch framing, several case bodies and the final return
   appear elided by extraction; code kept byte-identical, comments only.
   Dispatches on the address space: generic space defers to
   avr_legitimate_address_p; the flash spaces accept only Z-based
   addressing (plain REG or POST_INC via avr_reg_ok_for_pgm_addr);
   __memx additionally accepts a LO_SUM of a high part and the Z reg.
   The tail dumps diagnostics when -mlog=legitimate_address_p is on.  */
13438 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
13441 avr_addr_space_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
,
13442 addr_space_t as
, code_helper
= ERROR_MARK
)
13451 case ADDR_SPACE_GENERIC
:
13452 return avr_legitimate_address_p (mode
, x
, strict
);
13454 case ADDR_SPACE_FLASH
:
13455 case ADDR_SPACE_FLASH1
:
13456 case ADDR_SPACE_FLASH2
:
13457 case ADDR_SPACE_FLASH3
:
13458 case ADDR_SPACE_FLASH4
:
13459 case ADDR_SPACE_FLASH5
:
13461 switch (GET_CODE (x
))
13464 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
13468 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
13477 case ADDR_SPACE_MEMX
:
13480 && can_create_pseudo_p());
13482 if (LO_SUM
== GET_CODE (x
))
13484 rtx hi
= XEXP (x
, 0);
13485 rtx lo
= XEXP (x
, 1);
13488 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
13490 && REGNO (lo
) == REG_Z
);
13496 if (avr_log
.legitimate_address_p
)
13498 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
13499 "reload_completed=%d reload_in_progress=%d %s:",
13500 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
13501 reg_renumber
? "(reg_renumber)" : "");
13503 if (GET_CODE (x
) == PLUS
13504 && REG_P (XEXP (x
, 0))
13505 && CONST_INT_P (XEXP (x
, 1))
13506 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
13509 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
13510 true_regnum (XEXP (x
, 0)));
13513 avr_edump ("\n%r\n", x
);
13520 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
13523 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
13524 machine_mode mode
, addr_space_t as
)
13526 if (ADDR_SPACE_GENERIC_P (as
))
13527 return avr_legitimize_address (x
, old_x
, mode
);
13529 if (avr_log
.legitimize_address
)
13531 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
/* NOTE(review): braces, the `sym' initialization and the return
   statements appear elided by extraction; code kept byte-identical,
   comments only.  Converts SRC between address-space pointer
   representations: up-casts 16-bit pointers to 24-bit __memx pointers by
   extending with the space's segment byte (RAM has bit 23 set), and
   down-casts 24-bit pointers to 16 bits by dropping the high byte.  */
13538 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
13541 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
13543 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
13544 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
13546 if (avr_log
.progmem
)
13547 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
13548 src
, type_from
, type_to
);
13550 /* Up-casting from 16-bit to 24-bit pointer. */
13552 if (as_from
!= ADDR_SPACE_MEMX
13553 && as_to
== ADDR_SPACE_MEMX
)
13557 rtx reg
= gen_reg_rtx (PSImode
);
/* Strip CONST/PLUS wrappers to find the underlying symbol, if any.  */
13559 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
13560 sym
= XEXP (sym
, 0);
13562 /* Look at symbol flags: avr_encode_section_info set the flags
13563 also if attribute progmem was seen so that we get the right
13564 promotion for, e.g. PSTR-like strings that reside in generic space
13565 but are located in flash. In that case we patch the incoming
13568 if (SYMBOL_REF_P (sym
)
13569 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
13571 as_from
= ADDR_SPACE_FLASH
;
13574 /* Linearize memory: RAM has bit 23 set. */
13576 msb
= ADDR_SPACE_GENERIC_P (as_from
)
13578 : avr_addrspace
[as_from
].segment
;
13580 src
= force_reg (Pmode
, src
);
13582 emit_insn (msb
== 0
13583 ? gen_zero_extendhipsi2 (reg
, src
)
13584 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
13589 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
13591 if (as_from
== ADDR_SPACE_MEMX
13592 && as_to
!= ADDR_SPACE_MEMX
)
13594 rtx new_src
= gen_reg_rtx (Pmode
);
13596 src
= force_reg (PSImode
, src
);
13598 emit_move_insn (new_src
,
13599 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
13607 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
13610 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
13611 addr_space_t superset ATTRIBUTE_UNUSED
)
13613 /* Allow any kind of pointer mess. */
/* NOTE(review): braces and the fall-through `return NULL_TREE;' tail
   appear elided by extraction; code kept byte-identical, comments only.
   When -Waddr-space-convert is on, warns about pointer conversions that
   move the pointee into a different, non-__memx address space, and in
   that case builds an explicit ADDR_SPACE_CONVERT_EXPR.  */
13619 /* Implement `TARGET_CONVERT_TO_TYPE'. */
13622 avr_convert_to_type (tree type
, tree expr
)
13624 /* Print a diagnose for pointer conversion that changes the address
13625 space of the pointer target to a non-enclosing address space,
13626 provided -Waddr-space-convert is on.
13628 FIXME: Filter out cases where the target object is known to
13629 be located in the right memory, like in
13631 (const __flash*) PSTR ("text")
13633 Also try to distinguish between explicit casts requested by
13634 the user and implicit casts like
13636 void f (const __flash char*);
13638 void g (const char *p)
13640 f ((const __flash*) p);
13643 under the assumption that an explicit casts means that the user
13644 knows what he is doing, e.g. interface with PSTR or old style
13645 code with progmem and pgm_read_xxx.
13648 if (avr_warn_addr_space_convert
13649 && expr
!= error_mark_node
13650 && POINTER_TYPE_P (type
)
13651 && POINTER_TYPE_P (TREE_TYPE (expr
)))
13653 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
13654 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
13656 if (avr_log
.progmem
)
13657 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
/* Converting into __memx is always safe (it encloses everything);
   warn about any other change of address space.  */
13659 if (as_new
!= ADDR_SPACE_MEMX
13660 && as_new
!= as_old
)
13662 location_t loc
= EXPR_LOCATION (expr
);
13663 const char *name_old
= avr_addrspace
[as_old
].name
;
13664 const char *name_new
= avr_addrspace
[as_new
].name
;
13666 warning (OPT_Waddr_space_convert
,
13667 "conversion from address space %qs to address space %qs",
13668 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
13669 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
13671 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
13679 /* Implement `TARGET_LEGITIMATE_COMBINED_INSN'. */
13681 /* PR78883: Filter out paradoxical SUBREGs of MEM which are not handled
13682 properly by following passes. As INSN_SCHEDULING is off and hence
13683 general_operand accepts such expressions, ditch them now. */
13686 avr_legitimate_combined_insn (rtx_insn
*insn
)
13688 subrtx_iterator::array_type array
;
13690 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), NONCONST
)
13692 const_rtx op
= *iter
;
13695 && MEM_P (SUBREG_REG (op
))
13696 && (GET_MODE_SIZE (GET_MODE (op
))
13697 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
)))))
/* NOTE(review): braces, the `rtx reg = *op;' extraction and parts of the
   REG_P / HREG-tracking logic appear elided by extraction; code kept
   byte-identical, comments only.  For every OP[n] selected by OPMASK
   that is a hard register overlapping RMASK, substitutes a fresh pseudo,
   optionally recording or pre-loading the original hard reg per HREG.  */
13707 /* PR63633: The middle-end might come up with hard regs as input operands.
13709 RMASK is a bit mask representing a subset of hard registers R0...R31:
13710 Rn is an element of that set iff bit n of RMASK is set.
13711 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
13712 OP[n] has to be fixed; otherwise OP[n] is left alone.
13714 For each element of OPMASK which is a hard register overlapping RMASK,
13715 replace OP[n] with a newly created pseudo register
13717 HREG == 0: Also emit a move insn that copies the contents of that
13718 hard register into the new pseudo.
13720 HREG != 0: Also set HREG[n] to the hard register. */
13723 avr_fix_operands (rtx
*op
, rtx
*hreg
, unsigned opmask
, unsigned rmask
)
13725 for (; opmask
; opmask
>>= 1, op
++)
13734 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
13735 // This hard-reg overlaps other prohibited hard regs?
13736 && (rmask
& regmask (GET_MODE (reg
), REGNO (reg
))))
13738 *op
= gen_reg_rtx (GET_MODE (reg
));
13740 emit_move_insn (*op
, reg
);
13752 avr_fix_inputs (rtx
*op
, unsigned opmask
, unsigned rmask
)
13754 avr_fix_operands (op
, NULL
, opmask
, rmask
);
13758 /* Helper for the function below: If bit n of MASK is set and
13759 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
13760 Otherwise do nothing for that n. Return TRUE. */
13763 avr_move_fixed_operands (rtx
*op
, rtx
*hreg
, unsigned mask
)
13765 for (; mask
; mask
>>= 1, op
++, hreg
++)
13768 emit_move_insn (*hreg
, *op
);
/* NOTE(review): the function's return type, the hreg[]/n declarations,
   the lock handling and braces appear elided by extraction; code kept
   byte-identical, comments only.  Wraps a 3-operand GEN() expander:
   replaces prohibited hard-reg outputs with pseudos, emits the sequence,
   then copies the pseudos back into the original hard registers.  */
13774 /* PR63633: The middle-end might come up with hard regs as output operands.
13776 GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
13777 RMASK is a bit mask representing a subset of hard registers R0...R31:
13778 Rn is an element of that set iff bit n of RMASK is set.
13779 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
13780 OP[n] has to be fixed; otherwise OP[n] is left alone.
13782 Emit the insn sequence as generated by GEN() with all elements of OPMASK
13783 which are hard registers overlapping RMASK replaced by newly created
13784 pseudo registers. After the sequence has been emitted, emit insns that
13785 move the contents of respective pseudos to their hard regs. */
13788 avr_emit3_fix_outputs (rtx (*gen
)(rtx
,rtx
,rtx
), rtx
*op
,
13789 unsigned opmask
, unsigned rmask
)
13794 /* It is legitimate for GEN to call this function, and in order not to
13795 get self-recursive we use the following static kludge. This is the
13796 only way not to duplicate all expanders and to avoid ugly and
13797 hard-to-maintain C-code instead of the much more appreciated RTL
13798 representation as supplied by define_expand. */
13799 static bool lock
= false;
13801 gcc_assert (opmask
< (1u << n
));
13806 avr_fix_operands (op
, hreg
, opmask
, rmask
);
13809 emit_insn (gen (op
[0], op
[1], op
[2]));
13812 return avr_move_fixed_operands (op
, hreg
, opmask
);
/* NOTE(review): early `return false' paths, braces and some guard lines
   appear elided by extraction; code kept byte-identical, comments only.
   Expands a block copy: picks a QI/HI loop counter by COUNT, splits a
   24-bit __memx source address into lo16 + hi8, loads RAMPZ for far
   flash segments, pins Z (source) and X (destination) by hand and emits
   one monolithic cpymem/cpymemx insn.  */
13816 /* Worker function for cpymemhi expander.
13817 XOP[0] Destination as MEM:BLK
13819 XOP[2] # Bytes to copy
13821 Return TRUE if the expansion is accomplished.
13822 Return FALSE if the operand compination is not supported. */
13825 avr_emit_cpymemhi (rtx
*xop
)
13827 HOST_WIDE_INT count
;
13828 machine_mode loop_mode
;
13829 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
13830 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
13831 rtx a_hi8
= NULL_RTX
;
/* Copying into flash is not supported; only constant byte counts are.  */
13833 if (avr_mem_flash_p (xop
[0]))
13836 if (!CONST_INT_P (xop
[2]))
13839 count
= INTVAL (xop
[2]);
13843 a_src
= XEXP (xop
[1], 0);
13844 a_dest
= XEXP (xop
[0], 0);
13846 if (PSImode
== GET_MODE (a_src
))
13848 gcc_assert (as
== ADDR_SPACE_MEMX
);
13850 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
13851 loop_reg
= gen_rtx_REG (loop_mode
, 24);
13852 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
13854 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
13855 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
13859 int segment
= avr_addrspace
[as
].segment
;
13862 && avr_n_flash
> 1)
13864 a_hi8
= GEN_INT (segment
);
13865 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
13867 else if (!ADDR_SPACE_GENERIC_P (as
))
13869 as
= ADDR_SPACE_FLASH
;
13874 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
13875 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
13878 xas
= GEN_INT (as
);
13880 /* FIXME: Register allocator might come up with spill fails if it is left
13881 on its own. Thus, we allocate the pointer registers by hand:
13883 X = destination address */
13885 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
13886 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
13888 /* FIXME: Register allocator does a bad job and might spill address
13889 register(s) inside the loop leading to additional move instruction
13890 to/from stack which could clobber tmp_reg. Thus, do *not* emit
13891 load and store as separate insns. Instead, we perform the copy
13892 by means of one monolithic insn. */
13894 gcc_assert (TMP_REGNO
== LPM_REGNO
);
13896 if (as
!= ADDR_SPACE_MEMX
)
13898 /* Load instruction ([E]LPM or LD) is known at compile time:
13899 Do the copy-loop inline. */
13901 rtx (*fun
) (rtx
, rtx
, rtx
)
13902 = QImode
== loop_mode
? gen_cpymem_qi
: gen_cpymem_hi
;
13904 insn
= fun (xas
, loop_reg
, loop_reg
);
13908 rtx (*fun
) (rtx
, rtx
)
13909 = QImode
== loop_mode
? gen_cpymemx_qi
: gen_cpymemx_hi
;
13911 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
13913 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
13916 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
/* NOTE(review): the xop[] setup, switch framing, default label and some
   alternative branches appear elided by extraction; code kept
   byte-identical, comments only.  Prints the copy loop: a load with
   post-increment chosen per address space (LD / LPM / ELPM), a store via
   `st X+', a loop-counter decrement (DEC, SBIW or SUBI/SBCI depending on
   counter mode and register class) and a closing BRNE.  */
13923 /* Print assembler for cpymem_qi, cpymem_hi insns...
13925 $1, $2 : Loop register
13927 X : Destination address
13931 avr_out_cpymem (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
13933 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
13934 machine_mode loop_mode
= GET_MODE (op
[1]);
13935 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
13943 xop
[2] = tmp_reg_rtx
;
13947 avr_asm_len ("0:", xop
, plen
, 0);
13949 /* Load with post-increment */
13956 case ADDR_SPACE_GENERIC
:
13958 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
13961 case ADDR_SPACE_FLASH
:
13964 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
13966 avr_asm_len ("lpm" CR_TAB
13967 "adiw r30,1", xop
, plen
, 2);
13970 case ADDR_SPACE_FLASH1
:
13971 case ADDR_SPACE_FLASH2
:
13972 case ADDR_SPACE_FLASH3
:
13973 case ADDR_SPACE_FLASH4
:
13974 case ADDR_SPACE_FLASH5
:
13976 if (AVR_HAVE_ELPMX
)
13977 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
13979 avr_asm_len ("elpm" CR_TAB
13980 "adiw r30,1", xop
, plen
, 2);
13984 /* Store with post-increment */
13986 avr_asm_len ("st X+,%2", xop
, plen
, 1);
13988 /* Decrement loop-counter and set Z-flag */
13990 if (QImode
== loop_mode
)
13992 avr_asm_len ("dec %1", xop
, plen
, 1);
13996 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
14000 avr_asm_len ("subi %A1,1" CR_TAB
14001 "sbci %B1,0", xop
, plen
, 2);
14004 /* Loop until zero */
14006 return avr_asm_len ("brne 0b", xop
, plen
, 1);
14011 /* Helper for __builtin_avr_delay_cycles */
14014 avr_mem_clobber (void)
14016 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
14017 MEM_VOLATILE_P (mem
) = 1;
/* NOTE(review): the function's return type, braces, the `loop_count = 255'
   clamp line and the `cycles -= 2' / `if (cycles == 1)' tail appear
   elided by extraction; code kept byte-identical, comments only.
   Expands __builtin_avr_delay_cycles: burns OPERANDS0 cycles using
   progressively smaller delay loops (SImode 6-cycle, 24-bit 5-cycle,
   HImode 4-cycle, QImode 3-cycle) and finishes with NOPs.  */
14022 avr_expand_delay_cycles (rtx operands0
)
14024 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
14025 unsigned HOST_WIDE_INT cycles_used
;
14026 unsigned HOST_WIDE_INT loop_count
;
14028 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
14030 loop_count
= ((cycles
- 9) / 6) + 1;
14031 cycles_used
= ((loop_count
- 1) * 6) + 9;
14032 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
14033 avr_mem_clobber()));
14034 cycles
-= cycles_used
;
14037 if (IN_RANGE (cycles
, 262145, 83886081))
14039 loop_count
= ((cycles
- 7) / 5) + 1;
14040 if (loop_count
> 0xFFFFFF)
14041 loop_count
= 0xFFFFFF;
14042 cycles_used
= ((loop_count
- 1) * 5) + 7;
14043 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
14044 avr_mem_clobber()));
14045 cycles
-= cycles_used
;
14048 if (IN_RANGE (cycles
, 768, 262144))
14050 loop_count
= ((cycles
- 5) / 4) + 1;
14051 if (loop_count
> 0xFFFF)
14052 loop_count
= 0xFFFF;
14053 cycles_used
= ((loop_count
- 1) * 4) + 5;
14054 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
14055 avr_mem_clobber()));
14056 cycles
-= cycles_used
;
14059 if (IN_RANGE (cycles
, 6, 767))
14061 loop_count
= cycles
/ 3;
14062 if (loop_count
> 255)
14064 cycles_used
= loop_count
* 3;
14065 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
14066 avr_mem_clobber()));
14067 cycles
-= cycles_used
;
/* Remaining residue: 2-cycle NOPs, then possibly one final 1-cycle NOP.  */
14070 while (cycles
>= 2)
14072 emit_insn (gen_nopv (GEN_INT (2)));
14078 emit_insn (gen_nopv (GEN_INT (1)));
14085 avr_expand_nops (rtx operands0
)
14087 unsigned HOST_WIDE_INT n_nops
= UINTVAL (operands0
) & GET_MODE_MASK (HImode
);
14091 emit_insn (gen_nopv (const1_rtx
));
14096 /* Compute the image of x under f, i.e. perform x --> f(x) */
14099 avr_map (unsigned int f
, int x
)
14101 return x
< 8 ? (f
>> (4 * x
)) & 0xf : 0;
/* NOTE(review): the enum's first members (MAP_FIXED_0_7 etc.), the
   function's return type, braces, and the bodies of some mode branches
   appear elided by extraction; code kept byte-identical, comments only.
   avr_map_metric folds the 8 nibbles of map A into a count or bit mask
   selected by MODE (fixed points, non-fixed points, or preimage of 0xf).  */
14105 /* Return some metrics of map A. */
14109 /* Number of fixed points in { 0 ... 7 } */
14112 /* Size of preimage of non-fixed points in { 0 ... 7 } */
14115 /* Mask representing the fixed points in { 0 ... 7 } */
14116 MAP_MASK_FIXED_0_7
,
14118 /* Size of the preimage of { 0 ... 7 } */
14121 /* Mask that represents the preimage of { f } */
14122 MAP_MASK_PREIMAGE_F
14126 avr_map_metric (unsigned int a
, int mode
)
14128 unsigned metric
= 0;
14130 for (unsigned i
= 0; i
< 8; i
++)
14132 unsigned ai
= avr_map (a
, i
);
14134 if (mode
== MAP_FIXED_0_7
)
14136 else if (mode
== MAP_NONFIXED_0_7
)
14137 metric
+= ai
< 8 && ai
!= i
;
14138 else if (mode
== MAP_MASK_FIXED_0_7
)
14139 metric
|= ((unsigned) (ai
== i
)) << i
;
14140 else if (mode
== MAP_PREIMAGE_0_7
)
14142 else if (mode
== MAP_MASK_PREIMAGE_F
)
14143 metric
|= ((unsigned) (ai
== 0xf)) << i
;
14152 /* Return true if IVAL has a 0xf in its hexadecimal representation
14153 and false, otherwise. Only nibbles 0..7 are taken into account.
14154 Used as constraint helper for C0f and Cxf. */
14157 avr_has_nibble_0xf (rtx ival
)
14159 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
14160 return avr_map_metric (map
, MAP_MASK_PREIMAGE_F
) != 0;
/* NOTE(review): the struct's tag/typedef line and several field
   declarations (map, ginv, cost, str — presumably) appear elided by
   extraction; code kept byte-identical, comments only.  avr_map_op[]
   enumerates the candidate functions G (rotations and shifts of the
   nibble map) tried by avr_map_decompose below.  */
14164 /* We have a set of bits that are mapped by a function F.
14165 Try to decompose F by means of a second function G so that
14171 cost (F o G^-1) + cost (G) < cost (F)
14173 Example: Suppose builtin insert_bits supplies us with the map
14174 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
14175 nibble of the result, we can just as well rotate the bits before inserting
14176 them and use the map 0x7654ffff which is cheaper than the original map.
14177 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
14181 /* tree code of binary function G */
14182 enum tree_code code
;
14184 /* The constant second argument of G */
14187 /* G^-1, the inverse of G (*, arg) */
14190 /* The cost of applying G (*, arg) */
14193 /* The composition F o G^-1 (*, arg) for some function F */
14196 /* For debug purpose only */
14200 static const avr_map_op_t avr_map_op
[] =
14202 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
14203 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
14204 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
14205 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
14206 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
14207 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
14208 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
14209 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
14210 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
14211 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
14212 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
14213 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
14214 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
14215 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
14216 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
/* NOTE(review): the "no avail" early-out, some xop[] / else framing and
   the final `return f_ginv;' appear elided by extraction; code kept
   byte-identical, comments only.  Builds F o G^-1 nibble by nibble, then
   prices it either as fold-to-logic (cost 2..3) or by running the
   insert-bits output worker in length-counting mode, and finally adds
   the cost of G itself.  */
14220 /* Try to decompose F as F = (F o G^-1) o G as described above.
14221 The result is a struct representing F o G^-1 and G.
14222 If result.cost < 0 then such a decomposition does not exist. */
14224 static avr_map_op_t
14225 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
14227 bool val_used_p
= avr_map_metric (f
, MAP_MASK_PREIMAGE_F
) != 0;
14228 avr_map_op_t f_ginv
= *g
;
14229 unsigned int ginv
= g
->ginv
;
14233 /* Step 1: Computing F o G^-1 */
14235 for (int i
= 7; i
>= 0; i
--)
14237 int x
= avr_map (f
, i
);
14241 x
= avr_map (ginv
, x
);
14243 /* The bit is no element of the image of G: no avail (cost = -1) */
14249 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
14252 /* Step 2: Compute the cost of the operations.
14253 The overall cost of doing an operation prior to the insertion is
14254 the cost of the insertion plus the cost of the operation. */
14256 /* Step 2a: Compute cost of F o G^-1 */
14258 if (avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
) == 0)
14259 /* The mapping consists only of fixed points and can be folded
14260 to AND/OR logic in the remainder. Reasonable cost is 3. */
14261 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
14266 /* Get the cost of the insn by calling the output worker with some
14267 fake values. Mimic effect of reloading xop[3]: Unused operands
14268 are mapped to 0 and used operands are reloaded to xop[0]. */
14270 xop
[0] = all_regs_rtx
[24];
14271 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
14272 xop
[2] = all_regs_rtx
[25];
14273 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
14275 avr_out_insert_bits (xop
, &f_ginv
.cost
);
14277 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
14280 /* Step 2b: Add cost of G */
14282 f_ginv
.cost
+= g
->cost
;
14284 if (avr_log
.builtin
)
14285 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
/* NOTE(review): the function's return type, braces and the `continue'
   guard for out-of-range source bits appear elided by extraction; code
   kept byte-identical, comments only.  Emits BST/BLD pairs, caching the
   bit currently held in the T flag so a source bit feeding several
   destinations is stored only once.  */
14291 /* Insert bits from XOP[1] into XOP[0] according to MAP.
14292 XOP[0] and XOP[1] don't overlap.
14293 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
14294 If FIXP_P = false: Just move the bit if its position in the destination
14295 is different to its source position. */
14298 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
14300 /* T-flag contains this bit of the source, i.e. of XOP[1] */
14301 int t_bit_src
= -1;
14303 /* We order the operations according to the requested source bit b. */
14305 for (int b
= 0; b
< 8; b
++)
14306 for (int bit_dest
= 0; bit_dest
< 8; bit_dest
++)
14308 int bit_src
= avr_map (map
, bit_dest
);
14312 /* Same position: No need to copy as requested by FIXP_P. */
14313 || (bit_dest
== bit_src
&& !fixp_p
))
14316 if (t_bit_src
!= bit_src
)
14318 /* Source bit is not yet in T: Store it to T. */
14320 t_bit_src
= bit_src
;
14322 xop
[3] = GEN_INT (bit_src
);
14323 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
14326 /* Load destination bit with T. */
14328 xop
[3] = GEN_INT (bit_dest
);
14329 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
/* NOTE(review): the xop[] setup, some if/else framing and the final
   return appear elided by extraction; code kept byte-identical, comments
   only.  Output worker for `insert_bits': moves bits per MAP with
   BST/BLD via avr_move_bits, optionally seeding the result with XOP[1]
   (or an EOR/ANDI/EOR merge) when MAP's fixed points make that shorter.  */
14334 /* PLEN == 0: Print assembler code for `insert_bits'.
14335 PLEN != 0: Compute code length in bytes.
14338 OP[1]: The mapping composed of nibbles. If nibble no. N is
14339 0: Bit N of result is copied from bit OP[2].0
14341 7: Bit N of result is copied from bit OP[2].7
14342 0xf: Bit N of result is copied from bit OP[3].N
14343 OP[2]: Bits to be inserted
14344 OP[3]: Target value */
14347 avr_out_insert_bits (rtx
*op
, int *plen
)
14349 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
14350 unsigned mask_fixed
;
14351 bool fixp_p
= true;
14358 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
14362 else if (flag_print_asm_name
)
14363 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
14365 /* If MAP has fixed points it might be better to initialize the result
14366 with the bits to be inserted instead of moving all bits by hand. */
14368 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
14370 if (REGNO (xop
[0]) == REGNO (xop
[1]))
14372 /* Avoid early-clobber conflicts */
14374 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
14375 xop
[1] = tmp_reg_rtx
;
14379 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
14381 /* XOP[2] is used and reloaded to XOP[0] already */
14383 int n_fix
= 0, n_nofix
= 0;
14385 gcc_assert (REG_P (xop
[2]));
14387 /* Get the code size of the bit insertions; once with all bits
14388 moved and once with fixed points omitted. */
14390 avr_move_bits (xop
, map
, true, &n_fix
);
14391 avr_move_bits (xop
, map
, false, &n_nofix
);
14393 if (fixp_p
&& n_fix
- n_nofix
> 3)
14395 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
14397 avr_asm_len ("eor %0,%1" CR_TAB
14398 "andi %0,%3" CR_TAB
14399 "eor %0,%1", xop
, plen
, 3);
14405 /* XOP[2] is unused */
14407 if (fixp_p
&& mask_fixed
)
14409 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
14414 /* Move/insert remaining bits. */
14416 avr_move_bits (xop
, map
, fixp_p
, plen
);
/* NOTE(review): the enum's AVR_BUILTIN_COUNT terminator, closing braces
   and further struct fields (n_args, fndecl — presumably) appear elided
   by extraction; code kept byte-identical, comments only.  The
   DEF_BUILTIN X-macro over builtins.def generates both the ID enum and
   the parallel avr_bdesc[] descriptor table, so IDs index the table.  */
14422 /* IDs for all the AVR builtins. */
14424 enum avr_builtin_id
14426 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
14427 AVR_BUILTIN_ ## NAME,
14428 #include "builtins.def"
14434 struct GTY(()) avr_builtin_description
14436 enum insn_code icode
;
14442 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
14443 that a built-in's ID can be used to access the built-in by means of
14446 static GTY(()) struct avr_builtin_description
14447 avr_bdesc
[AVR_BUILTIN_COUNT
] =
14449 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
14450 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
14451 #include "builtins.def"
14456 /* Implement `TARGET_BUILTIN_DECL'. */
14459 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
14461 if (id
< AVR_BUILTIN_COUNT
)
14462 return avr_bdesc
[id
].fndecl
;
14464 return error_mark_node
;
14469 avr_init_builtin_int24 (void)
14471 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
14472 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
14474 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
14475 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
14479 /* Implement `TARGET_INIT_BUILTINS' */
14480 /* Set up all builtin functions for this target. */
14483 avr_init_builtins (void)
14485 tree void_ftype_void
14486 = build_function_type_list (void_type_node
, NULL_TREE
);
14487 tree uchar_ftype_uchar
14488 = build_function_type_list (unsigned_char_type_node
,
14489 unsigned_char_type_node
,
14491 tree uint_ftype_uchar_uchar
14492 = build_function_type_list (unsigned_type_node
,
14493 unsigned_char_type_node
,
14494 unsigned_char_type_node
,
14496 tree int_ftype_char_char
14497 = build_function_type_list (integer_type_node
,
14501 tree int_ftype_char_uchar
14502 = build_function_type_list (integer_type_node
,
14504 unsigned_char_type_node
,
14506 tree void_ftype_ulong
14507 = build_function_type_list (void_type_node
,
14508 long_unsigned_type_node
,
14511 tree uchar_ftype_ulong_uchar_uchar
14512 = build_function_type_list (unsigned_char_type_node
,
14513 long_unsigned_type_node
,
14514 unsigned_char_type_node
,
14515 unsigned_char_type_node
,
14518 tree const_memx_void_node
14519 = build_qualified_type (void_type_node
,
14521 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
14523 tree const_memx_ptr_type_node
14524 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
14526 tree char_ftype_const_memx_ptr
14527 = build_function_type_list (char_type_node
,
14528 const_memx_ptr_type_node
,
14532 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
14534 #define FX_FTYPE_FX(fx) \
14535 tree fx##r_ftype_##fx##r \
14536 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
14537 tree fx##k_ftype_##fx##k \
14538 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
14540 #define FX_FTYPE_FX_INT(fx) \
14541 tree fx##r_ftype_##fx##r_int \
14542 = build_function_type_list (node_##fx##r, node_##fx##r, \
14543 integer_type_node, NULL); \
14544 tree fx##k_ftype_##fx##k_int \
14545 = build_function_type_list (node_##fx##k, node_##fx##k, \
14546 integer_type_node, NULL)
14548 #define INT_FTYPE_FX(fx) \
14549 tree int_ftype_##fx##r \
14550 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
14551 tree int_ftype_##fx##k \
14552 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
14554 #define INTX_FTYPE_FX(fx) \
14555 tree int##fx##r_ftype_##fx##r \
14556 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
14557 tree int##fx##k_ftype_##fx##k \
14558 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
14560 #define FX_FTYPE_INTX(fx) \
14561 tree fx##r_ftype_int##fx##r \
14562 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
14563 tree fx##k_ftype_int##fx##k \
14564 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
14566 tree node_hr
= short_fract_type_node
;
14567 tree node_nr
= fract_type_node
;
14568 tree node_lr
= long_fract_type_node
;
14569 tree node_llr
= long_long_fract_type_node
;
14571 tree node_uhr
= unsigned_short_fract_type_node
;
14572 tree node_unr
= unsigned_fract_type_node
;
14573 tree node_ulr
= unsigned_long_fract_type_node
;
14574 tree node_ullr
= unsigned_long_long_fract_type_node
;
14576 tree node_hk
= short_accum_type_node
;
14577 tree node_nk
= accum_type_node
;
14578 tree node_lk
= long_accum_type_node
;
14579 tree node_llk
= long_long_accum_type_node
;
14581 tree node_uhk
= unsigned_short_accum_type_node
;
14582 tree node_unk
= unsigned_accum_type_node
;
14583 tree node_ulk
= unsigned_long_accum_type_node
;
14584 tree node_ullk
= unsigned_long_long_accum_type_node
;
14587 /* For absfx builtins. */
14594 /* For roundfx builtins. */
14596 FX_FTYPE_FX_INT (h
);
14597 FX_FTYPE_FX_INT (n
);
14598 FX_FTYPE_FX_INT (l
);
14599 FX_FTYPE_FX_INT (ll
);
14601 FX_FTYPE_FX_INT (uh
);
14602 FX_FTYPE_FX_INT (un
);
14603 FX_FTYPE_FX_INT (ul
);
14604 FX_FTYPE_FX_INT (ull
);
14606 /* For countlsfx builtins. */
14616 INT_FTYPE_FX (ull
);
14618 /* For bitsfx builtins. */
14623 INTX_FTYPE_FX (ll
);
14625 INTX_FTYPE_FX (uh
);
14626 INTX_FTYPE_FX (un
);
14627 INTX_FTYPE_FX (ul
);
14628 INTX_FTYPE_FX (ull
);
14630 /* For fxbits builtins. */
14635 FX_FTYPE_INTX (ll
);
14637 FX_FTYPE_INTX (uh
);
14638 FX_FTYPE_INTX (un
);
14639 FX_FTYPE_INTX (ul
);
14640 FX_FTYPE_INTX (ull
);
14643 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
14645 int id = AVR_BUILTIN_ ## NAME; \
14646 const char *Name = "__builtin_avr_" #NAME; \
14647 char *name = (char*) alloca (1 + strlen (Name)); \
14649 gcc_assert (id < AVR_BUILTIN_COUNT); \
14650 avr_bdesc[id].fndecl \
14651 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
14652 BUILT_IN_MD, LIBNAME, NULL_TREE); \
14654 #include "builtins.def"
14657 avr_init_builtin_int24 ();
14661 /* Subroutine of avr_expand_builtin to expand vanilla builtins
14662 with non-void result and 1 ... 3 arguments. */
14665 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
14668 int n_args
= call_expr_nargs (exp
);
14669 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
14671 gcc_assert (n_args
>= 1 && n_args
<= 3);
14673 if (target
== NULL_RTX
14674 || GET_MODE (target
) != tmode
14675 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
14677 target
= gen_reg_rtx (tmode
);
14680 for (int n
= 0; n
< n_args
; n
++)
14682 tree arg
= CALL_EXPR_ARG (exp
, n
);
14683 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
14684 machine_mode opmode
= GET_MODE (op
);
14685 machine_mode mode
= insn_data
[icode
].operand
[n
+ 1].mode
;
14687 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
14690 op
= gen_lowpart (HImode
, op
);
14693 /* In case the insn wants input operands in modes different from
14694 the result, abort. */
14696 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
14698 if (!insn_data
[icode
].operand
[n
+ 1].predicate (op
, mode
))
14699 op
= copy_to_mode_reg (mode
, op
);
14706 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
14707 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
14708 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
14714 if (pat
== NULL_RTX
)
14723 /* Implement `TARGET_EXPAND_BUILTIN'. */
14724 /* Expand an expression EXP that calls a built-in function,
14725 with result going to TARGET if that's convenient
14726 (and in mode MODE if that's convenient).
14727 SUBTARGET may be used as the target for computing one of EXP's operands.
14728 IGNORE is nonzero if the value is to be ignored. */
14731 avr_expand_builtin (tree exp
, rtx target
,
14732 rtx subtarget ATTRIBUTE_UNUSED
,
14733 machine_mode mode ATTRIBUTE_UNUSED
,
14736 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
14737 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
14738 unsigned int id
= DECL_MD_FUNCTION_CODE (fndecl
);
14739 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
14743 gcc_assert (id
< AVR_BUILTIN_COUNT
);
14747 case AVR_BUILTIN_NOP
:
14748 emit_insn (gen_nopv (GEN_INT (1)));
14751 case AVR_BUILTIN_DELAY_CYCLES
:
14753 arg0
= CALL_EXPR_ARG (exp
, 0);
14754 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
14756 if (!CONST_INT_P (op0
))
14757 error ("%s expects a compile time integer constant", bname
);
14759 avr_expand_delay_cycles (op0
);
14764 case AVR_BUILTIN_NOPS
:
14766 arg0
= CALL_EXPR_ARG (exp
, 0);
14767 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
14769 if (!CONST_INT_P (op0
))
14770 error ("%s expects a compile time integer constant", bname
);
14772 avr_expand_nops (op0
);
14777 case AVR_BUILTIN_INSERT_BITS
:
14779 arg0
= CALL_EXPR_ARG (exp
, 0);
14780 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
14782 if (!CONST_INT_P (op0
))
14784 error ("%s expects a compile time long integer constant"
14785 " as first argument", bname
);
14792 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
14793 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
14794 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
14795 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
14797 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
14798 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
14799 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
14800 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
14802 /* Warn about odd rounding. Rounding points >= FBIT will have
14805 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
14808 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
14810 if (rbit
>= (int) GET_MODE_FBIT (mode
))
14812 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
14813 "fixed-point value with %d fractional bits",
14814 rbit
, GET_MODE_FBIT (mode
));
14816 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
14819 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
14821 warning (0, "rounding result will always be 0");
14822 return CONST0_RTX (mode
);
14825 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
14827 TR 18037 only specifies results for RP > 0. However, the
14828 remaining cases of -IBIT < RP <= 0 can easily be supported
14829 without any additional overhead. */
14834 /* No fold found and no insn: Call support function from libgcc. */
14836 if (d
->icode
== CODE_FOR_nothing
14837 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
14839 return expand_call (exp
, target
, ignore
);
14842 /* No special treatment needed: vanilla expand. */
14844 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
14845 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
14847 if (d
->n_args
== 0)
14849 emit_insn ((GEN_FCN (d
->icode
)) (target
));
14853 return avr_default_expand_builtin (d
->icode
, exp
, target
);
14857 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
14860 avr_fold_absfx (tree tval
)
14862 if (FIXED_CST
!= TREE_CODE (tval
))
14865 /* Our fixed-points have no padding: Use double_int payload directly. */
14867 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
14868 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
14869 double_int ival
= fval
.data
.sext (bits
);
14871 if (!ival
.is_negative())
14874 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
14876 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
14877 ? double_int::max_value (bits
, false)
14880 return build_fixed (TREE_TYPE (tval
), fval
);
14884 /* Implement `TARGET_FOLD_BUILTIN'. */
14887 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
14888 bool ignore ATTRIBUTE_UNUSED
)
14890 unsigned int fcode
= DECL_MD_FUNCTION_CODE (fndecl
);
14891 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
14901 case AVR_BUILTIN_SWAP
:
14903 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
14904 build_int_cst (val_type
, 4));
14907 case AVR_BUILTIN_ABSHR
:
14908 case AVR_BUILTIN_ABSR
:
14909 case AVR_BUILTIN_ABSLR
:
14910 case AVR_BUILTIN_ABSLLR
:
14912 case AVR_BUILTIN_ABSHK
:
14913 case AVR_BUILTIN_ABSK
:
14914 case AVR_BUILTIN_ABSLK
:
14915 case AVR_BUILTIN_ABSLLK
:
14916 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
14918 return avr_fold_absfx (arg
[0]);
14920 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
14921 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
14922 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
14923 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
14925 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
14926 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
14927 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
14928 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
14930 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
14931 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
14932 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
14933 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
14935 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
14936 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
14937 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
14938 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
14940 gcc_assert (TYPE_PRECISION (val_type
)
14941 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
14943 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
14945 case AVR_BUILTIN_INSERT_BITS
:
14947 tree tbits
= arg
[1];
14948 tree tval
= arg
[2];
14950 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
14952 bool changed
= false;
14953 avr_map_op_t best_g
;
14955 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
14957 /* No constant as first argument: Don't fold this and run into
14958 error in avr_expand_builtin. */
14963 tmap
= wide_int_to_tree (map_type
, wi::to_wide (arg
[0]));
14964 map
= TREE_INT_CST_LOW (tmap
);
14966 if (TREE_CODE (tval
) != INTEGER_CST
14967 && avr_map_metric (map
, MAP_MASK_PREIMAGE_F
) == 0)
14969 /* There are no F in the map, i.e. 3rd operand is unused.
14970 Replace that argument with some constant to render
14971 respective input unused. */
14973 tval
= build_int_cst (val_type
, 0);
14977 if (TREE_CODE (tbits
) != INTEGER_CST
14978 && avr_map_metric (map
, MAP_PREIMAGE_0_7
) == 0)
14980 /* Similar for the bits to be inserted. If they are unused,
14981 we can just as well pass 0. */
14983 tbits
= build_int_cst (val_type
, 0);
14986 if (TREE_CODE (tbits
) == INTEGER_CST
)
14988 /* Inserting bits known at compile time is easy and can be
14989 performed by AND and OR with appropriate masks. */
14991 int bits
= TREE_INT_CST_LOW (tbits
);
14992 int mask_ior
= 0, mask_and
= 0xff;
14994 for (size_t i
= 0; i
< 8; i
++)
14996 int mi
= avr_map (map
, i
);
15000 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
15001 else mask_and
&= ~(1 << i
);
15005 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
15006 build_int_cst (val_type
, mask_ior
));
15007 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
15008 build_int_cst (val_type
, mask_and
));
15012 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
15014 /* If bits don't change their position, we can use vanilla logic
15015 to merge the two arguments... */
15017 if (avr_map_metric (map
, MAP_NONFIXED_0_7
) == 0
15018 // ...except when we are copying just one bit. In that
15019 // case, BLD/BST is better than XOR/AND/XOR, see PR90622.
15020 && avr_map_metric (map
, MAP_FIXED_0_7
) != 1)
15022 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
15023 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
15025 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
15026 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
15027 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
15030 /* Try to decomposing map to reduce overall cost. */
15032 if (avr_log
.builtin
)
15033 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
15035 best_g
= avr_map_op
[0];
15036 best_g
.cost
= 1000;
15038 for (size_t i
= 0; i
< ARRAY_SIZE (avr_map_op
); i
++)
15041 = avr_map_decompose (map
, avr_map_op
+ i
,
15042 TREE_CODE (tval
) == INTEGER_CST
);
15044 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
15048 if (avr_log
.builtin
)
15051 if (best_g
.arg
== 0)
15052 /* No optimization found */
15055 /* Apply operation G to the 2nd argument. */
15057 if (avr_log
.builtin
)
15058 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
15059 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
15061 /* Do right-shifts arithmetically: They copy the MSB instead of
15062 shifting in a non-usable value (0) as with logic right-shift. */
15064 tbits
= fold_convert (signed_char_type_node
, tbits
);
15065 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
15066 build_int_cst (val_type
, best_g
.arg
));
15067 tbits
= fold_convert (val_type
, tbits
);
15069 /* Use map o G^-1 instead of original map to undo the effect of G. */
15071 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
15073 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
15074 } /* AVR_BUILTIN_INSERT_BITS */
15080 /* Prepend to CLOBBERS hard registers that are automatically clobbered
15081 for an asm. We do this for CC_REGNUM to maintain source compatibility
15082 with the original cc0-based compiler. */
15085 avr_md_asm_adjust (vec
<rtx
> &/*outputs*/, vec
<rtx
> &/*inputs*/,
15086 vec
<machine_mode
> & /*input_modes*/,
15087 vec
<const char *> &/*constraints*/,
15088 vec
<rtx
> &/*uses*/,
15089 vec
<rtx
> &clobbers
, HARD_REG_SET
&clobbered_regs
,
15090 location_t
/*loc*/)
15092 clobbers
.safe_push (cc_reg_rtx
);
15093 SET_HARD_REG_BIT (clobbered_regs
, REG_CC
);
15098 /* Worker function for `FLOAT_LIB_COMPARE_RETURNS_BOOL'. */
15101 avr_float_lib_compare_returns_bool (machine_mode mode
, enum rtx_code
)
15103 if (mode
== DFmode
)
15105 #if WITH_DOUBLE_COMPARISON == 2
15110 // This is the GCC default and also what AVR-LibC implements.
/* Initialize the GCC target structure.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef  TARGET_ASM_FINAL_POSTSCAN_INSN
#define TARGET_ASM_FINAL_POSTSCAN_INSN avr_asm_final_postscan_insn

#undef  TARGET_INSN_COST
#define TARGET_INSN_COST avr_insn_cost
#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef  TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS avr_hard_regno_nregs

#undef  TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK avr_hard_regno_mode_ok
#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_HARD_REGNO_CALL_PART_CLOBBERED
#define TARGET_HARD_REGNO_CALL_PART_CLOBBERED \
  avr_hard_regno_call_part_clobbered

#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_CLASS_MAX_NREGS
#define TARGET_CLASS_MAX_NREGS avr_class_max_nregs

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

#undef  TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_ADDR_SPACE_DIAGNOSE_USAGE
#define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage

#undef  TARGET_ADDR_SPACE_ZERO_ADDRESS_VALID
#define TARGET_ADDR_SPACE_ZERO_ADDRESS_VALID avr_addr_space_zero_address_valid

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

#undef  TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  avr_use_by_pieces_infrastructure_p

#undef  TARGET_LEGITIMATE_COMBINED_INSN
#define TARGET_LEGITIMATE_COMBINED_INSN avr_legitimate_combined_insn

#undef  TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET avr_starting_frame_offset

#undef  TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST avr_md_asm_adjust

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P avr_can_inline_p

#undef  TARGET_CANONICALIZE_COMPARISON
#define TARGET_CANONICALIZE_COMPARISON avr_canonicalize_comparison

/* According to the opening comment in PR86772, the following applies:
   "If the port does not (and never will in the future) need to mitigate
   against unsafe speculation."  */
#undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
15318 struct gcc_target targetm
= TARGET_INITIALIZER
;
15321 #include "gt-avr.h"