/* Xstormy16 target functions.
   Copyright (C) 1997-2017 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "stringpool.h"
34 #include "stringpool.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
46 #include "langhooks.h"
52 /* This file should be included last. */
53 #include "target-def.h"
55 static rtx
emit_addhi3_postreload (rtx
, rtx
, rtx
);
56 static void xstormy16_asm_out_constructor (rtx
, int);
57 static void xstormy16_asm_out_destructor (rtx
, int);
58 static void xstormy16_asm_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
61 static void xstormy16_init_builtins (void);
62 static rtx
xstormy16_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
63 static int xstormy16_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
64 static bool xstormy16_return_in_memory (const_tree
, const_tree
);
66 static GTY(()) section
*bss100_section
;
68 /* Compute a (partial) cost for rtx X. Return true if the complete
69 cost has been computed, and false if subexpressions should be
70 scanned. In either case, *TOTAL contains the cost result. */
73 xstormy16_rtx_costs (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
74 int outer_code ATTRIBUTE_UNUSED
,
75 int opno ATTRIBUTE_UNUSED
, int *total
,
76 bool speed ATTRIBUTE_UNUSED
)
78 int code
= GET_CODE (x
);
83 if (INTVAL (x
) < 16 && INTVAL (x
) >= 0)
84 *total
= COSTS_N_INSNS (1) / 2;
85 else if (INTVAL (x
) < 256 && INTVAL (x
) >= 0)
86 *total
= COSTS_N_INSNS (1);
88 *total
= COSTS_N_INSNS (2);
95 *total
= COSTS_N_INSNS (2);
99 *total
= COSTS_N_INSNS (35 + 6);
102 *total
= COSTS_N_INSNS (51 - 6);
111 xstormy16_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
112 addr_space_t as ATTRIBUTE_UNUSED
,
113 bool speed ATTRIBUTE_UNUSED
)
115 return (CONST_INT_P (x
) ? 2
116 : GET_CODE (x
) == PLUS
? 7
120 /* Worker function for TARGET_MEMORY_MOVE_COST. */
123 xstormy16_memory_move_cost (machine_mode mode
, reg_class_t rclass
,
126 return (5 + memory_move_secondary_cost (mode
, rclass
, in
));
129 /* Branches are handled as follows:
131 1. HImode compare-and-branches. The machine supports these
132 natively, so the appropriate pattern is emitted directly.
134 2. SImode EQ and NE. These are emitted as pairs of HImode
135 compare-and-branches.
137 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
138 of a SImode subtract followed by a branch (not a compare-and-branch),
144 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
151 /* Emit a branch of kind CODE to location LOC. */
154 xstormy16_emit_cbranch (enum rtx_code code
, rtx op0
, rtx op1
, rtx loc
)
156 rtx condition_rtx
, loc_ref
, branch
, cy_clobber
;
160 mode
= GET_MODE (op0
);
161 gcc_assert (mode
== HImode
|| mode
== SImode
);
164 && (code
== GT
|| code
== LE
|| code
== GTU
|| code
== LEU
))
166 int unsigned_p
= (code
== GTU
|| code
== LEU
);
167 int gt_p
= (code
== GT
|| code
== GTU
);
171 lab
= gen_label_rtx ();
172 xstormy16_emit_cbranch (unsigned_p
? LTU
: LT
, op0
, op1
, gt_p
? lab
: loc
);
173 /* This should be generated as a comparison against the temporary
174 created by the previous insn, but reload can't handle that. */
175 xstormy16_emit_cbranch (gt_p
? NE
: EQ
, op0
, op1
, loc
);
180 else if (mode
== SImode
181 && (code
== NE
|| code
== EQ
)
182 && op1
!= const0_rtx
)
184 rtx op0_word
, op1_word
;
186 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
190 lab
= gen_label_rtx ();
192 for (i
= 0; i
< num_words
- 1; i
++)
194 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
196 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
198 xstormy16_emit_cbranch (NE
, op0_word
, op1_word
, code
== EQ
? lab
: loc
);
200 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
202 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
204 xstormy16_emit_cbranch (code
, op0_word
, op1_word
, loc
);
211 /* We can't allow reload to try to generate any reload after a branch,
212 so when some register must match we must make the temporary ourselves. */
216 tmp
= gen_reg_rtx (mode
);
217 emit_move_insn (tmp
, op0
);
221 condition_rtx
= gen_rtx_fmt_ee (code
, mode
, op0
, op1
);
222 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
223 branch
= gen_rtx_SET (pc_rtx
,
224 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
227 cy_clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
230 vec
= gen_rtvec (2, branch
, cy_clobber
);
231 else if (code
== NE
|| code
== EQ
)
232 vec
= gen_rtvec (2, branch
, gen_rtx_CLOBBER (VOIDmode
, op0
));
237 sub
= gen_rtx_SET (op0
, gen_rtx_MINUS (SImode
, op0
, op1
));
239 sub
= gen_rtx_CLOBBER (SImode
, op0
);
241 vec
= gen_rtvec (3, branch
, sub
, cy_clobber
);
244 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, vec
));
247 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
248 the arithmetic operation. Most of the work is done by
249 xstormy16_expand_arith. */
252 xstormy16_split_cbranch (machine_mode mode
, rtx label
, rtx comparison
,
255 rtx op0
= XEXP (comparison
, 0);
256 rtx op1
= XEXP (comparison
, 1);
257 rtx_insn
*seq
, *last_insn
;
261 xstormy16_expand_arith (mode
, COMPARE
, dest
, op0
, op1
);
265 gcc_assert (INSN_P (seq
));
268 while (NEXT_INSN (last_insn
) != NULL_RTX
)
269 last_insn
= NEXT_INSN (last_insn
);
271 compare
= SET_SRC (XVECEXP (PATTERN (last_insn
), 0, 0));
272 PUT_CODE (XEXP (compare
, 0), GET_CODE (comparison
));
273 XEXP (compare
, 1) = gen_rtx_LABEL_REF (VOIDmode
, label
);
278 /* Return the string to output a conditional branch to LABEL, which is
279 the operand number of the label.
281 OP is the conditional expression, or NULL for branch-always.
283 REVERSED is nonzero if we should reverse the sense of the comparison.
288 xstormy16_output_cbranch_hi (rtx op
, const char *label
, int reversed
,
291 static char string
[64];
292 int need_longbranch
= (op
!= NULL_RTX
293 ? get_attr_length (insn
) == 8
294 : get_attr_length (insn
) == 4);
295 int really_reversed
= reversed
^ need_longbranch
;
298 const char *operands
;
307 sprintf (string
, "%s %s", ccode
, label
);
311 code
= GET_CODE (op
);
313 if (! REG_P (XEXP (op
, 0)))
315 code
= swap_condition (code
);
321 /* Work out which way this really branches. */
323 code
= reverse_condition (code
);
327 case EQ
: ccode
= "z"; break;
328 case NE
: ccode
= "nz"; break;
329 case GE
: ccode
= "ge"; break;
330 case LT
: ccode
= "lt"; break;
331 case GT
: ccode
= "gt"; break;
332 case LE
: ccode
= "le"; break;
333 case GEU
: ccode
= "nc"; break;
334 case LTU
: ccode
= "c"; break;
335 case GTU
: ccode
= "hi"; break;
336 case LEU
: ccode
= "ls"; break;
343 templ
= "b%s %s,.+8 | jmpf %s";
346 sprintf (string
, templ
, ccode
, operands
, label
);
351 /* Return the string to output a conditional branch to LABEL, which is
352 the operand number of the label, but suitable for the tail of a
355 OP is the conditional expression (OP is never NULL_RTX).
357 REVERSED is nonzero if we should reverse the sense of the comparison.
362 xstormy16_output_cbranch_si (rtx op
, const char *label
, int reversed
,
365 static char string
[64];
366 int need_longbranch
= get_attr_length (insn
) >= 8;
367 int really_reversed
= reversed
^ need_longbranch
;
373 code
= GET_CODE (op
);
375 /* Work out which way this really branches. */
377 code
= reverse_condition (code
);
381 case EQ
: ccode
= "z"; break;
382 case NE
: ccode
= "nz"; break;
383 case GE
: ccode
= "ge"; break;
384 case LT
: ccode
= "lt"; break;
385 case GEU
: ccode
= "nc"; break;
386 case LTU
: ccode
= "c"; break;
388 /* The missing codes above should never be generated. */
399 gcc_assert (REG_P (XEXP (op
, 0)));
401 regnum
= REGNO (XEXP (op
, 0));
402 sprintf (prevop
, "or %s,%s", reg_names
[regnum
], reg_names
[regnum
+1]);
406 case GE
: case LT
: case GEU
: case LTU
:
407 strcpy (prevop
, "sbc %2,%3");
415 templ
= "%s | b%s .+6 | jmpf %s";
417 templ
= "%s | b%s %s";
418 sprintf (string
, templ
, prevop
, ccode
, label
);
423 /* Many machines have some registers that cannot be copied directly to or from
424 memory or even from other types of registers. An example is the `MQ'
425 register, which on most machines, can only be copied to or from general
426 registers, but not memory. Some machines allow copying all registers to and
427 from memory, but require a scratch register for stores to some memory
428 locations (e.g., those with symbolic address on the RT, and those with
429 certain symbolic address on the SPARC when compiling PIC). In some cases,
430 both an intermediate and a scratch register are required.
432 You should define these macros to indicate to the reload phase that it may
433 need to allocate at least one register for a reload in addition to the
434 register to contain the data. Specifically, if copying X to a register
435 RCLASS in MODE requires an intermediate register, you should define
436 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
437 whose registers can be used as intermediate registers or scratch registers.
439 If copying a register RCLASS in MODE to X requires an intermediate or scratch
440 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
441 largest register class required. If the requirements for input and output
442 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
443 instead of defining both macros identically.
445 The values returned by these macros are often `GENERAL_REGS'. Return
446 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
447 to or from a register of RCLASS in MODE without requiring a scratch register.
448 Do not define this macro if it would always return `NO_REGS'.
450 If a scratch register is required (either with or without an intermediate
451 register), you should define patterns for `reload_inM' or `reload_outM', as
452 required.. These patterns, which will normally be implemented with a
453 `define_expand', should be similar to the `movM' patterns, except that
454 operand 2 is the scratch register.
456 Define constraints for the reload register and scratch register that contain
457 a single register class. If the original reload register (whose class is
458 RCLASS) can meet the constraint given in the pattern, the value returned by
459 these macros is used for the class of the scratch register. Otherwise, two
460 additional reload registers are required. Their classes are obtained from
461 the constraints in the insn pattern.
463 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
464 either be in a hard register or in memory. Use `true_regnum' to find out;
465 it will return -1 if the pseudo is in memory and the hard register number if
468 These macros should not be used in the case where a particular class of
469 registers can only be copied to memory and not to another class of
470 registers. In that case, secondary reload registers are not needed and
471 would not be helpful. Instead, a stack location must be used to perform the
472 copy and the `movM' pattern should use memory as an intermediate storage.
473 This case often occurs between floating-point and general registers. */
476 xstormy16_secondary_reload_class (enum reg_class rclass
,
477 machine_mode mode ATTRIBUTE_UNUSED
,
480 /* This chip has the interesting property that only the first eight
481 registers can be moved to/from memory. */
483 || ((GET_CODE (x
) == SUBREG
|| REG_P (x
))
484 && (true_regnum (x
) == -1
485 || true_regnum (x
) >= FIRST_PSEUDO_REGISTER
)))
486 && ! reg_class_subset_p (rclass
, EIGHT_REGS
))
492 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
493 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
496 xstormy16_preferred_reload_class (rtx x
, reg_class_t rclass
)
498 if (rclass
== GENERAL_REGS
&& MEM_P (x
))
504 /* Predicate for symbols and addresses that reflect special 8-bit
508 xstormy16_below100_symbol (rtx x
,
509 machine_mode mode ATTRIBUTE_UNUSED
)
511 if (GET_CODE (x
) == CONST
)
513 if (GET_CODE (x
) == PLUS
&& CONST_INT_P (XEXP (x
, 1)))
516 if (GET_CODE (x
) == SYMBOL_REF
)
517 return (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_XSTORMY16_BELOW100
) != 0;
521 HOST_WIDE_INT i
= INTVAL (x
);
523 if ((i
>= 0x0000 && i
<= 0x00ff)
524 || (i
>= 0x7f00 && i
<= 0x7fff))
530 /* Likewise, but only for non-volatile MEMs, for patterns where the
531 MEM will get split into smaller sized accesses. */
534 xstormy16_splittable_below100_operand (rtx x
, machine_mode mode
)
536 if (MEM_P (x
) && MEM_VOLATILE_P (x
))
538 return xstormy16_below100_operand (x
, mode
);
541 /* Expand an 8-bit IOR. This either detects the one case we can
542 actually do, or uses a 16-bit IOR. */
545 xstormy16_expand_iorqi3 (rtx
*operands
)
547 rtx in
, out
, outsub
, val
;
553 if (xstormy16_onebit_set_operand (val
, QImode
))
555 if (!xstormy16_below100_or_register (in
, QImode
))
556 in
= copy_to_mode_reg (QImode
, in
);
557 if (!xstormy16_below100_or_register (out
, QImode
))
558 out
= gen_reg_rtx (QImode
);
559 emit_insn (gen_iorqi3_internal (out
, in
, val
));
560 if (out
!= operands
[0])
561 emit_move_insn (operands
[0], out
);
566 in
= copy_to_mode_reg (QImode
, in
);
568 if (! REG_P (val
) && ! CONST_INT_P (val
))
569 val
= copy_to_mode_reg (QImode
, val
);
572 out
= gen_reg_rtx (QImode
);
574 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
575 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
577 if (! CONST_INT_P (val
))
578 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
580 emit_insn (gen_iorhi3 (outsub
, in
, val
));
582 if (out
!= operands
[0])
583 emit_move_insn (operands
[0], out
);
586 /* Expand an 8-bit AND. This either detects the one case we can
587 actually do, or uses a 16-bit AND. */
590 xstormy16_expand_andqi3 (rtx
*operands
)
592 rtx in
, out
, outsub
, val
;
598 if (xstormy16_onebit_clr_operand (val
, QImode
))
600 if (!xstormy16_below100_or_register (in
, QImode
))
601 in
= copy_to_mode_reg (QImode
, in
);
602 if (!xstormy16_below100_or_register (out
, QImode
))
603 out
= gen_reg_rtx (QImode
);
604 emit_insn (gen_andqi3_internal (out
, in
, val
));
605 if (out
!= operands
[0])
606 emit_move_insn (operands
[0], out
);
611 in
= copy_to_mode_reg (QImode
, in
);
613 if (! REG_P (val
) && ! CONST_INT_P (val
))
614 val
= copy_to_mode_reg (QImode
, val
);
617 out
= gen_reg_rtx (QImode
);
619 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
620 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
622 if (! CONST_INT_P (val
))
623 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
625 emit_insn (gen_andhi3 (outsub
, in
, val
));
627 if (out
!= operands
[0])
628 emit_move_insn (operands
[0], out
);
631 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
633 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
635 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
637 && INTVAL (X) + (OFFSET) >= 0 \
638 && INTVAL (X) + (OFFSET) < 0x8000 \
639 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
642 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
645 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0))
648 if (GET_CODE (x
) == PLUS
649 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0))
652 /* PR 31232: Do not allow INT+INT as an address. */
657 if ((GET_CODE (x
) == PRE_MODIFY
&& CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
658 || GET_CODE (x
) == POST_INC
659 || GET_CODE (x
) == PRE_DEC
)
663 && REGNO_OK_FOR_BASE_P (REGNO (x
))
664 && (! strict
|| REGNO (x
) < FIRST_PSEUDO_REGISTER
))
667 if (xstormy16_below100_symbol (x
, mode
))
673 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
675 On this chip, this is true if the address is valid with an offset
676 of 0 but not of 6, because in that case it cannot be used as an
677 address for DImode or DFmode, or if the address is a post-increment
678 or pre-decrement address. */
681 xstormy16_mode_dependent_address_p (const_rtx x
,
682 addr_space_t as ATTRIBUTE_UNUSED
)
684 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0)
685 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x
, 6))
688 if (GET_CODE (x
) == PLUS
689 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0)
690 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 6))
693 /* Auto-increment addresses are now treated generically in recog.c. */
698 short_memory_operand (rtx x
, machine_mode mode
)
700 if (! memory_operand (x
, mode
))
702 return (GET_CODE (XEXP (x
, 0)) != PLUS
);
705 /* Splitter for the 'move' patterns, for modes not directly implemented
706 by hardware. Emit insns to copy a value of mode MODE from SRC to
709 This function is only called when reload_completed. */
712 xstormy16_split_move (machine_mode mode
, rtx dest
, rtx src
)
714 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
715 int direction
, end
, i
;
716 int src_modifies
= 0;
717 int dest_modifies
= 0;
718 int src_volatile
= 0;
719 int dest_volatile
= 0;
721 rtx auto_inc_reg_rtx
= NULL_RTX
;
723 /* Check initial conditions. */
724 gcc_assert (reload_completed
725 && mode
!= QImode
&& mode
!= HImode
726 && nonimmediate_operand (dest
, mode
)
727 && general_operand (src
, mode
));
729 /* This case is not supported below, and shouldn't be generated. */
730 gcc_assert (! MEM_P (dest
) || ! MEM_P (src
));
732 /* This case is very very bad after reload, so trap it now. */
733 gcc_assert (GET_CODE (dest
) != SUBREG
&& GET_CODE (src
) != SUBREG
);
735 /* The general idea is to copy by words, offsetting the source and
736 destination. Normally the least-significant word will be copied
737 first, but for pre-dec operations it's better to copy the
738 most-significant word first. Only one operand can be a pre-dec
741 It's also possible that the copy overlaps so that the direction
747 mem_operand
= XEXP (dest
, 0);
748 dest_modifies
= side_effects_p (mem_operand
);
749 if (auto_inc_p (mem_operand
))
750 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
751 dest_volatile
= MEM_VOLATILE_P (dest
);
754 dest
= copy_rtx (dest
);
755 MEM_VOLATILE_P (dest
) = 0;
758 else if (MEM_P (src
))
760 mem_operand
= XEXP (src
, 0);
761 src_modifies
= side_effects_p (mem_operand
);
762 if (auto_inc_p (mem_operand
))
763 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
764 src_volatile
= MEM_VOLATILE_P (src
);
767 src
= copy_rtx (src
);
768 MEM_VOLATILE_P (src
) = 0;
772 mem_operand
= NULL_RTX
;
774 if (mem_operand
== NULL_RTX
)
778 && reg_overlap_mentioned_p (dest
, src
)
779 && REGNO (dest
) > REGNO (src
))
782 else if (GET_CODE (mem_operand
) == PRE_DEC
783 || (GET_CODE (mem_operand
) == PLUS
784 && GET_CODE (XEXP (mem_operand
, 0)) == PRE_DEC
))
786 else if (MEM_P (src
) && reg_overlap_mentioned_p (dest
, src
))
790 gcc_assert (REG_P (dest
));
791 regno
= REGNO (dest
);
793 gcc_assert (refers_to_regno_p (regno
, regno
+ num_words
,
796 if (refers_to_regno_p (regno
, mem_operand
))
798 else if (refers_to_regno_p (regno
+ num_words
- 1, regno
+ num_words
,
802 /* This means something like
803 (set (reg:DI r0) (mem:DI (reg:HI r1)))
804 which we'd need to support by doing the set of the second word
809 end
= direction
< 0 ? -1 : num_words
;
810 for (i
= direction
< 0 ? num_words
- 1 : 0; i
!= end
; i
+= direction
)
812 rtx w_src
, w_dest
, insn
;
815 w_src
= gen_rtx_MEM (word_mode
, mem_operand
);
817 w_src
= simplify_gen_subreg (word_mode
, src
, mode
, i
* UNITS_PER_WORD
);
819 MEM_VOLATILE_P (w_src
) = 1;
821 w_dest
= gen_rtx_MEM (word_mode
, mem_operand
);
823 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
,
826 MEM_VOLATILE_P (w_dest
) = 1;
828 /* The simplify_subreg calls must always be able to simplify. */
829 gcc_assert (GET_CODE (w_src
) != SUBREG
830 && GET_CODE (w_dest
) != SUBREG
);
832 insn
= emit_insn (gen_rtx_SET (w_dest
, w_src
));
833 if (auto_inc_reg_rtx
)
834 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
,
840 /* Expander for the 'move' patterns. Emit insns to copy a value of
841 mode MODE from SRC to DEST. */
844 xstormy16_expand_move (machine_mode mode
, rtx dest
, rtx src
)
846 if (MEM_P (dest
) && (GET_CODE (XEXP (dest
, 0)) == PRE_MODIFY
))
848 rtx pmv
= XEXP (dest
, 0);
849 rtx dest_reg
= XEXP (pmv
, 0);
850 rtx dest_mod
= XEXP (pmv
, 1);
851 rtx set
= gen_rtx_SET (dest_reg
, dest_mod
);
852 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
854 dest
= gen_rtx_MEM (mode
, dest_reg
);
855 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
857 else if (MEM_P (src
) && (GET_CODE (XEXP (src
, 0)) == PRE_MODIFY
))
859 rtx pmv
= XEXP (src
, 0);
860 rtx src_reg
= XEXP (pmv
, 0);
861 rtx src_mod
= XEXP (pmv
, 1);
862 rtx set
= gen_rtx_SET (src_reg
, src_mod
);
863 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
865 src
= gen_rtx_MEM (mode
, src_reg
);
866 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
869 /* There are only limited immediate-to-memory move instructions. */
870 if (! reload_in_progress
871 && ! reload_completed
873 && (! CONST_INT_P (XEXP (dest
, 0))
874 || ! xstormy16_legitimate_address_p (mode
, XEXP (dest
, 0), 0))
875 && ! xstormy16_below100_operand (dest
, mode
)
877 && GET_CODE (src
) != SUBREG
)
878 src
= copy_to_mode_reg (mode
, src
);
880 /* Don't emit something we would immediately split. */
882 && mode
!= HImode
&& mode
!= QImode
)
884 xstormy16_split_move (mode
, dest
, src
);
888 emit_insn (gen_rtx_SET (dest
, src
));
893 The stack is laid out as follows:
897 Register save area (up to 4 words)
898 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
900 AP-> Return address (two words)
901 9th procedure parameter word
902 10th procedure parameter word
904 last procedure parameter word
906 The frame pointer location is tuned to make it most likely that all
907 parameters and local variables can be accessed using a load-indexed
910 /* A structure to describe the layout. */
911 struct xstormy16_stack_layout
913 /* Size of the topmost three items on the stack. */
915 int register_save_size
;
916 int stdarg_save_size
;
917 /* Sum of the above items. */
919 /* Various offsets. */
920 int first_local_minus_ap
;
925 /* Does REGNO need to be saved? */
926 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
927 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
928 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
929 && (REGNUM != CARRY_REGNUM) \
930 && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
932 /* Compute the stack layout. */
934 struct xstormy16_stack_layout
935 xstormy16_compute_stack_layout (void)
937 struct xstormy16_stack_layout layout
;
939 const int ifun
= xstormy16_interrupt_function_p ();
941 layout
.locals_size
= get_frame_size ();
943 layout
.register_save_size
= 0;
944 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
945 if (REG_NEEDS_SAVE (regno
, ifun
))
946 layout
.register_save_size
+= UNITS_PER_WORD
;
949 layout
.stdarg_save_size
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
951 layout
.stdarg_save_size
= 0;
953 layout
.frame_size
= (layout
.locals_size
954 + layout
.register_save_size
955 + layout
.stdarg_save_size
);
957 if (crtl
->args
.size
<= 2048 && crtl
->args
.size
!= -1)
959 if (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
960 + crtl
->args
.size
<= 2048)
961 layout
.fp_minus_ap
= layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
;
963 layout
.fp_minus_ap
= 2048 - crtl
->args
.size
;
966 layout
.fp_minus_ap
= (layout
.stdarg_save_size
967 + layout
.register_save_size
968 - INCOMING_FRAME_SP_OFFSET
);
969 layout
.sp_minus_fp
= (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
970 - layout
.fp_minus_ap
);
971 layout
.first_local_minus_ap
= layout
.sp_minus_fp
- layout
.locals_size
;
975 /* Worker function for TARGET_CAN_ELIMINATE. */
978 xstormy16_can_eliminate (const int from
, const int to
)
980 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
981 ? ! frame_pointer_needed
985 /* Determine how all the special registers get eliminated. */
988 xstormy16_initial_elimination_offset (int from
, int to
)
990 struct xstormy16_stack_layout layout
;
993 layout
= xstormy16_compute_stack_layout ();
995 if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
996 result
= layout
.sp_minus_fp
- layout
.locals_size
;
997 else if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
998 result
= - layout
.locals_size
;
999 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
1000 result
= - layout
.fp_minus_ap
;
1001 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1002 result
= - (layout
.sp_minus_fp
+ layout
.fp_minus_ap
);
1010 emit_addhi3_postreload (rtx dest
, rtx src0
, rtx src1
)
1012 rtx set
, clobber
, insn
;
1014 set
= gen_rtx_SET (dest
, gen_rtx_PLUS (HImode
, src0
, src1
));
1015 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1016 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
1020 /* Called after register allocation to add any instructions needed for
1021 the prologue. Using a prologue insn is favored compared to putting
1022 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1023 since it allows the scheduler to intermix instructions with the
1024 saves of the caller saved registers. In some cases, it might be
1025 necessary to emit a barrier instruction as the last insn to prevent
1028 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1029 so that the debug info generation code can handle them properly. */
1032 xstormy16_expand_prologue (void)
1034 struct xstormy16_stack_layout layout
;
1038 const int ifun
= xstormy16_interrupt_function_p ();
1040 mem_push_rtx
= gen_rtx_POST_INC (Pmode
, stack_pointer_rtx
);
1041 mem_push_rtx
= gen_rtx_MEM (HImode
, mem_push_rtx
);
1043 layout
= xstormy16_compute_stack_layout ();
1045 if (layout
.locals_size
>= 32768)
1046 error ("local variable memory requirements exceed capacity");
1048 if (flag_stack_usage_info
)
1049 current_function_static_stack_size
= layout
.frame_size
;
1051 /* Save the argument registers if necessary. */
1052 if (layout
.stdarg_save_size
)
1053 for (regno
= FIRST_ARGUMENT_REGISTER
;
1054 regno
< FIRST_ARGUMENT_REGISTER
+ NUM_ARGUMENT_REGISTERS
;
1058 rtx reg
= gen_rtx_REG (HImode
, regno
);
1060 insn
= emit_move_insn (mem_push_rtx
, reg
);
1061 RTX_FRAME_RELATED_P (insn
) = 1;
1063 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1065 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1067 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (stack_pointer_rtx
,
1068 plus_constant (Pmode
,
1070 GET_MODE_SIZE (Pmode
)));
1071 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1072 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1073 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1076 /* Push each of the registers to save. */
1077 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1078 if (REG_NEEDS_SAVE (regno
, ifun
))
1081 rtx reg
= gen_rtx_REG (HImode
, regno
);
1083 insn
= emit_move_insn (mem_push_rtx
, reg
);
1084 RTX_FRAME_RELATED_P (insn
) = 1;
1086 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1088 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1090 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (stack_pointer_rtx
,
1091 plus_constant (Pmode
,
1093 GET_MODE_SIZE (Pmode
)));
1094 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1095 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1096 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1099 /* It's just possible that the SP here might be what we need for
1101 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1103 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1104 RTX_FRAME_RELATED_P (insn
) = 1;
1107 /* Allocate space for local variables. */
1108 if (layout
.locals_size
)
1110 insn
= emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1111 GEN_INT (layout
.locals_size
));
1112 RTX_FRAME_RELATED_P (insn
) = 1;
1115 /* Set up the frame pointer, if required. */
1116 if (frame_pointer_needed
&& layout
.sp_minus_fp
!= layout
.locals_size
)
1118 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1119 RTX_FRAME_RELATED_P (insn
) = 1;
1121 if (layout
.sp_minus_fp
)
1123 insn
= emit_addhi3_postreload (hard_frame_pointer_rtx
,
1124 hard_frame_pointer_rtx
,
1125 GEN_INT (- layout
.sp_minus_fp
));
1126 RTX_FRAME_RELATED_P (insn
) = 1;
1131 /* Do we need an epilogue at all? */
1134 direct_return (void)
1136 return (reload_completed
1137 && xstormy16_compute_stack_layout ().frame_size
== 0
1138 && ! xstormy16_interrupt_function_p ());
1141 /* Called after register allocation to add any instructions needed for
1142 the epilogue. Using an epilogue insn is favored compared to putting
1143 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1144 since it allows the scheduler to intermix instructions with the
1145 saves of the caller saved registers. In some cases, it might be
1146 necessary to emit a barrier instruction as the last insn to prevent
1150 xstormy16_expand_epilogue (void)
1152 struct xstormy16_stack_layout layout
;
1155 const int ifun
= xstormy16_interrupt_function_p ();
1157 mem_pop_rtx
= gen_rtx_PRE_DEC (Pmode
, stack_pointer_rtx
);
1158 mem_pop_rtx
= gen_rtx_MEM (HImode
, mem_pop_rtx
);
1160 layout
= xstormy16_compute_stack_layout ();
1162 /* Pop the stack for the locals. */
1163 if (layout
.locals_size
)
1165 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1166 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1168 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1169 GEN_INT (- layout
.locals_size
));
1172 /* Restore any call-saved registers. */
1173 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1174 if (REG_NEEDS_SAVE (regno
, ifun
))
1175 emit_move_insn (gen_rtx_REG (HImode
, regno
), mem_pop_rtx
);
1177 /* Pop the stack for the stdarg save area. */
1178 if (layout
.stdarg_save_size
)
1179 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1180 GEN_INT (- layout
.stdarg_save_size
));
1184 emit_jump_insn (gen_return_internal_interrupt ());
1186 emit_jump_insn (gen_return_internal ());
1190 xstormy16_epilogue_uses (int regno
)
1192 if (reload_completed
&& call_used_regs
[regno
])
1194 const int ifun
= xstormy16_interrupt_function_p ();
1195 return REG_NEEDS_SAVE (regno
, ifun
);
void
xstormy16_function_profiler (void)
{
  /* Profiling is not implemented for this target.  */
  sorry ("function_profiler support");
}
1206 /* Update CUM to advance past an argument in the argument list. The
1207 values MODE, TYPE and NAMED describe that argument. Once this is
1208 done, the variable CUM is suitable for analyzing the *following*
1209 argument with `TARGET_FUNCTION_ARG', etc.
1211 This function need not do anything if the argument in question was
1212 passed on the stack. The compiler knows how to track the amount of
1213 stack space used for arguments without any special help. However,
1214 it makes life easier for xstormy16_build_va_list if it does update
1218 xstormy16_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1219 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1221 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1223 /* If an argument would otherwise be passed partially in registers,
1224 and partially on the stack, the whole of it is passed on the
1226 if (*cum
< NUM_ARGUMENT_REGISTERS
1227 && *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1228 *cum
= NUM_ARGUMENT_REGISTERS
;
1230 *cum
+= XSTORMY16_WORD_SIZE (type
, mode
);
1234 xstormy16_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1235 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1237 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1239 if (mode
== VOIDmode
)
1241 if (targetm
.calls
.must_pass_in_stack (mode
, type
)
1242 || *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1244 return gen_rtx_REG (mode
, *cum
+ FIRST_ARGUMENT_REGISTER
);
1247 /* Build the va_list type.
1249 For this chip, va_list is a record containing a counter and a pointer.
1250 The counter is of type 'int' and indicates how many bytes
1251 have been used to date. The pointer indicates the stack position
1252 for arguments that have not been passed in registers.
1253 To keep the layout nice, the pointer is first in the structure. */
1256 xstormy16_build_builtin_va_list (void)
1258 tree f_1
, f_2
, record
, type_decl
;
1260 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
1261 type_decl
= build_decl (BUILTINS_LOCATION
,
1262 TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
1264 f_1
= build_decl (BUILTINS_LOCATION
,
1265 FIELD_DECL
, get_identifier ("base"),
1267 f_2
= build_decl (BUILTINS_LOCATION
,
1268 FIELD_DECL
, get_identifier ("count"),
1269 unsigned_type_node
);
1271 DECL_FIELD_CONTEXT (f_1
) = record
;
1272 DECL_FIELD_CONTEXT (f_2
) = record
;
1274 TYPE_STUB_DECL (record
) = type_decl
;
1275 TYPE_NAME (record
) = type_decl
;
1276 TYPE_FIELDS (record
) = f_1
;
1277 DECL_CHAIN (f_1
) = f_2
;
1279 layout_type (record
);
1284 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1285 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1286 variable to initialize. NEXTARG is the machine independent notion of the
1287 'next' argument after the variable arguments. */
1290 xstormy16_expand_builtin_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
1292 tree f_base
, f_count
;
1296 if (xstormy16_interrupt_function_p ())
1297 error ("cannot use va_start in interrupt function");
1299 f_base
= TYPE_FIELDS (va_list_type_node
);
1300 f_count
= DECL_CHAIN (f_base
);
1302 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1303 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1306 t
= make_tree (TREE_TYPE (base
), virtual_incoming_args_rtx
);
1307 u
= build_int_cst (NULL_TREE
, - INCOMING_FRAME_SP_OFFSET
);
1308 u
= fold_convert (TREE_TYPE (count
), u
);
1309 t
= fold_build_pointer_plus (t
, u
);
1310 t
= build2 (MODIFY_EXPR
, TREE_TYPE (base
), base
, t
);
1311 TREE_SIDE_EFFECTS (t
) = 1;
1312 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1314 t
= build2 (MODIFY_EXPR
, TREE_TYPE (count
), count
,
1315 build_int_cst (NULL_TREE
,
1316 crtl
->args
.info
* UNITS_PER_WORD
));
1317 TREE_SIDE_EFFECTS (t
) = 1;
1318 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1321 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1322 of type va_list as a tree, TYPE is the type passed to va_arg.
1323 Note: This algorithm is documented in stormy-abi. */
1326 xstormy16_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
1327 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
1329 tree f_base
, f_count
;
1331 tree count_tmp
, addr
, t
;
1332 tree lab_gotaddr
, lab_fromstack
;
1333 int size
, size_of_reg_args
, must_stack
;
1336 f_base
= TYPE_FIELDS (va_list_type_node
);
1337 f_count
= DECL_CHAIN (f_base
);
1339 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1340 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1343 must_stack
= targetm
.calls
.must_pass_in_stack (TYPE_MODE (type
), type
);
1344 size_tree
= round_up (size_in_bytes (type
), UNITS_PER_WORD
);
1345 gimplify_expr (&size_tree
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
1347 size_of_reg_args
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
1349 count_tmp
= get_initialized_tmp_var (count
, pre_p
, NULL
);
1350 lab_gotaddr
= create_artificial_label (UNKNOWN_LOCATION
);
1351 lab_fromstack
= create_artificial_label (UNKNOWN_LOCATION
);
1352 addr
= create_tmp_var (ptr_type_node
);
1358 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1359 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1360 r
= fold_convert (TREE_TYPE (count
), size_int (size_of_reg_args
));
1361 t
= build2 (GT_EXPR
, boolean_type_node
, t
, r
);
1362 t
= build3 (COND_EXPR
, void_type_node
, t
,
1363 build1 (GOTO_EXPR
, void_type_node
, lab_fromstack
),
1365 gimplify_and_add (t
, pre_p
);
1367 t
= fold_build_pointer_plus (base
, count_tmp
);
1368 gimplify_assign (addr
, t
, pre_p
);
1370 t
= build1 (GOTO_EXPR
, void_type_node
, lab_gotaddr
);
1371 gimplify_and_add (t
, pre_p
);
1373 t
= build1 (LABEL_EXPR
, void_type_node
, lab_fromstack
);
1374 gimplify_and_add (t
, pre_p
);
1377 /* Arguments larger than a word might need to skip over some
1378 registers, since arguments are either passed entirely in
1379 registers or entirely on the stack. */
1380 size
= PUSH_ROUNDING (int_size_in_bytes (type
));
1381 if (size
> 2 || size
< 0 || must_stack
)
1385 r
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
);
1386 u
= build2 (MODIFY_EXPR
, TREE_TYPE (count_tmp
), count_tmp
, r
);
1388 t
= fold_convert (TREE_TYPE (count
), r
);
1389 t
= build2 (GE_EXPR
, boolean_type_node
, count_tmp
, t
);
1390 t
= build3 (COND_EXPR
, void_type_node
, t
, NULL_TREE
, u
);
1391 gimplify_and_add (t
, pre_p
);
1394 t
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
1395 + INCOMING_FRAME_SP_OFFSET
);
1396 t
= fold_convert (TREE_TYPE (count
), t
);
1397 t
= build2 (MINUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1398 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), t
,
1399 fold_convert (TREE_TYPE (count
), size_tree
));
1400 t
= fold_convert (TREE_TYPE (t
), fold (t
));
1401 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1402 t
= fold_build_pointer_plus (base
, t
);
1403 gimplify_assign (addr
, t
, pre_p
);
1405 t
= build1 (LABEL_EXPR
, void_type_node
, lab_gotaddr
);
1406 gimplify_and_add (t
, pre_p
);
1408 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1409 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1410 gimplify_assign (count
, t
, pre_p
);
1412 addr
= fold_convert (build_pointer_type (type
), addr
);
1413 return build_va_arg_indirect_ref (addr
);
1416 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1419 xstormy16_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
1421 rtx temp
= gen_reg_rtx (HImode
);
1422 rtx reg_fnaddr
= gen_reg_rtx (HImode
);
1423 rtx reg_addr
, reg_addr_mem
;
1425 reg_addr
= copy_to_reg (XEXP (m_tramp
, 0));
1426 reg_addr_mem
= adjust_automodify_address (m_tramp
, HImode
, reg_addr
, 0);
1428 emit_move_insn (temp
, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM
));
1429 emit_move_insn (reg_addr_mem
, temp
);
1430 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1431 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1433 emit_move_insn (temp
, static_chain
);
1434 emit_move_insn (reg_addr_mem
, temp
);
1435 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1436 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1438 emit_move_insn (reg_fnaddr
, XEXP (DECL_RTL (fndecl
), 0));
1439 emit_move_insn (temp
, reg_fnaddr
);
1440 emit_insn (gen_andhi3 (temp
, temp
, GEN_INT (0xFF)));
1441 emit_insn (gen_iorhi3 (temp
, temp
, GEN_INT (0x0200)));
1442 emit_move_insn (reg_addr_mem
, temp
);
1443 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1444 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1446 emit_insn (gen_lshrhi3 (reg_fnaddr
, reg_fnaddr
, GEN_INT (8)));
1447 emit_move_insn (reg_addr_mem
, reg_fnaddr
);
1450 /* Worker function for TARGET_FUNCTION_VALUE. */
1453 xstormy16_function_value (const_tree valtype
,
1454 const_tree func ATTRIBUTE_UNUSED
,
1455 bool outgoing ATTRIBUTE_UNUSED
)
1458 mode
= TYPE_MODE (valtype
);
1459 PROMOTE_MODE (mode
, 0, valtype
);
1460 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1463 /* Worker function for TARGET_LIBCALL_VALUE. */
1466 xstormy16_libcall_value (machine_mode mode
,
1467 const_rtx fun ATTRIBUTE_UNUSED
)
1469 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1472 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1475 xstormy16_function_value_regno_p (const unsigned int regno
)
1477 return (regno
== RETURN_VALUE_REGNUM
);
1480 /* A C compound statement that outputs the assembler code for a thunk function,
1481 used to implement C++ virtual function calls with multiple inheritance. The
1482 thunk acts as a wrapper around a virtual function, adjusting the implicit
1483 object parameter before handing control off to the real function.
1485 First, emit code to add the integer DELTA to the location that contains the
1486 incoming first argument. Assume that this argument contains a pointer, and
1487 is the one used to pass the `this' pointer in C++. This is the incoming
1488 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1489 addition must preserve the values of all other incoming arguments.
1491 After the addition, emit code to jump to FUNCTION, which is a
1492 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1493 the return address. Hence returning from FUNCTION will return to whoever
1494 called the current `thunk'.
1496 The effect must be as if @var{function} had been called directly
1497 with the adjusted first argument. This macro is responsible for
1498 emitting all of the code for a thunk function;
1499 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1502 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1503 extracted from it.) It might possibly be useful on some targets, but
1507 xstormy16_asm_output_mi_thunk (FILE *file
,
1508 tree thunk_fndecl ATTRIBUTE_UNUSED
,
1509 HOST_WIDE_INT delta
,
1510 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1513 int regnum
= FIRST_ARGUMENT_REGISTER
;
1515 /* There might be a hidden first argument for a returned structure. */
1516 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
1519 fprintf (file
, "\tadd %s,#0x%x\n", reg_names
[regnum
], (int) delta
& 0xFFFF);
1520 fputs ("\tjmpf ", file
);
1521 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1525 /* The purpose of this function is to override the default behavior of
1526 BSS objects. Normally, they go into .bss or .sbss via ".common"
1527 directives, but we need to override that and put them in
1528 .bss_below100. We can't just use a section override (like we do
1529 for .data_below100), because that makes them initialized rather
1530 than uninitialized. */
1533 xstormy16_asm_output_aligned_common (FILE *stream
,
1540 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
1545 && GET_CODE (symbol
= XEXP (mem
, 0)) == SYMBOL_REF
1546 && SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_XSTORMY16_BELOW100
)
1551 switch_to_section (bss100_section
);
1559 name2
= default_strip_name_encoding (name
);
1561 fprintf (stream
, "\t.globl\t%s\n", name2
);
1563 fprintf (stream
, "\t.p2align %d\n", p2align
);
1564 fprintf (stream
, "\t.type\t%s, @object\n", name2
);
1565 fprintf (stream
, "\t.size\t%s, %d\n", name2
, size
);
1566 fprintf (stream
, "%s:\n\t.space\t%d\n", name2
, size
);
1572 fprintf (stream
, "\t.local\t");
1573 assemble_name (stream
, name
);
1574 fprintf (stream
, "\n");
1576 fprintf (stream
, "\t.comm\t");
1577 assemble_name (stream
, name
);
1578 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
1581 /* Implement TARGET_ASM_INIT_SECTIONS. */
1584 xstormy16_asm_init_sections (void)
1587 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
1588 output_section_asm_op
,
1589 "\t.section \".bss_below100\",\"aw\",@nobits");
1592 /* Mark symbols with the "below100" attribute so that we can use the
1593 special addressing modes for them. */
1596 xstormy16_encode_section_info (tree decl
, rtx r
, int first
)
1598 default_encode_section_info (decl
, r
, first
);
1600 if (TREE_CODE (decl
) == VAR_DECL
1601 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl
))
1602 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl
))))
1604 rtx symbol
= XEXP (r
, 0);
1606 gcc_assert (GET_CODE (symbol
) == SYMBOL_REF
);
1607 SYMBOL_REF_FLAGS (symbol
) |= SYMBOL_FLAG_XSTORMY16_BELOW100
;
1611 #undef TARGET_ASM_CONSTRUCTOR
1612 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1613 #undef TARGET_ASM_DESTRUCTOR
1614 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1616 /* Output constructors and destructors. Just like
1617 default_named_section_asm_out_* but don't set the sections writable. */
1620 xstormy16_asm_out_destructor (rtx symbol
, int priority
)
1622 const char *section
= ".dtors";
1625 /* ??? This only works reliably with the GNU linker. */
1626 if (priority
!= DEFAULT_INIT_PRIORITY
)
1628 sprintf (buf
, ".dtors.%.5u",
1629 /* Invert the numbering so the linker puts us in the proper
1630 order; constructors are run from right to left, and the
1631 linker sorts in increasing order. */
1632 MAX_INIT_PRIORITY
- priority
);
1636 switch_to_section (get_section (section
, 0, NULL
));
1637 assemble_align (POINTER_SIZE
);
1638 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1642 xstormy16_asm_out_constructor (rtx symbol
, int priority
)
1644 const char *section
= ".ctors";
1647 /* ??? This only works reliably with the GNU linker. */
1648 if (priority
!= DEFAULT_INIT_PRIORITY
)
1650 sprintf (buf
, ".ctors.%.5u",
1651 /* Invert the numbering so the linker puts us in the proper
1652 order; constructors are run from right to left, and the
1653 linker sorts in increasing order. */
1654 MAX_INIT_PRIORITY
- priority
);
1658 switch_to_section (get_section (section
, 0, NULL
));
1659 assemble_align (POINTER_SIZE
);
1660 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1663 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1665 Print a memory address as an operand to reference that memory location. */
1668 xstormy16_print_operand_address (FILE *file
, machine_mode
/*mode*/,
1671 HOST_WIDE_INT offset
;
1672 int pre_dec
, post_inc
;
1674 /* There are a few easy cases. */
1675 if (CONST_INT_P (address
))
1677 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (address
) & 0xFFFF);
1681 if (CONSTANT_P (address
) || LABEL_P (address
))
1683 output_addr_const (file
, address
);
1687 /* Otherwise, it's hopefully something of the form
1688 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1689 if (GET_CODE (address
) == PLUS
)
1691 gcc_assert (CONST_INT_P (XEXP (address
, 1)));
1692 offset
= INTVAL (XEXP (address
, 1));
1693 address
= XEXP (address
, 0);
1698 pre_dec
= (GET_CODE (address
) == PRE_DEC
);
1699 post_inc
= (GET_CODE (address
) == POST_INC
);
1700 if (pre_dec
|| post_inc
)
1701 address
= XEXP (address
, 0);
1703 gcc_assert (REG_P (address
));
1708 fputs (reg_names
[REGNO (address
)], file
);
1712 fprintf (file
, "," HOST_WIDE_INT_PRINT_DEC
, offset
);
1716 /* Worker function for TARGET_PRINT_OPERAND.
1718 Print an operand to an assembler instruction. */
1721 xstormy16_print_operand (FILE *file
, rtx x
, int code
)
1726 /* There is either one bit set, or one bit clear, in X.
1727 Print it preceded by '#'. */
1729 static int bits_set
[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1730 HOST_WIDE_INT xx
= 1;
1733 if (CONST_INT_P (x
))
1736 output_operand_lossage ("'B' operand is not constant");
1738 /* GCC sign-extends masks with the MSB set, so we have to
1739 detect all the cases that differ only in sign extension
1740 beyond the bits we care about. Normally, the predicates
1741 and constraints ensure that we have the right values. This
1742 works correctly for valid masks. */
1743 if (bits_set
[xx
& 7] <= 1)
1745 /* Remove sign extension bits. */
1746 if ((~xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1748 else if ((~xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1750 l
= exact_log2 (xx
);
1754 /* Add sign extension bits. */
1755 if ((xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1756 xx
|= ~(HOST_WIDE_INT
)0xff;
1757 else if ((xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1758 xx
|= ~(HOST_WIDE_INT
)0xffff;
1759 l
= exact_log2 (~xx
);
1763 output_operand_lossage ("'B' operand has multiple bits set");
1765 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, l
);
1770 /* Print the symbol without a surrounding @fptr(). */
1771 if (GET_CODE (x
) == SYMBOL_REF
)
1772 assemble_name (file
, XSTR (x
, 0));
1773 else if (LABEL_P (x
))
1774 output_asm_label (x
);
1776 xstormy16_print_operand_address (file
, VOIDmode
, x
);
1781 /* Print the immediate operand less one, preceded by '#'.
1782 For 'O', negate it first. */
1784 HOST_WIDE_INT xx
= 0;
1786 if (CONST_INT_P (x
))
1789 output_operand_lossage ("'o' operand is not constant");
1794 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, xx
- 1);
1799 /* Print the shift mask for bp/bn. */
1801 HOST_WIDE_INT xx
= 1;
1804 if (CONST_INT_P (x
))
1807 output_operand_lossage ("'B' operand is not constant");
1811 fputs (IMMEDIATE_PREFIX
, file
);
1812 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, l
);
1817 /* Handled below. */
1821 output_operand_lossage ("xstormy16_print_operand: unknown code");
1825 switch (GET_CODE (x
))
1828 fputs (reg_names
[REGNO (x
)], file
);
1832 xstormy16_print_operand_address (file
, GET_MODE (x
), XEXP (x
, 0));
1836 /* Some kind of constant or label; an immediate operand,
1837 so prefix it with '#' for the assembler. */
1838 fputs (IMMEDIATE_PREFIX
, file
);
1839 output_addr_const (file
, x
);
1846 /* Expander for the `casesi' pattern.
1847 INDEX is the index of the switch statement.
1848 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1849 to the first table entry.
1850 RANGE is the number of table entries.
1851 TABLE is an ADDR_VEC that is the jump table.
1852 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1853 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1856 xstormy16_expand_casesi (rtx index
, rtx lower_bound
, rtx range
,
1857 rtx table
, rtx default_label
)
1859 HOST_WIDE_INT range_i
= INTVAL (range
);
1862 /* This code uses 'br', so it can deal only with tables of size up to
1864 if (range_i
>= 8192)
1865 sorry ("switch statement of size %lu entries too large",
1866 (unsigned long) range_i
);
1868 index
= expand_binop (SImode
, sub_optab
, index
, lower_bound
, NULL_RTX
, 0,
1870 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, SImode
, 1,
1872 int_index
= gen_lowpart_common (HImode
, index
);
1873 emit_insn (gen_ashlhi3 (int_index
, int_index
, const2_rtx
));
1874 emit_jump_insn (gen_tablejump_pcrel (int_index
, table
));
1877 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1878 instructions, without label or alignment or any other special
1879 constructs. We know that the previous instruction will be the
1880 `tablejump_pcrel' output above.
1882 TODO: it might be nice to output 'br' instructions if they could
1886 xstormy16_output_addr_vec (FILE *file
, rtx label ATTRIBUTE_UNUSED
, rtx table
)
1890 switch_to_section (current_function_section ());
1892 vlen
= XVECLEN (table
, 0);
1893 for (idx
= 0; idx
< vlen
; idx
++)
1895 fputs ("\tjmpf ", file
);
1896 output_asm_label (XEXP (XVECEXP (table
, 0, idx
), 0));
1901 /* Expander for the `call' patterns.
1902 RETVAL is the RTL for the return register or NULL for void functions.
1903 DEST is the function to call, expressed as a MEM.
1904 COUNTER is ignored. */
1907 xstormy16_expand_call (rtx retval
, rtx dest
, rtx counter
)
1912 gcc_assert (MEM_P (dest
));
1913 dest
= XEXP (dest
, 0);
1915 if (! CONSTANT_P (dest
) && ! REG_P (dest
))
1916 dest
= force_reg (Pmode
, dest
);
1921 mode
= GET_MODE (retval
);
1923 call
= gen_rtx_CALL (mode
, gen_rtx_MEM (FUNCTION_MODE
, dest
),
1926 call
= gen_rtx_SET (retval
, call
);
1928 if (! CONSTANT_P (dest
))
1930 temp
= gen_reg_rtx (HImode
);
1931 emit_move_insn (temp
, const0_rtx
);
1936 call
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, call
,
1937 gen_rtx_USE (VOIDmode
, temp
)));
1938 emit_call_insn (call
);
1941 /* Expanders for multiword computational operations. */
1943 /* Expander for arithmetic operations; emit insns to compute
1945 (set DEST (CODE:MODE SRC0 SRC1))
1947 When CODE is COMPARE, a branch template is generated
1948 (this saves duplicating code in xstormy16_split_cbranch). */
1951 xstormy16_expand_arith (machine_mode mode
, enum rtx_code code
,
1952 rtx dest
, rtx src0
, rtx src1
)
1954 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
1959 emit_move_insn (src0
, const0_rtx
);
1961 for (i
= 0; i
< num_words
; i
++)
1963 rtx w_src0
, w_src1
, w_dest
;
1966 w_src0
= simplify_gen_subreg (word_mode
, src0
, mode
,
1967 i
* UNITS_PER_WORD
);
1968 w_src1
= simplify_gen_subreg (word_mode
, src1
, mode
, i
* UNITS_PER_WORD
);
1969 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
, i
* UNITS_PER_WORD
);
1975 && CONST_INT_P (w_src1
)
1976 && INTVAL (w_src1
) == 0)
1980 insn
= gen_addchi4 (w_dest
, w_src0
, w_src1
);
1982 insn
= gen_addchi5 (w_dest
, w_src0
, w_src1
);
1988 if (code
== COMPARE
&& i
== num_words
- 1)
1990 rtx branch
, sub
, clobber
, sub_1
;
1992 sub_1
= gen_rtx_MINUS (HImode
, w_src0
,
1993 gen_rtx_ZERO_EXTEND (HImode
, gen_rtx_REG (BImode
, CARRY_REGNUM
)));
1994 sub
= gen_rtx_SET (w_dest
,
1995 gen_rtx_MINUS (HImode
, sub_1
, w_src1
));
1996 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1997 branch
= gen_rtx_SET (pc_rtx
,
1998 gen_rtx_IF_THEN_ELSE (VOIDmode
,
2004 insn
= gen_rtx_PARALLEL (VOIDmode
,
2005 gen_rtvec (3, branch
, sub
, clobber
));
2009 && CONST_INT_P (w_src1
)
2010 && INTVAL (w_src1
) == 0)
2013 insn
= gen_subchi4 (w_dest
, w_src0
, w_src1
);
2015 insn
= gen_subchi5 (w_dest
, w_src0
, w_src1
);
2021 if (CONST_INT_P (w_src1
)
2022 && INTVAL (w_src1
) == -(code
== AND
))
2025 insn
= gen_rtx_SET (w_dest
, gen_rtx_fmt_ee (code
, mode
,
2030 insn
= gen_rtx_SET (w_dest
, gen_rtx_NOT (mode
, w_src0
));
2041 /* If we emit nothing, try_split() will think we failed. So emit
2042 something that does nothing and can be optimized away. */
2047 /* The shift operations are split at output time for constant values;
2048 variable-width shifts get handed off to a library routine.
2050 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2051 SIZE_R will be a CONST_INT, X will be a hard register. */
2054 xstormy16_output_shift (machine_mode mode
, enum rtx_code code
,
2055 rtx x
, rtx size_r
, rtx temp
)
2058 const char *r0
, *r1
, *rt
;
2061 gcc_assert (CONST_INT_P (size_r
)
2065 size
= INTVAL (size_r
) & (GET_MODE_BITSIZE (mode
) - 1);
2070 r0
= reg_names
[REGNO (x
)];
2071 r1
= reg_names
[REGNO (x
) + 1];
2073 /* For shifts of size 1, we can use the rotate instructions. */
2079 sprintf (r
, "shl %s,#1 | rlc %s,#1", r0
, r1
);
2082 sprintf (r
, "asr %s,#1 | rrc %s,#1", r1
, r0
);
2085 sprintf (r
, "shr %s,#1 | rrc %s,#1", r1
, r0
);
2093 /* For large shifts, there are easy special cases. */
2099 sprintf (r
, "mov %s,%s | mov %s,#0", r1
, r0
, r0
);
2102 sprintf (r
, "mov %s,%s | asr %s,#15", r0
, r1
, r1
);
2105 sprintf (r
, "mov %s,%s | mov %s,#0", r0
, r1
, r1
);
2117 sprintf (r
, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2118 r1
, r0
, r0
, r1
, (int) size
- 16);
2121 sprintf (r
, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2122 r0
, r1
, r1
, r0
, (int) size
- 16);
2125 sprintf (r
, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2126 r0
, r1
, r1
, r0
, (int) size
- 16);
2134 /* For the rest, we have to do more work. In particular, we
2135 need a temporary. */
2136 rt
= reg_names
[REGNO (temp
)];
2141 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2142 rt
, r0
, r0
, (int) size
, r1
, (int) size
, rt
, (int) (16 - size
),
2147 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2148 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2153 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2154 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2163 /* Attribute handling. */
2165 /* Return nonzero if the function is an interrupt function. */
2168 xstormy16_interrupt_function_p (void)
2172 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2173 any functions are declared, which is demonstrably wrong, but
2174 it is worked around here. FIXME. */
2178 attributes
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
2179 return lookup_attribute ("interrupt", attributes
) != NULL_TREE
;
2182 #undef TARGET_ATTRIBUTE_TABLE
2183 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2185 static tree xstormy16_handle_interrupt_attribute
2186 (tree
*, tree
, tree
, int, bool *);
2187 static tree xstormy16_handle_below100_attribute
2188 (tree
*, tree
, tree
, int, bool *);
2190 static const struct attribute_spec xstormy16_attribute_table
[] =
2192 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2193 affects_type_identity, exclusions. */
2194 { "interrupt", 0, 0, false, true, true,
2195 xstormy16_handle_interrupt_attribute
, false, NULL
},
2196 { "BELOW100", 0, 0, false, false, false,
2197 xstormy16_handle_below100_attribute
, false, NULL
},
2198 { "below100", 0, 0, false, false, false,
2199 xstormy16_handle_below100_attribute
, false, NULL
},
2200 { NULL
, 0, 0, false, false, false, NULL
, false, NULL
}
2203 /* Handle an "interrupt" attribute;
2204 arguments as in struct attribute_spec.handler. */
2207 xstormy16_handle_interrupt_attribute (tree
*node
, tree name
,
2208 tree args ATTRIBUTE_UNUSED
,
2209 int flags ATTRIBUTE_UNUSED
,
2212 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
2214 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2216 *no_add_attrs
= true;
2222 /* Handle an "below" attribute;
2223 arguments as in struct attribute_spec.handler. */
2226 xstormy16_handle_below100_attribute (tree
*node
,
2227 tree name ATTRIBUTE_UNUSED
,
2228 tree args ATTRIBUTE_UNUSED
,
2229 int flags ATTRIBUTE_UNUSED
,
2232 if (TREE_CODE (*node
) != VAR_DECL
2233 && TREE_CODE (*node
) != POINTER_TYPE
2234 && TREE_CODE (*node
) != TYPE_DECL
)
2236 warning (OPT_Wattributes
,
2237 "%<__BELOW100__%> attribute only applies to variables");
2238 *no_add_attrs
= true;
2240 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
2242 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
2244 warning (OPT_Wattributes
, "__BELOW100__ attribute not allowed "
2245 "with auto storage class");
2246 *no_add_attrs
= true;
2253 #undef TARGET_INIT_BUILTINS
2254 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2255 #undef TARGET_EXPAND_BUILTIN
2256 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2262 const char * arg_ops
; /* 0..9, t for temp register, r for return value. */
2263 const char * arg_types
; /* s=short,l=long, upper case for unsigned. */
2267 { "__sdivlh", CODE_FOR_sdivlh
, "rt01", "sls" },
2268 { "__smodlh", CODE_FOR_sdivlh
, "tr01", "sls" },
2269 { "__udivlh", CODE_FOR_udivlh
, "rt01", "SLS" },
2270 { "__umodlh", CODE_FOR_udivlh
, "tr01", "SLS" },
2271 { NULL
, 0, NULL
, NULL
}
2275 xstormy16_init_builtins (void)
2277 tree args
[2], ret_type
, arg
= NULL_TREE
, ftype
;
2280 ret_type
= void_type_node
;
2282 for (i
= 0; s16builtins
[i
].name
; i
++)
2284 n_args
= strlen (s16builtins
[i
].arg_types
) - 1;
2286 gcc_assert (n_args
<= (int) ARRAY_SIZE (args
));
2288 for (a
= n_args
- 1; a
>= 0; a
--)
2289 args
[a
] = NULL_TREE
;
2291 for (a
= n_args
; a
>= 0; a
--)
2293 switch (s16builtins
[i
].arg_types
[a
])
2295 case 's': arg
= short_integer_type_node
; break;
2296 case 'S': arg
= short_unsigned_type_node
; break;
2297 case 'l': arg
= long_integer_type_node
; break;
2298 case 'L': arg
= long_unsigned_type_node
; break;
2299 default: gcc_unreachable ();
2306 ftype
= build_function_type_list (ret_type
, args
[0], args
[1], NULL_TREE
);
2307 add_builtin_function (s16builtins
[i
].name
, ftype
,
2308 i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2313 xstormy16_expand_builtin (tree exp
, rtx target
,
2314 rtx subtarget ATTRIBUTE_UNUSED
,
2315 machine_mode mode ATTRIBUTE_UNUSED
,
2316 int ignore ATTRIBUTE_UNUSED
)
2318 rtx op
[10], args
[10], pat
, copyto
[10], retval
= 0;
2319 tree fndecl
, argtree
;
2322 fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
2323 argtree
= TREE_OPERAND (exp
, 1);
2324 i
= DECL_FUNCTION_CODE (fndecl
);
2325 code
= s16builtins
[i
].md_code
;
2327 for (a
= 0; a
< 10 && argtree
; a
++)
2329 args
[a
] = expand_normal (TREE_VALUE (argtree
));
2330 argtree
= TREE_CHAIN (argtree
);
2333 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2335 char ao
= s16builtins
[i
].arg_ops
[o
];
2336 char c
= insn_data
[code
].operand
[o
].constraint
[0];
2341 omode
= (machine_mode
) insn_data
[code
].operand
[o
].mode
;
2343 op
[o
] = target
? target
: gen_reg_rtx (omode
);
2345 op
[o
] = gen_reg_rtx (omode
);
2347 op
[o
] = args
[(int) hex_value (ao
)];
2349 if (! (*insn_data
[code
].operand
[o
].predicate
) (op
[o
], GET_MODE (op
[o
])))
2351 if (c
== '+' || c
== '=')
2354 op
[o
] = gen_reg_rtx (omode
);
2357 op
[o
] = copy_to_mode_reg (omode
, op
[o
]);
2364 pat
= GEN_FCN (code
) (op
[0], op
[1], op
[2], op
[3], op
[4],
2365 op
[5], op
[6], op
[7], op
[8], op
[9]);
2368 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2371 emit_move_insn (copyto
[o
], op
[o
]);
2372 if (op
[o
] == retval
)
2379 /* Look for combinations of insns that can be converted to BN or BP
2380 opcodes. This is, unfortunately, too complex to do with MD
2384 combine_bnp (rtx_insn
*insn
)
2386 int insn_code
, regno
, need_extend
;
2388 rtx cond
, reg
, qireg
, mem
;
2389 rtx_insn
*and_insn
, *load
;
2390 machine_mode load_mode
= QImode
;
2391 machine_mode and_mode
= QImode
;
2392 rtx_insn
*shift
= NULL
;
2394 insn_code
= recog_memoized (insn
);
2395 if (insn_code
!= CODE_FOR_cbranchhi
2396 && insn_code
!= CODE_FOR_cbranchhi_neg
)
2399 cond
= XVECEXP (PATTERN (insn
), 0, 0); /* set */
2400 cond
= XEXP (cond
, 1); /* if */
2401 cond
= XEXP (cond
, 0); /* cond */
2402 switch (GET_CODE (cond
))
2416 reg
= XEXP (cond
, 0);
2419 regno
= REGNO (reg
);
2420 if (XEXP (cond
, 1) != const0_rtx
)
2422 if (! find_regno_note (insn
, REG_DEAD
, regno
))
2424 qireg
= gen_rtx_REG (QImode
, regno
);
2428 /* LT and GE conditionals should have a sign extend before
2430 for (and_insn
= prev_real_insn (insn
);
2431 and_insn
!= NULL_RTX
;
2432 and_insn
= prev_real_insn (and_insn
))
2434 int and_code
= recog_memoized (and_insn
);
2436 if (and_code
== CODE_FOR_extendqihi2
2437 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2438 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), qireg
))
2441 if (and_code
== CODE_FOR_movhi_internal
2442 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
))
2444 /* This is for testing bit 15. */
2449 if (reg_mentioned_p (reg
, and_insn
))
2452 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2458 /* EQ and NE conditionals have an AND before them. */
2459 for (and_insn
= prev_real_insn (insn
);
2460 and_insn
!= NULL_RTX
;
2461 and_insn
= prev_real_insn (and_insn
))
2463 if (recog_memoized (and_insn
) == CODE_FOR_andhi3
2464 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2465 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), reg
))
2468 if (reg_mentioned_p (reg
, and_insn
))
2471 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2477 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2478 followed by an AND like this:
2480 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2481 (clobber (reg:BI carry))]
2483 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2485 Attempt to detect this here. */
2486 for (shift
= prev_real_insn (and_insn
); shift
;
2487 shift
= prev_real_insn (shift
))
2489 if (recog_memoized (shift
) == CODE_FOR_lshrhi3
2490 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift
), 0, 0)), reg
)
2491 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 0), reg
))
2494 if (reg_mentioned_p (reg
, shift
)
2495 || (! NOTE_P (shift
) && ! NONJUMP_INSN_P (shift
)))
2504 if (and_insn
== NULL_RTX
)
2507 for (load
= shift
? prev_real_insn (shift
) : prev_real_insn (and_insn
);
2509 load
= prev_real_insn (load
))
2511 int load_code
= recog_memoized (load
);
2513 if (load_code
== CODE_FOR_movhi_internal
2514 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2515 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), HImode
)
2516 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load
))))
2522 if (load_code
== CODE_FOR_movqi_internal
2523 && rtx_equal_p (SET_DEST (PATTERN (load
)), qireg
)
2524 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), QImode
))
2530 if (load_code
== CODE_FOR_zero_extendqihi2
2531 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2532 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load
)), 0), QImode
))
2539 if (reg_mentioned_p (reg
, load
))
2542 if (! NOTE_P (load
) && ! NONJUMP_INSN_P (load
))
2548 mem
= SET_SRC (PATTERN (load
));
2552 mask
= (load_mode
== HImode
) ? 0x8000 : 0x80;
2554 /* If the mem includes a zero-extend operation and we are
2555 going to generate a sign-extend operation then move the
2556 mem inside the zero-extend. */
2557 if (GET_CODE (mem
) == ZERO_EXTEND
)
2558 mem
= XEXP (mem
, 0);
2562 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn
)), 1),
2566 mask
= (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn
)), 1));
2569 mask
<<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 1));
2572 if (load_mode
== HImode
)
2574 rtx addr
= XEXP (mem
, 0);
2576 if (! (mask
& 0xff))
2578 addr
= plus_constant (Pmode
, addr
, 1);
2581 mem
= gen_rtx_MEM (QImode
, addr
);
2585 XEXP (cond
, 0) = gen_rtx_SIGN_EXTEND (HImode
, mem
);
2587 XEXP (cond
, 0) = gen_rtx_AND (and_mode
, mem
, GEN_INT (mask
));
2589 INSN_CODE (insn
) = -1;
2592 if (and_insn
!= insn
)
2593 delete_insn (and_insn
);
2595 if (shift
!= NULL_RTX
)
2596 delete_insn (shift
);
2600 xstormy16_reorg (void)
2604 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2606 if (! JUMP_P (insn
))
2612 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2615 xstormy16_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2617 const HOST_WIDE_INT size
= int_size_in_bytes (type
);
2618 return (size
== -1 || size
> UNITS_PER_WORD
* NUM_ARGUMENT_REGISTERS
);
2621 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2624 xstormy16_hard_regno_mode_ok (unsigned int regno
, machine_mode mode
)
2626 return regno
!= 16 || mode
== BImode
;
2629 /* Implement TARGET_MODES_TIEABLE_P. */
2632 xstormy16_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
2634 return mode1
!= BImode
&& mode2
!= BImode
;
2637 #undef TARGET_ASM_ALIGNED_HI_OP
2638 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2639 #undef TARGET_ASM_ALIGNED_SI_OP
2640 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2641 #undef TARGET_ENCODE_SECTION_INFO
2642 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2644 /* Select_section doesn't handle .bss_below100. */
2645 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2646 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2648 #undef TARGET_ASM_OUTPUT_MI_THUNK
2649 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2650 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2651 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2653 #undef TARGET_PRINT_OPERAND
2654 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2655 #undef TARGET_PRINT_OPERAND_ADDRESS
2656 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2658 #undef TARGET_MEMORY_MOVE_COST
2659 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2660 #undef TARGET_RTX_COSTS
2661 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2662 #undef TARGET_ADDRESS_COST
2663 #define TARGET_ADDRESS_COST xstormy16_address_cost
2665 #undef TARGET_BUILD_BUILTIN_VA_LIST
2666 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2667 #undef TARGET_EXPAND_BUILTIN_VA_START
2668 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2669 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2670 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2672 #undef TARGET_PROMOTE_FUNCTION_MODE
2673 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2674 #undef TARGET_PROMOTE_PROTOTYPES
2675 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2677 #undef TARGET_FUNCTION_ARG
2678 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2679 #undef TARGET_FUNCTION_ARG_ADVANCE
2680 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2682 #undef TARGET_RETURN_IN_MEMORY
2683 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2684 #undef TARGET_FUNCTION_VALUE
2685 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2686 #undef TARGET_LIBCALL_VALUE
2687 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2688 #undef TARGET_FUNCTION_VALUE_REGNO_P
2689 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2691 #undef TARGET_MACHINE_DEPENDENT_REORG
2692 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2694 #undef TARGET_PREFERRED_RELOAD_CLASS
2695 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2696 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2697 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2700 #define TARGET_LRA_P hook_bool_void_false
2702 #undef TARGET_LEGITIMATE_ADDRESS_P
2703 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2704 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2705 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2707 #undef TARGET_CAN_ELIMINATE
2708 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2710 #undef TARGET_TRAMPOLINE_INIT
2711 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2713 #undef TARGET_HARD_REGNO_MODE_OK
2714 #define TARGET_HARD_REGNO_MODE_OK xstormy16_hard_regno_mode_ok
2715 #undef TARGET_MODES_TIEABLE_P
2716 #define TARGET_MODES_TIEABLE_P xstormy16_modes_tieable_p
2718 #undef TARGET_CONSTANT_ALIGNMENT
2719 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
2721 struct gcc_target targetm
= TARGET_INITIALIZER
;
2723 #include "gt-stormy16.h"