1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
35 #include "diagnostic-core.h"
38 #include "stringpool.h"
39 #include "stor-layout.h"
43 #include "insn-codes.h"
53 #include "target-def.h"
55 #include "langhooks.h"
56 #include "hash-table.h"
59 #include "dominance.h"
65 #include "cfgcleanup.h"
66 #include "basic-block.h"
67 #include "tree-ssa-alias.h"
68 #include "internal-fn.h"
69 #include "gimple-fold.h"
71 #include "gimple-expr.h"
79 static rtx
emit_addhi3_postreload (rtx
, rtx
, rtx
);
80 static void xstormy16_asm_out_constructor (rtx
, int);
81 static void xstormy16_asm_out_destructor (rtx
, int);
82 static void xstormy16_asm_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
85 static void xstormy16_init_builtins (void);
86 static rtx
xstormy16_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
87 static bool xstormy16_rtx_costs (rtx
, int, int, int, int *, bool);
88 static int xstormy16_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
89 static bool xstormy16_return_in_memory (const_tree
, const_tree
);
91 static GTY(()) section
*bss100_section
;
93 /* Compute a (partial) cost for rtx X. Return true if the complete
94 cost has been computed, and false if subexpressions should be
95 scanned. In either case, *TOTAL contains the cost result. */
98 xstormy16_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
99 int opno ATTRIBUTE_UNUSED
, int *total
,
100 bool speed ATTRIBUTE_UNUSED
)
105 if (INTVAL (x
) < 16 && INTVAL (x
) >= 0)
106 *total
= COSTS_N_INSNS (1) / 2;
107 else if (INTVAL (x
) < 256 && INTVAL (x
) >= 0)
108 *total
= COSTS_N_INSNS (1);
110 *total
= COSTS_N_INSNS (2);
117 *total
= COSTS_N_INSNS (2);
121 *total
= COSTS_N_INSNS (35 + 6);
124 *total
= COSTS_N_INSNS (51 - 6);
133 xstormy16_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
134 addr_space_t as ATTRIBUTE_UNUSED
,
135 bool speed ATTRIBUTE_UNUSED
)
137 return (CONST_INT_P (x
) ? 2
138 : GET_CODE (x
) == PLUS
? 7
142 /* Worker function for TARGET_MEMORY_MOVE_COST. */
145 xstormy16_memory_move_cost (machine_mode mode
, reg_class_t rclass
,
148 return (5 + memory_move_secondary_cost (mode
, rclass
, in
));
151 /* Branches are handled as follows:
153 1. HImode compare-and-branches. The machine supports these
154 natively, so the appropriate pattern is emitted directly.
156 2. SImode EQ and NE. These are emitted as pairs of HImode
157 compare-and-branches.
159 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
160 of a SImode subtract followed by a branch (not a compare-and-branch),
166 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
173 /* Emit a branch of kind CODE to location LOC. */
176 xstormy16_emit_cbranch (enum rtx_code code
, rtx op0
, rtx op1
, rtx loc
)
178 rtx condition_rtx
, loc_ref
, branch
, cy_clobber
;
182 mode
= GET_MODE (op0
);
183 gcc_assert (mode
== HImode
|| mode
== SImode
);
186 && (code
== GT
|| code
== LE
|| code
== GTU
|| code
== LEU
))
188 int unsigned_p
= (code
== GTU
|| code
== LEU
);
189 int gt_p
= (code
== GT
|| code
== GTU
);
193 lab
= gen_label_rtx ();
194 xstormy16_emit_cbranch (unsigned_p
? LTU
: LT
, op0
, op1
, gt_p
? lab
: loc
);
195 /* This should be generated as a comparison against the temporary
196 created by the previous insn, but reload can't handle that. */
197 xstormy16_emit_cbranch (gt_p
? NE
: EQ
, op0
, op1
, loc
);
202 else if (mode
== SImode
203 && (code
== NE
|| code
== EQ
)
204 && op1
!= const0_rtx
)
206 rtx op0_word
, op1_word
;
208 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
212 lab
= gen_label_rtx ();
214 for (i
= 0; i
< num_words
- 1; i
++)
216 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
218 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
220 xstormy16_emit_cbranch (NE
, op0_word
, op1_word
, code
== EQ
? lab
: loc
);
222 op0_word
= simplify_gen_subreg (word_mode
, op0
, mode
,
224 op1_word
= simplify_gen_subreg (word_mode
, op1
, mode
,
226 xstormy16_emit_cbranch (code
, op0_word
, op1_word
, loc
);
233 /* We can't allow reload to try to generate any reload after a branch,
234 so when some register must match we must make the temporary ourselves. */
238 tmp
= gen_reg_rtx (mode
);
239 emit_move_insn (tmp
, op0
);
243 condition_rtx
= gen_rtx_fmt_ee (code
, mode
, op0
, op1
);
244 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
245 branch
= gen_rtx_SET (VOIDmode
, pc_rtx
,
246 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
249 cy_clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
252 vec
= gen_rtvec (2, branch
, cy_clobber
);
253 else if (code
== NE
|| code
== EQ
)
254 vec
= gen_rtvec (2, branch
, gen_rtx_CLOBBER (VOIDmode
, op0
));
259 sub
= gen_rtx_SET (VOIDmode
, op0
, gen_rtx_MINUS (SImode
, op0
, op1
));
261 sub
= gen_rtx_CLOBBER (SImode
, op0
);
263 vec
= gen_rtvec (3, branch
, sub
, cy_clobber
);
266 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, vec
));
269 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
270 the arithmetic operation. Most of the work is done by
271 xstormy16_expand_arith. */
274 xstormy16_split_cbranch (machine_mode mode
, rtx label
, rtx comparison
,
277 rtx op0
= XEXP (comparison
, 0);
278 rtx op1
= XEXP (comparison
, 1);
279 rtx_insn
*seq
, *last_insn
;
283 xstormy16_expand_arith (mode
, COMPARE
, dest
, op0
, op1
);
287 gcc_assert (INSN_P (seq
));
290 while (NEXT_INSN (last_insn
) != NULL_RTX
)
291 last_insn
= NEXT_INSN (last_insn
);
293 compare
= SET_SRC (XVECEXP (PATTERN (last_insn
), 0, 0));
294 PUT_CODE (XEXP (compare
, 0), GET_CODE (comparison
));
295 XEXP (compare
, 1) = gen_rtx_LABEL_REF (VOIDmode
, label
);
300 /* Return the string to output a conditional branch to LABEL, which is
301 the operand number of the label.
303 OP is the conditional expression, or NULL for branch-always.
305 REVERSED is nonzero if we should reverse the sense of the comparison.
310 xstormy16_output_cbranch_hi (rtx op
, const char *label
, int reversed
,
313 static char string
[64];
314 int need_longbranch
= (op
!= NULL_RTX
315 ? get_attr_length (insn
) == 8
316 : get_attr_length (insn
) == 4);
317 int really_reversed
= reversed
^ need_longbranch
;
320 const char *operands
;
329 sprintf (string
, "%s %s", ccode
, label
);
333 code
= GET_CODE (op
);
335 if (! REG_P (XEXP (op
, 0)))
337 code
= swap_condition (code
);
343 /* Work out which way this really branches. */
345 code
= reverse_condition (code
);
349 case EQ
: ccode
= "z"; break;
350 case NE
: ccode
= "nz"; break;
351 case GE
: ccode
= "ge"; break;
352 case LT
: ccode
= "lt"; break;
353 case GT
: ccode
= "gt"; break;
354 case LE
: ccode
= "le"; break;
355 case GEU
: ccode
= "nc"; break;
356 case LTU
: ccode
= "c"; break;
357 case GTU
: ccode
= "hi"; break;
358 case LEU
: ccode
= "ls"; break;
365 templ
= "b%s %s,.+8 | jmpf %s";
368 sprintf (string
, templ
, ccode
, operands
, label
);
373 /* Return the string to output a conditional branch to LABEL, which is
374 the operand number of the label, but suitable for the tail of a
377 OP is the conditional expression (OP is never NULL_RTX).
379 REVERSED is nonzero if we should reverse the sense of the comparison.
384 xstormy16_output_cbranch_si (rtx op
, const char *label
, int reversed
,
387 static char string
[64];
388 int need_longbranch
= get_attr_length (insn
) >= 8;
389 int really_reversed
= reversed
^ need_longbranch
;
395 code
= GET_CODE (op
);
397 /* Work out which way this really branches. */
399 code
= reverse_condition (code
);
403 case EQ
: ccode
= "z"; break;
404 case NE
: ccode
= "nz"; break;
405 case GE
: ccode
= "ge"; break;
406 case LT
: ccode
= "lt"; break;
407 case GEU
: ccode
= "nc"; break;
408 case LTU
: ccode
= "c"; break;
410 /* The missing codes above should never be generated. */
421 gcc_assert (REG_P (XEXP (op
, 0)));
423 regnum
= REGNO (XEXP (op
, 0));
424 sprintf (prevop
, "or %s,%s", reg_names
[regnum
], reg_names
[regnum
+1]);
428 case GE
: case LT
: case GEU
: case LTU
:
429 strcpy (prevop
, "sbc %2,%3");
437 templ
= "%s | b%s .+6 | jmpf %s";
439 templ
= "%s | b%s %s";
440 sprintf (string
, templ
, prevop
, ccode
, label
);
445 /* Many machines have some registers that cannot be copied directly to or from
446 memory or even from other types of registers. An example is the `MQ'
447 register, which on most machines, can only be copied to or from general
448 registers, but not memory. Some machines allow copying all registers to and
449 from memory, but require a scratch register for stores to some memory
450 locations (e.g., those with symbolic address on the RT, and those with
451 certain symbolic address on the SPARC when compiling PIC). In some cases,
452 both an intermediate and a scratch register are required.
454 You should define these macros to indicate to the reload phase that it may
455 need to allocate at least one register for a reload in addition to the
456 register to contain the data. Specifically, if copying X to a register
457 RCLASS in MODE requires an intermediate register, you should define
458 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
459 whose registers can be used as intermediate registers or scratch registers.
461 If copying a register RCLASS in MODE to X requires an intermediate or scratch
462 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
463 largest register class required. If the requirements for input and output
464 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
465 instead of defining both macros identically.
467 The values returned by these macros are often `GENERAL_REGS'. Return
468 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
469 to or from a register of RCLASS in MODE without requiring a scratch register.
470 Do not define this macro if it would always return `NO_REGS'.
472 If a scratch register is required (either with or without an intermediate
473 register), you should define patterns for `reload_inM' or `reload_outM', as
474 required.. These patterns, which will normally be implemented with a
475 `define_expand', should be similar to the `movM' patterns, except that
476 operand 2 is the scratch register.
478 Define constraints for the reload register and scratch register that contain
479 a single register class. If the original reload register (whose class is
480 RCLASS) can meet the constraint given in the pattern, the value returned by
481 these macros is used for the class of the scratch register. Otherwise, two
482 additional reload registers are required. Their classes are obtained from
483 the constraints in the insn pattern.
485 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
486 either be in a hard register or in memory. Use `true_regnum' to find out;
487 it will return -1 if the pseudo is in memory and the hard register number if
490 These macros should not be used in the case where a particular class of
491 registers can only be copied to memory and not to another class of
492 registers. In that case, secondary reload registers are not needed and
493 would not be helpful. Instead, a stack location must be used to perform the
494 copy and the `movM' pattern should use memory as an intermediate storage.
495 This case often occurs between floating-point and general registers. */
498 xstormy16_secondary_reload_class (enum reg_class rclass
,
499 machine_mode mode ATTRIBUTE_UNUSED
,
502 /* This chip has the interesting property that only the first eight
503 registers can be moved to/from memory. */
505 || ((GET_CODE (x
) == SUBREG
|| REG_P (x
))
506 && (true_regnum (x
) == -1
507 || true_regnum (x
) >= FIRST_PSEUDO_REGISTER
)))
508 && ! reg_class_subset_p (rclass
, EIGHT_REGS
))
514 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
515 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
518 xstormy16_preferred_reload_class (rtx x
, reg_class_t rclass
)
520 if (rclass
== GENERAL_REGS
&& MEM_P (x
))
526 /* Predicate for symbols and addresses that reflect special 8-bit
530 xstormy16_below100_symbol (rtx x
,
531 machine_mode mode ATTRIBUTE_UNUSED
)
533 if (GET_CODE (x
) == CONST
)
535 if (GET_CODE (x
) == PLUS
&& CONST_INT_P (XEXP (x
, 1)))
538 if (GET_CODE (x
) == SYMBOL_REF
)
539 return (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_XSTORMY16_BELOW100
) != 0;
543 HOST_WIDE_INT i
= INTVAL (x
);
545 if ((i
>= 0x0000 && i
<= 0x00ff)
546 || (i
>= 0x7f00 && i
<= 0x7fff))
552 /* Likewise, but only for non-volatile MEMs, for patterns where the
553 MEM will get split into smaller sized accesses. */
556 xstormy16_splittable_below100_operand (rtx x
, machine_mode mode
)
558 if (MEM_P (x
) && MEM_VOLATILE_P (x
))
560 return xstormy16_below100_operand (x
, mode
);
563 /* Expand an 8-bit IOR. This either detects the one case we can
564 actually do, or uses a 16-bit IOR. */
567 xstormy16_expand_iorqi3 (rtx
*operands
)
569 rtx in
, out
, outsub
, val
;
575 if (xstormy16_onebit_set_operand (val
, QImode
))
577 if (!xstormy16_below100_or_register (in
, QImode
))
578 in
= copy_to_mode_reg (QImode
, in
);
579 if (!xstormy16_below100_or_register (out
, QImode
))
580 out
= gen_reg_rtx (QImode
);
581 emit_insn (gen_iorqi3_internal (out
, in
, val
));
582 if (out
!= operands
[0])
583 emit_move_insn (operands
[0], out
);
588 in
= copy_to_mode_reg (QImode
, in
);
590 if (! REG_P (val
) && ! CONST_INT_P (val
))
591 val
= copy_to_mode_reg (QImode
, val
);
594 out
= gen_reg_rtx (QImode
);
596 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
597 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
599 if (! CONST_INT_P (val
))
600 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
602 emit_insn (gen_iorhi3 (outsub
, in
, val
));
604 if (out
!= operands
[0])
605 emit_move_insn (operands
[0], out
);
608 /* Expand an 8-bit AND. This either detects the one case we can
609 actually do, or uses a 16-bit AND. */
612 xstormy16_expand_andqi3 (rtx
*operands
)
614 rtx in
, out
, outsub
, val
;
620 if (xstormy16_onebit_clr_operand (val
, QImode
))
622 if (!xstormy16_below100_or_register (in
, QImode
))
623 in
= copy_to_mode_reg (QImode
, in
);
624 if (!xstormy16_below100_or_register (out
, QImode
))
625 out
= gen_reg_rtx (QImode
);
626 emit_insn (gen_andqi3_internal (out
, in
, val
));
627 if (out
!= operands
[0])
628 emit_move_insn (operands
[0], out
);
633 in
= copy_to_mode_reg (QImode
, in
);
635 if (! REG_P (val
) && ! CONST_INT_P (val
))
636 val
= copy_to_mode_reg (QImode
, val
);
639 out
= gen_reg_rtx (QImode
);
641 in
= simplify_gen_subreg (HImode
, in
, QImode
, 0);
642 outsub
= simplify_gen_subreg (HImode
, out
, QImode
, 0);
644 if (! CONST_INT_P (val
))
645 val
= simplify_gen_subreg (HImode
, val
, QImode
, 0);
647 emit_insn (gen_andhi3 (outsub
, in
, val
));
649 if (out
!= operands
[0])
650 emit_move_insn (operands
[0], out
);
653 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
655 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
657 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
659 && INTVAL (X) + (OFFSET) >= 0 \
660 && INTVAL (X) + (OFFSET) < 0x8000 \
661 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
664 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
667 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0))
670 if (GET_CODE (x
) == PLUS
671 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0))
674 /* PR 31232: Do not allow INT+INT as an address. */
679 if ((GET_CODE (x
) == PRE_MODIFY
&& CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
680 || GET_CODE (x
) == POST_INC
681 || GET_CODE (x
) == PRE_DEC
)
685 && REGNO_OK_FOR_BASE_P (REGNO (x
))
686 && (! strict
|| REGNO (x
) < FIRST_PSEUDO_REGISTER
))
689 if (xstormy16_below100_symbol (x
, mode
))
695 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
697 On this chip, this is true if the address is valid with an offset
698 of 0 but not of 6, because in that case it cannot be used as an
699 address for DImode or DFmode, or if the address is a post-increment
700 or pre-decrement address. */
703 xstormy16_mode_dependent_address_p (const_rtx x
,
704 addr_space_t as ATTRIBUTE_UNUSED
)
706 if (LEGITIMATE_ADDRESS_CONST_INT_P (x
, 0)
707 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x
, 6))
710 if (GET_CODE (x
) == PLUS
711 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 0)
712 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x
, 1), 6))
715 /* Auto-increment addresses are now treated generically in recog.c. */
720 short_memory_operand (rtx x
, machine_mode mode
)
722 if (! memory_operand (x
, mode
))
724 return (GET_CODE (XEXP (x
, 0)) != PLUS
);
727 /* Splitter for the 'move' patterns, for modes not directly implemented
728 by hardware. Emit insns to copy a value of mode MODE from SRC to
731 This function is only called when reload_completed. */
734 xstormy16_split_move (machine_mode mode
, rtx dest
, rtx src
)
736 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
737 int direction
, end
, i
;
738 int src_modifies
= 0;
739 int dest_modifies
= 0;
740 int src_volatile
= 0;
741 int dest_volatile
= 0;
743 rtx auto_inc_reg_rtx
= NULL_RTX
;
745 /* Check initial conditions. */
746 gcc_assert (reload_completed
747 && mode
!= QImode
&& mode
!= HImode
748 && nonimmediate_operand (dest
, mode
)
749 && general_operand (src
, mode
));
751 /* This case is not supported below, and shouldn't be generated. */
752 gcc_assert (! MEM_P (dest
) || ! MEM_P (src
));
754 /* This case is very very bad after reload, so trap it now. */
755 gcc_assert (GET_CODE (dest
) != SUBREG
&& GET_CODE (src
) != SUBREG
);
757 /* The general idea is to copy by words, offsetting the source and
758 destination. Normally the least-significant word will be copied
759 first, but for pre-dec operations it's better to copy the
760 most-significant word first. Only one operand can be a pre-dec
763 It's also possible that the copy overlaps so that the direction
769 mem_operand
= XEXP (dest
, 0);
770 dest_modifies
= side_effects_p (mem_operand
);
771 if (auto_inc_p (mem_operand
))
772 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
773 dest_volatile
= MEM_VOLATILE_P (dest
);
776 dest
= copy_rtx (dest
);
777 MEM_VOLATILE_P (dest
) = 0;
780 else if (MEM_P (src
))
782 mem_operand
= XEXP (src
, 0);
783 src_modifies
= side_effects_p (mem_operand
);
784 if (auto_inc_p (mem_operand
))
785 auto_inc_reg_rtx
= XEXP (mem_operand
, 0);
786 src_volatile
= MEM_VOLATILE_P (src
);
789 src
= copy_rtx (src
);
790 MEM_VOLATILE_P (src
) = 0;
794 mem_operand
= NULL_RTX
;
796 if (mem_operand
== NULL_RTX
)
800 && reg_overlap_mentioned_p (dest
, src
)
801 && REGNO (dest
) > REGNO (src
))
804 else if (GET_CODE (mem_operand
) == PRE_DEC
805 || (GET_CODE (mem_operand
) == PLUS
806 && GET_CODE (XEXP (mem_operand
, 0)) == PRE_DEC
))
808 else if (MEM_P (src
) && reg_overlap_mentioned_p (dest
, src
))
812 gcc_assert (REG_P (dest
));
813 regno
= REGNO (dest
);
815 gcc_assert (refers_to_regno_p (regno
, regno
+ num_words
,
818 if (refers_to_regno_p (regno
, regno
+ 1, mem_operand
, 0))
820 else if (refers_to_regno_p (regno
+ num_words
- 1, regno
+ num_words
,
824 /* This means something like
825 (set (reg:DI r0) (mem:DI (reg:HI r1)))
826 which we'd need to support by doing the set of the second word
831 end
= direction
< 0 ? -1 : num_words
;
832 for (i
= direction
< 0 ? num_words
- 1 : 0; i
!= end
; i
+= direction
)
834 rtx w_src
, w_dest
, insn
;
837 w_src
= gen_rtx_MEM (word_mode
, mem_operand
);
839 w_src
= simplify_gen_subreg (word_mode
, src
, mode
, i
* UNITS_PER_WORD
);
841 MEM_VOLATILE_P (w_src
) = 1;
843 w_dest
= gen_rtx_MEM (word_mode
, mem_operand
);
845 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
,
848 MEM_VOLATILE_P (w_dest
) = 1;
850 /* The simplify_subreg calls must always be able to simplify. */
851 gcc_assert (GET_CODE (w_src
) != SUBREG
852 && GET_CODE (w_dest
) != SUBREG
);
854 insn
= emit_insn (gen_rtx_SET (VOIDmode
, w_dest
, w_src
));
855 if (auto_inc_reg_rtx
)
856 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_INC
,
862 /* Expander for the 'move' patterns. Emit insns to copy a value of
863 mode MODE from SRC to DEST. */
866 xstormy16_expand_move (machine_mode mode
, rtx dest
, rtx src
)
868 if (MEM_P (dest
) && (GET_CODE (XEXP (dest
, 0)) == PRE_MODIFY
))
870 rtx pmv
= XEXP (dest
, 0);
871 rtx dest_reg
= XEXP (pmv
, 0);
872 rtx dest_mod
= XEXP (pmv
, 1);
873 rtx set
= gen_rtx_SET (Pmode
, dest_reg
, dest_mod
);
874 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
876 dest
= gen_rtx_MEM (mode
, dest_reg
);
877 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
879 else if (MEM_P (src
) && (GET_CODE (XEXP (src
, 0)) == PRE_MODIFY
))
881 rtx pmv
= XEXP (src
, 0);
882 rtx src_reg
= XEXP (pmv
, 0);
883 rtx src_mod
= XEXP (pmv
, 1);
884 rtx set
= gen_rtx_SET (Pmode
, src_reg
, src_mod
);
885 rtx clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
887 src
= gen_rtx_MEM (mode
, src_reg
);
888 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
891 /* There are only limited immediate-to-memory move instructions. */
892 if (! reload_in_progress
893 && ! reload_completed
895 && (! CONST_INT_P (XEXP (dest
, 0))
896 || ! xstormy16_legitimate_address_p (mode
, XEXP (dest
, 0), 0))
897 && ! xstormy16_below100_operand (dest
, mode
)
899 && GET_CODE (src
) != SUBREG
)
900 src
= copy_to_mode_reg (mode
, src
);
902 /* Don't emit something we would immediately split. */
904 && mode
!= HImode
&& mode
!= QImode
)
906 xstormy16_split_move (mode
, dest
, src
);
910 emit_insn (gen_rtx_SET (VOIDmode
, dest
, src
));
915 The stack is laid out as follows:
919 Register save area (up to 4 words)
920 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
922 AP-> Return address (two words)
923 9th procedure parameter word
924 10th procedure parameter word
926 last procedure parameter word
928 The frame pointer location is tuned to make it most likely that all
929 parameters and local variables can be accessed using a load-indexed
932 /* A structure to describe the layout. */
933 struct xstormy16_stack_layout
935 /* Size of the topmost three items on the stack. */
937 int register_save_size
;
938 int stdarg_save_size
;
939 /* Sum of the above items. */
941 /* Various offsets. */
942 int first_local_minus_ap
;
947 /* Does REGNO need to be saved? */
948 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
949 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
950 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
951 && (REGNUM != CARRY_REGNUM) \
952 && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
954 /* Compute the stack layout. */
956 struct xstormy16_stack_layout
957 xstormy16_compute_stack_layout (void)
959 struct xstormy16_stack_layout layout
;
961 const int ifun
= xstormy16_interrupt_function_p ();
963 layout
.locals_size
= get_frame_size ();
965 layout
.register_save_size
= 0;
966 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
967 if (REG_NEEDS_SAVE (regno
, ifun
))
968 layout
.register_save_size
+= UNITS_PER_WORD
;
971 layout
.stdarg_save_size
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
973 layout
.stdarg_save_size
= 0;
975 layout
.frame_size
= (layout
.locals_size
976 + layout
.register_save_size
977 + layout
.stdarg_save_size
);
979 if (crtl
->args
.size
<= 2048 && crtl
->args
.size
!= -1)
981 if (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
982 + crtl
->args
.size
<= 2048)
983 layout
.fp_minus_ap
= layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
;
985 layout
.fp_minus_ap
= 2048 - crtl
->args
.size
;
988 layout
.fp_minus_ap
= (layout
.stdarg_save_size
989 + layout
.register_save_size
990 - INCOMING_FRAME_SP_OFFSET
);
991 layout
.sp_minus_fp
= (layout
.frame_size
- INCOMING_FRAME_SP_OFFSET
992 - layout
.fp_minus_ap
);
993 layout
.first_local_minus_ap
= layout
.sp_minus_fp
- layout
.locals_size
;
997 /* Worker function for TARGET_CAN_ELIMINATE. */
1000 xstormy16_can_eliminate (const int from
, const int to
)
1002 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
1003 ? ! frame_pointer_needed
1007 /* Determine how all the special registers get eliminated. */
1010 xstormy16_initial_elimination_offset (int from
, int to
)
1012 struct xstormy16_stack_layout layout
;
1015 layout
= xstormy16_compute_stack_layout ();
1017 if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
1018 result
= layout
.sp_minus_fp
- layout
.locals_size
;
1019 else if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1020 result
= - layout
.locals_size
;
1021 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
1022 result
= - layout
.fp_minus_ap
;
1023 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1024 result
= - (layout
.sp_minus_fp
+ layout
.fp_minus_ap
);
1032 emit_addhi3_postreload (rtx dest
, rtx src0
, rtx src1
)
1034 rtx set
, clobber
, insn
;
1036 set
= gen_rtx_SET (VOIDmode
, dest
, gen_rtx_PLUS (HImode
, src0
, src1
));
1037 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
1038 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, set
, clobber
)));
1042 /* Called after register allocation to add any instructions needed for
1043 the prologue. Using a prologue insn is favored compared to putting
1044 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1045 since it allows the scheduler to intermix instructions with the
1046 saves of the caller saved registers. In some cases, it might be
1047 necessary to emit a barrier instruction as the last insn to prevent
1050 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1051 so that the debug info generation code can handle them properly. */
1054 xstormy16_expand_prologue (void)
1056 struct xstormy16_stack_layout layout
;
1060 const int ifun
= xstormy16_interrupt_function_p ();
1062 mem_push_rtx
= gen_rtx_POST_INC (Pmode
, stack_pointer_rtx
);
1063 mem_push_rtx
= gen_rtx_MEM (HImode
, mem_push_rtx
);
1065 layout
= xstormy16_compute_stack_layout ();
1067 if (layout
.locals_size
>= 32768)
1068 error ("local variable memory requirements exceed capacity");
1070 if (flag_stack_usage_info
)
1071 current_function_static_stack_size
= layout
.frame_size
;
1073 /* Save the argument registers if necessary. */
1074 if (layout
.stdarg_save_size
)
1075 for (regno
= FIRST_ARGUMENT_REGISTER
;
1076 regno
< FIRST_ARGUMENT_REGISTER
+ NUM_ARGUMENT_REGISTERS
;
1080 rtx reg
= gen_rtx_REG (HImode
, regno
);
1082 insn
= emit_move_insn (mem_push_rtx
, reg
);
1083 RTX_FRAME_RELATED_P (insn
) = 1;
1085 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1087 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (VOIDmode
,
1088 gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1090 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (Pmode
, stack_pointer_rtx
,
1091 plus_constant (Pmode
,
1093 GET_MODE_SIZE (Pmode
)));
1094 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1095 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1096 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1099 /* Push each of the registers to save. */
1100 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1101 if (REG_NEEDS_SAVE (regno
, ifun
))
1104 rtx reg
= gen_rtx_REG (HImode
, regno
);
1106 insn
= emit_move_insn (mem_push_rtx
, reg
);
1107 RTX_FRAME_RELATED_P (insn
) = 1;
1109 dwarf
= gen_rtx_SEQUENCE (VOIDmode
, rtvec_alloc (2));
1111 XVECEXP (dwarf
, 0, 0) = gen_rtx_SET (VOIDmode
,
1112 gen_rtx_MEM (Pmode
, stack_pointer_rtx
),
1114 XVECEXP (dwarf
, 0, 1) = gen_rtx_SET (Pmode
, stack_pointer_rtx
,
1115 plus_constant (Pmode
,
1117 GET_MODE_SIZE (Pmode
)));
1118 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, dwarf
);
1119 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 0)) = 1;
1120 RTX_FRAME_RELATED_P (XVECEXP (dwarf
, 0, 1)) = 1;
1123 /* It's just possible that the SP here might be what we need for
1125 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1127 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1128 RTX_FRAME_RELATED_P (insn
) = 1;
1131 /* Allocate space for local variables. */
1132 if (layout
.locals_size
)
1134 insn
= emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1135 GEN_INT (layout
.locals_size
));
1136 RTX_FRAME_RELATED_P (insn
) = 1;
1139 /* Set up the frame pointer, if required. */
1140 if (frame_pointer_needed
&& layout
.sp_minus_fp
!= layout
.locals_size
)
1142 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1143 RTX_FRAME_RELATED_P (insn
) = 1;
1145 if (layout
.sp_minus_fp
)
1147 insn
= emit_addhi3_postreload (hard_frame_pointer_rtx
,
1148 hard_frame_pointer_rtx
,
1149 GEN_INT (- layout
.sp_minus_fp
));
1150 RTX_FRAME_RELATED_P (insn
) = 1;
1155 /* Do we need an epilogue at all? */
1158 direct_return (void)
1160 return (reload_completed
1161 && xstormy16_compute_stack_layout ().frame_size
== 0
1162 && ! xstormy16_interrupt_function_p ());
1165 /* Called after register allocation to add any instructions needed for
1166 the epilogue. Using an epilogue insn is favored compared to putting
1167 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1168 since it allows the scheduler to intermix instructions with the
1169 saves of the caller saved registers. In some cases, it might be
1170 necessary to emit a barrier instruction as the last insn to prevent
1174 xstormy16_expand_epilogue (void)
1176 struct xstormy16_stack_layout layout
;
1179 const int ifun
= xstormy16_interrupt_function_p ();
1181 mem_pop_rtx
= gen_rtx_PRE_DEC (Pmode
, stack_pointer_rtx
);
1182 mem_pop_rtx
= gen_rtx_MEM (HImode
, mem_pop_rtx
);
1184 layout
= xstormy16_compute_stack_layout ();
1186 /* Pop the stack for the locals. */
1187 if (layout
.locals_size
)
1189 if (frame_pointer_needed
&& layout
.sp_minus_fp
== layout
.locals_size
)
1190 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1192 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1193 GEN_INT (- layout
.locals_size
));
1196 /* Restore any call-saved registers. */
1197 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
1198 if (REG_NEEDS_SAVE (regno
, ifun
))
1199 emit_move_insn (gen_rtx_REG (HImode
, regno
), mem_pop_rtx
);
1201 /* Pop the stack for the stdarg save area. */
1202 if (layout
.stdarg_save_size
)
1203 emit_addhi3_postreload (stack_pointer_rtx
, stack_pointer_rtx
,
1204 GEN_INT (- layout
.stdarg_save_size
));
1208 emit_jump_insn (gen_return_internal_interrupt ());
1210 emit_jump_insn (gen_return_internal ());
1214 xstormy16_epilogue_uses (int regno
)
1216 if (reload_completed
&& call_used_regs
[regno
])
1218 const int ifun
= xstormy16_interrupt_function_p ();
1219 return REG_NEEDS_SAVE (regno
, ifun
);
1225 xstormy16_function_profiler (void)
1227 sorry ("function_profiler support");
1230 /* Update CUM to advance past an argument in the argument list. The
1231 values MODE, TYPE and NAMED describe that argument. Once this is
1232 done, the variable CUM is suitable for analyzing the *following*
1233 argument with `TARGET_FUNCTION_ARG', etc.
1235 This function need not do anything if the argument in question was
1236 passed on the stack. The compiler knows how to track the amount of
1237 stack space used for arguments without any special help. However,
1238 it makes life easier for xstormy16_build_va_list if it does update
1242 xstormy16_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
1243 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1245 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1247 /* If an argument would otherwise be passed partially in registers,
1248 and partially on the stack, the whole of it is passed on the
1250 if (*cum
< NUM_ARGUMENT_REGISTERS
1251 && *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1252 *cum
= NUM_ARGUMENT_REGISTERS
;
1254 *cum
+= XSTORMY16_WORD_SIZE (type
, mode
);
1258 xstormy16_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
1259 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1261 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1263 if (mode
== VOIDmode
)
1265 if (targetm
.calls
.must_pass_in_stack (mode
, type
)
1266 || *cum
+ XSTORMY16_WORD_SIZE (type
, mode
) > NUM_ARGUMENT_REGISTERS
)
1268 return gen_rtx_REG (mode
, *cum
+ FIRST_ARGUMENT_REGISTER
);
1271 /* Build the va_list type.
1273 For this chip, va_list is a record containing a counter and a pointer.
1274 The counter is of type 'int' and indicates how many bytes
1275 have been used to date. The pointer indicates the stack position
1276 for arguments that have not been passed in registers.
1277 To keep the layout nice, the pointer is first in the structure. */
1280 xstormy16_build_builtin_va_list (void)
1282 tree f_1
, f_2
, record
, type_decl
;
1284 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
1285 type_decl
= build_decl (BUILTINS_LOCATION
,
1286 TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
1288 f_1
= build_decl (BUILTINS_LOCATION
,
1289 FIELD_DECL
, get_identifier ("base"),
1291 f_2
= build_decl (BUILTINS_LOCATION
,
1292 FIELD_DECL
, get_identifier ("count"),
1293 unsigned_type_node
);
1295 DECL_FIELD_CONTEXT (f_1
) = record
;
1296 DECL_FIELD_CONTEXT (f_2
) = record
;
1298 TYPE_STUB_DECL (record
) = type_decl
;
1299 TYPE_NAME (record
) = type_decl
;
1300 TYPE_FIELDS (record
) = f_1
;
1301 DECL_CHAIN (f_1
) = f_2
;
1303 layout_type (record
);
1308 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1309 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1310 variable to initialize. NEXTARG is the machine independent notion of the
1311 'next' argument after the variable arguments. */
1314 xstormy16_expand_builtin_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
1316 tree f_base
, f_count
;
1320 if (xstormy16_interrupt_function_p ())
1321 error ("cannot use va_start in interrupt function");
1323 f_base
= TYPE_FIELDS (va_list_type_node
);
1324 f_count
= DECL_CHAIN (f_base
);
1326 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1327 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1330 t
= make_tree (TREE_TYPE (base
), virtual_incoming_args_rtx
);
1331 u
= build_int_cst (NULL_TREE
, - INCOMING_FRAME_SP_OFFSET
);
1332 u
= fold_convert (TREE_TYPE (count
), u
);
1333 t
= fold_build_pointer_plus (t
, u
);
1334 t
= build2 (MODIFY_EXPR
, TREE_TYPE (base
), base
, t
);
1335 TREE_SIDE_EFFECTS (t
) = 1;
1336 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1338 t
= build2 (MODIFY_EXPR
, TREE_TYPE (count
), count
,
1339 build_int_cst (NULL_TREE
,
1340 crtl
->args
.info
* UNITS_PER_WORD
));
1341 TREE_SIDE_EFFECTS (t
) = 1;
1342 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1345 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1346 of type va_list as a tree, TYPE is the type passed to va_arg.
1347 Note: This algorithm is documented in stormy-abi. */
1350 xstormy16_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
1351 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
1353 tree f_base
, f_count
;
1355 tree count_tmp
, addr
, t
;
1356 tree lab_gotaddr
, lab_fromstack
;
1357 int size
, size_of_reg_args
, must_stack
;
1360 f_base
= TYPE_FIELDS (va_list_type_node
);
1361 f_count
= DECL_CHAIN (f_base
);
1363 base
= build3 (COMPONENT_REF
, TREE_TYPE (f_base
), valist
, f_base
, NULL_TREE
);
1364 count
= build3 (COMPONENT_REF
, TREE_TYPE (f_count
), valist
, f_count
,
1367 must_stack
= targetm
.calls
.must_pass_in_stack (TYPE_MODE (type
), type
);
1368 size_tree
= round_up (size_in_bytes (type
), UNITS_PER_WORD
);
1369 gimplify_expr (&size_tree
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
1371 size_of_reg_args
= NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
;
1373 count_tmp
= get_initialized_tmp_var (count
, pre_p
, NULL
);
1374 lab_gotaddr
= create_artificial_label (UNKNOWN_LOCATION
);
1375 lab_fromstack
= create_artificial_label (UNKNOWN_LOCATION
);
1376 addr
= create_tmp_var (ptr_type_node
);
1382 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1383 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1384 r
= fold_convert (TREE_TYPE (count
), size_int (size_of_reg_args
));
1385 t
= build2 (GT_EXPR
, boolean_type_node
, t
, r
);
1386 t
= build3 (COND_EXPR
, void_type_node
, t
,
1387 build1 (GOTO_EXPR
, void_type_node
, lab_fromstack
),
1389 gimplify_and_add (t
, pre_p
);
1391 t
= fold_build_pointer_plus (base
, count_tmp
);
1392 gimplify_assign (addr
, t
, pre_p
);
1394 t
= build1 (GOTO_EXPR
, void_type_node
, lab_gotaddr
);
1395 gimplify_and_add (t
, pre_p
);
1397 t
= build1 (LABEL_EXPR
, void_type_node
, lab_fromstack
);
1398 gimplify_and_add (t
, pre_p
);
1401 /* Arguments larger than a word might need to skip over some
1402 registers, since arguments are either passed entirely in
1403 registers or entirely on the stack. */
1404 size
= PUSH_ROUNDING (int_size_in_bytes (type
));
1405 if (size
> 2 || size
< 0 || must_stack
)
1409 r
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
);
1410 u
= build2 (MODIFY_EXPR
, TREE_TYPE (count_tmp
), count_tmp
, r
);
1412 t
= fold_convert (TREE_TYPE (count
), r
);
1413 t
= build2 (GE_EXPR
, boolean_type_node
, count_tmp
, t
);
1414 t
= build3 (COND_EXPR
, void_type_node
, t
, NULL_TREE
, u
);
1415 gimplify_and_add (t
, pre_p
);
1418 t
= size_int (NUM_ARGUMENT_REGISTERS
* UNITS_PER_WORD
1419 + INCOMING_FRAME_SP_OFFSET
);
1420 t
= fold_convert (TREE_TYPE (count
), t
);
1421 t
= build2 (MINUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1422 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), t
,
1423 fold_convert (TREE_TYPE (count
), size_tree
));
1424 t
= fold_convert (TREE_TYPE (t
), fold (t
));
1425 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1426 t
= fold_build_pointer_plus (base
, t
);
1427 gimplify_assign (addr
, t
, pre_p
);
1429 t
= build1 (LABEL_EXPR
, void_type_node
, lab_gotaddr
);
1430 gimplify_and_add (t
, pre_p
);
1432 t
= fold_convert (TREE_TYPE (count
), size_tree
);
1433 t
= build2 (PLUS_EXPR
, TREE_TYPE (count
), count_tmp
, t
);
1434 gimplify_assign (count
, t
, pre_p
);
1436 addr
= fold_convert (build_pointer_type (type
), addr
);
1437 return build_va_arg_indirect_ref (addr
);
1440 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1443 xstormy16_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
1445 rtx temp
= gen_reg_rtx (HImode
);
1446 rtx reg_fnaddr
= gen_reg_rtx (HImode
);
1447 rtx reg_addr
, reg_addr_mem
;
1449 reg_addr
= copy_to_reg (XEXP (m_tramp
, 0));
1450 reg_addr_mem
= adjust_automodify_address (m_tramp
, HImode
, reg_addr
, 0);
1452 emit_move_insn (temp
, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM
));
1453 emit_move_insn (reg_addr_mem
, temp
);
1454 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1455 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1457 emit_move_insn (temp
, static_chain
);
1458 emit_move_insn (reg_addr_mem
, temp
);
1459 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1460 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1462 emit_move_insn (reg_fnaddr
, XEXP (DECL_RTL (fndecl
), 0));
1463 emit_move_insn (temp
, reg_fnaddr
);
1464 emit_insn (gen_andhi3 (temp
, temp
, GEN_INT (0xFF)));
1465 emit_insn (gen_iorhi3 (temp
, temp
, GEN_INT (0x0200)));
1466 emit_move_insn (reg_addr_mem
, temp
);
1467 emit_insn (gen_addhi3 (reg_addr
, reg_addr
, const2_rtx
));
1468 reg_addr_mem
= adjust_automodify_address (reg_addr_mem
, VOIDmode
, NULL
, 2);
1470 emit_insn (gen_lshrhi3 (reg_fnaddr
, reg_fnaddr
, GEN_INT (8)));
1471 emit_move_insn (reg_addr_mem
, reg_fnaddr
);
1474 /* Worker function for TARGET_FUNCTION_VALUE. */
1477 xstormy16_function_value (const_tree valtype
,
1478 const_tree func ATTRIBUTE_UNUSED
,
1479 bool outgoing ATTRIBUTE_UNUSED
)
1482 mode
= TYPE_MODE (valtype
);
1483 PROMOTE_MODE (mode
, 0, valtype
);
1484 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1487 /* Worker function for TARGET_LIBCALL_VALUE. */
1490 xstormy16_libcall_value (machine_mode mode
,
1491 const_rtx fun ATTRIBUTE_UNUSED
)
1493 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1496 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1499 xstormy16_function_value_regno_p (const unsigned int regno
)
1501 return (regno
== RETURN_VALUE_REGNUM
);
1504 /* A C compound statement that outputs the assembler code for a thunk function,
1505 used to implement C++ virtual function calls with multiple inheritance. The
1506 thunk acts as a wrapper around a virtual function, adjusting the implicit
1507 object parameter before handing control off to the real function.
1509 First, emit code to add the integer DELTA to the location that contains the
1510 incoming first argument. Assume that this argument contains a pointer, and
1511 is the one used to pass the `this' pointer in C++. This is the incoming
1512 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1513 addition must preserve the values of all other incoming arguments.
1515 After the addition, emit code to jump to FUNCTION, which is a
1516 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1517 the return address. Hence returning from FUNCTION will return to whoever
1518 called the current `thunk'.
1520 The effect must be as if @var{function} had been called directly
1521 with the adjusted first argument. This macro is responsible for
1522 emitting all of the code for a thunk function;
1523 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1526 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1527 extracted from it.) It might possibly be useful on some targets, but
1531 xstormy16_asm_output_mi_thunk (FILE *file
,
1532 tree thunk_fndecl ATTRIBUTE_UNUSED
,
1533 HOST_WIDE_INT delta
,
1534 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1537 int regnum
= FIRST_ARGUMENT_REGISTER
;
1539 /* There might be a hidden first argument for a returned structure. */
1540 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
1543 fprintf (file
, "\tadd %s,#0x%x\n", reg_names
[regnum
], (int) delta
& 0xFFFF);
1544 fputs ("\tjmpf ", file
);
1545 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1549 /* The purpose of this function is to override the default behavior of
1550 BSS objects. Normally, they go into .bss or .sbss via ".common"
1551 directives, but we need to override that and put them in
1552 .bss_below100. We can't just use a section override (like we do
1553 for .data_below100), because that makes them initialized rather
1554 than uninitialized. */
1557 xstormy16_asm_output_aligned_common (FILE *stream
,
1564 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
1569 && GET_CODE (symbol
= XEXP (mem
, 0)) == SYMBOL_REF
1570 && SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_XSTORMY16_BELOW100
)
1575 switch_to_section (bss100_section
);
1583 name2
= default_strip_name_encoding (name
);
1585 fprintf (stream
, "\t.globl\t%s\n", name2
);
1587 fprintf (stream
, "\t.p2align %d\n", p2align
);
1588 fprintf (stream
, "\t.type\t%s, @object\n", name2
);
1589 fprintf (stream
, "\t.size\t%s, %d\n", name2
, size
);
1590 fprintf (stream
, "%s:\n\t.space\t%d\n", name2
, size
);
1596 fprintf (stream
, "\t.local\t");
1597 assemble_name (stream
, name
);
1598 fprintf (stream
, "\n");
1600 fprintf (stream
, "\t.comm\t");
1601 assemble_name (stream
, name
);
1602 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
1605 /* Implement TARGET_ASM_INIT_SECTIONS. */
1608 xstormy16_asm_init_sections (void)
1611 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
1612 output_section_asm_op
,
1613 "\t.section \".bss_below100\",\"aw\",@nobits");
1616 /* Mark symbols with the "below100" attribute so that we can use the
1617 special addressing modes for them. */
1620 xstormy16_encode_section_info (tree decl
, rtx r
, int first
)
1622 default_encode_section_info (decl
, r
, first
);
1624 if (TREE_CODE (decl
) == VAR_DECL
1625 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl
))
1626 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl
))))
1628 rtx symbol
= XEXP (r
, 0);
1630 gcc_assert (GET_CODE (symbol
) == SYMBOL_REF
);
1631 SYMBOL_REF_FLAGS (symbol
) |= SYMBOL_FLAG_XSTORMY16_BELOW100
;
1635 #undef TARGET_ASM_CONSTRUCTOR
1636 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1637 #undef TARGET_ASM_DESTRUCTOR
1638 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1640 /* Output constructors and destructors. Just like
1641 default_named_section_asm_out_* but don't set the sections writable. */
1644 xstormy16_asm_out_destructor (rtx symbol
, int priority
)
1646 const char *section
= ".dtors";
1649 /* ??? This only works reliably with the GNU linker. */
1650 if (priority
!= DEFAULT_INIT_PRIORITY
)
1652 sprintf (buf
, ".dtors.%.5u",
1653 /* Invert the numbering so the linker puts us in the proper
1654 order; constructors are run from right to left, and the
1655 linker sorts in increasing order. */
1656 MAX_INIT_PRIORITY
- priority
);
1660 switch_to_section (get_section (section
, 0, NULL
));
1661 assemble_align (POINTER_SIZE
);
1662 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1666 xstormy16_asm_out_constructor (rtx symbol
, int priority
)
1668 const char *section
= ".ctors";
1671 /* ??? This only works reliably with the GNU linker. */
1672 if (priority
!= DEFAULT_INIT_PRIORITY
)
1674 sprintf (buf
, ".ctors.%.5u",
1675 /* Invert the numbering so the linker puts us in the proper
1676 order; constructors are run from right to left, and the
1677 linker sorts in increasing order. */
1678 MAX_INIT_PRIORITY
- priority
);
1682 switch_to_section (get_section (section
, 0, NULL
));
1683 assemble_align (POINTER_SIZE
);
1684 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
1687 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1689 Print a memory address as an operand to reference that memory location. */
1692 xstormy16_print_operand_address (FILE *file
, rtx address
)
1694 HOST_WIDE_INT offset
;
1695 int pre_dec
, post_inc
;
1697 /* There are a few easy cases. */
1698 if (CONST_INT_P (address
))
1700 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (address
) & 0xFFFF);
1704 if (CONSTANT_P (address
) || LABEL_P (address
))
1706 output_addr_const (file
, address
);
1710 /* Otherwise, it's hopefully something of the form
1711 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1712 if (GET_CODE (address
) == PLUS
)
1714 gcc_assert (CONST_INT_P (XEXP (address
, 1)));
1715 offset
= INTVAL (XEXP (address
, 1));
1716 address
= XEXP (address
, 0);
1721 pre_dec
= (GET_CODE (address
) == PRE_DEC
);
1722 post_inc
= (GET_CODE (address
) == POST_INC
);
1723 if (pre_dec
|| post_inc
)
1724 address
= XEXP (address
, 0);
1726 gcc_assert (REG_P (address
));
1731 fputs (reg_names
[REGNO (address
)], file
);
1735 fprintf (file
, "," HOST_WIDE_INT_PRINT_DEC
, offset
);
1739 /* Worker function for TARGET_PRINT_OPERAND.
1741 Print an operand to an assembler instruction. */
1744 xstormy16_print_operand (FILE *file
, rtx x
, int code
)
1749 /* There is either one bit set, or one bit clear, in X.
1750 Print it preceded by '#'. */
1752 static int bits_set
[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1753 HOST_WIDE_INT xx
= 1;
1756 if (CONST_INT_P (x
))
1759 output_operand_lossage ("'B' operand is not constant");
1761 /* GCC sign-extends masks with the MSB set, so we have to
1762 detect all the cases that differ only in sign extension
1763 beyond the bits we care about. Normally, the predicates
1764 and constraints ensure that we have the right values. This
1765 works correctly for valid masks. */
1766 if (bits_set
[xx
& 7] <= 1)
1768 /* Remove sign extension bits. */
1769 if ((~xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1771 else if ((~xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1773 l
= exact_log2 (xx
);
1777 /* Add sign extension bits. */
1778 if ((xx
& ~(HOST_WIDE_INT
)0xff) == 0)
1779 xx
|= ~(HOST_WIDE_INT
)0xff;
1780 else if ((xx
& ~(HOST_WIDE_INT
)0xffff) == 0)
1781 xx
|= ~(HOST_WIDE_INT
)0xffff;
1782 l
= exact_log2 (~xx
);
1786 output_operand_lossage ("'B' operand has multiple bits set");
1788 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, l
);
1793 /* Print the symbol without a surrounding @fptr(). */
1794 if (GET_CODE (x
) == SYMBOL_REF
)
1795 assemble_name (file
, XSTR (x
, 0));
1796 else if (LABEL_P (x
))
1797 output_asm_label (x
);
1799 xstormy16_print_operand_address (file
, x
);
1804 /* Print the immediate operand less one, preceded by '#'.
1805 For 'O', negate it first. */
1807 HOST_WIDE_INT xx
= 0;
1809 if (CONST_INT_P (x
))
1812 output_operand_lossage ("'o' operand is not constant");
1817 fprintf (file
, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC
, xx
- 1);
1822 /* Print the shift mask for bp/bn. */
1824 HOST_WIDE_INT xx
= 1;
1827 if (CONST_INT_P (x
))
1830 output_operand_lossage ("'B' operand is not constant");
1834 fputs (IMMEDIATE_PREFIX
, file
);
1835 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, l
);
1840 /* Handled below. */
1844 output_operand_lossage ("xstormy16_print_operand: unknown code");
1848 switch (GET_CODE (x
))
1851 fputs (reg_names
[REGNO (x
)], file
);
1855 xstormy16_print_operand_address (file
, XEXP (x
, 0));
1859 /* Some kind of constant or label; an immediate operand,
1860 so prefix it with '#' for the assembler. */
1861 fputs (IMMEDIATE_PREFIX
, file
);
1862 output_addr_const (file
, x
);
1869 /* Expander for the `casesi' pattern.
1870 INDEX is the index of the switch statement.
1871 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1872 to the first table entry.
1873 RANGE is the number of table entries.
1874 TABLE is an ADDR_VEC that is the jump table.
1875 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1876 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1879 xstormy16_expand_casesi (rtx index
, rtx lower_bound
, rtx range
,
1880 rtx table
, rtx default_label
)
1882 HOST_WIDE_INT range_i
= INTVAL (range
);
1885 /* This code uses 'br', so it can deal only with tables of size up to
1887 if (range_i
>= 8192)
1888 sorry ("switch statement of size %lu entries too large",
1889 (unsigned long) range_i
);
1891 index
= expand_binop (SImode
, sub_optab
, index
, lower_bound
, NULL_RTX
, 0,
1893 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, SImode
, 1,
1895 int_index
= gen_lowpart_common (HImode
, index
);
1896 emit_insn (gen_ashlhi3 (int_index
, int_index
, const2_rtx
));
1897 emit_jump_insn (gen_tablejump_pcrel (int_index
, table
));
1900 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1901 instructions, without label or alignment or any other special
1902 constructs. We know that the previous instruction will be the
1903 `tablejump_pcrel' output above.
1905 TODO: it might be nice to output 'br' instructions if they could
1909 xstormy16_output_addr_vec (FILE *file
, rtx label ATTRIBUTE_UNUSED
, rtx table
)
1913 switch_to_section (current_function_section ());
1915 vlen
= XVECLEN (table
, 0);
1916 for (idx
= 0; idx
< vlen
; idx
++)
1918 fputs ("\tjmpf ", file
);
1919 output_asm_label (XEXP (XVECEXP (table
, 0, idx
), 0));
1924 /* Expander for the `call' patterns.
1925 RETVAL is the RTL for the return register or NULL for void functions.
1926 DEST is the function to call, expressed as a MEM.
1927 COUNTER is ignored. */
1930 xstormy16_expand_call (rtx retval
, rtx dest
, rtx counter
)
1935 gcc_assert (MEM_P (dest
));
1936 dest
= XEXP (dest
, 0);
1938 if (! CONSTANT_P (dest
) && ! REG_P (dest
))
1939 dest
= force_reg (Pmode
, dest
);
1944 mode
= GET_MODE (retval
);
1946 call
= gen_rtx_CALL (mode
, gen_rtx_MEM (FUNCTION_MODE
, dest
),
1949 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
1951 if (! CONSTANT_P (dest
))
1953 temp
= gen_reg_rtx (HImode
);
1954 emit_move_insn (temp
, const0_rtx
);
1959 call
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, call
,
1960 gen_rtx_USE (VOIDmode
, temp
)));
1961 emit_call_insn (call
);
1964 /* Expanders for multiword computational operations. */
1966 /* Expander for arithmetic operations; emit insns to compute
1968 (set DEST (CODE:MODE SRC0 SRC1))
1970 When CODE is COMPARE, a branch template is generated
1971 (this saves duplicating code in xstormy16_split_cbranch). */
1974 xstormy16_expand_arith (machine_mode mode
, enum rtx_code code
,
1975 rtx dest
, rtx src0
, rtx src1
)
1977 int num_words
= GET_MODE_BITSIZE (mode
) / BITS_PER_WORD
;
1982 emit_move_insn (src0
, const0_rtx
);
1984 for (i
= 0; i
< num_words
; i
++)
1986 rtx w_src0
, w_src1
, w_dest
;
1989 w_src0
= simplify_gen_subreg (word_mode
, src0
, mode
,
1990 i
* UNITS_PER_WORD
);
1991 w_src1
= simplify_gen_subreg (word_mode
, src1
, mode
, i
* UNITS_PER_WORD
);
1992 w_dest
= simplify_gen_subreg (word_mode
, dest
, mode
, i
* UNITS_PER_WORD
);
1998 && CONST_INT_P (w_src1
)
1999 && INTVAL (w_src1
) == 0)
2003 insn
= gen_addchi4 (w_dest
, w_src0
, w_src1
);
2005 insn
= gen_addchi5 (w_dest
, w_src0
, w_src1
);
2011 if (code
== COMPARE
&& i
== num_words
- 1)
2013 rtx branch
, sub
, clobber
, sub_1
;
2015 sub_1
= gen_rtx_MINUS (HImode
, w_src0
,
2016 gen_rtx_ZERO_EXTEND (HImode
, gen_rtx_REG (BImode
, CARRY_REGNUM
)));
2017 sub
= gen_rtx_SET (VOIDmode
, w_dest
,
2018 gen_rtx_MINUS (HImode
, sub_1
, w_src1
));
2019 clobber
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (BImode
, CARRY_REGNUM
));
2020 branch
= gen_rtx_SET (VOIDmode
, pc_rtx
,
2021 gen_rtx_IF_THEN_ELSE (VOIDmode
,
2027 insn
= gen_rtx_PARALLEL (VOIDmode
,
2028 gen_rtvec (3, branch
, sub
, clobber
));
2032 && CONST_INT_P (w_src1
)
2033 && INTVAL (w_src1
) == 0)
2036 insn
= gen_subchi4 (w_dest
, w_src0
, w_src1
);
2038 insn
= gen_subchi5 (w_dest
, w_src0
, w_src1
);
2044 if (CONST_INT_P (w_src1
)
2045 && INTVAL (w_src1
) == -(code
== AND
))
2048 insn
= gen_rtx_SET (VOIDmode
, w_dest
, gen_rtx_fmt_ee (code
, mode
,
2053 insn
= gen_rtx_SET (VOIDmode
, w_dest
, gen_rtx_NOT (mode
, w_src0
));
2064 /* If we emit nothing, try_split() will think we failed. So emit
2065 something that does nothing and can be optimized away. */
2070 /* The shift operations are split at output time for constant values;
2071 variable-width shifts get handed off to a library routine.
2073 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2074 SIZE_R will be a CONST_INT, X will be a hard register. */
2077 xstormy16_output_shift (machine_mode mode
, enum rtx_code code
,
2078 rtx x
, rtx size_r
, rtx temp
)
2081 const char *r0
, *r1
, *rt
;
2084 gcc_assert (CONST_INT_P (size_r
)
2088 size
= INTVAL (size_r
) & (GET_MODE_BITSIZE (mode
) - 1);
2093 r0
= reg_names
[REGNO (x
)];
2094 r1
= reg_names
[REGNO (x
) + 1];
2096 /* For shifts of size 1, we can use the rotate instructions. */
2102 sprintf (r
, "shl %s,#1 | rlc %s,#1", r0
, r1
);
2105 sprintf (r
, "asr %s,#1 | rrc %s,#1", r1
, r0
);
2108 sprintf (r
, "shr %s,#1 | rrc %s,#1", r1
, r0
);
2116 /* For large shifts, there are easy special cases. */
2122 sprintf (r
, "mov %s,%s | mov %s,#0", r1
, r0
, r0
);
2125 sprintf (r
, "mov %s,%s | asr %s,#15", r0
, r1
, r1
);
2128 sprintf (r
, "mov %s,%s | mov %s,#0", r0
, r1
, r1
);
2140 sprintf (r
, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2141 r1
, r0
, r0
, r1
, (int) size
- 16);
2144 sprintf (r
, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2145 r0
, r1
, r1
, r0
, (int) size
- 16);
2148 sprintf (r
, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2149 r0
, r1
, r1
, r0
, (int) size
- 16);
2157 /* For the rest, we have to do more work. In particular, we
2158 need a temporary. */
2159 rt
= reg_names
[REGNO (temp
)];
2164 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2165 rt
, r0
, r0
, (int) size
, r1
, (int) size
, rt
, (int) (16 - size
),
2170 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2171 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2176 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2177 rt
, r1
, r1
, (int) size
, r0
, (int) size
, rt
, (int) (16 - size
),
2186 /* Attribute handling. */
2188 /* Return nonzero if the function is an interrupt function. */
2191 xstormy16_interrupt_function_p (void)
2195 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2196 any functions are declared, which is demonstrably wrong, but
2197 it is worked around here. FIXME. */
2201 attributes
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
2202 return lookup_attribute ("interrupt", attributes
) != NULL_TREE
;
2205 #undef TARGET_ATTRIBUTE_TABLE
2206 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2208 static tree xstormy16_handle_interrupt_attribute
2209 (tree
*, tree
, tree
, int, bool *);
2210 static tree xstormy16_handle_below100_attribute
2211 (tree
*, tree
, tree
, int, bool *);
2213 static const struct attribute_spec xstormy16_attribute_table
[] =
2215 /* name, min_len, max_len, decl_req, type_req, fn_type_req, decl_handler,
2216 type_handler, affects_type_identity. */
2217 { "interrupt", 0, 0, false, true, true, NULL
,
2218 xstormy16_handle_interrupt_attribute
, false },
2219 { "BELOW100", 0, 0, true, false, false,
2220 xstormy16_handle_below100_attribute
, NULL
, false },
2221 { "below100", 0, 0, true, false, false,
2222 xstormy16_handle_below100_attribute
, NULL
, false },
2223 { NULL
, 0, 0, false, false, false, NULL
, NULL
, false }
2226 /* Handle an "interrupt" attribute;
2227 arguments as in struct attribute_spec.handler. */
2230 xstormy16_handle_interrupt_attribute (tree
*node
, tree name
,
2231 tree args ATTRIBUTE_UNUSED
,
2232 int flags ATTRIBUTE_UNUSED
,
2235 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
2237 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2239 *no_add_attrs
= true;
2245 /* Handle an "below" attribute;
2246 arguments as in struct attribute_spec.handler. */
2249 xstormy16_handle_below100_attribute (tree
*node
,
2250 tree name ATTRIBUTE_UNUSED
,
2251 tree args ATTRIBUTE_UNUSED
,
2252 int flags ATTRIBUTE_UNUSED
,
2255 if (TREE_CODE (*node
) != VAR_DECL
)
2257 warning (OPT_Wattributes
,
2258 "%<__BELOW100__%> attribute only applies to variables");
2259 *no_add_attrs
= true;
2261 else if (args
== NULL_TREE
)
2263 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
2265 warning (OPT_Wattributes
, "__BELOW100__ attribute not allowed "
2266 "with auto storage class");
2267 *no_add_attrs
= true;
2274 #undef TARGET_INIT_BUILTINS
2275 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2276 #undef TARGET_EXPAND_BUILTIN
2277 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2283 const char * arg_ops
; /* 0..9, t for temp register, r for return value. */
2284 const char * arg_types
; /* s=short,l=long, upper case for unsigned. */
2288 { "__sdivlh", CODE_FOR_sdivlh
, "rt01", "sls" },
2289 { "__smodlh", CODE_FOR_sdivlh
, "tr01", "sls" },
2290 { "__udivlh", CODE_FOR_udivlh
, "rt01", "SLS" },
2291 { "__umodlh", CODE_FOR_udivlh
, "tr01", "SLS" },
2292 { NULL
, 0, NULL
, NULL
}
2296 xstormy16_init_builtins (void)
2298 tree args
[2], ret_type
, arg
= NULL_TREE
, ftype
;
2301 ret_type
= void_type_node
;
2303 for (i
= 0; s16builtins
[i
].name
; i
++)
2305 n_args
= strlen (s16builtins
[i
].arg_types
) - 1;
2307 gcc_assert (n_args
<= (int) ARRAY_SIZE (args
));
2309 for (a
= n_args
- 1; a
>= 0; a
--)
2310 args
[a
] = NULL_TREE
;
2312 for (a
= n_args
; a
>= 0; a
--)
2314 switch (s16builtins
[i
].arg_types
[a
])
2316 case 's': arg
= short_integer_type_node
; break;
2317 case 'S': arg
= short_unsigned_type_node
; break;
2318 case 'l': arg
= long_integer_type_node
; break;
2319 case 'L': arg
= long_unsigned_type_node
; break;
2320 default: gcc_unreachable ();
2327 ftype
= build_function_type_list (ret_type
, args
[0], args
[1], NULL_TREE
);
2328 add_builtin_function (s16builtins
[i
].name
, ftype
,
2329 i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2334 xstormy16_expand_builtin (tree exp
, rtx target
,
2335 rtx subtarget ATTRIBUTE_UNUSED
,
2336 machine_mode mode ATTRIBUTE_UNUSED
,
2337 int ignore ATTRIBUTE_UNUSED
)
2339 rtx op
[10], args
[10], pat
, copyto
[10], retval
= 0;
2340 tree fndecl
, argtree
;
2343 fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
2344 argtree
= TREE_OPERAND (exp
, 1);
2345 i
= DECL_FUNCTION_CODE (fndecl
);
2346 code
= s16builtins
[i
].md_code
;
2348 for (a
= 0; a
< 10 && argtree
; a
++)
2350 args
[a
] = expand_normal (TREE_VALUE (argtree
));
2351 argtree
= TREE_CHAIN (argtree
);
2354 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2356 char ao
= s16builtins
[i
].arg_ops
[o
];
2357 char c
= insn_data
[code
].operand
[o
].constraint
[0];
2362 omode
= (machine_mode
) insn_data
[code
].operand
[o
].mode
;
2364 op
[o
] = target
? target
: gen_reg_rtx (omode
);
2366 op
[o
] = gen_reg_rtx (omode
);
2368 op
[o
] = args
[(int) hex_value (ao
)];
2370 if (! (*insn_data
[code
].operand
[o
].predicate
) (op
[o
], GET_MODE (op
[o
])))
2372 if (c
== '+' || c
== '=')
2375 op
[o
] = gen_reg_rtx (omode
);
2378 op
[o
] = copy_to_mode_reg (omode
, op
[o
]);
2385 pat
= GEN_FCN (code
) (op
[0], op
[1], op
[2], op
[3], op
[4],
2386 op
[5], op
[6], op
[7], op
[8], op
[9]);
2389 for (o
= 0; s16builtins
[i
].arg_ops
[o
]; o
++)
2392 emit_move_insn (copyto
[o
], op
[o
]);
2393 if (op
[o
] == retval
)
2400 /* Look for combinations of insns that can be converted to BN or BP
2401 opcodes. This is, unfortunately, too complex to do with MD
2405 combine_bnp (rtx_insn
*insn
)
2407 int insn_code
, regno
, need_extend
;
2409 rtx cond
, reg
, qireg
, mem
;
2410 rtx_insn
*and_insn
, *load
;
2411 machine_mode load_mode
= QImode
;
2412 machine_mode and_mode
= QImode
;
2413 rtx_insn
*shift
= NULL
;
2415 insn_code
= recog_memoized (insn
);
2416 if (insn_code
!= CODE_FOR_cbranchhi
2417 && insn_code
!= CODE_FOR_cbranchhi_neg
)
2420 cond
= XVECEXP (PATTERN (insn
), 0, 0); /* set */
2421 cond
= XEXP (cond
, 1); /* if */
2422 cond
= XEXP (cond
, 0); /* cond */
2423 switch (GET_CODE (cond
))
2437 reg
= XEXP (cond
, 0);
2440 regno
= REGNO (reg
);
2441 if (XEXP (cond
, 1) != const0_rtx
)
2443 if (! find_regno_note (insn
, REG_DEAD
, regno
))
2445 qireg
= gen_rtx_REG (QImode
, regno
);
2449 /* LT and GE conditionals should have a sign extend before
2451 for (and_insn
= prev_real_insn (insn
);
2452 and_insn
!= NULL_RTX
;
2453 and_insn
= prev_real_insn (and_insn
))
2455 int and_code
= recog_memoized (and_insn
);
2457 if (and_code
== CODE_FOR_extendqihi2
2458 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2459 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), qireg
))
2462 if (and_code
== CODE_FOR_movhi_internal
2463 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
))
2465 /* This is for testing bit 15. */
2470 if (reg_mentioned_p (reg
, and_insn
))
2473 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2479 /* EQ and NE conditionals have an AND before them. */
2480 for (and_insn
= prev_real_insn (insn
);
2481 and_insn
!= NULL_RTX
;
2482 and_insn
= prev_real_insn (and_insn
))
2484 if (recog_memoized (and_insn
) == CODE_FOR_andhi3
2485 && rtx_equal_p (SET_DEST (PATTERN (and_insn
)), reg
)
2486 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn
)), 0), reg
))
2489 if (reg_mentioned_p (reg
, and_insn
))
2492 if (! NOTE_P (and_insn
) && ! NONJUMP_INSN_P (and_insn
))
2498 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2499 followed by an AND like this:
2501 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2502 (clobber (reg:BI carry))]
2504 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2506 Attempt to detect this here. */
2507 for (shift
= prev_real_insn (and_insn
); shift
;
2508 shift
= prev_real_insn (shift
))
2510 if (recog_memoized (shift
) == CODE_FOR_lshrhi3
2511 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift
), 0, 0)), reg
)
2512 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 0), reg
))
2515 if (reg_mentioned_p (reg
, shift
)
2516 || (! NOTE_P (shift
) && ! NONJUMP_INSN_P (shift
)))
2525 if (and_insn
== NULL_RTX
)
2528 for (load
= shift
? prev_real_insn (shift
) : prev_real_insn (and_insn
);
2530 load
= prev_real_insn (load
))
2532 int load_code
= recog_memoized (load
);
2534 if (load_code
== CODE_FOR_movhi_internal
2535 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2536 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), HImode
)
2537 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load
))))
2543 if (load_code
== CODE_FOR_movqi_internal
2544 && rtx_equal_p (SET_DEST (PATTERN (load
)), qireg
)
2545 && xstormy16_below100_operand (SET_SRC (PATTERN (load
)), QImode
))
2551 if (load_code
== CODE_FOR_zero_extendqihi2
2552 && rtx_equal_p (SET_DEST (PATTERN (load
)), reg
)
2553 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load
)), 0), QImode
))
2560 if (reg_mentioned_p (reg
, load
))
2563 if (! NOTE_P (load
) && ! NONJUMP_INSN_P (load
))
2569 mem
= SET_SRC (PATTERN (load
));
2573 mask
= (load_mode
== HImode
) ? 0x8000 : 0x80;
2575 /* If the mem includes a zero-extend operation and we are
2576 going to generate a sign-extend operation then move the
2577 mem inside the zero-extend. */
2578 if (GET_CODE (mem
) == ZERO_EXTEND
)
2579 mem
= XEXP (mem
, 0);
2583 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn
)), 1),
2587 mask
= (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn
)), 1));
2590 mask
<<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift
), 0, 0)), 1));
2593 if (load_mode
== HImode
)
2595 rtx addr
= XEXP (mem
, 0);
2597 if (! (mask
& 0xff))
2599 addr
= plus_constant (Pmode
, addr
, 1);
2602 mem
= gen_rtx_MEM (QImode
, addr
);
2606 XEXP (cond
, 0) = gen_rtx_SIGN_EXTEND (HImode
, mem
);
2608 XEXP (cond
, 0) = gen_rtx_AND (and_mode
, mem
, GEN_INT (mask
));
2610 INSN_CODE (insn
) = -1;
2613 if (and_insn
!= insn
)
2614 delete_insn (and_insn
);
2616 if (shift
!= NULL_RTX
)
2617 delete_insn (shift
);
2621 xstormy16_reorg (void)
2625 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2627 if (! JUMP_P (insn
))
2633 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2636 xstormy16_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2638 const HOST_WIDE_INT size
= int_size_in_bytes (type
);
2639 return (size
== -1 || size
> UNITS_PER_WORD
* NUM_ARGUMENT_REGISTERS
);
/* Target hook overrides for the xstormy16 port.  Each #undef/#define
   pair replaces the default hook with either an xstormy16-specific
   implementation defined earlier in this file or a suitable generic
   GCC default.  */

/* Assembler directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* MI thunk emission (no vcall offsets supported).  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Assembly printing of operands and addresses.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* RTX cost model.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Variadic-argument handling.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Argument/value promotion rules.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

/* Function-argument passing.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

/* Function return values.  */
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Machine-dependent reorg pass (bit-branch combining above).  */
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload register-class preferences.  */
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

/* Address legitimacy checks.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

/* Frame-pointer elimination.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

/* Trampoline initialization for nested functions.  */
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* Instantiate the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
2717 #include "gt-stormy16.h"