Remove old autovect-branch by moving to "dead" directory.
[official-gcc.git] / old-autovect-branch / gcc / config / stormy16 / stormy16.c
blob76becfc4f117bc26480cdca42e3cd1113c3f41ed
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "tree.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "except.h"
44 #include "function.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm_p.h"
48 #include "langhooks.h"
49 #include "tree-gimple.h"
50 #include "ggc.h"
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 HOST_WIDE_INT, tree);
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *);
61 static int xstormy16_address_cost (rtx);
62 static bool xstormy16_return_in_memory (tree, tree);
64 /* Define the information needed to generate branch and scc insns. This is
65 stored from the compare operation. */
66 struct rtx_def * xstormy16_compare_op0;
67 struct rtx_def * xstormy16_compare_op1;
69 static GTY(()) section *bss100_section;
71 /* Compute a (partial) cost for rtx X. Return true if the complete
72 cost has been computed, and false if subexpressions should be
73 scanned. In either case, *TOTAL contains the cost result. */
75 static bool
76 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
77 int *total)
79 switch (code)
81 case CONST_INT:
82 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
83 *total = COSTS_N_INSNS (1) / 2;
84 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
85 *total = COSTS_N_INSNS (1);
86 else
87 *total = COSTS_N_INSNS (2);
88 return true;
90 case CONST_DOUBLE:
91 case CONST:
92 case SYMBOL_REF:
93 case LABEL_REF:
94 *total = COSTS_N_INSNS(2);
95 return true;
97 case MULT:
98 *total = COSTS_N_INSNS (35 + 6);
99 return true;
100 case DIV:
101 *total = COSTS_N_INSNS (51 - 6);
102 return true;
104 default:
105 return false;
109 static int
110 xstormy16_address_cost (rtx x)
112 return (GET_CODE (x) == CONST_INT ? 2
113 : GET_CODE (x) == PLUS ? 7
114 : 5);
117 /* Branches are handled as follows:
119 1. HImode compare-and-branches. The machine supports these
120 natively, so the appropriate pattern is emitted directly.
122 2. SImode EQ and NE. These are emitted as pairs of HImode
123 compare-and-branches.
125 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
126 of a SImode subtract followed by a branch (not a compare-and-branch),
127 like this:
132 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
/* Emit a branch of kind CODE to location LOC.  The comparison operands
   are taken from the xstormy16_compare_op0/op1 globals set up by the
   compare expanders (see the "Branches are handled" comment above).  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx op0 = xstormy16_compare_op0;
  rtx op1 = xstormy16_compare_op1;
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      /* No direct SImode pattern for these codes: synthesize them
	 recursively from the LT/LTU and EQ/NE forms.  */
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      /* SImode (in)equality against a nonzero value: compare word by
	 word with HImode compare-and-branches.  */
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
	}
      /* The last word uses the original code (EQ falls through to LOC
	 only if every word matched).  */
      xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						   i * UNITS_PER_WORD);
      xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						   i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* Every compare-and-branch pattern clobbers the carry flag.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
			 rtx dest, rtx carry)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Build the subtract/compare sequence in a detached insn list.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Walk to the last insn of the sequence; its first SET holds the
     comparison to rewrite.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Patch the condition code and branch target in place, then emit
     the whole sequence.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
268 /* Return the string to output a conditional branch to LABEL, which is
269 the operand number of the label.
271 OP is the conditional expression, or NULL for branch-always.
273 REVERSED is nonzero if we should reverse the sense of the comparison.
275 INSN is the insn. */
277 char *
278 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
280 static char string[64];
281 int need_longbranch = (op != NULL_RTX
282 ? get_attr_length (insn) == 8
283 : get_attr_length (insn) == 4);
284 int really_reversed = reversed ^ need_longbranch;
285 const char *ccode;
286 const char *template;
287 const char *operands;
288 enum rtx_code code;
290 if (! op)
292 if (need_longbranch)
293 ccode = "jmpf";
294 else
295 ccode = "br";
296 sprintf (string, "%s %s", ccode, label);
297 return string;
300 code = GET_CODE (op);
302 if (GET_CODE (XEXP (op, 0)) != REG)
304 code = swap_condition (code);
305 operands = "%3,%2";
307 else
308 operands = "%2,%3";
310 /* Work out which way this really branches. */
311 if (really_reversed)
312 code = reverse_condition (code);
314 switch (code)
316 case EQ: ccode = "z"; break;
317 case NE: ccode = "nz"; break;
318 case GE: ccode = "ge"; break;
319 case LT: ccode = "lt"; break;
320 case GT: ccode = "gt"; break;
321 case LE: ccode = "le"; break;
322 case GEU: ccode = "nc"; break;
323 case LTU: ccode = "c"; break;
324 case GTU: ccode = "hi"; break;
325 case LEU: ccode = "ls"; break;
327 default:
328 gcc_unreachable ();
331 if (need_longbranch)
332 template = "b%s %s,.+8 | jmpf %s";
333 else
334 template = "b%s %s,%s";
335 sprintf (string, template, ccode, operands, label);
337 return string;
340 /* Return the string to output a conditional branch to LABEL, which is
341 the operand number of the label, but suitable for the tail of a
342 SImode branch.
344 OP is the conditional expression (OP is never NULL_RTX).
346 REVERSED is nonzero if we should reverse the sense of the comparison.
348 INSN is the insn. */
350 char *
351 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
353 static char string[64];
354 int need_longbranch = get_attr_length (insn) >= 8;
355 int really_reversed = reversed ^ need_longbranch;
356 const char *ccode;
357 const char *template;
358 char prevop[16];
359 enum rtx_code code;
361 code = GET_CODE (op);
363 /* Work out which way this really branches. */
364 if (really_reversed)
365 code = reverse_condition (code);
367 switch (code)
369 case EQ: ccode = "z"; break;
370 case NE: ccode = "nz"; break;
371 case GE: ccode = "ge"; break;
372 case LT: ccode = "lt"; break;
373 case GEU: ccode = "nc"; break;
374 case LTU: ccode = "c"; break;
376 /* The missing codes above should never be generated. */
377 default:
378 gcc_unreachable ();
381 switch (code)
383 case EQ: case NE:
385 int regnum;
387 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
389 regnum = REGNO (XEXP (op, 0));
390 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
392 break;
394 case GE: case LT: case GEU: case LTU:
395 strcpy (prevop, "sbc %2,%3");
396 break;
398 default:
399 gcc_unreachable ();
402 if (need_longbranch)
403 template = "%s | b%s .+6 | jmpf %s";
404 else
405 template = "%s | b%s %s";
406 sprintf (string, template, prevop, ccode, label);
408 return string;
411 /* Many machines have some registers that cannot be copied directly to or from
412 memory or even from other types of registers. An example is the `MQ'
413 register, which on most machines, can only be copied to or from general
414 registers, but not memory. Some machines allow copying all registers to and
415 from memory, but require a scratch register for stores to some memory
416 locations (e.g., those with symbolic address on the RT, and those with
417 certain symbolic address on the SPARC when compiling PIC). In some cases,
418 both an intermediate and a scratch register are required.
420 You should define these macros to indicate to the reload phase that it may
421 need to allocate at least one register for a reload in addition to the
422 register to contain the data. Specifically, if copying X to a register
423 CLASS in MODE requires an intermediate register, you should define
424 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
425 whose registers can be used as intermediate registers or scratch registers.
427 If copying a register CLASS in MODE to X requires an intermediate or scratch
428 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
429 largest register class required. If the requirements for input and output
430 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
431 instead of defining both macros identically.
433 The values returned by these macros are often `GENERAL_REGS'. Return
434 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
435 to or from a register of CLASS in MODE without requiring a scratch register.
436 Do not define this macro if it would always return `NO_REGS'.
438 If a scratch register is required (either with or without an intermediate
439 register), you should define patterns for `reload_inM' or `reload_outM', as
440 required.. These patterns, which will normally be implemented with a
441 `define_expand', should be similar to the `movM' patterns, except that
442 operand 2 is the scratch register.
444 Define constraints for the reload register and scratch register that contain
445 a single register class. If the original reload register (whose class is
446 CLASS) can meet the constraint given in the pattern, the value returned by
447 these macros is used for the class of the scratch register. Otherwise, two
448 additional reload registers are required. Their classes are obtained from
449 the constraints in the insn pattern.
451 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
452 either be in a hard register or in memory. Use `true_regnum' to find out;
453 it will return -1 if the pseudo is in memory and the hard register number if
454 it is in a register.
456 These macros should not be used in the case where a particular class of
457 registers can only be copied to memory and not to another class of
458 registers. In that case, secondary reload registers are not needed and
459 would not be helpful. Instead, a stack location must be used to perform the
460 copy and the `movM' pattern should use memory as an intermediate storage.
461 This case often occurs between floating-point and general registers. */
/* Implement SECONDARY_RELOAD_CLASS; see the long commentary above for
   the general contract of this hook.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class class,
				  enum machine_mode mode,
				  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  A pseudo still in memory
     (true_regnum == -1 or >= FIRST_PSEUDO_REGISTER) counts too.  */
  if ((GET_CODE (x) == MEM
       || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
	   && (true_regnum (x) == -1
	       || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (class, EIGHT_REGS))
    return EIGHT_REGS;

  /* When reloading a PLUS, the carry register will be required
     unless the inc or dec instructions can be used.  */
  if (xstormy16_carry_plus_operand (x, mode))
    return CARRY_REGS;

  return NO_REGS;
}
485 /* Recognize a PLUS that needs the carry register. */
487 xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
489 return (GET_CODE (x) == PLUS
490 && GET_CODE (XEXP (x, 1)) == CONST_INT
491 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
494 /* Detect and error out on out-of-range constants for movhi. */
496 xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
498 if ((GET_CODE (x) == CONST_INT)
499 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
500 error ("constant halfword load operand out of range");
501 return general_operand (x, mode);
504 /* Detect and error out on out-of-range constants for addhi and subhi. */
506 xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
508 if ((GET_CODE (x) == CONST_INT)
509 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
510 error ("constant arithmetic operand out of range");
511 return nonmemory_operand (x, mode);
514 enum reg_class
515 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
517 if (class == GENERAL_REGS
518 && GET_CODE (x) == MEM)
519 return EIGHT_REGS;
521 return class;
524 /* Predicate for symbols and addresses that reflect special 8-bit
525 addressing. */
527 xstormy16_below100_symbol (rtx x,
528 enum machine_mode mode ATTRIBUTE_UNUSED)
530 if (GET_CODE (x) == CONST)
531 x = XEXP (x, 0);
532 if (GET_CODE (x) == PLUS
533 && GET_CODE (XEXP (x, 1)) == CONST_INT)
534 x = XEXP (x, 0);
536 if (GET_CODE (x) == SYMBOL_REF)
537 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
539 if (GET_CODE (x) == CONST_INT)
541 HOST_WIDE_INT i = INTVAL (x);
542 if ((i >= 0x0000 && i <= 0x00ff)
543 || (i >= 0x7f00 && i <= 0x7fff))
544 return 1;
546 return 0;
549 /* Likewise, but only for non-volatile MEMs, for patterns where the
550 MEM will get split into smaller sized accesses. */
552 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
554 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
555 return 0;
556 return xstormy16_below100_operand (x, mode);
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  OPERANDS is {dest, src, value}
   from the iorqi3 pattern.  */
void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Setting a single bit can be done directly on a below-100
     address or register.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force everything into registers and perform the
     operation in HImode via subregs.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  /* Copy back if we had to substitute a temporary destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
/* Likewise, for AND.  OPERANDS is {dest, src, value} from the
   andqi3 pattern.  */
void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Clearing a single bit can be done directly on a below-100
     address or register.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force everything into registers and perform the
     operation in HImode via subregs.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  /* Copy back if we had to substitute a temporary destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
642 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
643 (GET_CODE (X) == CONST_INT \
644 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
646 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
647 (GET_CODE (X) == CONST_INT \
648 && INTVAL (X) + (OFFSET) >= 0 \
649 && INTVAL (X) + (OFFSET) < 0x8000 \
650 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Return nonzero if X is a legitimate memory address for MODE.
   STRICT nonzero means only hard registers are acceptable bases.  */
int
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, int strict)
{
  /* Absolute addresses in the directly-addressable windows.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return 1;

  /* Strip a small constant offset; the base register is checked below.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    x = XEXP (x, 0);

  /* Auto-modify forms: validate the underlying base register.  */
  if ((GET_CODE (x) == PRE_MODIFY
       && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return 1;

  if (xstormy16_below100_symbol(x, mode))
    return 1;

  return 0;
}
679 /* Return nonzero if memory address X (an RTX) can have different
680 meanings depending on the machine mode of the memory reference it
681 is used for or if the address is valid for some modes but not
682 others.
684 Autoincrement and autodecrement addresses typically have mode-dependent
685 effects because the amount of the increment or decrement is the size of the
686 operand being addressed. Some machines have other mode-dependent addresses.
687 Many RISC machines have no mode-dependent addresses.
689 You may assume that ADDR is a valid address for the machine.
691 On this chip, this is true if the address is valid with an offset
692 of 0 but not of 6, because in that case it cannot be used as an
693 address for DImode or DFmode, or if the address is a post-increment
694 or pre-decrement address. */
696 xstormy16_mode_dependent_address_p (rtx x)
698 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
699 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
700 return 1;
702 if (GET_CODE (x) == PLUS
703 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
704 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
705 return 1;
707 if (GET_CODE (x) == PLUS)
708 x = XEXP (x, 0);
710 if (GET_CODE (x) == POST_INC
711 || GET_CODE (x) == PRE_DEC)
712 return 1;
714 return 0;
/* A C expression that defines the optional machine-dependent constraint
   letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
   types of operands, usually memory references, for the target machine.
   Normally this macro will not be defined.  If it is required for a particular
   target machine, it should return 1 if VALUE corresponds to the operand type
   represented by the constraint letter C.  If C is not defined as an extra
   constraint, the value returned should be 0 regardless of VALUE.  */
int
xstormy16_extra_constraint_p (rtx x, int c)
{
  switch (c)
    {
      /* 'Q' is for pushes.  */
    case 'Q':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == POST_INC
	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'R' is for pops.  */
    case 'R':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == PRE_DEC
	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'S' is for immediate memory addresses.  */
    case 'S':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == CONST_INT
	      && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));

      /* 'T' is for Rx.  */
    case 'T':
      /* Not implemented yet.  */
      return 0;

      /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
	 for allocating a scratch register for 32-bit shifts.  */
    case 'U':
      return (GET_CODE (x) == CONST_INT
	      && (INTVAL (x) < 2 || INTVAL (x) > 15));

      /* 'Z' is for CONST_INT value zero.  This is for adding zero to
	 a register in addhi3, which would otherwise require a carry.  */
    case 'Z':
      return (GET_CODE (x) == CONST_INT
	      && (INTVAL (x) == 0));

      /* 'W' is for below-100 operands.  */
    case 'W':
      return xstormy16_below100_operand(x, GET_MODE(x));

    default:
      return 0;
    }
}
773 short_memory_operand (rtx x, enum machine_mode mode)
775 if (! memory_operand (x, mode))
776 return 0;
777 return (GET_CODE (XEXP (x, 0)) != PLUS);
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (GET_CODE (dest) == MEM)
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  /* Clear the flag on the shared MEM; the per-word MEMs built
	     below get it back individually.  */
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (GET_CODE (src) == MEM)
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register with overlap: copy high-to-low when the
	 destination's low registers would clobber the source.  */
      if (GET_CODE (src) == REG
	  && GET_CODE (dest) == REG
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-dec: copy the most-significant word first.  */
    direction = -1;
  else if (GET_CODE (src) == MEM
	   && reg_overlap_mentioned_p (dest, src))
    {
      /* Loading into registers that appear in the source address:
	 pick the direction that uses the overlapping register before
	 it is overwritten.  */
      int regno;

      gcc_assert (GET_CODE (dest) == REG);
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  /* Emit one word-sized move per word, in the chosen direction.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  /* Split out a PRE_MODIFY destination address: perform the address
     arithmetic explicitly (clobbering carry, hard reg 16), then use a
     plain MEM through the updated base register.  */
  if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source address.  */
  else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && GET_CODE (dest) == MEM
      && (GET_CODE (XEXP (dest, 0)) != CONST_INT
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && GET_CODE (src) != REG
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
969 /* Stack Layout:
971 The stack is laid out as follows:
973 SP->
974 FP-> Local variables
975 Register save area (up to 4 words)
976 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
978 AP-> Return address (two words)
979 9th procedure parameter word
980 10th procedure parameter word
982 last procedure parameter word
984 The frame pointer location is tuned to make it most likely that all
985 parameters and local variables can be accessed using a load-indexed
986 instruction. */
/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;	/* AP to the first local variable.  */
  int sp_minus_fp;		/* FP to SP distance.  */
  int fp_minus_ap;		/* AP to FP distance.  */
};
1003 /* Does REGNO need to be saved? */
1004 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
1005 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
1006 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
1007 && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
1008 && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
/* Compute the stack layout.  */
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* Count the registers this function must save/restore.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* stdarg functions also spill the argument registers.  */
  if (current_function_stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place FP so that as much as possible is reachable with the
     load-indexed instruction's displacement (see the Stack Layout
     comment above); the usable window here is 2048 bytes.  */
  if (current_function_args_size <= 2048 && current_function_args_size != -1)
    {
      if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
	  + current_function_args_size <= 2048)
	layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - current_function_args_size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  + INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
1052 /* Determine how all the special registers get eliminated. */
1054 xstormy16_initial_elimination_offset (int from, int to)
1056 struct xstormy16_stack_layout layout;
1057 int result;
1059 layout = xstormy16_compute_stack_layout ();
1061 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1062 result = layout.sp_minus_fp - layout.locals_size;
1063 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1064 result = -layout.locals_size;
1065 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1066 result = -layout.fp_minus_ap;
1067 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1068 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1069 else
1070 gcc_unreachable ();
1072 return result;
/* Emit a HImode add computing DEST = SRC0 + SRC1 as one insn that also
   clobbers hard register 16 in BImode -- presumably the carry flag
   (cf. CARRY_REGS); confirm against stormy16.h.  This PARALLEL form is
   what the post-reload add pattern expects, so it is safe to use from
   the prologue/epilogue expanders.  Returns the emitted insn.  */
static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  /* (parallel [(set dest (plus:HI src0 src1))
		(clobber (reg:BI 16))])  */
  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}
/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */
void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A push is a store through (SP++): the stack grows upward.  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* The POST_INC store is opaque to the unwinder; attach a
	   REG_FRAME_RELATED_EXPR note spelling out the store and the
	   SP adjustment separately.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Same two-part unwinder note as for the stdarg saves above.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

      if (layout.sp_minus_fp)
	emit_addhi3_postreload (hard_frame_pointer_rtx,
				hard_frame_pointer_rtx,
				GEN_INT (-layout.sp_minus_fp));
    }
}
/* Do we need an epilogue at all?  Nonzero only after reload, when the
   computed frame is known to be empty, so the function can simply fall
   back to a bare return.  */
int
direct_return (void)
{
  return (reload_completed
	  && xstormy16_compute_stack_layout ().frame_size == 0);
}
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */
void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A pop is a load through (--SP): mirrors the prologue's POST_INC
     push on this upward-growing stack.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* When the frame pointer points exactly at the bottom of the
	 locals, restoring SP from it avoids an add.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	{
	  insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
					 GEN_INT (- layout.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }

  /* Restore any call-saved registers, in the reverse order of the
     prologue's pushes.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;

	insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
	RTX_FRAME_RELATED_P (insn) = 1;
	/* Tell the unwinder about the implicit SP decrement.  */
	dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
			     plus_constant (stack_pointer_rtx,
					    -GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
      }

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (- layout.stdarg_save_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Return.  Interrupt functions use a distinct return pattern.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1264 xstormy16_epilogue_uses (int regno)
1266 if (reload_completed && call_used_regs[regno])
1268 const int ifun = xstormy16_interrupt_function_p ();
1269 return REG_NEEDS_SAVE (regno, ifun);
1271 return 0;
/* Worker for FUNCTION_PROFILER.  Profiling is not implemented for
   this target; report that politely instead of emitting bad code.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1281 /* Return an updated summarizer variable CUM to advance past an
1282 argument in the argument list. The values MODE, TYPE and NAMED
1283 describe that argument. Once this is done, the variable CUM is
1284 suitable for analyzing the *following* argument with
1285 `FUNCTION_ARG', etc.
1287 This function need not do anything if the argument in question was
1288 passed on the stack. The compiler knows how to track the amount of
1289 stack space used for arguments without any special help. However,
1290 it makes life easier for xstormy16_build_va_list if it does update
1291 the word count. */
1292 CUMULATIVE_ARGS
1293 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1294 tree type, int named ATTRIBUTE_UNUSED)
1296 /* If an argument would otherwise be passed partially in registers,
1297 and partially on the stack, the whole of it is passed on the
1298 stack. */
1299 if (cum < NUM_ARGUMENT_REGISTERS
1300 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1301 cum = NUM_ARGUMENT_REGISTERS;
1303 cum += XSTORMY16_WORD_SIZE (type, mode);
1305 return cum;
/* Worker for FUNCTION_ARG: return the register in which to pass the
   argument described by CUM/MODE/TYPE, or 0 when it is passed on the
   stack.  */
rtx
xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
			tree type, int named ATTRIBUTE_UNUSED)
{
  /* The end-of-arguments marker for a varargs call.  */
  if (mode == VOIDmode)
    return const0_rtx;
  /* On the stack if the target says so, or if the argument would not
     fit entirely within the argument registers.  */
  if (targetm.calls.must_pass_in_stack (mode, type)
      || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return 0;
  /* CUM + 2: argument registers presumably start at hard register 2 --
     confirm FIRST_ARGUMENT_REGISTER in stormy16.h.  */
  return gen_rtx_REG (mode, cum + 2);
}
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */
static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* f_1 is the pointer ("base"), f_2 the byte counter ("count").  */
  f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
		    ptr_type_node);
  f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
		    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  /* Chain the fields in declaration order: base first, then count.  */
  TYPE_FIELDS (record) = f_1;
  TREE_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */
void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t;

  /* va_start is rejected in interrupt handlers -- presumably because
     their incoming frame layout differs; see stormy-abi.  */
  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The two va_list fields created by xstormy16_build_builtin_va_list:
     "base" first, "count" second.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = incoming argument pointer + INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  t = build2 (PLUS_EXPR, TREE_TYPE (base), t,
	      build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes already consumed by the named arguments.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     current_function_args_info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */
static tree
xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
				 tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The va_list fields: "base" points at the saved-argument area,
     "count" is the number of bytes consumed so far.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Size of this argument, rounded up to a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* If count + size would overflow the register-save area, the
	 argument was passed on the stack instead.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: the argument lives at base + count.  */
      t = fold_convert (ptr_type_node, count_tmp);
      t = build2 (PLUS_EXPR, ptr_type_node, base, t);
      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* If count_tmp is still inside the register area, advance it
	 past the end of the argument registers.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, void_type_node, count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: compute the argument's address relative to base.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		- INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (base), fold (t));
  t = build2 (MINUS_EXPR, TREE_TYPE (base), base, t);
  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Record how many bytes this va_arg consumed.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
/* Initialize the variable parts of a trampoline.  ADDR is an RTX for
   the address of the trampoline; FNADDR is an RTX for the address of
   the nested function; STATIC_CHAIN is an RTX for the static chain
   value that should be passed to the function when it is called.  */
void
xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  rtx reg_addr = gen_reg_rtx (Pmode);
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  /* REG_ADDR_MEM is a HImode store cursor into the trampoline;
     REG_ADDR is bumped by 2 after each word is written.  */
  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  /* Word 0: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding of
     a load-immediate into the static-chain register; confirm against
     the xstormy16 ISA.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 1: the static chain value (the immediate of the insn above).  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (reg_fnaddr, fnaddr);
  /* Words 2-3: 0x0200 combined with the low byte of FNADDR, then the
     remaining high bits -- presumably a jump to the nested function;
     confirm against the xstormy16 ISA.  */
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
/* Worker function for FUNCTION_VALUE: return the register holding a
   function's return value of type VALTYPE.  */
rtx
xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  mode = TYPE_MODE (valtype);
  /* Sub-word values are widened per the target's promotion rules.  */
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */
static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust `this' (DELTA truncated to 16 bits) and tail-jump to the
     real function with a far jump.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */
void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = DECL_RTL (decl);
  rtx symbol;

  /* "Below100" objects are laid out explicitly (label + .space) in the
     dedicated .bss_below100 section instead of using .comm.  */
  if (mem != NULL_RTX
      && GET_CODE (mem) == MEM
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      /* Convert ALIGN (a bit count) to the log2 byte alignment that
	 .p2align expects.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Ordinary common symbol: .local (when not global) followed by .comm.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the writable,
   uninitialized-data section used for "below100" objects (see
   xstormy16_asm_output_aligned_common).  */
static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  */
static void
xstormy16_encode_section_info (tree decl, rtx r, int first)
{
  default_encode_section_info (decl, r, first);

  /* Honor either spelling of the attribute on variables.  */
  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
	  || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      rtx symbol = XEXP (r, 0);

      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
      /* The flag is later tested by the below-100 output machinery
	 (see xstormy16_asm_output_aligned_common).  */
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
    }
}
1659 /* Output constructors and destructors. Just like
1660 default_named_section_asm_out_* but don't set the sections writable. */
1661 #undef TARGET_ASM_CONSTRUCTOR
1662 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1663 #undef TARGET_ASM_DESTRUCTOR
1664 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1666 static void
1667 xstormy16_asm_out_destructor (rtx symbol, int priority)
1669 const char *section = ".dtors";
1670 char buf[16];
1672 /* ??? This only works reliably with the GNU linker. */
1673 if (priority != DEFAULT_INIT_PRIORITY)
1675 sprintf (buf, ".dtors.%.5u",
1676 /* Invert the numbering so the linker puts us in the proper
1677 order; constructors are run from right to left, and the
1678 linker sorts in increasing order. */
1679 MAX_INIT_PRIORITY - priority);
1680 section = buf;
1683 switch_to_section (get_section (section, 0, NULL));
1684 assemble_align (POINTER_SIZE);
1685 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit SYMBOL as a constructor-table entry in .ctors (or a
   priority-suffixed .ctors.NNNNN section), leaving the section
   read-only.  */
static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
/* Print a memory address as an operand to reference that memory location.  */
void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (GET_CODE (address) == CONST_INT)
    {
      /* An absolute address, truncated to 16 bits.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  /* Strip a pre-decrement or post-increment wrapper, remembering
     which notation to print.  */
  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (GET_CODE (address) == REG);

  /* Register-indirect syntax: (--rN), (rN++), or (rN,offset).  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1761 /* Print an operand to an assembler instruction. */
1762 void
1763 xstormy16_print_operand (FILE *file, rtx x, int code)
1765 switch (code)
1767 case 'B':
1768 /* There is either one bit set, or one bit clear, in X.
1769 Print it preceded by '#'. */
1771 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1772 HOST_WIDE_INT xx = 1;
1773 HOST_WIDE_INT l;
1775 if (GET_CODE (x) == CONST_INT)
1776 xx = INTVAL (x);
1777 else
1778 output_operand_lossage ("'B' operand is not constant");
1780 /* GCC sign-extends masks with the MSB set, so we have to
1781 detect all the cases that differ only in sign extension
1782 beyond the bits we care about. Normally, the predicates
1783 and constraints ensure that we have the right values. This
1784 works correctly for valid masks. */
1785 if (bits_set[xx & 7] <= 1)
1787 /* Remove sign extension bits. */
1788 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1789 xx &= 0xff;
1790 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1791 xx &= 0xffff;
1792 l = exact_log2 (xx);
1794 else
1796 /* Add sign extension bits. */
1797 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1798 xx |= ~(HOST_WIDE_INT)0xff;
1799 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1800 xx |= ~(HOST_WIDE_INT)0xffff;
1801 l = exact_log2 (~xx);
1804 if (l == -1)
1805 output_operand_lossage ("'B' operand has multiple bits set");
1807 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1808 return;
1811 case 'C':
1812 /* Print the symbol without a surrounding @fptr(). */
1813 if (GET_CODE (x) == SYMBOL_REF)
1814 assemble_name (file, XSTR (x, 0));
1815 else if (GET_CODE (x) == LABEL_REF)
1816 output_asm_label (x);
1817 else
1818 xstormy16_print_operand_address (file, x);
1819 return;
1821 case 'o':
1822 case 'O':
1823 /* Print the immediate operand less one, preceded by '#'.
1824 For 'O', negate it first. */
1826 HOST_WIDE_INT xx = 0;
1828 if (GET_CODE (x) == CONST_INT)
1829 xx = INTVAL (x);
1830 else
1831 output_operand_lossage ("'o' operand is not constant");
1833 if (code == 'O')
1834 xx = -xx;
1836 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1837 return;
1840 case 'b':
1841 /* Print the shift mask for bp/bn. */
1843 HOST_WIDE_INT xx = 1;
1844 HOST_WIDE_INT l;
1846 if (GET_CODE (x) == CONST_INT)
1847 xx = INTVAL (x);
1848 else
1849 output_operand_lossage ("'B' operand is not constant");
1851 l = 7 - xx;
1853 fputs (IMMEDIATE_PREFIX, file);
1854 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1855 return;
1858 case 0:
1859 /* Handled below. */
1860 break;
1862 default:
1863 output_operand_lossage ("xstormy16_print_operand: unknown code");
1864 return;
1867 switch (GET_CODE (x))
1869 case REG:
1870 fputs (reg_names [REGNO (x)], file);
1871 break;
1873 case MEM:
1874 xstormy16_print_operand_address (file, XEXP (x, 0));
1875 break;
1877 default:
1878 /* Some kind of constant or label; an immediate operand,
1879 so prefix it with '#' for the assembler. */
1880 fputs (IMMEDIATE_PREFIX, file);
1881 output_addr_const (file, x);
1882 break;
1885 return;
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND+RANGE-1.
*/
void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Rebase INDEX to zero; the unsigned GTU comparison then catches
     both below-range and above-range values in one branch.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  /* Shift left by 2 to scale the index to 4-byte table entries (each
     entry is one 'jmpf' -- see xstormy16_output_addr_vec).  */
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
   instructions, without label or alignment or any other special
   constructs.  We know that the previous instruction will be the
   `tablejump_pcrel' output above.

   TODO: it might be nice to output 'br' instructions if they could
   all reach.  */
void
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
{
  int vlen, idx;

  /* The table goes into the text section, directly after the
     tablejump that indexes it.  */
  switch_to_section (current_function_section ());

  vlen = XVECLEN (table, 0);
  for (idx = 0; idx < vlen; idx++)
    {
      /* One far jump per table entry.  */
      fputs ("\tjmpf ", file);
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
      fputc ('\n', file);
    }
}
/* Expander for the `call' patterns.
   RETVAL is the register that receives the result, or NULL for a call
     whose value is unused.
   DEST is the callee, expressed as a MEM.
   COUNTER becomes the second operand of the emitted CALL rtx.
   (The previous comment here was a copy-paste from `casesi' and
   described parameters this function does not have.)  */
void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  gcc_assert (GET_CODE (dest) == MEM);
  dest = XEXP (dest, 0);

  /* Indirect call targets must be in a register.  */
  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* NOTE(review): for non-constant targets a fresh pseudo is zeroed
     and attached via USE -- presumably to satisfy the call pattern in
     stormy16.md; confirm there.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).  The operands are processed one word at a
   time, low word first, threading the carry between words.  */
void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1, rtx carry)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is computed as 0 - SRC1, reusing the subtract path.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Extract the I'th word of each operand.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding a zero low word is a no-op; skip it.  */
	  if (firstloop
	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;

	  /* The first word only produces carry; later words both
	     consume and produce it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For the final word of a compare, build a combined
	     subtract / carry-clobber / conditional-branch PARALLEL.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, carry));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, carry);
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: OR/XOR with 0, AND with all-ones
	     (-(code == AND) is -1 for AND, 0 otherwise).  */
	  if (GET_CODE (w_src1) == CONST_INT
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2097 /* The shift operations are split at output time for constant values;
2098 variable-width shifts get handed off to a library routine.
2100 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2101 SIZE_R will be a CONST_INT, X will be a hard register. */
2103 const char *
2104 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2105 rtx x, rtx size_r, rtx temp)
2107 HOST_WIDE_INT size;
2108 const char *r0, *r1, *rt;
2109 static char r[64];
2111 gcc_assert (GET_CODE (size_r) == CONST_INT
2112 && GET_CODE (x) == REG && mode == SImode);
2113 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2115 if (size == 0)
2116 return "";
2118 r0 = reg_names [REGNO (x)];
2119 r1 = reg_names [REGNO (x) + 1];
2121 /* For shifts of size 1, we can use the rotate instructions. */
2122 if (size == 1)
2124 switch (code)
2126 case ASHIFT:
2127 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2128 break;
2129 case ASHIFTRT:
2130 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2131 break;
2132 case LSHIFTRT:
2133 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2134 break;
2135 default:
2136 gcc_unreachable ();
2138 return r;
2141 /* For large shifts, there are easy special cases. */
2142 if (size == 16)
2144 switch (code)
2146 case ASHIFT:
2147 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2148 break;
2149 case ASHIFTRT:
2150 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2151 break;
2152 case LSHIFTRT:
2153 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2154 break;
2155 default:
2156 gcc_unreachable ();
2158 return r;
2160 if (size > 16)
2162 switch (code)
2164 case ASHIFT:
2165 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2166 r1, r0, r0, r1, (int) size - 16);
2167 break;
2168 case ASHIFTRT:
2169 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2170 r0, r1, r1, r0, (int) size - 16);
2171 break;
2172 case LSHIFTRT:
2173 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2174 r0, r1, r1, r0, (int) size - 16);
2175 break;
2176 default:
2177 gcc_unreachable ();
2179 return r;
2182 /* For the rest, we have to do more work. In particular, we
2183 need a temporary. */
2184 rt = reg_names [REGNO (temp)];
2185 switch (code)
2187 case ASHIFT:
2188 sprintf (r,
2189 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2190 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2191 r1, rt);
2192 break;
2193 case ASHIFTRT:
2194 sprintf (r,
2195 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2196 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2197 r0, rt);
2198 break;
2199 case LSHIFTRT:
2200 sprintf (r,
2201 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2202 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2203 r0, rt);
2204 break;
2205 default:
2206 gcc_unreachable ();
2208 return r;
2211 /* Attribute handling. */
2213 /* Return nonzero if the function is an interrupt function. */
2215 xstormy16_interrupt_function_p (void)
2217 tree attributes;
2219 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2220 any functions are declared, which is demonstrably wrong, but
2221 it is worked around here. FIXME. */
2222 if (!cfun)
2223 return 0;
2225 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2226 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes recognized by this target: "interrupt"
   applies to function types; "BELOW100"/"below100" are validated by
   xstormy16_handle_below100_attribute (variables, pointer types, and
   type decls).  Both spellings of the below100 attribute share one
   handler.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler }  */
  { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
  { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2245 /* Handle an "interrupt" attribute;
2246 arguments as in struct attribute_spec.handler. */
2247 static tree
2248 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2249 tree args ATTRIBUTE_UNUSED,
2250 int flags ATTRIBUTE_UNUSED,
2251 bool *no_add_attrs)
2253 if (TREE_CODE (*node) != FUNCTION_TYPE)
2255 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2256 IDENTIFIER_POINTER (name));
2257 *no_add_attrs = true;
2260 return NULL_TREE;
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */
static tree
xstormy16_handle_below100_attribute (tree *node,
				     tree name ATTRIBUTE_UNUSED,
				     tree args ATTRIBUTE_UNUSED,
				     int flags ATTRIBUTE_UNUSED,
				     bool *no_add_attrs)
{
  /* Only variables, pointer types, and type decls may be placed in the
     below-100 address space.  */
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes,
	       "%<__BELOW100__%> attribute only applies to variables");
      *no_add_attrs = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* An automatic variable lives on the stack, so it cannot be given
	 a fixed below-100 address.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
		   "with auto storage class");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Descriptor table for the machine-specific builtins.  Each entry names
   a builtin, the insn pattern implementing it, and two parallel strings
   describing the insn's operands.  The table index of an entry is also
   used as the builtin's function code (see xstormy16_init_builtins and
   xstormy16_expand_builtin).  The table ends with an all-zero entry.
   Note that each div/mod pair shares one insn pattern and differs only
   in which operand ('r' vs 't') is the returned value.  */
static struct {
  const char *name;	 /* User-visible name of the builtin.  */
  int md_code;		 /* CODE_FOR_* of the implementing pattern.  */
  const char *arg_ops; /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};
2311 static void
2312 xstormy16_init_builtins (void)
2314 tree args, ret_type, arg;
2315 int i, a;
2317 ret_type = void_type_node;
2319 for (i=0; s16builtins[i].name; i++)
2321 args = void_list_node;
2322 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2324 switch (s16builtins[i].arg_types[a])
2326 case 's': arg = short_integer_type_node; break;
2327 case 'S': arg = short_unsigned_type_node; break;
2328 case 'l': arg = long_integer_type_node; break;
2329 case 'L': arg = long_unsigned_type_node; break;
2330 default: gcc_unreachable ();
2332 if (a == 0)
2333 ret_type = arg;
2334 else
2335 args = tree_cons (NULL_TREE, arg, args);
2337 lang_hooks.builtin_function (s16builtins[i].name,
2338 build_function_type (ret_type, args),
2339 i, BUILT_IN_MD, NULL, NULL);
/* Worker for TARGET_EXPAND_BUILTIN: expand a call EXP to one of the
   builtins in s16builtins[] by emitting its insn pattern directly.
   TARGET, if non-null, is a suggested location for the result.
   Returns the rtx holding the result (0 if the pattern has no 'r'
   operand).  */
static rtx
xstormy16_expand_builtin(tree exp, rtx target,
			 rtx subtarget ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED,
			 int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* The function code assigned in xstormy16_init_builtins is the index
     into s16builtins[].  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand up to ten actual arguments into rtl.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  /* Build one insn operand per character of arg_ops: 'r' is the value
     returned to the caller, 't' a scratch register, and a digit selects
     the corresponding expanded argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand does not satisfy the pattern's predicate,
	 substitute a pseudo register.  For an output operand ('+'/'=')
	 remember the original location so the result can be copied
	 back after the insn; for an input, copy the value in now.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* NOTE(review): all ten op[] slots are passed, but only the first
     strlen (arg_ops) were initialized above; the generator ignores the
     extras, though passing indeterminate values is technically
     undefined behavior -- worth confirming/cleaning up.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy results out of any substitute output registers, and make sure
     the returned rtx refers to the caller-visible location.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch.  If it compares against zero a register
   that was set up by a nearby load (optionally through an AND /
   sign-extend / right-shift), rewrite the branch condition to test the
   relevant bit of the memory location directly and delete the now-dead
   feeder insns.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit; they are fed by a
	 sign-extend rather than an AND.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* Only handle "reg <op> 0", and only when the register dies at the
     branch -- we are about to delete the insns that compute it.  */
  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  int and_code = recog_memoized (and);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and = insn;
	      break;
	    }

	  /* Abort the backward scan at any other use of REG, or at
	     anything that is not a note or a simple insn.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  if (recog_memoized (and) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}

      if (and)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	       (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			  (clobber (reg:BI carry))]

	       (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* The shift is optional: if the scan fails, just proceed
		 without one.  */
	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }
  if (!and)
    return;

  /* Now scan backwards for the load that feeds the AND/extend; it must
     be from a non-volatile below-100 address.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign tests examine the top bit of the loaded value.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND mask must select exactly one bit; undo any preceding
	 right-shift by shifting the mask back up.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow a word access to the byte actually containing the bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the memory bit directly, and
     force re-recognition of the modified insn.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2626 static void
2627 xstormy16_reorg (void)
2629 rtx insn;
2631 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2633 if (! JUMP_P (insn))
2634 continue;
2635 combine_bnp (insn);
2640 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2642 static bool
2643 xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2645 HOST_WIDE_INT size = int_size_in_bytes (type);
2646 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Assembler directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Thunk output; vcall offsets are not supported.  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* RTX and address cost estimation.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs handling.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg

/* Always promote sub-word arguments, return values, and prototyped
   arguments (hook_bool_tree_true answers "yes" unconditionally).  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Instantiate the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
2686 #include "gt-stormy16.h"