Merged r158465 through r158660 into branch.
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blobc3627ca6e96cd09d3af8b3031ced72a92f8d19de
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "toplev.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "except.h"
43 #include "function.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "tm_p.h"
47 #include "langhooks.h"
48 #include "gimple.h"
49 #include "df.h"
50 #include "ggc.h"
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 HOST_WIDE_INT, tree);
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 static GTY(()) section *bss100_section;
66 /* Compute a (partial) cost for rtx X. Return true if the complete
67 cost has been computed, and false if subexpressions should be
68 scanned. In either case, *TOTAL contains the cost result. */
70 static bool
71 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
72 int *total, bool speed ATTRIBUTE_UNUSED)
74 switch (code)
76 case CONST_INT:
77 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
78 *total = COSTS_N_INSNS (1) / 2;
79 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1);
81 else
82 *total = COSTS_N_INSNS (2);
83 return true;
85 case CONST_DOUBLE:
86 case CONST:
87 case SYMBOL_REF:
88 case LABEL_REF:
89 *total = COSTS_N_INSNS (2);
90 return true;
92 case MULT:
93 *total = COSTS_N_INSNS (35 + 6);
94 return true;
95 case DIV:
96 *total = COSTS_N_INSNS (51 - 6);
97 return true;
99 default:
100 return false;
104 static int
105 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
107 return (GET_CODE (x) == CONST_INT ? 2
108 : GET_CODE (x) == PLUS ? 7
109 : 5);
112 /* Branches are handled as follows:
114 1. HImode compare-and-branches. The machine supports these
115 natively, so the appropriate pattern is emitted directly.
117 2. SImode EQ and NE. These are emitted as pairs of HImode
118 compare-and-branches.
120 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
121 of a SImode subtract followed by a branch (not a compare-and-branch),
122 like this:
127 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
132 bne. */
134 /* Emit a branch of kind CODE to location LOC. */
136 void
137 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
139 rtx condition_rtx, loc_ref, branch, cy_clobber;
140 rtvec vec;
141 enum machine_mode mode;
143 mode = GET_MODE (op0);
144 gcc_assert (mode == HImode || mode == SImode);
146 if (mode == SImode
147 && (code == GT || code == LE || code == GTU || code == LEU))
149 int unsigned_p = (code == GTU || code == LEU);
150 int gt_p = (code == GT || code == GTU);
151 rtx lab = NULL_RTX;
153 if (gt_p)
154 lab = gen_label_rtx ();
155 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
156 /* This should be generated as a comparison against the temporary
157 created by the previous insn, but reload can't handle that. */
158 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
159 if (gt_p)
160 emit_label (lab);
161 return;
163 else if (mode == SImode
164 && (code == NE || code == EQ)
165 && op1 != const0_rtx)
167 rtx op0_word, op1_word;
168 rtx lab = NULL_RTX;
169 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
170 int i;
172 if (code == EQ)
173 lab = gen_label_rtx ();
175 for (i = 0; i < num_words - 1; i++)
177 op0_word = simplify_gen_subreg (word_mode, op0, mode,
178 i * UNITS_PER_WORD);
179 op1_word = simplify_gen_subreg (word_mode, op1, mode,
180 i * UNITS_PER_WORD);
181 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
183 op0_word = simplify_gen_subreg (word_mode, op0, mode,
184 i * UNITS_PER_WORD);
185 op1_word = simplify_gen_subreg (word_mode, op1, mode,
186 i * UNITS_PER_WORD);
187 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
189 if (code == EQ)
190 emit_label (lab);
191 return;
194 /* We can't allow reload to try to generate any reload after a branch,
195 so when some register must match we must make the temporary ourselves. */
196 if (mode != HImode)
198 rtx tmp;
199 tmp = gen_reg_rtx (mode);
200 emit_move_insn (tmp, op0);
201 op0 = tmp;
204 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
205 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
206 branch = gen_rtx_SET (VOIDmode, pc_rtx,
207 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
208 loc_ref, pc_rtx));
210 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
212 if (mode == HImode)
213 vec = gen_rtvec (2, branch, cy_clobber);
214 else if (code == NE || code == EQ)
215 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
216 else
218 rtx sub;
219 #if 0
220 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
221 #else
222 sub = gen_rtx_CLOBBER (SImode, op0);
223 #endif
224 vec = gen_rtvec (3, branch, sub, cy_clobber);
227 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
230 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
231 the arithmetic operation. Most of the work is done by
232 xstormy16_expand_arith. */
234 void
235 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
236 rtx dest)
238 rtx op0 = XEXP (comparison, 0);
239 rtx op1 = XEXP (comparison, 1);
240 rtx seq, last_insn;
241 rtx compare;
243 start_sequence ();
244 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
245 seq = get_insns ();
246 end_sequence ();
248 gcc_assert (INSN_P (seq));
250 last_insn = seq;
251 while (NEXT_INSN (last_insn) != NULL_RTX)
252 last_insn = NEXT_INSN (last_insn);
254 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
255 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
256 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
257 emit_insn (seq);
261 /* Return the string to output a conditional branch to LABEL, which is
262 the operand number of the label.
264 OP is the conditional expression, or NULL for branch-always.
266 REVERSED is nonzero if we should reverse the sense of the comparison.
268 INSN is the insn. */
270 char *
271 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
273 static char string[64];
274 int need_longbranch = (op != NULL_RTX
275 ? get_attr_length (insn) == 8
276 : get_attr_length (insn) == 4);
277 int really_reversed = reversed ^ need_longbranch;
278 const char *ccode;
279 const char *templ;
280 const char *operands;
281 enum rtx_code code;
283 if (! op)
285 if (need_longbranch)
286 ccode = "jmpf";
287 else
288 ccode = "br";
289 sprintf (string, "%s %s", ccode, label);
290 return string;
293 code = GET_CODE (op);
295 if (GET_CODE (XEXP (op, 0)) != REG)
297 code = swap_condition (code);
298 operands = "%3,%2";
300 else
301 operands = "%2,%3";
303 /* Work out which way this really branches. */
304 if (really_reversed)
305 code = reverse_condition (code);
307 switch (code)
309 case EQ: ccode = "z"; break;
310 case NE: ccode = "nz"; break;
311 case GE: ccode = "ge"; break;
312 case LT: ccode = "lt"; break;
313 case GT: ccode = "gt"; break;
314 case LE: ccode = "le"; break;
315 case GEU: ccode = "nc"; break;
316 case LTU: ccode = "c"; break;
317 case GTU: ccode = "hi"; break;
318 case LEU: ccode = "ls"; break;
320 default:
321 gcc_unreachable ();
324 if (need_longbranch)
325 templ = "b%s %s,.+8 | jmpf %s";
326 else
327 templ = "b%s %s,%s";
328 sprintf (string, templ, ccode, operands, label);
330 return string;
333 /* Return the string to output a conditional branch to LABEL, which is
334 the operand number of the label, but suitable for the tail of a
335 SImode branch.
337 OP is the conditional expression (OP is never NULL_RTX).
339 REVERSED is nonzero if we should reverse the sense of the comparison.
341 INSN is the insn. */
343 char *
344 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
346 static char string[64];
347 int need_longbranch = get_attr_length (insn) >= 8;
348 int really_reversed = reversed ^ need_longbranch;
349 const char *ccode;
350 const char *templ;
351 char prevop[16];
352 enum rtx_code code;
354 code = GET_CODE (op);
356 /* Work out which way this really branches. */
357 if (really_reversed)
358 code = reverse_condition (code);
360 switch (code)
362 case EQ: ccode = "z"; break;
363 case NE: ccode = "nz"; break;
364 case GE: ccode = "ge"; break;
365 case LT: ccode = "lt"; break;
366 case GEU: ccode = "nc"; break;
367 case LTU: ccode = "c"; break;
369 /* The missing codes above should never be generated. */
370 default:
371 gcc_unreachable ();
374 switch (code)
376 case EQ: case NE:
378 int regnum;
380 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
382 regnum = REGNO (XEXP (op, 0));
383 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
385 break;
387 case GE: case LT: case GEU: case LTU:
388 strcpy (prevop, "sbc %2,%3");
389 break;
391 default:
392 gcc_unreachable ();
395 if (need_longbranch)
396 templ = "%s | b%s .+6 | jmpf %s";
397 else
398 templ = "%s | b%s %s";
399 sprintf (string, templ, prevop, ccode, label);
401 return string;
404 /* Many machines have some registers that cannot be copied directly to or from
405 memory or even from other types of registers. An example is the `MQ'
406 register, which on most machines, can only be copied to or from general
407 registers, but not memory. Some machines allow copying all registers to and
408 from memory, but require a scratch register for stores to some memory
409 locations (e.g., those with symbolic address on the RT, and those with
410 certain symbolic address on the SPARC when compiling PIC). In some cases,
411 both an intermediate and a scratch register are required.
413 You should define these macros to indicate to the reload phase that it may
414 need to allocate at least one register for a reload in addition to the
415 register to contain the data. Specifically, if copying X to a register
416 RCLASS in MODE requires an intermediate register, you should define
417 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
418 whose registers can be used as intermediate registers or scratch registers.
420 If copying a register RCLASS in MODE to X requires an intermediate or scratch
421 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
422 largest register class required. If the requirements for input and output
423 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
424 instead of defining both macros identically.
426 The values returned by these macros are often `GENERAL_REGS'. Return
427 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
428 to or from a register of RCLASS in MODE without requiring a scratch register.
429 Do not define this macro if it would always return `NO_REGS'.
431 If a scratch register is required (either with or without an intermediate
432 register), you should define patterns for `reload_inM' or `reload_outM', as
433 required.. These patterns, which will normally be implemented with a
434 `define_expand', should be similar to the `movM' patterns, except that
435 operand 2 is the scratch register.
437 Define constraints for the reload register and scratch register that contain
438 a single register class. If the original reload register (whose class is
439 RCLASS) can meet the constraint given in the pattern, the value returned by
440 these macros is used for the class of the scratch register. Otherwise, two
441 additional reload registers are required. Their classes are obtained from
442 the constraints in the insn pattern.
444 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
445 either be in a hard register or in memory. Use `true_regnum' to find out;
446 it will return -1 if the pseudo is in memory and the hard register number if
447 it is in a register.
449 These macros should not be used in the case where a particular class of
450 registers can only be copied to memory and not to another class of
451 registers. In that case, secondary reload registers are not needed and
452 would not be helpful. Instead, a stack location must be used to perform the
453 copy and the `movM' pattern should use memory as an intermediate storage.
454 This case often occurs between floating-point and general registers. */
456 enum reg_class
457 xstormy16_secondary_reload_class (enum reg_class rclass,
458 enum machine_mode mode,
459 rtx x)
461 /* This chip has the interesting property that only the first eight
462 registers can be moved to/from memory. */
463 if ((GET_CODE (x) == MEM
464 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
465 && (true_regnum (x) == -1
466 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
467 && ! reg_class_subset_p (rclass, EIGHT_REGS))
468 return EIGHT_REGS;
470 return NO_REGS;
473 enum reg_class
474 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
476 if (rclass == GENERAL_REGS
477 && GET_CODE (x) == MEM)
478 return EIGHT_REGS;
480 return rclass;
483 /* Predicate for symbols and addresses that reflect special 8-bit
484 addressing. */
487 xstormy16_below100_symbol (rtx x,
488 enum machine_mode mode ATTRIBUTE_UNUSED)
490 if (GET_CODE (x) == CONST)
491 x = XEXP (x, 0);
492 if (GET_CODE (x) == PLUS
493 && GET_CODE (XEXP (x, 1)) == CONST_INT)
494 x = XEXP (x, 0);
496 if (GET_CODE (x) == SYMBOL_REF)
497 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
499 if (GET_CODE (x) == CONST_INT)
501 HOST_WIDE_INT i = INTVAL (x);
502 if ((i >= 0x0000 && i <= 0x00ff)
503 || (i >= 0x7f00 && i <= 0x7fff))
504 return 1;
506 return 0;
509 /* Likewise, but only for non-volatile MEMs, for patterns where the
510 MEM will get split into smaller sized accesses. */
513 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
515 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
516 return 0;
517 return xstormy16_below100_operand (x, mode);
520 /* Expand an 8-bit IOR. This either detects the one case we can
521 actually do, or uses a 16-bit IOR. */
523 void
524 xstormy16_expand_iorqi3 (rtx *operands)
526 rtx in, out, outsub, val;
528 out = operands[0];
529 in = operands[1];
530 val = operands[2];
532 if (xstormy16_onebit_set_operand (val, QImode))
534 if (!xstormy16_below100_or_register (in, QImode))
535 in = copy_to_mode_reg (QImode, in);
536 if (!xstormy16_below100_or_register (out, QImode))
537 out = gen_reg_rtx (QImode);
538 emit_insn (gen_iorqi3_internal (out, in, val));
539 if (out != operands[0])
540 emit_move_insn (operands[0], out);
541 return;
544 if (GET_CODE (in) != REG)
545 in = copy_to_mode_reg (QImode, in);
546 if (GET_CODE (val) != REG
547 && GET_CODE (val) != CONST_INT)
548 val = copy_to_mode_reg (QImode, val);
549 if (GET_CODE (out) != REG)
550 out = gen_reg_rtx (QImode);
552 in = simplify_gen_subreg (HImode, in, QImode, 0);
553 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
554 if (GET_CODE (val) != CONST_INT)
555 val = simplify_gen_subreg (HImode, val, QImode, 0);
557 emit_insn (gen_iorhi3 (outsub, in, val));
559 if (out != operands[0])
560 emit_move_insn (operands[0], out);
563 /* Expand an 8-bit AND. This either detects the one case we can
564 actually do, or uses a 16-bit AND. */
566 void
567 xstormy16_expand_andqi3 (rtx *operands)
569 rtx in, out, outsub, val;
571 out = operands[0];
572 in = operands[1];
573 val = operands[2];
575 if (xstormy16_onebit_clr_operand (val, QImode))
577 if (!xstormy16_below100_or_register (in, QImode))
578 in = copy_to_mode_reg (QImode, in);
579 if (!xstormy16_below100_or_register (out, QImode))
580 out = gen_reg_rtx (QImode);
581 emit_insn (gen_andqi3_internal (out, in, val));
582 if (out != operands[0])
583 emit_move_insn (operands[0], out);
584 return;
587 if (GET_CODE (in) != REG)
588 in = copy_to_mode_reg (QImode, in);
589 if (GET_CODE (val) != REG
590 && GET_CODE (val) != CONST_INT)
591 val = copy_to_mode_reg (QImode, val);
592 if (GET_CODE (out) != REG)
593 out = gen_reg_rtx (QImode);
595 in = simplify_gen_subreg (HImode, in, QImode, 0);
596 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
597 if (GET_CODE (val) != CONST_INT)
598 val = simplify_gen_subreg (HImode, val, QImode, 0);
600 emit_insn (gen_andhi3 (outsub, in, val));
602 if (out != operands[0])
603 emit_move_insn (operands[0], out);
/* Nonzero iff X is a CONST_INT such that INTVAL (X) + OFFSET lies in
   [-2048, 2047] — the signed displacement range accepted here.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (GET_CODE (X) == CONST_INT						\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero iff X is a CONST_INT such that INTVAL (X) + OFFSET is a
   valid absolute address: in [0, 0x8000) and inside either the
   low window [0, 0x100) or the high window [0x7F00, 0x8000).  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (GET_CODE (X) == CONST_INT						\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
616 static bool
617 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
618 rtx x, bool strict)
620 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
621 return 1;
623 if (GET_CODE (x) == PLUS
624 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
626 x = XEXP (x, 0);
627 /* PR 31232: Do not allow INT+INT as an address. */
628 if (GET_CODE (x) == CONST_INT)
629 return 0;
632 if ((GET_CODE (x) == PRE_MODIFY
633 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
634 || GET_CODE (x) == POST_INC
635 || GET_CODE (x) == PRE_DEC)
636 x = XEXP (x, 0);
638 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
639 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
640 return 1;
642 if (xstormy16_below100_symbol (x, mode))
643 return 1;
645 return 0;
648 /* Return nonzero if memory address X (an RTX) can have different
649 meanings depending on the machine mode of the memory reference it
650 is used for or if the address is valid for some modes but not
651 others.
653 Autoincrement and autodecrement addresses typically have mode-dependent
654 effects because the amount of the increment or decrement is the size of the
655 operand being addressed. Some machines have other mode-dependent addresses.
656 Many RISC machines have no mode-dependent addresses.
658 You may assume that ADDR is a valid address for the machine.
660 On this chip, this is true if the address is valid with an offset
661 of 0 but not of 6, because in that case it cannot be used as an
662 address for DImode or DFmode, or if the address is a post-increment
663 or pre-decrement address. */
666 xstormy16_mode_dependent_address_p (rtx x)
668 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
669 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
670 return 1;
672 if (GET_CODE (x) == PLUS
673 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
674 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
675 return 1;
677 if (GET_CODE (x) == PLUS)
678 x = XEXP (x, 0);
680 /* Auto-increment addresses are now treated generically in recog.c. */
681 return 0;
684 /* A C expression that defines the optional machine-dependent constraint
685 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
686 types of operands, usually memory references, for the target machine.
687 Normally this macro will not be defined. If it is required for a particular
688 target machine, it should return 1 if VALUE corresponds to the operand type
689 represented by the constraint letter C. If C is not defined as an extra
690 constraint, the value returned should be 0 regardless of VALUE. */
693 xstormy16_extra_constraint_p (rtx x, int c)
695 switch (c)
697 /* 'Q' is for pushes. */
698 case 'Q':
699 return (GET_CODE (x) == MEM
700 && GET_CODE (XEXP (x, 0)) == POST_INC
701 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
703 /* 'R' is for pops. */
704 case 'R':
705 return (GET_CODE (x) == MEM
706 && GET_CODE (XEXP (x, 0)) == PRE_DEC
707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
709 /* 'S' is for immediate memory addresses. */
710 case 'S':
711 return (GET_CODE (x) == MEM
712 && GET_CODE (XEXP (x, 0)) == CONST_INT
713 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
715 /* 'T' is for Rx. */
716 case 'T':
717 /* Not implemented yet. */
718 return 0;
720 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
721 for allocating a scratch register for 32-bit shifts. */
722 case 'U':
723 return (GET_CODE (x) == CONST_INT
724 && (INTVAL (x) < 2 || INTVAL (x) > 15));
726 /* 'Z' is for CONST_INT value zero. This is for adding zero to
727 a register in addhi3, which would otherwise require a carry. */
728 case 'Z':
729 return (GET_CODE (x) == CONST_INT
730 && (INTVAL (x) == 0));
732 case 'W':
733 return xstormy16_below100_operand (x, GET_MODE (x));
735 default:
736 return 0;
741 short_memory_operand (rtx x, enum machine_mode mode)
743 if (! memory_operand (x, mode))
744 return 0;
745 return (GET_CODE (XEXP (x, 0)) != PLUS);
748 /* Splitter for the 'move' patterns, for modes not directly implemented
749 by hardware. Emit insns to copy a value of mode MODE from SRC to
750 DEST.
752 This function is only called when reload_completed. */
754 void
755 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
757 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
758 int direction, end, i;
759 int src_modifies = 0;
760 int dest_modifies = 0;
761 int src_volatile = 0;
762 int dest_volatile = 0;
763 rtx mem_operand;
764 rtx auto_inc_reg_rtx = NULL_RTX;
766 /* Check initial conditions. */
767 gcc_assert (reload_completed
768 && mode != QImode && mode != HImode
769 && nonimmediate_operand (dest, mode)
770 && general_operand (src, mode));
772 /* This case is not supported below, and shouldn't be generated. */
773 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
775 /* This case is very very bad after reload, so trap it now. */
776 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
778 /* The general idea is to copy by words, offsetting the source and
779 destination. Normally the least-significant word will be copied
780 first, but for pre-dec operations it's better to copy the
781 most-significant word first. Only one operand can be a pre-dec
782 or post-inc operand.
784 It's also possible that the copy overlaps so that the direction
785 must be reversed. */
786 direction = 1;
788 if (GET_CODE (dest) == MEM)
790 mem_operand = XEXP (dest, 0);
791 dest_modifies = side_effects_p (mem_operand);
792 if (auto_inc_p (mem_operand))
793 auto_inc_reg_rtx = XEXP (mem_operand, 0);
794 dest_volatile = MEM_VOLATILE_P (dest);
795 if (dest_volatile)
797 dest = copy_rtx (dest);
798 MEM_VOLATILE_P (dest) = 0;
801 else if (GET_CODE (src) == MEM)
803 mem_operand = XEXP (src, 0);
804 src_modifies = side_effects_p (mem_operand);
805 if (auto_inc_p (mem_operand))
806 auto_inc_reg_rtx = XEXP (mem_operand, 0);
807 src_volatile = MEM_VOLATILE_P (src);
808 if (src_volatile)
810 src = copy_rtx (src);
811 MEM_VOLATILE_P (src) = 0;
814 else
815 mem_operand = NULL_RTX;
817 if (mem_operand == NULL_RTX)
819 if (GET_CODE (src) == REG
820 && GET_CODE (dest) == REG
821 && reg_overlap_mentioned_p (dest, src)
822 && REGNO (dest) > REGNO (src))
823 direction = -1;
825 else if (GET_CODE (mem_operand) == PRE_DEC
826 || (GET_CODE (mem_operand) == PLUS
827 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
828 direction = -1;
829 else if (GET_CODE (src) == MEM
830 && reg_overlap_mentioned_p (dest, src))
832 int regno;
834 gcc_assert (GET_CODE (dest) == REG);
835 regno = REGNO (dest);
837 gcc_assert (refers_to_regno_p (regno, regno + num_words,
838 mem_operand, 0));
840 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
841 direction = -1;
842 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
843 mem_operand, 0))
844 direction = 1;
845 else
846 /* This means something like
847 (set (reg:DI r0) (mem:DI (reg:HI r1)))
848 which we'd need to support by doing the set of the second word
849 last. */
850 gcc_unreachable ();
853 end = direction < 0 ? -1 : num_words;
854 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
856 rtx w_src, w_dest, insn;
858 if (src_modifies)
859 w_src = gen_rtx_MEM (word_mode, mem_operand);
860 else
861 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
862 if (src_volatile)
863 MEM_VOLATILE_P (w_src) = 1;
864 if (dest_modifies)
865 w_dest = gen_rtx_MEM (word_mode, mem_operand);
866 else
867 w_dest = simplify_gen_subreg (word_mode, dest, mode,
868 i * UNITS_PER_WORD);
869 if (dest_volatile)
870 MEM_VOLATILE_P (w_dest) = 1;
872 /* The simplify_subreg calls must always be able to simplify. */
873 gcc_assert (GET_CODE (w_src) != SUBREG
874 && GET_CODE (w_dest) != SUBREG);
876 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
877 if (auto_inc_reg_rtx)
878 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
879 auto_inc_reg_rtx,
880 REG_NOTES (insn));
884 /* Expander for the 'move' patterns. Emit insns to copy a value of
885 mode MODE from SRC to DEST. */
887 void
888 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
890 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
892 rtx pmv = XEXP (dest, 0);
893 rtx dest_reg = XEXP (pmv, 0);
894 rtx dest_mod = XEXP (pmv, 1);
895 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
896 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
898 dest = gen_rtx_MEM (mode, dest_reg);
899 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
901 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
903 rtx pmv = XEXP (src, 0);
904 rtx src_reg = XEXP (pmv, 0);
905 rtx src_mod = XEXP (pmv, 1);
906 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
907 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
909 src = gen_rtx_MEM (mode, src_reg);
910 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
913 /* There are only limited immediate-to-memory move instructions. */
914 if (! reload_in_progress
915 && ! reload_completed
916 && GET_CODE (dest) == MEM
917 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
918 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
919 && ! xstormy16_below100_operand (dest, mode)
920 && GET_CODE (src) != REG
921 && GET_CODE (src) != SUBREG)
922 src = copy_to_mode_reg (mode, src);
924 /* Don't emit something we would immediately split. */
925 if (reload_completed
926 && mode != HImode && mode != QImode)
928 xstormy16_split_move (mode, dest, src);
929 return;
932 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
935 /* Stack Layout:
937 The stack is laid out as follows:
939 SP->
940 FP-> Local variables
941 Register save area (up to 4 words)
942 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
944 AP-> Return address (two words)
945 9th procedure parameter word
946 10th procedure parameter word
948 last procedure parameter word
950 The frame pointer location is tuned to make it most likely that all
951 parameters and local variables can be accessed using a load-indexed
952 instruction. */
/* A structure to describe the computed stack frame layout.  */
struct xstormy16_stack_layout
{
  /* Sizes of the topmost three regions of the frame.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the three sizes above.  */
  int frame_size;
  /* Various offsets between frame-related pointers.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
/* Does REGNUM need to be saved in the prologue?  A call-saved register
   is saved when it is ever live; for an interrupt function (IFUN
   nonzero), call-used registers also need saving — except the carry
   flag, and except registers that are never live in a leaf function.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
976 /* Compute the stack layout. */
978 struct xstormy16_stack_layout
979 xstormy16_compute_stack_layout (void)
981 struct xstormy16_stack_layout layout;
982 int regno;
983 const int ifun = xstormy16_interrupt_function_p ();
985 layout.locals_size = get_frame_size ();
987 layout.register_save_size = 0;
988 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
989 if (REG_NEEDS_SAVE (regno, ifun))
990 layout.register_save_size += UNITS_PER_WORD;
992 if (cfun->stdarg)
993 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
994 else
995 layout.stdarg_save_size = 0;
997 layout.frame_size = (layout.locals_size
998 + layout.register_save_size
999 + layout.stdarg_save_size);
1001 if (crtl->args.size <= 2048 && crtl->args.size != -1)
1003 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1004 + crtl->args.size <= 2048)
1005 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
1006 else
1007 layout.fp_minus_ap = 2048 - crtl->args.size;
1009 else
1010 layout.fp_minus_ap = (layout.stdarg_save_size
1011 + layout.register_save_size
1012 - INCOMING_FRAME_SP_OFFSET);
1013 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1014 - layout.fp_minus_ap);
1015 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1016 return layout;
1019 /* Worker function for TARGET_CAN_ELIMINATE. */
1021 static bool
1022 xstormy16_can_eliminate (const int from, const int to)
1024 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1025 ? ! frame_pointer_needed
1026 : true);
1029 /* Determine how all the special registers get eliminated. */
1032 xstormy16_initial_elimination_offset (int from, int to)
1034 struct xstormy16_stack_layout layout;
1035 int result;
1037 layout = xstormy16_compute_stack_layout ();
1039 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1040 result = layout.sp_minus_fp - layout.locals_size;
1041 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1042 result = - layout.locals_size;
1043 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1044 result = - layout.fp_minus_ap;
1045 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1046 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1047 else
1048 gcc_unreachable ();
1050 return result;
1053 static rtx
1054 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1056 rtx set, clobber, insn;
1058 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1059 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1060 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1061 return insn;
/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (post_inc:HI sp)) -- a push -- reused for every register save.  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Describe the push to the unwinder as an explicit store plus
	   a stack-pointer adjustment, attached via
	   REG_FRAME_RELATED_EXPR.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Same store-plus-adjust unwinder note as for the stdarg
	   saves above.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
	{
	  insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
					 hard_frame_pointer_rtx,
					 GEN_INT (- layout.sp_minus_fp));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1176 /* Do we need an epilogue at all? */
1179 direct_return (void)
1181 return (reload_completed
1182 && xstormy16_compute_stack_layout ().frame_size == 0);
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (pre_dec:HI sp)) -- a pop -- reused for every restore.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* When the FP sits exactly at the bottom of the locals we can
	 restore SP with a single move instead of an add.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  Iterate in the reverse order of
     the pushes in the prologue, since the stack is LIFO.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  Interrupt handlers need the dedicated return pattern.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1234 xstormy16_epilogue_uses (int regno)
1236 if (reload_completed && call_used_regs[regno])
1238 const int ifun = xstormy16_interrupt_function_p ();
1239 return REG_NEEDS_SAVE (regno, ifun);
1241 return 0;
/* Profiling is not implemented for this target; report that to the
   user instead of silently emitting nothing.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1250 /* Return an updated summarizer variable CUM to advance past an
1251 argument in the argument list. The values MODE, TYPE and NAMED
1252 describe that argument. Once this is done, the variable CUM is
1253 suitable for analyzing the *following* argument with
1254 `FUNCTION_ARG', etc.
1256 This function need not do anything if the argument in question was
1257 passed on the stack. The compiler knows how to track the amount of
1258 stack space used for arguments without any special help. However,
1259 it makes life easier for xstormy16_build_va_list if it does update
1260 the word count. */
1262 CUMULATIVE_ARGS
1263 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1264 tree type, int named ATTRIBUTE_UNUSED)
1266 /* If an argument would otherwise be passed partially in registers,
1267 and partially on the stack, the whole of it is passed on the
1268 stack. */
1269 if (cum < NUM_ARGUMENT_REGISTERS
1270 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1271 cum = NUM_ARGUMENT_REGISTERS;
1273 cum += XSTORMY16_WORD_SIZE (type, mode);
1275 return cum;
/* Return the register in which to pass the next argument, or NULL_RTX
   if it goes on the stack.  CUM is the number of argument words already
   used; MODE/TYPE describe the argument.  */

rtx
xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
			tree type, int named ATTRIBUTE_UNUSED)
{
  /* The end-of-arguments marker.  */
  if (mode == VOIDmode)
    return const0_rtx;

  /* Arguments that must live in memory, or that would not fit in the
     remaining argument registers, are passed on the stack.  */
  if (targetm.calls.must_pass_in_stack (mode, type)
      || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return NULL_RTX;

  /* Argument word N lives in hard register N + 2 -- the offset matches
     FIRST_ARGUMENT_REGISTER; TODO confirm against the header.  */
  return gen_rtx_REG (mode, cum + 2);
}
1290 /* Build the va_list type.
1292 For this chip, va_list is a record containing a counter and a pointer.
1293 The counter is of type 'int' and indicates how many bytes
1294 have been used to date. The pointer indicates the stack position
1295 for arguments that have not been passed in registers.
1296 To keep the layout nice, the pointer is first in the structure. */
1298 static tree
1299 xstormy16_build_builtin_va_list (void)
1301 tree f_1, f_2, record, type_decl;
1303 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1304 type_decl = build_decl (BUILTINS_LOCATION,
1305 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1307 f_1 = build_decl (BUILTINS_LOCATION,
1308 FIELD_DECL, get_identifier ("base"),
1309 ptr_type_node);
1310 f_2 = build_decl (BUILTINS_LOCATION,
1311 FIELD_DECL, get_identifier ("count"),
1312 unsigned_type_node);
1314 DECL_FIELD_CONTEXT (f_1) = record;
1315 DECL_FIELD_CONTEXT (f_2) = record;
1317 TREE_CHAIN (record) = type_decl;
1318 TYPE_NAME (record) = type_decl;
1319 TYPE_FIELDS (record) = f_1;
1320 TREE_CHAIN (f_1) = f_2;
1322 layout_type (record);
1324 return record;
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The two fields of the va_list record built by
     xstormy16_build_builtin_va_list: base (pointer) then count.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = virtual_incoming_args - INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of named arguments already consumed (argument words
     recorded in crtl->args.info, scaled to bytes).  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The va_list fields: base (pointer) then count (bytes used).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument slots are whole words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* If count + size > size_of_reg_args, the argument was passed on
	 the stack: branch to lab_fromstack.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: the saved registers live at base + count.  */
      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* If count_tmp < size_of_reg_args, bump it to size_of_reg_args
	 so the address computation below lands past the register save
	 area.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: addr = base - (count_tmp - size_of_reg_args
     - INCOMING_FRAME_SP_OFFSET + size).  The expression is built
     positively and then negated.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Advance the counter past this argument.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
/* Worker function for TARGET_TRAMPOLINE_INIT.  Write the four 16-bit
   words of the trampoline into the memory block M_TRAMP: a
   load-immediate of the static chain, followed by a far jump to
   FNDECL.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding of
     a move-immediate into the static chain register; confirm against
     the xstormy16 ISA reference.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value (the immediate operand of word 0).  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: 0x0200 | (low byte of the target address) -- presumably the
     first word of a jmpf; the remaining address bits follow.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the target address shifted right by 8.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1493 /* Worker function for FUNCTION_VALUE. */
1496 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1498 enum machine_mode mode;
1499 mode = TYPE_MODE (valtype);
1500 PROMOTE_MODE (mode, 0, valtype);
1501 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1504 /* A C compound statement that outputs the assembler code for a thunk function,
1505 used to implement C++ virtual function calls with multiple inheritance. The
1506 thunk acts as a wrapper around a virtual function, adjusting the implicit
1507 object parameter before handing control off to the real function.
1509 First, emit code to add the integer DELTA to the location that contains the
1510 incoming first argument. Assume that this argument contains a pointer, and
1511 is the one used to pass the `this' pointer in C++. This is the incoming
1512 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1513 addition must preserve the values of all other incoming arguments.
1515 After the addition, emit code to jump to FUNCTION, which is a
1516 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1517 the return address. Hence returning from FUNCTION will return to whoever
1518 called the current `thunk'.
1520 The effect must be as if @var{function} had been called directly
1521 with the adjusted first argument. This macro is responsible for
1522 emitting all of the code for a thunk function;
1523 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1524 not invoked.
1526 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1527 extracted from it.) It might possibly be useful on some targets, but
1528 probably not. */
1530 static void
1531 xstormy16_asm_output_mi_thunk (FILE *file,
1532 tree thunk_fndecl ATTRIBUTE_UNUSED,
1533 HOST_WIDE_INT delta,
1534 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1535 tree function)
1537 int regnum = FIRST_ARGUMENT_REGISTER;
1539 /* There might be a hidden first argument for a returned structure. */
1540 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1541 regnum += 1;
1543 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1544 fputs ("\tjmpf ", file);
1545 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1546 putc ('\n', file);
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Below-100 objects get laid out by hand in the .bss_below100
     section instead of via a .comm directive.  */
  if (mem != NULL_RTX
      && GET_CODE (mem) == MEM
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      /* Convert ALIGN (in bits) to a .p2align argument.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Everything else: the ordinary .local/.comm directives.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  /* Register the writable, uninitialized .bss_below100 section used
     for objects carrying the below100 attribute.  */
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1616 /* Mark symbols with the "below100" attribute so that we can use the
1617 special addressing modes for them. */
1619 static void
1620 xstormy16_encode_section_info (tree decl, rtx r, int first)
1622 default_encode_section_info (decl, r, first);
1624 if (TREE_CODE (decl) == VAR_DECL
1625 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1626 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1628 rtx symbol = XEXP (r, 0);
1630 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1631 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1635 #undef TARGET_ASM_CONSTRUCTOR
1636 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1637 #undef TARGET_ASM_DESTRUCTOR
1638 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1640 /* Output constructors and destructors. Just like
1641 default_named_section_asm_out_* but don't set the sections writable. */
1643 static void
1644 xstormy16_asm_out_destructor (rtx symbol, int priority)
1646 const char *section = ".dtors";
1647 char buf[16];
1649 /* ??? This only works reliably with the GNU linker. */
1650 if (priority != DEFAULT_INIT_PRIORITY)
1652 sprintf (buf, ".dtors.%.5u",
1653 /* Invert the numbering so the linker puts us in the proper
1654 order; constructors are run from right to left, and the
1655 linker sorts in increasing order. */
1656 MAX_INIT_PRIORITY - priority);
1657 section = buf;
1660 switch_to_section (get_section (section, 0, NULL));
1661 assemble_align (POINTER_SIZE);
1662 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1665 static void
1666 xstormy16_asm_out_constructor (rtx symbol, int priority)
1668 const char *section = ".ctors";
1669 char buf[16];
1671 /* ??? This only works reliably with the GNU linker. */
1672 if (priority != DEFAULT_INIT_PRIORITY)
1674 sprintf (buf, ".ctors.%.5u",
1675 /* Invert the numbering so the linker puts us in the proper
1676 order; constructors are run from right to left, and the
1677 linker sorts in increasing order. */
1678 MAX_INIT_PRIORITY - priority);
1679 section = buf;
1682 switch_to_section (get_section (section, 0, NULL));
1683 assemble_align (POINTER_SIZE);
1684 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1687 /* Print a memory address as an operand to reference that memory location. */
1689 void
1690 xstormy16_print_operand_address (FILE *file, rtx address)
1692 HOST_WIDE_INT offset;
1693 int pre_dec, post_inc;
1695 /* There are a few easy cases. */
1696 if (GET_CODE (address) == CONST_INT)
1698 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1699 return;
1702 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1704 output_addr_const (file, address);
1705 return;
1708 /* Otherwise, it's hopefully something of the form
1709 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1710 if (GET_CODE (address) == PLUS)
1712 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1713 offset = INTVAL (XEXP (address, 1));
1714 address = XEXP (address, 0);
1716 else
1717 offset = 0;
1719 pre_dec = (GET_CODE (address) == PRE_DEC);
1720 post_inc = (GET_CODE (address) == POST_INC);
1721 if (pre_dec || post_inc)
1722 address = XEXP (address, 0);
1724 gcc_assert (GET_CODE (address) == REG);
1726 fputc ('(', file);
1727 if (pre_dec)
1728 fputs ("--", file);
1729 fputs (reg_names [REGNO (address)], file);
1730 if (post_inc)
1731 fputs ("++", file);
1732 if (offset != 0)
1733 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1734 fputc (')', file);
/* Print an operand to an assembler instruction.  */

void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
	 Print it preceded by '#'.  */
      {
	static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
	HOST_WIDE_INT xx = 1;
	HOST_WIDE_INT l;

	if (GET_CODE (x) == CONST_INT)
	  xx = INTVAL (x);
	else
	  output_operand_lossage ("'B' operand is not constant");

	/* GCC sign-extends masks with the MSB set, so we have to
	   detect all the cases that differ only in sign extension
	   beyond the bits we care about.  Normally, the predicates
	   and constraints ensure that we have the right values.  This
	   works correctly for valid masks.  */
	if (bits_set[xx & 7] <= 1)
	  {
	    /* At most one bit set in the low 3 bits: treat X as a
	       one-bit-set mask.  Remove sign extension bits.  */
	    if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
	      xx &= 0xff;
	    else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
	      xx &= 0xffff;
	    l = exact_log2 (xx);
	  }
	else
	  {
	    /* Otherwise treat X as a one-bit-clear mask.  Add sign
	       extension bits.  */
	    if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
	      xx |= ~(HOST_WIDE_INT)0xff;
	    else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
	      xx |= ~(HOST_WIDE_INT)0xffff;
	    l = exact_log2 (~xx);
	  }

	if (l == -1)
	  output_operand_lossage ("'B' operand has multiple bits set");

	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
	return;
      }

    case 'C':
      /* Print the symbol without a surrounding @fptr().  */
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else if (GET_CODE (x) == LABEL_REF)
	output_asm_label (x);
      else
	xstormy16_print_operand_address (file, x);
      return;

    case 'o':
    case 'O':
      /* Print the immediate operand less one, preceded by '#'.
	 For 'O', negate it first.  */
      {
	HOST_WIDE_INT xx = 0;

	if (GET_CODE (x) == CONST_INT)
	  xx = INTVAL (x);
	else
	  output_operand_lossage ("'o' operand is not constant");

	if (code == 'O')
	  xx = -xx;

	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
	return;
      }

    case 'b':
      /* Print the shift mask for bp/bn.  */
      {
	HOST_WIDE_INT xx = 1;
	HOST_WIDE_INT l;

	if (GET_CODE (x) == CONST_INT)
	  xx = INTVAL (x);
	else
	  /* NOTE(review): message says 'B' although this is the 'b'
	     case -- kept as-is since it is a runtime diagnostic.  */
	  output_operand_lossage ("'B' operand is not constant");

	l = 7 - xx;

	fputs (IMMEDIATE_PREFIX, file);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
	return;
      }

    case 0:
      /* Handled below.  */
      break;

    default:
      output_operand_lossage ("xstormy16_print_operand: unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case MEM:
      xstormy16_print_operand_address (file, XEXP (x, 0));
      break;

    default:
      /* Some kind of constant or label; an immediate operand,
	 so prefix it with '#' for the assembler.  */
      fputs (IMMEDIATE_PREFIX, file);
      output_addr_const (file, x);
      break;
    }

  return;
}
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Bias the index down by the lower bound; the following unsigned
     compare then also catches indices below the lower bound.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  /* Scale by 4 -- matching the per-entry size of the jmpf table
     emitted by xstormy16_output_addr_vec.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1896 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1897 instructions, without label or alignment or any other special
1898 constructs. We know that the previous instruction will be the
1899 `tablejump_pcrel' output above.
1901 TODO: it might be nice to output 'br' instructions if they could
1902 all reach. */
1904 void
1905 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1907 int vlen, idx;
1909 switch_to_section (current_function_section ());
1911 vlen = XVECLEN (table, 0);
1912 for (idx = 0; idx < vlen; idx++)
1914 fputs ("\tjmpf ", file);
1915 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1916 fputc ('\n', file);
/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register, or NULL for calls that
   return no value.  DEST is the MEM naming the function being called.
   COUNTER becomes the second operand of the emitted CALL rtx --
   presumably the argument-bytes operand of the call pattern; confirm
   against stormy16.md.  (The previous comment here described `casesi'
   parameters and did not match this function.)  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  gcc_assert (GET_CODE (dest) == MEM);
  dest = XEXP (dest, 0);

  /* Indirect calls must go through a register.  */
  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Attach a USE of a zero-valued register (or const0_rtx for direct
     calls) as the second element of the PARALLEL, matching the call
     patterns in the md file.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

  (set DEST (CODE:MODE SRC0 SRC1))

  When CODE is COMPARE, a branch template is generated
  (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is computed as 0 - SRC1.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one word at a time, low word first, chaining the carry
     through the addchi5/subchi5 patterns.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding a zero low word is a no-op.  */
	  if (firstloop
	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 sets the carry; addchi5 also consumes it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE, the final word is emitted as a combined
	     subtract-with-borrow plus conditional branch template.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: 0 for IOR/XOR, -1 for AND.  */
	  if (GET_CODE (w_src1) == CONST_INT
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.  */

const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  /* Assembled into a static buffer; the caller must consume the string
     before the next call.  */
  static char r[64];

  gcc_assert (GET_CODE (size_r) == CONST_INT
	      && GET_CODE (x) == REG && mode == SImode);

  /* Reduce the count modulo the operand width.  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* r0 is the low half of the SImode register pair, r1 the high half.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }
  if (size > 16)
    {
      /* Move a whole word, then shift the remainder.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2183 /* Attribute handling. */
2185 /* Return nonzero if the function is an interrupt function. */
2188 xstormy16_interrupt_function_p (void)
2190 tree attributes;
2192 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2193 any functions are declared, which is demonstrably wrong, but
2194 it is worked around here. FIXME. */
2195 if (!cfun)
2196 return 0;
2198 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2199 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2202 #undef TARGET_ATTRIBUTE_TABLE
2203 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2205 static tree xstormy16_handle_interrupt_attribute
2206 (tree *, tree, tree, int, bool *);
2207 static tree xstormy16_handle_below100_attribute
2208 (tree *, tree, tree, int, bool *);
2210 static const struct attribute_spec xstormy16_attribute_table[] =
2212 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2213 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2214 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2215 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2216 { NULL, 0, 0, false, false, false, NULL }
2219 /* Handle an "interrupt" attribute;
2220 arguments as in struct attribute_spec.handler. */
2222 static tree
2223 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2224 tree args ATTRIBUTE_UNUSED,
2225 int flags ATTRIBUTE_UNUSED,
2226 bool *no_add_attrs)
2228 if (TREE_CODE (*node) != FUNCTION_TYPE)
2230 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2231 name);
2232 *no_add_attrs = true;
2235 return NULL_TREE;
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */
2241 static tree
2242 xstormy16_handle_below100_attribute (tree *node,
2243 tree name ATTRIBUTE_UNUSED,
2244 tree args ATTRIBUTE_UNUSED,
2245 int flags ATTRIBUTE_UNUSED,
2246 bool *no_add_attrs)
2248 if (TREE_CODE (*node) != VAR_DECL
2249 && TREE_CODE (*node) != POINTER_TYPE
2250 && TREE_CODE (*node) != TYPE_DECL)
2252 warning (OPT_Wattributes,
2253 "%<__BELOW100__%> attribute only applies to variables");
2254 *no_add_attrs = true;
2256 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2258 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2260 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2261 "with auto storage class");
2262 *no_add_attrs = true;
2266 return NULL_TREE;
2269 #undef TARGET_INIT_BUILTINS
2270 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2271 #undef TARGET_EXPAND_BUILTIN
2272 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2274 static struct
2276 const char * name;
2277 int md_code;
2278 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2279 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2281 s16builtins[] =
2283 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2284 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2285 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2286 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2287 { NULL, 0, NULL, NULL }
2290 static void
2291 xstormy16_init_builtins (void)
2293 tree args, ret_type, arg;
2294 int i, a;
2296 ret_type = void_type_node;
2298 for (i = 0; s16builtins[i].name; i++)
2300 args = void_list_node;
2301 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2303 switch (s16builtins[i].arg_types[a])
2305 case 's': arg = short_integer_type_node; break;
2306 case 'S': arg = short_unsigned_type_node; break;
2307 case 'l': arg = long_integer_type_node; break;
2308 case 'L': arg = long_unsigned_type_node; break;
2309 default: gcc_unreachable ();
2311 if (a == 0)
2312 ret_type = arg;
2313 else
2314 args = tree_cons (NULL_TREE, arg, args);
2316 add_builtin_function (s16builtins[i].name,
2317 build_function_type (ret_type, args),
2318 i, BUILT_IN_MD, NULL, NULL);
2322 static rtx
2323 xstormy16_expand_builtin (tree exp, rtx target,
2324 rtx subtarget ATTRIBUTE_UNUSED,
2325 enum machine_mode mode ATTRIBUTE_UNUSED,
2326 int ignore ATTRIBUTE_UNUSED)
2328 rtx op[10], args[10], pat, copyto[10], retval = 0;
2329 tree fndecl, argtree;
2330 int i, a, o, code;
2332 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2333 argtree = TREE_OPERAND (exp, 1);
2334 i = DECL_FUNCTION_CODE (fndecl);
2335 code = s16builtins[i].md_code;
2337 for (a = 0; a < 10 && argtree; a++)
2339 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2340 argtree = TREE_CHAIN (argtree);
2343 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2345 char ao = s16builtins[i].arg_ops[o];
2346 char c = insn_data[code].operand[o].constraint[0];
2347 int omode;
2349 copyto[o] = 0;
2351 omode = insn_data[code].operand[o].mode;
2352 if (ao == 'r')
2353 op[o] = target ? target : gen_reg_rtx (omode);
2354 else if (ao == 't')
2355 op[o] = gen_reg_rtx (omode);
2356 else
2357 op[o] = args[(int) hex_value (ao)];
2359 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2361 if (c == '+' || c == '=')
2363 copyto[o] = op[o];
2364 op[o] = gen_reg_rtx (omode);
2366 else
2367 op[o] = copy_to_mode_reg (omode, op[o]);
2370 if (ao == 'r')
2371 retval = op[o];
2374 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2375 op[5], op[6], op[7], op[8], op[9]);
2376 emit_insn (pat);
2378 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2379 if (copyto[o])
2381 emit_move_insn (copyto[o], op[o]);
2382 if (op[o] == retval)
2383 retval = copyto[o];
2386 return retval;
2389 /* Look for combinations of insns that can be converted to BN or BP
2390 opcodes. This is, unfortunately, too complex to do with MD
2391 patterns. */
2393 static void
2394 combine_bnp (rtx insn)
2396 int insn_code, regno, need_extend;
2397 unsigned int mask;
2398 rtx cond, reg, and, load, qireg, mem;
2399 enum machine_mode load_mode = QImode;
2400 enum machine_mode and_mode = QImode;
2401 rtx shift = NULL_RTX;
2403 insn_code = recog_memoized (insn);
2404 if (insn_code != CODE_FOR_cbranchhi
2405 && insn_code != CODE_FOR_cbranchhi_neg)
2406 return;
2408 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2409 cond = XEXP (cond, 1); /* if */
2410 cond = XEXP (cond, 0); /* cond */
2411 switch (GET_CODE (cond))
2413 case NE:
2414 case EQ:
2415 need_extend = 0;
2416 break;
2417 case LT:
2418 case GE:
2419 need_extend = 1;
2420 break;
2421 default:
2422 return;
2425 reg = XEXP (cond, 0);
2426 if (GET_CODE (reg) != REG)
2427 return;
2428 regno = REGNO (reg);
2429 if (XEXP (cond, 1) != const0_rtx)
2430 return;
2431 if (! find_regno_note (insn, REG_DEAD, regno))
2432 return;
2433 qireg = gen_rtx_REG (QImode, regno);
2435 if (need_extend)
2437 /* LT and GE conditionals should have a sign extend before
2438 them. */
2439 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2441 int and_code = recog_memoized (and);
2443 if (and_code == CODE_FOR_extendqihi2
2444 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2445 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
2446 break;
2448 if (and_code == CODE_FOR_movhi_internal
2449 && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
2451 /* This is for testing bit 15. */
2452 and = insn;
2453 break;
2456 if (reg_mentioned_p (reg, and))
2457 return;
2459 if (GET_CODE (and) != NOTE
2460 && GET_CODE (and) != INSN)
2461 return;
2464 else
2466 /* EQ and NE conditionals have an AND before them. */
2467 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2469 if (recog_memoized (and) == CODE_FOR_andhi3
2470 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2471 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
2472 break;
2474 if (reg_mentioned_p (reg, and))
2475 return;
2477 if (GET_CODE (and) != NOTE
2478 && GET_CODE (and) != INSN)
2479 return;
2482 if (and)
2484 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2485 followed by an AND like this:
2487 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2488 (clobber (reg:BI carry))]
2490 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2492 Attempt to detect this here. */
2493 for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
2495 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2496 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2497 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2498 break;
2500 if (reg_mentioned_p (reg, shift)
2501 || (GET_CODE (shift) != NOTE
2502 && GET_CODE (shift) != INSN))
2504 shift = NULL_RTX;
2505 break;
2510 if (!and)
2511 return;
2513 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
2514 load;
2515 load = prev_real_insn (load))
2517 int load_code = recog_memoized (load);
2519 if (load_code == CODE_FOR_movhi_internal
2520 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2521 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2522 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2524 load_mode = HImode;
2525 break;
2528 if (load_code == CODE_FOR_movqi_internal
2529 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2530 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2532 load_mode = QImode;
2533 break;
2536 if (load_code == CODE_FOR_zero_extendqihi2
2537 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2538 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2540 load_mode = QImode;
2541 and_mode = HImode;
2542 break;
2545 if (reg_mentioned_p (reg, load))
2546 return;
2548 if (GET_CODE (load) != NOTE
2549 && GET_CODE (load) != INSN)
2550 return;
2552 if (!load)
2553 return;
2555 mem = SET_SRC (PATTERN (load));
2557 if (need_extend)
2559 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2561 /* If the mem includes a zero-extend operation and we are
2562 going to generate a sign-extend operation then move the
2563 mem inside the zero-extend. */
2564 if (GET_CODE (mem) == ZERO_EXTEND)
2565 mem = XEXP (mem, 0);
2567 else
2569 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
2570 return;
2572 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
2574 if (shift)
2575 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2578 if (load_mode == HImode)
2580 rtx addr = XEXP (mem, 0);
2582 if (! (mask & 0xff))
2584 addr = plus_constant (addr, 1);
2585 mask >>= 8;
2587 mem = gen_rtx_MEM (QImode, addr);
2590 if (need_extend)
2591 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2592 else
2593 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2595 INSN_CODE (insn) = -1;
2596 delete_insn (load);
2598 if (and != insn)
2599 delete_insn (and);
2601 if (shift != NULL_RTX)
2602 delete_insn (shift);
2605 static void
2606 xstormy16_reorg (void)
2608 rtx insn;
2610 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2612 if (! JUMP_P (insn))
2613 continue;
2614 combine_bnp (insn);
2618 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2620 static bool
2621 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2623 const HOST_WIDE_INT size = int_size_in_bytes (type);
2624 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook and macro definitions for this backend.  */

/* Assembler directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs handling.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P	xstormy16_legitimate_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* Instantiate the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype.  */
#include "gt-stormy16.h"