Merge from mainline (165734:167278).
[official-gcc/graphite-test-results.git] / gcc / config / stormy16 / stormy16.c
blobce83f683d392dd56701c09bf7d2738c46bac4293
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "toplev.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "except.h"
43 #include "function.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "tm_p.h"
47 #include "langhooks.h"
48 #include "gimple.h"
49 #include "df.h"
50 #include "ggc.h"
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 HOST_WIDE_INT, tree);
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
61 static int xstormy16_address_cost (rtx, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 static GTY(()) section *bss100_section;
66 /* Compute a (partial) cost for rtx X. Return true if the complete
67 cost has been computed, and false if subexpressions should be
68 scanned. In either case, *TOTAL contains the cost result. */
70 static bool
71 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
72 int *total, bool speed ATTRIBUTE_UNUSED)
74 switch (code)
76 case CONST_INT:
77 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
78 *total = COSTS_N_INSNS (1) / 2;
79 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1);
81 else
82 *total = COSTS_N_INSNS (2);
83 return true;
85 case CONST_DOUBLE:
86 case CONST:
87 case SYMBOL_REF:
88 case LABEL_REF:
89 *total = COSTS_N_INSNS (2);
90 return true;
92 case MULT:
93 *total = COSTS_N_INSNS (35 + 6);
94 return true;
95 case DIV:
96 *total = COSTS_N_INSNS (51 - 6);
97 return true;
99 default:
100 return false;
104 static int
105 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
107 return (CONST_INT_P (x) ? 2
108 : GET_CODE (x) == PLUS ? 7
109 : 5);
112 /* Branches are handled as follows:
114 1. HImode compare-and-branches. The machine supports these
115 natively, so the appropriate pattern is emitted directly.
117 2. SImode EQ and NE. These are emitted as pairs of HImode
118 compare-and-branches.
120 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
121 of a SImode subtract followed by a branch (not a compare-and-branch),
122 like this:
127 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
132 bne. */
134 /* Emit a branch of kind CODE to location LOC. */
136 void
137 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
139 rtx condition_rtx, loc_ref, branch, cy_clobber;
140 rtvec vec;
141 enum machine_mode mode;
143 mode = GET_MODE (op0);
144 gcc_assert (mode == HImode || mode == SImode);
146 if (mode == SImode
147 && (code == GT || code == LE || code == GTU || code == LEU))
149 int unsigned_p = (code == GTU || code == LEU);
150 int gt_p = (code == GT || code == GTU);
151 rtx lab = NULL_RTX;
153 if (gt_p)
154 lab = gen_label_rtx ();
155 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
156 /* This should be generated as a comparison against the temporary
157 created by the previous insn, but reload can't handle that. */
158 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
159 if (gt_p)
160 emit_label (lab);
161 return;
163 else if (mode == SImode
164 && (code == NE || code == EQ)
165 && op1 != const0_rtx)
167 rtx op0_word, op1_word;
168 rtx lab = NULL_RTX;
169 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
170 int i;
172 if (code == EQ)
173 lab = gen_label_rtx ();
175 for (i = 0; i < num_words - 1; i++)
177 op0_word = simplify_gen_subreg (word_mode, op0, mode,
178 i * UNITS_PER_WORD);
179 op1_word = simplify_gen_subreg (word_mode, op1, mode,
180 i * UNITS_PER_WORD);
181 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
183 op0_word = simplify_gen_subreg (word_mode, op0, mode,
184 i * UNITS_PER_WORD);
185 op1_word = simplify_gen_subreg (word_mode, op1, mode,
186 i * UNITS_PER_WORD);
187 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
189 if (code == EQ)
190 emit_label (lab);
191 return;
194 /* We can't allow reload to try to generate any reload after a branch,
195 so when some register must match we must make the temporary ourselves. */
196 if (mode != HImode)
198 rtx tmp;
199 tmp = gen_reg_rtx (mode);
200 emit_move_insn (tmp, op0);
201 op0 = tmp;
204 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
205 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
206 branch = gen_rtx_SET (VOIDmode, pc_rtx,
207 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
208 loc_ref, pc_rtx));
210 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
212 if (mode == HImode)
213 vec = gen_rtvec (2, branch, cy_clobber);
214 else if (code == NE || code == EQ)
215 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
216 else
218 rtx sub;
219 #if 0
220 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
221 #else
222 sub = gen_rtx_CLOBBER (SImode, op0);
223 #endif
224 vec = gen_rtvec (3, branch, sub, cy_clobber);
227 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
230 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
231 the arithmetic operation. Most of the work is done by
232 xstormy16_expand_arith. */
234 void
235 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
236 rtx dest)
238 rtx op0 = XEXP (comparison, 0);
239 rtx op1 = XEXP (comparison, 1);
240 rtx seq, last_insn;
241 rtx compare;
243 start_sequence ();
244 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
245 seq = get_insns ();
246 end_sequence ();
248 gcc_assert (INSN_P (seq));
250 last_insn = seq;
251 while (NEXT_INSN (last_insn) != NULL_RTX)
252 last_insn = NEXT_INSN (last_insn);
254 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
255 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
256 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
257 emit_insn (seq);
261 /* Return the string to output a conditional branch to LABEL, which is
262 the operand number of the label.
264 OP is the conditional expression, or NULL for branch-always.
266 REVERSED is nonzero if we should reverse the sense of the comparison.
268 INSN is the insn. */
270 char *
271 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
273 static char string[64];
274 int need_longbranch = (op != NULL_RTX
275 ? get_attr_length (insn) == 8
276 : get_attr_length (insn) == 4);
277 int really_reversed = reversed ^ need_longbranch;
278 const char *ccode;
279 const char *templ;
280 const char *operands;
281 enum rtx_code code;
283 if (! op)
285 if (need_longbranch)
286 ccode = "jmpf";
287 else
288 ccode = "br";
289 sprintf (string, "%s %s", ccode, label);
290 return string;
293 code = GET_CODE (op);
295 if (! REG_P (XEXP (op, 0)))
297 code = swap_condition (code);
298 operands = "%3,%2";
300 else
301 operands = "%2,%3";
303 /* Work out which way this really branches. */
304 if (really_reversed)
305 code = reverse_condition (code);
307 switch (code)
309 case EQ: ccode = "z"; break;
310 case NE: ccode = "nz"; break;
311 case GE: ccode = "ge"; break;
312 case LT: ccode = "lt"; break;
313 case GT: ccode = "gt"; break;
314 case LE: ccode = "le"; break;
315 case GEU: ccode = "nc"; break;
316 case LTU: ccode = "c"; break;
317 case GTU: ccode = "hi"; break;
318 case LEU: ccode = "ls"; break;
320 default:
321 gcc_unreachable ();
324 if (need_longbranch)
325 templ = "b%s %s,.+8 | jmpf %s";
326 else
327 templ = "b%s %s,%s";
328 sprintf (string, templ, ccode, operands, label);
330 return string;
333 /* Return the string to output a conditional branch to LABEL, which is
334 the operand number of the label, but suitable for the tail of a
335 SImode branch.
337 OP is the conditional expression (OP is never NULL_RTX).
339 REVERSED is nonzero if we should reverse the sense of the comparison.
341 INSN is the insn. */
343 char *
344 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
346 static char string[64];
347 int need_longbranch = get_attr_length (insn) >= 8;
348 int really_reversed = reversed ^ need_longbranch;
349 const char *ccode;
350 const char *templ;
351 char prevop[16];
352 enum rtx_code code;
354 code = GET_CODE (op);
356 /* Work out which way this really branches. */
357 if (really_reversed)
358 code = reverse_condition (code);
360 switch (code)
362 case EQ: ccode = "z"; break;
363 case NE: ccode = "nz"; break;
364 case GE: ccode = "ge"; break;
365 case LT: ccode = "lt"; break;
366 case GEU: ccode = "nc"; break;
367 case LTU: ccode = "c"; break;
369 /* The missing codes above should never be generated. */
370 default:
371 gcc_unreachable ();
374 switch (code)
376 case EQ: case NE:
378 int regnum;
380 gcc_assert (REG_P (XEXP (op, 0)));
382 regnum = REGNO (XEXP (op, 0));
383 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
385 break;
387 case GE: case LT: case GEU: case LTU:
388 strcpy (prevop, "sbc %2,%3");
389 break;
391 default:
392 gcc_unreachable ();
395 if (need_longbranch)
396 templ = "%s | b%s .+6 | jmpf %s";
397 else
398 templ = "%s | b%s %s";
399 sprintf (string, templ, prevop, ccode, label);
401 return string;
404 /* Many machines have some registers that cannot be copied directly to or from
405 memory or even from other types of registers. An example is the `MQ'
406 register, which on most machines, can only be copied to or from general
407 registers, but not memory. Some machines allow copying all registers to and
408 from memory, but require a scratch register for stores to some memory
409 locations (e.g., those with symbolic address on the RT, and those with
410 certain symbolic address on the SPARC when compiling PIC). In some cases,
411 both an intermediate and a scratch register are required.
413 You should define these macros to indicate to the reload phase that it may
414 need to allocate at least one register for a reload in addition to the
415 register to contain the data. Specifically, if copying X to a register
416 RCLASS in MODE requires an intermediate register, you should define
417 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
418 whose registers can be used as intermediate registers or scratch registers.
420 If copying a register RCLASS in MODE to X requires an intermediate or scratch
421 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
422 largest register class required. If the requirements for input and output
423 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
424 instead of defining both macros identically.
426 The values returned by these macros are often `GENERAL_REGS'. Return
427 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
428 to or from a register of RCLASS in MODE without requiring a scratch register.
429 Do not define this macro if it would always return `NO_REGS'.
431 If a scratch register is required (either with or without an intermediate
432 register), you should define patterns for `reload_inM' or `reload_outM', as
433 required.. These patterns, which will normally be implemented with a
434 `define_expand', should be similar to the `movM' patterns, except that
435 operand 2 is the scratch register.
437 Define constraints for the reload register and scratch register that contain
438 a single register class. If the original reload register (whose class is
439 RCLASS) can meet the constraint given in the pattern, the value returned by
440 these macros is used for the class of the scratch register. Otherwise, two
441 additional reload registers are required. Their classes are obtained from
442 the constraints in the insn pattern.
444 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
445 either be in a hard register or in memory. Use `true_regnum' to find out;
446 it will return -1 if the pseudo is in memory and the hard register number if
447 it is in a register.
449 These macros should not be used in the case where a particular class of
450 registers can only be copied to memory and not to another class of
451 registers. In that case, secondary reload registers are not needed and
452 would not be helpful. Instead, a stack location must be used to perform the
453 copy and the `movM' pattern should use memory as an intermediate storage.
454 This case often occurs between floating-point and general registers. */
456 enum reg_class
457 xstormy16_secondary_reload_class (enum reg_class rclass,
458 enum machine_mode mode ATTRIBUTE_UNUSED,
459 rtx x)
461 /* This chip has the interesting property that only the first eight
462 registers can be moved to/from memory. */
463 if ((MEM_P (x)
464 || ((GET_CODE (x) == SUBREG || REG_P (x))
465 && (true_regnum (x) == -1
466 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
467 && ! reg_class_subset_p (rclass, EIGHT_REGS))
468 return EIGHT_REGS;
470 return NO_REGS;
473 enum reg_class
474 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
476 if (rclass == GENERAL_REGS && MEM_P (x))
477 return EIGHT_REGS;
479 return rclass;
482 /* Predicate for symbols and addresses that reflect special 8-bit
483 addressing. */
486 xstormy16_below100_symbol (rtx x,
487 enum machine_mode mode ATTRIBUTE_UNUSED)
489 if (GET_CODE (x) == CONST)
490 x = XEXP (x, 0);
491 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
492 x = XEXP (x, 0);
494 if (GET_CODE (x) == SYMBOL_REF)
495 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
497 if (CONST_INT_P (x))
499 HOST_WIDE_INT i = INTVAL (x);
501 if ((i >= 0x0000 && i <= 0x00ff)
502 || (i >= 0x7f00 && i <= 0x7fff))
503 return 1;
505 return 0;
508 /* Likewise, but only for non-volatile MEMs, for patterns where the
509 MEM will get split into smaller sized accesses. */
512 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
514 if (MEM_P (x) && MEM_VOLATILE_P (x))
515 return 0;
516 return xstormy16_below100_operand (x, mode);
519 /* Expand an 8-bit IOR. This either detects the one case we can
520 actually do, or uses a 16-bit IOR. */
522 void
523 xstormy16_expand_iorqi3 (rtx *operands)
525 rtx in, out, outsub, val;
527 out = operands[0];
528 in = operands[1];
529 val = operands[2];
531 if (xstormy16_onebit_set_operand (val, QImode))
533 if (!xstormy16_below100_or_register (in, QImode))
534 in = copy_to_mode_reg (QImode, in);
535 if (!xstormy16_below100_or_register (out, QImode))
536 out = gen_reg_rtx (QImode);
537 emit_insn (gen_iorqi3_internal (out, in, val));
538 if (out != operands[0])
539 emit_move_insn (operands[0], out);
540 return;
543 if (! REG_P (in))
544 in = copy_to_mode_reg (QImode, in);
546 if (! REG_P (val) && ! CONST_INT_P (val))
547 val = copy_to_mode_reg (QImode, val);
549 if (! REG_P (out))
550 out = gen_reg_rtx (QImode);
552 in = simplify_gen_subreg (HImode, in, QImode, 0);
553 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
555 if (! CONST_INT_P (val))
556 val = simplify_gen_subreg (HImode, val, QImode, 0);
558 emit_insn (gen_iorhi3 (outsub, in, val));
560 if (out != operands[0])
561 emit_move_insn (operands[0], out);
564 /* Expand an 8-bit AND. This either detects the one case we can
565 actually do, or uses a 16-bit AND. */
567 void
568 xstormy16_expand_andqi3 (rtx *operands)
570 rtx in, out, outsub, val;
572 out = operands[0];
573 in = operands[1];
574 val = operands[2];
576 if (xstormy16_onebit_clr_operand (val, QImode))
578 if (!xstormy16_below100_or_register (in, QImode))
579 in = copy_to_mode_reg (QImode, in);
580 if (!xstormy16_below100_or_register (out, QImode))
581 out = gen_reg_rtx (QImode);
582 emit_insn (gen_andqi3_internal (out, in, val));
583 if (out != operands[0])
584 emit_move_insn (operands[0], out);
585 return;
588 if (! REG_P (in))
589 in = copy_to_mode_reg (QImode, in);
591 if (! REG_P (val) && ! CONST_INT_P (val))
592 val = copy_to_mode_reg (QImode, val);
594 if (! REG_P (out))
595 out = gen_reg_rtx (QImode);
597 in = simplify_gen_subreg (HImode, in, QImode, 0);
598 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
600 if (! CONST_INT_P (val))
601 val = simplify_gen_subreg (HImode, val, QImode, 0);
603 emit_insn (gen_andhi3 (outsub, in, val));
605 if (out != operands[0])
606 emit_move_insn (operands[0], out);
/* Nonzero if X is a CONST_INT whose value plus OFFSET fits the signed
   12-bit displacement field of an indexed load/store.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero if X plus OFFSET is a CONST_INT naming a valid absolute
   address: non-negative, below 0x8000, and within one of the two
   directly addressable windows (below 0x100 or at/above 0x7F00).  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
619 static bool
620 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
621 rtx x, bool strict)
623 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
624 return true;
626 if (GET_CODE (x) == PLUS
627 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
629 x = XEXP (x, 0);
630 /* PR 31232: Do not allow INT+INT as an address. */
631 if (CONST_INT_P (x))
632 return false;
635 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
636 || GET_CODE (x) == POST_INC
637 || GET_CODE (x) == PRE_DEC)
638 x = XEXP (x, 0);
640 if (REG_P (x)
641 && REGNO_OK_FOR_BASE_P (REGNO (x))
642 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
643 return true;
645 if (xstormy16_below100_symbol (x, mode))
646 return true;
648 return false;
651 /* Return nonzero if memory address X (an RTX) can have different
652 meanings depending on the machine mode of the memory reference it
653 is used for or if the address is valid for some modes but not
654 others.
656 Autoincrement and autodecrement addresses typically have mode-dependent
657 effects because the amount of the increment or decrement is the size of the
658 operand being addressed. Some machines have other mode-dependent addresses.
659 Many RISC machines have no mode-dependent addresses.
661 You may assume that ADDR is a valid address for the machine.
663 On this chip, this is true if the address is valid with an offset
664 of 0 but not of 6, because in that case it cannot be used as an
665 address for DImode or DFmode, or if the address is a post-increment
666 or pre-decrement address. */
669 xstormy16_mode_dependent_address_p (rtx x)
671 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
672 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
673 return 1;
675 if (GET_CODE (x) == PLUS
676 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
677 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
678 return 1;
680 if (GET_CODE (x) == PLUS)
681 x = XEXP (x, 0);
683 /* Auto-increment addresses are now treated generically in recog.c. */
684 return 0;
687 /* A C expression that defines the optional machine-dependent constraint
688 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
689 types of operands, usually memory references, for the target machine.
690 Normally this macro will not be defined. If it is required for a particular
691 target machine, it should return 1 if VALUE corresponds to the operand type
692 represented by the constraint letter C. If C is not defined as an extra
693 constraint, the value returned should be 0 regardless of VALUE. */
696 xstormy16_extra_constraint_p (rtx x, int c)
698 switch (c)
700 /* 'Q' is for pushes. */
701 case 'Q':
702 return (MEM_P (x)
703 && GET_CODE (XEXP (x, 0)) == POST_INC
704 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
706 /* 'R' is for pops. */
707 case 'R':
708 return (MEM_P (x)
709 && GET_CODE (XEXP (x, 0)) == PRE_DEC
710 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
712 /* 'S' is for immediate memory addresses. */
713 case 'S':
714 return (MEM_P (x)
715 && CONST_INT_P (XEXP (x, 0))
716 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
718 /* 'T' is for Rx. */
719 case 'T':
720 /* Not implemented yet. */
721 return 0;
723 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
724 for allocating a scratch register for 32-bit shifts. */
725 case 'U':
726 return (CONST_INT_P (x) && (! IN_RANGE (INTVAL (x), 2, 15)));
728 /* 'Z' is for CONST_INT value zero. This is for adding zero to
729 a register in addhi3, which would otherwise require a carry. */
730 case 'Z':
731 return (CONST_INT_P (x) && (INTVAL (x) == 0));
733 case 'W':
734 return xstormy16_below100_operand (x, GET_MODE (x));
736 default:
737 return 0;
742 short_memory_operand (rtx x, enum machine_mode mode)
744 if (! memory_operand (x, mode))
745 return 0;
746 return (GET_CODE (XEXP (x, 0)) != PLUS);
749 /* Splitter for the 'move' patterns, for modes not directly implemented
750 by hardware. Emit insns to copy a value of mode MODE from SRC to
751 DEST.
753 This function is only called when reload_completed. */
755 void
756 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
758 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
759 int direction, end, i;
760 int src_modifies = 0;
761 int dest_modifies = 0;
762 int src_volatile = 0;
763 int dest_volatile = 0;
764 rtx mem_operand;
765 rtx auto_inc_reg_rtx = NULL_RTX;
767 /* Check initial conditions. */
768 gcc_assert (reload_completed
769 && mode != QImode && mode != HImode
770 && nonimmediate_operand (dest, mode)
771 && general_operand (src, mode));
773 /* This case is not supported below, and shouldn't be generated. */
774 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
776 /* This case is very very bad after reload, so trap it now. */
777 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
779 /* The general idea is to copy by words, offsetting the source and
780 destination. Normally the least-significant word will be copied
781 first, but for pre-dec operations it's better to copy the
782 most-significant word first. Only one operand can be a pre-dec
783 or post-inc operand.
785 It's also possible that the copy overlaps so that the direction
786 must be reversed. */
787 direction = 1;
789 if (MEM_P (dest))
791 mem_operand = XEXP (dest, 0);
792 dest_modifies = side_effects_p (mem_operand);
793 if (auto_inc_p (mem_operand))
794 auto_inc_reg_rtx = XEXP (mem_operand, 0);
795 dest_volatile = MEM_VOLATILE_P (dest);
796 if (dest_volatile)
798 dest = copy_rtx (dest);
799 MEM_VOLATILE_P (dest) = 0;
802 else if (MEM_P (src))
804 mem_operand = XEXP (src, 0);
805 src_modifies = side_effects_p (mem_operand);
806 if (auto_inc_p (mem_operand))
807 auto_inc_reg_rtx = XEXP (mem_operand, 0);
808 src_volatile = MEM_VOLATILE_P (src);
809 if (src_volatile)
811 src = copy_rtx (src);
812 MEM_VOLATILE_P (src) = 0;
815 else
816 mem_operand = NULL_RTX;
818 if (mem_operand == NULL_RTX)
820 if (REG_P (src)
821 && REG_P (dest)
822 && reg_overlap_mentioned_p (dest, src)
823 && REGNO (dest) > REGNO (src))
824 direction = -1;
826 else if (GET_CODE (mem_operand) == PRE_DEC
827 || (GET_CODE (mem_operand) == PLUS
828 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
829 direction = -1;
830 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
832 int regno;
834 gcc_assert (REG_P (dest));
835 regno = REGNO (dest);
837 gcc_assert (refers_to_regno_p (regno, regno + num_words,
838 mem_operand, 0));
840 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
841 direction = -1;
842 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
843 mem_operand, 0))
844 direction = 1;
845 else
846 /* This means something like
847 (set (reg:DI r0) (mem:DI (reg:HI r1)))
848 which we'd need to support by doing the set of the second word
849 last. */
850 gcc_unreachable ();
853 end = direction < 0 ? -1 : num_words;
854 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
856 rtx w_src, w_dest, insn;
858 if (src_modifies)
859 w_src = gen_rtx_MEM (word_mode, mem_operand);
860 else
861 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
862 if (src_volatile)
863 MEM_VOLATILE_P (w_src) = 1;
864 if (dest_modifies)
865 w_dest = gen_rtx_MEM (word_mode, mem_operand);
866 else
867 w_dest = simplify_gen_subreg (word_mode, dest, mode,
868 i * UNITS_PER_WORD);
869 if (dest_volatile)
870 MEM_VOLATILE_P (w_dest) = 1;
872 /* The simplify_subreg calls must always be able to simplify. */
873 gcc_assert (GET_CODE (w_src) != SUBREG
874 && GET_CODE (w_dest) != SUBREG);
876 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
877 if (auto_inc_reg_rtx)
878 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
879 auto_inc_reg_rtx,
880 REG_NOTES (insn));
884 /* Expander for the 'move' patterns. Emit insns to copy a value of
885 mode MODE from SRC to DEST. */
887 void
888 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
890 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
892 rtx pmv = XEXP (dest, 0);
893 rtx dest_reg = XEXP (pmv, 0);
894 rtx dest_mod = XEXP (pmv, 1);
895 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
896 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
898 dest = gen_rtx_MEM (mode, dest_reg);
899 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
901 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
903 rtx pmv = XEXP (src, 0);
904 rtx src_reg = XEXP (pmv, 0);
905 rtx src_mod = XEXP (pmv, 1);
906 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
907 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
909 src = gen_rtx_MEM (mode, src_reg);
910 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
913 /* There are only limited immediate-to-memory move instructions. */
914 if (! reload_in_progress
915 && ! reload_completed
916 && MEM_P (dest)
917 && (! CONST_INT_P (XEXP (dest, 0))
918 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
919 && ! xstormy16_below100_operand (dest, mode)
920 && ! REG_P (src)
921 && GET_CODE (src) != SUBREG)
922 src = copy_to_mode_reg (mode, src);
924 /* Don't emit something we would immediately split. */
925 if (reload_completed
926 && mode != HImode && mode != QImode)
928 xstormy16_split_move (mode, dest, src);
929 return;
932 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
935 /* Stack Layout:
937 The stack is laid out as follows:
939 SP->
940 FP-> Local variables
941 Register save area (up to 4 words)
942 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
944 AP-> Return address (two words)
945 9th procedure parameter word
946 10th procedure parameter word
948 last procedure parameter word
950 The frame pointer location is tuned to make it most likely that all
951 parameters and local variables can be accessed using a load-indexed
952 instruction. */
/* A structure to describe the stack layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
/* Does REGNUM need to be saved in the prologue?  A call-saved register
   is saved if it is ever live; in an interrupt function (IFUN)
   call-used registers (except carry) must be saved too, unless this is
   a leaf function in which the register is never live.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
976 /* Compute the stack layout. */
978 struct xstormy16_stack_layout
979 xstormy16_compute_stack_layout (void)
981 struct xstormy16_stack_layout layout;
982 int regno;
983 const int ifun = xstormy16_interrupt_function_p ();
985 layout.locals_size = get_frame_size ();
987 layout.register_save_size = 0;
988 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
989 if (REG_NEEDS_SAVE (regno, ifun))
990 layout.register_save_size += UNITS_PER_WORD;
992 if (cfun->stdarg)
993 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
994 else
995 layout.stdarg_save_size = 0;
997 layout.frame_size = (layout.locals_size
998 + layout.register_save_size
999 + layout.stdarg_save_size);
1001 if (crtl->args.size <= 2048 && crtl->args.size != -1)
1003 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1004 + crtl->args.size <= 2048)
1005 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
1006 else
1007 layout.fp_minus_ap = 2048 - crtl->args.size;
1009 else
1010 layout.fp_minus_ap = (layout.stdarg_save_size
1011 + layout.register_save_size
1012 - INCOMING_FRAME_SP_OFFSET);
1013 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
1014 - layout.fp_minus_ap);
1015 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1016 return layout;
1019 /* Worker function for TARGET_CAN_ELIMINATE. */
1021 static bool
1022 xstormy16_can_eliminate (const int from, const int to)
1024 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1025 ? ! frame_pointer_needed
1026 : true);
1029 /* Determine how all the special registers get eliminated. */
1032 xstormy16_initial_elimination_offset (int from, int to)
1034 struct xstormy16_stack_layout layout;
1035 int result;
1037 layout = xstormy16_compute_stack_layout ();
1039 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1040 result = layout.sp_minus_fp - layout.locals_size;
1041 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1042 result = - layout.locals_size;
1043 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1044 result = - layout.fp_minus_ap;
1045 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1046 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1047 else
1048 gcc_unreachable ();
1050 return result;
1053 static rtx
1054 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1056 rtx set, clobber, insn;
1058 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1059 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1060 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1061 return insn;
1064 /* Called after register allocation to add any instructions needed for
1065 the prologue. Using a prologue insn is favored compared to putting
1066 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1067 since it allows the scheduler to intermix instructions with the
1068 saves of the caller saved registers. In some cases, it might be
1069 necessary to emit a barrier instruction as the last insn to prevent
1070 such scheduling.
1072 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1073 so that the debug info generation code can handle them properly. */
1075 void
1076 xstormy16_expand_prologue (void)
1078 struct xstormy16_stack_layout layout;
1079 int regno;
1080 rtx insn;
1081 rtx mem_push_rtx;
1082 const int ifun = xstormy16_interrupt_function_p ();
1084 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1085 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1087 layout = xstormy16_compute_stack_layout ();
1089 if (layout.locals_size >= 32768)
1090 error ("local variable memory requirements exceed capacity");
1092 /* Save the argument registers if necessary. */
1093 if (layout.stdarg_save_size)
1094 for (regno = FIRST_ARGUMENT_REGISTER;
1095 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1096 regno++)
1098 rtx dwarf;
1099 rtx reg = gen_rtx_REG (HImode, regno);
1101 insn = emit_move_insn (mem_push_rtx, reg);
1102 RTX_FRAME_RELATED_P (insn) = 1;
1104 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1106 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1107 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1108 reg);
1109 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1110 plus_constant (stack_pointer_rtx,
1111 GET_MODE_SIZE (Pmode)));
1112 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1113 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1114 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1117 /* Push each of the registers to save. */
1118 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1119 if (REG_NEEDS_SAVE (regno, ifun))
1121 rtx dwarf;
1122 rtx reg = gen_rtx_REG (HImode, regno);
1124 insn = emit_move_insn (mem_push_rtx, reg);
1125 RTX_FRAME_RELATED_P (insn) = 1;
1127 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1129 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1130 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1131 reg);
1132 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1133 plus_constant (stack_pointer_rtx,
1134 GET_MODE_SIZE (Pmode)));
1135 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1136 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1137 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1140 /* It's just possible that the SP here might be what we need for
1141 the new FP... */
1142 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1144 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1145 RTX_FRAME_RELATED_P (insn) = 1;
1148 /* Allocate space for local variables. */
1149 if (layout.locals_size)
1151 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1152 GEN_INT (layout.locals_size));
1153 RTX_FRAME_RELATED_P (insn) = 1;
1156 /* Set up the frame pointer, if required. */
1157 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1159 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1160 RTX_FRAME_RELATED_P (insn) = 1;
1162 if (layout.sp_minus_fp)
1164 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1165 hard_frame_pointer_rtx,
1166 GEN_INT (- layout.sp_minus_fp));
1167 RTX_FRAME_RELATED_P (insn) = 1;
1172 /* Do we need an epilogue at all? */
1175 direct_return (void)
1177 return (reload_completed
1178 && xstormy16_compute_stack_layout ().frame_size == 0
1179 && ! xstormy16_interrupt_function_p ());
1182 /* Called after register allocation to add any instructions needed for
1183 the epilogue. Using an epilogue insn is favored compared to putting
1184 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1185 since it allows the scheduler to intermix instructions with the
1186 saves of the caller saved registers. In some cases, it might be
1187 necessary to emit a barrier instruction as the last insn to prevent
1188 such scheduling. */
1190 void
1191 xstormy16_expand_epilogue (void)
1193 struct xstormy16_stack_layout layout;
1194 rtx mem_pop_rtx;
1195 int regno;
1196 const int ifun = xstormy16_interrupt_function_p ();
1198 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1199 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1201 layout = xstormy16_compute_stack_layout ();
1203 /* Pop the stack for the locals. */
1204 if (layout.locals_size)
1206 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1207 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1208 else
1209 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1210 GEN_INT (- layout.locals_size));
1213 /* Restore any call-saved registers. */
1214 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1215 if (REG_NEEDS_SAVE (regno, ifun))
1216 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1218 /* Pop the stack for the stdarg save area. */
1219 if (layout.stdarg_save_size)
1220 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1221 GEN_INT (- layout.stdarg_save_size));
1223 /* Return. */
1224 if (ifun)
1225 emit_jump_insn (gen_return_internal_interrupt ());
1226 else
1227 emit_jump_insn (gen_return_internal ());
1231 xstormy16_epilogue_uses (int regno)
1233 if (reload_completed && call_used_regs[regno])
1235 const int ifun = xstormy16_interrupt_function_p ();
1236 return REG_NEEDS_SAVE (regno, ifun);
1238 return 0;
/* Profiling is not implemented for this target; report that cleanly
   rather than generating wrong code.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1247 /* Update CUM to advance past an argument in the argument list. The
1248 values MODE, TYPE and NAMED describe that argument. Once this is
1249 done, the variable CUM is suitable for analyzing the *following*
1250 argument with `TARGET_FUNCTION_ARG', etc.
1252 This function need not do anything if the argument in question was
1253 passed on the stack. The compiler knows how to track the amount of
1254 stack space used for arguments without any special help. However,
1255 it makes life easier for xstormy16_build_va_list if it does update
1256 the word count. */
1258 static void
1259 xstormy16_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1260 const_tree type, bool named ATTRIBUTE_UNUSED)
1262 /* If an argument would otherwise be passed partially in registers,
1263 and partially on the stack, the whole of it is passed on the
1264 stack. */
1265 if (*cum < NUM_ARGUMENT_REGISTERS
1266 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1267 *cum = NUM_ARGUMENT_REGISTERS;
1269 *cum += XSTORMY16_WORD_SIZE (type, mode);
1272 static rtx
1273 xstormy16_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1274 const_tree type, bool named ATTRIBUTE_UNUSED)
1276 if (mode == VOIDmode)
1277 return const0_rtx;
1278 if (targetm.calls.must_pass_in_stack (mode, type)
1279 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1280 return NULL_RTX;
1281 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1284 /* Build the va_list type.
1286 For this chip, va_list is a record containing a counter and a pointer.
1287 The counter is of type 'int' and indicates how many bytes
1288 have been used to date. The pointer indicates the stack position
1289 for arguments that have not been passed in registers.
1290 To keep the layout nice, the pointer is first in the structure. */
1292 static tree
1293 xstormy16_build_builtin_va_list (void)
1295 tree f_1, f_2, record, type_decl;
1297 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1298 type_decl = build_decl (BUILTINS_LOCATION,
1299 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1301 f_1 = build_decl (BUILTINS_LOCATION,
1302 FIELD_DECL, get_identifier ("base"),
1303 ptr_type_node);
1304 f_2 = build_decl (BUILTINS_LOCATION,
1305 FIELD_DECL, get_identifier ("count"),
1306 unsigned_type_node);
1308 DECL_FIELD_CONTEXT (f_1) = record;
1309 DECL_FIELD_CONTEXT (f_2) = record;
1311 TYPE_STUB_DECL (record) = type_decl;
1312 TYPE_NAME (record) = type_decl;
1313 TYPE_FIELDS (record) = f_1;
1314 DECL_CHAIN (f_1) = f_2;
1316 layout_type (record);
1318 return record;
1321 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1322 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1323 variable to initialize. NEXTARG is the machine independent notion of the
1324 'next' argument after the variable arguments. */
1326 static void
1327 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1329 tree f_base, f_count;
1330 tree base, count;
1331 tree t,u;
1333 if (xstormy16_interrupt_function_p ())
1334 error ("cannot use va_start in interrupt function");
1336 f_base = TYPE_FIELDS (va_list_type_node);
1337 f_count = DECL_CHAIN (f_base);
1339 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1340 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1341 NULL_TREE);
1343 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1344 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1345 u = fold_convert (TREE_TYPE (count), u);
1346 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1347 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1348 TREE_SIDE_EFFECTS (t) = 1;
1349 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1351 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1352 build_int_cst (NULL_TREE,
1353 crtl->args.info * UNITS_PER_WORD));
1354 TREE_SIDE_EFFECTS (t) = 1;
1355 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1358 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1359 of type va_list as a tree, TYPE is the type passed to va_arg.
1360 Note: This algorithm is documented in stormy-abi. */
1362 static tree
1363 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1364 gimple_seq *post_p ATTRIBUTE_UNUSED)
1366 tree f_base, f_count;
1367 tree base, count;
1368 tree count_tmp, addr, t;
1369 tree lab_gotaddr, lab_fromstack;
1370 int size, size_of_reg_args, must_stack;
1371 tree size_tree;
1373 f_base = TYPE_FIELDS (va_list_type_node);
1374 f_count = DECL_CHAIN (f_base);
1376 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1377 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1378 NULL_TREE);
1380 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1381 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1382 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1384 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1386 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1387 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1388 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1389 addr = create_tmp_var (ptr_type_node, NULL);
1391 if (!must_stack)
1393 tree r;
1395 t = fold_convert (TREE_TYPE (count), size_tree);
1396 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1397 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1398 t = build2 (GT_EXPR, boolean_type_node, t, r);
1399 t = build3 (COND_EXPR, void_type_node, t,
1400 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1401 NULL_TREE);
1402 gimplify_and_add (t, pre_p);
1404 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1405 gimplify_assign (addr, t, pre_p);
1407 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1408 gimplify_and_add (t, pre_p);
1410 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1411 gimplify_and_add (t, pre_p);
1414 /* Arguments larger than a word might need to skip over some
1415 registers, since arguments are either passed entirely in
1416 registers or entirely on the stack. */
1417 size = PUSH_ROUNDING (int_size_in_bytes (type));
1418 if (size > 2 || size < 0 || must_stack)
1420 tree r, u;
1422 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1423 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1425 t = fold_convert (TREE_TYPE (count), r);
1426 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1427 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1428 gimplify_and_add (t, pre_p);
1431 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1432 + INCOMING_FRAME_SP_OFFSET);
1433 t = fold_convert (TREE_TYPE (count), t);
1434 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1435 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1436 fold_convert (TREE_TYPE (count), size_tree));
1437 t = fold_convert (TREE_TYPE (t), fold (t));
1438 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1439 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1440 gimplify_assign (addr, t, pre_p);
1442 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1443 gimplify_and_add (t, pre_p);
1445 t = fold_convert (TREE_TYPE (count), size_tree);
1446 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1447 gimplify_assign (count, t, pre_p);
1449 addr = fold_convert (build_pointer_type (type), addr);
1450 return build_va_arg_indirect_ref (addr);
1453 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1455 static void
1456 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1458 rtx temp = gen_reg_rtx (HImode);
1459 rtx reg_fnaddr = gen_reg_rtx (HImode);
1460 rtx reg_addr, reg_addr_mem;
1462 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1463 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
1465 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1466 emit_move_insn (reg_addr_mem, temp);
1467 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1468 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1470 emit_move_insn (temp, static_chain);
1471 emit_move_insn (reg_addr_mem, temp);
1472 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1473 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1475 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1476 emit_move_insn (temp, reg_fnaddr);
1477 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1478 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1479 emit_move_insn (reg_addr_mem, temp);
1480 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1481 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1483 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1484 emit_move_insn (reg_addr_mem, reg_fnaddr);
1487 /* Worker function for FUNCTION_VALUE. */
1490 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1492 enum machine_mode mode;
1493 mode = TYPE_MODE (valtype);
1494 PROMOTE_MODE (mode, 0, valtype);
1495 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1498 /* A C compound statement that outputs the assembler code for a thunk function,
1499 used to implement C++ virtual function calls with multiple inheritance. The
1500 thunk acts as a wrapper around a virtual function, adjusting the implicit
1501 object parameter before handing control off to the real function.
1503 First, emit code to add the integer DELTA to the location that contains the
1504 incoming first argument. Assume that this argument contains a pointer, and
1505 is the one used to pass the `this' pointer in C++. This is the incoming
1506 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1507 addition must preserve the values of all other incoming arguments.
1509 After the addition, emit code to jump to FUNCTION, which is a
1510 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1511 the return address. Hence returning from FUNCTION will return to whoever
1512 called the current `thunk'.
1514 The effect must be as if @var{function} had been called directly
1515 with the adjusted first argument. This macro is responsible for
1516 emitting all of the code for a thunk function;
1517 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1518 not invoked.
1520 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1521 extracted from it.) It might possibly be useful on some targets, but
1522 probably not. */
1524 static void
1525 xstormy16_asm_output_mi_thunk (FILE *file,
1526 tree thunk_fndecl ATTRIBUTE_UNUSED,
1527 HOST_WIDE_INT delta,
1528 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1529 tree function)
1531 int regnum = FIRST_ARGUMENT_REGISTER;
1533 /* There might be a hidden first argument for a returned structure. */
1534 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1535 regnum += 1;
1537 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1538 fputs ("\tjmpf ", file);
1539 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1540 putc ('\n', file);
1543 /* The purpose of this function is to override the default behavior of
1544 BSS objects. Normally, they go into .bss or .sbss via ".common"
1545 directives, but we need to override that and put them in
1546 .bss_below100. We can't just use a section override (like we do
1547 for .data_below100), because that makes them initialized rather
1548 than uninitialized. */
1550 void
1551 xstormy16_asm_output_aligned_common (FILE *stream,
1552 tree decl,
1553 const char *name,
1554 int size,
1555 int align,
1556 int global)
1558 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1559 rtx symbol;
1561 if (mem != NULL_RTX
1562 && MEM_P (mem)
1563 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1564 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1566 const char *name2;
1567 int p2align = 0;
1569 switch_to_section (bss100_section);
1571 while (align > 8)
1573 align /= 2;
1574 p2align ++;
1577 name2 = default_strip_name_encoding (name);
1578 if (global)
1579 fprintf (stream, "\t.globl\t%s\n", name2);
1580 if (p2align)
1581 fprintf (stream, "\t.p2align %d\n", p2align);
1582 fprintf (stream, "\t.type\t%s, @object\n", name2);
1583 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1584 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1585 return;
1588 if (!global)
1590 fprintf (stream, "\t.local\t");
1591 assemble_name (stream, name);
1592 fprintf (stream, "\n");
1594 fprintf (stream, "\t.comm\t");
1595 assemble_name (stream, name);
1596 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1599 /* Implement TARGET_ASM_INIT_SECTIONS. */
1601 static void
1602 xstormy16_asm_init_sections (void)
1604 bss100_section
1605 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1606 output_section_asm_op,
1607 "\t.section \".bss_below100\",\"aw\",@nobits");
1610 /* Mark symbols with the "below100" attribute so that we can use the
1611 special addressing modes for them. */
1613 static void
1614 xstormy16_encode_section_info (tree decl, rtx r, int first)
1616 default_encode_section_info (decl, r, first);
1618 if (TREE_CODE (decl) == VAR_DECL
1619 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1620 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1622 rtx symbol = XEXP (r, 0);
1624 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1625 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1629 #undef TARGET_ASM_CONSTRUCTOR
1630 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1631 #undef TARGET_ASM_DESTRUCTOR
1632 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1634 /* Output constructors and destructors. Just like
1635 default_named_section_asm_out_* but don't set the sections writable. */
1637 static void
1638 xstormy16_asm_out_destructor (rtx symbol, int priority)
1640 const char *section = ".dtors";
1641 char buf[16];
1643 /* ??? This only works reliably with the GNU linker. */
1644 if (priority != DEFAULT_INIT_PRIORITY)
1646 sprintf (buf, ".dtors.%.5u",
1647 /* Invert the numbering so the linker puts us in the proper
1648 order; constructors are run from right to left, and the
1649 linker sorts in increasing order. */
1650 MAX_INIT_PRIORITY - priority);
1651 section = buf;
1654 switch_to_section (get_section (section, 0, NULL));
1655 assemble_align (POINTER_SIZE);
1656 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1659 static void
1660 xstormy16_asm_out_constructor (rtx symbol, int priority)
1662 const char *section = ".ctors";
1663 char buf[16];
1665 /* ??? This only works reliably with the GNU linker. */
1666 if (priority != DEFAULT_INIT_PRIORITY)
1668 sprintf (buf, ".ctors.%.5u",
1669 /* Invert the numbering so the linker puts us in the proper
1670 order; constructors are run from right to left, and the
1671 linker sorts in increasing order. */
1672 MAX_INIT_PRIORITY - priority);
1673 section = buf;
1676 switch_to_section (get_section (section, 0, NULL));
1677 assemble_align (POINTER_SIZE);
1678 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1681 /* Print a memory address as an operand to reference that memory location. */
1683 void
1684 xstormy16_print_operand_address (FILE *file, rtx address)
1686 HOST_WIDE_INT offset;
1687 int pre_dec, post_inc;
1689 /* There are a few easy cases. */
1690 if (CONST_INT_P (address))
1692 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1693 return;
1696 if (CONSTANT_P (address) || LABEL_P (address))
1698 output_addr_const (file, address);
1699 return;
1702 /* Otherwise, it's hopefully something of the form
1703 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1704 if (GET_CODE (address) == PLUS)
1706 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1707 offset = INTVAL (XEXP (address, 1));
1708 address = XEXP (address, 0);
1710 else
1711 offset = 0;
1713 pre_dec = (GET_CODE (address) == PRE_DEC);
1714 post_inc = (GET_CODE (address) == POST_INC);
1715 if (pre_dec || post_inc)
1716 address = XEXP (address, 0);
1718 gcc_assert (REG_P (address));
1720 fputc ('(', file);
1721 if (pre_dec)
1722 fputs ("--", file);
1723 fputs (reg_names [REGNO (address)], file);
1724 if (post_inc)
1725 fputs ("++", file);
1726 if (offset != 0)
1727 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1728 fputc (')', file);
1731 /* Print an operand to an assembler instruction. */
1733 void
1734 xstormy16_print_operand (FILE *file, rtx x, int code)
1736 switch (code)
1738 case 'B':
1739 /* There is either one bit set, or one bit clear, in X.
1740 Print it preceded by '#'. */
1742 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1743 HOST_WIDE_INT xx = 1;
1744 HOST_WIDE_INT l;
1746 if (CONST_INT_P (x))
1747 xx = INTVAL (x);
1748 else
1749 output_operand_lossage ("'B' operand is not constant");
1751 /* GCC sign-extends masks with the MSB set, so we have to
1752 detect all the cases that differ only in sign extension
1753 beyond the bits we care about. Normally, the predicates
1754 and constraints ensure that we have the right values. This
1755 works correctly for valid masks. */
1756 if (bits_set[xx & 7] <= 1)
1758 /* Remove sign extension bits. */
1759 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1760 xx &= 0xff;
1761 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1762 xx &= 0xffff;
1763 l = exact_log2 (xx);
1765 else
1767 /* Add sign extension bits. */
1768 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1769 xx |= ~(HOST_WIDE_INT)0xff;
1770 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1771 xx |= ~(HOST_WIDE_INT)0xffff;
1772 l = exact_log2 (~xx);
1775 if (l == -1)
1776 output_operand_lossage ("'B' operand has multiple bits set");
1778 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1779 return;
1782 case 'C':
1783 /* Print the symbol without a surrounding @fptr(). */
1784 if (GET_CODE (x) == SYMBOL_REF)
1785 assemble_name (file, XSTR (x, 0));
1786 else if (LABEL_P (x))
1787 output_asm_label (x);
1788 else
1789 xstormy16_print_operand_address (file, x);
1790 return;
1792 case 'o':
1793 case 'O':
1794 /* Print the immediate operand less one, preceded by '#'.
1795 For 'O', negate it first. */
1797 HOST_WIDE_INT xx = 0;
1799 if (CONST_INT_P (x))
1800 xx = INTVAL (x);
1801 else
1802 output_operand_lossage ("'o' operand is not constant");
1804 if (code == 'O')
1805 xx = -xx;
1807 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1808 return;
1811 case 'b':
1812 /* Print the shift mask for bp/bn. */
1814 HOST_WIDE_INT xx = 1;
1815 HOST_WIDE_INT l;
1817 if (CONST_INT_P (x))
1818 xx = INTVAL (x);
1819 else
1820 output_operand_lossage ("'B' operand is not constant");
1822 l = 7 - xx;
1824 fputs (IMMEDIATE_PREFIX, file);
1825 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1826 return;
1829 case 0:
1830 /* Handled below. */
1831 break;
1833 default:
1834 output_operand_lossage ("xstormy16_print_operand: unknown code");
1835 return;
1838 switch (GET_CODE (x))
1840 case REG:
1841 fputs (reg_names [REGNO (x)], file);
1842 break;
1844 case MEM:
1845 xstormy16_print_operand_address (file, XEXP (x, 0));
1846 break;
1848 default:
1849 /* Some kind of constant or label; an immediate operand,
1850 so prefix it with '#' for the assembler. */
1851 fputs (IMMEDIATE_PREFIX, file);
1852 output_addr_const (file, x);
1853 break;
1856 return;
1859 /* Expander for the `casesi' pattern.
1860 INDEX is the index of the switch statement.
1861 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1862 to the first table entry.
1863 RANGE is the number of table entries.
1864 TABLE is an ADDR_VEC that is the jump table.
1865 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1866 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1868 void
1869 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1870 rtx table, rtx default_label)
1872 HOST_WIDE_INT range_i = INTVAL (range);
1873 rtx int_index;
1875 /* This code uses 'br', so it can deal only with tables of size up to
1876 8192 entries. */
1877 if (range_i >= 8192)
1878 sorry ("switch statement of size %lu entries too large",
1879 (unsigned long) range_i);
1881 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1882 OPTAB_LIB_WIDEN);
1883 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1884 default_label);
1885 int_index = gen_lowpart_common (HImode, index);
1886 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1887 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1890 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1891 instructions, without label or alignment or any other special
1892 constructs. We know that the previous instruction will be the
1893 `tablejump_pcrel' output above.
1895 TODO: it might be nice to output 'br' instructions if they could
1896 all reach. */
1898 void
1899 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1901 int vlen, idx;
1903 switch_to_section (current_function_section ());
1905 vlen = XVECLEN (table, 0);
1906 for (idx = 0; idx < vlen; idx++)
1908 fputs ("\tjmpf ", file);
1909 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1910 fputc ('\n', file);
1914 /* Expander for the `call' patterns.
1915 RETVAL is the RTL for the return register or NULL for void functions.
1916 DEST is the function to call, expressed as a MEM.
1917 COUNTER is ignored. */
1919 void
1920 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1922 rtx call, temp;
1923 enum machine_mode mode;
1925 gcc_assert (MEM_P (dest));
1926 dest = XEXP (dest, 0);
1928 if (! CONSTANT_P (dest) && ! REG_P (dest))
1929 dest = force_reg (Pmode, dest);
1931 if (retval == NULL)
1932 mode = VOIDmode;
1933 else
1934 mode = GET_MODE (retval);
1936 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1937 counter);
1938 if (retval)
1939 call = gen_rtx_SET (VOIDmode, retval, call);
1941 if (! CONSTANT_P (dest))
1943 temp = gen_reg_rtx (HImode);
1944 emit_move_insn (temp, const0_rtx);
1946 else
1947 temp = const0_rtx;
1949 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1950 gen_rtx_USE (VOIDmode, temp)));
1951 emit_call_insn (call);
1954 /* Expanders for multiword computational operations. */
1956 /* Expander for arithmetic operations; emit insns to compute
1958 (set DEST (CODE:MODE SRC0 SRC1))
1960 When CODE is COMPARE, a branch template is generated
1961 (this saves duplicating code in xstormy16_split_cbranch). */
1963 void
1964 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1965 rtx dest, rtx src0, rtx src1)
1967 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1968 int i;
1969 int firstloop = 1;
1971 if (code == NEG)
1972 emit_move_insn (src0, const0_rtx);
1974 for (i = 0; i < num_words; i++)
1976 rtx w_src0, w_src1, w_dest;
1977 rtx insn;
1979 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1980 i * UNITS_PER_WORD);
1981 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1982 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1984 switch (code)
1986 case PLUS:
1987 if (firstloop
1988 && CONST_INT_P (w_src1)
1989 && INTVAL (w_src1) == 0)
1990 continue;
1992 if (firstloop)
1993 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1994 else
1995 insn = gen_addchi5 (w_dest, w_src0, w_src1);
1996 break;
1998 case NEG:
1999 case MINUS:
2000 case COMPARE:
2001 if (code == COMPARE && i == num_words - 1)
2003 rtx branch, sub, clobber, sub_1;
2005 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2006 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2007 sub = gen_rtx_SET (VOIDmode, w_dest,
2008 gen_rtx_MINUS (HImode, sub_1, w_src1));
2009 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2010 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2011 gen_rtx_IF_THEN_ELSE (VOIDmode,
2012 gen_rtx_EQ (HImode,
2013 sub_1,
2014 w_src1),
2015 pc_rtx,
2016 pc_rtx));
2017 insn = gen_rtx_PARALLEL (VOIDmode,
2018 gen_rtvec (3, branch, sub, clobber));
2020 else if (firstloop
2021 && code != COMPARE
2022 && CONST_INT_P (w_src1)
2023 && INTVAL (w_src1) == 0)
2024 continue;
2025 else if (firstloop)
2026 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2027 else
2028 insn = gen_subchi5 (w_dest, w_src0, w_src1);
2029 break;
2031 case IOR:
2032 case XOR:
2033 case AND:
2034 if (CONST_INT_P (w_src1)
2035 && INTVAL (w_src1) == -(code == AND))
2036 continue;
2038 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2039 w_src0, w_src1));
2040 break;
2042 case NOT:
2043 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2044 break;
2046 default:
2047 gcc_unreachable ();
2050 firstloop = 0;
2051 emit (insn);
2054 /* If we emit nothing, try_split() will think we failed. So emit
2055 something that does nothing and can be optimized away. */
2056 if (firstloop)
2057 emit (gen_nop ());
2060 /* The shift operations are split at output time for constant values;
2061 variable-width shifts get handed off to a library routine.
2063 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2064 SIZE_R will be a CONST_INT, X will be a hard register. */
2066 const char *
2067 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2068 rtx x, rtx size_r, rtx temp)
2070 HOST_WIDE_INT size;
2071 const char *r0, *r1, *rt;
2072 static char r[64];
2074 gcc_assert (CONST_INT_P (size_r)
2075 && REG_P (x)
2076 && mode == SImode);
2078 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2080 if (size == 0)
2081 return "";
2083 r0 = reg_names [REGNO (x)];
2084 r1 = reg_names [REGNO (x) + 1];
2086 /* For shifts of size 1, we can use the rotate instructions. */
2087 if (size == 1)
2089 switch (code)
2091 case ASHIFT:
2092 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2093 break;
2094 case ASHIFTRT:
2095 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2096 break;
2097 case LSHIFTRT:
2098 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2099 break;
2100 default:
2101 gcc_unreachable ();
2103 return r;
2106 /* For large shifts, there are easy special cases. */
2107 if (size == 16)
2109 switch (code)
2111 case ASHIFT:
2112 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2113 break;
2114 case ASHIFTRT:
2115 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2116 break;
2117 case LSHIFTRT:
2118 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2119 break;
2120 default:
2121 gcc_unreachable ();
2123 return r;
2125 if (size > 16)
2127 switch (code)
2129 case ASHIFT:
2130 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2131 r1, r0, r0, r1, (int) size - 16);
2132 break;
2133 case ASHIFTRT:
2134 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2135 r0, r1, r1, r0, (int) size - 16);
2136 break;
2137 case LSHIFTRT:
2138 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2139 r0, r1, r1, r0, (int) size - 16);
2140 break;
2141 default:
2142 gcc_unreachable ();
2144 return r;
2147 /* For the rest, we have to do more work. In particular, we
2148 need a temporary. */
2149 rt = reg_names [REGNO (temp)];
2150 switch (code)
2152 case ASHIFT:
2153 sprintf (r,
2154 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2155 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2156 r1, rt);
2157 break;
2158 case ASHIFTRT:
2159 sprintf (r,
2160 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2161 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2162 r0, rt);
2163 break;
2164 case LSHIFTRT:
2165 sprintf (r,
2166 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2167 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2168 r0, rt);
2169 break;
2170 default:
2171 gcc_unreachable ();
2173 return r;
2176 /* Attribute handling. */
2178 /* Return nonzero if the function is an interrupt function. */
2181 xstormy16_interrupt_function_p (void)
2183 tree attributes;
2185 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2186 any functions are declared, which is demonstrably wrong, but
2187 it is worked around here. FIXME. */
2188 if (!cfun)
2189 return 0;
2191 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2192 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine attributes recognized by this target: "interrupt" marks a
   function type as an interrupt handler; "below100"/"BELOW100" are
   handled by xstormy16_handle_below100_attribute below.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler.  */
  { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
  { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2212 /* Handle an "interrupt" attribute;
2213 arguments as in struct attribute_spec.handler. */
2215 static tree
2216 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2217 tree args ATTRIBUTE_UNUSED,
2218 int flags ATTRIBUTE_UNUSED,
2219 bool *no_add_attrs)
2221 if (TREE_CODE (*node) != FUNCTION_TYPE)
2223 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2224 name);
2225 *no_add_attrs = true;
2228 return NULL_TREE;
2231 /* Handle an "below" attribute;
2232 arguments as in struct attribute_spec.handler. */
2234 static tree
2235 xstormy16_handle_below100_attribute (tree *node,
2236 tree name ATTRIBUTE_UNUSED,
2237 tree args ATTRIBUTE_UNUSED,
2238 int flags ATTRIBUTE_UNUSED,
2239 bool *no_add_attrs)
2241 if (TREE_CODE (*node) != VAR_DECL
2242 && TREE_CODE (*node) != POINTER_TYPE
2243 && TREE_CODE (*node) != TYPE_DECL)
2245 warning (OPT_Wattributes,
2246 "%<__BELOW100__%> attribute only applies to variables");
2247 *no_add_attrs = true;
2249 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2251 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2253 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2254 "with auto storage class");
2255 *no_add_attrs = true;
2259 return NULL_TREE;
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Descriptor table for the machine-specific builtins.  Each entry
   gives the builtin's name, the insn pattern that expands it, the
   mapping from insn operands to call arguments, and the type
   signature (slot 0 of arg_types is the return type).  Note that
   __smodlh/__umodlh reuse the divide patterns and simply return the
   temp ('t') operand instead of the quotient ('r').  */
static struct
{
  const char * name;
  int md_code;
  const char * arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
  s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};
2283 static void
2284 xstormy16_init_builtins (void)
2286 tree args, ret_type, arg;
2287 int i, a;
2289 ret_type = void_type_node;
2291 for (i = 0; s16builtins[i].name; i++)
2293 args = void_list_node;
2294 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2296 switch (s16builtins[i].arg_types[a])
2298 case 's': arg = short_integer_type_node; break;
2299 case 'S': arg = short_unsigned_type_node; break;
2300 case 'l': arg = long_integer_type_node; break;
2301 case 'L': arg = long_unsigned_type_node; break;
2302 default: gcc_unreachable ();
2304 if (a == 0)
2305 ret_type = arg;
2306 else
2307 args = tree_cons (NULL_TREE, arg, args);
2309 add_builtin_function (s16builtins[i].name,
2310 build_function_type (ret_type, args),
2311 i, BUILT_IN_MD, NULL, NULL);
2315 static rtx
2316 xstormy16_expand_builtin (tree exp, rtx target,
2317 rtx subtarget ATTRIBUTE_UNUSED,
2318 enum machine_mode mode ATTRIBUTE_UNUSED,
2319 int ignore ATTRIBUTE_UNUSED)
2321 rtx op[10], args[10], pat, copyto[10], retval = 0;
2322 tree fndecl, argtree;
2323 int i, a, o, code;
2325 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2326 argtree = TREE_OPERAND (exp, 1);
2327 i = DECL_FUNCTION_CODE (fndecl);
2328 code = s16builtins[i].md_code;
2330 for (a = 0; a < 10 && argtree; a++)
2332 args[a] = expand_normal (TREE_VALUE (argtree));
2333 argtree = TREE_CHAIN (argtree);
2336 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2338 char ao = s16builtins[i].arg_ops[o];
2339 char c = insn_data[code].operand[o].constraint[0];
2340 enum machine_mode omode;
2342 copyto[o] = 0;
2344 omode = (enum machine_mode) insn_data[code].operand[o].mode;
2345 if (ao == 'r')
2346 op[o] = target ? target : gen_reg_rtx (omode);
2347 else if (ao == 't')
2348 op[o] = gen_reg_rtx (omode);
2349 else
2350 op[o] = args[(int) hex_value (ao)];
2352 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2354 if (c == '+' || c == '=')
2356 copyto[o] = op[o];
2357 op[o] = gen_reg_rtx (omode);
2359 else
2360 op[o] = copy_to_mode_reg (omode, op[o]);
2363 if (ao == 'r')
2364 retval = op[o];
2367 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2368 op[5], op[6], op[7], op[8], op[9]);
2369 emit_insn (pat);
2371 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2372 if (copyto[o])
2374 emit_move_insn (copyto[o], op[o]);
2375 if (op[o] == retval)
2376 retval = copyto[o];
2379 return retval;
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch (cbranchhi or cbranchhi_neg) testing a
   register against zero.  Walk backwards looking for the AND (or sign
   extend) and the below-100 memory load that feed the tested register;
   if the whole combination is found, rewrite the branch condition to
   test the memory location directly and delete the now-dead insns.  */

static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and_insn, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* EQ/NE test a single bit isolated by an AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE test the sign bit, via a preceding sign extend.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The combination only applies to (cmp reg 0) where REG dies
     at the branch.  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn); and_insn;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  /* Give up if REG is used in between, or if we hit anything
	     other than a note or a plain insn.  */
	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (GET_CODE (and_insn) != NOTE
	      && GET_CODE (and_insn) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn); and_insn;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (GET_CODE (and_insn) != NOTE
	      && GET_CODE (and_insn) != INSN)
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* No shift found before an intervening use of REG (or a
		 non-insn): proceed without one.  */
	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }

  if (!and_insn)
    return;

  /* Now look further back for the load that brought the tested value
     into REG; it must be a non-volatile below-100 access.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign-bit test: the mask is the top bit of the loaded mode.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Account for the bit position shifted away before the AND.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      /* Narrow the HImode access to the QImode byte that actually
	 contains the tested bit.  */
      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the memory directly, and
     force re-recognition of the modified insn.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2602 static void
2603 xstormy16_reorg (void)
2605 rtx insn;
2607 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2609 if (! JUMP_P (insn))
2610 continue;
2611 combine_bnp (insn);
2615 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2617 static bool
2618 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2620 const HOST_WIDE_INT size = int_size_in_bytes (type);
2621 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Implement TARGET_OPTION_OPTIMIZATION_TABLE: enable
   -fomit-frame-pointer by default at -O1 and above.
   NOTE(review): "xstorym16" is a misspelling, but the name matches
   the TARGET_OPTION_OPTIMIZATION_TABLE definition that uses it, so it
   is kept as-is.  */
static const struct default_options xstorym16_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
/* Target hook definitions; these populate TARGET_INITIALIZER for the
   targetm vector below.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* NOTE(review): "xstorym16" is a long-standing misspelling, but it
   matches the table's definition above; renaming would have to touch
   both places at once.  */
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE xstorym16_option_optimization_table

/* The target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-stormy16.h"