PR target/81369
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blobaee7742de89319c38d95838b4fcbe0e104cf9bbb
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2017 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "output.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "explow.h"
43 #include "expr.h"
44 #include "langhooks.h"
45 #include "cfgrtl.h"
46 #include "gimplify.h"
47 #include "reload.h"
48 #include "builtins.h"
50 /* This file should be included last. */
51 #include "target-def.h"
53 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
54 static void xstormy16_asm_out_constructor (rtx, int);
55 static void xstormy16_asm_out_destructor (rtx, int);
56 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
57 HOST_WIDE_INT, tree);
59 static void xstormy16_init_builtins (void);
60 static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
61 static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 static GTY(()) section *bss100_section;
66 /* Compute a (partial) cost for rtx X. Return true if the complete
67 cost has been computed, and false if subexpressions should be
68 scanned. In either case, *TOTAL contains the cost result. */
70 static bool
71 xstormy16_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
72 int outer_code ATTRIBUTE_UNUSED,
73 int opno ATTRIBUTE_UNUSED, int *total,
74 bool speed ATTRIBUTE_UNUSED)
76 int code = GET_CODE (x);
78 switch (code)
80 case CONST_INT:
81 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
82 *total = COSTS_N_INSNS (1) / 2;
83 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
84 *total = COSTS_N_INSNS (1);
85 else
86 *total = COSTS_N_INSNS (2);
87 return true;
89 case CONST_DOUBLE:
90 case CONST:
91 case SYMBOL_REF:
92 case LABEL_REF:
93 *total = COSTS_N_INSNS (2);
94 return true;
96 case MULT:
97 *total = COSTS_N_INSNS (35 + 6);
98 return true;
99 case DIV:
100 *total = COSTS_N_INSNS (51 - 6);
101 return true;
103 default:
104 return false;
108 static int
109 xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
110 addr_space_t as ATTRIBUTE_UNUSED,
111 bool speed ATTRIBUTE_UNUSED)
113 return (CONST_INT_P (x) ? 2
114 : GET_CODE (x) == PLUS ? 7
115 : 5);
118 /* Worker function for TARGET_MEMORY_MOVE_COST. */
120 static int
121 xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
122 bool in)
124 return (5 + memory_move_secondary_cost (mode, rclass, in));
127 /* Branches are handled as follows:
129 1. HImode compare-and-branches. The machine supports these
130 natively, so the appropriate pattern is emitted directly.
132 2. SImode EQ and NE. These are emitted as pairs of HImode
133 compare-and-branches.
135 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
136 of a SImode subtract followed by a branch (not a compare-and-branch),
137 like this:
142 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
147 bne. */
149 /* Emit a branch of kind CODE to location LOC. */
151 void
152 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
154 rtx condition_rtx, loc_ref, branch, cy_clobber;
155 rtvec vec;
156 machine_mode mode;
158 mode = GET_MODE (op0);
159 gcc_assert (mode == HImode || mode == SImode);
161 if (mode == SImode
162 && (code == GT || code == LE || code == GTU || code == LEU))
164 int unsigned_p = (code == GTU || code == LEU);
165 int gt_p = (code == GT || code == GTU);
166 rtx lab = NULL_RTX;
168 if (gt_p)
169 lab = gen_label_rtx ();
170 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
171 /* This should be generated as a comparison against the temporary
172 created by the previous insn, but reload can't handle that. */
173 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
174 if (gt_p)
175 emit_label (lab);
176 return;
178 else if (mode == SImode
179 && (code == NE || code == EQ)
180 && op1 != const0_rtx)
182 rtx op0_word, op1_word;
183 rtx lab = NULL_RTX;
184 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
185 int i;
187 if (code == EQ)
188 lab = gen_label_rtx ();
190 for (i = 0; i < num_words - 1; i++)
192 op0_word = simplify_gen_subreg (word_mode, op0, mode,
193 i * UNITS_PER_WORD);
194 op1_word = simplify_gen_subreg (word_mode, op1, mode,
195 i * UNITS_PER_WORD);
196 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
198 op0_word = simplify_gen_subreg (word_mode, op0, mode,
199 i * UNITS_PER_WORD);
200 op1_word = simplify_gen_subreg (word_mode, op1, mode,
201 i * UNITS_PER_WORD);
202 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
204 if (code == EQ)
205 emit_label (lab);
206 return;
209 /* We can't allow reload to try to generate any reload after a branch,
210 so when some register must match we must make the temporary ourselves. */
211 if (mode != HImode)
213 rtx tmp;
214 tmp = gen_reg_rtx (mode);
215 emit_move_insn (tmp, op0);
216 op0 = tmp;
219 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
220 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
221 branch = gen_rtx_SET (pc_rtx,
222 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
223 loc_ref, pc_rtx));
225 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
227 if (mode == HImode)
228 vec = gen_rtvec (2, branch, cy_clobber);
229 else if (code == NE || code == EQ)
230 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
231 else
233 rtx sub;
234 #if 0
235 sub = gen_rtx_SET (op0, gen_rtx_MINUS (SImode, op0, op1));
236 #else
237 sub = gen_rtx_CLOBBER (SImode, op0);
238 #endif
239 vec = gen_rtvec (3, branch, sub, cy_clobber);
242 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
245 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
246 the arithmetic operation. Most of the work is done by
247 xstormy16_expand_arith. */
249 void
250 xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
251 rtx dest)
253 rtx op0 = XEXP (comparison, 0);
254 rtx op1 = XEXP (comparison, 1);
255 rtx_insn *seq, *last_insn;
256 rtx compare;
258 start_sequence ();
259 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
260 seq = get_insns ();
261 end_sequence ();
263 gcc_assert (INSN_P (seq));
265 last_insn = seq;
266 while (NEXT_INSN (last_insn) != NULL_RTX)
267 last_insn = NEXT_INSN (last_insn);
269 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
270 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
271 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
272 emit_insn (seq);
276 /* Return the string to output a conditional branch to LABEL, which is
277 the operand number of the label.
279 OP is the conditional expression, or NULL for branch-always.
281 REVERSED is nonzero if we should reverse the sense of the comparison.
283 INSN is the insn. */
285 char *
286 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
287 rtx_insn *insn)
289 static char string[64];
290 int need_longbranch = (op != NULL_RTX
291 ? get_attr_length (insn) == 8
292 : get_attr_length (insn) == 4);
293 int really_reversed = reversed ^ need_longbranch;
294 const char *ccode;
295 const char *templ;
296 const char *operands;
297 enum rtx_code code;
299 if (! op)
301 if (need_longbranch)
302 ccode = "jmpf";
303 else
304 ccode = "br";
305 sprintf (string, "%s %s", ccode, label);
306 return string;
309 code = GET_CODE (op);
311 if (! REG_P (XEXP (op, 0)))
313 code = swap_condition (code);
314 operands = "%3,%2";
316 else
317 operands = "%2,%3";
319 /* Work out which way this really branches. */
320 if (really_reversed)
321 code = reverse_condition (code);
323 switch (code)
325 case EQ: ccode = "z"; break;
326 case NE: ccode = "nz"; break;
327 case GE: ccode = "ge"; break;
328 case LT: ccode = "lt"; break;
329 case GT: ccode = "gt"; break;
330 case LE: ccode = "le"; break;
331 case GEU: ccode = "nc"; break;
332 case LTU: ccode = "c"; break;
333 case GTU: ccode = "hi"; break;
334 case LEU: ccode = "ls"; break;
336 default:
337 gcc_unreachable ();
340 if (need_longbranch)
341 templ = "b%s %s,.+8 | jmpf %s";
342 else
343 templ = "b%s %s,%s";
344 sprintf (string, templ, ccode, operands, label);
346 return string;
349 /* Return the string to output a conditional branch to LABEL, which is
350 the operand number of the label, but suitable for the tail of a
351 SImode branch.
353 OP is the conditional expression (OP is never NULL_RTX).
355 REVERSED is nonzero if we should reverse the sense of the comparison.
357 INSN is the insn. */
359 char *
360 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
361 rtx_insn *insn)
363 static char string[64];
364 int need_longbranch = get_attr_length (insn) >= 8;
365 int really_reversed = reversed ^ need_longbranch;
366 const char *ccode;
367 const char *templ;
368 char prevop[16];
369 enum rtx_code code;
371 code = GET_CODE (op);
373 /* Work out which way this really branches. */
374 if (really_reversed)
375 code = reverse_condition (code);
377 switch (code)
379 case EQ: ccode = "z"; break;
380 case NE: ccode = "nz"; break;
381 case GE: ccode = "ge"; break;
382 case LT: ccode = "lt"; break;
383 case GEU: ccode = "nc"; break;
384 case LTU: ccode = "c"; break;
386 /* The missing codes above should never be generated. */
387 default:
388 gcc_unreachable ();
391 switch (code)
393 case EQ: case NE:
395 int regnum;
397 gcc_assert (REG_P (XEXP (op, 0)));
399 regnum = REGNO (XEXP (op, 0));
400 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
402 break;
404 case GE: case LT: case GEU: case LTU:
405 strcpy (prevop, "sbc %2,%3");
406 break;
408 default:
409 gcc_unreachable ();
412 if (need_longbranch)
413 templ = "%s | b%s .+6 | jmpf %s";
414 else
415 templ = "%s | b%s %s";
416 sprintf (string, templ, prevop, ccode, label);
418 return string;
421 /* Many machines have some registers that cannot be copied directly to or from
422 memory or even from other types of registers. An example is the `MQ'
423 register, which on most machines, can only be copied to or from general
424 registers, but not memory. Some machines allow copying all registers to and
425 from memory, but require a scratch register for stores to some memory
426 locations (e.g., those with symbolic address on the RT, and those with
427 certain symbolic address on the SPARC when compiling PIC). In some cases,
428 both an intermediate and a scratch register are required.
430 You should define these macros to indicate to the reload phase that it may
431 need to allocate at least one register for a reload in addition to the
432 register to contain the data. Specifically, if copying X to a register
433 RCLASS in MODE requires an intermediate register, you should define
434 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
435 whose registers can be used as intermediate registers or scratch registers.
437 If copying a register RCLASS in MODE to X requires an intermediate or scratch
438 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
439 largest register class required. If the requirements for input and output
440 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
441 instead of defining both macros identically.
443 The values returned by these macros are often `GENERAL_REGS'. Return
444 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
445 to or from a register of RCLASS in MODE without requiring a scratch register.
446 Do not define this macro if it would always return `NO_REGS'.
448 If a scratch register is required (either with or without an intermediate
449 register), you should define patterns for `reload_inM' or `reload_outM', as
450 required.. These patterns, which will normally be implemented with a
451 `define_expand', should be similar to the `movM' patterns, except that
452 operand 2 is the scratch register.
454 Define constraints for the reload register and scratch register that contain
455 a single register class. If the original reload register (whose class is
456 RCLASS) can meet the constraint given in the pattern, the value returned by
457 these macros is used for the class of the scratch register. Otherwise, two
458 additional reload registers are required. Their classes are obtained from
459 the constraints in the insn pattern.
461 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
462 either be in a hard register or in memory. Use `true_regnum' to find out;
463 it will return -1 if the pseudo is in memory and the hard register number if
464 it is in a register.
466 These macros should not be used in the case where a particular class of
467 registers can only be copied to memory and not to another class of
468 registers. In that case, secondary reload registers are not needed and
469 would not be helpful. Instead, a stack location must be used to perform the
470 copy and the `movM' pattern should use memory as an intermediate storage.
471 This case often occurs between floating-point and general registers. */
473 enum reg_class
474 xstormy16_secondary_reload_class (enum reg_class rclass,
475 machine_mode mode ATTRIBUTE_UNUSED,
476 rtx x)
478 /* This chip has the interesting property that only the first eight
479 registers can be moved to/from memory. */
480 if ((MEM_P (x)
481 || ((GET_CODE (x) == SUBREG || REG_P (x))
482 && (true_regnum (x) == -1
483 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
484 && ! reg_class_subset_p (rclass, EIGHT_REGS))
485 return EIGHT_REGS;
487 return NO_REGS;
490 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
491 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
493 static reg_class_t
494 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
496 if (rclass == GENERAL_REGS && MEM_P (x))
497 return EIGHT_REGS;
499 return rclass;
502 /* Predicate for symbols and addresses that reflect special 8-bit
503 addressing. */
506 xstormy16_below100_symbol (rtx x,
507 machine_mode mode ATTRIBUTE_UNUSED)
509 if (GET_CODE (x) == CONST)
510 x = XEXP (x, 0);
511 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
512 x = XEXP (x, 0);
514 if (GET_CODE (x) == SYMBOL_REF)
515 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
517 if (CONST_INT_P (x))
519 HOST_WIDE_INT i = INTVAL (x);
521 if ((i >= 0x0000 && i <= 0x00ff)
522 || (i >= 0x7f00 && i <= 0x7fff))
523 return 1;
525 return 0;
528 /* Likewise, but only for non-volatile MEMs, for patterns where the
529 MEM will get split into smaller sized accesses. */
532 xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
534 if (MEM_P (x) && MEM_VOLATILE_P (x))
535 return 0;
536 return xstormy16_below100_operand (x, mode);
539 /* Expand an 8-bit IOR. This either detects the one case we can
540 actually do, or uses a 16-bit IOR. */
542 void
543 xstormy16_expand_iorqi3 (rtx *operands)
545 rtx in, out, outsub, val;
547 out = operands[0];
548 in = operands[1];
549 val = operands[2];
551 if (xstormy16_onebit_set_operand (val, QImode))
553 if (!xstormy16_below100_or_register (in, QImode))
554 in = copy_to_mode_reg (QImode, in);
555 if (!xstormy16_below100_or_register (out, QImode))
556 out = gen_reg_rtx (QImode);
557 emit_insn (gen_iorqi3_internal (out, in, val));
558 if (out != operands[0])
559 emit_move_insn (operands[0], out);
560 return;
563 if (! REG_P (in))
564 in = copy_to_mode_reg (QImode, in);
566 if (! REG_P (val) && ! CONST_INT_P (val))
567 val = copy_to_mode_reg (QImode, val);
569 if (! REG_P (out))
570 out = gen_reg_rtx (QImode);
572 in = simplify_gen_subreg (HImode, in, QImode, 0);
573 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
575 if (! CONST_INT_P (val))
576 val = simplify_gen_subreg (HImode, val, QImode, 0);
578 emit_insn (gen_iorhi3 (outsub, in, val));
580 if (out != operands[0])
581 emit_move_insn (operands[0], out);
584 /* Expand an 8-bit AND. This either detects the one case we can
585 actually do, or uses a 16-bit AND. */
587 void
588 xstormy16_expand_andqi3 (rtx *operands)
590 rtx in, out, outsub, val;
592 out = operands[0];
593 in = operands[1];
594 val = operands[2];
596 if (xstormy16_onebit_clr_operand (val, QImode))
598 if (!xstormy16_below100_or_register (in, QImode))
599 in = copy_to_mode_reg (QImode, in);
600 if (!xstormy16_below100_or_register (out, QImode))
601 out = gen_reg_rtx (QImode);
602 emit_insn (gen_andqi3_internal (out, in, val));
603 if (out != operands[0])
604 emit_move_insn (operands[0], out);
605 return;
608 if (! REG_P (in))
609 in = copy_to_mode_reg (QImode, in);
611 if (! REG_P (val) && ! CONST_INT_P (val))
612 val = copy_to_mode_reg (QImode, val);
614 if (! REG_P (out))
615 out = gen_reg_rtx (QImode);
617 in = simplify_gen_subreg (HImode, in, QImode, 0);
618 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
620 if (! CONST_INT_P (val))
621 val = simplify_gen_subreg (HImode, val, QImode, 0);
623 emit_insn (gen_andhi3 (outsub, in, val));
625 if (out != operands[0])
626 emit_move_insn (operands[0], out);
/* True if X (plus OFFSET) is a signed 12-bit displacement.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X (plus OFFSET) is a valid absolute address: within the
   first 32K, and either below 0x100 or at/above 0x7F00.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
639 bool
640 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
641 rtx x, bool strict)
643 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
644 return true;
646 if (GET_CODE (x) == PLUS
647 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
649 x = XEXP (x, 0);
650 /* PR 31232: Do not allow INT+INT as an address. */
651 if (CONST_INT_P (x))
652 return false;
655 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
656 || GET_CODE (x) == POST_INC
657 || GET_CODE (x) == PRE_DEC)
658 x = XEXP (x, 0);
660 if (REG_P (x)
661 && REGNO_OK_FOR_BASE_P (REGNO (x))
662 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
663 return true;
665 if (xstormy16_below100_symbol (x, mode))
666 return true;
668 return false;
671 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
673 On this chip, this is true if the address is valid with an offset
674 of 0 but not of 6, because in that case it cannot be used as an
675 address for DImode or DFmode, or if the address is a post-increment
676 or pre-decrement address. */
678 static bool
679 xstormy16_mode_dependent_address_p (const_rtx x,
680 addr_space_t as ATTRIBUTE_UNUSED)
682 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
683 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
684 return true;
686 if (GET_CODE (x) == PLUS
687 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
688 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
689 return true;
691 /* Auto-increment addresses are now treated generically in recog.c. */
692 return false;
696 short_memory_operand (rtx x, machine_mode mode)
698 if (! memory_operand (x, mode))
699 return 0;
700 return (GET_CODE (XEXP (x, 0)) != PLUS);
703 /* Splitter for the 'move' patterns, for modes not directly implemented
704 by hardware. Emit insns to copy a value of mode MODE from SRC to
705 DEST.
707 This function is only called when reload_completed. */
709 void
710 xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
712 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
713 int direction, end, i;
714 int src_modifies = 0;
715 int dest_modifies = 0;
716 int src_volatile = 0;
717 int dest_volatile = 0;
718 rtx mem_operand;
719 rtx auto_inc_reg_rtx = NULL_RTX;
721 /* Check initial conditions. */
722 gcc_assert (reload_completed
723 && mode != QImode && mode != HImode
724 && nonimmediate_operand (dest, mode)
725 && general_operand (src, mode));
727 /* This case is not supported below, and shouldn't be generated. */
728 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
730 /* This case is very very bad after reload, so trap it now. */
731 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
733 /* The general idea is to copy by words, offsetting the source and
734 destination. Normally the least-significant word will be copied
735 first, but for pre-dec operations it's better to copy the
736 most-significant word first. Only one operand can be a pre-dec
737 or post-inc operand.
739 It's also possible that the copy overlaps so that the direction
740 must be reversed. */
741 direction = 1;
743 if (MEM_P (dest))
745 mem_operand = XEXP (dest, 0);
746 dest_modifies = side_effects_p (mem_operand);
747 if (auto_inc_p (mem_operand))
748 auto_inc_reg_rtx = XEXP (mem_operand, 0);
749 dest_volatile = MEM_VOLATILE_P (dest);
750 if (dest_volatile)
752 dest = copy_rtx (dest);
753 MEM_VOLATILE_P (dest) = 0;
756 else if (MEM_P (src))
758 mem_operand = XEXP (src, 0);
759 src_modifies = side_effects_p (mem_operand);
760 if (auto_inc_p (mem_operand))
761 auto_inc_reg_rtx = XEXP (mem_operand, 0);
762 src_volatile = MEM_VOLATILE_P (src);
763 if (src_volatile)
765 src = copy_rtx (src);
766 MEM_VOLATILE_P (src) = 0;
769 else
770 mem_operand = NULL_RTX;
772 if (mem_operand == NULL_RTX)
774 if (REG_P (src)
775 && REG_P (dest)
776 && reg_overlap_mentioned_p (dest, src)
777 && REGNO (dest) > REGNO (src))
778 direction = -1;
780 else if (GET_CODE (mem_operand) == PRE_DEC
781 || (GET_CODE (mem_operand) == PLUS
782 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
783 direction = -1;
784 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
786 int regno;
788 gcc_assert (REG_P (dest));
789 regno = REGNO (dest);
791 gcc_assert (refers_to_regno_p (regno, regno + num_words,
792 mem_operand, 0));
794 if (refers_to_regno_p (regno, mem_operand))
795 direction = -1;
796 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
797 mem_operand, 0))
798 direction = 1;
799 else
800 /* This means something like
801 (set (reg:DI r0) (mem:DI (reg:HI r1)))
802 which we'd need to support by doing the set of the second word
803 last. */
804 gcc_unreachable ();
807 end = direction < 0 ? -1 : num_words;
808 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
810 rtx w_src, w_dest, insn;
812 if (src_modifies)
813 w_src = gen_rtx_MEM (word_mode, mem_operand);
814 else
815 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
816 if (src_volatile)
817 MEM_VOLATILE_P (w_src) = 1;
818 if (dest_modifies)
819 w_dest = gen_rtx_MEM (word_mode, mem_operand);
820 else
821 w_dest = simplify_gen_subreg (word_mode, dest, mode,
822 i * UNITS_PER_WORD);
823 if (dest_volatile)
824 MEM_VOLATILE_P (w_dest) = 1;
826 /* The simplify_subreg calls must always be able to simplify. */
827 gcc_assert (GET_CODE (w_src) != SUBREG
828 && GET_CODE (w_dest) != SUBREG);
830 insn = emit_insn (gen_rtx_SET (w_dest, w_src));
831 if (auto_inc_reg_rtx)
832 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
833 auto_inc_reg_rtx,
834 REG_NOTES (insn));
838 /* Expander for the 'move' patterns. Emit insns to copy a value of
839 mode MODE from SRC to DEST. */
841 void
842 xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
844 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
846 rtx pmv = XEXP (dest, 0);
847 rtx dest_reg = XEXP (pmv, 0);
848 rtx dest_mod = XEXP (pmv, 1);
849 rtx set = gen_rtx_SET (dest_reg, dest_mod);
850 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
852 dest = gen_rtx_MEM (mode, dest_reg);
853 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
855 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
857 rtx pmv = XEXP (src, 0);
858 rtx src_reg = XEXP (pmv, 0);
859 rtx src_mod = XEXP (pmv, 1);
860 rtx set = gen_rtx_SET (src_reg, src_mod);
861 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
863 src = gen_rtx_MEM (mode, src_reg);
864 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
867 /* There are only limited immediate-to-memory move instructions. */
868 if (! reload_in_progress
869 && ! reload_completed
870 && MEM_P (dest)
871 && (! CONST_INT_P (XEXP (dest, 0))
872 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
873 && ! xstormy16_below100_operand (dest, mode)
874 && ! REG_P (src)
875 && GET_CODE (src) != SUBREG)
876 src = copy_to_mode_reg (mode, src);
878 /* Don't emit something we would immediately split. */
879 if (reload_completed
880 && mode != HImode && mode != QImode)
882 xstormy16_split_move (mode, dest, src);
883 return;
886 emit_insn (gen_rtx_SET (dest, src));
889 /* Stack Layout:
891 The stack is laid out as follows:
893 SP->
894 FP-> Local variables
895 Register save area (up to 4 words)
896 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
898 AP-> Return address (two words)
899 9th procedure parameter word
900 10th procedure parameter word
902 last procedure parameter word
904 The frame pointer location is tuned to make it most likely that all
905 parameters and local variables can be accessed using a load-indexed
906 instruction. */
908 /* A structure to describe the layout. */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
923 /* Does REGNO need to be saved? */
/* A call-saved register that is live needs saving; in an interrupt
   function (IFUN) so does any used call-clobbered register except
   carry, and in a non-leaf interrupt function all of them do.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
930 /* Compute the stack layout. */
932 struct xstormy16_stack_layout
933 xstormy16_compute_stack_layout (void)
935 struct xstormy16_stack_layout layout;
936 int regno;
937 const int ifun = xstormy16_interrupt_function_p ();
939 layout.locals_size = get_frame_size ();
941 layout.register_save_size = 0;
942 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
943 if (REG_NEEDS_SAVE (regno, ifun))
944 layout.register_save_size += UNITS_PER_WORD;
946 if (cfun->stdarg)
947 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
948 else
949 layout.stdarg_save_size = 0;
951 layout.frame_size = (layout.locals_size
952 + layout.register_save_size
953 + layout.stdarg_save_size);
955 if (crtl->args.size <= 2048 && crtl->args.size != -1)
957 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
958 + crtl->args.size <= 2048)
959 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
960 else
961 layout.fp_minus_ap = 2048 - crtl->args.size;
963 else
964 layout.fp_minus_ap = (layout.stdarg_save_size
965 + layout.register_save_size
966 - INCOMING_FRAME_SP_OFFSET);
967 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
968 - layout.fp_minus_ap);
969 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
970 return layout;
973 /* Worker function for TARGET_CAN_ELIMINATE. */
975 static bool
976 xstormy16_can_eliminate (const int from, const int to)
978 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
979 ? ! frame_pointer_needed
980 : true);
983 /* Determine how all the special registers get eliminated. */
986 xstormy16_initial_elimination_offset (int from, int to)
988 struct xstormy16_stack_layout layout;
989 int result;
991 layout = xstormy16_compute_stack_layout ();
993 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
994 result = layout.sp_minus_fp - layout.locals_size;
995 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
996 result = - layout.locals_size;
997 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
998 result = - layout.fp_minus_ap;
999 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1000 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1001 else
1002 gcc_unreachable ();
1004 return result;
1007 static rtx
1008 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1010 rtx set, clobber, insn;
1012 set = gen_rtx_SET (dest, gen_rtx_PLUS (HImode, src0, src1));
1013 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1014 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1015 return insn;
1018 /* Called after register allocation to add any instructions needed for
1019 the prologue. Using a prologue insn is favored compared to putting
1020 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1021 since it allows the scheduler to intermix instructions with the
1022 saves of the caller saved registers. In some cases, it might be
1023 necessary to emit a barrier instruction as the last insn to prevent
1024 such scheduling.
1026 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1027 so that the debug info generation code can handle them properly. */
1029 void
1030 xstormy16_expand_prologue (void)
1032 struct xstormy16_stack_layout layout;
1033 int regno;
1034 rtx insn;
1035 rtx mem_push_rtx;
1036 const int ifun = xstormy16_interrupt_function_p ();
1038 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1039 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1041 layout = xstormy16_compute_stack_layout ();
1043 if (layout.locals_size >= 32768)
1044 error ("local variable memory requirements exceed capacity");
1046 if (flag_stack_usage_info)
1047 current_function_static_stack_size = layout.frame_size;
1049 /* Save the argument registers if necessary. */
1050 if (layout.stdarg_save_size)
1051 for (regno = FIRST_ARGUMENT_REGISTER;
1052 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1053 regno++)
1055 rtx dwarf;
1056 rtx reg = gen_rtx_REG (HImode, regno);
1058 insn = emit_move_insn (mem_push_rtx, reg);
1059 RTX_FRAME_RELATED_P (insn) = 1;
1061 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1063 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
1064 reg);
1065 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
1066 plus_constant (Pmode,
1067 stack_pointer_rtx,
1068 GET_MODE_SIZE (Pmode)));
1069 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1070 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1071 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1074 /* Push each of the registers to save. */
1075 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1076 if (REG_NEEDS_SAVE (regno, ifun))
1078 rtx dwarf;
1079 rtx reg = gen_rtx_REG (HImode, regno);
1081 insn = emit_move_insn (mem_push_rtx, reg);
1082 RTX_FRAME_RELATED_P (insn) = 1;
1084 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1086 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
1087 reg);
1088 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
1089 plus_constant (Pmode,
1090 stack_pointer_rtx,
1091 GET_MODE_SIZE (Pmode)));
1092 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1093 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1094 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1097 /* It's just possible that the SP here might be what we need for
1098 the new FP... */
1099 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1101 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1102 RTX_FRAME_RELATED_P (insn) = 1;
1105 /* Allocate space for local variables. */
1106 if (layout.locals_size)
1108 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1109 GEN_INT (layout.locals_size));
1110 RTX_FRAME_RELATED_P (insn) = 1;
1113 /* Set up the frame pointer, if required. */
1114 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1116 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1117 RTX_FRAME_RELATED_P (insn) = 1;
1119 if (layout.sp_minus_fp)
1121 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1122 hard_frame_pointer_rtx,
1123 GEN_INT (- layout.sp_minus_fp));
1124 RTX_FRAME_RELATED_P (insn) = 1;
1129 /* Do we need an epilogue at all? */
1132 direct_return (void)
1134 return (reload_completed
1135 && xstormy16_compute_stack_layout ().frame_size == 0
1136 && ! xstormy16_interrupt_function_p ());
1139 /* Called after register allocation to add any instructions needed for
1140 the epilogue. Using an epilogue insn is favored compared to putting
1141 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1142 since it allows the scheduler to intermix instructions with the
1143 saves of the caller saved registers. In some cases, it might be
1144 necessary to emit a barrier instruction as the last insn to prevent
1145 such scheduling. */
1147 void
1148 xstormy16_expand_epilogue (void)
1150 struct xstormy16_stack_layout layout;
1151 rtx mem_pop_rtx;
1152 int regno;
1153 const int ifun = xstormy16_interrupt_function_p ();
1155 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1156 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1158 layout = xstormy16_compute_stack_layout ();
1160 /* Pop the stack for the locals. */
1161 if (layout.locals_size)
1163 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1164 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1165 else
1166 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1167 GEN_INT (- layout.locals_size));
1170 /* Restore any call-saved registers. */
1171 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1172 if (REG_NEEDS_SAVE (regno, ifun))
1173 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1175 /* Pop the stack for the stdarg save area. */
1176 if (layout.stdarg_save_size)
1177 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1178 GEN_INT (- layout.stdarg_save_size));
1180 /* Return. */
1181 if (ifun)
1182 emit_jump_insn (gen_return_internal_interrupt ());
1183 else
1184 emit_jump_insn (gen_return_internal ());
1188 xstormy16_epilogue_uses (int regno)
1190 if (reload_completed && call_used_regs[regno])
1192 const int ifun = xstormy16_interrupt_function_p ();
1193 return REG_NEEDS_SAVE (regno, ifun);
1195 return 0;
/* Profiling (-pg) is not implemented for this target; report that
   cleanly instead of emitting wrong code.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1204 /* Update CUM to advance past an argument in the argument list. The
1205 values MODE, TYPE and NAMED describe that argument. Once this is
1206 done, the variable CUM is suitable for analyzing the *following*
1207 argument with `TARGET_FUNCTION_ARG', etc.
1209 This function need not do anything if the argument in question was
1210 passed on the stack. The compiler knows how to track the amount of
1211 stack space used for arguments without any special help. However,
1212 it makes life easier for xstormy16_build_va_list if it does update
1213 the word count. */
1215 static void
1216 xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1217 const_tree type, bool named ATTRIBUTE_UNUSED)
1219 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1221 /* If an argument would otherwise be passed partially in registers,
1222 and partially on the stack, the whole of it is passed on the
1223 stack. */
1224 if (*cum < NUM_ARGUMENT_REGISTERS
1225 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1226 *cum = NUM_ARGUMENT_REGISTERS;
1228 *cum += XSTORMY16_WORD_SIZE (type, mode);
1231 static rtx
1232 xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
1233 const_tree type, bool named ATTRIBUTE_UNUSED)
1235 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1237 if (mode == VOIDmode)
1238 return const0_rtx;
1239 if (targetm.calls.must_pass_in_stack (mode, type)
1240 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1241 return NULL_RTX;
1242 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1245 /* Build the va_list type.
1247 For this chip, va_list is a record containing a counter and a pointer.
1248 The counter is of type 'int' and indicates how many bytes
1249 have been used to date. The pointer indicates the stack position
1250 for arguments that have not been passed in registers.
1251 To keep the layout nice, the pointer is first in the structure. */
1253 static tree
1254 xstormy16_build_builtin_va_list (void)
1256 tree f_1, f_2, record, type_decl;
1258 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1259 type_decl = build_decl (BUILTINS_LOCATION,
1260 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1262 f_1 = build_decl (BUILTINS_LOCATION,
1263 FIELD_DECL, get_identifier ("base"),
1264 ptr_type_node);
1265 f_2 = build_decl (BUILTINS_LOCATION,
1266 FIELD_DECL, get_identifier ("count"),
1267 unsigned_type_node);
1269 DECL_FIELD_CONTEXT (f_1) = record;
1270 DECL_FIELD_CONTEXT (f_2) = record;
1272 TYPE_STUB_DECL (record) = type_decl;
1273 TYPE_NAME (record) = type_decl;
1274 TYPE_FIELDS (record) = f_1;
1275 DECL_CHAIN (f_1) = f_2;
1277 layout_type (record);
1279 return record;
1282 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1283 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1284 variable to initialize. NEXTARG is the machine independent notion of the
1285 'next' argument after the variable arguments. */
1287 static void
1288 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1290 tree f_base, f_count;
1291 tree base, count;
1292 tree t,u;
1294 if (xstormy16_interrupt_function_p ())
1295 error ("cannot use va_start in interrupt function");
1297 f_base = TYPE_FIELDS (va_list_type_node);
1298 f_count = DECL_CHAIN (f_base);
1300 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1301 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1302 NULL_TREE);
1304 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1305 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1306 u = fold_convert (TREE_TYPE (count), u);
1307 t = fold_build_pointer_plus (t, u);
1308 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1309 TREE_SIDE_EFFECTS (t) = 1;
1310 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1312 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1313 build_int_cst (NULL_TREE,
1314 crtl->args.info * UNITS_PER_WORD));
1315 TREE_SIDE_EFFECTS (t) = 1;
1316 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1319 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1320 of type va_list as a tree, TYPE is the type passed to va_arg.
1321 Note: This algorithm is documented in stormy-abi. */
1323 static tree
1324 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1325 gimple_seq *post_p ATTRIBUTE_UNUSED)
1327 tree f_base, f_count;
1328 tree base, count;
1329 tree count_tmp, addr, t;
1330 tree lab_gotaddr, lab_fromstack;
1331 int size, size_of_reg_args, must_stack;
1332 tree size_tree;
1334 f_base = TYPE_FIELDS (va_list_type_node);
1335 f_count = DECL_CHAIN (f_base);
1337 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1338 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1339 NULL_TREE);
1341 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1342 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1343 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1345 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1347 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1348 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1349 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1350 addr = create_tmp_var (ptr_type_node);
1352 if (!must_stack)
1354 tree r;
1356 t = fold_convert (TREE_TYPE (count), size_tree);
1357 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1358 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1359 t = build2 (GT_EXPR, boolean_type_node, t, r);
1360 t = build3 (COND_EXPR, void_type_node, t,
1361 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1362 NULL_TREE);
1363 gimplify_and_add (t, pre_p);
1365 t = fold_build_pointer_plus (base, count_tmp);
1366 gimplify_assign (addr, t, pre_p);
1368 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1369 gimplify_and_add (t, pre_p);
1371 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1372 gimplify_and_add (t, pre_p);
1375 /* Arguments larger than a word might need to skip over some
1376 registers, since arguments are either passed entirely in
1377 registers or entirely on the stack. */
1378 size = PUSH_ROUNDING (int_size_in_bytes (type));
1379 if (size > 2 || size < 0 || must_stack)
1381 tree r, u;
1383 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1384 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1386 t = fold_convert (TREE_TYPE (count), r);
1387 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1388 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1389 gimplify_and_add (t, pre_p);
1392 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1393 + INCOMING_FRAME_SP_OFFSET);
1394 t = fold_convert (TREE_TYPE (count), t);
1395 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1396 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1397 fold_convert (TREE_TYPE (count), size_tree));
1398 t = fold_convert (TREE_TYPE (t), fold (t));
1399 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1400 t = fold_build_pointer_plus (base, t);
1401 gimplify_assign (addr, t, pre_p);
1403 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1404 gimplify_and_add (t, pre_p);
1406 t = fold_convert (TREE_TYPE (count), size_tree);
1407 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1408 gimplify_assign (count, t, pre_p);
1410 addr = fold_convert (build_pointer_type (type), addr);
1411 return build_va_arg_indirect_ref (addr);
1414 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1416 static void
1417 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1419 rtx temp = gen_reg_rtx (HImode);
1420 rtx reg_fnaddr = gen_reg_rtx (HImode);
1421 rtx reg_addr, reg_addr_mem;
1423 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1424 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
1426 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1427 emit_move_insn (reg_addr_mem, temp);
1428 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1429 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1431 emit_move_insn (temp, static_chain);
1432 emit_move_insn (reg_addr_mem, temp);
1433 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1434 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1436 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1437 emit_move_insn (temp, reg_fnaddr);
1438 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1439 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1440 emit_move_insn (reg_addr_mem, temp);
1441 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1442 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1444 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1445 emit_move_insn (reg_addr_mem, reg_fnaddr);
1448 /* Worker function for TARGET_FUNCTION_VALUE. */
1450 static rtx
1451 xstormy16_function_value (const_tree valtype,
1452 const_tree func ATTRIBUTE_UNUSED,
1453 bool outgoing ATTRIBUTE_UNUSED)
1455 machine_mode mode;
1456 mode = TYPE_MODE (valtype);
1457 PROMOTE_MODE (mode, 0, valtype);
1458 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1461 /* Worker function for TARGET_LIBCALL_VALUE. */
1463 static rtx
1464 xstormy16_libcall_value (machine_mode mode,
1465 const_rtx fun ATTRIBUTE_UNUSED)
1467 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1470 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1472 static bool
1473 xstormy16_function_value_regno_p (const unsigned int regno)
1475 return (regno == RETURN_VALUE_REGNUM);
1478 /* A C compound statement that outputs the assembler code for a thunk function,
1479 used to implement C++ virtual function calls with multiple inheritance. The
1480 thunk acts as a wrapper around a virtual function, adjusting the implicit
1481 object parameter before handing control off to the real function.
1483 First, emit code to add the integer DELTA to the location that contains the
1484 incoming first argument. Assume that this argument contains a pointer, and
1485 is the one used to pass the `this' pointer in C++. This is the incoming
1486 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1487 addition must preserve the values of all other incoming arguments.
1489 After the addition, emit code to jump to FUNCTION, which is a
1490 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1491 the return address. Hence returning from FUNCTION will return to whoever
1492 called the current `thunk'.
1494 The effect must be as if @var{function} had been called directly
1495 with the adjusted first argument. This macro is responsible for
1496 emitting all of the code for a thunk function;
1497 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1498 not invoked.
1500 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1501 extracted from it.) It might possibly be useful on some targets, but
1502 probably not. */
1504 static void
1505 xstormy16_asm_output_mi_thunk (FILE *file,
1506 tree thunk_fndecl ATTRIBUTE_UNUSED,
1507 HOST_WIDE_INT delta,
1508 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1509 tree function)
1511 int regnum = FIRST_ARGUMENT_REGISTER;
1513 /* There might be a hidden first argument for a returned structure. */
1514 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1515 regnum += 1;
1517 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1518 fputs ("\tjmpf ", file);
1519 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1520 putc ('\n', file);
1523 /* The purpose of this function is to override the default behavior of
1524 BSS objects. Normally, they go into .bss or .sbss via ".common"
1525 directives, but we need to override that and put them in
1526 .bss_below100. We can't just use a section override (like we do
1527 for .data_below100), because that makes them initialized rather
1528 than uninitialized. */
1530 void
1531 xstormy16_asm_output_aligned_common (FILE *stream,
1532 tree decl,
1533 const char *name,
1534 int size,
1535 int align,
1536 int global)
1538 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1539 rtx symbol;
1541 if (mem != NULL_RTX
1542 && MEM_P (mem)
1543 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1544 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1546 const char *name2;
1547 int p2align = 0;
1549 switch_to_section (bss100_section);
1551 while (align > 8)
1553 align /= 2;
1554 p2align ++;
1557 name2 = default_strip_name_encoding (name);
1558 if (global)
1559 fprintf (stream, "\t.globl\t%s\n", name2);
1560 if (p2align)
1561 fprintf (stream, "\t.p2align %d\n", p2align);
1562 fprintf (stream, "\t.type\t%s, @object\n", name2);
1563 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1564 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1565 return;
1568 if (!global)
1570 fprintf (stream, "\t.local\t");
1571 assemble_name (stream, name);
1572 fprintf (stream, "\n");
1574 fprintf (stream, "\t.comm\t");
1575 assemble_name (stream, name);
1576 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1579 /* Implement TARGET_ASM_INIT_SECTIONS. */
1581 static void
1582 xstormy16_asm_init_sections (void)
1584 bss100_section
1585 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1586 output_section_asm_op,
1587 "\t.section \".bss_below100\",\"aw\",@nobits");
1590 /* Mark symbols with the "below100" attribute so that we can use the
1591 special addressing modes for them. */
1593 static void
1594 xstormy16_encode_section_info (tree decl, rtx r, int first)
1596 default_encode_section_info (decl, r, first);
1598 if (TREE_CODE (decl) == VAR_DECL
1599 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1600 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1602 rtx symbol = XEXP (r, 0);
1604 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1605 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1609 #undef TARGET_ASM_CONSTRUCTOR
1610 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1611 #undef TARGET_ASM_DESTRUCTOR
1612 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1614 /* Output constructors and destructors. Just like
1615 default_named_section_asm_out_* but don't set the sections writable. */
1617 static void
1618 xstormy16_asm_out_destructor (rtx symbol, int priority)
1620 const char *section = ".dtors";
1621 char buf[18];
1623 /* ??? This only works reliably with the GNU linker. */
1624 if (priority != DEFAULT_INIT_PRIORITY)
1626 sprintf (buf, ".dtors.%.5u",
1627 /* Invert the numbering so the linker puts us in the proper
1628 order; constructors are run from right to left, and the
1629 linker sorts in increasing order. */
1630 MAX_INIT_PRIORITY - priority);
1631 section = buf;
1634 switch_to_section (get_section (section, 0, NULL));
1635 assemble_align (POINTER_SIZE);
1636 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1639 static void
1640 xstormy16_asm_out_constructor (rtx symbol, int priority)
1642 const char *section = ".ctors";
1643 char buf[18];
1645 /* ??? This only works reliably with the GNU linker. */
1646 if (priority != DEFAULT_INIT_PRIORITY)
1648 sprintf (buf, ".ctors.%.5u",
1649 /* Invert the numbering so the linker puts us in the proper
1650 order; constructors are run from right to left, and the
1651 linker sorts in increasing order. */
1652 MAX_INIT_PRIORITY - priority);
1653 section = buf;
1656 switch_to_section (get_section (section, 0, NULL));
1657 assemble_align (POINTER_SIZE);
1658 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1661 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1663 Print a memory address as an operand to reference that memory location. */
1665 static void
1666 xstormy16_print_operand_address (FILE *file, machine_mode /*mode*/,
1667 rtx address)
1669 HOST_WIDE_INT offset;
1670 int pre_dec, post_inc;
1672 /* There are a few easy cases. */
1673 if (CONST_INT_P (address))
1675 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1676 return;
1679 if (CONSTANT_P (address) || LABEL_P (address))
1681 output_addr_const (file, address);
1682 return;
1685 /* Otherwise, it's hopefully something of the form
1686 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1687 if (GET_CODE (address) == PLUS)
1689 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1690 offset = INTVAL (XEXP (address, 1));
1691 address = XEXP (address, 0);
1693 else
1694 offset = 0;
1696 pre_dec = (GET_CODE (address) == PRE_DEC);
1697 post_inc = (GET_CODE (address) == POST_INC);
1698 if (pre_dec || post_inc)
1699 address = XEXP (address, 0);
1701 gcc_assert (REG_P (address));
1703 fputc ('(', file);
1704 if (pre_dec)
1705 fputs ("--", file);
1706 fputs (reg_names [REGNO (address)], file);
1707 if (post_inc)
1708 fputs ("++", file);
1709 if (offset != 0)
1710 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1711 fputc (')', file);
1714 /* Worker function for TARGET_PRINT_OPERAND.
1716 Print an operand to an assembler instruction. */
1718 static void
1719 xstormy16_print_operand (FILE *file, rtx x, int code)
1721 switch (code)
1723 case 'B':
1724 /* There is either one bit set, or one bit clear, in X.
1725 Print it preceded by '#'. */
1727 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1728 HOST_WIDE_INT xx = 1;
1729 HOST_WIDE_INT l;
1731 if (CONST_INT_P (x))
1732 xx = INTVAL (x);
1733 else
1734 output_operand_lossage ("'B' operand is not constant");
1736 /* GCC sign-extends masks with the MSB set, so we have to
1737 detect all the cases that differ only in sign extension
1738 beyond the bits we care about. Normally, the predicates
1739 and constraints ensure that we have the right values. This
1740 works correctly for valid masks. */
1741 if (bits_set[xx & 7] <= 1)
1743 /* Remove sign extension bits. */
1744 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1745 xx &= 0xff;
1746 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1747 xx &= 0xffff;
1748 l = exact_log2 (xx);
1750 else
1752 /* Add sign extension bits. */
1753 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1754 xx |= ~(HOST_WIDE_INT)0xff;
1755 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1756 xx |= ~(HOST_WIDE_INT)0xffff;
1757 l = exact_log2 (~xx);
1760 if (l == -1)
1761 output_operand_lossage ("'B' operand has multiple bits set");
1763 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1764 return;
1767 case 'C':
1768 /* Print the symbol without a surrounding @fptr(). */
1769 if (GET_CODE (x) == SYMBOL_REF)
1770 assemble_name (file, XSTR (x, 0));
1771 else if (LABEL_P (x))
1772 output_asm_label (x);
1773 else
1774 xstormy16_print_operand_address (file, VOIDmode, x);
1775 return;
1777 case 'o':
1778 case 'O':
1779 /* Print the immediate operand less one, preceded by '#'.
1780 For 'O', negate it first. */
1782 HOST_WIDE_INT xx = 0;
1784 if (CONST_INT_P (x))
1785 xx = INTVAL (x);
1786 else
1787 output_operand_lossage ("'o' operand is not constant");
1789 if (code == 'O')
1790 xx = -xx;
1792 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1793 return;
1796 case 'b':
1797 /* Print the shift mask for bp/bn. */
1799 HOST_WIDE_INT xx = 1;
1800 HOST_WIDE_INT l;
1802 if (CONST_INT_P (x))
1803 xx = INTVAL (x);
1804 else
1805 output_operand_lossage ("'B' operand is not constant");
1807 l = 7 - xx;
1809 fputs (IMMEDIATE_PREFIX, file);
1810 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1811 return;
1814 case 0:
1815 /* Handled below. */
1816 break;
1818 default:
1819 output_operand_lossage ("xstormy16_print_operand: unknown code");
1820 return;
1823 switch (GET_CODE (x))
1825 case REG:
1826 fputs (reg_names [REGNO (x)], file);
1827 break;
1829 case MEM:
1830 xstormy16_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
1831 break;
1833 default:
1834 /* Some kind of constant or label; an immediate operand,
1835 so prefix it with '#' for the assembler. */
1836 fputs (IMMEDIATE_PREFIX, file);
1837 output_addr_const (file, x);
1838 break;
1841 return;
1844 /* Expander for the `casesi' pattern.
1845 INDEX is the index of the switch statement.
1846 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1847 to the first table entry.
1848 RANGE is the number of table entries.
1849 TABLE is an ADDR_VEC that is the jump table.
1850 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1851 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1853 void
1854 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1855 rtx table, rtx default_label)
1857 HOST_WIDE_INT range_i = INTVAL (range);
1858 rtx int_index;
1860 /* This code uses 'br', so it can deal only with tables of size up to
1861 8192 entries. */
1862 if (range_i >= 8192)
1863 sorry ("switch statement of size %lu entries too large",
1864 (unsigned long) range_i);
1866 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1867 OPTAB_LIB_WIDEN);
1868 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1869 default_label);
1870 int_index = gen_lowpart_common (HImode, index);
1871 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1872 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1875 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1876 instructions, without label or alignment or any other special
1877 constructs. We know that the previous instruction will be the
1878 `tablejump_pcrel' output above.
1880 TODO: it might be nice to output 'br' instructions if they could
1881 all reach. */
1883 void
1884 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1886 int vlen, idx;
1888 switch_to_section (current_function_section ());
1890 vlen = XVECLEN (table, 0);
1891 for (idx = 0; idx < vlen; idx++)
1893 fputs ("\tjmpf ", file);
1894 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1895 fputc ('\n', file);
1899 /* Expander for the `call' patterns.
1900 RETVAL is the RTL for the return register or NULL for void functions.
1901 DEST is the function to call, expressed as a MEM.
1902 COUNTER is ignored. */
1904 void
1905 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1907 rtx call, temp;
1908 machine_mode mode;
1910 gcc_assert (MEM_P (dest));
1911 dest = XEXP (dest, 0);
1913 if (! CONSTANT_P (dest) && ! REG_P (dest))
1914 dest = force_reg (Pmode, dest);
1916 if (retval == NULL)
1917 mode = VOIDmode;
1918 else
1919 mode = GET_MODE (retval);
1921 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1922 counter);
1923 if (retval)
1924 call = gen_rtx_SET (retval, call);
1926 if (! CONSTANT_P (dest))
1928 temp = gen_reg_rtx (HImode);
1929 emit_move_insn (temp, const0_rtx);
1931 else
1932 temp = const0_rtx;
1934 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1935 gen_rtx_USE (VOIDmode, temp)));
1936 emit_call_insn (call);
1939 /* Expanders for multiword computational operations. */
1941 /* Expander for arithmetic operations; emit insns to compute
1943 (set DEST (CODE:MODE SRC0 SRC1))
1945 When CODE is COMPARE, a branch template is generated
1946 (this saves duplicating code in xstormy16_split_cbranch). */
1948 void
1949 xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
1950 rtx dest, rtx src0, rtx src1)
1952 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1953 int i;
1954 int firstloop = 1;
1956 if (code == NEG)
1957 emit_move_insn (src0, const0_rtx);
1959 for (i = 0; i < num_words; i++)
1961 rtx w_src0, w_src1, w_dest;
1962 rtx insn;
1964 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1965 i * UNITS_PER_WORD);
1966 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1967 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1969 switch (code)
1971 case PLUS:
1972 if (firstloop
1973 && CONST_INT_P (w_src1)
1974 && INTVAL (w_src1) == 0)
1975 continue;
1977 if (firstloop)
1978 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1979 else
1980 insn = gen_addchi5 (w_dest, w_src0, w_src1);
1981 break;
1983 case NEG:
1984 case MINUS:
1985 case COMPARE:
1986 if (code == COMPARE && i == num_words - 1)
1988 rtx branch, sub, clobber, sub_1;
1990 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1991 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
1992 sub = gen_rtx_SET (w_dest,
1993 gen_rtx_MINUS (HImode, sub_1, w_src1));
1994 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1995 branch = gen_rtx_SET (pc_rtx,
1996 gen_rtx_IF_THEN_ELSE (VOIDmode,
1997 gen_rtx_EQ (HImode,
1998 sub_1,
1999 w_src1),
2000 pc_rtx,
2001 pc_rtx));
2002 insn = gen_rtx_PARALLEL (VOIDmode,
2003 gen_rtvec (3, branch, sub, clobber));
2005 else if (firstloop
2006 && code != COMPARE
2007 && CONST_INT_P (w_src1)
2008 && INTVAL (w_src1) == 0)
2009 continue;
2010 else if (firstloop)
2011 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2012 else
2013 insn = gen_subchi5 (w_dest, w_src0, w_src1);
2014 break;
2016 case IOR:
2017 case XOR:
2018 case AND:
2019 if (CONST_INT_P (w_src1)
2020 && INTVAL (w_src1) == -(code == AND))
2021 continue;
2023 insn = gen_rtx_SET (w_dest, gen_rtx_fmt_ee (code, mode,
2024 w_src0, w_src1));
2025 break;
2027 case NOT:
2028 insn = gen_rtx_SET (w_dest, gen_rtx_NOT (mode, w_src0));
2029 break;
2031 default:
2032 gcc_unreachable ();
2035 firstloop = 0;
2036 emit (insn);
2039 /* If we emit nothing, try_split() will think we failed. So emit
2040 something that does nothing and can be optimized away. */
2041 if (firstloop)
2042 emit (gen_nop ());
2045 /* The shift operations are split at output time for constant values;
2046 variable-width shifts get handed off to a library routine.
2048 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2049 SIZE_R will be a CONST_INT, X will be a hard register. */
2051 const char *
2052 xstormy16_output_shift (machine_mode mode, enum rtx_code code,
2053 rtx x, rtx size_r, rtx temp)
2055 HOST_WIDE_INT size;
2056 const char *r0, *r1, *rt;
2057 static char r[64];
2059 gcc_assert (CONST_INT_P (size_r)
2060 && REG_P (x)
2061 && mode == SImode);
2063 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2065 if (size == 0)
2066 return "";
2068 r0 = reg_names [REGNO (x)];
2069 r1 = reg_names [REGNO (x) + 1];
2071 /* For shifts of size 1, we can use the rotate instructions. */
2072 if (size == 1)
2074 switch (code)
2076 case ASHIFT:
2077 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2078 break;
2079 case ASHIFTRT:
2080 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2081 break;
2082 case LSHIFTRT:
2083 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2084 break;
2085 default:
2086 gcc_unreachable ();
2088 return r;
2091 /* For large shifts, there are easy special cases. */
2092 if (size == 16)
2094 switch (code)
2096 case ASHIFT:
2097 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2098 break;
2099 case ASHIFTRT:
2100 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2101 break;
2102 case LSHIFTRT:
2103 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2104 break;
2105 default:
2106 gcc_unreachable ();
2108 return r;
2110 if (size > 16)
2112 switch (code)
2114 case ASHIFT:
2115 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2116 r1, r0, r0, r1, (int) size - 16);
2117 break;
2118 case ASHIFTRT:
2119 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2120 r0, r1, r1, r0, (int) size - 16);
2121 break;
2122 case LSHIFTRT:
2123 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2124 r0, r1, r1, r0, (int) size - 16);
2125 break;
2126 default:
2127 gcc_unreachable ();
2129 return r;
2132 /* For the rest, we have to do more work. In particular, we
2133 need a temporary. */
2134 rt = reg_names [REGNO (temp)];
2135 switch (code)
2137 case ASHIFT:
2138 sprintf (r,
2139 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2140 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2141 r1, rt);
2142 break;
2143 case ASHIFTRT:
2144 sprintf (r,
2145 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2146 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2147 r0, rt);
2148 break;
2149 case LSHIFTRT:
2150 sprintf (r,
2151 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2152 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2153 r0, rt);
2154 break;
2155 default:
2156 gcc_unreachable ();
2158 return r;
2161 /* Attribute handling. */
2163 /* Return nonzero if the function is an interrupt function. */
2166 xstormy16_interrupt_function_p (void)
2168 tree attributes;
2170 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2171 any functions are declared, which is demonstrably wrong, but
2172 it is worked around here. FIXME. */
2173 if (!cfun)
2174 return 0;
2176 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2177 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2180 #undef TARGET_ATTRIBUTE_TABLE
2181 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2183 static tree xstormy16_handle_interrupt_attribute
2184 (tree *, tree, tree, int, bool *);
2185 static tree xstormy16_handle_below100_attribute
2186 (tree *, tree, tree, int, bool *);
2188 static const struct attribute_spec xstormy16_attribute_table[] =
2190 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2191 affects_type_identity. */
2192 { "interrupt", 0, 0, false, true, true,
2193 xstormy16_handle_interrupt_attribute , false },
2194 { "BELOW100", 0, 0, false, false, false,
2195 xstormy16_handle_below100_attribute, false },
2196 { "below100", 0, 0, false, false, false,
2197 xstormy16_handle_below100_attribute, false },
2198 { NULL, 0, 0, false, false, false, NULL, false }
2201 /* Handle an "interrupt" attribute;
2202 arguments as in struct attribute_spec.handler. */
2204 static tree
2205 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2206 tree args ATTRIBUTE_UNUSED,
2207 int flags ATTRIBUTE_UNUSED,
2208 bool *no_add_attrs)
2210 if (TREE_CODE (*node) != FUNCTION_TYPE)
2212 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2213 name);
2214 *no_add_attrs = true;
2217 return NULL_TREE;
2220 /* Handle an "below" attribute;
2221 arguments as in struct attribute_spec.handler. */
2223 static tree
2224 xstormy16_handle_below100_attribute (tree *node,
2225 tree name ATTRIBUTE_UNUSED,
2226 tree args ATTRIBUTE_UNUSED,
2227 int flags ATTRIBUTE_UNUSED,
2228 bool *no_add_attrs)
2230 if (TREE_CODE (*node) != VAR_DECL
2231 && TREE_CODE (*node) != POINTER_TYPE
2232 && TREE_CODE (*node) != TYPE_DECL)
2234 warning (OPT_Wattributes,
2235 "%<__BELOW100__%> attribute only applies to variables");
2236 *no_add_attrs = true;
2238 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2240 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2242 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2243 "with auto storage class");
2244 *no_add_attrs = true;
2248 return NULL_TREE;
2251 #undef TARGET_INIT_BUILTINS
2252 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2253 #undef TARGET_EXPAND_BUILTIN
2254 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2256 static struct
2258 const char * name;
2259 int md_code;
2260 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2261 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2263 s16builtins[] =
2265 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2266 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2267 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2268 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2269 { NULL, 0, NULL, NULL }
2272 static void
2273 xstormy16_init_builtins (void)
2275 tree args[2], ret_type, arg = NULL_TREE, ftype;
2276 int i, a, n_args;
2278 ret_type = void_type_node;
2280 for (i = 0; s16builtins[i].name; i++)
2282 n_args = strlen (s16builtins[i].arg_types) - 1;
2284 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2286 for (a = n_args - 1; a >= 0; a--)
2287 args[a] = NULL_TREE;
2289 for (a = n_args; a >= 0; a--)
2291 switch (s16builtins[i].arg_types[a])
2293 case 's': arg = short_integer_type_node; break;
2294 case 'S': arg = short_unsigned_type_node; break;
2295 case 'l': arg = long_integer_type_node; break;
2296 case 'L': arg = long_unsigned_type_node; break;
2297 default: gcc_unreachable ();
2299 if (a == 0)
2300 ret_type = arg;
2301 else
2302 args[a-1] = arg;
2304 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2305 add_builtin_function (s16builtins[i].name, ftype,
2306 i, BUILT_IN_MD, NULL, NULL_TREE);
2310 static rtx
2311 xstormy16_expand_builtin (tree exp, rtx target,
2312 rtx subtarget ATTRIBUTE_UNUSED,
2313 machine_mode mode ATTRIBUTE_UNUSED,
2314 int ignore ATTRIBUTE_UNUSED)
2316 rtx op[10], args[10], pat, copyto[10], retval = 0;
2317 tree fndecl, argtree;
2318 int i, a, o, code;
2320 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2321 argtree = TREE_OPERAND (exp, 1);
2322 i = DECL_FUNCTION_CODE (fndecl);
2323 code = s16builtins[i].md_code;
2325 for (a = 0; a < 10 && argtree; a++)
2327 args[a] = expand_normal (TREE_VALUE (argtree));
2328 argtree = TREE_CHAIN (argtree);
2331 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2333 char ao = s16builtins[i].arg_ops[o];
2334 char c = insn_data[code].operand[o].constraint[0];
2335 machine_mode omode;
2337 copyto[o] = 0;
2339 omode = (machine_mode) insn_data[code].operand[o].mode;
2340 if (ao == 'r')
2341 op[o] = target ? target : gen_reg_rtx (omode);
2342 else if (ao == 't')
2343 op[o] = gen_reg_rtx (omode);
2344 else
2345 op[o] = args[(int) hex_value (ao)];
2347 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2349 if (c == '+' || c == '=')
2351 copyto[o] = op[o];
2352 op[o] = gen_reg_rtx (omode);
2354 else
2355 op[o] = copy_to_mode_reg (omode, op[o]);
2358 if (ao == 'r')
2359 retval = op[o];
2362 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2363 op[5], op[6], op[7], op[8], op[9]);
2364 emit_insn (pat);
2366 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2367 if (copyto[o])
2369 emit_move_insn (copyto[o], op[o]);
2370 if (op[o] == retval)
2371 retval = copyto[o];
2374 return retval;
2377 /* Look for combinations of insns that can be converted to BN or BP
2378 opcodes. This is, unfortunately, too complex to do with MD
2379 patterns. */
2381 static void
2382 combine_bnp (rtx_insn *insn)
2384 int insn_code, regno, need_extend;
2385 unsigned int mask;
2386 rtx cond, reg, qireg, mem;
2387 rtx_insn *and_insn, *load;
2388 machine_mode load_mode = QImode;
2389 machine_mode and_mode = QImode;
2390 rtx_insn *shift = NULL;
2392 insn_code = recog_memoized (insn);
2393 if (insn_code != CODE_FOR_cbranchhi
2394 && insn_code != CODE_FOR_cbranchhi_neg)
2395 return;
2397 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2398 cond = XEXP (cond, 1); /* if */
2399 cond = XEXP (cond, 0); /* cond */
2400 switch (GET_CODE (cond))
2402 case NE:
2403 case EQ:
2404 need_extend = 0;
2405 break;
2406 case LT:
2407 case GE:
2408 need_extend = 1;
2409 break;
2410 default:
2411 return;
2414 reg = XEXP (cond, 0);
2415 if (! REG_P (reg))
2416 return;
2417 regno = REGNO (reg);
2418 if (XEXP (cond, 1) != const0_rtx)
2419 return;
2420 if (! find_regno_note (insn, REG_DEAD, regno))
2421 return;
2422 qireg = gen_rtx_REG (QImode, regno);
2424 if (need_extend)
2426 /* LT and GE conditionals should have a sign extend before
2427 them. */
2428 for (and_insn = prev_real_insn (insn);
2429 and_insn != NULL_RTX;
2430 and_insn = prev_real_insn (and_insn))
2432 int and_code = recog_memoized (and_insn);
2434 if (and_code == CODE_FOR_extendqihi2
2435 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2436 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2437 break;
2439 if (and_code == CODE_FOR_movhi_internal
2440 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2442 /* This is for testing bit 15. */
2443 and_insn = insn;
2444 break;
2447 if (reg_mentioned_p (reg, and_insn))
2448 return;
2450 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2451 return;
2454 else
2456 /* EQ and NE conditionals have an AND before them. */
2457 for (and_insn = prev_real_insn (insn);
2458 and_insn != NULL_RTX;
2459 and_insn = prev_real_insn (and_insn))
2461 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2462 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2463 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2464 break;
2466 if (reg_mentioned_p (reg, and_insn))
2467 return;
2469 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2470 return;
2473 if (and_insn)
2475 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2476 followed by an AND like this:
2478 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2479 (clobber (reg:BI carry))]
2481 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2483 Attempt to detect this here. */
2484 for (shift = prev_real_insn (and_insn); shift;
2485 shift = prev_real_insn (shift))
2487 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2488 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2489 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2490 break;
2492 if (reg_mentioned_p (reg, shift)
2493 || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
2495 shift = NULL;
2496 break;
2502 if (and_insn == NULL_RTX)
2503 return;
2505 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2506 load;
2507 load = prev_real_insn (load))
2509 int load_code = recog_memoized (load);
2511 if (load_code == CODE_FOR_movhi_internal
2512 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2513 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2514 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2516 load_mode = HImode;
2517 break;
2520 if (load_code == CODE_FOR_movqi_internal
2521 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2522 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2524 load_mode = QImode;
2525 break;
2528 if (load_code == CODE_FOR_zero_extendqihi2
2529 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2530 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2532 load_mode = QImode;
2533 and_mode = HImode;
2534 break;
2537 if (reg_mentioned_p (reg, load))
2538 return;
2540 if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
2541 return;
2543 if (!load)
2544 return;
2546 mem = SET_SRC (PATTERN (load));
2548 if (need_extend)
2550 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2552 /* If the mem includes a zero-extend operation and we are
2553 going to generate a sign-extend operation then move the
2554 mem inside the zero-extend. */
2555 if (GET_CODE (mem) == ZERO_EXTEND)
2556 mem = XEXP (mem, 0);
2558 else
2560 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2561 load_mode))
2562 return;
2564 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2566 if (shift)
2567 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2570 if (load_mode == HImode)
2572 rtx addr = XEXP (mem, 0);
2574 if (! (mask & 0xff))
2576 addr = plus_constant (Pmode, addr, 1);
2577 mask >>= 8;
2579 mem = gen_rtx_MEM (QImode, addr);
2582 if (need_extend)
2583 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2584 else
2585 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2587 INSN_CODE (insn) = -1;
2588 delete_insn (load);
2590 if (and_insn != insn)
2591 delete_insn (and_insn);
2593 if (shift != NULL_RTX)
2594 delete_insn (shift);
2597 static void
2598 xstormy16_reorg (void)
2600 rtx_insn *insn;
2602 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2604 if (! JUMP_P (insn))
2605 continue;
2606 combine_bnp (insn);
2610 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2612 static bool
2613 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2615 const HOST_WIDE_INT size = int_size_in_bytes (type);
2616 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2619 #undef TARGET_ASM_ALIGNED_HI_OP
2620 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2621 #undef TARGET_ASM_ALIGNED_SI_OP
2622 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2623 #undef TARGET_ENCODE_SECTION_INFO
2624 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2626 /* Select_section doesn't handle .bss_below100. */
2627 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2628 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2630 #undef TARGET_ASM_OUTPUT_MI_THUNK
2631 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2632 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2633 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2635 #undef TARGET_PRINT_OPERAND
2636 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2637 #undef TARGET_PRINT_OPERAND_ADDRESS
2638 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2640 #undef TARGET_MEMORY_MOVE_COST
2641 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2642 #undef TARGET_RTX_COSTS
2643 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2644 #undef TARGET_ADDRESS_COST
2645 #define TARGET_ADDRESS_COST xstormy16_address_cost
2647 #undef TARGET_BUILD_BUILTIN_VA_LIST
2648 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2649 #undef TARGET_EXPAND_BUILTIN_VA_START
2650 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2651 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2652 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2654 #undef TARGET_PROMOTE_FUNCTION_MODE
2655 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2656 #undef TARGET_PROMOTE_PROTOTYPES
2657 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2659 #undef TARGET_FUNCTION_ARG
2660 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2661 #undef TARGET_FUNCTION_ARG_ADVANCE
2662 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2664 #undef TARGET_RETURN_IN_MEMORY
2665 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2666 #undef TARGET_FUNCTION_VALUE
2667 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2668 #undef TARGET_LIBCALL_VALUE
2669 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2670 #undef TARGET_FUNCTION_VALUE_REGNO_P
2671 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2673 #undef TARGET_MACHINE_DEPENDENT_REORG
2674 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2676 #undef TARGET_PREFERRED_RELOAD_CLASS
2677 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2678 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2679 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2681 #undef TARGET_LRA_P
2682 #define TARGET_LRA_P hook_bool_void_false
2684 #undef TARGET_LEGITIMATE_ADDRESS_P
2685 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2686 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2687 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2689 #undef TARGET_CAN_ELIMINATE
2690 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2692 #undef TARGET_TRAMPOLINE_INIT
2693 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2695 struct gcc_target targetm = TARGET_INITIALIZER;
2697 #include "gt-stormy16.h"