Update Copyright years for files modified in 2011 and/or 2012.
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blob88329ebe4c3bf48a647406556c568497c7fed76b
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "obstack.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "except.h"
42 #include "function.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "tm_p.h"
46 #include "langhooks.h"
47 #include "gimple.h"
48 #include "df.h"
49 #include "reload.h"
50 #include "ggc.h"
/* Forward declarations of local functions and target hook workers.  */
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
static int xstormy16_address_cost (rtx, enum machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

/* Section used for objects placed in the special below-100 address
   range (see SYMBOL_FLAG_XSTORMY16_BELOW100 uses later in this file).  */
static GTY(()) section *bss100_section;
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
		     int opno ATTRIBUTE_UNUSED, int *total,
		     bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      /* Small nonnegative constants are the cheapest; cost rises with
	 the magnitude of the value.  */
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
	*total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      /* Multiply and divide are costed as (library/sequence) insn
	 counts; presumably tuned to this chip — TODO confirm against
	 the hardware manual.  */
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
105 static int
106 xstormy16_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
107 addr_space_t as ATTRIBUTE_UNUSED,
108 bool speed ATTRIBUTE_UNUSED)
110 return (CONST_INT_P (x) ? 2
111 : GET_CODE (x) == PLUS ? 7
112 : 5);
115 /* Worker function for TARGET_MEMORY_MOVE_COST. */
117 static int
118 xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
119 bool in)
121 return (5 + memory_move_secondary_cost (mode, rclass, in));
124 /* Branches are handled as follows:
126 1. HImode compare-and-branches. The machine supports these
127 natively, so the appropriate pattern is emitted directly.
129 2. SImode EQ and NE. These are emitted as pairs of HImode
130 compare-and-branches.
132 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
133 of a SImode subtract followed by a branch (not a compare-and-branch),
134 like this:
139 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
144 bne. */
/* Emit a branch of kind CODE to location LOC.

   HImode compares branch directly; SImode conditions are synthesized
   from HImode pieces as described in the comment block above.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* SImode GT/LE/GTU/LEU: no direct pattern, so recurse to build them
     out of the LT/LTU and EQ/NE cases handled further down.  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      /* SImode EQ/NE against a nonzero value: compare word by word.
	 For EQ, any mismatching low word skips to LAB (falls through);
	 for NE, any mismatch branches straight to LOC.  */
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  op0_word = simplify_gen_subreg (word_mode, op0, mode,
					  i * UNITS_PER_WORD);
	  op1_word = simplify_gen_subreg (word_mode, op1, mode,
					  i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
	}

      /* The last word uses the requested code directly.  */
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
				      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
				      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* All compare-and-branch forms clobber the carry flag.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      /* The subtract result is not actually needed, so mark OP0 as
	 merely clobbered rather than set.  */
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
			 rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Build the compare/subtract sequence in a detached insn list so we
     can edit its final insn before emitting.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Find the last insn of the generated sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite the last insn in place into the branch proper: install the
     requested comparison code and the target label.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* A longer length attribute means the target is out of range of the
     short branch form, so a far jump (jmpf) must be emitted.  */
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  /* A long branch inverts the test and conditionally skips the jmpf,
     so the effective condition flips once more.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  /* Unconditional branch: plain "br" or far "jmpf".  */
  if (! op)
    {
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* If the first operand is not a register, swap operands and adjust
     the condition accordingly.  */
  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  /* Long form: branch on the inverted condition around a far jump.  */
  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  /* Long branches invert the condition and jump around a jmpf.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  /* Emit the insn that completes the SImode comparison before the
     branch itself.  */
  switch (code)
    {
    case EQ: case NE:
      {
	int regnum;

	gcc_assert (REG_P (XEXP (op, 0)));

	/* OR the two halves together; the branch then tests for
	   (non)zero.  */
	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Subtract-with-carry of the high words finishes the compare.  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
416 /* Many machines have some registers that cannot be copied directly to or from
417 memory or even from other types of registers. An example is the `MQ'
418 register, which on most machines, can only be copied to or from general
419 registers, but not memory. Some machines allow copying all registers to and
420 from memory, but require a scratch register for stores to some memory
421 locations (e.g., those with symbolic address on the RT, and those with
422 certain symbolic address on the SPARC when compiling PIC). In some cases,
423 both an intermediate and a scratch register are required.
425 You should define these macros to indicate to the reload phase that it may
426 need to allocate at least one register for a reload in addition to the
427 register to contain the data. Specifically, if copying X to a register
428 RCLASS in MODE requires an intermediate register, you should define
429 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
430 whose registers can be used as intermediate registers or scratch registers.
432 If copying a register RCLASS in MODE to X requires an intermediate or scratch
433 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
434 largest register class required. If the requirements for input and output
435 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
436 instead of defining both macros identically.
438 The values returned by these macros are often `GENERAL_REGS'. Return
439 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
440 to or from a register of RCLASS in MODE without requiring a scratch register.
441 Do not define this macro if it would always return `NO_REGS'.
443 If a scratch register is required (either with or without an intermediate
444 register), you should define patterns for `reload_inM' or `reload_outM', as
445 required.. These patterns, which will normally be implemented with a
446 `define_expand', should be similar to the `movM' patterns, except that
447 operand 2 is the scratch register.
449 Define constraints for the reload register and scratch register that contain
450 a single register class. If the original reload register (whose class is
451 RCLASS) can meet the constraint given in the pattern, the value returned by
452 these macros is used for the class of the scratch register. Otherwise, two
453 additional reload registers are required. Their classes are obtained from
454 the constraints in the insn pattern.
456 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
457 either be in a hard register or in memory. Use `true_regnum' to find out;
458 it will return -1 if the pseudo is in memory and the hard register number if
459 it is in a register.
461 These macros should not be used in the case where a particular class of
462 registers can only be copied to memory and not to another class of
463 registers. In that case, secondary reload registers are not needed and
464 would not be helpful. Instead, a stack location must be used to perform the
465 copy and the `movM' pattern should use memory as an intermediate storage.
466 This case often occurs between floating-point and general registers. */
468 enum reg_class
469 xstormy16_secondary_reload_class (enum reg_class rclass,
470 enum machine_mode mode ATTRIBUTE_UNUSED,
471 rtx x)
473 /* This chip has the interesting property that only the first eight
474 registers can be moved to/from memory. */
475 if ((MEM_P (x)
476 || ((GET_CODE (x) == SUBREG || REG_P (x))
477 && (true_regnum (x) == -1
478 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
479 && ! reg_class_subset_p (rclass, EIGHT_REGS))
480 return EIGHT_REGS;
482 return NO_REGS;
485 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
486 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
488 static reg_class_t
489 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
491 if (rclass == GENERAL_REGS && MEM_P (x))
492 return EIGHT_REGS;
494 return rclass;
497 /* Predicate for symbols and addresses that reflect special 8-bit
498 addressing. */
501 xstormy16_below100_symbol (rtx x,
502 enum machine_mode mode ATTRIBUTE_UNUSED)
504 if (GET_CODE (x) == CONST)
505 x = XEXP (x, 0);
506 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
507 x = XEXP (x, 0);
509 if (GET_CODE (x) == SYMBOL_REF)
510 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
512 if (CONST_INT_P (x))
514 HOST_WIDE_INT i = INTVAL (x);
516 if ((i >= 0x0000 && i <= 0x00ff)
517 || (i >= 0x7f00 && i <= 0x7fff))
518 return 1;
520 return 0;
523 /* Likewise, but only for non-volatile MEMs, for patterns where the
524 MEM will get split into smaller sized accesses. */
527 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
529 if (MEM_P (x) && MEM_VOLATILE_P (x))
530 return 0;
531 return xstormy16_below100_operand (x, mode);
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* The only native QImode IOR sets a single bit; use it when VAL has
     exactly one bit set and the operands are (or can be made)
     below-100 or register operands.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers, then perform the
     operation in HImode via paradoxical subregs.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* The only native QImode AND clears a single bit; use it when VAL
     has exactly one bit clear and the operands are (or can be made)
     below-100 or register operands.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers, then perform the
     operation in HImode via paradoxical subregs.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
/* Nonzero if X is a CONST_INT such that X + OFFSET fits in the signed
   12-bit displacement range [-2048, 2047] of an indexed address.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero if X is a CONST_INT valid as an absolute address with
   displacement OFFSET: nonnegative, below 0x8000, and within either
   the low (< 0x100) or the high (>= 0x7F00) directly addressable
   page.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Worker for TARGET_LEGITIMATE_ADDRESS_P.  Accepts absolute constant
   addresses in the directly addressable pages, register (base plus
   small offset) addresses, certain auto-modify forms, and below-100
   symbols.  */

bool
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, bool strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
	return false;
    }

  /* Strip the auto-modify wrapper and validate the base register.  */
  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}
/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

static bool
xstormy16_mode_dependent_address_p (const_rtx x,
				    addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Valid at offset 0 but not at offset 6 (the last word of a DImode
     or DFmode access) => mode dependent.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}
691 short_memory_operand (rtx x, enum machine_mode mode)
693 if (! memory_operand (x, mode))
694 return 0;
695 return (GET_CODE (XEXP (x, 0)) != PLUS);
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  /* Clear the volatile flag on the copy so the per-word MEMs
	     below can carry it individually.  */
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register overlap: copy high-to-low when DEST
	 starts above SRC.  */
      if (REG_P (src)
	  && REG_P (dest)
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addressing requires most-significant-word-first.  */
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      /* The load clobbers its own address register; choose the
	 direction that writes the overlapping register last.  */
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* For auto-modify addresses, re-use the same MEM each iteration
	 (the side effect advances the pointer); otherwise take the
	 I'th word subreg.  */
      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  /* Decompose a PRE_MODIFY destination address: emit the pointer
     update as a separate add (which clobbers carry), then use a plain
     MEM through the updated register.  */
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source address.  */
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
884 /* Stack Layout:
886 The stack is laid out as follows:
888 SP->
889 FP-> Local variables
890 Register save area (up to 4 words)
891 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
893 AP-> Return address (two words)
894 9th procedure parameter word
895 10th procedure parameter word
897 last procedure parameter word
899 The frame pointer location is tuned to make it most likely that all
900 parameters and local variables can be accessed using a load-indexed
901 instruction. */
/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;		/* Local variables.  */
  int register_save_size;	/* Callee-save register area.  */
  int stdarg_save_size;		/* Argument-register save area (stdarg).  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
/* Does REGNO need to be saved?

   A register needs saving if it is live and callee-saved, or — in an
   interrupt function (IFUN) — if it is a caller-saved, non-fixed
   register other than the carry flag that is live or may be clobbered
   by a call from a non-leaf function.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
/* Compute the stack layout.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word per register that must be saved.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place the frame pointer so that, if possible, all incoming
     arguments and locals are reachable within the 2048-byte indexed
     addressing range.  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  - INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
968 /* Worker function for TARGET_CAN_ELIMINATE. */
970 static bool
971 xstormy16_can_eliminate (const int from, const int to)
973 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
974 ? ! frame_pointer_needed
975 : true);
/* Determine how all the special registers get eliminated.

   Returns the offset to add when replacing register FROM with
   register TO, derived from the computed stack layout.  */

int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = - layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}
1002 static rtx
1003 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1005 rtx set, clobber, insn;
1007 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1008 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1009 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1010 return insn;
/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A push is a store through (post_inc sp): the stack grows upward
     on this target.  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Describe the push to dwarf as a store plus a stack-pointer
	   adjustment, since the POST_INC form is opaque to it.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
					     plus_constant (Pmode, \
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
	{
	  insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
					 hard_frame_pointer_rtx,
					 GEN_INT (- layout.sp_minus_fp));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1126 /* Do we need an epilogue at all? */
1129 direct_return (void)
1131 return (reload_completed
1132 && xstormy16_compute_stack_layout ().frame_size == 0
1133 && ! xstormy16_interrupt_function_p ());
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (pre_dec:HI sp)) -- each load through this pops one word,
     mirroring the post_inc pushes in the prologue.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  Iterate in the reverse order
     of the prologue's pushes so the pops match.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  Interrupt handlers use a distinct return pattern.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1185 xstormy16_epilogue_uses (int regno)
1187 if (reload_completed && call_used_regs[regno])
1189 const int ifun = xstormy16_interrupt_function_p ();
1190 return REG_NEEDS_SAVE (regno, ifun);
1192 return 0;
void
xstormy16_function_profiler (void)
{
  /* Profiling instrumentation is not implemented for this target;
     report that cleanly instead of emitting wrong code.  */
  sorry ("function_profiler support");
}
/* Update CUM to advance past an argument in the argument list.  The
   values MODE, TYPE and NAMED describe that argument.  Once this is
   done, the variable CUM is suitable for analyzing the *following*
   argument with `TARGET_FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */

static void
xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
				const_tree type, bool named ATTRIBUTE_UNUSED)
{
  /* CUM is a word count of argument space consumed so far.  */
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (*cum < NUM_ARGUMENT_REGISTERS
      && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    *cum = NUM_ARGUMENT_REGISTERS;

  *cum += XSTORMY16_WORD_SIZE (type, mode);
}
1228 static rtx
1229 xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1230 const_tree type, bool named ATTRIBUTE_UNUSED)
1232 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1234 if (mode == VOIDmode)
1235 return const0_rtx;
1236 if (targetm.calls.must_pass_in_stack (mode, type)
1237 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1238 return NULL_RTX;
1239 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
			  TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* "base": stack position of arguments not passed in registers.  */
  f_1 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("base"),
		    ptr_type_node);
  /* "count": bytes of argument space consumed so far.  */
  f_2 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("count"),
		    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  /* Chain the fields: base first, then count.  */
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t,u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* Field order matches xstormy16_build_builtin_va_list: base then
     count.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = incoming-args pointer, adjusted back by
     INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of argument space already consumed by the named
     arguments.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Arguments occupy a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  /* Fast path: the argument was passed in a register, so its saved
     copy lives at base + count.  */
  if (!must_stack)
    {
      tree r;

      /* If count + size would run past the register save area, the
	 argument was passed on the stack instead.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* count_tmp = max (count_tmp, size_of_reg_args).  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack path: compute the address of the argument relative to base,
     working back past the register save area and the incoming-SP
     offset.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Advance the counter past this argument.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
/* Worker function for TARGET_TRAMPOLINE_INIT.

   Fill in the trampoline at M_TRAMP: four HImode words are stored one
   at a time through REG_ADDR_MEM, advancing REG_ADDR by two bytes
   between stores.  The generated sequence loads STATIC_CHAIN into the
   static chain register and jumps to FNDECL's address.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: 0x3130 ORed with the static chain register number --
     presumably a "load immediate into register" opcode; confirm
     against the xstormy16 ISA.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value itself.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: the low byte of the function address combined with 0x0200
     (presumably a jump opcode -- confirm against the ISA).  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining high bits of the function address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1445 /* Worker function for TARGET_FUNCTION_VALUE. */
1447 static rtx
1448 xstormy16_function_value (const_tree valtype,
1449 const_tree func ATTRIBUTE_UNUSED,
1450 bool outgoing ATTRIBUTE_UNUSED)
1452 enum machine_mode mode;
1453 mode = TYPE_MODE (valtype);
1454 PROMOTE_MODE (mode, 0, valtype);
1455 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
/* Worker function for TARGET_LIBCALL_VALUE.  */

static rtx
xstormy16_libcall_value (enum machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  /* All libcall results come back in the fixed return-value register,
     in MODE.  */
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1467 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1469 static bool
1470 xstormy16_function_value_regno_p (const unsigned int regno)
1472 return (regno == RETURN_VALUE_REGNUM);
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust the `this' pointer in place, then tail-jump ('jmpf') to
     the real function.  DELTA is masked to 16 bits for the add
     immediate.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1520 /* The purpose of this function is to override the default behavior of
1521 BSS objects. Normally, they go into .bss or .sbss via ".common"
1522 directives, but we need to override that and put them in
1523 .bss_below100. We can't just use a section override (like we do
1524 for .data_below100), because that makes them initialized rather
1525 than uninitialized. */
1527 void
1528 xstormy16_asm_output_aligned_common (FILE *stream,
1529 tree decl,
1530 const char *name,
1531 int size,
1532 int align,
1533 int global)
1535 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1536 rtx symbol;
1538 if (mem != NULL_RTX
1539 && MEM_P (mem)
1540 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1541 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1543 const char *name2;
1544 int p2align = 0;
1546 switch_to_section (bss100_section);
1548 while (align > 8)
1550 align /= 2;
1551 p2align ++;
1554 name2 = default_strip_name_encoding (name);
1555 if (global)
1556 fprintf (stream, "\t.globl\t%s\n", name2);
1557 if (p2align)
1558 fprintf (stream, "\t.p2align %d\n", p2align);
1559 fprintf (stream, "\t.type\t%s, @object\n", name2);
1560 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1561 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1562 return;
1565 if (!global)
1567 fprintf (stream, "\t.local\t");
1568 assemble_name (stream, name);
1569 fprintf (stream, "\n");
1571 fprintf (stream, "\t.comm\t");
1572 assemble_name (stream, name);
1573 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  /* Register the writable, NOBITS section that receives uninitialized
     below-100 objects (see xstormy16_asm_output_aligned_common).  */
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1587 /* Mark symbols with the "below100" attribute so that we can use the
1588 special addressing modes for them. */
1590 static void
1591 xstormy16_encode_section_info (tree decl, rtx r, int first)
1593 default_encode_section_info (decl, r, first);
1595 if (TREE_CODE (decl) == VAR_DECL
1596 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1597 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1599 rtx symbol = XEXP (r, 0);
1601 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1602 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1606 #undef TARGET_ASM_CONSTRUCTOR
1607 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1608 #undef TARGET_ASM_DESTRUCTOR
1609 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1611 /* Output constructors and destructors. Just like
1612 default_named_section_asm_out_* but don't set the sections writable. */
1614 static void
1615 xstormy16_asm_out_destructor (rtx symbol, int priority)
1617 const char *section = ".dtors";
1618 char buf[16];
1620 /* ??? This only works reliably with the GNU linker. */
1621 if (priority != DEFAULT_INIT_PRIORITY)
1623 sprintf (buf, ".dtors.%.5u",
1624 /* Invert the numbering so the linker puts us in the proper
1625 order; constructors are run from right to left, and the
1626 linker sorts in increasing order. */
1627 MAX_INIT_PRIORITY - priority);
1628 section = buf;
1631 switch_to_section (get_section (section, 0, NULL));
1632 assemble_align (POINTER_SIZE);
1633 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1636 static void
1637 xstormy16_asm_out_constructor (rtx symbol, int priority)
1639 const char *section = ".ctors";
1640 char buf[16];
1642 /* ??? This only works reliably with the GNU linker. */
1643 if (priority != DEFAULT_INIT_PRIORITY)
1645 sprintf (buf, ".ctors.%.5u",
1646 /* Invert the numbering so the linker puts us in the proper
1647 order; constructors are run from right to left, and the
1648 linker sorts in increasing order. */
1649 MAX_INIT_PRIORITY - priority);
1650 section = buf;
1653 switch_to_section (get_section (section, 0, NULL));
1654 assemble_align (POINTER_SIZE);
1655 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.

   Print a memory address as an operand to reference that memory location.  */

static void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      /* Absolute address, truncated to 16 bits.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  /* Only a bare register may remain at this point.  */
  gcc_assert (REG_P (address));

  /* Emit "(--rN)", "(rN++)" or "(rN,offset)" as appropriate.  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1710 /* Worker function for TARGET_PRINT_OPERAND.
1712 Print an operand to an assembler instruction. */
1714 static void
1715 xstormy16_print_operand (FILE *file, rtx x, int code)
1717 switch (code)
1719 case 'B':
1720 /* There is either one bit set, or one bit clear, in X.
1721 Print it preceded by '#'. */
1723 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1724 HOST_WIDE_INT xx = 1;
1725 HOST_WIDE_INT l;
1727 if (CONST_INT_P (x))
1728 xx = INTVAL (x);
1729 else
1730 output_operand_lossage ("'B' operand is not constant");
1732 /* GCC sign-extends masks with the MSB set, so we have to
1733 detect all the cases that differ only in sign extension
1734 beyond the bits we care about. Normally, the predicates
1735 and constraints ensure that we have the right values. This
1736 works correctly for valid masks. */
1737 if (bits_set[xx & 7] <= 1)
1739 /* Remove sign extension bits. */
1740 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1741 xx &= 0xff;
1742 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1743 xx &= 0xffff;
1744 l = exact_log2 (xx);
1746 else
1748 /* Add sign extension bits. */
1749 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1750 xx |= ~(HOST_WIDE_INT)0xff;
1751 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1752 xx |= ~(HOST_WIDE_INT)0xffff;
1753 l = exact_log2 (~xx);
1756 if (l == -1)
1757 output_operand_lossage ("'B' operand has multiple bits set");
1759 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1760 return;
1763 case 'C':
1764 /* Print the symbol without a surrounding @fptr(). */
1765 if (GET_CODE (x) == SYMBOL_REF)
1766 assemble_name (file, XSTR (x, 0));
1767 else if (LABEL_P (x))
1768 output_asm_label (x);
1769 else
1770 xstormy16_print_operand_address (file, x);
1771 return;
1773 case 'o':
1774 case 'O':
1775 /* Print the immediate operand less one, preceded by '#'.
1776 For 'O', negate it first. */
1778 HOST_WIDE_INT xx = 0;
1780 if (CONST_INT_P (x))
1781 xx = INTVAL (x);
1782 else
1783 output_operand_lossage ("'o' operand is not constant");
1785 if (code == 'O')
1786 xx = -xx;
1788 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1789 return;
1792 case 'b':
1793 /* Print the shift mask for bp/bn. */
1795 HOST_WIDE_INT xx = 1;
1796 HOST_WIDE_INT l;
1798 if (CONST_INT_P (x))
1799 xx = INTVAL (x);
1800 else
1801 output_operand_lossage ("'B' operand is not constant");
1803 l = 7 - xx;
1805 fputs (IMMEDIATE_PREFIX, file);
1806 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1807 return;
1810 case 0:
1811 /* Handled below. */
1812 break;
1814 default:
1815 output_operand_lossage ("xstormy16_print_operand: unknown code");
1816 return;
1819 switch (GET_CODE (x))
1821 case REG:
1822 fputs (reg_names [REGNO (x)], file);
1823 break;
1825 case MEM:
1826 xstormy16_print_operand_address (file, XEXP (x, 0));
1827 break;
1829 default:
1830 /* Some kind of constant or label; an immediate operand,
1831 so prefix it with '#' for the assembler. */
1832 fputs (IMMEDIATE_PREFIX, file);
1833 output_addr_const (file, x);
1834 break;
1837 return;
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Bias the index so the table starts at zero, then branch to the
     default label when out of range (a single unsigned compare covers
     both underflow and overflow).  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  /* Scale the index by 4 -- presumably the size of each 'jmpf' table
     entry emitted by xstormy16_output_addr_vec; confirm against the
     ISA.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1871 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1872 instructions, without label or alignment or any other special
1873 constructs. We know that the previous instruction will be the
1874 `tablejump_pcrel' output above.
1876 TODO: it might be nice to output 'br' instructions if they could
1877 all reach. */
1879 void
1880 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1882 int vlen, idx;
1884 switch_to_section (current_function_section ());
1886 vlen = XVECLEN (table, 0);
1887 for (idx = 0; idx < vlen; idx++)
1889 fputs ("\tjmpf ", file);
1890 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1891 fputc ('\n', file);
/* Expanders for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, expressed as a MEM.
   COUNTER is ignored.  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  /* Indirect calls must go through a register.  */
  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* For a non-constant destination, attach a USE of a fresh
     zero-initialized register to the call parallel.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).

   The operation is carried out one word at a time; add/subtract use
   carry-propagating insn patterns after the first word.  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as 0 - SRC1, reusing the MINUS path below.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Word I of each operand.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding a constant zero in the first word is a no-op.  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 sets the carry; addchi5 also consumes it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE, the last word becomes a combined
	     subtract-with-borrow plus conditional branch parallel,
	     filled in later by xstormy16_split_cbranch.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: OR/XOR with 0, AND with -1.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.

   Returns a pointer to a static buffer, overwritten on each call.  */

const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
	      && REG_P (x)
	      && mode == SImode);

  /* Reduce the shift count modulo the operand width.  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* Low and high words of the SImode register pair.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      /* A whole-word shift is just a register move plus a fill.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }
  if (size > 16)
    {
      /* Move a whole word, then shift the remainder within one word.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  The bits shifted out of one word are saved
     in TEMP and ORed into the other word.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
/* Attribute handling.  */

/* Return nonzero if the function is an interrupt function.  */

int
xstormy16_interrupt_function_p (void)
{
  tree attributes;

  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
     any functions are declared, which is demonstrably wrong, but
     it is worked around here.  FIXME.  */
  if (!cfun)
    return 0;

  /* The "interrupt" attribute is attached to the function's type,
     not the decl (see xstormy16_attribute_table).  */
  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
}
2176 #undef TARGET_ATTRIBUTE_TABLE
2177 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2179 static tree xstormy16_handle_interrupt_attribute
2180 (tree *, tree, tree, int, bool *);
2181 static tree xstormy16_handle_below100_attribute
2182 (tree *, tree, tree, int, bool *);
2184 static const struct attribute_spec xstormy16_attribute_table[] =
2186 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2187 affects_type_identity. */
2188 { "interrupt", 0, 0, false, true, true,
2189 xstormy16_handle_interrupt_attribute , false },
2190 { "BELOW100", 0, 0, false, false, false,
2191 xstormy16_handle_below100_attribute, false },
2192 { "below100", 0, 0, false, false, false,
2193 xstormy16_handle_below100_attribute, false },
2194 { NULL, 0, 0, false, false, false, NULL, false }
2197 /* Handle an "interrupt" attribute;
2198 arguments as in struct attribute_spec.handler. */
2200 static tree
2201 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2202 tree args ATTRIBUTE_UNUSED,
2203 int flags ATTRIBUTE_UNUSED,
2204 bool *no_add_attrs)
2206 if (TREE_CODE (*node) != FUNCTION_TYPE)
2208 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2209 name);
2210 *no_add_attrs = true;
2213 return NULL_TREE;
2216 /* Handle an "below" attribute;
2217 arguments as in struct attribute_spec.handler. */
2219 static tree
2220 xstormy16_handle_below100_attribute (tree *node,
2221 tree name ATTRIBUTE_UNUSED,
2222 tree args ATTRIBUTE_UNUSED,
2223 int flags ATTRIBUTE_UNUSED,
2224 bool *no_add_attrs)
2226 if (TREE_CODE (*node) != VAR_DECL
2227 && TREE_CODE (*node) != POINTER_TYPE
2228 && TREE_CODE (*node) != TYPE_DECL)
2230 warning (OPT_Wattributes,
2231 "%<__BELOW100__%> attribute only applies to variables");
2232 *no_add_attrs = true;
2234 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2236 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2238 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2239 "with auto storage class");
2240 *no_add_attrs = true;
2244 return NULL_TREE;
2247 #undef TARGET_INIT_BUILTINS
2248 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2249 #undef TARGET_EXPAND_BUILTIN
2250 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2252 static struct
2254 const char * name;
2255 int md_code;
2256 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2257 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2259 s16builtins[] =
2261 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2262 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2263 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2264 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2265 { NULL, 0, NULL, NULL }
2268 static void
2269 xstormy16_init_builtins (void)
2271 tree args[2], ret_type, arg = NULL_TREE, ftype;
2272 int i, a, n_args;
2274 ret_type = void_type_node;
2276 for (i = 0; s16builtins[i].name; i++)
2278 n_args = strlen (s16builtins[i].arg_types) - 1;
2280 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2282 for (a = n_args - 1; a >= 0; a--)
2283 args[a] = NULL_TREE;
2285 for (a = n_args; a >= 0; a--)
2287 switch (s16builtins[i].arg_types[a])
2289 case 's': arg = short_integer_type_node; break;
2290 case 'S': arg = short_unsigned_type_node; break;
2291 case 'l': arg = long_integer_type_node; break;
2292 case 'L': arg = long_unsigned_type_node; break;
2293 default: gcc_unreachable ();
2295 if (a == 0)
2296 ret_type = arg;
2297 else
2298 args[a-1] = arg;
2300 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2301 add_builtin_function (s16builtins[i].name, ftype,
2302 i, BUILT_IN_MD, NULL, NULL_TREE);
2306 static rtx
2307 xstormy16_expand_builtin (tree exp, rtx target,
2308 rtx subtarget ATTRIBUTE_UNUSED,
2309 enum machine_mode mode ATTRIBUTE_UNUSED,
2310 int ignore ATTRIBUTE_UNUSED)
2312 rtx op[10], args[10], pat, copyto[10], retval = 0;
2313 tree fndecl, argtree;
2314 int i, a, o, code;
2316 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2317 argtree = TREE_OPERAND (exp, 1);
2318 i = DECL_FUNCTION_CODE (fndecl);
2319 code = s16builtins[i].md_code;
2321 for (a = 0; a < 10 && argtree; a++)
2323 args[a] = expand_normal (TREE_VALUE (argtree));
2324 argtree = TREE_CHAIN (argtree);
2327 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2329 char ao = s16builtins[i].arg_ops[o];
2330 char c = insn_data[code].operand[o].constraint[0];
2331 enum machine_mode omode;
2333 copyto[o] = 0;
2335 omode = (enum machine_mode) insn_data[code].operand[o].mode;
2336 if (ao == 'r')
2337 op[o] = target ? target : gen_reg_rtx (omode);
2338 else if (ao == 't')
2339 op[o] = gen_reg_rtx (omode);
2340 else
2341 op[o] = args[(int) hex_value (ao)];
2343 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2345 if (c == '+' || c == '=')
2347 copyto[o] = op[o];
2348 op[o] = gen_reg_rtx (omode);
2350 else
2351 op[o] = copy_to_mode_reg (omode, op[o]);
2354 if (ao == 'r')
2355 retval = op[o];
2358 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2359 op[5], op[6], op[7], op[8], op[9]);
2360 emit_insn (pat);
2362 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2363 if (copyto[o])
2365 emit_move_insn (copyto[o], op[o]);
2366 if (op[o] == retval)
2367 retval = copyto[o];
2370 return retval;
2373 /* Look for combinations of insns that can be converted to BN or BP
2374 opcodes. This is, unfortunately, too complex to do with MD
2375 patterns. */
2377 static void
2378 combine_bnp (rtx insn)
2380 int insn_code, regno, need_extend;
2381 unsigned int mask;
2382 rtx cond, reg, and_insn, load, qireg, mem;
2383 enum machine_mode load_mode = QImode;
2384 enum machine_mode and_mode = QImode;
2385 rtx shift = NULL_RTX;
2387 insn_code = recog_memoized (insn);
2388 if (insn_code != CODE_FOR_cbranchhi
2389 && insn_code != CODE_FOR_cbranchhi_neg)
2390 return;
2392 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2393 cond = XEXP (cond, 1); /* if */
2394 cond = XEXP (cond, 0); /* cond */
2395 switch (GET_CODE (cond))
2397 case NE:
2398 case EQ:
2399 need_extend = 0;
2400 break;
2401 case LT:
2402 case GE:
2403 need_extend = 1;
2404 break;
2405 default:
2406 return;
2409 reg = XEXP (cond, 0);
2410 if (! REG_P (reg))
2411 return;
2412 regno = REGNO (reg);
2413 if (XEXP (cond, 1) != const0_rtx)
2414 return;
2415 if (! find_regno_note (insn, REG_DEAD, regno))
2416 return;
2417 qireg = gen_rtx_REG (QImode, regno);
2419 if (need_extend)
2421 /* LT and GE conditionals should have a sign extend before
2422 them. */
2423 for (and_insn = prev_real_insn (insn);
2424 and_insn != NULL_RTX;
2425 and_insn = prev_real_insn (and_insn))
2427 int and_code = recog_memoized (and_insn);
2429 if (and_code == CODE_FOR_extendqihi2
2430 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2431 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2432 break;
2434 if (and_code == CODE_FOR_movhi_internal
2435 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2437 /* This is for testing bit 15. */
2438 and_insn = insn;
2439 break;
2442 if (reg_mentioned_p (reg, and_insn))
2443 return;
2445 if (GET_CODE (and_insn) != NOTE
2446 && GET_CODE (and_insn) != INSN)
2447 return;
2450 else
2452 /* EQ and NE conditionals have an AND before them. */
2453 for (and_insn = prev_real_insn (insn);
2454 and_insn != NULL_RTX;
2455 and_insn = prev_real_insn (and_insn))
2457 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2458 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2459 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2460 break;
2462 if (reg_mentioned_p (reg, and_insn))
2463 return;
2465 if (GET_CODE (and_insn) != NOTE
2466 && GET_CODE (and_insn) != INSN)
2467 return;
2470 if (and_insn)
2472 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2473 followed by an AND like this:
2475 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2476 (clobber (reg:BI carry))]
2478 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2480 Attempt to detect this here. */
2481 for (shift = prev_real_insn (and_insn); shift;
2482 shift = prev_real_insn (shift))
2484 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2485 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2486 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2487 break;
2489 if (reg_mentioned_p (reg, shift)
2490 || (GET_CODE (shift) != NOTE
2491 && GET_CODE (shift) != INSN))
2493 shift = NULL_RTX;
2494 break;
2500 if (and_insn == NULL_RTX)
2501 return;
2503 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2504 load;
2505 load = prev_real_insn (load))
2507 int load_code = recog_memoized (load);
2509 if (load_code == CODE_FOR_movhi_internal
2510 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2511 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2512 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2514 load_mode = HImode;
2515 break;
2518 if (load_code == CODE_FOR_movqi_internal
2519 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2520 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2522 load_mode = QImode;
2523 break;
2526 if (load_code == CODE_FOR_zero_extendqihi2
2527 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2528 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2530 load_mode = QImode;
2531 and_mode = HImode;
2532 break;
2535 if (reg_mentioned_p (reg, load))
2536 return;
2538 if (GET_CODE (load) != NOTE
2539 && GET_CODE (load) != INSN)
2540 return;
2542 if (!load)
2543 return;
2545 mem = SET_SRC (PATTERN (load));
2547 if (need_extend)
2549 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2551 /* If the mem includes a zero-extend operation and we are
2552 going to generate a sign-extend operation then move the
2553 mem inside the zero-extend. */
2554 if (GET_CODE (mem) == ZERO_EXTEND)
2555 mem = XEXP (mem, 0);
2557 else
2559 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2560 load_mode))
2561 return;
2563 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2565 if (shift)
2566 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2569 if (load_mode == HImode)
2571 rtx addr = XEXP (mem, 0);
2573 if (! (mask & 0xff))
2575 addr = plus_constant (Pmode, addr, 1);
2576 mask >>= 8;
2578 mem = gen_rtx_MEM (QImode, addr);
2581 if (need_extend)
2582 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2583 else
2584 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2586 INSN_CODE (insn) = -1;
2587 delete_insn (load);
2589 if (and_insn != insn)
2590 delete_insn (and_insn);
2592 if (shift != NULL_RTX)
2593 delete_insn (shift);
2596 static void
2597 xstormy16_reorg (void)
2599 rtx insn;
2601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2603 if (! JUMP_P (insn))
2604 continue;
2605 combine_bnp (insn);
2609 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2611 static bool
2612 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2614 const HOST_WIDE_INT size = int_size_in_bytes (type);
2615 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2618 #undef TARGET_ASM_ALIGNED_HI_OP
2619 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2620 #undef TARGET_ASM_ALIGNED_SI_OP
2621 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2622 #undef TARGET_ENCODE_SECTION_INFO
2623 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2625 /* Select_section doesn't handle .bss_below100. */
2626 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2627 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2629 #undef TARGET_ASM_OUTPUT_MI_THUNK
2630 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2631 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2632 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2634 #undef TARGET_PRINT_OPERAND
2635 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2636 #undef TARGET_PRINT_OPERAND_ADDRESS
2637 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2639 #undef TARGET_MEMORY_MOVE_COST
2640 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2641 #undef TARGET_RTX_COSTS
2642 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2643 #undef TARGET_ADDRESS_COST
2644 #define TARGET_ADDRESS_COST xstormy16_address_cost
2646 #undef TARGET_BUILD_BUILTIN_VA_LIST
2647 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2648 #undef TARGET_EXPAND_BUILTIN_VA_START
2649 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2650 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2651 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2653 #undef TARGET_PROMOTE_FUNCTION_MODE
2654 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2655 #undef TARGET_PROMOTE_PROTOTYPES
2656 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2658 #undef TARGET_FUNCTION_ARG
2659 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2660 #undef TARGET_FUNCTION_ARG_ADVANCE
2661 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2663 #undef TARGET_RETURN_IN_MEMORY
2664 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2665 #undef TARGET_FUNCTION_VALUE
2666 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2667 #undef TARGET_LIBCALL_VALUE
2668 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2669 #undef TARGET_FUNCTION_VALUE_REGNO_P
2670 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2672 #undef TARGET_MACHINE_DEPENDENT_REORG
2673 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2675 #undef TARGET_PREFERRED_RELOAD_CLASS
2676 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2677 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2678 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2680 #undef TARGET_LEGITIMATE_ADDRESS_P
2681 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2682 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2683 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2685 #undef TARGET_CAN_ELIMINATE
2686 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2688 #undef TARGET_TRAMPOLINE_INIT
2689 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2691 struct gcc_target targetm = TARGET_INITIALIZER;
2693 #include "gt-stormy16.h"