2015-07-14 Sandra Loosemore <sandra@codesourcery.com>
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blob99412ab1c0efbaecafd1dc92b6b3adff2be96bff
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "df.h"
30 #include "regs.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stringpool.h"
42 #include "stor-layout.h"
43 #include "varasm.h"
44 #include "calls.h"
45 #include "expmed.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "emit-rtl.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "optabs.h"
53 #include "except.h"
54 #include "target.h"
55 #include "tm_p.h"
56 #include "langhooks.h"
57 #include "cfgrtl.h"
58 #include "cfganal.h"
59 #include "lcm.h"
60 #include "cfgbuild.h"
61 #include "cfgcleanup.h"
62 #include "internal-fn.h"
63 #include "gimple-fold.h"
64 #include "tree-eh.h"
65 #include "gimplify.h"
66 #include "reload.h"
67 #include "builtins.h"
69 /* This file should be included last. */
70 #include "target-def.h"
72 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
73 static void xstormy16_asm_out_constructor (rtx, int);
74 static void xstormy16_asm_out_destructor (rtx, int);
75 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
76 HOST_WIDE_INT, tree);
78 static void xstormy16_init_builtins (void);
79 static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
80 static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
81 static bool xstormy16_return_in_memory (const_tree, const_tree);
83 static GTY(()) section *bss100_section;
85 /* Compute a (partial) cost for rtx X. Return true if the complete
86 cost has been computed, and false if subexpressions should be
87 scanned. In either case, *TOTAL contains the cost result. */
89 static bool
90 xstormy16_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
91 int outer_code ATTRIBUTE_UNUSED,
92 int opno ATTRIBUTE_UNUSED, int *total,
93 bool speed ATTRIBUTE_UNUSED)
95 int code = GET_CODE (x);
97 switch (code)
99 case CONST_INT:
100 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
101 *total = COSTS_N_INSNS (1) / 2;
102 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
103 *total = COSTS_N_INSNS (1);
104 else
105 *total = COSTS_N_INSNS (2);
106 return true;
108 case CONST_DOUBLE:
109 case CONST:
110 case SYMBOL_REF:
111 case LABEL_REF:
112 *total = COSTS_N_INSNS (2);
113 return true;
115 case MULT:
116 *total = COSTS_N_INSNS (35 + 6);
117 return true;
118 case DIV:
119 *total = COSTS_N_INSNS (51 - 6);
120 return true;
122 default:
123 return false;
127 static int
128 xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
129 addr_space_t as ATTRIBUTE_UNUSED,
130 bool speed ATTRIBUTE_UNUSED)
132 return (CONST_INT_P (x) ? 2
133 : GET_CODE (x) == PLUS ? 7
134 : 5);
137 /* Worker function for TARGET_MEMORY_MOVE_COST. */
139 static int
140 xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
141 bool in)
143 return (5 + memory_move_secondary_cost (mode, rclass, in));
146 /* Branches are handled as follows:
148 1. HImode compare-and-branches. The machine supports these
149 natively, so the appropriate pattern is emitted directly.
151 2. SImode EQ and NE. These are emitted as pairs of HImode
152 compare-and-branches.
154 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
155 of a SImode subtract followed by a branch (not a compare-and-branch),
156 like this:
161 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
166 bne. */
168 /* Emit a branch of kind CODE to location LOC. */
170 void
171 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
173 rtx condition_rtx, loc_ref, branch, cy_clobber;
174 rtvec vec;
175 machine_mode mode;
177 mode = GET_MODE (op0);
178 gcc_assert (mode == HImode || mode == SImode);
180 if (mode == SImode
181 && (code == GT || code == LE || code == GTU || code == LEU))
183 int unsigned_p = (code == GTU || code == LEU);
184 int gt_p = (code == GT || code == GTU);
185 rtx lab = NULL_RTX;
187 if (gt_p)
188 lab = gen_label_rtx ();
189 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
190 /* This should be generated as a comparison against the temporary
191 created by the previous insn, but reload can't handle that. */
192 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
193 if (gt_p)
194 emit_label (lab);
195 return;
197 else if (mode == SImode
198 && (code == NE || code == EQ)
199 && op1 != const0_rtx)
201 rtx op0_word, op1_word;
202 rtx lab = NULL_RTX;
203 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
204 int i;
206 if (code == EQ)
207 lab = gen_label_rtx ();
209 for (i = 0; i < num_words - 1; i++)
211 op0_word = simplify_gen_subreg (word_mode, op0, mode,
212 i * UNITS_PER_WORD);
213 op1_word = simplify_gen_subreg (word_mode, op1, mode,
214 i * UNITS_PER_WORD);
215 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
217 op0_word = simplify_gen_subreg (word_mode, op0, mode,
218 i * UNITS_PER_WORD);
219 op1_word = simplify_gen_subreg (word_mode, op1, mode,
220 i * UNITS_PER_WORD);
221 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
223 if (code == EQ)
224 emit_label (lab);
225 return;
228 /* We can't allow reload to try to generate any reload after a branch,
229 so when some register must match we must make the temporary ourselves. */
230 if (mode != HImode)
232 rtx tmp;
233 tmp = gen_reg_rtx (mode);
234 emit_move_insn (tmp, op0);
235 op0 = tmp;
238 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
239 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
240 branch = gen_rtx_SET (pc_rtx,
241 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
242 loc_ref, pc_rtx));
244 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
246 if (mode == HImode)
247 vec = gen_rtvec (2, branch, cy_clobber);
248 else if (code == NE || code == EQ)
249 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
250 else
252 rtx sub;
253 #if 0
254 sub = gen_rtx_SET (op0, gen_rtx_MINUS (SImode, op0, op1));
255 #else
256 sub = gen_rtx_CLOBBER (SImode, op0);
257 #endif
258 vec = gen_rtvec (3, branch, sub, cy_clobber);
261 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
264 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
265 the arithmetic operation. Most of the work is done by
266 xstormy16_expand_arith. */
268 void
269 xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
270 rtx dest)
272 rtx op0 = XEXP (comparison, 0);
273 rtx op1 = XEXP (comparison, 1);
274 rtx_insn *seq, *last_insn;
275 rtx compare;
277 start_sequence ();
278 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
279 seq = get_insns ();
280 end_sequence ();
282 gcc_assert (INSN_P (seq));
284 last_insn = seq;
285 while (NEXT_INSN (last_insn) != NULL_RTX)
286 last_insn = NEXT_INSN (last_insn);
288 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
289 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
290 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
291 emit_insn (seq);
295 /* Return the string to output a conditional branch to LABEL, which is
296 the operand number of the label.
298 OP is the conditional expression, or NULL for branch-always.
300 REVERSED is nonzero if we should reverse the sense of the comparison.
302 INSN is the insn. */
304 char *
305 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
306 rtx_insn *insn)
308 static char string[64];
309 int need_longbranch = (op != NULL_RTX
310 ? get_attr_length (insn) == 8
311 : get_attr_length (insn) == 4);
312 int really_reversed = reversed ^ need_longbranch;
313 const char *ccode;
314 const char *templ;
315 const char *operands;
316 enum rtx_code code;
318 if (! op)
320 if (need_longbranch)
321 ccode = "jmpf";
322 else
323 ccode = "br";
324 sprintf (string, "%s %s", ccode, label);
325 return string;
328 code = GET_CODE (op);
330 if (! REG_P (XEXP (op, 0)))
332 code = swap_condition (code);
333 operands = "%3,%2";
335 else
336 operands = "%2,%3";
338 /* Work out which way this really branches. */
339 if (really_reversed)
340 code = reverse_condition (code);
342 switch (code)
344 case EQ: ccode = "z"; break;
345 case NE: ccode = "nz"; break;
346 case GE: ccode = "ge"; break;
347 case LT: ccode = "lt"; break;
348 case GT: ccode = "gt"; break;
349 case LE: ccode = "le"; break;
350 case GEU: ccode = "nc"; break;
351 case LTU: ccode = "c"; break;
352 case GTU: ccode = "hi"; break;
353 case LEU: ccode = "ls"; break;
355 default:
356 gcc_unreachable ();
359 if (need_longbranch)
360 templ = "b%s %s,.+8 | jmpf %s";
361 else
362 templ = "b%s %s,%s";
363 sprintf (string, templ, ccode, operands, label);
365 return string;
368 /* Return the string to output a conditional branch to LABEL, which is
369 the operand number of the label, but suitable for the tail of a
370 SImode branch.
372 OP is the conditional expression (OP is never NULL_RTX).
374 REVERSED is nonzero if we should reverse the sense of the comparison.
376 INSN is the insn. */
378 char *
379 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
380 rtx_insn *insn)
382 static char string[64];
383 int need_longbranch = get_attr_length (insn) >= 8;
384 int really_reversed = reversed ^ need_longbranch;
385 const char *ccode;
386 const char *templ;
387 char prevop[16];
388 enum rtx_code code;
390 code = GET_CODE (op);
392 /* Work out which way this really branches. */
393 if (really_reversed)
394 code = reverse_condition (code);
396 switch (code)
398 case EQ: ccode = "z"; break;
399 case NE: ccode = "nz"; break;
400 case GE: ccode = "ge"; break;
401 case LT: ccode = "lt"; break;
402 case GEU: ccode = "nc"; break;
403 case LTU: ccode = "c"; break;
405 /* The missing codes above should never be generated. */
406 default:
407 gcc_unreachable ();
410 switch (code)
412 case EQ: case NE:
414 int regnum;
416 gcc_assert (REG_P (XEXP (op, 0)));
418 regnum = REGNO (XEXP (op, 0));
419 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
421 break;
423 case GE: case LT: case GEU: case LTU:
424 strcpy (prevop, "sbc %2,%3");
425 break;
427 default:
428 gcc_unreachable ();
431 if (need_longbranch)
432 templ = "%s | b%s .+6 | jmpf %s";
433 else
434 templ = "%s | b%s %s";
435 sprintf (string, templ, prevop, ccode, label);
437 return string;
440 /* Many machines have some registers that cannot be copied directly to or from
441 memory or even from other types of registers. An example is the `MQ'
442 register, which on most machines, can only be copied to or from general
443 registers, but not memory. Some machines allow copying all registers to and
444 from memory, but require a scratch register for stores to some memory
445 locations (e.g., those with symbolic address on the RT, and those with
446 certain symbolic address on the SPARC when compiling PIC). In some cases,
447 both an intermediate and a scratch register are required.
449 You should define these macros to indicate to the reload phase that it may
450 need to allocate at least one register for a reload in addition to the
451 register to contain the data. Specifically, if copying X to a register
452 RCLASS in MODE requires an intermediate register, you should define
453 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
454 whose registers can be used as intermediate registers or scratch registers.
456 If copying a register RCLASS in MODE to X requires an intermediate or scratch
457 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
458 largest register class required. If the requirements for input and output
459 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
460 instead of defining both macros identically.
462 The values returned by these macros are often `GENERAL_REGS'. Return
463 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
464 to or from a register of RCLASS in MODE without requiring a scratch register.
465 Do not define this macro if it would always return `NO_REGS'.
467 If a scratch register is required (either with or without an intermediate
468 register), you should define patterns for `reload_inM' or `reload_outM', as
469 required.. These patterns, which will normally be implemented with a
470 `define_expand', should be similar to the `movM' patterns, except that
471 operand 2 is the scratch register.
473 Define constraints for the reload register and scratch register that contain
474 a single register class. If the original reload register (whose class is
475 RCLASS) can meet the constraint given in the pattern, the value returned by
476 these macros is used for the class of the scratch register. Otherwise, two
477 additional reload registers are required. Their classes are obtained from
478 the constraints in the insn pattern.
480 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
481 either be in a hard register or in memory. Use `true_regnum' to find out;
482 it will return -1 if the pseudo is in memory and the hard register number if
483 it is in a register.
485 These macros should not be used in the case where a particular class of
486 registers can only be copied to memory and not to another class of
487 registers. In that case, secondary reload registers are not needed and
488 would not be helpful. Instead, a stack location must be used to perform the
489 copy and the `movM' pattern should use memory as an intermediate storage.
490 This case often occurs between floating-point and general registers. */
492 enum reg_class
493 xstormy16_secondary_reload_class (enum reg_class rclass,
494 machine_mode mode ATTRIBUTE_UNUSED,
495 rtx x)
497 /* This chip has the interesting property that only the first eight
498 registers can be moved to/from memory. */
499 if ((MEM_P (x)
500 || ((GET_CODE (x) == SUBREG || REG_P (x))
501 && (true_regnum (x) == -1
502 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
503 && ! reg_class_subset_p (rclass, EIGHT_REGS))
504 return EIGHT_REGS;
506 return NO_REGS;
509 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
510 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
512 static reg_class_t
513 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
515 if (rclass == GENERAL_REGS && MEM_P (x))
516 return EIGHT_REGS;
518 return rclass;
521 /* Predicate for symbols and addresses that reflect special 8-bit
522 addressing. */
525 xstormy16_below100_symbol (rtx x,
526 machine_mode mode ATTRIBUTE_UNUSED)
528 if (GET_CODE (x) == CONST)
529 x = XEXP (x, 0);
530 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
531 x = XEXP (x, 0);
533 if (GET_CODE (x) == SYMBOL_REF)
534 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
536 if (CONST_INT_P (x))
538 HOST_WIDE_INT i = INTVAL (x);
540 if ((i >= 0x0000 && i <= 0x00ff)
541 || (i >= 0x7f00 && i <= 0x7fff))
542 return 1;
544 return 0;
547 /* Likewise, but only for non-volatile MEMs, for patterns where the
548 MEM will get split into smaller sized accesses. */
551 xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
553 if (MEM_P (x) && MEM_VOLATILE_P (x))
554 return 0;
555 return xstormy16_below100_operand (x, mode);
558 /* Expand an 8-bit IOR. This either detects the one case we can
559 actually do, or uses a 16-bit IOR. */
561 void
562 xstormy16_expand_iorqi3 (rtx *operands)
564 rtx in, out, outsub, val;
566 out = operands[0];
567 in = operands[1];
568 val = operands[2];
570 if (xstormy16_onebit_set_operand (val, QImode))
572 if (!xstormy16_below100_or_register (in, QImode))
573 in = copy_to_mode_reg (QImode, in);
574 if (!xstormy16_below100_or_register (out, QImode))
575 out = gen_reg_rtx (QImode);
576 emit_insn (gen_iorqi3_internal (out, in, val));
577 if (out != operands[0])
578 emit_move_insn (operands[0], out);
579 return;
582 if (! REG_P (in))
583 in = copy_to_mode_reg (QImode, in);
585 if (! REG_P (val) && ! CONST_INT_P (val))
586 val = copy_to_mode_reg (QImode, val);
588 if (! REG_P (out))
589 out = gen_reg_rtx (QImode);
591 in = simplify_gen_subreg (HImode, in, QImode, 0);
592 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
594 if (! CONST_INT_P (val))
595 val = simplify_gen_subreg (HImode, val, QImode, 0);
597 emit_insn (gen_iorhi3 (outsub, in, val));
599 if (out != operands[0])
600 emit_move_insn (operands[0], out);
603 /* Expand an 8-bit AND. This either detects the one case we can
604 actually do, or uses a 16-bit AND. */
606 void
607 xstormy16_expand_andqi3 (rtx *operands)
609 rtx in, out, outsub, val;
611 out = operands[0];
612 in = operands[1];
613 val = operands[2];
615 if (xstormy16_onebit_clr_operand (val, QImode))
617 if (!xstormy16_below100_or_register (in, QImode))
618 in = copy_to_mode_reg (QImode, in);
619 if (!xstormy16_below100_or_register (out, QImode))
620 out = gen_reg_rtx (QImode);
621 emit_insn (gen_andqi3_internal (out, in, val));
622 if (out != operands[0])
623 emit_move_insn (operands[0], out);
624 return;
627 if (! REG_P (in))
628 in = copy_to_mode_reg (QImode, in);
630 if (! REG_P (val) && ! CONST_INT_P (val))
631 val = copy_to_mode_reg (QImode, val);
633 if (! REG_P (out))
634 out = gen_reg_rtx (QImode);
636 in = simplify_gen_subreg (HImode, in, QImode, 0);
637 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
639 if (! CONST_INT_P (val))
640 val = simplify_gen_subreg (HImode, val, QImode, 0);
642 emit_insn (gen_andhi3 (outsub, in, val));
644 if (out != operands[0])
645 emit_move_insn (operands[0], out);
648 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
649 (CONST_INT_P (X) \
650 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
652 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
653 (CONST_INT_P (X) \
654 && INTVAL (X) + (OFFSET) >= 0 \
655 && INTVAL (X) + (OFFSET) < 0x8000 \
656 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
658 bool
659 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
660 rtx x, bool strict)
662 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
663 return true;
665 if (GET_CODE (x) == PLUS
666 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
668 x = XEXP (x, 0);
669 /* PR 31232: Do not allow INT+INT as an address. */
670 if (CONST_INT_P (x))
671 return false;
674 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
675 || GET_CODE (x) == POST_INC
676 || GET_CODE (x) == PRE_DEC)
677 x = XEXP (x, 0);
679 if (REG_P (x)
680 && REGNO_OK_FOR_BASE_P (REGNO (x))
681 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
682 return true;
684 if (xstormy16_below100_symbol (x, mode))
685 return true;
687 return false;
690 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
692 On this chip, this is true if the address is valid with an offset
693 of 0 but not of 6, because in that case it cannot be used as an
694 address for DImode or DFmode, or if the address is a post-increment
695 or pre-decrement address. */
697 static bool
698 xstormy16_mode_dependent_address_p (const_rtx x,
699 addr_space_t as ATTRIBUTE_UNUSED)
701 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
702 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
703 return true;
705 if (GET_CODE (x) == PLUS
706 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
707 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
708 return true;
710 /* Auto-increment addresses are now treated generically in recog.c. */
711 return false;
715 short_memory_operand (rtx x, machine_mode mode)
717 if (! memory_operand (x, mode))
718 return 0;
719 return (GET_CODE (XEXP (x, 0)) != PLUS);
722 /* Splitter for the 'move' patterns, for modes not directly implemented
723 by hardware. Emit insns to copy a value of mode MODE from SRC to
724 DEST.
726 This function is only called when reload_completed. */
728 void
729 xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
731 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
732 int direction, end, i;
733 int src_modifies = 0;
734 int dest_modifies = 0;
735 int src_volatile = 0;
736 int dest_volatile = 0;
737 rtx mem_operand;
738 rtx auto_inc_reg_rtx = NULL_RTX;
740 /* Check initial conditions. */
741 gcc_assert (reload_completed
742 && mode != QImode && mode != HImode
743 && nonimmediate_operand (dest, mode)
744 && general_operand (src, mode));
746 /* This case is not supported below, and shouldn't be generated. */
747 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
749 /* This case is very very bad after reload, so trap it now. */
750 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
752 /* The general idea is to copy by words, offsetting the source and
753 destination. Normally the least-significant word will be copied
754 first, but for pre-dec operations it's better to copy the
755 most-significant word first. Only one operand can be a pre-dec
756 or post-inc operand.
758 It's also possible that the copy overlaps so that the direction
759 must be reversed. */
760 direction = 1;
762 if (MEM_P (dest))
764 mem_operand = XEXP (dest, 0);
765 dest_modifies = side_effects_p (mem_operand);
766 if (auto_inc_p (mem_operand))
767 auto_inc_reg_rtx = XEXP (mem_operand, 0);
768 dest_volatile = MEM_VOLATILE_P (dest);
769 if (dest_volatile)
771 dest = copy_rtx (dest);
772 MEM_VOLATILE_P (dest) = 0;
775 else if (MEM_P (src))
777 mem_operand = XEXP (src, 0);
778 src_modifies = side_effects_p (mem_operand);
779 if (auto_inc_p (mem_operand))
780 auto_inc_reg_rtx = XEXP (mem_operand, 0);
781 src_volatile = MEM_VOLATILE_P (src);
782 if (src_volatile)
784 src = copy_rtx (src);
785 MEM_VOLATILE_P (src) = 0;
788 else
789 mem_operand = NULL_RTX;
791 if (mem_operand == NULL_RTX)
793 if (REG_P (src)
794 && REG_P (dest)
795 && reg_overlap_mentioned_p (dest, src)
796 && REGNO (dest) > REGNO (src))
797 direction = -1;
799 else if (GET_CODE (mem_operand) == PRE_DEC
800 || (GET_CODE (mem_operand) == PLUS
801 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
802 direction = -1;
803 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
805 int regno;
807 gcc_assert (REG_P (dest));
808 regno = REGNO (dest);
810 gcc_assert (refers_to_regno_p (regno, regno + num_words,
811 mem_operand, 0));
813 if (refers_to_regno_p (regno, mem_operand))
814 direction = -1;
815 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
816 mem_operand, 0))
817 direction = 1;
818 else
819 /* This means something like
820 (set (reg:DI r0) (mem:DI (reg:HI r1)))
821 which we'd need to support by doing the set of the second word
822 last. */
823 gcc_unreachable ();
826 end = direction < 0 ? -1 : num_words;
827 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
829 rtx w_src, w_dest, insn;
831 if (src_modifies)
832 w_src = gen_rtx_MEM (word_mode, mem_operand);
833 else
834 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
835 if (src_volatile)
836 MEM_VOLATILE_P (w_src) = 1;
837 if (dest_modifies)
838 w_dest = gen_rtx_MEM (word_mode, mem_operand);
839 else
840 w_dest = simplify_gen_subreg (word_mode, dest, mode,
841 i * UNITS_PER_WORD);
842 if (dest_volatile)
843 MEM_VOLATILE_P (w_dest) = 1;
845 /* The simplify_subreg calls must always be able to simplify. */
846 gcc_assert (GET_CODE (w_src) != SUBREG
847 && GET_CODE (w_dest) != SUBREG);
849 insn = emit_insn (gen_rtx_SET (w_dest, w_src));
850 if (auto_inc_reg_rtx)
851 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
852 auto_inc_reg_rtx,
853 REG_NOTES (insn));
857 /* Expander for the 'move' patterns. Emit insns to copy a value of
858 mode MODE from SRC to DEST. */
860 void
861 xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
863 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
865 rtx pmv = XEXP (dest, 0);
866 rtx dest_reg = XEXP (pmv, 0);
867 rtx dest_mod = XEXP (pmv, 1);
868 rtx set = gen_rtx_SET (dest_reg, dest_mod);
869 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
871 dest = gen_rtx_MEM (mode, dest_reg);
872 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
874 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
876 rtx pmv = XEXP (src, 0);
877 rtx src_reg = XEXP (pmv, 0);
878 rtx src_mod = XEXP (pmv, 1);
879 rtx set = gen_rtx_SET (src_reg, src_mod);
880 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
882 src = gen_rtx_MEM (mode, src_reg);
883 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
886 /* There are only limited immediate-to-memory move instructions. */
887 if (! reload_in_progress
888 && ! reload_completed
889 && MEM_P (dest)
890 && (! CONST_INT_P (XEXP (dest, 0))
891 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
892 && ! xstormy16_below100_operand (dest, mode)
893 && ! REG_P (src)
894 && GET_CODE (src) != SUBREG)
895 src = copy_to_mode_reg (mode, src);
897 /* Don't emit something we would immediately split. */
898 if (reload_completed
899 && mode != HImode && mode != QImode)
901 xstormy16_split_move (mode, dest, src);
902 return;
905 emit_insn (gen_rtx_SET (dest, src));
908 /* Stack Layout:
910 The stack is laid out as follows:
912 SP->
913 FP-> Local variables
914 Register save area (up to 4 words)
915 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
917 AP-> Return address (two words)
918 9th procedure parameter word
919 10th procedure parameter word
921 last procedure parameter word
923 The frame pointer location is tuned to make it most likely that all
924 parameters and local variables can be accessed using a load-indexed
925 instruction. */
927 /* A structure to describe the layout. */
928 struct xstormy16_stack_layout
930 /* Size of the topmost three items on the stack. */
931 int locals_size;
932 int register_save_size;
933 int stdarg_save_size;
934 /* Sum of the above items. */
935 int frame_size;
936 /* Various offsets. */
937 int first_local_minus_ap;
938 int sp_minus_fp;
939 int fp_minus_ap;
942 /* Does REGNO need to be saved? */
943 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
944 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
945 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
946 && (REGNUM != CARRY_REGNUM) \
947 && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
949 /* Compute the stack layout. */
951 struct xstormy16_stack_layout
952 xstormy16_compute_stack_layout (void)
954 struct xstormy16_stack_layout layout;
955 int regno;
956 const int ifun = xstormy16_interrupt_function_p ();
958 layout.locals_size = get_frame_size ();
960 layout.register_save_size = 0;
961 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
962 if (REG_NEEDS_SAVE (regno, ifun))
963 layout.register_save_size += UNITS_PER_WORD;
965 if (cfun->stdarg)
966 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
967 else
968 layout.stdarg_save_size = 0;
970 layout.frame_size = (layout.locals_size
971 + layout.register_save_size
972 + layout.stdarg_save_size);
974 if (crtl->args.size <= 2048 && crtl->args.size != -1)
976 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
977 + crtl->args.size <= 2048)
978 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
979 else
980 layout.fp_minus_ap = 2048 - crtl->args.size;
982 else
983 layout.fp_minus_ap = (layout.stdarg_save_size
984 + layout.register_save_size
985 - INCOMING_FRAME_SP_OFFSET);
986 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
987 - layout.fp_minus_ap);
988 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
989 return layout;
992 /* Worker function for TARGET_CAN_ELIMINATE. */
994 static bool
995 xstormy16_can_eliminate (const int from, const int to)
997 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
998 ? ! frame_pointer_needed
999 : true);
1002 /* Determine how all the special registers get eliminated. */
1005 xstormy16_initial_elimination_offset (int from, int to)
1007 struct xstormy16_stack_layout layout;
1008 int result;
1010 layout = xstormy16_compute_stack_layout ();
1012 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1013 result = layout.sp_minus_fp - layout.locals_size;
1014 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1015 result = - layout.locals_size;
1016 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1017 result = - layout.fp_minus_ap;
1018 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1019 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1020 else
1021 gcc_unreachable ();
1023 return result;
1026 static rtx
1027 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1029 rtx set, clobber, insn;
1031 set = gen_rtx_SET (dest, gen_rtx_PLUS (HImode, src0, src1));
1032 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1033 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1034 return insn;
1037 /* Called after register allocation to add any instructions needed for
1038 the prologue. Using a prologue insn is favored compared to putting
1039 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1040 since it allows the scheduler to intermix instructions with the
1041 saves of the caller saved registers. In some cases, it might be
1042 necessary to emit a barrier instruction as the last insn to prevent
1043 such scheduling.
1045 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1046 so that the debug info generation code can handle them properly. */
void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (post_inc:HI sp)) -- the push addressing form used for
     every register save below.  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* The local area must fit in a signed 16-bit displacement.  */
  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Describe the push to the unwinder as an explicit store plus
	   a stack-pointer adjustment, via REG_FRAME_RELATED_EXPR.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Same two-part unwinder description as for the stdarg saves
	   above.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
	{
	  insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
					 hard_frame_pointer_rtx,
					 GEN_INT (- layout.sp_minus_fp));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1148 /* Do we need an epilogue at all? */
1151 direct_return (void)
1153 return (reload_completed
1154 && xstormy16_compute_stack_layout ().frame_size == 0
1155 && ! xstormy16_interrupt_function_p ());
1158 /* Called after register allocation to add any instructions needed for
1159 the epilogue. Using an epilogue insn is favored compared to putting
1160 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1161 since it allows the scheduler to intermix instructions with the
1162 saves of the caller saved registers. In some cases, it might be
1163 necessary to emit a barrier instruction as the last insn to prevent
1164 such scheduling. */
1166 void
1167 xstormy16_expand_epilogue (void)
1169 struct xstormy16_stack_layout layout;
1170 rtx mem_pop_rtx;
1171 int regno;
1172 const int ifun = xstormy16_interrupt_function_p ();
1174 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1175 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1177 layout = xstormy16_compute_stack_layout ();
1179 /* Pop the stack for the locals. */
1180 if (layout.locals_size)
1182 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1183 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1184 else
1185 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1186 GEN_INT (- layout.locals_size));
1189 /* Restore any call-saved registers. */
1190 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1191 if (REG_NEEDS_SAVE (regno, ifun))
1192 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1194 /* Pop the stack for the stdarg save area. */
1195 if (layout.stdarg_save_size)
1196 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1197 GEN_INT (- layout.stdarg_save_size));
1199 /* Return. */
1200 if (ifun)
1201 emit_jump_insn (gen_return_internal_interrupt ());
1202 else
1203 emit_jump_insn (gen_return_internal ());
1207 xstormy16_epilogue_uses (int regno)
1209 if (reload_completed && call_used_regs[regno])
1211 const int ifun = xstormy16_interrupt_function_p ();
1212 return REG_NEEDS_SAVE (regno, ifun);
1214 return 0;
void
xstormy16_function_profiler (void)
{
  /* Profiling is not implemented for this target; report that rather
     than emitting nothing.  */
  sorry ("function_profiler support");
}
1223 /* Update CUM to advance past an argument in the argument list. The
1224 values MODE, TYPE and NAMED describe that argument. Once this is
1225 done, the variable CUM is suitable for analyzing the *following*
1226 argument with `TARGET_FUNCTION_ARG', etc.
1228 This function need not do anything if the argument in question was
1229 passed on the stack. The compiler knows how to track the amount of
1230 stack space used for arguments without any special help. However,
1231 it makes life easier for xstormy16_build_va_list if it does update
1232 the word count. */
1234 static void
1235 xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1236 const_tree type, bool named ATTRIBUTE_UNUSED)
1238 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1240 /* If an argument would otherwise be passed partially in registers,
1241 and partially on the stack, the whole of it is passed on the
1242 stack. */
1243 if (*cum < NUM_ARGUMENT_REGISTERS
1244 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1245 *cum = NUM_ARGUMENT_REGISTERS;
1247 *cum += XSTORMY16_WORD_SIZE (type, mode);
1250 static rtx
1251 xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
1252 const_tree type, bool named ATTRIBUTE_UNUSED)
1254 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1256 if (mode == VOIDmode)
1257 return const0_rtx;
1258 if (targetm.calls.must_pass_in_stack (mode, type)
1259 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1260 return NULL_RTX;
1261 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1264 /* Build the va_list type.
1266 For this chip, va_list is a record containing a counter and a pointer.
1267 The counter is of type 'int' and indicates how many bytes
1268 have been used to date. The pointer indicates the stack position
1269 for arguments that have not been passed in registers.
1270 To keep the layout nice, the pointer is first in the structure. */
1272 static tree
1273 xstormy16_build_builtin_va_list (void)
1275 tree f_1, f_2, record, type_decl;
1277 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1278 type_decl = build_decl (BUILTINS_LOCATION,
1279 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1281 f_1 = build_decl (BUILTINS_LOCATION,
1282 FIELD_DECL, get_identifier ("base"),
1283 ptr_type_node);
1284 f_2 = build_decl (BUILTINS_LOCATION,
1285 FIELD_DECL, get_identifier ("count"),
1286 unsigned_type_node);
1288 DECL_FIELD_CONTEXT (f_1) = record;
1289 DECL_FIELD_CONTEXT (f_2) = record;
1291 TYPE_STUB_DECL (record) = type_decl;
1292 TYPE_NAME (record) = type_decl;
1293 TYPE_FIELDS (record) = f_1;
1294 DECL_CHAIN (f_1) = f_2;
1296 layout_type (record);
1298 return record;
1301 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1302 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1303 variable to initialize. NEXTARG is the machine independent notion of the
1304 'next' argument after the variable arguments. */
1306 static void
1307 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1309 tree f_base, f_count;
1310 tree base, count;
1311 tree t,u;
1313 if (xstormy16_interrupt_function_p ())
1314 error ("cannot use va_start in interrupt function");
1316 f_base = TYPE_FIELDS (va_list_type_node);
1317 f_count = DECL_CHAIN (f_base);
1319 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1320 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1321 NULL_TREE);
1323 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1324 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1325 u = fold_convert (TREE_TYPE (count), u);
1326 t = fold_build_pointer_plus (t, u);
1327 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1328 TREE_SIDE_EFFECTS (t) = 1;
1329 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1331 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1332 build_int_cst (NULL_TREE,
1333 crtl->args.info * UNITS_PER_WORD));
1334 TREE_SIDE_EFFECTS (t) = 1;
1335 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1338 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1339 of type va_list as a tree, TYPE is the type passed to va_arg.
1340 Note: This algorithm is documented in stormy-abi. */
static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The two va_list fields: "base" (stack position) and "count"
     (bytes consumed so far) -- see xstormy16_build_builtin_va_list.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument slots are word-aligned.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  if (!must_stack)
    {
      tree r;

      /* if (count_tmp + size > size_of_reg_args) goto lab_fromstack;
	 i.e. the argument did not fit in the register-save area.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: addr = base + count_tmp.  */
      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* count_tmp = max (count_tmp, size_of_reg_args).  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case:
     addr = base - (count_tmp - (size_of_reg_args
				 + INCOMING_FRAME_SP_OFFSET) + size).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* count += size -- advance past the argument just fetched.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1433 /* Worker function for TARGET_TRAMPOLINE_INIT. */
static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  /* Write the trampoline as four HImode words through a pointer
     register, bumping the pointer by 2 after each store.  */
  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: 0x3130 | STATIC_CHAIN_REGNUM -- presumably the encoding
     of a load-immediate into the static chain register; confirm
     against the xstormy16 ISA manual.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value (the immediate operand of the
     instruction stored above).  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: low 8 bits of the target address combined with 0x0200 --
     looks like the first word of a far-jump instruction; verify
     against the ISA manual.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining high bits of the target address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1467 /* Worker function for TARGET_FUNCTION_VALUE. */
1469 static rtx
1470 xstormy16_function_value (const_tree valtype,
1471 const_tree func ATTRIBUTE_UNUSED,
1472 bool outgoing ATTRIBUTE_UNUSED)
1474 machine_mode mode;
1475 mode = TYPE_MODE (valtype);
1476 PROMOTE_MODE (mode, 0, valtype);
1477 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1480 /* Worker function for TARGET_LIBCALL_VALUE. */
1482 static rtx
1483 xstormy16_libcall_value (machine_mode mode,
1484 const_rtx fun ATTRIBUTE_UNUSED)
1486 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1489 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1491 static bool
1492 xstormy16_function_value_regno_p (const unsigned int regno)
1494 return (regno == RETURN_VALUE_REGNUM);
1497 /* A C compound statement that outputs the assembler code for a thunk function,
1498 used to implement C++ virtual function calls with multiple inheritance. The
1499 thunk acts as a wrapper around a virtual function, adjusting the implicit
1500 object parameter before handing control off to the real function.
1502 First, emit code to add the integer DELTA to the location that contains the
1503 incoming first argument. Assume that this argument contains a pointer, and
1504 is the one used to pass the `this' pointer in C++. This is the incoming
1505 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1506 addition must preserve the values of all other incoming arguments.
1508 After the addition, emit code to jump to FUNCTION, which is a
1509 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1510 the return address. Hence returning from FUNCTION will return to whoever
1511 called the current `thunk'.
1513 The effect must be as if @var{function} had been called directly
1514 with the adjusted first argument. This macro is responsible for
1515 emitting all of the code for a thunk function;
1516 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1517 not invoked.
1519 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1520 extracted from it.) It might possibly be useful on some targets, but
1521 probably not. */
1523 static void
1524 xstormy16_asm_output_mi_thunk (FILE *file,
1525 tree thunk_fndecl ATTRIBUTE_UNUSED,
1526 HOST_WIDE_INT delta,
1527 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1528 tree function)
1530 int regnum = FIRST_ARGUMENT_REGISTER;
1532 /* There might be a hidden first argument for a returned structure. */
1533 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1534 regnum += 1;
1536 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1537 fputs ("\tjmpf ", file);
1538 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1539 putc ('\n', file);
1542 /* The purpose of this function is to override the default behavior of
1543 BSS objects. Normally, they go into .bss or .sbss via ".common"
1544 directives, but we need to override that and put them in
1545 .bss_below100. We can't just use a section override (like we do
1546 for .data_below100), because that makes them initialized rather
1547 than uninitialized. */
1549 void
1550 xstormy16_asm_output_aligned_common (FILE *stream,
1551 tree decl,
1552 const char *name,
1553 int size,
1554 int align,
1555 int global)
1557 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1558 rtx symbol;
1560 if (mem != NULL_RTX
1561 && MEM_P (mem)
1562 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1563 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1565 const char *name2;
1566 int p2align = 0;
1568 switch_to_section (bss100_section);
1570 while (align > 8)
1572 align /= 2;
1573 p2align ++;
1576 name2 = default_strip_name_encoding (name);
1577 if (global)
1578 fprintf (stream, "\t.globl\t%s\n", name2);
1579 if (p2align)
1580 fprintf (stream, "\t.p2align %d\n", p2align);
1581 fprintf (stream, "\t.type\t%s, @object\n", name2);
1582 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1583 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1584 return;
1587 if (!global)
1589 fprintf (stream, "\t.local\t");
1590 assemble_name (stream, name);
1591 fprintf (stream, "\n");
1593 fprintf (stream, "\t.comm\t");
1594 assemble_name (stream, name);
1595 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1598 /* Implement TARGET_ASM_INIT_SECTIONS. */
static void
xstormy16_asm_init_sections (void)
{
  /* Create the special section used for below-100 BSS objects; see
     xstormy16_asm_output_aligned_common, which switches to it.  */
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1609 /* Mark symbols with the "below100" attribute so that we can use the
1610 special addressing modes for them. */
1612 static void
1613 xstormy16_encode_section_info (tree decl, rtx r, int first)
1615 default_encode_section_info (decl, r, first);
1617 if (TREE_CODE (decl) == VAR_DECL
1618 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1619 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1621 rtx symbol = XEXP (r, 0);
1623 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1624 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1628 #undef TARGET_ASM_CONSTRUCTOR
1629 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1630 #undef TARGET_ASM_DESTRUCTOR
1631 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1633 /* Output constructors and destructors. Just like
1634 default_named_section_asm_out_* but don't set the sections writable. */
1636 static void
1637 xstormy16_asm_out_destructor (rtx symbol, int priority)
1639 const char *section = ".dtors";
1640 char buf[16];
1642 /* ??? This only works reliably with the GNU linker. */
1643 if (priority != DEFAULT_INIT_PRIORITY)
1645 sprintf (buf, ".dtors.%.5u",
1646 /* Invert the numbering so the linker puts us in the proper
1647 order; constructors are run from right to left, and the
1648 linker sorts in increasing order. */
1649 MAX_INIT_PRIORITY - priority);
1650 section = buf;
1653 switch_to_section (get_section (section, 0, NULL));
1654 assemble_align (POINTER_SIZE);
1655 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1658 static void
1659 xstormy16_asm_out_constructor (rtx symbol, int priority)
1661 const char *section = ".ctors";
1662 char buf[16];
1664 /* ??? This only works reliably with the GNU linker. */
1665 if (priority != DEFAULT_INIT_PRIORITY)
1667 sprintf (buf, ".ctors.%.5u",
1668 /* Invert the numbering so the linker puts us in the proper
1669 order; constructors are run from right to left, and the
1670 linker sorts in increasing order. */
1671 MAX_INIT_PRIORITY - priority);
1672 section = buf;
1675 switch_to_section (get_section (section, 0, NULL));
1676 assemble_align (POINTER_SIZE);
1677 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1680 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1682 Print a memory address as an operand to reference that memory location. */
1684 static void
1685 xstormy16_print_operand_address (FILE *file, rtx address)
1687 HOST_WIDE_INT offset;
1688 int pre_dec, post_inc;
1690 /* There are a few easy cases. */
1691 if (CONST_INT_P (address))
1693 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1694 return;
1697 if (CONSTANT_P (address) || LABEL_P (address))
1699 output_addr_const (file, address);
1700 return;
1703 /* Otherwise, it's hopefully something of the form
1704 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1705 if (GET_CODE (address) == PLUS)
1707 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1708 offset = INTVAL (XEXP (address, 1));
1709 address = XEXP (address, 0);
1711 else
1712 offset = 0;
1714 pre_dec = (GET_CODE (address) == PRE_DEC);
1715 post_inc = (GET_CODE (address) == POST_INC);
1716 if (pre_dec || post_inc)
1717 address = XEXP (address, 0);
1719 gcc_assert (REG_P (address));
1721 fputc ('(', file);
1722 if (pre_dec)
1723 fputs ("--", file);
1724 fputs (reg_names [REGNO (address)], file);
1725 if (post_inc)
1726 fputs ("++", file);
1727 if (offset != 0)
1728 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1729 fputc (')', file);
1732 /* Worker function for TARGET_PRINT_OPERAND.
1734 Print an operand to an assembler instruction. */
1736 static void
1737 xstormy16_print_operand (FILE *file, rtx x, int code)
1739 switch (code)
1741 case 'B':
1742 /* There is either one bit set, or one bit clear, in X.
1743 Print it preceded by '#'. */
1745 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1746 HOST_WIDE_INT xx = 1;
1747 HOST_WIDE_INT l;
1749 if (CONST_INT_P (x))
1750 xx = INTVAL (x);
1751 else
1752 output_operand_lossage ("'B' operand is not constant");
1754 /* GCC sign-extends masks with the MSB set, so we have to
1755 detect all the cases that differ only in sign extension
1756 beyond the bits we care about. Normally, the predicates
1757 and constraints ensure that we have the right values. This
1758 works correctly for valid masks. */
1759 if (bits_set[xx & 7] <= 1)
1761 /* Remove sign extension bits. */
1762 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1763 xx &= 0xff;
1764 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1765 xx &= 0xffff;
1766 l = exact_log2 (xx);
1768 else
1770 /* Add sign extension bits. */
1771 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1772 xx |= ~(HOST_WIDE_INT)0xff;
1773 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1774 xx |= ~(HOST_WIDE_INT)0xffff;
1775 l = exact_log2 (~xx);
1778 if (l == -1)
1779 output_operand_lossage ("'B' operand has multiple bits set");
1781 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1782 return;
1785 case 'C':
1786 /* Print the symbol without a surrounding @fptr(). */
1787 if (GET_CODE (x) == SYMBOL_REF)
1788 assemble_name (file, XSTR (x, 0));
1789 else if (LABEL_P (x))
1790 output_asm_label (x);
1791 else
1792 xstormy16_print_operand_address (file, x);
1793 return;
1795 case 'o':
1796 case 'O':
1797 /* Print the immediate operand less one, preceded by '#'.
1798 For 'O', negate it first. */
1800 HOST_WIDE_INT xx = 0;
1802 if (CONST_INT_P (x))
1803 xx = INTVAL (x);
1804 else
1805 output_operand_lossage ("'o' operand is not constant");
1807 if (code == 'O')
1808 xx = -xx;
1810 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1811 return;
1814 case 'b':
1815 /* Print the shift mask for bp/bn. */
1817 HOST_WIDE_INT xx = 1;
1818 HOST_WIDE_INT l;
1820 if (CONST_INT_P (x))
1821 xx = INTVAL (x);
1822 else
1823 output_operand_lossage ("'B' operand is not constant");
1825 l = 7 - xx;
1827 fputs (IMMEDIATE_PREFIX, file);
1828 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1829 return;
1832 case 0:
1833 /* Handled below. */
1834 break;
1836 default:
1837 output_operand_lossage ("xstormy16_print_operand: unknown code");
1838 return;
1841 switch (GET_CODE (x))
1843 case REG:
1844 fputs (reg_names [REGNO (x)], file);
1845 break;
1847 case MEM:
1848 xstormy16_print_operand_address (file, XEXP (x, 0));
1849 break;
1851 default:
1852 /* Some kind of constant or label; an immediate operand,
1853 so prefix it with '#' for the assembler. */
1854 fputs (IMMEDIATE_PREFIX, file);
1855 output_addr_const (file, x);
1856 break;
1859 return;
1862 /* Expander for the `casesi' pattern.
1863 INDEX is the index of the switch statement.
1864 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1865 to the first table entry.
1866 RANGE is the number of table entries.
1867 TABLE is an ADDR_VEC that is the jump table.
1868 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1869 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1871 void
1872 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1873 rtx table, rtx default_label)
1875 HOST_WIDE_INT range_i = INTVAL (range);
1876 rtx int_index;
1878 /* This code uses 'br', so it can deal only with tables of size up to
1879 8192 entries. */
1880 if (range_i >= 8192)
1881 sorry ("switch statement of size %lu entries too large",
1882 (unsigned long) range_i);
1884 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1885 OPTAB_LIB_WIDEN);
1886 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1887 default_label);
1888 int_index = gen_lowpart_common (HImode, index);
1889 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1890 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1893 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1894 instructions, without label or alignment or any other special
1895 constructs. We know that the previous instruction will be the
1896 `tablejump_pcrel' output above.
1898 TODO: it might be nice to output 'br' instructions if they could
1899 all reach. */
1901 void
1902 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1904 int vlen, idx;
1906 switch_to_section (current_function_section ());
1908 vlen = XVECLEN (table, 0);
1909 for (idx = 0; idx < vlen; idx++)
1911 fputs ("\tjmpf ", file);
1912 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1913 fputc ('\n', file);
1917 /* Expander for the `call' patterns.
1918 RETVAL is the RTL for the return register or NULL for void functions.
1919 DEST is the function to call, expressed as a MEM.
1920 COUNTER is ignored. */
1922 void
1923 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1925 rtx call, temp;
1926 machine_mode mode;
1928 gcc_assert (MEM_P (dest));
1929 dest = XEXP (dest, 0);
1931 if (! CONSTANT_P (dest) && ! REG_P (dest))
1932 dest = force_reg (Pmode, dest);
1934 if (retval == NULL)
1935 mode = VOIDmode;
1936 else
1937 mode = GET_MODE (retval);
1939 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1940 counter);
1941 if (retval)
1942 call = gen_rtx_SET (retval, call);
1944 if (! CONSTANT_P (dest))
1946 temp = gen_reg_rtx (HImode);
1947 emit_move_insn (temp, const0_rtx);
1949 else
1950 temp = const0_rtx;
1952 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1953 gen_rtx_USE (VOIDmode, temp)));
1954 emit_call_insn (call);
1957 /* Expanders for multiword computational operations. */
1959 /* Expander for arithmetic operations; emit insns to compute
1961 (set DEST (CODE:MODE SRC0 SRC1))
1963 When CODE is COMPARE, a branch template is generated
1964 (this saves duplicating code in xstormy16_split_cbranch). */
void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as 0 - SRC1, word by word.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one machine word per iteration, least-significant word
     first, threading the carry through the *chi5 patterns.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding zero in the lowest word is a no-op; skip it.  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 sets the carry; addchi5 also consumes it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE, the final word is emitted as a combined
	     subtract-with-borrow plus conditional branch template
	     (see xstormy16_split_cbranch).  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  /* Subtracting zero in the lowest word is a no-op (but not
	     for COMPARE, which must still set the carry).  */
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: 0 for IOR/XOR, -1 for AND.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (w_dest, gen_rtx_fmt_ee (code, mode,
						      w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2063 /* The shift operations are split at output time for constant values;
2064 variable-width shifts get handed off to a library routine.
2066 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2067 SIZE_R will be a CONST_INT, X will be a hard register. */
const char *
xstormy16_output_shift (machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  /* NOTE: the returned template lives in this static buffer, so it is
     only valid until the next call.  */
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
	      && REG_P (x)
	      && mode == SImode);

  /* Shift counts are taken modulo the 32-bit operand width.  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* R0 is the low half of the SImode register pair, R1 the high.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      /* A 16-bit shift is just a register move plus a clear or
	 sign-fill of the vacated half.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }
  if (size > 16)
    {
      /* Move between halves as for size 16, then shift the surviving
	 half by the remaining SIZE - 16 bits.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  /* Shift both halves, then OR the TEMP copy of the bits that crossed
     the half-word boundary into the other half.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2179 /* Attribute handling. */
2181 /* Return nonzero if the function is an interrupt function. */
2184 xstormy16_interrupt_function_p (void)
2186 tree attributes;
2188 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2189 any functions are declared, which is demonstrably wrong, but
2190 it is worked around here. FIXME. */
2191 if (!cfun)
2192 return 0;
2194 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2195 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2198 #undef TARGET_ATTRIBUTE_TABLE
2199 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2201 static tree xstormy16_handle_interrupt_attribute
2202 (tree *, tree, tree, int, bool *);
2203 static tree xstormy16_handle_below100_attribute
2204 (tree *, tree, tree, int, bool *);
2206 static const struct attribute_spec xstormy16_attribute_table[] =
2208 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2209 affects_type_identity. */
2210 { "interrupt", 0, 0, false, true, true,
2211 xstormy16_handle_interrupt_attribute , false },
2212 { "BELOW100", 0, 0, false, false, false,
2213 xstormy16_handle_below100_attribute, false },
2214 { "below100", 0, 0, false, false, false,
2215 xstormy16_handle_below100_attribute, false },
2216 { NULL, 0, 0, false, false, false, NULL, false }
2219 /* Handle an "interrupt" attribute;
2220 arguments as in struct attribute_spec.handler. */
2222 static tree
2223 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2224 tree args ATTRIBUTE_UNUSED,
2225 int flags ATTRIBUTE_UNUSED,
2226 bool *no_add_attrs)
2228 if (TREE_CODE (*node) != FUNCTION_TYPE)
2230 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2231 name);
2232 *no_add_attrs = true;
2235 return NULL_TREE;
2238 /* Handle an "below" attribute;
2239 arguments as in struct attribute_spec.handler. */
2241 static tree
2242 xstormy16_handle_below100_attribute (tree *node,
2243 tree name ATTRIBUTE_UNUSED,
2244 tree args ATTRIBUTE_UNUSED,
2245 int flags ATTRIBUTE_UNUSED,
2246 bool *no_add_attrs)
2248 if (TREE_CODE (*node) != VAR_DECL
2249 && TREE_CODE (*node) != POINTER_TYPE
2250 && TREE_CODE (*node) != TYPE_DECL)
2252 warning (OPT_Wattributes,
2253 "%<__BELOW100__%> attribute only applies to variables");
2254 *no_add_attrs = true;
2256 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2258 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2260 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2261 "with auto storage class");
2262 *no_add_attrs = true;
2266 return NULL_TREE;
2269 #undef TARGET_INIT_BUILTINS
2270 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2271 #undef TARGET_EXPAND_BUILTIN
2272 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2274 static struct
2276 const char * name;
2277 int md_code;
2278 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2279 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2281 s16builtins[] =
2283 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2284 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2285 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2286 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2287 { NULL, 0, NULL, NULL }
2290 static void
2291 xstormy16_init_builtins (void)
2293 tree args[2], ret_type, arg = NULL_TREE, ftype;
2294 int i, a, n_args;
2296 ret_type = void_type_node;
2298 for (i = 0; s16builtins[i].name; i++)
2300 n_args = strlen (s16builtins[i].arg_types) - 1;
2302 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2304 for (a = n_args - 1; a >= 0; a--)
2305 args[a] = NULL_TREE;
2307 for (a = n_args; a >= 0; a--)
2309 switch (s16builtins[i].arg_types[a])
2311 case 's': arg = short_integer_type_node; break;
2312 case 'S': arg = short_unsigned_type_node; break;
2313 case 'l': arg = long_integer_type_node; break;
2314 case 'L': arg = long_unsigned_type_node; break;
2315 default: gcc_unreachable ();
2317 if (a == 0)
2318 ret_type = arg;
2319 else
2320 args[a-1] = arg;
2322 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2323 add_builtin_function (s16builtins[i].name, ftype,
2324 i, BUILT_IN_MD, NULL, NULL_TREE);
2328 static rtx
2329 xstormy16_expand_builtin (tree exp, rtx target,
2330 rtx subtarget ATTRIBUTE_UNUSED,
2331 machine_mode mode ATTRIBUTE_UNUSED,
2332 int ignore ATTRIBUTE_UNUSED)
2334 rtx op[10], args[10], pat, copyto[10], retval = 0;
2335 tree fndecl, argtree;
2336 int i, a, o, code;
2338 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2339 argtree = TREE_OPERAND (exp, 1);
2340 i = DECL_FUNCTION_CODE (fndecl);
2341 code = s16builtins[i].md_code;
2343 for (a = 0; a < 10 && argtree; a++)
2345 args[a] = expand_normal (TREE_VALUE (argtree));
2346 argtree = TREE_CHAIN (argtree);
2349 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2351 char ao = s16builtins[i].arg_ops[o];
2352 char c = insn_data[code].operand[o].constraint[0];
2353 machine_mode omode;
2355 copyto[o] = 0;
2357 omode = (machine_mode) insn_data[code].operand[o].mode;
2358 if (ao == 'r')
2359 op[o] = target ? target : gen_reg_rtx (omode);
2360 else if (ao == 't')
2361 op[o] = gen_reg_rtx (omode);
2362 else
2363 op[o] = args[(int) hex_value (ao)];
2365 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2367 if (c == '+' || c == '=')
2369 copyto[o] = op[o];
2370 op[o] = gen_reg_rtx (omode);
2372 else
2373 op[o] = copy_to_mode_reg (omode, op[o]);
2376 if (ao == 'r')
2377 retval = op[o];
2380 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2381 op[5], op[6], op[7], op[8], op[9]);
2382 emit_insn (pat);
2384 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2385 if (copyto[o])
2387 emit_move_insn (copyto[o], op[o]);
2388 if (op[o] == retval)
2389 retval = copyto[o];
2392 return retval;
2395 /* Look for combinations of insns that can be converted to BN or BP
2396 opcodes. This is, unfortunately, too complex to do with MD
2397 patterns. */
2399 static void
2400 combine_bnp (rtx_insn *insn)
2402 int insn_code, regno, need_extend;
2403 unsigned int mask;
2404 rtx cond, reg, qireg, mem;
2405 rtx_insn *and_insn, *load;
2406 machine_mode load_mode = QImode;
2407 machine_mode and_mode = QImode;
2408 rtx_insn *shift = NULL;
2410 insn_code = recog_memoized (insn);
2411 if (insn_code != CODE_FOR_cbranchhi
2412 && insn_code != CODE_FOR_cbranchhi_neg)
2413 return;
2415 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2416 cond = XEXP (cond, 1); /* if */
2417 cond = XEXP (cond, 0); /* cond */
2418 switch (GET_CODE (cond))
2420 case NE:
2421 case EQ:
2422 need_extend = 0;
2423 break;
2424 case LT:
2425 case GE:
2426 need_extend = 1;
2427 break;
2428 default:
2429 return;
2432 reg = XEXP (cond, 0);
2433 if (! REG_P (reg))
2434 return;
2435 regno = REGNO (reg);
2436 if (XEXP (cond, 1) != const0_rtx)
2437 return;
2438 if (! find_regno_note (insn, REG_DEAD, regno))
2439 return;
2440 qireg = gen_rtx_REG (QImode, regno);
2442 if (need_extend)
2444 /* LT and GE conditionals should have a sign extend before
2445 them. */
2446 for (and_insn = prev_real_insn (insn);
2447 and_insn != NULL_RTX;
2448 and_insn = prev_real_insn (and_insn))
2450 int and_code = recog_memoized (and_insn);
2452 if (and_code == CODE_FOR_extendqihi2
2453 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2454 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2455 break;
2457 if (and_code == CODE_FOR_movhi_internal
2458 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2460 /* This is for testing bit 15. */
2461 and_insn = insn;
2462 break;
2465 if (reg_mentioned_p (reg, and_insn))
2466 return;
2468 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2469 return;
2472 else
2474 /* EQ and NE conditionals have an AND before them. */
2475 for (and_insn = prev_real_insn (insn);
2476 and_insn != NULL_RTX;
2477 and_insn = prev_real_insn (and_insn))
2479 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2480 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2481 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2482 break;
2484 if (reg_mentioned_p (reg, and_insn))
2485 return;
2487 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2488 return;
2491 if (and_insn)
2493 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2494 followed by an AND like this:
2496 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2497 (clobber (reg:BI carry))]
2499 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2501 Attempt to detect this here. */
2502 for (shift = prev_real_insn (and_insn); shift;
2503 shift = prev_real_insn (shift))
2505 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2506 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2507 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2508 break;
2510 if (reg_mentioned_p (reg, shift)
2511 || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
2513 shift = NULL;
2514 break;
2520 if (and_insn == NULL_RTX)
2521 return;
2523 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2524 load;
2525 load = prev_real_insn (load))
2527 int load_code = recog_memoized (load);
2529 if (load_code == CODE_FOR_movhi_internal
2530 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2531 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2532 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2534 load_mode = HImode;
2535 break;
2538 if (load_code == CODE_FOR_movqi_internal
2539 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2540 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2542 load_mode = QImode;
2543 break;
2546 if (load_code == CODE_FOR_zero_extendqihi2
2547 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2548 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2550 load_mode = QImode;
2551 and_mode = HImode;
2552 break;
2555 if (reg_mentioned_p (reg, load))
2556 return;
2558 if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
2559 return;
2561 if (!load)
2562 return;
2564 mem = SET_SRC (PATTERN (load));
2566 if (need_extend)
2568 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2570 /* If the mem includes a zero-extend operation and we are
2571 going to generate a sign-extend operation then move the
2572 mem inside the zero-extend. */
2573 if (GET_CODE (mem) == ZERO_EXTEND)
2574 mem = XEXP (mem, 0);
2576 else
2578 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2579 load_mode))
2580 return;
2582 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2584 if (shift)
2585 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2588 if (load_mode == HImode)
2590 rtx addr = XEXP (mem, 0);
2592 if (! (mask & 0xff))
2594 addr = plus_constant (Pmode, addr, 1);
2595 mask >>= 8;
2597 mem = gen_rtx_MEM (QImode, addr);
2600 if (need_extend)
2601 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2602 else
2603 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2605 INSN_CODE (insn) = -1;
2606 delete_insn (load);
2608 if (and_insn != insn)
2609 delete_insn (and_insn);
2611 if (shift != NULL_RTX)
2612 delete_insn (shift);
2615 static void
2616 xstormy16_reorg (void)
2618 rtx_insn *insn;
2620 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2622 if (! JUMP_P (insn))
2623 continue;
2624 combine_bnp (insn);
2628 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2630 static bool
2631 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2633 const HOST_WIDE_INT size = int_size_in_bytes (type);
2634 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2637 #undef TARGET_ASM_ALIGNED_HI_OP
2638 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2639 #undef TARGET_ASM_ALIGNED_SI_OP
2640 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2641 #undef TARGET_ENCODE_SECTION_INFO
2642 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2644 /* Select_section doesn't handle .bss_below100. */
2645 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2646 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2648 #undef TARGET_ASM_OUTPUT_MI_THUNK
2649 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2650 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2651 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2653 #undef TARGET_PRINT_OPERAND
2654 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2655 #undef TARGET_PRINT_OPERAND_ADDRESS
2656 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2658 #undef TARGET_MEMORY_MOVE_COST
2659 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2660 #undef TARGET_RTX_COSTS
2661 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2662 #undef TARGET_ADDRESS_COST
2663 #define TARGET_ADDRESS_COST xstormy16_address_cost
2665 #undef TARGET_BUILD_BUILTIN_VA_LIST
2666 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2667 #undef TARGET_EXPAND_BUILTIN_VA_START
2668 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2669 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2670 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2672 #undef TARGET_PROMOTE_FUNCTION_MODE
2673 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2674 #undef TARGET_PROMOTE_PROTOTYPES
2675 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2677 #undef TARGET_FUNCTION_ARG
2678 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2679 #undef TARGET_FUNCTION_ARG_ADVANCE
2680 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2682 #undef TARGET_RETURN_IN_MEMORY
2683 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2684 #undef TARGET_FUNCTION_VALUE
2685 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2686 #undef TARGET_LIBCALL_VALUE
2687 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2688 #undef TARGET_FUNCTION_VALUE_REGNO_P
2689 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2691 #undef TARGET_MACHINE_DEPENDENT_REORG
2692 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2694 #undef TARGET_PREFERRED_RELOAD_CLASS
2695 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2696 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2697 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2699 #undef TARGET_LEGITIMATE_ADDRESS_P
2700 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2701 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2702 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2704 #undef TARGET_CAN_ELIMINATE
2705 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2707 #undef TARGET_TRAMPOLINE_INIT
2708 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2710 struct gcc_target targetm = TARGET_INITIALIZER;
2712 #include "gt-stormy16.h"