recog_memoized works on an rtx_insn *
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blobc5f5e1f0936d23b694635a3275993134a1321717
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "stringpool.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "except.h"
45 #include "function.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "tm_p.h"
49 #include "langhooks.h"
50 #include "hash-table.h"
51 #include "vec.h"
52 #include "ggc.h"
53 #include "basic-block.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-fold.h"
57 #include "tree-eh.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimplify.h"
62 #include "df.h"
63 #include "reload.h"
64 #include "builtins.h"
/* Forward declarations for static helpers defined later in this file.  */
66 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
67 static void xstormy16_asm_out_constructor (rtx, int);
68 static void xstormy16_asm_out_destructor (rtx, int);
69 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
70 HOST_WIDE_INT, tree);
72 static void xstormy16_init_builtins (void);
73 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
74 static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
75 static int xstormy16_address_cost (rtx, enum machine_mode, addr_space_t, bool);
76 static bool xstormy16_return_in_memory (const_tree, const_tree);
/* GC-rooted handle to an output section; presumably the section used for
   "below 100" objects given the name -- TODO confirm against the
   section-selection code later in this file.  */
78 static GTY(()) section *bss100_section;
80 /* Compute a (partial) cost for rtx X. Return true if the complete
81 cost has been computed, and false if subexpressions should be
82 scanned. In either case, *TOTAL contains the cost result. */
84 static bool
85 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
86 int opno ATTRIBUTE_UNUSED, int *total,
87 bool speed ATTRIBUTE_UNUSED)
89 switch (code)
91 case CONST_INT:
92 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
93 *total = COSTS_N_INSNS (1) / 2;
94 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
95 *total = COSTS_N_INSNS (1);
96 else
97 *total = COSTS_N_INSNS (2);
98 return true;
100 case CONST_DOUBLE:
101 case CONST:
102 case SYMBOL_REF:
103 case LABEL_REF:
104 *total = COSTS_N_INSNS (2);
105 return true;
107 case MULT:
108 *total = COSTS_N_INSNS (35 + 6);
109 return true;
110 case DIV:
111 *total = COSTS_N_INSNS (51 - 6);
112 return true;
114 default:
115 return false;
119 static int
120 xstormy16_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
121 addr_space_t as ATTRIBUTE_UNUSED,
122 bool speed ATTRIBUTE_UNUSED)
124 return (CONST_INT_P (x) ? 2
125 : GET_CODE (x) == PLUS ? 7
126 : 5);
129 /* Worker function for TARGET_MEMORY_MOVE_COST. */
131 static int
132 xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
133 bool in)
135 return (5 + memory_move_secondary_cost (mode, rclass, in));
138 /* Branches are handled as follows:
140 1. HImode compare-and-branches. The machine supports these
141 natively, so the appropriate pattern is emitted directly.
143 2. SImode EQ and NE. These are emitted as pairs of HImode
144 compare-and-branches.
146 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
147 of a SImode subtract followed by a branch (not a compare-and-branch),
148 like this:
153 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
158 bne. */
160 /* Emit a branch of kind CODE to location LOC. */
/* OP0 and OP1 are the comparison operands (mode HImode or SImode);
   SImode comparisons are decomposed into HImode pieces by recursing.  */
162 void
163 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
165 rtx condition_rtx, loc_ref, branch, cy_clobber;
166 rtvec vec;
167 enum machine_mode mode;
169 mode = GET_MODE (op0);
170 gcc_assert (mode == HImode || mode == SImode);
/* Case 4 above: rewrite GT/LE/GTU/LEU as an LT/LTU branch followed by
   an EQ/NE branch; the GT flavours branch around via a local label.  */
172 if (mode == SImode
173 && (code == GT || code == LE || code == GTU || code == LEU))
175 int unsigned_p = (code == GTU || code == LEU);
176 int gt_p = (code == GT || code == GTU);
177 rtx lab = NULL_RTX;
179 if (gt_p)
180 lab = gen_label_rtx ();
181 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
182 /* This should be generated as a comparison against the temporary
183 created by the previous insn, but reload can't handle that. */
184 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
185 if (gt_p)
186 emit_label (lab);
187 return;
/* Case 2 above: SImode EQ/NE against a nonzero value becomes one HImode
   compare-and-branch per word.  */
189 else if (mode == SImode
190 && (code == NE || code == EQ)
191 && op1 != const0_rtx)
193 rtx op0_word, op1_word;
194 rtx lab = NULL_RTX;
195 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
196 int i;
198 if (code == EQ)
199 lab = gen_label_rtx ();
/* All but the last word: any inequality decides the branch early.  */
201 for (i = 0; i < num_words - 1; i++)
203 op0_word = simplify_gen_subreg (word_mode, op0, mode,
204 i * UNITS_PER_WORD);
205 op1_word = simplify_gen_subreg (word_mode, op1, mode,
206 i * UNITS_PER_WORD);
207 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
/* The last word is compared with the original condition code.  */
209 op0_word = simplify_gen_subreg (word_mode, op0, mode,
210 i * UNITS_PER_WORD);
211 op1_word = simplify_gen_subreg (word_mode, op1, mode,
212 i * UNITS_PER_WORD);
213 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
215 if (code == EQ)
216 emit_label (lab);
217 return;
220 /* We can't allow reload to try to generate any reload after a branch,
221 so when some register must match we must make the temporary ourselves. */
222 if (mode != HImode)
224 rtx tmp;
225 tmp = gen_reg_rtx (mode);
226 emit_move_insn (tmp, op0);
227 op0 = tmp;
/* Build the (set (pc) (if_then_else ...)) jump, clobbering the carry
   register, and for the SImode forms also clobbering op0.  */
230 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
231 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
232 branch = gen_rtx_SET (VOIDmode, pc_rtx,
233 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
234 loc_ref, pc_rtx));
236 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
238 if (mode == HImode)
239 vec = gen_rtvec (2, branch, cy_clobber);
240 else if (code == NE || code == EQ)
241 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
242 else
244 rtx sub;
245 #if 0
246 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
247 #else
248 sub = gen_rtx_CLOBBER (SImode, op0);
249 #endif
250 vec = gen_rtvec (3, branch, sub, cy_clobber);
253 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
256 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
257 the arithmetic operation. Most of the work is done by
258 xstormy16_expand_arith. */
/* MODE is the mode of the comparison operands, LABEL the branch target,
   COMPARISON the condition rtx, and DEST the scratch destination passed
   on to xstormy16_expand_arith.  */
260 void
261 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
262 rtx dest)
264 rtx op0 = XEXP (comparison, 0);
265 rtx op1 = XEXP (comparison, 1);
266 rtx_insn *seq, *last_insn;
267 rtx compare;
/* Build the compare/subtract sequence into a detached insn list.  */
269 start_sequence ();
270 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
271 seq = get_insns ();
272 end_sequence ();
274 gcc_assert (INSN_P (seq));
/* Find the last insn of the generated sequence.  */
276 last_insn = seq;
277 while (NEXT_INSN (last_insn) != NULL_RTX)
278 last_insn = NEXT_INSN (last_insn);
/* Patch that final insn in place: give its condition the original
   comparison code and point its target at LABEL, then emit the whole
   sequence into the insn stream.  */
280 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
281 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
282 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
283 emit_insn (seq);
287 /* Return the string to output a conditional branch to LABEL, which is
288 the operand number of the label.
290 OP is the conditional expression, or NULL for branch-always.
292 REVERSED is nonzero if we should reverse the sense of the comparison.
294 INSN is the insn. */
/* Note: the returned pointer is to a static buffer, so the result is
   only valid until the next call.  */
296 char *
297 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
299 static char string[64];
/* A long branch is needed when the insn length attribute says this
   instance was assessed at its longer encoding.  */
300 int need_longbranch = (op != NULL_RTX
301 ? get_attr_length (insn) == 8
302 : get_attr_length (insn) == 4);
/* Long branches invert the condition and jump around a jmpf, so the
   effective sense flips once more.  */
303 int really_reversed = reversed ^ need_longbranch;
304 const char *ccode;
305 const char *templ;
306 const char *operands;
307 enum rtx_code code;
/* Unconditional branch: plain "br", or "jmpf" when out of range.  */
309 if (! op)
311 if (need_longbranch)
312 ccode = "jmpf";
313 else
314 ccode = "br";
315 sprintf (string, "%s %s", ccode, label);
316 return string;
319 code = GET_CODE (op);
/* If the first operand isn't a register, swap the operands (and the
   condition) so the register comes first in the output.  */
321 if (! REG_P (XEXP (op, 0)))
323 code = swap_condition (code);
324 operands = "%3,%2";
326 else
327 operands = "%2,%3";
329 /* Work out which way this really branches. */
330 if (really_reversed)
331 code = reverse_condition (code);
/* Map the rtx comparison code to the machine's condition mnemonic.  */
333 switch (code)
335 case EQ: ccode = "z"; break;
336 case NE: ccode = "nz"; break;
337 case GE: ccode = "ge"; break;
338 case LT: ccode = "lt"; break;
339 case GT: ccode = "gt"; break;
340 case LE: ccode = "le"; break;
341 case GEU: ccode = "nc"; break;
342 case LTU: ccode = "c"; break;
343 case GTU: ccode = "hi"; break;
344 case LEU: ccode = "ls"; break;
346 default:
347 gcc_unreachable ();
/* Long form: short branch over a following far jump.  */
350 if (need_longbranch)
351 templ = "b%s %s,.+8 | jmpf %s";
352 else
353 templ = "b%s %s,%s";
354 sprintf (string, templ, ccode, operands, label);
356 return string;
359 /* Return the string to output a conditional branch to LABEL, which is
360 the operand number of the label, but suitable for the tail of a
361 SImode branch.
363 OP is the conditional expression (OP is never NULL_RTX).
365 REVERSED is nonzero if we should reverse the sense of the comparison.
367 INSN is the insn. */
/* Note: the returned pointer is to a static buffer, so the result is
   only valid until the next call.  */
369 char *
370 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
372 static char string[64];
373 int need_longbranch = get_attr_length (insn) >= 8;
/* Long branches invert the condition and jump around a jmpf.  */
374 int really_reversed = reversed ^ need_longbranch;
375 const char *ccode;
376 const char *templ;
/* Holds the instruction emitted before the branch ("or" or "sbc").  */
377 char prevop[16];
378 enum rtx_code code;
380 code = GET_CODE (op);
382 /* Work out which way this really branches. */
383 if (really_reversed)
384 code = reverse_condition (code);
/* Map the rtx comparison code to the machine's condition mnemonic.  */
386 switch (code)
388 case EQ: ccode = "z"; break;
389 case NE: ccode = "nz"; break;
390 case GE: ccode = "ge"; break;
391 case LT: ccode = "lt"; break;
392 case GEU: ccode = "nc"; break;
393 case LTU: ccode = "c"; break;
395 /* The missing codes above should never be generated. */
396 default:
397 gcc_unreachable ();
/* Pick the instruction that combines the SImode halves before the
   branch: OR of the two registers for EQ/NE, subtract-with-carry of
   the high words for the ordered comparisons.  */
400 switch (code)
402 case EQ: case NE:
404 int regnum;
406 gcc_assert (REG_P (XEXP (op, 0)));
408 regnum = REGNO (XEXP (op, 0));
409 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
411 break;
413 case GE: case LT: case GEU: case LTU:
414 strcpy (prevop, "sbc %2,%3");
415 break;
417 default:
418 gcc_unreachable ();
/* Long form: short branch over a following far jump.  */
421 if (need_longbranch)
422 templ = "%s | b%s .+6 | jmpf %s";
423 else
424 templ = "%s | b%s %s";
425 sprintf (string, templ, prevop, ccode, label);
427 return string;
430 /* Many machines have some registers that cannot be copied directly to or from
431 memory or even from other types of registers. An example is the `MQ'
432 register, which on most machines, can only be copied to or from general
433 registers, but not memory. Some machines allow copying all registers to and
434 from memory, but require a scratch register for stores to some memory
435 locations (e.g., those with symbolic address on the RT, and those with
436 certain symbolic address on the SPARC when compiling PIC). In some cases,
437 both an intermediate and a scratch register are required.
439 You should define these macros to indicate to the reload phase that it may
440 need to allocate at least one register for a reload in addition to the
441 register to contain the data. Specifically, if copying X to a register
442 RCLASS in MODE requires an intermediate register, you should define
443 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
444 whose registers can be used as intermediate registers or scratch registers.
446 If copying a register RCLASS in MODE to X requires an intermediate or scratch
447 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
448 largest register class required. If the requirements for input and output
449 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
450 instead of defining both macros identically.
452 The values returned by these macros are often `GENERAL_REGS'. Return
453 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
454 to or from a register of RCLASS in MODE without requiring a scratch register.
455 Do not define this macro if it would always return `NO_REGS'.
457 If a scratch register is required (either with or without an intermediate
458 register), you should define patterns for `reload_inM' or `reload_outM', as
459 required.. These patterns, which will normally be implemented with a
460 `define_expand', should be similar to the `movM' patterns, except that
461 operand 2 is the scratch register.
463 Define constraints for the reload register and scratch register that contain
464 a single register class. If the original reload register (whose class is
465 RCLASS) can meet the constraint given in the pattern, the value returned by
466 these macros is used for the class of the scratch register. Otherwise, two
467 additional reload registers are required. Their classes are obtained from
468 the constraints in the insn pattern.
470 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
471 either be in a hard register or in memory. Use `true_regnum' to find out;
472 it will return -1 if the pseudo is in memory and the hard register number if
473 it is in a register.
475 These macros should not be used in the case where a particular class of
476 registers can only be copied to memory and not to another class of
477 registers. In that case, secondary reload registers are not needed and
478 would not be helpful. Instead, a stack location must be used to perform the
479 copy and the `movM' pattern should use memory as an intermediate storage.
480 This case often occurs between floating-point and general registers. */
482 enum reg_class
483 xstormy16_secondary_reload_class (enum reg_class rclass,
484 enum machine_mode mode ATTRIBUTE_UNUSED,
485 rtx x)
487 /* This chip has the interesting property that only the first eight
488 registers can be moved to/from memory. */
489 if ((MEM_P (x)
490 || ((GET_CODE (x) == SUBREG || REG_P (x))
491 && (true_regnum (x) == -1
492 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
493 && ! reg_class_subset_p (rclass, EIGHT_REGS))
494 return EIGHT_REGS;
496 return NO_REGS;
499 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
500 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
502 static reg_class_t
503 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
505 if (rclass == GENERAL_REGS && MEM_P (x))
506 return EIGHT_REGS;
508 return rclass;
511 /* Predicate for symbols and addresses that reflect special 8-bit
512 addressing. */
515 xstormy16_below100_symbol (rtx x,
516 enum machine_mode mode ATTRIBUTE_UNUSED)
518 if (GET_CODE (x) == CONST)
519 x = XEXP (x, 0);
520 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
521 x = XEXP (x, 0);
523 if (GET_CODE (x) == SYMBOL_REF)
524 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
526 if (CONST_INT_P (x))
528 HOST_WIDE_INT i = INTVAL (x);
530 if ((i >= 0x0000 && i <= 0x00ff)
531 || (i >= 0x7f00 && i <= 0x7fff))
532 return 1;
534 return 0;
537 /* Likewise, but only for non-volatile MEMs, for patterns where the
538 MEM will get split into smaller sized accesses. */
541 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
543 if (MEM_P (x) && MEM_VOLATILE_P (x))
544 return 0;
545 return xstormy16_below100_operand (x, mode);
548 /* Expand an 8-bit IOR. This either detects the one case we can
549 actually do, or uses a 16-bit IOR. */
551 void
552 xstormy16_expand_iorqi3 (rtx *operands)
554 rtx in, out, outsub, val;
556 out = operands[0];
557 in = operands[1];
558 val = operands[2];
560 if (xstormy16_onebit_set_operand (val, QImode))
562 if (!xstormy16_below100_or_register (in, QImode))
563 in = copy_to_mode_reg (QImode, in);
564 if (!xstormy16_below100_or_register (out, QImode))
565 out = gen_reg_rtx (QImode);
566 emit_insn (gen_iorqi3_internal (out, in, val));
567 if (out != operands[0])
568 emit_move_insn (operands[0], out);
569 return;
572 if (! REG_P (in))
573 in = copy_to_mode_reg (QImode, in);
575 if (! REG_P (val) && ! CONST_INT_P (val))
576 val = copy_to_mode_reg (QImode, val);
578 if (! REG_P (out))
579 out = gen_reg_rtx (QImode);
581 in = simplify_gen_subreg (HImode, in, QImode, 0);
582 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
584 if (! CONST_INT_P (val))
585 val = simplify_gen_subreg (HImode, val, QImode, 0);
587 emit_insn (gen_iorhi3 (outsub, in, val));
589 if (out != operands[0])
590 emit_move_insn (operands[0], out);
593 /* Expand an 8-bit AND. This either detects the one case we can
594 actually do, or uses a 16-bit AND. */
596 void
597 xstormy16_expand_andqi3 (rtx *operands)
599 rtx in, out, outsub, val;
601 out = operands[0];
602 in = operands[1];
603 val = operands[2];
605 if (xstormy16_onebit_clr_operand (val, QImode))
607 if (!xstormy16_below100_or_register (in, QImode))
608 in = copy_to_mode_reg (QImode, in);
609 if (!xstormy16_below100_or_register (out, QImode))
610 out = gen_reg_rtx (QImode);
611 emit_insn (gen_andqi3_internal (out, in, val));
612 if (out != operands[0])
613 emit_move_insn (operands[0], out);
614 return;
617 if (! REG_P (in))
618 in = copy_to_mode_reg (QImode, in);
620 if (! REG_P (val) && ! CONST_INT_P (val))
621 val = copy_to_mode_reg (QImode, val);
623 if (! REG_P (out))
624 out = gen_reg_rtx (QImode);
626 in = simplify_gen_subreg (HImode, in, QImode, 0);
627 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
629 if (! CONST_INT_P (val))
630 val = simplify_gen_subreg (HImode, val, QImode, 0);
632 emit_insn (gen_andhi3 (outsub, in, val));
634 if (out != operands[0])
635 emit_move_insn (operands[0], out);
/* True iff X is a CONST_INT for which X + OFFSET fits in the signed
   range [-2048, 2047] (a displacement that fits the indexed
   addressing forms).  */
638 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
639 (CONST_INT_P (X) \
640 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True iff X is a CONST_INT for which X + OFFSET is a valid absolute
   address: non-negative, below 0x8000, and within either the low
   [0, 0xFF] range or the high [0x7F00, 0x7FFF] range.  */
642 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
643 (CONST_INT_P (X) \
644 && INTVAL (X) + (OFFSET) >= 0 \
645 && INTVAL (X) + (OFFSET) < 0x8000 \
646 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
648 bool
649 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
650 rtx x, bool strict)
652 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
653 return true;
655 if (GET_CODE (x) == PLUS
656 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
658 x = XEXP (x, 0);
659 /* PR 31232: Do not allow INT+INT as an address. */
660 if (CONST_INT_P (x))
661 return false;
664 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
665 || GET_CODE (x) == POST_INC
666 || GET_CODE (x) == PRE_DEC)
667 x = XEXP (x, 0);
669 if (REG_P (x)
670 && REGNO_OK_FOR_BASE_P (REGNO (x))
671 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
672 return true;
674 if (xstormy16_below100_symbol (x, mode))
675 return true;
677 return false;
680 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
682 On this chip, this is true if the address is valid with an offset
683 of 0 but not of 6, because in that case it cannot be used as an
684 address for DImode or DFmode, or if the address is a post-increment
685 or pre-decrement address. */
687 static bool
688 xstormy16_mode_dependent_address_p (const_rtx x,
689 addr_space_t as ATTRIBUTE_UNUSED)
691 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
692 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
693 return true;
695 if (GET_CODE (x) == PLUS
696 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
697 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
698 return true;
700 /* Auto-increment addresses are now treated generically in recog.c. */
701 return false;
705 short_memory_operand (rtx x, enum machine_mode mode)
707 if (! memory_operand (x, mode))
708 return 0;
709 return (GET_CODE (XEXP (x, 0)) != PLUS);
712 /* Splitter for the 'move' patterns, for modes not directly implemented
713 by hardware. Emit insns to copy a value of mode MODE from SRC to
714 DEST.
716 This function is only called when reload_completed. */
718 void
719 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
721 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
/* direction is +1 to copy least-significant word first, -1 for
   most-significant first; end is the loop sentinel for either order.  */
722 int direction, end, i;
723 int src_modifies = 0;
724 int dest_modifies = 0;
725 int src_volatile = 0;
726 int dest_volatile = 0;
727 rtx mem_operand;
728 rtx auto_inc_reg_rtx = NULL_RTX;
730 /* Check initial conditions. */
731 gcc_assert (reload_completed
732 && mode != QImode && mode != HImode
733 && nonimmediate_operand (dest, mode)
734 && general_operand (src, mode));
736 /* This case is not supported below, and shouldn't be generated. */
737 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
739 /* This case is very very bad after reload, so trap it now. */
740 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
742 /* The general idea is to copy by words, offsetting the source and
743 destination. Normally the least-significant word will be copied
744 first, but for pre-dec operations it's better to copy the
745 most-significant word first. Only one operand can be a pre-dec
746 or post-inc operand.
748 It's also possible that the copy overlaps so that the direction
749 must be reversed. */
750 direction = 1;
/* Record, for whichever operand is a MEM, whether its address has side
   effects, which register it auto-increments, and whether it is
   volatile (volatility is moved to the word-sized pieces below).  */
752 if (MEM_P (dest))
754 mem_operand = XEXP (dest, 0);
755 dest_modifies = side_effects_p (mem_operand);
756 if (auto_inc_p (mem_operand))
757 auto_inc_reg_rtx = XEXP (mem_operand, 0);
758 dest_volatile = MEM_VOLATILE_P (dest);
759 if (dest_volatile)
761 dest = copy_rtx (dest);
762 MEM_VOLATILE_P (dest) = 0;
765 else if (MEM_P (src))
767 mem_operand = XEXP (src, 0);
768 src_modifies = side_effects_p (mem_operand);
769 if (auto_inc_p (mem_operand))
770 auto_inc_reg_rtx = XEXP (mem_operand, 0);
771 src_volatile = MEM_VOLATILE_P (src);
772 if (src_volatile)
774 src = copy_rtx (src);
775 MEM_VOLATILE_P (src) = 0;
778 else
779 mem_operand = NULL_RTX;
/* Choose the copy direction so overlapping register-to-register or
   memory-to-register copies do not clobber words still to be read.  */
781 if (mem_operand == NULL_RTX)
783 if (REG_P (src)
784 && REG_P (dest)
785 && reg_overlap_mentioned_p (dest, src)
786 && REGNO (dest) > REGNO (src))
787 direction = -1;
789 else if (GET_CODE (mem_operand) == PRE_DEC
790 || (GET_CODE (mem_operand) == PLUS
791 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
792 direction = -1;
793 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
795 int regno;
797 gcc_assert (REG_P (dest));
798 regno = REGNO (dest);
800 gcc_assert (refers_to_regno_p (regno, regno + num_words,
801 mem_operand, 0));
803 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
804 direction = -1;
805 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
806 mem_operand, 0))
807 direction = 1;
808 else
809 /* This means something like
810 (set (reg:DI r0) (mem:DI (reg:HI r1)))
811 which we'd need to support by doing the set of the second word
812 last. */
813 gcc_unreachable ();
/* Emit one word-sized move per word, in the chosen direction.  */
816 end = direction < 0 ? -1 : num_words;
817 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
819 rtx w_src, w_dest, insn;
821 if (src_modifies)
822 w_src = gen_rtx_MEM (word_mode, mem_operand)
823 else
824 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
825 if (src_volatile)
826 MEM_VOLATILE_P (w_src) = 1;
827 if (dest_modifies)
828 w_dest = gen_rtx_MEM (word_mode, mem_operand);
829 else
830 w_dest = simplify_gen_subreg (word_mode, dest, mode,
831 i * UNITS_PER_WORD);
832 if (dest_volatile)
833 MEM_VOLATILE_P (w_dest) = 1;
835 /* The simplify_subreg calls must always be able to simplify. */
836 gcc_assert (GET_CODE (w_src) != SUBREG
837 && GET_CODE (w_dest) != SUBREG);
839 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Keep the REG_INC note so the auto-increment side effect is visible
   to later passes.  */
840 if (auto_inc_reg_rtx)
841 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
842 auto_inc_reg_rtx,
843 REG_NOTES (insn));
847 /* Expander for the 'move' patterns. Emit insns to copy a value of
848 mode MODE from SRC to DEST. */
850 void
851 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* A PRE_MODIFY address is handled by emitting the pointer update
   explicitly (with a carry clobber) and rewriting the MEM to use the
   plain base register.  */
853 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
855 rtx pmv = XEXP (dest, 0);
856 rtx dest_reg = XEXP (pmv, 0);
857 rtx dest_mod = XEXP (pmv, 1);
858 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
859 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
861 dest = gen_rtx_MEM (mode, dest_reg);
862 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
864 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
866 rtx pmv = XEXP (src, 0);
867 rtx src_reg = XEXP (pmv, 0);
868 rtx src_mod = XEXP (pmv, 1);
869 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
870 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
872 src = gen_rtx_MEM (mode, src_reg);
873 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
876 /* There are only limited immediate-to-memory move instructions. */
877 if (! reload_in_progress
878 && ! reload_completed
879 && MEM_P (dest)
880 && (! CONST_INT_P (XEXP (dest, 0))
881 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
882 && ! xstormy16_below100_operand (dest, mode)
883 && ! REG_P (src)
884 && GET_CODE (src) != SUBREG)
885 src = copy_to_mode_reg (mode, src);
887 /* Don't emit something we would immediately split. */
888 if (reload_completed
889 && mode != HImode && mode != QImode)
891 xstormy16_split_move (mode, dest, src);
892 return;
895 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
898 /* Stack Layout:
900 The stack is laid out as follows:
902 SP->
903 FP-> Local variables
904 Register save area (up to 4 words)
905 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
907 AP-> Return address (two words)
908 9th procedure parameter word
909 10th procedure parameter word
911 last procedure parameter word
913 The frame pointer location is tuned to make it most likely that all
914 parameters and local variables can be accessed using a load-indexed
915 instruction. */
917 /* A structure to describe the layout. */
918 struct xstormy16_stack_layout
920 /* Size of the topmost three items on the stack. */
/* Bytes of local variables (from get_frame_size).  */
921 int locals_size;
/* Bytes of call-saved registers pushed (see REG_NEEDS_SAVE).  */
922 int register_save_size;
/* Bytes used to spill the argument registers in stdarg functions.  */
923 int stdarg_save_size;
924 /* Sum of the above items. */
925 int frame_size;
926 /* Various offsets. */
927 int first_local_minus_ap;
928 int sp_minus_fp;
929 int fp_minus_ap;
/* A register must be saved when it is live across calls and not
   call-used; in addition, when IFUN is nonzero (callers pass
   xstormy16_interrupt_function_p), call-used non-fixed registers other
   than CARRY_REGNUM are saved when live or when the function is not a
   leaf.  */
932 /* Does REGNO need to be saved? */
933 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
934 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
935 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
936 && (REGNUM != CARRY_REGNUM) \
937 && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
939 /* Compute the stack layout. */
/* Fill in and return an xstormy16_stack_layout describing the current
   function's frame: sizes of locals, saved registers and the stdarg
   spill area, plus the derived FP/SP/AP offsets.  */
941 struct xstormy16_stack_layout
942 xstormy16_compute_stack_layout (void)
944 struct xstormy16_stack_layout layout;
945 int regno;
946 const int ifun = xstormy16_interrupt_function_p ();
948 layout.locals_size = get_frame_size ();
/* One word per register that the prologue must push.  */
950 layout.register_save_size = 0;
951 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
952 if (REG_NEEDS_SAVE (regno, ifun))
953 layout.register_save_size += UNITS_PER_WORD;
955 if (cfun->stdarg)
956 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
957 else
958 layout.stdarg_save_size = 0;
960 layout.frame_size = (layout.locals_size
961 + layout.register_save_size
962 + layout.stdarg_save_size);
/* Place the frame pointer so that frame accesses stay within the
   2048-byte displacement range where possible.  */
964 if (crtl->args.size <= 2048 && crtl->args.size != -1)
966 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
967 + crtl->args.size <= 2048)
968 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
969 else
970 layout.fp_minus_ap = 2048 - crtl->args.size;
972 else
973 layout.fp_minus_ap = (layout.stdarg_save_size
974 + layout.register_save_size
975 - INCOMING_FRAME_SP_OFFSET);
976 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
977 - layout.fp_minus_ap);
978 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
979 return layout;
982 /* Worker function for TARGET_CAN_ELIMINATE. */
984 static bool
985 xstormy16_can_eliminate (const int from, const int to)
987 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
988 ? ! frame_pointer_needed
989 : true);
992 /* Determine how all the special registers get eliminated. */
995 xstormy16_initial_elimination_offset (int from, int to)
997 struct xstormy16_stack_layout layout;
998 int result;
1000 layout = xstormy16_compute_stack_layout ();
1002 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1003 result = layout.sp_minus_fp - layout.locals_size;
1004 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1005 result = - layout.locals_size;
1006 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1007 result = - layout.fp_minus_ap;
1008 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1009 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1010 else
1011 gcc_unreachable ();
1013 return result;
1016 static rtx
1017 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1019 rtx set, clobber, insn;
1021 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1022 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1023 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1024 return insn;
1027 /* Called after register allocation to add any instructions needed for
1028 the prologue. Using a prologue insn is favored compared to putting
1029 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1030 since it allows the scheduler to intermix instructions with the
1031 saves of the caller saved registers. In some cases, it might be
1032 necessary to emit a barrier instruction as the last insn to prevent
1033 such scheduling.
1035 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1036 so that the debug info generation code can handle them properly. */
1038 void
1039 xstormy16_expand_prologue (void)
1041 struct xstormy16_stack_layout layout;
1042 int regno;
1043 rtx insn;
1044 rtx mem_push_rtx;
1045 const int ifun = xstormy16_interrupt_function_p ();
/* A (mem:HI (post_inc SP)) used as the destination of every push.  */
1047 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1048 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1050 layout = xstormy16_compute_stack_layout ();
1052 if (layout.locals_size >= 32768)
1053 error ("local variable memory requirements exceed capacity");
1055 if (flag_stack_usage_info)
1056 current_function_static_stack_size = layout.frame_size;
1058 /* Save the argument registers if necessary. */
/* Each push gets a REG_FRAME_RELATED_EXPR note spelling out both the
   store and the SP adjustment for the dwarf unwinder.  */
1059 if (layout.stdarg_save_size)
1060 for (regno = FIRST_ARGUMENT_REGISTER;
1061 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1062 regno++)
1064 rtx dwarf;
1065 rtx reg = gen_rtx_REG (HImode, regno);
1067 insn = emit_move_insn (mem_push_rtx, reg);
1068 RTX_FRAME_RELATED_P (insn) = 1;
1070 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1072 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1073 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1074 reg);
1075 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1076 plus_constant (Pmode,
1077 stack_pointer_rtx,
1078 GET_MODE_SIZE (Pmode)));
1079 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf)
1080 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1081 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1084 /* Push each of the registers to save. */
1085 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1086 if (REG_NEEDS_SAVE (regno, ifun))
1088 rtx dwarf;
1089 rtx reg = gen_rtx_REG (HImode, regno);
1091 insn = emit_move_insn (mem_push_rtx, reg);
1092 RTX_FRAME_RELATED_P (insn) = 1;
1094 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1096 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1097 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1098 reg);
1099 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1100 plus_constant (Pmode,
1101 stack_pointer_rtx,
1102 GET_MODE_SIZE (Pmode)));
1103 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1104 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1105 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1108 /* It's just possible that the SP here might be what we need for
1109 the new FP... */
1110 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1112 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1113 RTX_FRAME_RELATED_P (insn) = 1;
1116 /* Allocate space for local variables. */
1117 if (layout.locals_size)
1119 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1120 GEN_INT (layout.locals_size));
1121 RTX_FRAME_RELATED_P (insn) = 1;
1124 /* Set up the frame pointer, if required. */
1125 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1127 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1128 RTX_FRAME_RELATED_P (insn) = 1;
/* Back the frame pointer off from the current SP to its final spot.  */
1130 if (layout.sp_minus_fp)
1132 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1133 hard_frame_pointer_rtx,
1134 GEN_INT (- layout.sp_minus_fp));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1140 /* Do we need an epilogue at all? */
/* Return nonzero if the current function can return with no epilogue
   code at all: only decidable after reload, and only when the frame
   is empty and this is not an interrupt handler (interrupt handlers
   use a different return sequence -- see xstormy16_expand_epilogue). */
1143 direct_return (void)
1145 return (reload_completed
1146 && xstormy16_compute_stack_layout ().frame_size == 0
1147 && ! xstormy16_interrupt_function_p ());
1150 /* Called after register allocation to add any instructions needed for
1151 the epilogue. Using an epilogue insn is favored compared to putting
1152 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1153 since it allows the scheduler to intermix instructions with the
1154 saves of the caller saved registers. In some cases, it might be
1155 necessary to emit a barrier instruction as the last insn to prevent
1156 such scheduling. */
1158 void
1159 xstormy16_expand_epilogue (void)
1161 struct xstormy16_stack_layout layout;
1162 rtx mem_pop_rtx;
1163 int regno;
1164 const int ifun = xstormy16_interrupt_function_p ();
/* Build (mem:HI (pre_dec:HI SP)): each load through this pops one
   word, mirroring the post-increment pushes done by the prologue. */
1166 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1167 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1169 layout = xstormy16_compute_stack_layout ();
1171 /* Pop the stack for the locals. */
1172 if (layout.locals_size)
/* When FP still sits exactly locals_size below SP, restoring SP from
   FP is cheaper than an explicit subtract. */
1174 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1175 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1176 else
1177 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1178 GEN_INT (- layout.locals_size));
1181 /* Restore any call-saved registers. */
/* Walk the registers downward -- the reverse of the prologue's
   ascending push order. */
1182 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1183 if (REG_NEEDS_SAVE (regno, ifun))
1184 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1186 /* Pop the stack for the stdarg save area. */
1187 if (layout.stdarg_save_size)
1188 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1189 GEN_INT (- layout.stdarg_save_size));
1191 /* Return. */
1192 if (ifun)
1193 emit_jump_insn (gen_return_internal_interrupt ());
1194 else
1195 emit_jump_insn (gen_return_internal ());
/* Return nonzero if REGNO must be considered live at the start of the
   epilogue: a call-used register that the prologue nevertheless saved
   (REG_NEEDS_SAVE) is reloaded by the epilogue, so it is "used" there.
   Only meaningful after reload. */
1199 xstormy16_epilogue_uses (int regno)
1201 if (reload_completed && call_used_regs[regno])
1203 const int ifun = xstormy16_interrupt_function_p ();
1204 return REG_NEEDS_SAVE (regno, ifun);
1206 return 0;
/* Profiling instrumentation is not implemented for this port; report
   that via sorry() rather than silently miscompiling with -p. */
1209 void
1210 xstormy16_function_profiler (void)
1212 sorry ("function_profiler support");
1215 /* Update CUM to advance past an argument in the argument list. The
1216 values MODE, TYPE and NAMED describe that argument. Once this is
1217 done, the variable CUM is suitable for analyzing the *following*
1218 argument with `TARGET_FUNCTION_ARG', etc.
1220 This function need not do anything if the argument in question was
1221 passed on the stack. The compiler knows how to track the amount of
1222 stack space used for arguments without any special help. However,
1223 it makes life easier for xstormy16_build_va_list if it does update
1224 the word count. */
1226 static void
1227 xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1228 const_tree type, bool named ATTRIBUTE_UNUSED)
/* *CUM counts the argument words already assigned to registers. */
1230 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1232 /* If an argument would otherwise be passed partially in registers,
1233 and partially on the stack, the whole of it is passed on the
1234 stack. */
1235 if (*cum < NUM_ARGUMENT_REGISTERS
1236 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1237 *cum = NUM_ARGUMENT_REGISTERS;
1239 *cum += XSTORMY16_WORD_SIZE (type, mode);
/* Worker for TARGET_FUNCTION_ARG: return the register in which to pass
   the next argument, or NULL_RTX when it is passed on the stack.  The
   VOIDmode end-of-arguments marker yields const0_rtx. */
1242 static rtx
1243 xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1244 const_tree type, bool named ATTRIBUTE_UNUSED)
1246 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1248 if (mode == VOIDmode)
1249 return const0_rtx;
/* Arguments that must live in memory, or that would not fit entirely
   in the remaining argument registers, go wholly on the stack
   (matching the rule in xstormy16_function_arg_advance). */
1250 if (targetm.calls.must_pass_in_stack (mode, type)
1251 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1252 return NULL_RTX;
1253 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1256 /* Build the va_list type.
1258 For this chip, va_list is a record containing a counter and a pointer.
1259 The counter is of type 'int' and indicates how many bytes
1260 have been used to date. The pointer indicates the stack position
1261 for arguments that have not been passed in registers.
1262 To keep the layout nice, the pointer is first in the structure. */
1264 static tree
1265 xstormy16_build_builtin_va_list (void)
1267 tree f_1, f_2, record, type_decl;
1269 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1270 type_decl = build_decl (BUILTINS_LOCATION,
1271 TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Field order (base first, then count) is relied upon by va_start and
   va_arg below, which walk TYPE_FIELDS/DECL_CHAIN positionally. */
1273 f_1 = build_decl (BUILTINS_LOCATION,
1274 FIELD_DECL, get_identifier ("base"),
1275 ptr_type_node);
1276 f_2 = build_decl (BUILTINS_LOCATION,
1277 FIELD_DECL, get_identifier ("count"),
1278 unsigned_type_node);
1280 DECL_FIELD_CONTEXT (f_1) = record;
1281 DECL_FIELD_CONTEXT (f_2) = record;
1283 TYPE_STUB_DECL (record) = type_decl;
1284 TYPE_NAME (record) = type_decl;
1285 TYPE_FIELDS (record) = f_1;
1286 DECL_CHAIN (f_1) = f_2;
1288 layout_type (record);
1290 return record;
1293 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1294 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1295 variable to initialize. NEXTARG is the machine independent notion of the
1296 'next' argument after the variable arguments. */
1298 static void
1299 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1301 tree f_base, f_count;
1302 tree base, count;
1303 tree t,u;
1305 if (xstormy16_interrupt_function_p ())
1306 error ("cannot use va_start in interrupt function");
1308 f_base = TYPE_FIELDS (va_list_type_node);
1309 f_count = DECL_CHAIN (f_base);
1311 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1312 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1313 NULL_TREE);
/* base = incoming-argument pointer, biased back by
   INCOMING_FRAME_SP_OFFSET. */
1315 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1316 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1317 u = fold_convert (TREE_TYPE (count), u);
1318 t = fold_build_pointer_plus (t, u);
1319 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1320 TREE_SIDE_EFFECTS (t) = 1;
1321 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes consumed by the named register arguments;
   crtl->args.info holds the word count left by the arg-advance hook. */
1323 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1324 build_int_cst (NULL_TREE,
1325 crtl->args.info * UNITS_PER_WORD));
1326 TREE_SIDE_EFFECTS (t) = 1;
1327 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1330 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1331 of type va_list as a tree, TYPE is the type passed to va_arg.
1332 Note: This algorithm is documented in stormy-abi. */
1334 static tree
1335 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1336 gimple_seq *post_p ATTRIBUTE_UNUSED)
1338 tree f_base, f_count;
1339 tree base, count;
1340 tree count_tmp, addr, t;
1341 tree lab_gotaddr, lab_fromstack;
1342 int size, size_of_reg_args, must_stack;
1343 tree size_tree;
1345 f_base = TYPE_FIELDS (va_list_type_node);
1346 f_count = DECL_CHAIN (f_base);
1348 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1349 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1350 NULL_TREE);
1352 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
/* Word-align the argument size, matching the calling convention. */
1353 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1354 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1356 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1358 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1359 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1360 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1361 addr = create_tmp_var (ptr_type_node, NULL);
/* Fast path: if count + size still fits in the register-save area,
   the argument lives at base + count; otherwise branch to the
   from-stack code below. */
1363 if (!must_stack)
1365 tree r;
1367 t = fold_convert (TREE_TYPE (count), size_tree);
1368 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1369 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1370 t = build2 (GT_EXPR, boolean_type_node, t, r);
1371 t = build3 (COND_EXPR, void_type_node, t,
1372 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1373 NULL_TREE);
1374 gimplify_and_add (t, pre_p);
1376 t = fold_build_pointer_plus (base, count_tmp);
1377 gimplify_assign (addr, t, pre_p);
1379 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1380 gimplify_and_add (t, pre_p);
1382 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1383 gimplify_and_add (t, pre_p);
1386 /* Arguments larger than a word might need to skip over some
1387 registers, since arguments are either passed entirely in
1388 registers or entirely on the stack. */
1389 size = PUSH_ROUNDING (int_size_in_bytes (type));
1390 if (size > 2 || size < 0 || must_stack)
1392 tree r, u;
/* If count is still inside the register area, bump it to the end of
   that area so the walk continues on the stack. */
1394 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1395 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1397 t = fold_convert (TREE_TYPE (count), r);
1398 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1399 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1400 gimplify_and_add (t, pre_p);
/* Stack arguments sit below BASE: compute
   addr = base - (count - (reg-area size + INCOMING_FRAME_SP_OFFSET)
                  + size)
   as documented in stormy-abi. */
1403 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1404 + INCOMING_FRAME_SP_OFFSET);
1405 t = fold_convert (TREE_TYPE (count), t);
1406 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1407 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1408 fold_convert (TREE_TYPE (count), size_tree));
1409 t = fold_convert (TREE_TYPE (t), fold (t));
1410 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1411 t = fold_build_pointer_plus (base, t);
1412 gimplify_assign (addr, t, pre_p);
1414 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1415 gimplify_and_add (t, pre_p);
/* Advance the stored count past this argument. */
1417 t = fold_convert (TREE_TYPE (count), size_tree);
1418 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1419 gimplify_assign (count, t, pre_p);
1421 addr = fold_convert (build_pointer_type (type), addr);
1422 return build_va_arg_indirect_ref (addr);
1425 /* Worker function for TARGET_TRAMPOLINE_INIT. */
/* The trampoline is written as four 16-bit words through REG_ADDR_MEM,
   advancing REG_ADDR by 2 after each store:
     word 0: 0x3130 | STATIC_CHAIN_REGNUM   -- instruction word
     word 1: the static chain value
     word 2: (low byte of FNADDR) | 0x0200  -- instruction word
     word 3: FNADDR >> 8
   NOTE(review): the opcode constants (0x3130, 0x0200) are taken on
   faith; confirm against the xstormy16 instruction encoding before
   changing them. */
1427 static void
1428 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1430 rtx temp = gen_reg_rtx (HImode);
1431 rtx reg_fnaddr = gen_reg_rtx (HImode);
1432 rtx reg_addr, reg_addr_mem;
1434 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1435 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
1437 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1438 emit_move_insn (reg_addr_mem, temp);
1439 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1440 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1442 emit_move_insn (temp, static_chain);
1443 emit_move_insn (reg_addr_mem, temp);
1444 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1445 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1447 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1448 emit_move_insn (temp, reg_fnaddr);
1449 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1450 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1451 emit_move_insn (reg_addr_mem, temp);
1452 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1453 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
1455 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1456 emit_move_insn (reg_addr_mem, reg_fnaddr);
1459 /* Worker function for TARGET_FUNCTION_VALUE. */
/* Return values always come back in RETURN_VALUE_REGNUM; PROMOTE_MODE
   widens sub-word modes before the register is chosen. */
1461 static rtx
1462 xstormy16_function_value (const_tree valtype,
1463 const_tree func ATTRIBUTE_UNUSED,
1464 bool outgoing ATTRIBUTE_UNUSED)
1466 enum machine_mode mode;
1467 mode = TYPE_MODE (valtype);
1468 PROMOTE_MODE (mode, 0, valtype);
1469 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1472 /* Worker function for TARGET_LIBCALL_VALUE. */
/* Libcalls return in the same register as ordinary functions, but with
   no promotion (MODE is already final). */
1474 static rtx
1475 xstormy16_libcall_value (enum machine_mode mode,
1476 const_rtx fun ATTRIBUTE_UNUSED)
1478 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1481 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only RETURN_VALUE_REGNUM ever carries a function result. */
1483 static bool
1484 xstormy16_function_value_regno_p (const unsigned int regno)
1486 return (regno == RETURN_VALUE_REGNUM);
1489 /* A C compound statement that outputs the assembler code for a thunk function,
1490 used to implement C++ virtual function calls with multiple inheritance. The
1491 thunk acts as a wrapper around a virtual function, adjusting the implicit
1492 object parameter before handing control off to the real function.
1494 First, emit code to add the integer DELTA to the location that contains the
1495 incoming first argument. Assume that this argument contains a pointer, and
1496 is the one used to pass the `this' pointer in C++. This is the incoming
1497 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1498 addition must preserve the values of all other incoming arguments.
1500 After the addition, emit code to jump to FUNCTION, which is a
1501 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1502 the return address. Hence returning from FUNCTION will return to whoever
1503 called the current `thunk'.
1505 The effect must be as if @var{function} had been called directly
1506 with the adjusted first argument. This macro is responsible for
1507 emitting all of the code for a thunk function;
1508 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1509 not invoked.
1511 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1512 extracted from it.) It might possibly be useful on some targets, but
1513 probably not. */
1515 static void
1516 xstormy16_asm_output_mi_thunk (FILE *file,
1517 tree thunk_fndecl ATTRIBUTE_UNUSED,
1518 HOST_WIDE_INT delta,
1519 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1520 tree function)
1522 int regnum = FIRST_ARGUMENT_REGISTER;
1524 /* There might be a hidden first argument for a returned structure. */
1525 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1526 regnum += 1;
/* Adjust `this' in its argument register (delta truncated to 16 bits),
   then tail-jump -- not call -- to the real function via jmpf. */
1528 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1529 fputs ("\tjmpf ", file);
1530 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1531 putc ('\n', file);
1534 /* The purpose of this function is to override the default behavior of
1535 BSS objects. Normally, they go into .bss or .sbss via ".common"
1536 directives, but we need to override that and put them in
1537 .bss_below100. We can't just use a section override (like we do
1538 for .data_below100), because that makes them initialized rather
1539 than uninitialized. */
1541 void
1542 xstormy16_asm_output_aligned_common (FILE *stream,
1543 tree decl,
1544 const char *name,
1545 int size,
1546 int align,
1547 int global)
1549 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1550 rtx symbol;
/* Below-100 objects (flag set by xstormy16_encode_section_info) are
   laid out by hand in the .bss_below100 section. */
1552 if (mem != NULL_RTX
1553 && MEM_P (mem)
1554 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1555 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1557 const char *name2;
1558 int p2align = 0;
1560 switch_to_section (bss100_section);
/* Convert ALIGN (bits) to the log2 byte alignment .p2align wants. */
1562 while (align > 8)
1564 align /= 2;
1565 p2align ++;
1568 name2 = default_strip_name_encoding (name);
1569 if (global)
1570 fprintf (stream, "\t.globl\t%s\n", name2);
1571 if (p2align)
1572 fprintf (stream, "\t.p2align %d\n", p2align);
1573 fprintf (stream, "\t.type\t%s, @object\n", name2);
1574 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1575 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1576 return;
/* Everything else: conventional .local/.comm emission. */
1579 if (!global)
1581 fprintf (stream, "\t.local\t");
1582 assemble_name (stream, name);
1583 fprintf (stream, "\n");
1585 fprintf (stream, "\t.comm\t");
1586 assemble_name (stream, name);
1587 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1590 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Create the .bss_below100 section used by
   xstormy16_asm_output_aligned_common for "below100" objects. */
1592 static void
1593 xstormy16_asm_init_sections (void)
1595 bss100_section
1596 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1597 output_section_asm_op,
1598 "\t.section \".bss_below100\",\"aw\",@nobits");
1601 /* Mark symbols with the "below100" attribute so that we can use the
1602 special addressing modes for them. */
1604 static void
1605 xstormy16_encode_section_info (tree decl, rtx r, int first)
1607 default_encode_section_info (decl, r, first);
/* Both attribute spellings from xstormy16_attribute_table are
   honored; the flag is later tested by asm_output_aligned_common. */
1609 if (TREE_CODE (decl) == VAR_DECL
1610 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1611 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1613 rtx symbol = XEXP (r, 0);
1615 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1616 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1620 #undef TARGET_ASM_CONSTRUCTOR
1621 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1622 #undef TARGET_ASM_DESTRUCTOR
1623 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1625 /* Output constructors and destructors. Just like
1626 default_named_section_asm_out_* but don't set the sections writable. */
1628 static void
1629 xstormy16_asm_out_destructor (rtx symbol, int priority)
1631 const char *section = ".dtors";
1632 char buf[16];
1634 /* ??? This only works reliably with the GNU linker. */
1635 if (priority != DEFAULT_INIT_PRIORITY)
1637 sprintf (buf, ".dtors.%.5u",
1638 /* Invert the numbering so the linker puts us in the proper
1639 order; constructors are run from right to left, and the
1640 linker sorts in increasing order. */
1641 MAX_INIT_PRIORITY - priority);
1642 section = buf;
/* Emit a pointer-sized, pointer-aligned entry for SYMBOL. */
1645 switch_to_section (get_section (section, 0, NULL));
1646 assemble_align (POINTER_SIZE);
1647 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* As xstormy16_asm_out_destructor, but emitting into .ctors (again
   read-only, unlike default_named_section_asm_out_constructor). */
1650 static void
1651 xstormy16_asm_out_constructor (rtx symbol, int priority)
1653 const char *section = ".ctors";
1654 char buf[16];
1656 /* ??? This only works reliably with the GNU linker. */
1657 if (priority != DEFAULT_INIT_PRIORITY)
1659 sprintf (buf, ".ctors.%.5u",
1660 /* Invert the numbering so the linker puts us in the proper
1661 order; constructors are run from right to left, and the
1662 linker sorts in increasing order. */
1663 MAX_INIT_PRIORITY - priority);
1664 section = buf;
1667 switch_to_section (get_section (section, 0, NULL));
1668 assemble_align (POINTER_SIZE);
1669 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1672 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1674 Print a memory address as an operand to reference that memory location. */
1676 static void
1677 xstormy16_print_operand_address (FILE *file, rtx address)
1679 HOST_WIDE_INT offset;
1680 int pre_dec, post_inc;
1682 /* There are a few easy cases. */
/* A bare constant address is printed as a 16-bit value. */
1683 if (CONST_INT_P (address))
1685 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1686 return;
1689 if (CONSTANT_P (address) || LABEL_P (address))
1691 output_addr_const (file, address);
1692 return;
1695 /* Otherwise, it's hopefully something of the form
1696 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1697 if (GET_CODE (address) == PLUS)
1699 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1700 offset = INTVAL (XEXP (address, 1));
1701 address = XEXP (address, 0);
1703 else
1704 offset = 0;
/* Auto-modification prints as a "--" prefix (pre-decrement) or a
   "++" suffix (post-increment) on the base register. */
1706 pre_dec = (GET_CODE (address) == PRE_DEC);
1707 post_inc = (GET_CODE (address) == POST_INC);
1708 if (pre_dec || post_inc)
1709 address = XEXP (address, 0);
1711 gcc_assert (REG_P (address));
1713 fputc ('(', file);
1714 if (pre_dec)
1715 fputs ("--", file);
1716 fputs (reg_names [REGNO (address)], file);
1717 if (post_inc)
1718 fputs ("++", file);
1719 if (offset != 0)
1720 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1721 fputc (')', file);
1724 /* Worker function for TARGET_PRINT_OPERAND.
1726 Print an operand to an assembler instruction. */
1728 static void
1729 xstormy16_print_operand (FILE *file, rtx x, int code)
1731 switch (code)
1733 case 'B':
1734 /* There is either one bit set, or one bit clear, in X.
1735 Print it preceded by '#'. */
1737 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1738 HOST_WIDE_INT xx = 1;
1739 HOST_WIDE_INT l;
1741 if (CONST_INT_P (x))
1742 xx = INTVAL (x);
1743 else
1744 output_operand_lossage ("'B' operand is not constant");
1746 /* GCC sign-extends masks with the MSB set, so we have to
1747 detect all the cases that differ only in sign extension
1748 beyond the bits we care about. Normally, the predicates
1749 and constraints ensure that we have the right values. This
1750 works correctly for valid masks. */
/* bits_set[xx & 7] <= 1 means the low bits look like a one-bit-set
   mask; otherwise treat it as a one-bit-clear mask. */
1751 if (bits_set[xx & 7] <= 1)
1753 /* Remove sign extension bits. */
1754 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1755 xx &= 0xff;
1756 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1757 xx &= 0xffff;
1758 l = exact_log2 (xx);
1760 else
1762 /* Add sign extension bits. */
1763 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1764 xx |= ~(HOST_WIDE_INT)0xff;
1765 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1766 xx |= ~(HOST_WIDE_INT)0xffff;
1767 l = exact_log2 (~xx);
1770 if (l == -1)
1771 output_operand_lossage ("'B' operand has multiple bits set");
1773 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1774 return;
1777 case 'C':
1778 /* Print the symbol without a surrounding @fptr(). */
1779 if (GET_CODE (x) == SYMBOL_REF)
1780 assemble_name (file, XSTR (x, 0));
1781 else if (LABEL_P (x))
1782 output_asm_label (x);
1783 else
1784 xstormy16_print_operand_address (file, x);
1785 return;
1787 case 'o':
1788 case 'O':
1789 /* Print the immediate operand less one, preceded by '#'.
1790 For 'O', negate it first. */
1792 HOST_WIDE_INT xx = 0;
1794 if (CONST_INT_P (x))
1795 xx = INTVAL (x);
1796 else
1797 output_operand_lossage ("'o' operand is not constant");
1799 if (code == 'O')
1800 xx = -xx;
1802 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1803 return;
1806 case 'b':
1807 /* Print the shift mask for bp/bn. */
1809 HOST_WIDE_INT xx = 1;
1810 HOST_WIDE_INT l;
1812 if (CONST_INT_P (x))
1813 xx = INTVAL (x);
1814 else
/* NOTE(review): this message says 'B' but we are in the 'b' case --
   looks like a copy-and-paste slip; confirm before changing the
   user-visible text. */
1815 output_operand_lossage ("'B' operand is not constant");
/* Invert the bit number within a byte (0..7); presumably bp/bn count
   bits from the opposite end -- confirm against the ISA manual. */
1817 l = 7 - xx;
1819 fputs (IMMEDIATE_PREFIX, file);
1820 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1821 return;
1824 case 0:
1825 /* Handled below. */
1826 break;
1828 default:
1829 output_operand_lossage ("xstormy16_print_operand: unknown code");
1830 return;
/* No modifier letter: print the operand itself. */
1833 switch (GET_CODE (x))
1835 case REG:
1836 fputs (reg_names [REGNO (x)], file);
1837 break;
1839 case MEM:
1840 xstormy16_print_operand_address (file, XEXP (x, 0));
1841 break;
1843 default:
1844 /* Some kind of constant or label; an immediate operand,
1845 so prefix it with '#' for the assembler. */
1846 fputs (IMMEDIATE_PREFIX, file);
1847 output_addr_const (file, x);
1848 break;
1851 return;
1854 /* Expander for the `casesi' pattern.
1855 INDEX is the index of the switch statement.
1856 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1857 to the first table entry.
1858 RANGE is the number of table entries.
1859 TABLE is an ADDR_VEC that is the jump table.
1860 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1861 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1863 void
1864 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1865 rtx table, rtx default_label)
1867 HOST_WIDE_INT range_i = INTVAL (range);
1868 rtx int_index;
1870 /* This code uses 'br', so it can deal only with tables of size up to
1871 8192 entries. */
1872 if (range_i >= 8192)
1873 sorry ("switch statement of size %lu entries too large",
1874 (unsigned long) range_i);
/* Bias the index, bounds-check it (unsigned compare handles both ends
   at once), then branch into the table. */
1876 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1877 OPTAB_LIB_WIDEN);
1878 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1879 default_label);
1880 int_index = gen_lowpart_common (HImode, index);
/* Scale by 4, the size of each jump-table entry emitted by
   xstormy16_output_addr_vec. */
1881 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1882 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1885 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1886 instructions, without label or alignment or any other special
1887 constructs. We know that the previous instruction will be the
1888 `tablejump_pcrel' output above.
1890 TODO: it might be nice to output 'br' instructions if they could
1891 all reach. */
1893 void
1894 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1896 int vlen, idx;
1898 switch_to_section (current_function_section ());
/* One "jmpf <label>" per table entry. */
1900 vlen = XVECLEN (table, 0);
1901 for (idx = 0; idx < vlen; idx++)
1903 fputs ("\tjmpf ", file);
1904 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1905 fputc ('\n', file);
1909 /* Expander for the `call' patterns.
1910 RETVAL is the RTL for the return register or NULL for void functions.
1911 DEST is the function to call, expressed as a MEM.
1912 COUNTER is ignored. */
1914 void
1915 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1917 rtx call, temp;
1918 enum machine_mode mode;
1920 gcc_assert (MEM_P (dest));
1921 dest = XEXP (dest, 0);
/* The address must be a constant or a register for the call pattern. */
1923 if (! CONSTANT_P (dest) && ! REG_P (dest))
1924 dest = force_reg (Pmode, dest);
1926 if (retval == NULL)
1927 mode = VOIDmode;
1928 else
1929 mode = GET_MODE (retval);
1931 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1932 counter);
1933 if (retval)
1934 call = gen_rtx_SET (VOIDmode, retval, call);
/* The call patterns carry a (use ...) second element; indirect calls
   get a fresh pseudo holding zero, direct calls const0_rtx.
   NOTE(review): the role of this operand is defined by the call
   patterns in stormy16.md -- confirm there before changing. */
1936 if (! CONSTANT_P (dest))
1938 temp = gen_reg_rtx (HImode);
1939 emit_move_insn (temp, const0_rtx);
1941 else
1942 temp = const0_rtx;
1944 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1945 gen_rtx_USE (VOIDmode, temp)));
1946 emit_call_insn (call);
1949 /* Expanders for multiword computational operations. */
1951 /* Expander for arithmetic operations; emit insns to compute
1953 (set DEST (CODE:MODE SRC0 SRC1))
1955 When CODE is COMPARE, a branch template is generated
1956 (this saves duplicating code in xstormy16_split_cbranch). */
1958 void
1959 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1960 rtx dest, rtx src0, rtx src1)
1962 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1963 int i;
1964 int firstloop = 1;
/* Negation reuses the MINUS machinery below: overwrite SRC0 with zero
   so the word loop computes 0 - SRC1. */
1966 if (code == NEG)
1967 emit_move_insn (src0, const0_rtx);
/* Process one HImode word at a time, from least to most significant. */
1969 for (i = 0; i < num_words; i++)
1971 rtx w_src0, w_src1, w_dest;
1972 rtx insn;
1974 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1975 i * UNITS_PER_WORD);
1976 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1977 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1979 switch (code)
1981 case PLUS:
/* Adding zero in the first word is a no-op; skip it. */
1982 if (firstloop
1983 && CONST_INT_P (w_src1)
1984 && INTVAL (w_src1) == 0)
1985 continue;
/* Per the addchi4/addchi5 patterns: the first word's add sets the
   carry, later words add with carry in. */
1987 if (firstloop)
1988 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1989 else
1990 insn = gen_addchi5 (w_dest, w_src0, w_src1);
1991 break;
1993 case NEG:
1994 case MINUS:
1995 case COMPARE:
/* For the final word of a COMPARE, emit the branch template itself:
   a subtract-with-borrow feeding an EQ test, with pc_rtx
   placeholders to be filled in by xstormy16_split_cbranch. */
1996 if (code == COMPARE && i == num_words - 1)
1998 rtx branch, sub, clobber, sub_1;
2000 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2001 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2002 sub = gen_rtx_SET (VOIDmode, w_dest,
2003 gen_rtx_MINUS (HImode, sub_1, w_src1));
2004 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2005 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2006 gen_rtx_IF_THEN_ELSE (VOIDmode,
2007 gen_rtx_EQ (HImode,
2008 sub_1,
2009 w_src1),
2010 pc_rtx,
2011 pc_rtx));
2012 insn = gen_rtx_PARALLEL (VOIDmode,
2013 gen_rtvec (3, branch, sub, clobber));
2015 else if (firstloop
2016 && code != COMPARE
2017 && CONST_INT_P (w_src1)
2018 && INTVAL (w_src1) == 0)
2019 continue;
2020 else if (firstloop)
2021 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2022 else
2023 insn = gen_subchi5 (w_dest, w_src0, w_src1);
2024 break;
2026 case IOR:
2027 case XOR:
2028 case AND:
/* Skip identity words: OR/XOR with 0, AND with -1. */
2029 if (CONST_INT_P (w_src1)
2030 && INTVAL (w_src1) == -(code == AND))
2031 continue;
2033 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2034 w_src0, w_src1));
2035 break;
2037 case NOT:
2038 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2039 break;
2041 default:
2042 gcc_unreachable ();
2045 firstloop = 0;
2046 emit (insn);
2049 /* If we emit nothing, try_split() will think we failed. So emit
2050 something that does nothing and can be optimized away. */
2051 if (firstloop)
2052 emit (gen_nop ());
2055 /* The shift operations are split at output time for constant values;
2056 variable-width shifts get handed off to a library routine.
2058 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2059 SIZE_R will be a CONST_INT, X will be a hard register. */
2061 const char *
2062 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2063 rtx x, rtx size_r, rtx temp)
2065 HOST_WIDE_INT size;
2066 const char *r0, *r1, *rt;
2067 static char r[64];
2069 gcc_assert (CONST_INT_P (size_r)
2070 && REG_P (x)
2071 && mode == SImode);
/* Only the low 5 bits of the count matter for a 32-bit value. */
2073 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2075 if (size == 0)
2076 return "";
/* R0 names the low half of the SImode value, R1 the high half. */
2078 r0 = reg_names [REGNO (x)];
2079 r1 = reg_names [REGNO (x) + 1];
2081 /* For shifts of size 1, we can use the rotate instructions. */
2082 if (size == 1)
2084 switch (code)
2086 case ASHIFT:
2087 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2088 break;
2089 case ASHIFTRT:
2090 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2091 break;
2092 case LSHIFTRT:
2093 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2094 break;
2095 default:
2096 gcc_unreachable ();
2098 return r;
2101 /* For large shifts, there are easy special cases. */
/* A shift by exactly 16 is a word move plus a fill of the vacated word. */
2102 if (size == 16)
2104 switch (code)
2106 case ASHIFT:
2107 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2108 break;
2109 case ASHIFTRT:
2110 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2111 break;
2112 case LSHIFTRT:
2113 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2114 break;
2115 default:
2116 gcc_unreachable ();
2118 return r;
/* Shifts beyond 16: word move, fill, then shift the remainder. */
2120 if (size > 16)
2122 switch (code)
2124 case ASHIFT:
2125 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2126 r1, r0, r0, r1, (int) size - 16);
2127 break;
2128 case ASHIFTRT:
2129 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2130 r0, r1, r1, r0, (int) size - 16);
2131 break;
2132 case LSHIFTRT:
2133 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2134 r0, r1, r1, r0, (int) size - 16);
2135 break;
2136 default:
2137 gcc_unreachable ();
2139 return r;
2142 /* For the rest, we have to do more work. In particular, we
2143 need a temporary. */
/* Shift both halves, recover the bits crossing the word boundary in
   TEMP, and OR them back in. */
2144 rt = reg_names [REGNO (temp)];
2145 switch (code)
2147 case ASHIFT:
2148 sprintf (r,
2149 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2150 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2151 r1, rt);
2152 break;
2153 case ASHIFTRT:
2154 sprintf (r,
2155 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2156 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2157 r0, rt);
2158 break;
2159 case LSHIFTRT:
2160 sprintf (r,
2161 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2162 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2163 r0, rt);
2164 break;
2165 default:
2166 gcc_unreachable ();
2168 return r;
2171 /* Attribute handling. */
2173 /* Return nonzero if the function is an interrupt function. */
2176 xstormy16_interrupt_function_p (void)
2178 tree attributes;
2180 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2181 any functions are declared, which is demonstrably wrong, but
2182 it is worked around here. FIXME. */
2183 if (!cfun)
2184 return 0;
2186 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2187 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

/* Forward declarations of the attribute handlers defined below.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes recognized by this backend.
   "BELOW100" and "below100" are spelling variants sharing one handler.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "interrupt", 0, 0, false, true,  true,
    xstormy16_handle_interrupt_attribute , false },
  { "BELOW100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  /* Terminator entry.  */
  { NULL,        0, 0, false, false, false, NULL, false }
};
2211 /* Handle an "interrupt" attribute;
2212 arguments as in struct attribute_spec.handler. */
2214 static tree
2215 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2216 tree args ATTRIBUTE_UNUSED,
2217 int flags ATTRIBUTE_UNUSED,
2218 bool *no_add_attrs)
2220 if (TREE_CODE (*node) != FUNCTION_TYPE)
2222 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2223 name);
2224 *no_add_attrs = true;
2227 return NULL_TREE;
2230 /* Handle an "below" attribute;
2231 arguments as in struct attribute_spec.handler. */
2233 static tree
2234 xstormy16_handle_below100_attribute (tree *node,
2235 tree name ATTRIBUTE_UNUSED,
2236 tree args ATTRIBUTE_UNUSED,
2237 int flags ATTRIBUTE_UNUSED,
2238 bool *no_add_attrs)
2240 if (TREE_CODE (*node) != VAR_DECL
2241 && TREE_CODE (*node) != POINTER_TYPE
2242 && TREE_CODE (*node) != TYPE_DECL)
2244 warning (OPT_Wattributes,
2245 "%<__BELOW100__%> attribute only applies to variables");
2246 *no_add_attrs = true;
2248 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2250 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2252 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2253 "with auto storage class");
2254 *no_add_attrs = true;
2258 return NULL_TREE;
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table describing the machine-specific builtins, terminated by a
   NULL-name sentinel.  The index of an entry doubles as the builtin's
   DECL_FUNCTION_CODE.  */
static struct
{
  const char * name;
  int md_code;
  const char * arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
  s16builtins[] =
{
  /* div and mod share one MD pattern; which pattern operand is mapped
     to the return value ("r" in arg_ops) distinguishes them.  */
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};
/* Implement TARGET_INIT_BUILTINS: register each entry of s16builtins[]
   with the front end, building its function type from the arg_types
   string (element 0 is the return type, the rest are parameters).  */

static void
xstormy16_init_builtins (void)
{
  tree args[2], ret_type, arg = NULL_TREE, ftype;
  int i, a, n_args;

  ret_type = void_type_node;

  for (i = 0; s16builtins[i].name; i++)
    {
      /* arg_types[0] describes the return value, so the parameter
	 count is one less than the string length.  */
      n_args = strlen (s16builtins[i].arg_types) - 1;

      gcc_assert (n_args <= (int) ARRAY_SIZE (args));

      for (a = n_args - 1; a >= 0; a--)
	args[a] = NULL_TREE;

      /* Walk arg_types from last parameter down to the return type.  */
      for (a = n_args; a >= 0; a--)
	{
	  switch (s16builtins[i].arg_types[a])
	    {
	    case 's': arg = short_integer_type_node; break;
	    case 'S': arg = short_unsigned_type_node; break;
	    case 'l': arg = long_integer_type_node; break;
	    case 'L': arg = long_unsigned_type_node; break;
	    default: gcc_unreachable ();
	    }
	  if (a == 0)
	    ret_type = arg;
	  else
	    args[a-1] = arg;
	}
      /* Unused slots are NULL_TREE, which build_function_type_list
	 treats as end-of-list.  */
      ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
      /* The table index I becomes the DECL_FUNCTION_CODE used by
	 xstormy16_expand_builtin to find this entry again.  */
      add_builtin_function (s16builtins[i].name, ftype,
			    i, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
/* Implement TARGET_EXPAND_BUILTIN: emit RTL for a call to one of the
   builtins registered in xstormy16_init_builtins.  Operands of the MD
   pattern are assembled according to the builtin's arg_ops string.  */

static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  /* DECL_FUNCTION_CODE is the index into s16builtins[] assigned at
     registration time.  */
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand up to ten actual arguments into rtx form.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  /* Map each pattern operand: 'r' = result, 't' = scratch pseudo,
     a digit = the corresponding expanded argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      enum machine_mode omode;

      copyto[o] = 0;

      omode = (enum machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the chosen rtx does not satisfy the operand's predicate,
	 substitute a pseudo register.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      /* Output operand: let the insn write a fresh pseudo and
		 remember to copy the value back afterwards.  */
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* Unused trailing op[] slots are never read by the generator for
     patterns with fewer operands.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy substituted output operands back to their real destinations,
     keeping RETVAL pointing at the final location of the result.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.  */

static void
combine_bnp (rtx_insn *insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, qireg, mem;
  rtx_insn *and_insn, *load;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx_insn *shift = NULL;

  /* Only conditional branches on HImode comparisons are candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* Sign tests need a sign-extend feeding the compare.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be REG against zero, and the register must
     die here so deleting the feeding insns is safe.  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  /* Any other use of REG, or any insn that is not a note or a
	     plain insn, kills the transformation.  */
	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      if (reg_mentioned_p (reg, shift)
		  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
		{
		  /* No usable shift found; proceed with just the AND.  */
		  shift = NULL;
		  break;
		}
	    }
	}
    }

  if (and_insn == NULL_RTX)
    return;

  /* Search backwards (before the shift if there is one) for the load
     of REG from a below-100 memory location.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign test examines the top bit of the loaded value.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Account for the bit position shifted away before the AND.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* BN/BP test a byte; for an HImode load pick the byte containing
     the tested bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (Pmode, addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test memory directly, then
     delete the now-redundant feeding insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2607 static void
2608 xstormy16_reorg (void)
2610 rtx_insn *insn;
2612 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2614 if (! JUMP_P (insn))
2615 continue;
2616 combine_bnp (insn);
2620 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2622 static bool
2623 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2625 const HOST_WIDE_INT size = int_size_in_bytes (type);
2626 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook definitions.  Each #define below overrides the default
   in target-def.h before TARGET_INITIALIZER expands into targetm.  */

/* Assembly output.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* Costs.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Calling conventions.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload and addressing.  */
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* The one and only target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype.  */
#include "gt-stormy16.h"