Turn HARD_REGNO_MODE_OK into a target hook
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blob897d0e6f32c6b37076102105c9bd5eb80fd5e772
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2017 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "attribs.h"
30 #include "gimple.h"
31 #include "df.h"
32 #include "memmodel.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "optabs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "output.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "varasm.h"
43 #include "calls.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "langhooks.h"
47 #include "cfgrtl.h"
48 #include "gimplify.h"
49 #include "reload.h"
50 #include "builtins.h"
52 /* This file should be included last. */
53 #include "target-def.h"
55 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
56 static void xstormy16_asm_out_constructor (rtx, int);
57 static void xstormy16_asm_out_destructor (rtx, int);
58 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
59 HOST_WIDE_INT, tree);
61 static void xstormy16_init_builtins (void);
62 static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
63 static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
64 static bool xstormy16_return_in_memory (const_tree, const_tree);
66 static GTY(()) section *bss100_section;
68 /* Compute a (partial) cost for rtx X. Return true if the complete
69 cost has been computed, and false if subexpressions should be
70 scanned. In either case, *TOTAL contains the cost result. */
72 static bool
73 xstormy16_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
74 int outer_code ATTRIBUTE_UNUSED,
75 int opno ATTRIBUTE_UNUSED, int *total,
76 bool speed ATTRIBUTE_UNUSED)
78 int code = GET_CODE (x);
80 switch (code)
82 case CONST_INT:
83 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
84 *total = COSTS_N_INSNS (1) / 2;
85 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
86 *total = COSTS_N_INSNS (1);
87 else
88 *total = COSTS_N_INSNS (2);
89 return true;
91 case CONST_DOUBLE:
92 case CONST:
93 case SYMBOL_REF:
94 case LABEL_REF:
95 *total = COSTS_N_INSNS (2);
96 return true;
98 case MULT:
99 *total = COSTS_N_INSNS (35 + 6);
100 return true;
101 case DIV:
102 *total = COSTS_N_INSNS (51 - 6);
103 return true;
105 default:
106 return false;
110 static int
111 xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
112 addr_space_t as ATTRIBUTE_UNUSED,
113 bool speed ATTRIBUTE_UNUSED)
115 return (CONST_INT_P (x) ? 2
116 : GET_CODE (x) == PLUS ? 7
117 : 5);
120 /* Worker function for TARGET_MEMORY_MOVE_COST. */
122 static int
123 xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
124 bool in)
126 return (5 + memory_move_secondary_cost (mode, rclass, in));
129 /* Branches are handled as follows:
131 1. HImode compare-and-branches. The machine supports these
132 natively, so the appropriate pattern is emitted directly.
134 2. SImode EQ and NE. These are emitted as pairs of HImode
135 compare-and-branches.
137 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
138 of a SImode subtract followed by a branch (not a compare-and-branch),
139 like this:
144 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
149 bne. */
151 /* Emit a branch of kind CODE to location LOC. */
153 void
154 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
156 rtx condition_rtx, loc_ref, branch, cy_clobber;
157 rtvec vec;
158 machine_mode mode;
160 mode = GET_MODE (op0);
161 gcc_assert (mode == HImode || mode == SImode);
163 if (mode == SImode
164 && (code == GT || code == LE || code == GTU || code == LEU))
166 int unsigned_p = (code == GTU || code == LEU);
167 int gt_p = (code == GT || code == GTU);
168 rtx lab = NULL_RTX;
170 if (gt_p)
171 lab = gen_label_rtx ();
172 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
173 /* This should be generated as a comparison against the temporary
174 created by the previous insn, but reload can't handle that. */
175 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
176 if (gt_p)
177 emit_label (lab);
178 return;
180 else if (mode == SImode
181 && (code == NE || code == EQ)
182 && op1 != const0_rtx)
184 rtx op0_word, op1_word;
185 rtx lab = NULL_RTX;
186 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
187 int i;
189 if (code == EQ)
190 lab = gen_label_rtx ();
192 for (i = 0; i < num_words - 1; i++)
194 op0_word = simplify_gen_subreg (word_mode, op0, mode,
195 i * UNITS_PER_WORD);
196 op1_word = simplify_gen_subreg (word_mode, op1, mode,
197 i * UNITS_PER_WORD);
198 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
200 op0_word = simplify_gen_subreg (word_mode, op0, mode,
201 i * UNITS_PER_WORD);
202 op1_word = simplify_gen_subreg (word_mode, op1, mode,
203 i * UNITS_PER_WORD);
204 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
206 if (code == EQ)
207 emit_label (lab);
208 return;
211 /* We can't allow reload to try to generate any reload after a branch,
212 so when some register must match we must make the temporary ourselves. */
213 if (mode != HImode)
215 rtx tmp;
216 tmp = gen_reg_rtx (mode);
217 emit_move_insn (tmp, op0);
218 op0 = tmp;
221 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
222 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
223 branch = gen_rtx_SET (pc_rtx,
224 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
225 loc_ref, pc_rtx));
227 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
229 if (mode == HImode)
230 vec = gen_rtvec (2, branch, cy_clobber);
231 else if (code == NE || code == EQ)
232 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
233 else
235 rtx sub;
236 #if 0
237 sub = gen_rtx_SET (op0, gen_rtx_MINUS (SImode, op0, op1));
238 #else
239 sub = gen_rtx_CLOBBER (SImode, op0);
240 #endif
241 vec = gen_rtvec (3, branch, sub, cy_clobber);
244 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
247 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
248 the arithmetic operation. Most of the work is done by
249 xstormy16_expand_arith. */
251 void
252 xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
253 rtx dest)
255 rtx op0 = XEXP (comparison, 0);
256 rtx op1 = XEXP (comparison, 1);
257 rtx_insn *seq, *last_insn;
258 rtx compare;
260 start_sequence ();
261 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
262 seq = get_insns ();
263 end_sequence ();
265 gcc_assert (INSN_P (seq));
267 last_insn = seq;
268 while (NEXT_INSN (last_insn) != NULL_RTX)
269 last_insn = NEXT_INSN (last_insn);
271 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
272 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
273 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
274 emit_insn (seq);
278 /* Return the string to output a conditional branch to LABEL, which is
279 the operand number of the label.
281 OP is the conditional expression, or NULL for branch-always.
283 REVERSED is nonzero if we should reverse the sense of the comparison.
285 INSN is the insn. */
287 char *
288 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
289 rtx_insn *insn)
291 static char string[64];
292 int need_longbranch = (op != NULL_RTX
293 ? get_attr_length (insn) == 8
294 : get_attr_length (insn) == 4);
295 int really_reversed = reversed ^ need_longbranch;
296 const char *ccode;
297 const char *templ;
298 const char *operands;
299 enum rtx_code code;
301 if (! op)
303 if (need_longbranch)
304 ccode = "jmpf";
305 else
306 ccode = "br";
307 sprintf (string, "%s %s", ccode, label);
308 return string;
311 code = GET_CODE (op);
313 if (! REG_P (XEXP (op, 0)))
315 code = swap_condition (code);
316 operands = "%3,%2";
318 else
319 operands = "%2,%3";
321 /* Work out which way this really branches. */
322 if (really_reversed)
323 code = reverse_condition (code);
325 switch (code)
327 case EQ: ccode = "z"; break;
328 case NE: ccode = "nz"; break;
329 case GE: ccode = "ge"; break;
330 case LT: ccode = "lt"; break;
331 case GT: ccode = "gt"; break;
332 case LE: ccode = "le"; break;
333 case GEU: ccode = "nc"; break;
334 case LTU: ccode = "c"; break;
335 case GTU: ccode = "hi"; break;
336 case LEU: ccode = "ls"; break;
338 default:
339 gcc_unreachable ();
342 if (need_longbranch)
343 templ = "b%s %s,.+8 | jmpf %s";
344 else
345 templ = "b%s %s,%s";
346 sprintf (string, templ, ccode, operands, label);
348 return string;
351 /* Return the string to output a conditional branch to LABEL, which is
352 the operand number of the label, but suitable for the tail of a
353 SImode branch.
355 OP is the conditional expression (OP is never NULL_RTX).
357 REVERSED is nonzero if we should reverse the sense of the comparison.
359 INSN is the insn. */
361 char *
362 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
363 rtx_insn *insn)
365 static char string[64];
366 int need_longbranch = get_attr_length (insn) >= 8;
367 int really_reversed = reversed ^ need_longbranch;
368 const char *ccode;
369 const char *templ;
370 char prevop[16];
371 enum rtx_code code;
373 code = GET_CODE (op);
375 /* Work out which way this really branches. */
376 if (really_reversed)
377 code = reverse_condition (code);
379 switch (code)
381 case EQ: ccode = "z"; break;
382 case NE: ccode = "nz"; break;
383 case GE: ccode = "ge"; break;
384 case LT: ccode = "lt"; break;
385 case GEU: ccode = "nc"; break;
386 case LTU: ccode = "c"; break;
388 /* The missing codes above should never be generated. */
389 default:
390 gcc_unreachable ();
393 switch (code)
395 case EQ: case NE:
397 int regnum;
399 gcc_assert (REG_P (XEXP (op, 0)));
401 regnum = REGNO (XEXP (op, 0));
402 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
404 break;
406 case GE: case LT: case GEU: case LTU:
407 strcpy (prevop, "sbc %2,%3");
408 break;
410 default:
411 gcc_unreachable ();
414 if (need_longbranch)
415 templ = "%s | b%s .+6 | jmpf %s";
416 else
417 templ = "%s | b%s %s";
418 sprintf (string, templ, prevop, ccode, label);
420 return string;
423 /* Many machines have some registers that cannot be copied directly to or from
424 memory or even from other types of registers. An example is the `MQ'
425 register, which on most machines, can only be copied to or from general
426 registers, but not memory. Some machines allow copying all registers to and
427 from memory, but require a scratch register for stores to some memory
428 locations (e.g., those with symbolic address on the RT, and those with
429 certain symbolic address on the SPARC when compiling PIC). In some cases,
430 both an intermediate and a scratch register are required.
432 You should define these macros to indicate to the reload phase that it may
433 need to allocate at least one register for a reload in addition to the
434 register to contain the data. Specifically, if copying X to a register
435 RCLASS in MODE requires an intermediate register, you should define
436 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
437 whose registers can be used as intermediate registers or scratch registers.
439 If copying a register RCLASS in MODE to X requires an intermediate or scratch
440 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
441 largest register class required. If the requirements for input and output
442 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
443 instead of defining both macros identically.
445 The values returned by these macros are often `GENERAL_REGS'. Return
446 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
447 to or from a register of RCLASS in MODE without requiring a scratch register.
448 Do not define this macro if it would always return `NO_REGS'.
450 If a scratch register is required (either with or without an intermediate
451 register), you should define patterns for `reload_inM' or `reload_outM', as
452 required.. These patterns, which will normally be implemented with a
453 `define_expand', should be similar to the `movM' patterns, except that
454 operand 2 is the scratch register.
456 Define constraints for the reload register and scratch register that contain
457 a single register class. If the original reload register (whose class is
458 RCLASS) can meet the constraint given in the pattern, the value returned by
459 these macros is used for the class of the scratch register. Otherwise, two
460 additional reload registers are required. Their classes are obtained from
461 the constraints in the insn pattern.
463 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
464 either be in a hard register or in memory. Use `true_regnum' to find out;
465 it will return -1 if the pseudo is in memory and the hard register number if
466 it is in a register.
468 These macros should not be used in the case where a particular class of
469 registers can only be copied to memory and not to another class of
470 registers. In that case, secondary reload registers are not needed and
471 would not be helpful. Instead, a stack location must be used to perform the
472 copy and the `movM' pattern should use memory as an intermediate storage.
473 This case often occurs between floating-point and general registers. */
475 enum reg_class
476 xstormy16_secondary_reload_class (enum reg_class rclass,
477 machine_mode mode ATTRIBUTE_UNUSED,
478 rtx x)
480 /* This chip has the interesting property that only the first eight
481 registers can be moved to/from memory. */
482 if ((MEM_P (x)
483 || ((GET_CODE (x) == SUBREG || REG_P (x))
484 && (true_regnum (x) == -1
485 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
486 && ! reg_class_subset_p (rclass, EIGHT_REGS))
487 return EIGHT_REGS;
489 return NO_REGS;
492 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
493 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
495 static reg_class_t
496 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
498 if (rclass == GENERAL_REGS && MEM_P (x))
499 return EIGHT_REGS;
501 return rclass;
504 /* Predicate for symbols and addresses that reflect special 8-bit
505 addressing. */
508 xstormy16_below100_symbol (rtx x,
509 machine_mode mode ATTRIBUTE_UNUSED)
511 if (GET_CODE (x) == CONST)
512 x = XEXP (x, 0);
513 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
514 x = XEXP (x, 0);
516 if (GET_CODE (x) == SYMBOL_REF)
517 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
519 if (CONST_INT_P (x))
521 HOST_WIDE_INT i = INTVAL (x);
523 if ((i >= 0x0000 && i <= 0x00ff)
524 || (i >= 0x7f00 && i <= 0x7fff))
525 return 1;
527 return 0;
530 /* Likewise, but only for non-volatile MEMs, for patterns where the
531 MEM will get split into smaller sized accesses. */
534 xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
536 if (MEM_P (x) && MEM_VOLATILE_P (x))
537 return 0;
538 return xstormy16_below100_operand (x, mode);
541 /* Expand an 8-bit IOR. This either detects the one case we can
542 actually do, or uses a 16-bit IOR. */
544 void
545 xstormy16_expand_iorqi3 (rtx *operands)
547 rtx in, out, outsub, val;
549 out = operands[0];
550 in = operands[1];
551 val = operands[2];
553 if (xstormy16_onebit_set_operand (val, QImode))
555 if (!xstormy16_below100_or_register (in, QImode))
556 in = copy_to_mode_reg (QImode, in);
557 if (!xstormy16_below100_or_register (out, QImode))
558 out = gen_reg_rtx (QImode);
559 emit_insn (gen_iorqi3_internal (out, in, val));
560 if (out != operands[0])
561 emit_move_insn (operands[0], out);
562 return;
565 if (! REG_P (in))
566 in = copy_to_mode_reg (QImode, in);
568 if (! REG_P (val) && ! CONST_INT_P (val))
569 val = copy_to_mode_reg (QImode, val);
571 if (! REG_P (out))
572 out = gen_reg_rtx (QImode);
574 in = simplify_gen_subreg (HImode, in, QImode, 0);
575 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
577 if (! CONST_INT_P (val))
578 val = simplify_gen_subreg (HImode, val, QImode, 0);
580 emit_insn (gen_iorhi3 (outsub, in, val));
582 if (out != operands[0])
583 emit_move_insn (operands[0], out);
586 /* Expand an 8-bit AND. This either detects the one case we can
587 actually do, or uses a 16-bit AND. */
589 void
590 xstormy16_expand_andqi3 (rtx *operands)
592 rtx in, out, outsub, val;
594 out = operands[0];
595 in = operands[1];
596 val = operands[2];
598 if (xstormy16_onebit_clr_operand (val, QImode))
600 if (!xstormy16_below100_or_register (in, QImode))
601 in = copy_to_mode_reg (QImode, in);
602 if (!xstormy16_below100_or_register (out, QImode))
603 out = gen_reg_rtx (QImode);
604 emit_insn (gen_andqi3_internal (out, in, val));
605 if (out != operands[0])
606 emit_move_insn (operands[0], out);
607 return;
610 if (! REG_P (in))
611 in = copy_to_mode_reg (QImode, in);
613 if (! REG_P (val) && ! CONST_INT_P (val))
614 val = copy_to_mode_reg (QImode, val);
616 if (! REG_P (out))
617 out = gen_reg_rtx (QImode);
619 in = simplify_gen_subreg (HImode, in, QImode, 0);
620 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
622 if (! CONST_INT_P (val))
623 val = simplify_gen_subreg (HImode, val, QImode, 0);
625 emit_insn (gen_andhi3 (outsub, in, val));
627 if (out != operands[0])
628 emit_move_insn (operands[0], out);
/* True if X is a CONST_INT usable as a signed 12-bit displacement
   when OFFSET is added.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X (plus OFFSET) is a CONST_INT usable as an absolute
   address: within the 32K space and in one of the directly
   addressable ranges (low 256 bytes or top 256 bytes).  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
641 bool
642 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
643 rtx x, bool strict)
645 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
646 return true;
648 if (GET_CODE (x) == PLUS
649 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
651 x = XEXP (x, 0);
652 /* PR 31232: Do not allow INT+INT as an address. */
653 if (CONST_INT_P (x))
654 return false;
657 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
658 || GET_CODE (x) == POST_INC
659 || GET_CODE (x) == PRE_DEC)
660 x = XEXP (x, 0);
662 if (REG_P (x)
663 && REGNO_OK_FOR_BASE_P (REGNO (x))
664 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
665 return true;
667 if (xstormy16_below100_symbol (x, mode))
668 return true;
670 return false;
673 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
675 On this chip, this is true if the address is valid with an offset
676 of 0 but not of 6, because in that case it cannot be used as an
677 address for DImode or DFmode, or if the address is a post-increment
678 or pre-decrement address. */
680 static bool
681 xstormy16_mode_dependent_address_p (const_rtx x,
682 addr_space_t as ATTRIBUTE_UNUSED)
684 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
685 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
686 return true;
688 if (GET_CODE (x) == PLUS
689 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
690 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
691 return true;
693 /* Auto-increment addresses are now treated generically in recog.c. */
694 return false;
698 short_memory_operand (rtx x, machine_mode mode)
700 if (! memory_operand (x, mode))
701 return 0;
702 return (GET_CODE (XEXP (x, 0)) != PLUS);
705 /* Splitter for the 'move' patterns, for modes not directly implemented
706 by hardware. Emit insns to copy a value of mode MODE from SRC to
707 DEST.
709 This function is only called when reload_completed. */
711 void
712 xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
714 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
715 int direction, end, i;
716 int src_modifies = 0;
717 int dest_modifies = 0;
718 int src_volatile = 0;
719 int dest_volatile = 0;
720 rtx mem_operand;
721 rtx auto_inc_reg_rtx = NULL_RTX;
723 /* Check initial conditions. */
724 gcc_assert (reload_completed
725 && mode != QImode && mode != HImode
726 && nonimmediate_operand (dest, mode)
727 && general_operand (src, mode));
729 /* This case is not supported below, and shouldn't be generated. */
730 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
732 /* This case is very very bad after reload, so trap it now. */
733 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
735 /* The general idea is to copy by words, offsetting the source and
736 destination. Normally the least-significant word will be copied
737 first, but for pre-dec operations it's better to copy the
738 most-significant word first. Only one operand can be a pre-dec
739 or post-inc operand.
741 It's also possible that the copy overlaps so that the direction
742 must be reversed. */
743 direction = 1;
745 if (MEM_P (dest))
747 mem_operand = XEXP (dest, 0);
748 dest_modifies = side_effects_p (mem_operand);
749 if (auto_inc_p (mem_operand))
750 auto_inc_reg_rtx = XEXP (mem_operand, 0);
751 dest_volatile = MEM_VOLATILE_P (dest);
752 if (dest_volatile)
754 dest = copy_rtx (dest);
755 MEM_VOLATILE_P (dest) = 0;
758 else if (MEM_P (src))
760 mem_operand = XEXP (src, 0);
761 src_modifies = side_effects_p (mem_operand);
762 if (auto_inc_p (mem_operand))
763 auto_inc_reg_rtx = XEXP (mem_operand, 0);
764 src_volatile = MEM_VOLATILE_P (src);
765 if (src_volatile)
767 src = copy_rtx (src);
768 MEM_VOLATILE_P (src) = 0;
771 else
772 mem_operand = NULL_RTX;
774 if (mem_operand == NULL_RTX)
776 if (REG_P (src)
777 && REG_P (dest)
778 && reg_overlap_mentioned_p (dest, src)
779 && REGNO (dest) > REGNO (src))
780 direction = -1;
782 else if (GET_CODE (mem_operand) == PRE_DEC
783 || (GET_CODE (mem_operand) == PLUS
784 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
785 direction = -1;
786 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
788 int regno;
790 gcc_assert (REG_P (dest));
791 regno = REGNO (dest);
793 gcc_assert (refers_to_regno_p (regno, regno + num_words,
794 mem_operand, 0));
796 if (refers_to_regno_p (regno, mem_operand))
797 direction = -1;
798 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
799 mem_operand, 0))
800 direction = 1;
801 else
802 /* This means something like
803 (set (reg:DI r0) (mem:DI (reg:HI r1)))
804 which we'd need to support by doing the set of the second word
805 last. */
806 gcc_unreachable ();
809 end = direction < 0 ? -1 : num_words;
810 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
812 rtx w_src, w_dest, insn;
814 if (src_modifies)
815 w_src = gen_rtx_MEM (word_mode, mem_operand);
816 else
817 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
818 if (src_volatile)
819 MEM_VOLATILE_P (w_src) = 1;
820 if (dest_modifies)
821 w_dest = gen_rtx_MEM (word_mode, mem_operand);
822 else
823 w_dest = simplify_gen_subreg (word_mode, dest, mode,
824 i * UNITS_PER_WORD);
825 if (dest_volatile)
826 MEM_VOLATILE_P (w_dest) = 1;
828 /* The simplify_subreg calls must always be able to simplify. */
829 gcc_assert (GET_CODE (w_src) != SUBREG
830 && GET_CODE (w_dest) != SUBREG);
832 insn = emit_insn (gen_rtx_SET (w_dest, w_src));
833 if (auto_inc_reg_rtx)
834 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
835 auto_inc_reg_rtx,
836 REG_NOTES (insn));
840 /* Expander for the 'move' patterns. Emit insns to copy a value of
841 mode MODE from SRC to DEST. */
843 void
844 xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
846 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
848 rtx pmv = XEXP (dest, 0);
849 rtx dest_reg = XEXP (pmv, 0);
850 rtx dest_mod = XEXP (pmv, 1);
851 rtx set = gen_rtx_SET (dest_reg, dest_mod);
852 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
854 dest = gen_rtx_MEM (mode, dest_reg);
855 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
857 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
859 rtx pmv = XEXP (src, 0);
860 rtx src_reg = XEXP (pmv, 0);
861 rtx src_mod = XEXP (pmv, 1);
862 rtx set = gen_rtx_SET (src_reg, src_mod);
863 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
865 src = gen_rtx_MEM (mode, src_reg);
866 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
869 /* There are only limited immediate-to-memory move instructions. */
870 if (! reload_in_progress
871 && ! reload_completed
872 && MEM_P (dest)
873 && (! CONST_INT_P (XEXP (dest, 0))
874 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
875 && ! xstormy16_below100_operand (dest, mode)
876 && ! REG_P (src)
877 && GET_CODE (src) != SUBREG)
878 src = copy_to_mode_reg (mode, src);
880 /* Don't emit something we would immediately split. */
881 if (reload_completed
882 && mode != HImode && mode != QImode)
884 xstormy16_split_move (mode, dest, src);
885 return;
888 emit_insn (gen_rtx_SET (dest, src));
891 /* Stack Layout:
893 The stack is laid out as follows:
895 SP->
896 FP-> Local variables
897 Register save area (up to 4 words)
898 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
900 AP-> Return address (two words)
901 9th procedure parameter word
902 10th procedure parameter word
904 last procedure parameter word
906 The frame pointer location is tuned to make it most likely that all
907 parameters and local variables can be accessed using a load-indexed
908 instruction. */
/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
/* Does REGNO need to be saved?  Call-saved registers that are live,
   plus (for interrupt functions, IFUN) any live-or-nonleaf call-used
   register except the carry flag.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
932 /* Compute the stack layout. */
934 struct xstormy16_stack_layout
935 xstormy16_compute_stack_layout (void)
937 struct xstormy16_stack_layout layout;
938 int regno;
939 const int ifun = xstormy16_interrupt_function_p ();
941 layout.locals_size = get_frame_size ();
943 layout.register_save_size = 0;
944 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
945 if (REG_NEEDS_SAVE (regno, ifun))
946 layout.register_save_size += UNITS_PER_WORD;
948 if (cfun->stdarg)
949 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
950 else
951 layout.stdarg_save_size = 0;
953 layout.frame_size = (layout.locals_size
954 + layout.register_save_size
955 + layout.stdarg_save_size);
957 if (crtl->args.size <= 2048 && crtl->args.size != -1)
959 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
960 + crtl->args.size <= 2048)
961 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
962 else
963 layout.fp_minus_ap = 2048 - crtl->args.size;
965 else
966 layout.fp_minus_ap = (layout.stdarg_save_size
967 + layout.register_save_size
968 - INCOMING_FRAME_SP_OFFSET);
969 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
970 - layout.fp_minus_ap);
971 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
972 return layout;
975 /* Worker function for TARGET_CAN_ELIMINATE. */
977 static bool
978 xstormy16_can_eliminate (const int from, const int to)
980 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
981 ? ! frame_pointer_needed
982 : true);
985 /* Determine how all the special registers get eliminated. */
988 xstormy16_initial_elimination_offset (int from, int to)
990 struct xstormy16_stack_layout layout;
991 int result;
993 layout = xstormy16_compute_stack_layout ();
995 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
996 result = layout.sp_minus_fp - layout.locals_size;
997 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
998 result = - layout.locals_size;
999 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1000 result = - layout.fp_minus_ap;
1001 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1002 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1003 else
1004 gcc_unreachable ();
1006 return result;
1009 static rtx
1010 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1012 rtx set, clobber, insn;
1014 set = gen_rtx_SET (dest, gen_rtx_PLUS (HImode, src0, src1));
1015 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1016 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1017 return insn;
1020 /* Called after register allocation to add any instructions needed for
1021 the prologue. Using a prologue insn is favored compared to putting
1022 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1023 since it allows the scheduler to intermix instructions with the
1024 saves of the caller saved registers. In some cases, it might be
1025 necessary to emit a barrier instruction as the last insn to prevent
1026 such scheduling.
1028 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1029 so that the debug info generation code can handle them properly. */
void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  /* The xstormy16 stack grows upward: a push is a store through
     (post_inc sp).  */
  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* The push above uses POST_INC, which dwarf2 CFI cannot
	   describe directly; attach an explicit two-part note that
	   spells out the store and the stack-pointer adjustment.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	/* Same explicit CFI note as for the stdarg saves above.  */
	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode, stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
	{
	  insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
					 hard_frame_pointer_rtx,
					 GEN_INT (- layout.sp_minus_fp));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1131 /* Do we need an epilogue at all? */
1134 direct_return (void)
1136 return (reload_completed
1137 && xstormy16_compute_stack_layout ().frame_size == 0
1138 && ! xstormy16_interrupt_function_p ());
1141 /* Called after register allocation to add any instructions needed for
1142 the epilogue. Using an epilogue insn is favored compared to putting
1143 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1144 since it allows the scheduler to intermix instructions with the
1145 saves of the caller saved registers. In some cases, it might be
1146 necessary to emit a barrier instruction as the last insn to prevent
1147 such scheduling. */
void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* The stack grows upward, so a pop is a load through (pre_dec sp) —
     the mirror image of the prologue's (post_inc sp) push.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* If the FP already points at the bottom of the locals, restoring
	 SP from it is cheaper than an explicit subtract.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  Iterate downward so the pops
     mirror the upward push order used by the prologue.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1190 xstormy16_epilogue_uses (int regno)
1192 if (reload_completed && call_used_regs[regno])
1194 const int ifun = xstormy16_interrupt_function_p ();
1195 return REG_NEEDS_SAVE (regno, ifun);
1197 return 0;
/* Profiling (-p / -pg) is not implemented for this target; emit a
   "sorry" diagnostic rather than silently producing wrong code.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1206 /* Update CUM to advance past an argument in the argument list. The
1207 values MODE, TYPE and NAMED describe that argument. Once this is
1208 done, the variable CUM is suitable for analyzing the *following*
1209 argument with `TARGET_FUNCTION_ARG', etc.
1211 This function need not do anything if the argument in question was
1212 passed on the stack. The compiler knows how to track the amount of
1213 stack space used for arguments without any special help. However,
1214 it makes life easier for xstormy16_build_va_list if it does update
1215 the word count. */
1217 static void
1218 xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1219 const_tree type, bool named ATTRIBUTE_UNUSED)
1221 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1223 /* If an argument would otherwise be passed partially in registers,
1224 and partially on the stack, the whole of it is passed on the
1225 stack. */
1226 if (*cum < NUM_ARGUMENT_REGISTERS
1227 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1228 *cum = NUM_ARGUMENT_REGISTERS;
1230 *cum += XSTORMY16_WORD_SIZE (type, mode);
1233 static rtx
1234 xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
1235 const_tree type, bool named ATTRIBUTE_UNUSED)
1237 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1239 if (mode == VOIDmode)
1240 return const0_rtx;
1241 if (targetm.calls.must_pass_in_stack (mode, type)
1242 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1243 return NULL_RTX;
1244 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1247 /* Build the va_list type.
1249 For this chip, va_list is a record containing a counter and a pointer.
1250 The counter is of type 'int' and indicates how many bytes
1251 have been used to date. The pointer indicates the stack position
1252 for arguments that have not been passed in registers.
1253 To keep the layout nice, the pointer is first in the structure. */
1255 static tree
1256 xstormy16_build_builtin_va_list (void)
1258 tree f_1, f_2, record, type_decl;
1260 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1261 type_decl = build_decl (BUILTINS_LOCATION,
1262 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1264 f_1 = build_decl (BUILTINS_LOCATION,
1265 FIELD_DECL, get_identifier ("base"),
1266 ptr_type_node);
1267 f_2 = build_decl (BUILTINS_LOCATION,
1268 FIELD_DECL, get_identifier ("count"),
1269 unsigned_type_node);
1271 DECL_FIELD_CONTEXT (f_1) = record;
1272 DECL_FIELD_CONTEXT (f_2) = record;
1274 TYPE_STUB_DECL (record) = type_decl;
1275 TYPE_NAME (record) = type_decl;
1276 TYPE_FIELDS (record) = f_1;
1277 DECL_CHAIN (f_1) = f_2;
1279 layout_type (record);
1281 return record;
1284 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1285 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1286 variable to initialize. NEXTARG is the machine independent notion of the
1287 'next' argument after the variable arguments. */
static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t,u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* Field order matches xstormy16_build_builtin_va_list: base first,
     count second.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = virtual_incoming_args - INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes already consumed by the named arguments.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1321 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1322 of type va_list as a tree, TYPE is the type passed to va_arg.
1323 Note: This algorithm is documented in stormy-abi. */
static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* Field order matches xstormy16_build_builtin_va_list: base, count.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument slots are whole words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  /* Fast path: the argument was passed in registers, i.e. it fits
     entirely below the register save area.  */
  if (!must_stack)
    {
      tree r;

      /* if (count + size > size_of_reg_args) goto lab_fromstack;  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* addr = base + count;  */
      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* if (count < size_of_reg_args) count = size_of_reg_args;  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack slot: addr = base - (count - size_of_reg_args
				- INCOMING_FRAME_SP_OFFSET + size),
     i.e. stack arguments live below BASE (stack grows upward).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* count += size;  (commit the consumed bytes back to the va_list)  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1416 /* Worker function for TARGET_TRAMPOLINE_INIT. */
static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: 0x3130 | STATIC_CHAIN_REGNUM — presumably the encoding of
     a "mov <chain-reg>,#imm16" instruction; confirm against the
     xstormy16 ISA manual.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value, i.e. the immediate operand of the
     mov stored above.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: low byte of the target address merged with 0x0200 —
     NOTE(review): looks like the first word of a far jump to FNDECL;
     verify opcode against the ISA manual.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining (high) bits of the function address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1450 /* Worker function for TARGET_FUNCTION_VALUE. */
1452 static rtx
1453 xstormy16_function_value (const_tree valtype,
1454 const_tree func ATTRIBUTE_UNUSED,
1455 bool outgoing ATTRIBUTE_UNUSED)
1457 machine_mode mode;
1458 mode = TYPE_MODE (valtype);
1459 PROMOTE_MODE (mode, 0, valtype);
1460 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1463 /* Worker function for TARGET_LIBCALL_VALUE. */
/* Libcall results come back in the same register as ordinary function
   values; MODE is already the promoted machine mode.  */
static rtx
xstormy16_libcall_value (machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1472 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only the single return-value register can hold a function result.  */
static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}
1480 /* A C compound statement that outputs the assembler code for a thunk function,
1481 used to implement C++ virtual function calls with multiple inheritance. The
1482 thunk acts as a wrapper around a virtual function, adjusting the implicit
1483 object parameter before handing control off to the real function.
1485 First, emit code to add the integer DELTA to the location that contains the
1486 incoming first argument. Assume that this argument contains a pointer, and
1487 is the one used to pass the `this' pointer in C++. This is the incoming
1488 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1489 addition must preserve the values of all other incoming arguments.
1491 After the addition, emit code to jump to FUNCTION, which is a
1492 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1493 the return address. Hence returning from FUNCTION will return to whoever
1494 called the current `thunk'.
1496 The effect must be as if @var{function} had been called directly
1497 with the adjusted first argument. This macro is responsible for
1498 emitting all of the code for a thunk function;
1499 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1500 not invoked.
1502 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1503 extracted from it.) It might possibly be useful on some targets, but
1504 probably not. */
static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust the `this' pointer in its argument register, then tail-jump
     (far) to the real function.  DELTA is masked to 16 bits to match
     the register width.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1525 /* The purpose of this function is to override the default behavior of
1526 BSS objects. Normally, they go into .bss or .sbss via ".common"
1527 directives, but we need to override that and put them in
1528 .bss_below100. We can't just use a section override (like we do
1529 for .data_below100), because that makes them initialized rather
1530 than uninitialized. */
void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Below-100 objects must be emitted explicitly into .bss_below100;
     a ".comm" directive would let the linker place them anywhere.  */
  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      /* Convert the bit alignment to a .p2align exponent.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      /* Reserve the space with .space rather than .comm so the object
	 stays inside .bss_below100.  */
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Everything else goes through the normal common mechanism.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
1581 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Implement TARGET_ASM_INIT_SECTIONS: create the .bss_below100 section
   used for uninitialized "below100" objects.  */
static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1592 /* Mark symbols with the "below100" attribute so that we can use the
1593 special addressing modes for them. */
1595 static void
1596 xstormy16_encode_section_info (tree decl, rtx r, int first)
1598 default_encode_section_info (decl, r, first);
1600 if (TREE_CODE (decl) == VAR_DECL
1601 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1602 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1604 rtx symbol = XEXP (r, 0);
1606 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1607 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1611 #undef TARGET_ASM_CONSTRUCTOR
1612 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1613 #undef TARGET_ASM_DESTRUCTOR
1614 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1616 /* Output constructors and destructors. Just like
1617 default_named_section_asm_out_* but don't set the sections writable. */
1619 static void
1620 xstormy16_asm_out_destructor (rtx symbol, int priority)
1622 const char *section = ".dtors";
1623 char buf[18];
1625 /* ??? This only works reliably with the GNU linker. */
1626 if (priority != DEFAULT_INIT_PRIORITY)
1628 sprintf (buf, ".dtors.%.5u",
1629 /* Invert the numbering so the linker puts us in the proper
1630 order; constructors are run from right to left, and the
1631 linker sorts in increasing order. */
1632 MAX_INIT_PRIORITY - priority);
1633 section = buf;
1636 switch_to_section (get_section (section, 0, NULL));
1637 assemble_align (POINTER_SIZE);
1638 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1641 static void
1642 xstormy16_asm_out_constructor (rtx symbol, int priority)
1644 const char *section = ".ctors";
1645 char buf[18];
1647 /* ??? This only works reliably with the GNU linker. */
1648 if (priority != DEFAULT_INIT_PRIORITY)
1650 sprintf (buf, ".ctors.%.5u",
1651 /* Invert the numbering so the linker puts us in the proper
1652 order; constructors are run from right to left, and the
1653 linker sorts in increasing order. */
1654 MAX_INIT_PRIORITY - priority);
1655 section = buf;
1658 switch_to_section (get_section (section, 0, NULL));
1659 assemble_align (POINTER_SIZE);
1660 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1663 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1665 Print a memory address as an operand to reference that memory location. */
static void
xstormy16_print_operand_address (FILE *file, machine_mode /*mode*/,
				 rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      /* Absolute addresses are 16 bits wide.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (REG_P (address));

  /* Assembler syntax: "(--Rn,offset)" / "(Rn++,offset)" / "(Rn)".  */
  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1716 /* Worker function for TARGET_PRINT_OPERAND.
1718 Print an operand to an assembler instruction. */
1720 static void
1721 xstormy16_print_operand (FILE *file, rtx x, int code)
1723 switch (code)
1725 case 'B':
1726 /* There is either one bit set, or one bit clear, in X.
1727 Print it preceded by '#'. */
1729 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1730 HOST_WIDE_INT xx = 1;
1731 HOST_WIDE_INT l;
1733 if (CONST_INT_P (x))
1734 xx = INTVAL (x);
1735 else
1736 output_operand_lossage ("'B' operand is not constant");
1738 /* GCC sign-extends masks with the MSB set, so we have to
1739 detect all the cases that differ only in sign extension
1740 beyond the bits we care about. Normally, the predicates
1741 and constraints ensure that we have the right values. This
1742 works correctly for valid masks. */
1743 if (bits_set[xx & 7] <= 1)
1745 /* Remove sign extension bits. */
1746 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1747 xx &= 0xff;
1748 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1749 xx &= 0xffff;
1750 l = exact_log2 (xx);
1752 else
1754 /* Add sign extension bits. */
1755 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1756 xx |= ~(HOST_WIDE_INT)0xff;
1757 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1758 xx |= ~(HOST_WIDE_INT)0xffff;
1759 l = exact_log2 (~xx);
1762 if (l == -1)
1763 output_operand_lossage ("'B' operand has multiple bits set");
1765 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1766 return;
1769 case 'C':
1770 /* Print the symbol without a surrounding @fptr(). */
1771 if (GET_CODE (x) == SYMBOL_REF)
1772 assemble_name (file, XSTR (x, 0));
1773 else if (LABEL_P (x))
1774 output_asm_label (x);
1775 else
1776 xstormy16_print_operand_address (file, VOIDmode, x);
1777 return;
1779 case 'o':
1780 case 'O':
1781 /* Print the immediate operand less one, preceded by '#'.
1782 For 'O', negate it first. */
1784 HOST_WIDE_INT xx = 0;
1786 if (CONST_INT_P (x))
1787 xx = INTVAL (x);
1788 else
1789 output_operand_lossage ("'o' operand is not constant");
1791 if (code == 'O')
1792 xx = -xx;
1794 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1795 return;
1798 case 'b':
1799 /* Print the shift mask for bp/bn. */
1801 HOST_WIDE_INT xx = 1;
1802 HOST_WIDE_INT l;
1804 if (CONST_INT_P (x))
1805 xx = INTVAL (x);
1806 else
1807 output_operand_lossage ("'B' operand is not constant");
1809 l = 7 - xx;
1811 fputs (IMMEDIATE_PREFIX, file);
1812 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1813 return;
1816 case 0:
1817 /* Handled below. */
1818 break;
1820 default:
1821 output_operand_lossage ("xstormy16_print_operand: unknown code");
1822 return;
1825 switch (GET_CODE (x))
1827 case REG:
1828 fputs (reg_names [REGNO (x)], file);
1829 break;
1831 case MEM:
1832 xstormy16_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
1833 break;
1835 default:
1836 /* Some kind of constant or label; an immediate operand,
1837 so prefix it with '#' for the assembler. */
1838 fputs (IMMEDIATE_PREFIX, file);
1839 output_addr_const (file, x);
1840 break;
1843 return;
1846 /* Expander for the `casesi' pattern.
1847 INDEX is the index of the switch statement.
1848 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1849 to the first table entry.
1850 RANGE is the number of table entries.
1851 TABLE is an ADDR_VEC that is the jump table.
1852 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1853 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* index -= lower_bound; if ((unsigned) index > range) goto default.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  /* Scale by 4: each table entry is one 'jmpf' (see
     xstormy16_output_addr_vec), so shift left by 2 before dispatching.  */
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1877 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1878 instructions, without label or alignment or any other special
1879 constructs. We know that the previous instruction will be the
1880 `tablejump_pcrel' output above.
1882 TODO: it might be nice to output 'br' instructions if they could
1883 all reach. */
1885 void
1886 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1888 int vlen, idx;
1890 switch_to_section (current_function_section ());
1892 vlen = XVECLEN (table, 0);
1893 for (idx = 0; idx < vlen; idx++)
1895 fputs ("\tjmpf ", file);
1896 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1897 fputc ('\n', file);
1901 /* Expander for the `call' patterns.
1902 RETVAL is the RTL for the return register or NULL for void functions.
1903 DEST is the function to call, expressed as a MEM.
1904 COUNTER is ignored. */
void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  /* Indirect targets must be in a register.  */
  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (retval, call);

  /* Attach a USE of a scratch register for indirect calls, const0_rtx
     otherwise, distinguishing the two call patterns.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
1941 /* Expanders for multiword computational operations. */
1943 /* Expander for arithmetic operations; emit insns to compute
1945 (set DEST (CODE:MODE SRC0 SRC1))
1947 When CODE is COMPARE, a branch template is generated
1948 (this saves duplicating code in xstormy16_split_cbranch). */
void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as 0 - src1: preload SRC0 with zero and fall into
     the MINUS machinery below.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one word at a time, low word first, threading the carry
     through the addc/subc patterns.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding a zero word before any carry exists is a no-op.  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 sets the carry; addchi5 also consumes it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE the final word is emitted as a combined
	     subtract-and-branch template, so that
	     xstormy16_split_cbranch need not duplicate this code.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip words where SRC1 is the identity element
	     (0 for IOR/XOR, -1 for AND).  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (w_dest, gen_rtx_fmt_ee (code, mode,
						      w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2047 /* The shift operations are split at output time for constant values;
2048 variable-width shifts get handed off to a library routine.
2050 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2051 SIZE_R will be a CONST_INT, X will be a hard register. */
const char *
xstormy16_output_shift (machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  /* Shared static buffer: the returned string is only valid until the
     next call, so this function is not reentrant.  */
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
	      && REG_P (x)
	      && mode == SImode);

  /* Reduce the count modulo the operand width (32).  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* r0 is the low word of the SImode pair, r1 the high word.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      /* Exactly one word: a register move plus sign/zero fill.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  if (size > 16)
    {
      /* Word move plus a single-word shift of the remainder.  */
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  The bits shifted across the word boundary are
     recovered in TEMP by a counter-shift and OR-ed into the other
     word.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2163 /* Attribute handling. */
2165 /* Return nonzero if the function is an interrupt function. */
2168 xstormy16_interrupt_function_p (void)
2170 tree attributes;
2172 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2173 any functions are declared, which is demonstrably wrong, but
2174 it is worked around here. FIXME. */
2175 if (!cfun)
2176 return 0;
2178 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2179 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2182 #undef TARGET_ATTRIBUTE_TABLE
2183 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2185 static tree xstormy16_handle_interrupt_attribute
2186 (tree *, tree, tree, int, bool *);
2187 static tree xstormy16_handle_below100_attribute
2188 (tree *, tree, tree, int, bool *);
2190 static const struct attribute_spec xstormy16_attribute_table[] =
2192 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2193 affects_type_identity. */
2194 { "interrupt", 0, 0, false, true, true,
2195 xstormy16_handle_interrupt_attribute , false },
2196 { "BELOW100", 0, 0, false, false, false,
2197 xstormy16_handle_below100_attribute, false },
2198 { "below100", 0, 0, false, false, false,
2199 xstormy16_handle_below100_attribute, false },
2200 { NULL, 0, 0, false, false, false, NULL, false }
2203 /* Handle an "interrupt" attribute;
2204 arguments as in struct attribute_spec.handler. */
2206 static tree
2207 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2208 tree args ATTRIBUTE_UNUSED,
2209 int flags ATTRIBUTE_UNUSED,
2210 bool *no_add_attrs)
2212 if (TREE_CODE (*node) != FUNCTION_TYPE)
2214 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2215 name);
2216 *no_add_attrs = true;
2219 return NULL_TREE;
2222 /* Handle an "below" attribute;
2223 arguments as in struct attribute_spec.handler. */
2225 static tree
2226 xstormy16_handle_below100_attribute (tree *node,
2227 tree name ATTRIBUTE_UNUSED,
2228 tree args ATTRIBUTE_UNUSED,
2229 int flags ATTRIBUTE_UNUSED,
2230 bool *no_add_attrs)
2232 if (TREE_CODE (*node) != VAR_DECL
2233 && TREE_CODE (*node) != POINTER_TYPE
2234 && TREE_CODE (*node) != TYPE_DECL)
2236 warning (OPT_Wattributes,
2237 "%<__BELOW100__%> attribute only applies to variables");
2238 *no_add_attrs = true;
2240 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2242 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2244 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2245 "with auto storage class");
2246 *no_add_attrs = true;
2250 return NULL_TREE;
2253 #undef TARGET_INIT_BUILTINS
2254 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2255 #undef TARGET_EXPAND_BUILTIN
2256 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2258 static struct
2260 const char * name;
2261 int md_code;
2262 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2263 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2265 s16builtins[] =
2267 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2268 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2269 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2270 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2271 { NULL, 0, NULL, NULL }
2274 static void
2275 xstormy16_init_builtins (void)
2277 tree args[2], ret_type, arg = NULL_TREE, ftype;
2278 int i, a, n_args;
2280 ret_type = void_type_node;
2282 for (i = 0; s16builtins[i].name; i++)
2284 n_args = strlen (s16builtins[i].arg_types) - 1;
2286 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2288 for (a = n_args - 1; a >= 0; a--)
2289 args[a] = NULL_TREE;
2291 for (a = n_args; a >= 0; a--)
2293 switch (s16builtins[i].arg_types[a])
2295 case 's': arg = short_integer_type_node; break;
2296 case 'S': arg = short_unsigned_type_node; break;
2297 case 'l': arg = long_integer_type_node; break;
2298 case 'L': arg = long_unsigned_type_node; break;
2299 default: gcc_unreachable ();
2301 if (a == 0)
2302 ret_type = arg;
2303 else
2304 args[a-1] = arg;
2306 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2307 add_builtin_function (s16builtins[i].name, ftype,
2308 i, BUILT_IN_MD, NULL, NULL_TREE);
2312 static rtx
2313 xstormy16_expand_builtin (tree exp, rtx target,
2314 rtx subtarget ATTRIBUTE_UNUSED,
2315 machine_mode mode ATTRIBUTE_UNUSED,
2316 int ignore ATTRIBUTE_UNUSED)
2318 rtx op[10], args[10], pat, copyto[10], retval = 0;
2319 tree fndecl, argtree;
2320 int i, a, o, code;
2322 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2323 argtree = TREE_OPERAND (exp, 1);
2324 i = DECL_FUNCTION_CODE (fndecl);
2325 code = s16builtins[i].md_code;
2327 for (a = 0; a < 10 && argtree; a++)
2329 args[a] = expand_normal (TREE_VALUE (argtree));
2330 argtree = TREE_CHAIN (argtree);
2333 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2335 char ao = s16builtins[i].arg_ops[o];
2336 char c = insn_data[code].operand[o].constraint[0];
2337 machine_mode omode;
2339 copyto[o] = 0;
2341 omode = (machine_mode) insn_data[code].operand[o].mode;
2342 if (ao == 'r')
2343 op[o] = target ? target : gen_reg_rtx (omode);
2344 else if (ao == 't')
2345 op[o] = gen_reg_rtx (omode);
2346 else
2347 op[o] = args[(int) hex_value (ao)];
2349 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2351 if (c == '+' || c == '=')
2353 copyto[o] = op[o];
2354 op[o] = gen_reg_rtx (omode);
2356 else
2357 op[o] = copy_to_mode_reg (omode, op[o]);
2360 if (ao == 'r')
2361 retval = op[o];
2364 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2365 op[5], op[6], op[7], op[8], op[9]);
2366 emit_insn (pat);
2368 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2369 if (copyto[o])
2371 emit_move_insn (copyto[o], op[o]);
2372 if (op[o] == retval)
2373 retval = copyto[o];
2376 return retval;
2379 /* Look for combinations of insns that can be converted to BN or BP
2380 opcodes. This is, unfortunately, too complex to do with MD
2381 patterns. */
2383 static void
2384 combine_bnp (rtx_insn *insn)
2386 int insn_code, regno, need_extend;
2387 unsigned int mask;
2388 rtx cond, reg, qireg, mem;
2389 rtx_insn *and_insn, *load;
2390 machine_mode load_mode = QImode;
2391 machine_mode and_mode = QImode;
2392 rtx_insn *shift = NULL;
2394 insn_code = recog_memoized (insn);
2395 if (insn_code != CODE_FOR_cbranchhi
2396 && insn_code != CODE_FOR_cbranchhi_neg)
2397 return;
2399 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2400 cond = XEXP (cond, 1); /* if */
2401 cond = XEXP (cond, 0); /* cond */
2402 switch (GET_CODE (cond))
2404 case NE:
2405 case EQ:
2406 need_extend = 0;
2407 break;
2408 case LT:
2409 case GE:
2410 need_extend = 1;
2411 break;
2412 default:
2413 return;
2416 reg = XEXP (cond, 0);
2417 if (! REG_P (reg))
2418 return;
2419 regno = REGNO (reg);
2420 if (XEXP (cond, 1) != const0_rtx)
2421 return;
2422 if (! find_regno_note (insn, REG_DEAD, regno))
2423 return;
2424 qireg = gen_rtx_REG (QImode, regno);
2426 if (need_extend)
2428 /* LT and GE conditionals should have a sign extend before
2429 them. */
2430 for (and_insn = prev_real_insn (insn);
2431 and_insn != NULL_RTX;
2432 and_insn = prev_real_insn (and_insn))
2434 int and_code = recog_memoized (and_insn);
2436 if (and_code == CODE_FOR_extendqihi2
2437 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2438 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2439 break;
2441 if (and_code == CODE_FOR_movhi_internal
2442 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2444 /* This is for testing bit 15. */
2445 and_insn = insn;
2446 break;
2449 if (reg_mentioned_p (reg, and_insn))
2450 return;
2452 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2453 return;
2456 else
2458 /* EQ and NE conditionals have an AND before them. */
2459 for (and_insn = prev_real_insn (insn);
2460 and_insn != NULL_RTX;
2461 and_insn = prev_real_insn (and_insn))
2463 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2464 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2465 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2466 break;
2468 if (reg_mentioned_p (reg, and_insn))
2469 return;
2471 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2472 return;
2475 if (and_insn)
2477 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2478 followed by an AND like this:
2480 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2481 (clobber (reg:BI carry))]
2483 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2485 Attempt to detect this here. */
2486 for (shift = prev_real_insn (and_insn); shift;
2487 shift = prev_real_insn (shift))
2489 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2490 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2491 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2492 break;
2494 if (reg_mentioned_p (reg, shift)
2495 || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
2497 shift = NULL;
2498 break;
2504 if (and_insn == NULL_RTX)
2505 return;
2507 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2508 load;
2509 load = prev_real_insn (load))
2511 int load_code = recog_memoized (load);
2513 if (load_code == CODE_FOR_movhi_internal
2514 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2515 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2516 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2518 load_mode = HImode;
2519 break;
2522 if (load_code == CODE_FOR_movqi_internal
2523 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2524 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2526 load_mode = QImode;
2527 break;
2530 if (load_code == CODE_FOR_zero_extendqihi2
2531 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2532 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2534 load_mode = QImode;
2535 and_mode = HImode;
2536 break;
2539 if (reg_mentioned_p (reg, load))
2540 return;
2542 if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
2543 return;
2545 if (!load)
2546 return;
2548 mem = SET_SRC (PATTERN (load));
2550 if (need_extend)
2552 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2554 /* If the mem includes a zero-extend operation and we are
2555 going to generate a sign-extend operation then move the
2556 mem inside the zero-extend. */
2557 if (GET_CODE (mem) == ZERO_EXTEND)
2558 mem = XEXP (mem, 0);
2560 else
2562 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2563 load_mode))
2564 return;
2566 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2568 if (shift)
2569 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2572 if (load_mode == HImode)
2574 rtx addr = XEXP (mem, 0);
2576 if (! (mask & 0xff))
2578 addr = plus_constant (Pmode, addr, 1);
2579 mask >>= 8;
2581 mem = gen_rtx_MEM (QImode, addr);
2584 if (need_extend)
2585 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2586 else
2587 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2589 INSN_CODE (insn) = -1;
2590 delete_insn (load);
2592 if (and_insn != insn)
2593 delete_insn (and_insn);
2595 if (shift != NULL_RTX)
2596 delete_insn (shift);
2599 static void
2600 xstormy16_reorg (void)
2602 rtx_insn *insn;
2604 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2606 if (! JUMP_P (insn))
2607 continue;
2608 combine_bnp (insn);
2612 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2614 static bool
2615 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2617 const HOST_WIDE_INT size = int_size_in_bytes (type);
2618 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2621 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2623 static bool
2624 xstormy16_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2626 return regno != 16 || mode == BImode;
2629 #undef TARGET_ASM_ALIGNED_HI_OP
2630 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2631 #undef TARGET_ASM_ALIGNED_SI_OP
2632 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2633 #undef TARGET_ENCODE_SECTION_INFO
2634 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2636 /* Select_section doesn't handle .bss_below100. */
2637 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2638 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2640 #undef TARGET_ASM_OUTPUT_MI_THUNK
2641 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2642 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2643 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2645 #undef TARGET_PRINT_OPERAND
2646 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2647 #undef TARGET_PRINT_OPERAND_ADDRESS
2648 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2650 #undef TARGET_MEMORY_MOVE_COST
2651 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2652 #undef TARGET_RTX_COSTS
2653 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2654 #undef TARGET_ADDRESS_COST
2655 #define TARGET_ADDRESS_COST xstormy16_address_cost
2657 #undef TARGET_BUILD_BUILTIN_VA_LIST
2658 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2659 #undef TARGET_EXPAND_BUILTIN_VA_START
2660 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2661 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2662 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2664 #undef TARGET_PROMOTE_FUNCTION_MODE
2665 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2666 #undef TARGET_PROMOTE_PROTOTYPES
2667 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2669 #undef TARGET_FUNCTION_ARG
2670 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2671 #undef TARGET_FUNCTION_ARG_ADVANCE
2672 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2674 #undef TARGET_RETURN_IN_MEMORY
2675 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2676 #undef TARGET_FUNCTION_VALUE
2677 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2678 #undef TARGET_LIBCALL_VALUE
2679 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2680 #undef TARGET_FUNCTION_VALUE_REGNO_P
2681 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2683 #undef TARGET_MACHINE_DEPENDENT_REORG
2684 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2686 #undef TARGET_PREFERRED_RELOAD_CLASS
2687 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2688 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2689 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2691 #undef TARGET_LRA_P
2692 #define TARGET_LRA_P hook_bool_void_false
2694 #undef TARGET_LEGITIMATE_ADDRESS_P
2695 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2696 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2697 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2699 #undef TARGET_CAN_ELIMINATE
2700 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2702 #undef TARGET_TRAMPOLINE_INIT
2703 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2705 #undef TARGET_HARD_REGNO_MODE_OK
2706 #define TARGET_HARD_REGNO_MODE_OK xstormy16_hard_regno_mode_ok
2708 struct gcc_target targetm = TARGET_INITIALIZER;
2710 #include "gt-stormy16.h"