PR target/16201
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
blobec40cd916b25fe3ded0d2c75058decf99de20451
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "tree.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "except.h"
44 #include "function.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm_p.h"
48 #include "langhooks.h"
49 #include "tree-gimple.h"
/* Forward declarations of static helpers and target-hook
   implementations defined later in this file.  */
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);
static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int *);
static int xstormy16_address_cost (rtx);
static bool xstormy16_return_in_memory (tree, tree);
/* Define the information needed to generate branch and scc insns.  This is
   stored from the compare operation.  These globals hold the pending
   comparison operands; they are set by the cmphi/cmpsi expanders and
   consumed by xstormy16_emit_cbranch.  */
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;
68 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */
70 int
71 xstormy16_ineqsi_operator (register rtx op, enum machine_mode mode)
73 enum rtx_code code = GET_CODE (op);
75 return ((mode == VOIDmode || GET_MODE (op) == mode)
76 && (code == LT || code == GE || code == LTU || code == GEU));
79 /* Return 1 if this is an EQ or NE operator. */
81 int
82 equality_operator (register rtx op, enum machine_mode mode)
84 return ((mode == VOIDmode || GET_MODE (op) == mode)
85 && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
88 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */
90 int
91 inequality_operator (register rtx op, enum machine_mode mode)
93 return comparison_operator (op, mode) && ! equality_operator (op, mode);
96 /* Compute a (partial) cost for rtx X. Return true if the complete
97 cost has been computed, and false if subexpressions should be
98 scanned. In either case, *TOTAL contains the cost result. */
100 static bool
101 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
102 int *total)
104 switch (code)
106 case CONST_INT:
107 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
108 *total = COSTS_N_INSNS (1) / 2;
109 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
110 *total = COSTS_N_INSNS (1);
111 else
112 *total = COSTS_N_INSNS (2);
113 return true;
115 case CONST_DOUBLE:
116 case CONST:
117 case SYMBOL_REF:
118 case LABEL_REF:
119 *total = COSTS_N_INSNS(2);
120 return true;
122 case MULT:
123 *total = COSTS_N_INSNS (35 + 6);
124 return true;
125 case DIV:
126 *total = COSTS_N_INSNS (51 - 6);
127 return true;
129 default:
130 return false;
134 static int
135 xstormy16_address_cost (rtx x)
137 return (GET_CODE (x) == CONST_INT ? 2
138 : GET_CODE (x) == PLUS ? 7
139 : 5);
142 /* Branches are handled as follows:
144 1. HImode compare-and-branches. The machine supports these
145 natively, so the appropriate pattern is emitted directly.
147 2. SImode EQ and NE. These are emitted as pairs of HImode
148 compare-and-branches.
150 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
151 of a SImode subtract followed by a branch (not a compare-and-branch),
152 like this:
157 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
/* Emit a branch of kind CODE to location LOC.  The operands being
   compared are taken from the xstormy16_compare_op0/op1 globals set
   by the compare expanders.  HImode comparisons are emitted directly;
   SImode ones are decomposed as described in the comment above.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx op0 = xstormy16_compare_op0;
  rtx op1 = xstormy16_compare_op1;
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  if (mode != HImode && mode != SImode)
    abort ();

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      /* Case 4: decompose into an LT/LTU branch followed by an NE/EQ
	 branch, recursing into this function for each piece.  */
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      /* Case 2: compare word by word using HImode
	 compare-and-branches.  */
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
	}
      /* The final word uses the requested comparison code itself.  */
      xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						   i * UNITS_PER_WORD);
      xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						   i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* All these branch patterns clobber the carry flag.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
			 rtx dest, rtx carry)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Build the subtract/compare sequence in a detached sequence so we
     can locate and patch its last insn before emitting.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
  seq = get_insns ();
  end_sequence ();

  if (! INSN_P (seq))
    abort ();

  /* Walk forward to the final insn of the sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite that insn's comparison to use the caller's code and to
     branch to LABEL.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   NOTE: the result points into a static buffer and is only valid
   until the next call.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* The insn's length attribute tells us whether the target is out of
     range of a short branch.  */
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  /* A long branch is emitted as an inverted short branch around a
     jmpf, so the tested condition must be reversed again.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      /* Unconditional branch.  */
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* If operand 0 is not a register, swap the operands and the
     condition so the register comes first.  */
  if (GET_CODE (XEXP (op, 0)) != REG)
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";   break;
    case NE:  ccode = "nz";  break;
    case GE:  ccode = "ge";  break;
    case LT:  ccode = "lt";  break;
    case GT:  ccode = "gt";  break;
    case LE:  ccode = "le";  break;
    case GEU: ccode = "nc";  break;
    case LTU: ccode = "c";   break;
    case GTU: ccode = "hi";  break;
    case LEU: ccode = "ls";  break;

    default:
      abort ();
    }

  if (need_longbranch)
    template = "b%s %s,.+8 | jmpf %s";
  else
    template = "b%s %s,%s";
  sprintf (string, template, ccode, operands, label);

  return string;
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   NOTE: the result points into a static buffer and is only valid
   until the next call.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  /* A long branch is emitted as an inverted short branch around a
     jmpf, so the tested condition must be reversed again.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";   break;
    case NE:  ccode = "nz";  break;
    case GE:  ccode = "ge";  break;
    case LT:  ccode = "lt";  break;
    case GEU: ccode = "nc";  break;
    case LTU: ccode = "c";   break;

    /* The missing codes above should never be generated.  */
    default:
      abort ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
	/* OR the two halves together so the Z flag reflects the full
	   32-bit value.  */
	int regnum;

	if (GET_CODE (XEXP (op, 0)) != REG)
	  abort ();

	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Subtract the high words with borrow to set the flags.  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      abort ();
    }

  if (need_longbranch)
    template = "%s | b%s .+6 | jmpf %s";
  else
    template = "%s | b%s %s";
  sprintf (string, template, prevop, ccode, label);

  return string;
}
439 /* Many machines have some registers that cannot be copied directly to or from
440 memory or even from other types of registers. An example is the `MQ'
441 register, which on most machines, can only be copied to or from general
442 registers, but not memory. Some machines allow copying all registers to and
443 from memory, but require a scratch register for stores to some memory
444 locations (e.g., those with symbolic address on the RT, and those with
445 certain symbolic address on the SPARC when compiling PIC). In some cases,
446 both an intermediate and a scratch register are required.
448 You should define these macros to indicate to the reload phase that it may
449 need to allocate at least one register for a reload in addition to the
450 register to contain the data. Specifically, if copying X to a register
451 CLASS in MODE requires an intermediate register, you should define
452 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
453 whose registers can be used as intermediate registers or scratch registers.
455 If copying a register CLASS in MODE to X requires an intermediate or scratch
456 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
457 largest register class required. If the requirements for input and output
458 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
459 instead of defining both macros identically.
461 The values returned by these macros are often `GENERAL_REGS'. Return
462 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
463 to or from a register of CLASS in MODE without requiring a scratch register.
464 Do not define this macro if it would always return `NO_REGS'.
466 If a scratch register is required (either with or without an intermediate
467 register), you should define patterns for `reload_inM' or `reload_outM', as
468 required.. These patterns, which will normally be implemented with a
469 `define_expand', should be similar to the `movM' patterns, except that
470 operand 2 is the scratch register.
472 Define constraints for the reload register and scratch register that contain
473 a single register class. If the original reload register (whose class is
474 CLASS) can meet the constraint given in the pattern, the value returned by
475 these macros is used for the class of the scratch register. Otherwise, two
476 additional reload registers are required. Their classes are obtained from
477 the constraints in the insn pattern.
479 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
480 either be in a hard register or in memory. Use `true_regnum' to find out;
481 it will return -1 if the pseudo is in memory and the hard register number if
482 it is in a register.
484 These macros should not be used in the case where a particular class of
485 registers can only be copied to memory and not to another class of
486 registers. In that case, secondary reload registers are not needed and
487 would not be helpful. Instead, a stack location must be used to perform the
488 copy and the `movM' pattern should use memory as an intermediate storage.
489 This case often occurs between floating-point and general registers. */
/* Implement SECONDARY_RELOAD_CLASS (see the comment above): return the
   class of register needed as an intermediary when copying X to or
   from a register of CLASS in MODE, or NO_REGS if none is needed.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class class,
				  enum machine_mode mode,
				  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((GET_CODE (x) == MEM
       || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
	   && (true_regnum (x) == -1
	       || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (class, EIGHT_REGS))
    return EIGHT_REGS;

  /* When reloading a PLUS, the carry register will be required
     unless the inc or dec instructions can be used.  */
  if (xstormy16_carry_plus_operand (x, mode))
    return CARRY_REGS;

  return NO_REGS;
}
513 /* Recognize a PLUS that needs the carry register. */
515 xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
517 return (GET_CODE (x) == PLUS
518 && GET_CODE (XEXP (x, 1)) == CONST_INT
519 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
522 /* Detect and error out on out-of-range constants for movhi. */
524 xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
526 if ((GET_CODE (x) == CONST_INT)
527 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
528 error ("Constant halfword load operand out of range.");
529 return general_operand (x, mode);
532 /* Detect and error out on out-of-range constants for addhi and subhi. */
534 xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
536 if ((GET_CODE (x) == CONST_INT)
537 && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
538 error ("Constant arithmetic operand out of range.");
539 return nonmemory_operand (x, mode);
542 enum reg_class
543 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
545 if (class == GENERAL_REGS
546 && GET_CODE (x) == MEM)
547 return EIGHT_REGS;
549 return class;
552 /* Predicate for symbols and addresses that reflect special 8-bit
553 addressing. */
555 xstormy16_below100_symbol (rtx x,
556 enum machine_mode mode ATTRIBUTE_UNUSED)
558 if (GET_CODE (x) == CONST)
559 x = XEXP (x, 0);
560 if (GET_CODE (x) == PLUS
561 && GET_CODE (XEXP (x, 1)) == CONST_INT)
562 x = XEXP (x, 0);
563 if (GET_CODE (x) == SYMBOL_REF)
565 const char *n = XSTR (x, 0);
566 if (n[0] == '@' && n[1] == 'b' && n[2] == '.')
567 return 1;
569 if (GET_CODE (x) == CONST_INT)
571 HOST_WIDE_INT i = INTVAL (x);
572 if ((i >= 0x0000 && i <= 0x00ff)
573 || (i >= 0x7f00 && i <= 0x7fff))
574 return 1;
576 return 0;
/* Predicate for MEMs that can use special 8-bit addressing.  */

int
xstormy16_below100_operand (rtx x, enum machine_mode mode)
{
  if (GET_MODE (x) != mode)
    return 0;
  if (GET_CODE (x) == MEM)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == SUBREG
	   && GET_CODE (XEXP (x, 0)) == MEM
	   && !MEM_VOLATILE_P (XEXP (x, 0)))
    x = XEXP (XEXP (x, 0), 0);
  else
    return 0;
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT i = INTVAL (x);
      /* NOTE(review): this excludes address 0x7fff while
	 xstormy16_below100_symbol accepts it (and accepts 0x0000-0x00ff
	 which this rejects) — confirm whether the asymmetry is
	 intentional.  */
      return (i >= 0x7f00 && i < 0x7fff);
    }
  return xstormy16_below100_symbol (x, HImode);
}
601 /* Likewise, but only for non-volatile MEMs, for patterns where the
602 MEM will get split into smaller sized accesses. */
604 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
606 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
607 return 0;
608 return xstormy16_below100_operand (x, mode);
612 xstormy16_below100_or_register (rtx x, enum machine_mode mode)
614 return (xstormy16_below100_operand (x, mode)
615 || register_operand (x, mode));
619 xstormy16_splittable_below100_or_register (rtx x, enum machine_mode mode)
621 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
622 return 0;
623 return (xstormy16_below100_operand (x, mode)
624 || register_operand (x, mode));
627 /* Predicate for constants with exactly one bit set. */
629 xstormy16_onebit_set_operand (rtx x, enum machine_mode mode)
631 HOST_WIDE_INT i;
632 if (GET_CODE (x) != CONST_INT)
633 return 0;
634 i = INTVAL (x);
635 if (mode == QImode)
636 i &= 0xff;
637 if (mode == HImode)
638 i &= 0xffff;
639 return exact_log2 (i) != -1;
642 /* Predicate for constants with exactly one bit not set. */
644 xstormy16_onebit_clr_operand (rtx x, enum machine_mode mode)
646 HOST_WIDE_INT i;
647 if (GET_CODE (x) != CONST_INT)
648 return 0;
649 i = ~ INTVAL (x);
650 if (mode == QImode)
651 i &= 0xff;
652 if (mode == HImode)
653 i &= 0xffff;
654 return exact_log2 (i) != -1;
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do (setting a single bit), or uses a 16-bit IOR on
   paradoxical subregs.  OPERANDS are dest, source, and the value to
   OR in, all QImode.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* A single-bit set can be done directly in QImode.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* Otherwise force the operands into registers (constants may stay),
     widen to HImode via subregs, and use the HImode IOR.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
/* Likewise, for AND: either clear a single bit directly in QImode,
   or fall back to a 16-bit AND on paradoxical subregs.  Mirrors
   xstormy16_expand_iorqi3.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* A single-bit clear can be done directly in QImode.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* Otherwise force the operands into registers (constants may stay),
     widen to HImode via subregs, and use the HImode AND.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
/* True iff X is a CONST_INT that, after adding OFFSET, fits the 12-bit
   signed displacement range -2048 .. 2047.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
 (GET_CODE (X) == CONST_INT						\
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True iff X + OFFSET is a valid absolute address: in 0x0000 .. 0x00FF
   or 0x7F00 .. 0x7FFF.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
 (GET_CODE (X) == CONST_INT						\
  && INTVAL (X) + (OFFSET) >= 0						\
  && INTVAL (X) + (OFFSET) < 0x8000					\
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Return nonzero if X is a valid memory address.  When STRICT, only
   hard registers are acceptable as base registers.  */

int
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, int strict)
{
  /* An absolute address in the allowed constant ranges.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return 1;

  /* Base + 12-bit signed displacement: strip the displacement and
     validate the base below.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    x = XEXP (x, 0);

  /* Auto-modified addresses: validate the base register.  */
  if ((GET_CODE (x) == PRE_MODIFY
       && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return 1;

  /* Below-100 symbols are valid 8-bit absolute addresses.  */
  if (xstormy16_below100_symbol(x, mode))
    return 1;

  return 0;
}
/* Return nonzero if memory address X (an RTX) can have different
   meanings depending on the machine mode of the memory reference it
   is used for or if the address is valid for some modes but not
   others.

   Autoincrement and autodecrement addresses typically have mode-dependent
   effects because the amount of the increment or decrement is the size of the
   operand being addressed.  Some machines have other mode-dependent addresses.
   Many RISC machines have no mode-dependent addresses.

   You may assume that ADDR is a valid address for the machine.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

int
xstormy16_mode_dependent_address_p (rtx x)
{
  /* Absolute address valid at offset 0 but not at offset 6 (the
     highest word offset of a DImode/DFmode access).  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return 1;

  /* Likewise for a base + displacement address.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return 1;

  if (GET_CODE (x) == PLUS)
    x = XEXP (x, 0);

  /* Auto-modification steps by the access size, so it is always
     mode-dependent.  */
  if (GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    return 1;

  return 0;
}
/* A C expression that defines the optional machine-dependent constraint
   letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
   types of operands, usually memory references, for the target machine.
   Normally this macro will not be defined.  If it is required for a particular
   target machine, it should return 1 if VALUE corresponds to the operand type
   represented by the constraint letter C.  If C is not defined as an extra
   constraint, the value returned should be 0 regardless of VALUE.  */

int
xstormy16_extra_constraint_p (rtx x, int c)
{
  switch (c)
    {
    /* 'Q' is for pushes.  */
    case 'Q':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == POST_INC
	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

    /* 'R' is for pops.  */
    case 'R':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == PRE_DEC
	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

    /* 'S' is for immediate memory addresses.  */
    case 'S':
      return (GET_CODE (x) == MEM
	      && GET_CODE (XEXP (x, 0)) == CONST_INT
	      && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));

    /* 'T' is for Rx.  */
    case 'T':
      /* Not implemented yet.  */
      return 0;

    /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
       for allocating a scratch register for 32-bit shifts.  */
    case 'U':
      return (GET_CODE (x) == CONST_INT
	      && (INTVAL (x) < 2 || INTVAL (x) > 15));

    /* 'Z' is for CONST_INT value zero.  This is for adding zero to
       a register in addhi3, which would otherwise require a carry.  */
    case 'Z':
      return (GET_CODE (x) == CONST_INT
	      && (INTVAL (x) == 0));

    /* 'W' is for below-100 memory operands.  */
    case 'W':
      return xstormy16_below100_operand(x, GET_MODE(x));

    default:
      return 0;
    }
}
871 short_memory_operand (rtx x, enum machine_mode mode)
873 if (! memory_operand (x, mode))
874 return 0;
875 return (GET_CODE (XEXP (x, 0)) != PLUS);
879 nonimmediate_nonstack_operand (rtx op, enum machine_mode mode)
881 /* 'Q' is for pushes, 'R' for pops. */
882 return (nonimmediate_operand (op, mode)
883 && ! xstormy16_extra_constraint_p (op, 'Q')
884 && ! xstormy16_extra_constraint_p (op, 'R'));
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST, one word at a time.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  if (! reload_completed
      || mode == QImode || mode == HImode
      || ! nonimmediate_operand (dest, mode)
      || ! general_operand (src, mode))
    abort ();

  /* This case is not supported below, and shouldn't be generated.  */
  if (GET_CODE (dest) == MEM
      && GET_CODE (src) == MEM)
    abort ();

  /* This case is very very bad after reload, so trap it now.  */
  if (GET_CODE (dest) == SUBREG
      || GET_CODE (src) == SUBREG)
    abort ();

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (GET_CODE (dest) == MEM)
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  /* Strip the volatile flag here; it is re-applied to each
	     per-word MEM in the loop below.  */
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (GET_CODE (src) == MEM)
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register overlap: copy high-to-low when the
	 destination register number is above the source's.  */
      if (GET_CODE (src) == REG
	  && GET_CODE (dest) == REG
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addresses step downwards, so write the
       most-significant word first.  */
    direction = -1;
  else if (GET_CODE (src) == MEM
	   && reg_overlap_mentioned_p (dest, src))
    {
      /* Loading into a register that appears in the source address:
	 pick the direction that reads through the overlapping
	 register before clobbering it.  */
      int regno;
      if (GET_CODE (dest) != REG)
	abort ();
      regno = REGNO (dest);

      if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
	abort ();

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	abort ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* An auto-modified address re-uses the same MEM for each word;
	 otherwise take the i'th word as a subreg.  */
      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      if (GET_CODE (w_src) == SUBREG
	  || GET_CODE (w_dest) == SUBREG)
	abort ();

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  /* A PRE_MODIFY destination: emit the address update separately
     (it clobbers the carry, hard register 16) and rewrite DEST as a
     plain MEM on the updated base register.  */
  if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source.  */
  else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && GET_CODE (dest) == MEM
      && (GET_CODE (XEXP (dest, 0)) != CONST_INT
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && GET_CODE (src) != REG
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
1082 /* Stack Layout:
1084 The stack is laid out as follows:
1086 SP->
1087 FP-> Local variables
1088 Register save area (up to 4 words)
1089 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
1091 AP-> Return address (two words)
1092 9th procedure parameter word
1093 10th procedure parameter word
1095 last procedure parameter word
1097 The frame pointer location is tuned to make it most likely that all
1098 parameters and local variables can be accessed using a load-indexed
1099 instruction. */
/* A structure to describe the layout of the current function's stack
   frame, as computed by xstormy16_compute_stack_layout.  All sizes
   and offsets are in bytes.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;		/* Local variables.  */
  int register_save_size;	/* Saved call-saved registers.  */
  int stdarg_save_size;		/* Saved argument registers (stdarg only).  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;		/* Stack pointer relative to frame pointer.  */
  int fp_minus_ap;		/* Frame pointer relative to arg pointer.  */
};
/* Does REGNO need to be saved?  True for call-saved registers that
   have been used, and — in an interrupt function (IFUN nonzero) — for
   non-fixed call-used registers (other than the carry, which is never
   live across an interrupt) that this function or a callee might
   clobber.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM])			\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)			\
       && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
/* Compute the stack layout (see the diagram above) for the current
   function, returning it by value.  */
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word per register that must be pushed in the prologue.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* varargs functions dump all the argument registers on the stack.  */
  if (current_function_stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place the frame pointer so that frame slots are reachable; 2048
     is presumably the reach of the load-indexed addressing mode —
     TODO confirm against the ISA.  */
  if (current_function_args_size <= 2048 && current_function_args_size != -1)
    {
      if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
	  + current_function_args_size <= 2048)
	layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - current_function_args_size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  + INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
1165 /* Determine how all the special registers get eliminated. */
1167 xstormy16_initial_elimination_offset (int from, int to)
1169 struct xstormy16_stack_layout layout;
1170 int result;
1172 layout = xstormy16_compute_stack_layout ();
1174 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1175 result = layout.sp_minus_fp - layout.locals_size;
1176 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1177 result = -layout.locals_size;
1178 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1179 result = -layout.fp_minus_ap;
1180 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1181 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1182 else
1183 abort ();
1185 return result;
1188 static rtx
1189 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1191 rtx set, clobber, insn;
1193 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1194 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1195 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1196 return insn;
1199 /* Called after register allocation to add any instructions needed for
1200 the prologue. Using a prologue insn is favored compared to putting
1201 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1202 since it allows the scheduler to intermix instructions with the
1203 saves of the caller saved registers. In some cases, it might be
1204 necessary to emit a barrier instruction as the last insn to prevent
1205 such scheduling.
1207 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1208 so that the debug info generation code can handle them properly. */
1209 void
1210 xstormy16_expand_prologue (void)
1212 struct xstormy16_stack_layout layout;
1213 int regno;
1214 rtx insn;
1215 rtx mem_push_rtx;
1216 const int ifun = xstormy16_interrupt_function_p ();
1218 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1219 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1221 layout = xstormy16_compute_stack_layout ();
1223 if (layout.locals_size >= 32768)
1224 error ("Local variable memory requirements exceed capacity.");
1226 /* Save the argument registers if necessary. */
1227 if (layout.stdarg_save_size)
1228 for (regno = FIRST_ARGUMENT_REGISTER;
1229 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1230 regno++)
1232 rtx dwarf;
1233 rtx reg = gen_rtx_REG (HImode, regno);
1235 insn = emit_move_insn (mem_push_rtx, reg);
1236 RTX_FRAME_RELATED_P (insn) = 1;
1238 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1240 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1241 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1242 reg);
1243 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1244 plus_constant (stack_pointer_rtx,
1245 GET_MODE_SIZE (Pmode)));
1246 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1247 dwarf,
1248 REG_NOTES (insn));
1249 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1250 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1253 /* Push each of the registers to save. */
1254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1255 if (REG_NEEDS_SAVE (regno, ifun))
1257 rtx dwarf;
1258 rtx reg = gen_rtx_REG (HImode, regno);
1260 insn = emit_move_insn (mem_push_rtx, reg);
1261 RTX_FRAME_RELATED_P (insn) = 1;
1263 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1265 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1266 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1267 reg);
1268 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1269 plus_constant (stack_pointer_rtx,
1270 GET_MODE_SIZE (Pmode)));
1271 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1272 dwarf,
1273 REG_NOTES (insn));
1274 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1275 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1278 /* It's just possible that the SP here might be what we need for
1279 the new FP... */
1280 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1281 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1283 /* Allocate space for local variables. */
1284 if (layout.locals_size)
1286 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1287 GEN_INT (layout.locals_size));
1288 RTX_FRAME_RELATED_P (insn) = 1;
1291 /* Set up the frame pointer, if required. */
1292 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1294 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1296 if (layout.sp_minus_fp)
1297 emit_addhi3_postreload (hard_frame_pointer_rtx,
1298 hard_frame_pointer_rtx,
1299 GEN_INT (-layout.sp_minus_fp));
1303 /* Do we need an epilogue at all? */
1305 direct_return (void)
1307 return (reload_completed
1308 && xstormy16_compute_stack_layout ().frame_size == 0);
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */
void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (pre_dec (sp))) — the hardware pop source.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* If the FP still points just past the locals, restoring SP
	 from it is cheaper than an addition.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	{
	  insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
					 GEN_INT (- layout.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }

  /* Restore any call-saved registers, in reverse order of the
     prologue's pushes.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;

	insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
	RTX_FRAME_RELATED_P (insn) = 1;
	/* Tell the unwinder SP moved back by one word.  */
	dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
			     plus_constant (stack_pointer_rtx,
					    -GET_MODE_SIZE (Pmode)));
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					      dwarf,
					      REG_NOTES (insn));
      }

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (- layout.stdarg_save_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
/* Nonzero if the epilogue uses register REGNO: after reload, any
   call-used register that the prologue had to save (e.g. in an
   interrupt function) is restored and therefore used here.  */
int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_regs[regno])
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}
/* Profiling (-p/-pg) is not implemented for this target; report that
   to the user via sorry().  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
/* Return an updated summarizer variable CUM to advance past an
   argument in the argument list.  The values MODE, TYPE and NAMED
   describe that argument.  Once this is done, the variable CUM is
   suitable for analyzing the *following* argument with
   `FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */
CUMULATIVE_ARGS
xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
				tree type, int named ATTRIBUTE_UNUSED)
{
  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (cum < NUM_ARGUMENT_REGISTERS
      && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    cum = NUM_ARGUMENT_REGISTERS;

  cum += XSTORMY16_WORD_SIZE (type, mode);

  return cum;
}
/* Worker for FUNCTION_ARG: return the register in which to pass this
   argument, or NULL (0) if it goes on the stack.  CUM counts argument
   words already used.  */
rtx
xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
			tree type, int named ATTRIBUTE_UNUSED)
{
  /* The end-of-arguments marker.  */
  if (mode == VOIDmode)
    return const0_rtx;
  /* Arguments never split between registers and stack.  */
  if (targetm.calls.must_pass_in_stack (mode, type)
      || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return 0;
  /* The +2 maps the word count onto a hard register number —
     presumably FIRST_ARGUMENT_REGISTER is 2; TODO confirm.  */
  return gen_rtx_REG (mode, cum + 2);
}
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Field 1: stack position of unpassed arguments.  */
  f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
		    ptr_type_node);
  /* Field 2: number of argument bytes consumed so far.  */
  f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
		    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  TREE_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */
void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The two fields built by xstormy16_build_builtin_va_list, in
     order: 'base', then 'count'.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		 NULL_TREE);

  /* base = incoming args pointer + INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (base), t,
	     build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
  t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of named arguments already consumed.  */
  t = build (MODIFY_EXPR, TREE_TYPE (count), count,
	     build_int_cst (NULL_TREE,
			    current_function_args_info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note: This algorithm is documented in stormy-abi.  */

static tree
xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
				 tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* Fields of the va_list record: 'base', then 'count'.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		 NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument size rounded up to a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* If count + size > size_of_reg_args, the argument lives on
	 the stack: jump to lab_fromstack.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build (GT_EXPR, boolean_type_node, t, r);
      t = build (COND_EXPR, void_type_node, t,
		 build (GOTO_EXPR, void_type_node, lab_fromstack),
		 NULL);
      gimplify_and_add (t, pre_p);

      /* Register case: addr = base + count.  */
      t = fold_convert (ptr_type_node, count_tmp);
      t = build (PLUS_EXPR, ptr_type_node, base, t);
      t = build (MODIFY_EXPR, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* If count_tmp < size_of_reg_args, bump it to size_of_reg_args
	 so the address below falls past the register-save area.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build (MODIFY_EXPR, void_type_node, count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build (COND_EXPR, void_type_node, t, NULL, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case:
     addr = base - (count_tmp - (reg area - SP offset) + size).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		- INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build (PLUS_EXPR, TREE_TYPE (count), t,
	     fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (base), fold (t));
  t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
  t = build (MODIFY_EXPR, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Commit the advanced counter back into the va_list.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build (MODIFY_EXPR, TREE_TYPE (count), count, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_fold_indirect_ref (addr);
}
/* Initialize the variable parts of a trampoline.  ADDR is an RTX for
   the address of the trampoline; FNADDR is an RTX for the address of
   the nested function; STATIC_CHAIN is an RTX for the static chain
   value that should be passed to the function when it is called.

   Four HImode words are written at ADDR: a mov-immediate opcode
   (0x3130 | static-chain regnum), the static chain value, then the
   target address split into low byte (with 0x0200 merged in) and
   high byte — the exact instruction encodings are per the xstormy16
   ISA; TODO confirm against the architecture manual.  */
void
xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  rtx reg_addr = gen_reg_rtx (Pmode);
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (reg_fnaddr, fnaddr);
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
/* Worker function for FUNCTION_VALUE: return values live in
   RETURN_VALUE_REGNUM, in VALTYPE's mode after promotion.  */
rtx
xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  mode = TYPE_MODE (valtype);
  /* Small integer return values are promoted, matching PROMOTE_MODE
     for arguments.  */
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust 'this' (mask to 16 bits for the assembler), then tail-jump
     to the real function.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */
void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl ATTRIBUTE_UNUSED,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  /* Names encoded as "@X." by xstormy16_encode_section_info.  */
  if (name[0] == '@' && name[2] == '.')
    {
      const char *op = 0;
      switch (name[1])
	{
	case 'b':
	  /* below-100 BSS object.  */
	  bss100_section();
	  op = "space";
	  break;
	}
      if (op)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment into a power-of-two byte
	     alignment for .p2align.  */
	  while (align > 8)
	    {
	      align /= 2;
	      p2align ++;
	    }

	  name2 = xstormy16_strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  if (p2align)
	    fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s, @object\n", name2);
	  fprintf (stream, "\t.size\t%s, %d\n", name2, size);
	  fprintf (stream, "%s:\n\t.%s\t%d\n", name2, op, size);
	  return;
	}
    }

  /* Ordinary object: fall back to .local/.comm.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  Such variables get an "@b."
   prefix on their symbol name (undone by
   xstormy16_strip_name_encoding), and initialized ones are forced
   into the .data_below100 section.  */

static void
xstormy16_encode_section_info (tree decl,
			       rtx r,
			       int first ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
	  || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      const char *newsection = 0;
      char *newname;
      tree idp;
      rtx rtlname, rtl;
      const char *oldname;

      rtl = r;
      rtlname = XEXP (rtl, 0);
      if (GET_CODE (rtlname) == SYMBOL_REF)
	oldname = XSTR (rtlname, 0);
      else if (GET_CODE (rtlname) == MEM
	       && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
	oldname = XSTR (XEXP (rtlname, 0), 0);
      else
	abort ();

      /* Only initialized data changes section; BSS placement is
	 handled in xstormy16_asm_output_aligned_common.  */
      if (DECL_INITIAL (decl))
	{
	  newsection = ".data_below100";
	  DECL_SECTION_NAME (decl) = build_string (strlen (newsection), newsection);
	}

      /* Prefix the symbol with "@b." ('@', one flag char, '.').  */
      newname = alloca (strlen (oldname) + 4);
      sprintf (newname, "@b.%s", oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
    }
}
/* Undo the name encodings applied above: skip any leading "@X."
   markers (added by xstormy16_encode_section_info) and any '*'
   markers, returning a pointer into NAME at the real symbol name.  */
const char *
xstormy16_strip_name_encoding (const char *name)
{
  for (;;)
    {
      if (name[0] == '*')
	{
	  name++;
	  continue;
	}
      if (name[0] == '@' && name[2] == '.')
	{
	  name += 3;
	  continue;
	}
      return name;
    }
}
1800 /* Output constructors and destructors. Just like
1801 default_named_section_asm_out_* but don't set the sections writable. */
1802 #undef TARGET_ASM_CONSTRUCTOR
1803 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1804 #undef TARGET_ASM_DESTRUCTOR
1805 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
/* Emit a .dtors (or priority-specific .dtors.NNNNN) entry pointing at
   SYMBOL.  Like default_named_section_asm_out_destructor but the
   section is not marked writable.  */
static void
xstormy16_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, 0);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
/* Emit a .ctors (or priority-specific .ctors.NNNNN) entry pointing at
   SYMBOL; mirror image of xstormy16_asm_out_destructor.  */
static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, 0);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
/* Print a memory address as an operand to reference that memory location.
   Handles constant addresses, plain registers, and registers with
   pre-decrement ("(--rN)"), post-increment ("(rN++)") and/or a
   constant offset ("(rN,OFFSET)").  */
void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (GET_CODE (address) == CONST_INT)
    {
      /* Addresses are 16 bits wide; print only the low 16 bits.  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
  */

  if (GET_CODE (address) == PLUS)
    {
      if (GET_CODE (XEXP (address, 1)) != CONST_INT)
	abort ();
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  /* Whatever remains must be a bare register.  */
  if (GET_CODE (address) != REG)
    abort ();

  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
1904 /* Print an operand to an assembler instruction. */
1905 void
1906 xstormy16_print_operand (FILE *file, rtx x, int code)
1908 switch (code)
1910 case 'B':
1911 /* There is either one bit set, or one bit clear, in X.
1912 Print it preceded by '#'. */
1914 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1915 HOST_WIDE_INT xx = 1;
1916 HOST_WIDE_INT l;
1918 if (GET_CODE (x) == CONST_INT)
1919 xx = INTVAL (x);
1920 else
1921 output_operand_lossage ("'B' operand is not constant");
1923 /* GCC sign-extends masks with the MSB set, so we have to
1924 detect all the cases that differ only in sign extension
1925 beyond the bits we care about. Normally, the predicates
1926 and constraints ensure that we have the right values. This
1927 works correctly for valid masks. */
1928 if (bits_set[xx & 7] <= 1)
1930 /* Remove sign extension bits. */
1931 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1932 xx &= 0xff;
1933 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1934 xx &= 0xffff;
1935 l = exact_log2 (xx);
1937 else
1939 /* Add sign extension bits. */
1940 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1941 xx |= ~(HOST_WIDE_INT)0xff;
1942 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1943 xx |= ~(HOST_WIDE_INT)0xffff;
1944 l = exact_log2 (~xx);
1947 if (l == -1)
1948 output_operand_lossage ("'B' operand has multiple bits set");
1950 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1951 return;
1954 case 'C':
1955 /* Print the symbol without a surrounding @fptr(). */
1956 if (GET_CODE (x) == SYMBOL_REF)
1957 assemble_name (file, XSTR (x, 0));
1958 else if (GET_CODE (x) == LABEL_REF)
1959 output_asm_label (x);
1960 else
1961 xstormy16_print_operand_address (file, x);
1962 return;
1964 case 'o':
1965 case 'O':
1966 /* Print the immediate operand less one, preceded by '#'.
1967 For 'O', negate it first. */
1969 HOST_WIDE_INT xx = 0;
1971 if (GET_CODE (x) == CONST_INT)
1972 xx = INTVAL (x);
1973 else
1974 output_operand_lossage ("'o' operand is not constant");
1976 if (code == 'O')
1977 xx = -xx;
1979 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1980 return;
1983 case 'b':
1984 /* Print the shift mask for bp/bn. */
1986 HOST_WIDE_INT xx = 1;
1987 HOST_WIDE_INT l;
1989 if (GET_CODE (x) == CONST_INT)
1990 xx = INTVAL (x);
1991 else
1992 output_operand_lossage ("'B' operand is not constant");
1994 l = 7 - xx;
1996 fputs (IMMEDIATE_PREFIX, file);
1997 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1998 return;
2001 case 0:
2002 /* Handled below. */
2003 break;
2005 default:
2006 output_operand_lossage ("xstormy16_print_operand: unknown code");
2007 return;
2010 switch (GET_CODE (x))
2012 case REG:
2013 fputs (reg_names [REGNO (x)], file);
2014 break;
2016 case MEM:
2017 xstormy16_print_operand_address (file, XEXP (x, 0));
2018 break;
2020 default:
2021 /* Some kind of constant or label; an immediate operand,
2022 so prefix it with '#' for the assembler. */
2023 fputs (IMMEDIATE_PREFIX, file);
2024 output_addr_const (file, x);
2025 break;
2028 return;
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND+RANGE-1.
*/

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Normalize to a zero-based index and branch to the default label
     when out of range (unsigned compare catches negatives too).  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  /* Scale by 4: each table entry emitted by
     xstormy16_output_addr_vec is a 4-byte 'jmpf'.  */
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
   instructions, without label or alignment or any other special
   constructs.  We know that the previous instruction will be the
   `tablejump_pcrel' output above.

   TODO: it might be nice to output 'br' instructions if they could
   all reach.  */

void
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
{
  int vlen, idx;

  /* Make sure the table lands in the same section as the code.  */
  function_section (current_function_decl);

  vlen = XVECLEN (table, 0);
  for (idx = 0; idx < vlen; idx++)
    {
      fputs ("\tjmpf ", file);
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
      fputc ('\n', file);
    }
}
/* Expander for the `call' patterns.
   (The previous comment here was a copy-paste of the casesi one and
   described the wrong parameters.)
   RETVAL is the register that receives the result, or NULL for a call
     with no value.
   DEST is the MEM naming the function to call.
   COUNTER is the argument-count operand attached to the CALL rtx.
*/

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  if (GET_CODE (dest) != MEM)
    abort ();
  dest = XEXP (dest, 0);

  /* Indirect calls go through a register.  */
  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* For a non-constant target, attach a USE of a zeroed scratch
     register to the call parallel.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).

   The operands are processed one word at a time, low word first,
   chaining the carry through the addchi/subchi patterns.  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1, rtx carry)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is computed as 0 - SRC1 by clearing SRC0 first.
     NOTE(review): this writes through SRC0 — presumably the md
     expander passes a scratch here; confirm against the negation
     patterns in stormy16.md.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Word-sized (HImode) views of the i-th word of each operand.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding zero in the first word is a no-op; skip it (the
	     carry has not been set up yet, so this is safe only on
	     the first iteration).  */
	  if (firstloop
	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 sets the carry; addchi5 also consumes it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE the last word is emitted as a combined
	     subtract-and-branch template so xstormy16_split_cbranch
	     need not duplicate this logic.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      /* w_src0 - borrow (carry zero-extended into HImode).  */
	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, carry));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, carry);
	      /* Branch condition is a placeholder (pc/pc); the real
		 labels are filled in when the cbranch is split.  */
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  /* Subtracting zero in the first word is a no-op — but not
	     for COMPARE, which must still produce the borrow.  */
	  else if (firstloop
		   && code != COMPARE
		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip words where the operation is an identity:
	     OR/XOR with 0, AND with -1.  */
	  if (GET_CODE (w_src1) == CONST_INT
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  /* NOTE(review): the word-sized operands are combined under
	     MODE (the full multiword mode) rather than word_mode —
	     confirm the matching md patterns accept this.  */
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  abort ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2241 /* Return 1 if OP is a shift operator. */
2244 shift_operator (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2246 enum rtx_code code = GET_CODE (op);
2248 return (code == ASHIFT
2249 || code == ASHIFTRT
2250 || code == LSHIFTRT);
2253 /* The shift operations are split at output time for constant values;
2254 variable-width shifts get handed off to a library routine.
2256 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2257 SIZE_R will be a CONST_INT, X will be a hard register. */
2259 const char *
2260 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2261 rtx x, rtx size_r, rtx temp)
2263 HOST_WIDE_INT size;
2264 const char *r0, *r1, *rt;
2265 static char r[64];
2267 if (GET_CODE (size_r) != CONST_INT
2268 || GET_CODE (x) != REG
2269 || mode != SImode)
2270 abort ();
2271 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2273 if (size == 0)
2274 return "";
2276 r0 = reg_names [REGNO (x)];
2277 r1 = reg_names [REGNO (x) + 1];
2279 /* For shifts of size 1, we can use the rotate instructions. */
2280 if (size == 1)
2282 switch (code)
2284 case ASHIFT:
2285 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2286 break;
2287 case ASHIFTRT:
2288 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2289 break;
2290 case LSHIFTRT:
2291 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2292 break;
2293 default:
2294 abort ();
2296 return r;
2299 /* For large shifts, there are easy special cases. */
2300 if (size == 16)
2302 switch (code)
2304 case ASHIFT:
2305 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2306 break;
2307 case ASHIFTRT:
2308 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2309 break;
2310 case LSHIFTRT:
2311 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2312 break;
2313 default:
2314 abort ();
2316 return r;
2318 if (size > 16)
2320 switch (code)
2322 case ASHIFT:
2323 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2324 r1, r0, r0, r1, (int) size - 16);
2325 break;
2326 case ASHIFTRT:
2327 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2328 r0, r1, r1, r0, (int) size - 16);
2329 break;
2330 case LSHIFTRT:
2331 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2332 r0, r1, r1, r0, (int) size - 16);
2333 break;
2334 default:
2335 abort ();
2337 return r;
2340 /* For the rest, we have to do more work. In particular, we
2341 need a temporary. */
2342 rt = reg_names [REGNO (temp)];
2343 switch (code)
2345 case ASHIFT:
2346 sprintf (r,
2347 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2348 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2349 r1, rt);
2350 break;
2351 case ASHIFTRT:
2352 sprintf (r,
2353 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2354 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2355 r0, rt);
2356 break;
2357 case LSHIFTRT:
2358 sprintf (r,
2359 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2360 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2361 r0, rt);
2362 break;
2363 default:
2364 abort ();
2366 return r;
2369 /* Attribute handling. */
2371 /* Return nonzero if the function is an interrupt function. */
2373 xstormy16_interrupt_function_p (void)
2375 tree attributes;
2377 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2378 any functions are declared, which is demonstrably wrong, but
2379 it is worked around here. FIXME. */
2380 if (!cfun)
2381 return 0;
2383 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2384 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

/* Forward declarations of the attribute handlers defined below.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes: "interrupt" marks interrupt handler
   functions; "BELOW100"/"below100" apply to variables and types (see
   the checks in the handler below).  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler }.  */
  { "interrupt", 0, 0, false, true,  true,  xstormy16_handle_interrupt_attribute },
  { "BELOW100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
2403 /* Handle an "interrupt" attribute;
2404 arguments as in struct attribute_spec.handler. */
2405 static tree
2406 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2407 tree args ATTRIBUTE_UNUSED,
2408 int flags ATTRIBUTE_UNUSED,
2409 bool *no_add_attrs)
2411 if (TREE_CODE (*node) != FUNCTION_TYPE)
2413 warning ("%qs attribute only applies to functions",
2414 IDENTIFIER_POINTER (name));
2415 *no_add_attrs = true;
2418 return NULL_TREE;
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_below100_attribute (tree *node,
				     tree name ATTRIBUTE_UNUSED,
				     tree args ATTRIBUTE_UNUSED,
				     int flags ATTRIBUTE_UNUSED,
				     bool *no_add_attrs)
{
  /* Only variables, pointer types and type declarations may carry
     the attribute.  */
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning ("%<__BELOW100__%> attribute only applies to variables");
      *no_add_attrs = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Reject the attribute on automatic (non-public, non-static)
	 variables.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning ("__BELOW100__ attribute not allowed with auto storage class.");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of the machine-specific builtins: the builtin's name, the
   insn code used to expand it, an operand-ordering string consumed by
   xstormy16_expand_builtin, and the signature string consumed by
   xstormy16_init_builtins (first character is the return type).  */
static struct {
  const char *name;
  int md_code;
  const char *arg_ops; /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};
2467 static void
2468 xstormy16_init_builtins (void)
2470 tree args, ret_type, arg;
2471 int i, a;
2473 ret_type = void_type_node;
2475 for (i=0; s16builtins[i].name; i++)
2477 args = void_list_node;
2478 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2480 switch (s16builtins[i].arg_types[a])
2482 case 's': arg = short_integer_type_node; break;
2483 case 'S': arg = short_unsigned_type_node; break;
2484 case 'l': arg = long_integer_type_node; break;
2485 case 'L': arg = long_unsigned_type_node; break;
2486 default: abort();
2488 if (a == 0)
2489 ret_type = arg;
2490 else
2491 args = tree_cons (NULL_TREE, arg, args);
2493 lang_hooks.builtin_function (s16builtins[i].name,
2494 build_function_type (ret_type, args),
2495 i, BUILT_IN_MD, NULL, NULL);
/* Expand one of the machine-specific builtins described in the
   s16builtins table.  EXP is the CALL_EXPR; TARGET is a suggested
   place for the result (may be NULL).  SUBTARGET, MODE and IGNORE
   are unused.  Returns the rtx holding the result, or 0 if the
   builtin produces none.  */
static rtx
xstormy16_expand_builtin(tree exp, rtx target,
			 rtx subtarget ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED,
			 int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* The builtin's index into s16builtins was stored as its function
     code by xstormy16_init_builtins.  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand the actual arguments (at most 10).  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the insn's operand vector following the arg_ops string:
     'r' is the return value, 't' a scratch register, and a digit
     selects the corresponding expanded argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      /* First constraint character, used to recognize output
	 operands ('=' or '+').  */
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand fails the insn's predicate, substitute a fresh
	 register; for output operands arrange to copy the result back
	 afterwards, for inputs copy the value in now.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy back any output operands that had to be renamed, updating
     RETVAL if it was one of them.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch.  Starting from it we scan backwards
   for the insn that computed the tested register (a sign extend for
   LT/GE, an AND for EQ/NE, optionally preceded by a right shift) and
   for the load feeding it; if everything matches, the branch
   condition is rewritten to test the bit directly in memory and the
   now-dead feeder insns are deleted.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  /* Only the two cbranch patterns are candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be register against zero, and the register
     must die here (otherwise the feeder insns cannot be removed).  */
  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have an sign extend before
	 them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  int and_code = recog_memoized (and);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and = insn;
	      break;
	    }

	  /* Any other use of the register in between blocks the
	     transformation.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  if (recog_memoized (and) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}

      if (and)
	{
	  /* Some mis-optimisations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* Not the shift we were looking for — give up on the
		 shift (but not on the whole transformation).  */
	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }
  if (!and)
    return;

  /* Now look for the load that feeds the AND/extend; it must read a
     below-100 location so a BN/BP bit test can address it.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* For the sign tests, the interesting bit is the sign bit of
	 the loaded mode.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* For EQ/NE the AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      /* Account for a preceding right shift by moving the tested bit
	 back up to its position in the loaded value.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access to the byte actually containing the bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the bit in memory directly,
     then delete the now-redundant feeder insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2782 static void
2783 xstormy16_reorg (void)
2785 rtx insn;
2787 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2789 if (! JUMP_P (insn))
2790 continue;
2791 combine_bnp (insn);
2796 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2798 static bool
2799 xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2801 HOST_WIDE_INT size = int_size_in_bytes (type);
2802 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Assembler directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING xstormy16_strip_name_encoding

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg

/* Promote sub-word arguments, return values and prototypes.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Initialize the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;