Merge -r 127928:132243 from trunk
[official-gcc.git] / gcc / config / stormy16 / stormy16.c
bloba4c1746319476027bf25cd0404dd5f5ef4b99df4
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "toplev.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "except.h"
43 #include "function.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "tm_p.h"
47 #include "langhooks.h"
48 #include "tree-gimple.h"
49 #include "df.h"
50 #include "ggc.h"
52 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53 static void xstormy16_asm_out_constructor (rtx, int);
54 static void xstormy16_asm_out_destructor (rtx, int);
55 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 HOST_WIDE_INT, tree);
58 static void xstormy16_init_builtins (void);
59 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
60 static bool xstormy16_rtx_costs (rtx, int, int, int *);
61 static int xstormy16_address_cost (rtx);
62 static bool xstormy16_return_in_memory (const_tree, const_tree);
64 /* Define the information needed to generate branch and scc insns. This is
65 stored from the compare operation. */
66 struct rtx_def * xstormy16_compare_op0;
67 struct rtx_def * xstormy16_compare_op1;
69 static GTY(()) section *bss100_section;
71 /* Compute a (partial) cost for rtx X. Return true if the complete
72 cost has been computed, and false if subexpressions should be
73 scanned. In either case, *TOTAL contains the cost result. */
75 static bool
76 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
77 int *total)
79 switch (code)
81 case CONST_INT:
82 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
83 *total = COSTS_N_INSNS (1) / 2;
84 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
85 *total = COSTS_N_INSNS (1);
86 else
87 *total = COSTS_N_INSNS (2);
88 return true;
90 case CONST_DOUBLE:
91 case CONST:
92 case SYMBOL_REF:
93 case LABEL_REF:
94 *total = COSTS_N_INSNS(2);
95 return true;
97 case MULT:
98 *total = COSTS_N_INSNS (35 + 6);
99 return true;
100 case DIV:
101 *total = COSTS_N_INSNS (51 - 6);
102 return true;
104 default:
105 return false;
109 static int
110 xstormy16_address_cost (rtx x)
112 return (GET_CODE (x) == CONST_INT ? 2
113 : GET_CODE (x) == PLUS ? 7
114 : 5);
117 /* Branches are handled as follows:
119 1. HImode compare-and-branches. The machine supports these
120 natively, so the appropriate pattern is emitted directly.
122 2. SImode EQ and NE. These are emitted as pairs of HImode
123 compare-and-branches.
125 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
126 of a SImode subtract followed by a branch (not a compare-and-branch),
127 like this:
132 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
140 /* Emit a branch of kind CODE to location LOC. */
142 void
143 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
145 rtx op0 = xstormy16_compare_op0;
146 rtx op1 = xstormy16_compare_op1;
147 rtx condition_rtx, loc_ref, branch, cy_clobber;
148 rtvec vec;
149 enum machine_mode mode;
151 mode = GET_MODE (op0);
152 gcc_assert (mode == HImode || mode == SImode);
154 if (mode == SImode
155 && (code == GT || code == LE || code == GTU || code == LEU))
157 int unsigned_p = (code == GTU || code == LEU);
158 int gt_p = (code == GT || code == GTU);
159 rtx lab = NULL_RTX;
161 if (gt_p)
162 lab = gen_label_rtx ();
163 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
164 /* This should be generated as a comparison against the temporary
165 created by the previous insn, but reload can't handle that. */
166 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
167 if (gt_p)
168 emit_label (lab);
169 return;
171 else if (mode == SImode
172 && (code == NE || code == EQ)
173 && op1 != const0_rtx)
175 rtx lab = NULL_RTX;
176 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
177 int i;
179 if (code == EQ)
180 lab = gen_label_rtx ();
182 for (i = 0; i < num_words - 1; i++)
184 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
185 i * UNITS_PER_WORD);
186 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
187 i * UNITS_PER_WORD);
188 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
190 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
191 i * UNITS_PER_WORD);
192 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
193 i * UNITS_PER_WORD);
194 xstormy16_emit_cbranch (code, loc);
196 if (code == EQ)
197 emit_label (lab);
198 return;
201 /* We can't allow reload to try to generate any reload after a branch,
202 so when some register must match we must make the temporary ourselves. */
203 if (mode != HImode)
205 rtx tmp;
206 tmp = gen_reg_rtx (mode);
207 emit_move_insn (tmp, op0);
208 op0 = tmp;
211 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
212 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
213 branch = gen_rtx_SET (VOIDmode, pc_rtx,
214 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
215 loc_ref, pc_rtx));
217 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
219 if (mode == HImode)
220 vec = gen_rtvec (2, branch, cy_clobber);
221 else if (code == NE || code == EQ)
222 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
223 else
225 rtx sub;
226 #if 0
227 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
228 #else
229 sub = gen_rtx_CLOBBER (SImode, op0);
230 #endif
231 vec = gen_rtvec (3, branch, sub, cy_clobber);
234 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
237 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
238 the arithmetic operation. Most of the work is done by
239 xstormy16_expand_arith. */
241 void
242 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
243 rtx dest, rtx carry)
245 rtx op0 = XEXP (comparison, 0);
246 rtx op1 = XEXP (comparison, 1);
247 rtx seq, last_insn;
248 rtx compare;
250 start_sequence ();
251 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
252 seq = get_insns ();
253 end_sequence ();
255 gcc_assert (INSN_P (seq));
257 last_insn = seq;
258 while (NEXT_INSN (last_insn) != NULL_RTX)
259 last_insn = NEXT_INSN (last_insn);
261 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
262 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
263 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
264 emit_insn (seq);
268 /* Return the string to output a conditional branch to LABEL, which is
269 the operand number of the label.
271 OP is the conditional expression, or NULL for branch-always.
273 REVERSED is nonzero if we should reverse the sense of the comparison.
275 INSN is the insn. */
277 char *
278 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
280 static char string[64];
281 int need_longbranch = (op != NULL_RTX
282 ? get_attr_length (insn) == 8
283 : get_attr_length (insn) == 4);
284 int really_reversed = reversed ^ need_longbranch;
285 const char *ccode;
286 const char *template;
287 const char *operands;
288 enum rtx_code code;
290 if (! op)
292 if (need_longbranch)
293 ccode = "jmpf";
294 else
295 ccode = "br";
296 sprintf (string, "%s %s", ccode, label);
297 return string;
300 code = GET_CODE (op);
302 if (GET_CODE (XEXP (op, 0)) != REG)
304 code = swap_condition (code);
305 operands = "%3,%2";
307 else
308 operands = "%2,%3";
310 /* Work out which way this really branches. */
311 if (really_reversed)
312 code = reverse_condition (code);
314 switch (code)
316 case EQ: ccode = "z"; break;
317 case NE: ccode = "nz"; break;
318 case GE: ccode = "ge"; break;
319 case LT: ccode = "lt"; break;
320 case GT: ccode = "gt"; break;
321 case LE: ccode = "le"; break;
322 case GEU: ccode = "nc"; break;
323 case LTU: ccode = "c"; break;
324 case GTU: ccode = "hi"; break;
325 case LEU: ccode = "ls"; break;
327 default:
328 gcc_unreachable ();
331 if (need_longbranch)
332 template = "b%s %s,.+8 | jmpf %s";
333 else
334 template = "b%s %s,%s";
335 sprintf (string, template, ccode, operands, label);
337 return string;
340 /* Return the string to output a conditional branch to LABEL, which is
341 the operand number of the label, but suitable for the tail of a
342 SImode branch.
344 OP is the conditional expression (OP is never NULL_RTX).
346 REVERSED is nonzero if we should reverse the sense of the comparison.
348 INSN is the insn. */
350 char *
351 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
353 static char string[64];
354 int need_longbranch = get_attr_length (insn) >= 8;
355 int really_reversed = reversed ^ need_longbranch;
356 const char *ccode;
357 const char *template;
358 char prevop[16];
359 enum rtx_code code;
361 code = GET_CODE (op);
363 /* Work out which way this really branches. */
364 if (really_reversed)
365 code = reverse_condition (code);
367 switch (code)
369 case EQ: ccode = "z"; break;
370 case NE: ccode = "nz"; break;
371 case GE: ccode = "ge"; break;
372 case LT: ccode = "lt"; break;
373 case GEU: ccode = "nc"; break;
374 case LTU: ccode = "c"; break;
376 /* The missing codes above should never be generated. */
377 default:
378 gcc_unreachable ();
381 switch (code)
383 case EQ: case NE:
385 int regnum;
387 gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
389 regnum = REGNO (XEXP (op, 0));
390 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
392 break;
394 case GE: case LT: case GEU: case LTU:
395 strcpy (prevop, "sbc %2,%3");
396 break;
398 default:
399 gcc_unreachable ();
402 if (need_longbranch)
403 template = "%s | b%s .+6 | jmpf %s";
404 else
405 template = "%s | b%s %s";
406 sprintf (string, template, prevop, ccode, label);
408 return string;
411 /* Many machines have some registers that cannot be copied directly to or from
412 memory or even from other types of registers. An example is the `MQ'
413 register, which on most machines, can only be copied to or from general
414 registers, but not memory. Some machines allow copying all registers to and
415 from memory, but require a scratch register for stores to some memory
416 locations (e.g., those with symbolic address on the RT, and those with
417 certain symbolic address on the SPARC when compiling PIC). In some cases,
418 both an intermediate and a scratch register are required.
420 You should define these macros to indicate to the reload phase that it may
421 need to allocate at least one register for a reload in addition to the
422 register to contain the data. Specifically, if copying X to a register
423 CLASS in MODE requires an intermediate register, you should define
424 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
425 whose registers can be used as intermediate registers or scratch registers.
427 If copying a register CLASS in MODE to X requires an intermediate or scratch
428 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
429 largest register class required. If the requirements for input and output
430 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
431 instead of defining both macros identically.
433 The values returned by these macros are often `GENERAL_REGS'. Return
434 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
435 to or from a register of CLASS in MODE without requiring a scratch register.
436 Do not define this macro if it would always return `NO_REGS'.
438 If a scratch register is required (either with or without an intermediate
439 register), you should define patterns for `reload_inM' or `reload_outM', as
440 required.. These patterns, which will normally be implemented with a
441 `define_expand', should be similar to the `movM' patterns, except that
442 operand 2 is the scratch register.
444 Define constraints for the reload register and scratch register that contain
445 a single register class. If the original reload register (whose class is
446 CLASS) can meet the constraint given in the pattern, the value returned by
447 these macros is used for the class of the scratch register. Otherwise, two
448 additional reload registers are required. Their classes are obtained from
449 the constraints in the insn pattern.
451 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
452 either be in a hard register or in memory. Use `true_regnum' to find out;
453 it will return -1 if the pseudo is in memory and the hard register number if
454 it is in a register.
456 These macros should not be used in the case where a particular class of
457 registers can only be copied to memory and not to another class of
458 registers. In that case, secondary reload registers are not needed and
459 would not be helpful. Instead, a stack location must be used to perform the
460 copy and the `movM' pattern should use memory as an intermediate storage.
461 This case often occurs between floating-point and general registers. */
463 enum reg_class
464 xstormy16_secondary_reload_class (enum reg_class class,
465 enum machine_mode mode,
466 rtx x)
468 /* This chip has the interesting property that only the first eight
469 registers can be moved to/from memory. */
470 if ((GET_CODE (x) == MEM
471 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
472 && (true_regnum (x) == -1
473 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
474 && ! reg_class_subset_p (class, EIGHT_REGS))
475 return EIGHT_REGS;
477 /* When reloading a PLUS, the carry register will be required
478 unless the inc or dec instructions can be used. */
479 if (xstormy16_carry_plus_operand (x, mode))
480 return CARRY_REGS;
482 return NO_REGS;
485 enum reg_class
486 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
488 if (class == GENERAL_REGS
489 && GET_CODE (x) == MEM)
490 return EIGHT_REGS;
492 return class;
495 /* Predicate for symbols and addresses that reflect special 8-bit
496 addressing. */
498 xstormy16_below100_symbol (rtx x,
499 enum machine_mode mode ATTRIBUTE_UNUSED)
501 if (GET_CODE (x) == CONST)
502 x = XEXP (x, 0);
503 if (GET_CODE (x) == PLUS
504 && GET_CODE (XEXP (x, 1)) == CONST_INT)
505 x = XEXP (x, 0);
507 if (GET_CODE (x) == SYMBOL_REF)
508 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
510 if (GET_CODE (x) == CONST_INT)
512 HOST_WIDE_INT i = INTVAL (x);
513 if ((i >= 0x0000 && i <= 0x00ff)
514 || (i >= 0x7f00 && i <= 0x7fff))
515 return 1;
517 return 0;
520 /* Likewise, but only for non-volatile MEMs, for patterns where the
521 MEM will get split into smaller sized accesses. */
523 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
525 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
526 return 0;
527 return xstormy16_below100_operand (x, mode);
530 /* Expand an 8-bit IOR. This either detects the one case we can
531 actually do, or uses a 16-bit IOR. */
532 void
533 xstormy16_expand_iorqi3 (rtx *operands)
535 rtx in, out, outsub, val;
537 out = operands[0];
538 in = operands[1];
539 val = operands[2];
541 if (xstormy16_onebit_set_operand (val, QImode))
543 if (!xstormy16_below100_or_register (in, QImode))
544 in = copy_to_mode_reg (QImode, in);
545 if (!xstormy16_below100_or_register (out, QImode))
546 out = gen_reg_rtx (QImode);
547 emit_insn (gen_iorqi3_internal (out, in, val));
548 if (out != operands[0])
549 emit_move_insn (operands[0], out);
550 return;
553 if (GET_CODE (in) != REG)
554 in = copy_to_mode_reg (QImode, in);
555 if (GET_CODE (val) != REG
556 && GET_CODE (val) != CONST_INT)
557 val = copy_to_mode_reg (QImode, val);
558 if (GET_CODE (out) != REG)
559 out = gen_reg_rtx (QImode);
561 in = simplify_gen_subreg (HImode, in, QImode, 0);
562 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
563 if (GET_CODE (val) != CONST_INT)
564 val = simplify_gen_subreg (HImode, val, QImode, 0);
566 emit_insn (gen_iorhi3 (outsub, in, val));
568 if (out != operands[0])
569 emit_move_insn (operands[0], out);
572 /* Likewise, for AND. */
573 void
574 xstormy16_expand_andqi3 (rtx *operands)
576 rtx in, out, outsub, val;
578 out = operands[0];
579 in = operands[1];
580 val = operands[2];
582 if (xstormy16_onebit_clr_operand (val, QImode))
584 if (!xstormy16_below100_or_register (in, QImode))
585 in = copy_to_mode_reg (QImode, in);
586 if (!xstormy16_below100_or_register (out, QImode))
587 out = gen_reg_rtx (QImode);
588 emit_insn (gen_andqi3_internal (out, in, val));
589 if (out != operands[0])
590 emit_move_insn (operands[0], out);
591 return;
594 if (GET_CODE (in) != REG)
595 in = copy_to_mode_reg (QImode, in);
596 if (GET_CODE (val) != REG
597 && GET_CODE (val) != CONST_INT)
598 val = copy_to_mode_reg (QImode, val);
599 if (GET_CODE (out) != REG)
600 out = gen_reg_rtx (QImode);
602 in = simplify_gen_subreg (HImode, in, QImode, 0);
603 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
604 if (GET_CODE (val) != CONST_INT)
605 val = simplify_gen_subreg (HImode, val, QImode, 0);
607 emit_insn (gen_andhi3 (outsub, in, val));
609 if (out != operands[0])
610 emit_move_insn (operands[0], out);
/* Nonzero if X is a CONST_INT whose value plus OFFSET fits in the
   signed 12-bit displacement field ([-2048, 2047]).  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (GET_CODE (X) == CONST_INT						\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero if X is a CONST_INT usable as an absolute address after
   adding OFFSET: non-negative, below 0x8000, and within one of the
   two directly addressable windows (below 0x100 or at/above 0x7F00).  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (GET_CODE (X) == CONST_INT						\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
624 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
625 rtx x, int strict)
627 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
628 return 1;
630 if (GET_CODE (x) == PLUS
631 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
632 x = XEXP (x, 0);
634 if ((GET_CODE (x) == PRE_MODIFY
635 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
636 || GET_CODE (x) == POST_INC
637 || GET_CODE (x) == PRE_DEC)
638 x = XEXP (x, 0);
640 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
641 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
642 return 1;
644 if (xstormy16_below100_symbol (x, mode))
645 return 1;
647 return 0;
650 /* Return nonzero if memory address X (an RTX) can have different
651 meanings depending on the machine mode of the memory reference it
652 is used for or if the address is valid for some modes but not
653 others.
655 Autoincrement and autodecrement addresses typically have mode-dependent
656 effects because the amount of the increment or decrement is the size of the
657 operand being addressed. Some machines have other mode-dependent addresses.
658 Many RISC machines have no mode-dependent addresses.
660 You may assume that ADDR is a valid address for the machine.
662 On this chip, this is true if the address is valid with an offset
663 of 0 but not of 6, because in that case it cannot be used as an
664 address for DImode or DFmode, or if the address is a post-increment
665 or pre-decrement address. */
667 xstormy16_mode_dependent_address_p (rtx x)
669 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
670 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
671 return 1;
673 if (GET_CODE (x) == PLUS
674 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
675 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
676 return 1;
678 if (GET_CODE (x) == PLUS)
679 x = XEXP (x, 0);
681 /* Auto-increment addresses are now treated generically in recog.c. */
683 return 0;
686 /* A C expression that defines the optional machine-dependent constraint
687 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
688 types of operands, usually memory references, for the target machine.
689 Normally this macro will not be defined. If it is required for a particular
690 target machine, it should return 1 if VALUE corresponds to the operand type
691 represented by the constraint letter C. If C is not defined as an extra
692 constraint, the value returned should be 0 regardless of VALUE. */
694 xstormy16_extra_constraint_p (rtx x, int c)
696 switch (c)
698 /* 'Q' is for pushes. */
699 case 'Q':
700 return (GET_CODE (x) == MEM
701 && GET_CODE (XEXP (x, 0)) == POST_INC
702 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
704 /* 'R' is for pops. */
705 case 'R':
706 return (GET_CODE (x) == MEM
707 && GET_CODE (XEXP (x, 0)) == PRE_DEC
708 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
710 /* 'S' is for immediate memory addresses. */
711 case 'S':
712 return (GET_CODE (x) == MEM
713 && GET_CODE (XEXP (x, 0)) == CONST_INT
714 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
716 /* 'T' is for Rx. */
717 case 'T':
718 /* Not implemented yet. */
719 return 0;
721 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
722 for allocating a scratch register for 32-bit shifts. */
723 case 'U':
724 return (GET_CODE (x) == CONST_INT
725 && (INTVAL (x) < 2 || INTVAL (x) > 15));
727 /* 'Z' is for CONST_INT value zero. This is for adding zero to
728 a register in addhi3, which would otherwise require a carry. */
729 case 'Z':
730 return (GET_CODE (x) == CONST_INT
731 && (INTVAL (x) == 0));
733 case 'W':
734 return xstormy16_below100_operand (x, GET_MODE (x));
736 default:
737 return 0;
742 short_memory_operand (rtx x, enum machine_mode mode)
744 if (! memory_operand (x, mode))
745 return 0;
746 return (GET_CODE (XEXP (x, 0)) != PLUS);
749 /* Splitter for the 'move' patterns, for modes not directly implemented
750 by hardware. Emit insns to copy a value of mode MODE from SRC to
751 DEST.
753 This function is only called when reload_completed.
756 void
757 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
759 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
760 int direction, end, i;
761 int src_modifies = 0;
762 int dest_modifies = 0;
763 int src_volatile = 0;
764 int dest_volatile = 0;
765 rtx mem_operand;
766 rtx auto_inc_reg_rtx = NULL_RTX;
768 /* Check initial conditions. */
769 gcc_assert (reload_completed
770 && mode != QImode && mode != HImode
771 && nonimmediate_operand (dest, mode)
772 && general_operand (src, mode));
774 /* This case is not supported below, and shouldn't be generated. */
775 gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
777 /* This case is very very bad after reload, so trap it now. */
778 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
780 /* The general idea is to copy by words, offsetting the source and
781 destination. Normally the least-significant word will be copied
782 first, but for pre-dec operations it's better to copy the
783 most-significant word first. Only one operand can be a pre-dec
784 or post-inc operand.
786 It's also possible that the copy overlaps so that the direction
787 must be reversed. */
788 direction = 1;
790 if (GET_CODE (dest) == MEM)
792 mem_operand = XEXP (dest, 0);
793 dest_modifies = side_effects_p (mem_operand);
794 if (auto_inc_p (mem_operand))
795 auto_inc_reg_rtx = XEXP (mem_operand, 0);
796 dest_volatile = MEM_VOLATILE_P (dest);
797 if (dest_volatile)
799 dest = copy_rtx (dest);
800 MEM_VOLATILE_P (dest) = 0;
803 else if (GET_CODE (src) == MEM)
805 mem_operand = XEXP (src, 0);
806 src_modifies = side_effects_p (mem_operand);
807 if (auto_inc_p (mem_operand))
808 auto_inc_reg_rtx = XEXP (mem_operand, 0);
809 src_volatile = MEM_VOLATILE_P (src);
810 if (src_volatile)
812 src = copy_rtx (src);
813 MEM_VOLATILE_P (src) = 0;
816 else
817 mem_operand = NULL_RTX;
819 if (mem_operand == NULL_RTX)
821 if (GET_CODE (src) == REG
822 && GET_CODE (dest) == REG
823 && reg_overlap_mentioned_p (dest, src)
824 && REGNO (dest) > REGNO (src))
825 direction = -1;
827 else if (GET_CODE (mem_operand) == PRE_DEC
828 || (GET_CODE (mem_operand) == PLUS
829 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
830 direction = -1;
831 else if (GET_CODE (src) == MEM
832 && reg_overlap_mentioned_p (dest, src))
834 int regno;
836 gcc_assert (GET_CODE (dest) == REG);
837 regno = REGNO (dest);
839 gcc_assert (refers_to_regno_p (regno, regno + num_words,
840 mem_operand, 0));
842 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
843 direction = -1;
844 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
845 mem_operand, 0))
846 direction = 1;
847 else
848 /* This means something like
849 (set (reg:DI r0) (mem:DI (reg:HI r1)))
850 which we'd need to support by doing the set of the second word
851 last. */
852 gcc_unreachable ();
855 end = direction < 0 ? -1 : num_words;
856 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
858 rtx w_src, w_dest, insn;
860 if (src_modifies)
861 w_src = gen_rtx_MEM (word_mode, mem_operand);
862 else
863 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
864 if (src_volatile)
865 MEM_VOLATILE_P (w_src) = 1;
866 if (dest_modifies)
867 w_dest = gen_rtx_MEM (word_mode, mem_operand);
868 else
869 w_dest = simplify_gen_subreg (word_mode, dest, mode,
870 i * UNITS_PER_WORD);
871 if (dest_volatile)
872 MEM_VOLATILE_P (w_dest) = 1;
874 /* The simplify_subreg calls must always be able to simplify. */
875 gcc_assert (GET_CODE (w_src) != SUBREG
876 && GET_CODE (w_dest) != SUBREG);
878 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
879 if (auto_inc_reg_rtx)
880 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
881 auto_inc_reg_rtx,
882 REG_NOTES (insn));
886 /* Expander for the 'move' patterns. Emit insns to copy a value of
887 mode MODE from SRC to DEST. */
889 void
890 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
892 if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
894 rtx pmv = XEXP (dest, 0);
895 rtx dest_reg = XEXP (pmv, 0);
896 rtx dest_mod = XEXP (pmv, 1);
897 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
898 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
900 dest = gen_rtx_MEM (mode, dest_reg);
901 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
903 else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
905 rtx pmv = XEXP (src, 0);
906 rtx src_reg = XEXP (pmv, 0);
907 rtx src_mod = XEXP (pmv, 1);
908 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
909 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
911 src = gen_rtx_MEM (mode, src_reg);
912 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
915 /* There are only limited immediate-to-memory move instructions. */
916 if (! reload_in_progress
917 && ! reload_completed
918 && GET_CODE (dest) == MEM
919 && (GET_CODE (XEXP (dest, 0)) != CONST_INT
920 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
921 && ! xstormy16_below100_operand (dest, mode)
922 && GET_CODE (src) != REG
923 && GET_CODE (src) != SUBREG)
924 src = copy_to_mode_reg (mode, src);
926 /* Don't emit something we would immediately split. */
927 if (reload_completed
928 && mode != HImode && mode != QImode)
930 xstormy16_split_move (mode, dest, src);
931 return;
934 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->	Local variables
	Register save area (up to 4 words)
	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->	Return address (two words)
	9th procedure parameter word
	10th procedure parameter word
	...
	last procedure parameter word

  The frame pointer location is tuned to make it most likely that all
  parameters and local variables can be accessed using a load-indexed
  instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
/* Does REGNUM need to be saved?  IFUN is nonzero in an interrupt
   function, where even call-used registers (except the carry register)
   must be preserved.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)			\
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
979 /* Compute the stack layout. */
980 struct xstormy16_stack_layout
981 xstormy16_compute_stack_layout (void)
983 struct xstormy16_stack_layout layout;
984 int regno;
985 const int ifun = xstormy16_interrupt_function_p ();
987 layout.locals_size = get_frame_size ();
989 layout.register_save_size = 0;
990 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
991 if (REG_NEEDS_SAVE (regno, ifun))
992 layout.register_save_size += UNITS_PER_WORD;
994 if (current_function_stdarg)
995 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
996 else
997 layout.stdarg_save_size = 0;
999 layout.frame_size = (layout.locals_size
1000 + layout.register_save_size
1001 + layout.stdarg_save_size);
1003 if (current_function_args_size <= 2048 && current_function_args_size != -1)
1005 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1006 + current_function_args_size <= 2048)
1007 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
1008 else
1009 layout.fp_minus_ap = 2048 - current_function_args_size;
1011 else
1012 layout.fp_minus_ap = (layout.stdarg_save_size
1013 + layout.register_save_size
1014 + INCOMING_FRAME_SP_OFFSET);
1015 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
1016 - layout.fp_minus_ap);
1017 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
1018 return layout;
1021 /* Determine how all the special registers get eliminated. */
1023 xstormy16_initial_elimination_offset (int from, int to)
1025 struct xstormy16_stack_layout layout;
1026 int result;
1028 layout = xstormy16_compute_stack_layout ();
1030 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1031 result = layout.sp_minus_fp - layout.locals_size;
1032 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1033 result = -layout.locals_size;
1034 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1035 result = -layout.fp_minus_ap;
1036 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1037 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1038 else
1039 gcc_unreachable ();
1041 return result;
1044 static rtx
1045 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1047 rtx set, clobber, insn;
1049 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1050 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1051 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1052 return insn;
1055 /* Called after register allocation to add any instructions needed for
1056 the prologue. Using a prologue insn is favored compared to putting
1057 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1058 since it allows the scheduler to intermix instructions with the
1059 saves of the caller saved registers. In some cases, it might be
1060 necessary to emit a barrier instruction as the last insn to prevent
1061 such scheduling.
1063 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1064 so that the debug info generation code can handle them properly. */
1065 void
1066 xstormy16_expand_prologue (void)
1068 struct xstormy16_stack_layout layout;
1069 int regno;
1070 rtx insn;
1071 rtx mem_push_rtx;
1072 const int ifun = xstormy16_interrupt_function_p ();
1074 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1075 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1077 layout = xstormy16_compute_stack_layout ();
1079 if (layout.locals_size >= 32768)
1080 error ("local variable memory requirements exceed capacity");
1082 /* Save the argument registers if necessary. */
1083 if (layout.stdarg_save_size)
1084 for (regno = FIRST_ARGUMENT_REGISTER;
1085 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1086 regno++)
1088 rtx dwarf;
1089 rtx reg = gen_rtx_REG (HImode, regno);
1091 insn = emit_move_insn (mem_push_rtx, reg);
1092 RTX_FRAME_RELATED_P (insn) = 1;
1094 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1096 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1097 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1098 reg);
1099 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1100 plus_constant (stack_pointer_rtx,
1101 GET_MODE_SIZE (Pmode)));
1102 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1103 dwarf,
1104 REG_NOTES (insn));
1105 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1106 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1109 /* Push each of the registers to save. */
1110 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1111 if (REG_NEEDS_SAVE (regno, ifun))
1113 rtx dwarf;
1114 rtx reg = gen_rtx_REG (HImode, regno);
1116 insn = emit_move_insn (mem_push_rtx, reg);
1117 RTX_FRAME_RELATED_P (insn) = 1;
1119 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1121 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1122 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1123 reg);
1124 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1125 plus_constant (stack_pointer_rtx,
1126 GET_MODE_SIZE (Pmode)));
1127 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1128 dwarf,
1129 REG_NOTES (insn));
1130 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1131 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1134 /* It's just possible that the SP here might be what we need for
1135 the new FP... */
1136 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1137 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1139 /* Allocate space for local variables. */
1140 if (layout.locals_size)
1142 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1143 GEN_INT (layout.locals_size));
1144 RTX_FRAME_RELATED_P (insn) = 1;
1147 /* Set up the frame pointer, if required. */
1148 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1150 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1152 if (layout.sp_minus_fp)
1153 emit_addhi3_postreload (hard_frame_pointer_rtx,
1154 hard_frame_pointer_rtx,
1155 GEN_INT (-layout.sp_minus_fp));
1159 /* Do we need an epilogue at all? */
1161 direct_return (void)
1163 return (reload_completed
1164 && xstormy16_compute_stack_layout ().frame_size == 0);
1167 /* Called after register allocation to add any instructions needed for
1168 the epilogue. Using an epilogue insn is favored compared to putting
1169 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1170 since it allows the scheduler to intermix instructions with the
1171 saves of the caller saved registers. In some cases, it might be
1172 necessary to emit a barrier instruction as the last insn to prevent
1173 such scheduling. */
1175 void
1176 xstormy16_expand_epilogue (void)
1178 struct xstormy16_stack_layout layout;
1179 rtx mem_pop_rtx, insn;
1180 int regno;
1181 const int ifun = xstormy16_interrupt_function_p ();
1183 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1184 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1186 layout = xstormy16_compute_stack_layout ();
1188 /* Pop the stack for the locals. */
1189 if (layout.locals_size)
1191 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1192 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1193 else
1195 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1196 GEN_INT (- layout.locals_size));
1197 RTX_FRAME_RELATED_P (insn) = 1;
1201 /* Restore any call-saved registers. */
1202 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1203 if (REG_NEEDS_SAVE (regno, ifun))
1205 rtx dwarf;
1207 insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1208 RTX_FRAME_RELATED_P (insn) = 1;
1209 dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
1210 plus_constant (stack_pointer_rtx,
1211 -GET_MODE_SIZE (Pmode)));
1212 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1213 dwarf,
1214 REG_NOTES (insn));
1217 /* Pop the stack for the stdarg save area. */
1218 if (layout.stdarg_save_size)
1220 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1221 GEN_INT (- layout.stdarg_save_size));
1222 RTX_FRAME_RELATED_P (insn) = 1;
1225 /* Return. */
1226 if (ifun)
1227 emit_jump_insn (gen_return_internal_interrupt ());
1228 else
1229 emit_jump_insn (gen_return_internal ());
1233 xstormy16_epilogue_uses (int regno)
1235 if (reload_completed && call_used_regs[regno])
1237 const int ifun = xstormy16_interrupt_function_p ();
1238 return REG_NEEDS_SAVE (regno, ifun);
1240 return 0;
/* Profiling hook (FUNCTION_PROFILER) — not implemented for this
   target; report it cleanly instead of emitting wrong code.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1250 /* Return an updated summarizer variable CUM to advance past an
1251 argument in the argument list. The values MODE, TYPE and NAMED
1252 describe that argument. Once this is done, the variable CUM is
1253 suitable for analyzing the *following* argument with
1254 `FUNCTION_ARG', etc.
1256 This function need not do anything if the argument in question was
1257 passed on the stack. The compiler knows how to track the amount of
1258 stack space used for arguments without any special help. However,
1259 it makes life easier for xstormy16_build_va_list if it does update
1260 the word count. */
1261 CUMULATIVE_ARGS
1262 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1263 tree type, int named ATTRIBUTE_UNUSED)
1265 /* If an argument would otherwise be passed partially in registers,
1266 and partially on the stack, the whole of it is passed on the
1267 stack. */
1268 if (cum < NUM_ARGUMENT_REGISTERS
1269 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1270 cum = NUM_ARGUMENT_REGISTERS;
1272 cum += XSTORMY16_WORD_SIZE (type, mode);
1274 return cum;
1278 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1279 tree type, int named ATTRIBUTE_UNUSED)
1281 if (mode == VOIDmode)
1282 return const0_rtx;
1283 if (targetm.calls.must_pass_in_stack (mode, type)
1284 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1285 return 0;
1286 return gen_rtx_REG (mode, cum + 2);
1289 /* Build the va_list type.
1291 For this chip, va_list is a record containing a counter and a pointer.
1292 The counter is of type 'int' and indicates how many bytes
1293 have been used to date. The pointer indicates the stack position
1294 for arguments that have not been passed in registers.
1295 To keep the layout nice, the pointer is first in the structure. */
1297 static tree
1298 xstormy16_build_builtin_va_list (void)
1300 tree f_1, f_2, record, type_decl;
1302 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1303 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1305 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1306 ptr_type_node);
1307 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1308 unsigned_type_node);
1310 DECL_FIELD_CONTEXT (f_1) = record;
1311 DECL_FIELD_CONTEXT (f_2) = record;
1313 TREE_CHAIN (record) = type_decl;
1314 TYPE_NAME (record) = type_decl;
1315 TYPE_FIELDS (record) = f_1;
1316 TREE_CHAIN (f_1) = f_2;
1318 layout_type (record);
1320 return record;
1323 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1324 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1325 variable to initialize. NEXTARG is the machine independent notion of the
1326 'next' argument after the variable arguments. */
1327 static void
1328 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1330 tree f_base, f_count;
1331 tree base, count;
1332 tree t,u;
1334 if (xstormy16_interrupt_function_p ())
1335 error ("cannot use va_start in interrupt function");
1337 f_base = TYPE_FIELDS (va_list_type_node);
1338 f_count = TREE_CHAIN (f_base);
1340 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1341 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1342 NULL_TREE);
1344 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1345 u = build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET);
1346 u = fold_convert (TREE_TYPE (count), u);
1347 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1348 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base), base, t);
1349 TREE_SIDE_EFFECTS (t) = 1;
1350 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1352 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count,
1353 build_int_cst (NULL_TREE,
1354 current_function_args_info * UNITS_PER_WORD));
1355 TREE_SIDE_EFFECTS (t) = 1;
1356 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1359 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1360 of type va_list as a tree, TYPE is the type passed to va_arg.
1361 Note: This algorithm is documented in stormy-abi. */
1363 static tree
1364 xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
1365 tree *post_p ATTRIBUTE_UNUSED)
1367 tree f_base, f_count;
1368 tree base, count;
1369 tree count_tmp, addr, t;
1370 tree lab_gotaddr, lab_fromstack;
1371 int size, size_of_reg_args, must_stack;
1372 tree size_tree;
1374 f_base = TYPE_FIELDS (va_list_type_node);
1375 f_count = TREE_CHAIN (f_base);
1377 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1378 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1379 NULL_TREE);
1381 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1382 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1383 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1385 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1387 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1388 lab_gotaddr = create_artificial_label ();
1389 lab_fromstack = create_artificial_label ();
1390 addr = create_tmp_var (ptr_type_node, NULL);
1392 if (!must_stack)
1394 tree r;
1396 t = fold_convert (TREE_TYPE (count), size_tree);
1397 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1398 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1399 t = build2 (GT_EXPR, boolean_type_node, t, r);
1400 t = build3 (COND_EXPR, void_type_node, t,
1401 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1402 NULL_TREE);
1403 gimplify_and_add (t, pre_p);
1405 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1406 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
1407 gimplify_and_add (t, pre_p);
1409 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1410 gimplify_and_add (t, pre_p);
1412 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1413 gimplify_and_add (t, pre_p);
1416 /* Arguments larger than a word might need to skip over some
1417 registers, since arguments are either passed entirely in
1418 registers or entirely on the stack. */
1419 size = PUSH_ROUNDING (int_size_in_bytes (type));
1420 if (size > 2 || size < 0 || must_stack)
1422 tree r, u;
1424 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1425 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, count_tmp, r);
1427 t = fold_convert (TREE_TYPE (count), r);
1428 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1429 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1430 gimplify_and_add (t, pre_p);
1433 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1434 - INCOMING_FRAME_SP_OFFSET);
1435 t = fold_convert (TREE_TYPE (count), t);
1436 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1437 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1438 fold_convert (TREE_TYPE (count), size_tree));
1439 t = fold_convert (TREE_TYPE (t), fold (t));
1440 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1441 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1442 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
1443 gimplify_and_add (t, pre_p);
1445 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1446 gimplify_and_add (t, pre_p);
1448 t = fold_convert (TREE_TYPE (count), size_tree);
1449 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1450 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count, t);
1451 gimplify_and_add (t, pre_p);
1453 addr = fold_convert (build_pointer_type (type), addr);
1454 return build_va_arg_indirect_ref (addr);
1457 /* Initialize the variable parts of a trampoline. ADDR is an RTX for
1458 the address of the trampoline; FNADDR is an RTX for the address of
1459 the nested function; STATIC_CHAIN is an RTX for the static chain
1460 value that should be passed to the function when it is called. */
1461 void
1462 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1464 rtx reg_addr = gen_reg_rtx (Pmode);
1465 rtx temp = gen_reg_rtx (HImode);
1466 rtx reg_fnaddr = gen_reg_rtx (HImode);
1467 rtx reg_addr_mem;
1469 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1471 emit_move_insn (reg_addr, addr);
1472 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1473 emit_move_insn (reg_addr_mem, temp);
1474 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1475 emit_move_insn (temp, static_chain);
1476 emit_move_insn (reg_addr_mem, temp);
1477 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1478 emit_move_insn (reg_fnaddr, fnaddr);
1479 emit_move_insn (temp, reg_fnaddr);
1480 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1481 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1482 emit_move_insn (reg_addr_mem, temp);
1483 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1484 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1485 emit_move_insn (reg_addr_mem, reg_fnaddr);
1488 /* Worker function for FUNCTION_VALUE. */
1491 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1493 enum machine_mode mode;
1494 mode = TYPE_MODE (valtype);
1495 PROMOTE_MODE (mode, 0, valtype);
1496 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1499 /* A C compound statement that outputs the assembler code for a thunk function,
1500 used to implement C++ virtual function calls with multiple inheritance. The
1501 thunk acts as a wrapper around a virtual function, adjusting the implicit
1502 object parameter before handing control off to the real function.
1504 First, emit code to add the integer DELTA to the location that contains the
1505 incoming first argument. Assume that this argument contains a pointer, and
1506 is the one used to pass the `this' pointer in C++. This is the incoming
1507 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1508 addition must preserve the values of all other incoming arguments.
1510 After the addition, emit code to jump to FUNCTION, which is a
1511 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1512 the return address. Hence returning from FUNCTION will return to whoever
1513 called the current `thunk'.
1515 The effect must be as if @var{function} had been called directly
1516 with the adjusted first argument. This macro is responsible for
1517 emitting all of the code for a thunk function;
1518 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1519 not invoked.
1521 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1522 extracted from it.) It might possibly be useful on some targets, but
1523 probably not. */
1525 static void
1526 xstormy16_asm_output_mi_thunk (FILE *file,
1527 tree thunk_fndecl ATTRIBUTE_UNUSED,
1528 HOST_WIDE_INT delta,
1529 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1530 tree function)
1532 int regnum = FIRST_ARGUMENT_REGISTER;
1534 /* There might be a hidden first argument for a returned structure. */
1535 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1536 regnum += 1;
1538 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1539 fputs ("\tjmpf ", file);
1540 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1541 putc ('\n', file);
1544 /* The purpose of this function is to override the default behavior of
1545 BSS objects. Normally, they go into .bss or .sbss via ".common"
1546 directives, but we need to override that and put them in
1547 .bss_below100. We can't just use a section override (like we do
1548 for .data_below100), because that makes them initialized rather
1549 than uninitialized. */
1550 void
1551 xstormy16_asm_output_aligned_common (FILE *stream,
1552 tree decl,
1553 const char *name,
1554 int size,
1555 int align,
1556 int global)
1558 rtx mem = DECL_RTL (decl);
1559 rtx symbol;
1561 if (mem != NULL_RTX
1562 && GET_CODE (mem) == MEM
1563 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1564 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1566 const char *name2;
1567 int p2align = 0;
1569 switch_to_section (bss100_section);
1571 while (align > 8)
1573 align /= 2;
1574 p2align ++;
1577 name2 = default_strip_name_encoding (name);
1578 if (global)
1579 fprintf (stream, "\t.globl\t%s\n", name2);
1580 if (p2align)
1581 fprintf (stream, "\t.p2align %d\n", p2align);
1582 fprintf (stream, "\t.type\t%s, @object\n", name2);
1583 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1584 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1585 return;
1588 if (!global)
1590 fprintf (stream, "\t.local\t");
1591 assemble_name (stream, name);
1592 fprintf (stream, "\n");
1594 fprintf (stream, "\t.comm\t");
1595 assemble_name (stream, name);
1596 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1599 /* Implement TARGET_ASM_INIT_SECTIONS. */
1601 static void
1602 xstormy16_asm_init_sections (void)
1604 bss100_section
1605 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1606 output_section_asm_op,
1607 "\t.section \".bss_below100\",\"aw\",@nobits");
1610 /* Mark symbols with the "below100" attribute so that we can use the
1611 special addressing modes for them. */
1613 static void
1614 xstormy16_encode_section_info (tree decl, rtx r, int first)
1616 default_encode_section_info (decl, r, first);
1618 if (TREE_CODE (decl) == VAR_DECL
1619 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1620 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1622 rtx symbol = XEXP (r, 0);
1624 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1625 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1629 /* Output constructors and destructors. Just like
1630 default_named_section_asm_out_* but don't set the sections writable. */
1631 #undef TARGET_ASM_CONSTRUCTOR
1632 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1633 #undef TARGET_ASM_DESTRUCTOR
1634 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1636 static void
1637 xstormy16_asm_out_destructor (rtx symbol, int priority)
1639 const char *section = ".dtors";
1640 char buf[16];
1642 /* ??? This only works reliably with the GNU linker. */
1643 if (priority != DEFAULT_INIT_PRIORITY)
1645 sprintf (buf, ".dtors.%.5u",
1646 /* Invert the numbering so the linker puts us in the proper
1647 order; constructors are run from right to left, and the
1648 linker sorts in increasing order. */
1649 MAX_INIT_PRIORITY - priority);
1650 section = buf;
1653 switch_to_section (get_section (section, 0, NULL));
1654 assemble_align (POINTER_SIZE);
1655 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1658 static void
1659 xstormy16_asm_out_constructor (rtx symbol, int priority)
1661 const char *section = ".ctors";
1662 char buf[16];
1664 /* ??? This only works reliably with the GNU linker. */
1665 if (priority != DEFAULT_INIT_PRIORITY)
1667 sprintf (buf, ".ctors.%.5u",
1668 /* Invert the numbering so the linker puts us in the proper
1669 order; constructors are run from right to left, and the
1670 linker sorts in increasing order. */
1671 MAX_INIT_PRIORITY - priority);
1672 section = buf;
1675 switch_to_section (get_section (section, 0, NULL));
1676 assemble_align (POINTER_SIZE);
1677 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1680 /* Print a memory address as an operand to reference that memory location. */
1681 void
1682 xstormy16_print_operand_address (FILE *file, rtx address)
1684 HOST_WIDE_INT offset;
1685 int pre_dec, post_inc;
1687 /* There are a few easy cases. */
1688 if (GET_CODE (address) == CONST_INT)
1690 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1691 return;
1694 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1696 output_addr_const (file, address);
1697 return;
1700 /* Otherwise, it's hopefully something of the form
1701 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
1704 if (GET_CODE (address) == PLUS)
1706 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1707 offset = INTVAL (XEXP (address, 1));
1708 address = XEXP (address, 0);
1710 else
1711 offset = 0;
1713 pre_dec = (GET_CODE (address) == PRE_DEC);
1714 post_inc = (GET_CODE (address) == POST_INC);
1715 if (pre_dec || post_inc)
1716 address = XEXP (address, 0);
1718 gcc_assert (GET_CODE (address) == REG);
1720 fputc ('(', file);
1721 if (pre_dec)
1722 fputs ("--", file);
1723 fputs (reg_names [REGNO (address)], file);
1724 if (post_inc)
1725 fputs ("++", file);
1726 if (offset != 0)
1727 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1728 fputc (')', file);
1731 /* Print an operand to an assembler instruction. */
1732 void
1733 xstormy16_print_operand (FILE *file, rtx x, int code)
1735 switch (code)
1737 case 'B':
1738 /* There is either one bit set, or one bit clear, in X.
1739 Print it preceded by '#'. */
1741 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1742 HOST_WIDE_INT xx = 1;
1743 HOST_WIDE_INT l;
1745 if (GET_CODE (x) == CONST_INT)
1746 xx = INTVAL (x);
1747 else
1748 output_operand_lossage ("'B' operand is not constant");
1750 /* GCC sign-extends masks with the MSB set, so we have to
1751 detect all the cases that differ only in sign extension
1752 beyond the bits we care about. Normally, the predicates
1753 and constraints ensure that we have the right values. This
1754 works correctly for valid masks. */
1755 if (bits_set[xx & 7] <= 1)
1757 /* Remove sign extension bits. */
1758 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1759 xx &= 0xff;
1760 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1761 xx &= 0xffff;
1762 l = exact_log2 (xx);
1764 else
1766 /* Add sign extension bits. */
1767 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1768 xx |= ~(HOST_WIDE_INT)0xff;
1769 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1770 xx |= ~(HOST_WIDE_INT)0xffff;
1771 l = exact_log2 (~xx);
1774 if (l == -1)
1775 output_operand_lossage ("'B' operand has multiple bits set");
1777 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1778 return;
1781 case 'C':
1782 /* Print the symbol without a surrounding @fptr(). */
1783 if (GET_CODE (x) == SYMBOL_REF)
1784 assemble_name (file, XSTR (x, 0));
1785 else if (GET_CODE (x) == LABEL_REF)
1786 output_asm_label (x);
1787 else
1788 xstormy16_print_operand_address (file, x);
1789 return;
1791 case 'o':
1792 case 'O':
1793 /* Print the immediate operand less one, preceded by '#'.
1794 For 'O', negate it first. */
1796 HOST_WIDE_INT xx = 0;
1798 if (GET_CODE (x) == CONST_INT)
1799 xx = INTVAL (x);
1800 else
1801 output_operand_lossage ("'o' operand is not constant");
1803 if (code == 'O')
1804 xx = -xx;
1806 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1807 return;
1810 case 'b':
1811 /* Print the shift mask for bp/bn. */
1813 HOST_WIDE_INT xx = 1;
1814 HOST_WIDE_INT l;
1816 if (GET_CODE (x) == CONST_INT)
1817 xx = INTVAL (x);
1818 else
1819 output_operand_lossage ("'B' operand is not constant");
1821 l = 7 - xx;
1823 fputs (IMMEDIATE_PREFIX, file);
1824 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1825 return;
1828 case 0:
1829 /* Handled below. */
1830 break;
1832 default:
1833 output_operand_lossage ("xstormy16_print_operand: unknown code");
1834 return;
1837 switch (GET_CODE (x))
1839 case REG:
1840 fputs (reg_names [REGNO (x)], file);
1841 break;
1843 case MEM:
1844 xstormy16_print_operand_address (file, XEXP (x, 0));
1845 break;
1847 default:
1848 /* Some kind of constant or label; an immediate operand,
1849 so prefix it with '#' for the assembler. */
1850 fputs (IMMEDIATE_PREFIX, file);
1851 output_addr_const (file, x);
1852 break;
1855 return;
1859 /* Expander for the `casesi' pattern.
1860 INDEX is the index of the switch statement.
1861 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1862 to the first table entry.
1863 RANGE is the number of table entries.
1864 TABLE is an ADDR_VEC that is the jump table.
1865 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1866 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1869 void
1870 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1871 rtx table, rtx default_label)
1873 HOST_WIDE_INT range_i = INTVAL (range);
1874 rtx int_index;
1876 /* This code uses 'br', so it can deal only with tables of size up to
1877 8192 entries. */
1878 if (range_i >= 8192)
1879 sorry ("switch statement of size %lu entries too large",
1880 (unsigned long) range_i);
1882 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1883 OPTAB_LIB_WIDEN);
1884 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1885 default_label);
1886 int_index = gen_lowpart_common (HImode, index);
1887 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1888 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1891 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1892 instructions, without label or alignment or any other special
1893 constructs. We know that the previous instruction will be the
1894 `tablejump_pcrel' output above.
1896 TODO: it might be nice to output 'br' instructions if they could
1897 all reach. */
1899 void
1900 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1902 int vlen, idx;
1904 switch_to_section (current_function_section ());
1906 vlen = XVECLEN (table, 0);
1907 for (idx = 0; idx < vlen; idx++)
1909 fputs ("\tjmpf ", file);
1910 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1911 fputc ('\n', file);
1916 /* Expander for the `call' patterns.
1917 INDEX is the index of the switch statement.
1918 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1919 to the first table entry.
1920 RANGE is the number of table entries.
1921 TABLE is an ADDR_VEC that is the jump table.
1922 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1923 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1926 void
1927 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1929 rtx call, temp;
1930 enum machine_mode mode;
1932 gcc_assert (GET_CODE (dest) == MEM);
1933 dest = XEXP (dest, 0);
1935 if (! CONSTANT_P (dest)
1936 && GET_CODE (dest) != REG)
1937 dest = force_reg (Pmode, dest);
1939 if (retval == NULL)
1940 mode = VOIDmode;
1941 else
1942 mode = GET_MODE (retval);
1944 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1945 counter);
1946 if (retval)
1947 call = gen_rtx_SET (VOIDmode, retval, call);
1949 if (! CONSTANT_P (dest))
1951 temp = gen_reg_rtx (HImode);
1952 emit_move_insn (temp, const0_rtx);
1954 else
1955 temp = const0_rtx;
1957 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1958 gen_rtx_USE (VOIDmode, temp)));
1959 emit_call_insn (call);
1962 /* Expanders for multiword computational operations. */
1964 /* Expander for arithmetic operations; emit insns to compute
1966 (set DEST (CODE:MODE SRC0 SRC1))
1968 using CARRY as a temporary. When CODE is COMPARE, a branch
1969 template is generated (this saves duplicating code in
1970 xstormy16_split_cbranch). */
1972 void
1973 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1974 rtx dest, rtx src0, rtx src1, rtx carry)
1976 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1977 int i;
1978 int firstloop = 1;
1980 if (code == NEG)
1981 emit_move_insn (src0, const0_rtx);
1983 for (i = 0; i < num_words; i++)
1985 rtx w_src0, w_src1, w_dest;
1986 rtx insn;
1988 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1989 i * UNITS_PER_WORD);
1990 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1991 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1993 switch (code)
1995 case PLUS:
1996 if (firstloop
1997 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1998 continue;
2000 if (firstloop)
2001 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
2002 else
2003 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
2004 break;
2006 case NEG:
2007 case MINUS:
2008 case COMPARE:
2009 if (code == COMPARE && i == num_words - 1)
2011 rtx branch, sub, clobber, sub_1;
2013 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2014 gen_rtx_ZERO_EXTEND (HImode, carry));
2015 sub = gen_rtx_SET (VOIDmode, w_dest,
2016 gen_rtx_MINUS (HImode, sub_1, w_src1));
2017 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
2018 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2019 gen_rtx_IF_THEN_ELSE (VOIDmode,
2020 gen_rtx_EQ (HImode,
2021 sub_1,
2022 w_src1),
2023 pc_rtx,
2024 pc_rtx));
2025 insn = gen_rtx_PARALLEL (VOIDmode,
2026 gen_rtvec (3, branch, sub, clobber));
2028 else if (firstloop
2029 && code != COMPARE
2030 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2031 continue;
2032 else if (firstloop)
2033 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
2034 else
2035 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
2036 break;
2038 case IOR:
2039 case XOR:
2040 case AND:
2041 if (GET_CODE (w_src1) == CONST_INT
2042 && INTVAL (w_src1) == -(code == AND))
2043 continue;
2045 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2046 w_src0, w_src1));
2047 break;
2049 case NOT:
2050 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2051 break;
2053 default:
2054 gcc_unreachable ();
2057 firstloop = 0;
2058 emit (insn);
2061 /* If we emit nothing, try_split() will think we failed. So emit
2062 something that does nothing and can be optimized away. */
2063 if (firstloop)
2064 emit (gen_nop ());
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.

   MODE must be SImode; the 32-bit value lives in the even/odd hard
   register pair starting at REGNO (X).  TEMP is a scratch register,
   only consulted for the general (1 < size < 16) case.  The returned
   string lives in a static buffer, so the result is only valid until
   the next call.  */

const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  static char r[64];

  gcc_assert (GET_CODE (size_r) == CONST_INT
	      && GET_CODE (x) == REG && mode == SImode);

  /* Reduce the shift count modulo the bit width (32), matching the
     behavior the patterns rely on for out-of-range counts.  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  /* r0 holds the low 16 bits, r1 the high 16 bits.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  A shift by
     exactly 16 is just a register-to-register move plus a fill of
     the vacated half.  */
  if (size == 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  /* asr #15 replicates the sign bit into the high word.  */
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* Shifts greater than 16: move a word, fill, then shift the
     surviving word by the remaining (size - 16) bits.  */
  if (size > 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  Save the bits that cross the word boundary
     in TEMP, shift both halves, then OR the saved bits back in.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2181 /* Attribute handling. */
2183 /* Return nonzero if the function is an interrupt function. */
2185 xstormy16_interrupt_function_p (void)
2187 tree attributes;
2189 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2190 any functions are declared, which is demonstrably wrong, but
2191 it is worked around here. FIXME. */
2192 if (!cfun)
2193 return 0;
2195 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2196 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

/* Handlers are defined below; forward-declare them so the table can
   reference them.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes.  "interrupt" applies to function types
   only; "BELOW100"/"below100" (two spellings of the same attribute)
   mark objects placed in the below-100 address range.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler }  */
  { "interrupt", 0, 0, false, true,  true,  xstormy16_handle_interrupt_attribute },
  { "BELOW100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100",  0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
2215 /* Handle an "interrupt" attribute;
2216 arguments as in struct attribute_spec.handler. */
2217 static tree
2218 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2219 tree args ATTRIBUTE_UNUSED,
2220 int flags ATTRIBUTE_UNUSED,
2221 bool *no_add_attrs)
2223 if (TREE_CODE (*node) != FUNCTION_TYPE)
2225 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2226 IDENTIFIER_POINTER (name));
2227 *no_add_attrs = true;
2230 return NULL_TREE;
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  The attribute is
   valid on variables, pointer types, and type declarations; for a
   variable it additionally requires static storage duration.  */
static tree
xstormy16_handle_below100_attribute (tree *node,
				     tree name ATTRIBUTE_UNUSED,
				     tree args ATTRIBUTE_UNUSED,
				     int flags ATTRIBUTE_UNUSED,
				     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes,
	       "%<__BELOW100__%> attribute only applies to variables");
      *no_add_attrs = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Automatic variables cannot be assigned a fixed below-100
	 address, so reject anything that is neither public nor
	 static.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
		   "with auto storage class");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of machine-specific builtins.  ARG_OPS maps insn operands to
   builtin arguments; ARG_TYPES gives the signature, return type first.
   Note that __sdivlh/__smodlh (and __udivlh/__umodlh) expand to the
   same insn, differing only in which operand is returned.  */
static struct {
  const char *name;
  int md_code;
  const char *arg_ops;   /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};
2281 static void
2282 xstormy16_init_builtins (void)
2284 tree args, ret_type, arg;
2285 int i, a;
2287 ret_type = void_type_node;
2289 for (i=0; s16builtins[i].name; i++)
2291 args = void_list_node;
2292 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2294 switch (s16builtins[i].arg_types[a])
2296 case 's': arg = short_integer_type_node; break;
2297 case 'S': arg = short_unsigned_type_node; break;
2298 case 'l': arg = long_integer_type_node; break;
2299 case 'L': arg = long_unsigned_type_node; break;
2300 default: gcc_unreachable ();
2302 if (a == 0)
2303 ret_type = arg;
2304 else
2305 args = tree_cons (NULL_TREE, arg, args);
2307 add_builtin_function (s16builtins[i].name,
2308 build_function_type (ret_type, args),
2309 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the s16builtins.  EXP is the CALL_EXPR;
   TARGET, if non-null, is a suggested place for the result.  Returns
   the rtx holding the builtin's value, or 0 for a void result.
   Operands are mapped from builtin arguments via the entry's arg_ops
   string ('0'..'9' = argument index, 't' = fresh temp, 'r' = result).  */
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  /* DECL_FUNCTION_CODE is the index assigned in xstormy16_init_builtins.  */
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand up to 10 actual arguments into rtx form.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand doesn't satisfy the insn's predicate, force it
	 into a register.  For an output operand ('+' or '=' constraint)
	 we compute into a fresh register and copy back afterwards.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* GEN_FCN ignores the operands beyond the insn's arity.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy back any output operands that were redirected above, keeping
     RETVAL pointing at the caller-visible location.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.

   INSN is a conditional branch.  We search backwards for the insn
   that computes the tested register (a sign-extend for LT/GE, an AND
   — possibly preceded by a right shift — for EQ/NE) and for the load
   that feeds it.  When the whole chain matches a single-bit test of a
   below-100 memory location, the branch condition is rewritten to
   reference the memory byte directly and the now-dead feeder insns
   are deleted.  If any step fails to match, we return without
   changing anything.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit, which reaches the
	 comparison via a sign extension.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  /* Only safe if the tested register dies here; otherwise the
     feeder insns cannot be deleted.  */
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  int and_code = recog_memoized (and);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and = insn;
	      break;
	    }

	  /* Any other use or set of REG in between defeats the
	     transformation.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  if (recog_memoized (and) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}

      if (and)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  /* The shift is optional, so a failed match just
		     means there is no shift to fold in.  */
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }
  if (!and)
    return;

  /* Now look for the load that defines REG, starting just before the
     shift if one was found, otherwise just before the AND/extend.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* A sign test examines the top bit of the loaded value.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      /* Fold an intervening right shift into the bit position.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access to the single byte containing the tested
     bit; bump the address when the bit is in the high byte.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the memory byte directly.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2596 static void
2597 xstormy16_reorg (void)
2599 rtx insn;
2601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2603 if (! JUMP_P (insn))
2604 continue;
2605 combine_bnp (insn);
2610 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2612 static bool
2613 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2615 const HOST_WIDE_INT size = int_size_in_bytes (type);
2616 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Assembly directives for aligned 16- and 32-bit data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs handling.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Promote sub-word arguments, return values, and prototypes.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* The target hook vector, built from the macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated for this file.  */
#include "gt-stormy16.h"