/* score-mdaux.c for Sunplus S+CORE processor
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Sunnorth

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include <signal.h>
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "ggc.h"
#include "gstab.h"
#include "hashtab.h"
#include "debug.h"
#include "target.h"
#include "target-def.h"
#include "integrate.h"
#include "langhooks.h"
#include "cfglayout.h"
#include "score-mdaux.h"

#define BITSET_P(VALUE, BIT)  (((VALUE) & (1L << (BIT))) != 0)
#define INS_BUF_SZ            100

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx cmp_op0, cmp_op1;

static char ins[INS_BUF_SZ + 8];

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to the same object as SYMBOL.  */
static int
score_offset_within_object_p (rtx symbol, HOST_WIDE_INT offset)
{
  if (GET_CODE (symbol) != SYMBOL_REF)
    return 0;

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      && offset >= 0
      && offset < (int)GET_MODE_SIZE (get_pool_mode (symbol)))
    return 1;

  if (SYMBOL_REF_DECL (symbol) != 0
      && offset >= 0
      && offset < int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (symbol))))
    return 1;

  return 0;
}

/* Split X into a base and a constant offset, storing them in *BASE
   and *OFFSET respectively.  */
static void
score_split_const (rtx x, rtx *base, HOST_WIDE_INT *offset)
{
  *offset = 0;

  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      *offset += INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
    }

  *base = x;
}

/* Classify symbol X, which must be a SYMBOL_REF or a LABEL_REF.  */
static enum score_symbol_type
score_classify_symbol (rtx x)
{
  if (GET_CODE (x) == LABEL_REF)
    return SYMBOL_GENERAL;

  gcc_assert (GET_CODE (x) == SYMBOL_REF);

  if (CONSTANT_POOL_ADDRESS_P (x))
    {
      if (GET_MODE_SIZE (get_pool_mode (x)) <= SCORE_SDATA_MAX)
        return SYMBOL_SMALL_DATA;
      return SYMBOL_GENERAL;
    }

  if (SYMBOL_REF_SMALL_P (x))
    return SYMBOL_SMALL_DATA;
  return SYMBOL_GENERAL;
}

/* Return true if the current function must save REGNO.  */
static int
score_save_reg_p (unsigned int regno)
{
  /* Check call-saved registers.  */
  if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
    return 1;

  /* We need to save the old frame pointer before setting up a new one.  */
  if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
    return 1;

  /* We need to save the incoming return address if it is ever clobbered
     within the function.  */
  if (regno == RA_REGNUM && df_regs_ever_live_p (regno))
    return 1;

  return 0;
}

/* Return one word of double-word value OP, taking into account the fixed
   endianness of certain registers.  HIGH_P is true to select the high part,
   false to select the low part.  */
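/* The offset computed below follows the usual word layout: on a
   little-endian target the high word of a double-word value lives at
   byte offset UNITS_PER_WORD and the low word at offset 0, and a
   big-endian target is the mirror image.  The HI/LO accumulator pair
   is special-cased because its word order is fixed regardless of
   endianness.  */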
static rtx
subw (rtx op, int high_p)
{
  unsigned int byte;
  enum machine_mode mode = GET_MODE (op);

  if (mode == VOIDmode)
    mode = DImode;

  byte = (TARGET_LITTLE_ENDIAN ? high_p : !high_p) ? UNITS_PER_WORD : 0;

  if (GET_CODE (op) == REG && REGNO (op) == HI_REGNUM)
    return gen_rtx_REG (SImode, high_p ? HI_REGNUM : LO_REGNUM);

  if (GET_CODE (op) == MEM)
    return adjust_address (op, SImode, byte);

  return simplify_gen_subreg (SImode, op, mode, byte);
}

struct score_frame_info *
mda_cached_frame (void)
{
  static struct score_frame_info _frame_info;
  return &_frame_info;
}

/* Compute the layout of the current frame and return a pointer to the
   cached frame information.  SIZE is the size (in bytes) of the local
   variables.  */
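/* The frame computed below keeps the callee-saved register block at
   the top, just under the incoming stack pointer, with the one-word
   cprestore slot (reserved only for PIC code) directly beneath it and
   the local variables and outgoing argument area filling the rest.
   gp_sp_offset is the offset, from the new stack pointer, of the
   highest-addressed register save slot.  */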
struct score_frame_info *
mda_compute_frame_size (HOST_WIDE_INT size)
{
  unsigned int regno;
  struct score_frame_info *f = mda_cached_frame ();

  memset (f, 0, sizeof (struct score_frame_info));
  f->gp_reg_size = 0;
  f->mask = 0;
  f->var_size = SCORE_STACK_ALIGN (size);
  f->args_size = current_function_outgoing_args_size;
  f->cprestore_size = flag_pic ? UNITS_PER_WORD : 0;
  if (f->var_size == 0 && current_function_is_leaf)
    f->args_size = f->cprestore_size = 0;

  if (f->args_size == 0 && current_function_calls_alloca)
    f->args_size = UNITS_PER_WORD;

  f->total_size = f->var_size + f->args_size + f->cprestore_size;
  for (regno = GP_REG_FIRST; regno <= GP_REG_LAST; regno++)
    {
      if (score_save_reg_p (regno))
        {
          f->gp_reg_size += GET_MODE_SIZE (SImode);
          f->mask |= 1 << (regno - GP_REG_FIRST);
        }
    }

  if (current_function_calls_eh_return)
    {
      unsigned int i;
      for (i = 0;; ++i)
        {
          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          f->gp_reg_size += GET_MODE_SIZE (SImode);
          f->mask |= 1 << (regno - GP_REG_FIRST);
        }
    }

  f->total_size += f->gp_reg_size;
  f->num_gp = f->gp_reg_size / UNITS_PER_WORD;

  if (f->mask)
    {
      HOST_WIDE_INT offset;
      offset = (f->args_size + f->cprestore_size + f->var_size
                + f->gp_reg_size - GET_MODE_SIZE (SImode));
      f->gp_sp_offset = offset;
    }
  else
    f->gp_sp_offset = 0;

  return f;
}

/* Generate the prologue instructions for entry into an S+core function.  */
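/* The emitted sequence is: the cpload insn for PIC code, a
   pre-decrement push of each callee-saved register, a stack-pointer
   adjustment by the remaining frame size, a copy of the stack pointer
   into the frame pointer when one is needed, and finally the
   cprestore insn for PIC code.  */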
void
mdx_prologue (void)
{
#define EMIT_PL(_rtx)        RTX_FRAME_RELATED_P (_rtx) = 1

  struct score_frame_info *f = mda_compute_frame_size (get_frame_size ());
  HOST_WIDE_INT size;
  int regno;

  size = f->total_size - f->gp_reg_size;

  if (flag_pic)
    emit_insn (gen_cpload ());

  for (regno = (int) GP_REG_LAST; regno >= (int) GP_REG_FIRST; regno--)
    {
      if (BITSET_P (f->mask, regno - GP_REG_FIRST))
        {
          rtx mem = gen_rtx_MEM (SImode,
                                 gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
          rtx reg = gen_rtx_REG (SImode, regno);
          if (!current_function_calls_eh_return)
            MEM_READONLY_P (mem) = 1;
          EMIT_PL (emit_insn (gen_pushsi (mem, reg)));
        }
    }

  if (size > 0)
    {
      rtx insn;

      if (CONST_OK_FOR_LETTER_P (-size, 'L'))
        EMIT_PL (emit_insn (gen_add3_insn (stack_pointer_rtx,
                                           stack_pointer_rtx,
                                           GEN_INT (-size))));
      else
        {
          EMIT_PL (emit_move_insn (gen_rtx_REG (Pmode, PROLOGUE_TEMP_REGNUM),
                                   GEN_INT (size)));
          EMIT_PL (emit_insn
                   (gen_sub3_insn (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   gen_rtx_REG (Pmode,
                                                PROLOGUE_TEMP_REGNUM))));

          insn = get_last_insn ();
          REG_NOTES (insn) =
            alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                          plus_constant (stack_pointer_rtx,
                                                         -size)),
                             REG_NOTES (insn));
        }
    }

  if (frame_pointer_needed)
    EMIT_PL (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (flag_pic && f->cprestore_size)
    {
      if (frame_pointer_needed)
        emit_insn (gen_cprestore_use_fp (GEN_INT (size - f->cprestore_size)));
      else
        emit_insn (gen_cprestore_use_sp (GEN_INT (size - f->cprestore_size)));
    }

#undef EMIT_PL
}

/* Generate the epilogue instructions in an S+core function.  */
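/* This mirrors mdx_prologue: deallocate the frame (through the frame
   pointer if one was set up), apply any exception-handler stack
   adjustment, restore the saved registers with post-increment loads,
   and return through RA unless this is a sibling call.  */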
void
mdx_epilogue (int sibcall_p)
{
  struct score_frame_info *f = mda_compute_frame_size (get_frame_size ());
  HOST_WIDE_INT size;
  int regno;
  rtx base;

  size = f->total_size - f->gp_reg_size;

  if (!frame_pointer_needed)
    base = stack_pointer_rtx;
  else
    base = hard_frame_pointer_rtx;

  if (size)
    {
      if (CONST_OK_FOR_LETTER_P (size, 'L'))
        emit_insn (gen_add3_insn (base, base, GEN_INT (size)));
      else
        {
          emit_move_insn (gen_rtx_REG (Pmode, EPILOGUE_TEMP_REGNUM),
                          GEN_INT (size));
          emit_insn (gen_add3_insn (base, base,
                                    gen_rtx_REG (Pmode,
                                                 EPILOGUE_TEMP_REGNUM)));
        }
    }

  if (base != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, base);

  if (current_function_calls_eh_return)
    emit_insn (gen_add3_insn (stack_pointer_rtx,
                              stack_pointer_rtx,
                              EH_RETURN_STACKADJ_RTX));

  for (regno = (int) GP_REG_FIRST; regno <= (int) GP_REG_LAST; regno++)
    {
      if (BITSET_P (f->mask, regno - GP_REG_FIRST))
        {
          rtx mem = gen_rtx_MEM (SImode,
                                 gen_rtx_POST_INC (SImode, stack_pointer_rtx));
          rtx reg = gen_rtx_REG (SImode, regno);

          if (!current_function_calls_eh_return)
            MEM_READONLY_P (mem) = 1;

          emit_insn (gen_popsi (reg, mem));
        }
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, RA_REGNUM)));
}

/* Return true if X is a valid base register for the given mode.
   Allow only hard registers if STRICT.  */
int
mda_valid_base_register_p (rtx x, int strict)
{
  if (!strict && GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  return (GET_CODE (x) == REG
          && score_regno_mode_ok_for_base_p (REGNO (x), strict));
}

/* Return true if X is a valid address for machine mode MODE.  If it is,
   fill in INFO appropriately.  STRICT is true if we should only accept
   hard base registers.  */
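/* The accepted forms are: a base register (or, in non-strict mode, a
   SUBREG of one); a base register plus a constant offset in the
   15-bit immediate range; a pre/post-modified base register for
   accesses no wider than a word; a bare constant in the same
   immediate range; and a general or small-data symbolic reference.  */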
int
mda_classify_address (struct score_address_info *info,
                      enum machine_mode mode, rtx x, int strict)
{
  info->code = GET_CODE (x);

  switch (info->code)
    {
    case REG:
    case SUBREG:
      info->type = ADD_REG;
      info->reg = x;
      info->offset = const0_rtx;
      return mda_valid_base_register_p (info->reg, strict);
    case PLUS:
      info->type = ADD_REG;
      info->reg = XEXP (x, 0);
      info->offset = XEXP (x, 1);
      return (mda_valid_base_register_p (info->reg, strict)
              && GET_CODE (info->offset) == CONST_INT
              && IMM_IN_RANGE (INTVAL (info->offset), 15, 1));
    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (SImode))
        return false;
      info->type = ADD_REG;
      info->reg = XEXP (x, 0);
      info->offset = GEN_INT (GET_MODE_SIZE (mode));
      return mda_valid_base_register_p (info->reg, strict);
    case CONST_INT:
      info->type = ADD_CONST_INT;
      return IMM_IN_RANGE (INTVAL (x), 15, 1);
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      info->type = ADD_SYMBOLIC;
      return (mda_symbolic_constant_p (x, &info->symbol_type)
              && (info->symbol_type == SYMBOL_GENERAL
                  || info->symbol_type == SYMBOL_SMALL_DATA));
    default:
      return 0;
    }
}

void
mda_gen_cmp (enum machine_mode mode)
{
  emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_REG (mode, CC_REGNUM),
                          gen_rtx_COMPARE (mode, cmp_op0, cmp_op1)));
}

/* Return true if X is a symbolic constant that can be calculated in
   the same way as a bare symbol.  If it is, store the type of the
   symbol in *SYMBOL_TYPE.  */
int
mda_symbolic_constant_p (rtx x, enum score_symbol_type *symbol_type)
{
  HOST_WIDE_INT offset;

  score_split_const (x, &x, &offset);
  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    *symbol_type = score_classify_symbol (x);
  else
    return 0;

  if (offset == 0)
    return 1;

  /* If the offset does not fit in the 15-bit immediate range, it must
     be reloaded.  */
  if (!IMM_IN_RANGE (offset, 15, 1))
    return 0;

  switch (*symbol_type)
    {
    case SYMBOL_GENERAL:
      return 1;
    case SYMBOL_SMALL_DATA:
      return score_offset_within_object_p (x, offset);
    default:
      gcc_unreachable ();
    }
}

void
mdx_movsicc (rtx *ops)
{
  enum machine_mode mode;

  mode = score_select_cc_mode (GET_CODE (ops[1]), ops[2], ops[3]);
  emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_REG (mode, CC_REGNUM),
                          gen_rtx_COMPARE (mode, cmp_op0, cmp_op1)));
}

/* The call and sibcall patterns both use this function.  */
void
mdx_call (rtx *ops, bool sib)
{
  rtx addr = XEXP (ops[0], 0);
  if (!call_insn_operand (addr, VOIDmode))
    {
      rtx oaddr = addr;
      addr = gen_reg_rtx (Pmode);
      gen_move_insn (addr, oaddr);
    }

  if (sib)
    emit_call_insn (gen_sibcall_internal (addr, ops[1]));
  else
    emit_call_insn (gen_call_internal (addr, ops[1]));
}

/* The call_value and sibcall_value patterns both use this function.  */
void
mdx_call_value (rtx *ops, bool sib)
{
  rtx result = ops[0];
  rtx addr = XEXP (ops[1], 0);
  rtx arg = ops[2];

  if (!call_insn_operand (addr, VOIDmode))
    {
      rtx oaddr = addr;
      addr = gen_reg_rtx (Pmode);
      gen_move_insn (addr, oaddr);
    }

  if (sib)
    emit_call_insn (gen_sibcall_value_internal (result, addr, arg));
  else
    emit_call_insn (gen_call_value_internal (result, addr, arg));
}

/* Machine Split */
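/* mds_movdi splits a DImode move into two SImode moves.  If the low
   word of the destination overlaps the source, the high words are
   moved first so the source is not clobbered before it is read;
   otherwise the low words go first.  */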
void
mds_movdi (rtx *ops)
{
  rtx dst = ops[0];
  rtx src = ops[1];
  rtx dst0 = subw (dst, 0);
  rtx dst1 = subw (dst, 1);
  rtx src0 = subw (src, 0);
  rtx src1 = subw (src, 1);

  if (GET_CODE (dst0) == REG && reg_overlap_mentioned_p (dst0, src))
    {
      emit_move_insn (dst1, src1);
      emit_move_insn (dst0, src0);
    }
  else
    {
      emit_move_insn (dst0, src0);
      emit_move_insn (dst1, src1);
    }
}

void
mds_zero_extract_andi (rtx *ops)
{
  if (INTVAL (ops[1]) == 1 && const_uimm5 (ops[2], SImode))
    emit_insn (gen_zero_extract_bittst (ops[0], ops[2]));
  else
    {
      unsigned HOST_WIDE_INT mask;
      mask = (0xffffffffU & ((1U << INTVAL (ops[1])) - 1U));
      mask = mask << INTVAL (ops[2]);
      emit_insn (gen_andsi3_cmp (ops[3], ops[0],
                                 gen_int_mode (mask, SImode)));
    }
}

/* Return true if ADDR is a memory reference whose address is a
   PRE/POST increment or decrement.  */
static bool
mda_pindex_mem (rtx addr)
{
  if (GET_CODE (addr) == MEM)
    {
      switch (GET_CODE (XEXP (addr, 0)))
        {
        case PRE_DEC:
        case POST_DEC:
        case PRE_INC:
        case POST_INC:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Output the asm operands for a load/store insn.  */
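/* Three operand encodings are chosen between here: the 16-bit
   "<op>! rD, [rA]" form when both registers are low registers and the
   offset is zero; the 16-bit "<op>p! rD, offset" form for
   frame-pointer-relative accesses whose offset is aligned to the
   access unit and, once scaled, satisfies the 'J' constraint; and the
   generic "<op> rD, address" form otherwise.  The base mnemonic has
   already been placed in INS by the caller, so only the suffix and
   operands are appended at IP.  */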
static int
pr_addr_post (rtx *ops, int idata, int iaddr, char *ip, enum mda_mem_unit unit)
{
  struct score_address_info ai;

  gcc_assert (GET_CODE (ops[idata]) == REG);
  gcc_assert (mda_classify_address (&ai, SImode, XEXP (ops[iaddr], 0), true));

  if (!mda_pindex_mem (ops[iaddr])
      && ai.type == ADD_REG
      && GET_CODE (ai.offset) == CONST_INT
      && G16_REG_P (REGNO (ops[idata]))
      && G16_REG_P (REGNO (ai.reg)))
    {
      if (INTVAL (ai.offset) == 0)
        {
          ops[iaddr] = ai.reg;
          return snprintf (ip, INS_BUF_SZ,
                           "! %%%d, [%%%d]", idata, iaddr);
        }
      if (REGNO (ai.reg) == HARD_FRAME_POINTER_REGNUM)
        {
          HOST_WIDE_INT offset = INTVAL (ai.offset);
          if (MDA_ALIGN_UNIT (offset, unit)
              && CONST_OK_FOR_LETTER_P (offset >> unit, 'J'))
            {
              ops[iaddr] = ai.offset;
              return snprintf (ip, INS_BUF_SZ,
                               "p! %%%d, %%c%d", idata, iaddr);
            }
        }
    }

  return snprintf (ip, INS_BUF_SZ, " %%%d, %%a%d", idata, iaddr);
}

/* Output asm insn for load.  */
const char *
mdp_linsn (rtx *ops, enum mda_mem_unit unit, bool sign)
{
  const char *pre_ins[] =
    {"lbu", "lhu", "lw", "??", "lb", "lh", "lw", "??"};
  char *ip;

  strcpy (ins, pre_ins[(sign ? 4 : 0) + unit]);
  ip = ins + strlen (ins);

  if ((!sign && unit != MDA_HWORD)
      || (sign && unit != MDA_BYTE))
    pr_addr_post (ops, 0, 1, ip, unit);
  else
    snprintf (ip, INS_BUF_SZ, " %%0, %%a1");

  return ins;
}

/* Output asm insn for store.  */
const char *
mdp_sinsn (rtx *ops, enum mda_mem_unit unit)
{
  const char *pre_ins[] = {"sb", "sh", "sw"};
  char *ip;

  strcpy (ins, pre_ins[unit]);
  ip = ins + strlen (ins);
  pr_addr_post (ops, 1, 0, ip, unit);
  return ins;
}

/* Output asm insn for load immediate.  */
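/* The template is chosen purely by value: the 16-bit "ldiu!" when the
   destination is a low register and the value passes
   IMM_IN_RANGE (v, 8, 0); "ldi" when it passes IMM_IN_RANGE (v, 16, 1);
   "ldis" (which sets the upper half-word) when the low 16 bits are
   zero; and "li" otherwise, which is presumably an assembler macro
   that synthesises the full 32-bit constant.  */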
const char *
mdp_limm (rtx *ops)
{
  HOST_WIDE_INT v;

  gcc_assert (GET_CODE (ops[0]) == REG);
  gcc_assert (GET_CODE (ops[1]) == CONST_INT);

  v = INTVAL (ops[1]);
  if (G16_REG_P (REGNO (ops[0])) && IMM_IN_RANGE (v, 8, 0))
    return "ldiu! %0, %c1";
  else if (IMM_IN_RANGE (v, 16, 1))
    return "ldi %0, %c1";
  else if ((v & 0xffff) == 0)
    return "ldis %0, %U1";
  else
    return "li %0, %c1";
}

/* Output asm insn for move.  */
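/* "mv!" is the 16-bit form for low-to-low register copies; when
   exactly one operand is a low register the "mlfh!"/"mhfl!" forms are
   used (low destination and high destination respectively); "mv" is
   the 32-bit form for everything else.  */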
const char *
mdp_move (rtx *ops)
{
  gcc_assert (GET_CODE (ops[0]) == REG);
  gcc_assert (GET_CODE (ops[1]) == REG);

  if (G16_REG_P (REGNO (ops[0])))
    {
      if (G16_REG_P (REGNO (ops[1])))
        return "mv! %0, %1";
      else
        return "mlfh! %0, %1";
    }
  else if (G16_REG_P (REGNO (ops[1])))
    return "mhfl! %0, %1";
  else
    return "mv %0, %1";
}

/* Emit lcb/lce insns.  */
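/* This expands an unaligned full-word load into an lcb/lce pair
   through a scratch address register.  Only word-sized, byte-aligned
   references are handled; anything else returns false so the caller
   can fall back on the generic code.  */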
bool
mdx_unaligned_load (rtx *ops)
{
  rtx dst = ops[0];
  rtx src = ops[1];
  rtx len = ops[2];
  rtx off = ops[3];
  rtx addr_reg;

  if (INTVAL (len) != BITS_PER_WORD
      || (INTVAL (off) % BITS_PER_UNIT) != 0)
    return false;

  gcc_assert (GET_MODE_SIZE (GET_MODE (dst)) == GET_MODE_SIZE (SImode));

  addr_reg = copy_addr_to_reg (XEXP (src, 0));
  emit_insn (gen_move_lcb (addr_reg, addr_reg));
  emit_insn (gen_move_lce (addr_reg, addr_reg, dst));

  return true;
}

/* Emit scb/sce insns.  */
bool
mdx_unaligned_store (rtx *ops)
{
  rtx dst = ops[0];
  rtx len = ops[1];
  rtx off = ops[2];
  rtx src = ops[3];
  rtx addr_reg;

  if (INTVAL (len) != BITS_PER_WORD
      || (INTVAL (off) % BITS_PER_UNIT) != 0)
    return false;

  gcc_assert (GET_MODE_SIZE (GET_MODE (src)) == GET_MODE_SIZE (SImode));

  addr_reg = copy_addr_to_reg (XEXP (dst, 0));
  emit_insn (gen_move_scb (addr_reg, addr_reg, src));
  emit_insn (gen_move_sce (addr_reg, addr_reg));

  return true;
}

/* If length is short, generate move insns straight.  */
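/* Word-aligned operands are copied with plain word moves; unaligned
   operands use the lcb/lcw/lce load sequence and the scb/scw/sce
   store sequence.  The whole block is loaded into temporaries before
   any store is emitted, and any sub-word tail is finished off with
   move_by_pieces.  */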
static void
mdx_block_move_straight (rtx dst, rtx src, HOST_WIDE_INT length)
{
  HOST_WIDE_INT leftover;
  int i, reg_count;
  rtx *regs;

  leftover = length % UNITS_PER_WORD;
  length -= leftover;
  reg_count = length / UNITS_PER_WORD;

  regs = alloca (sizeof (rtx) * reg_count);
  for (i = 0; i < reg_count; i++)
    regs[i] = gen_reg_rtx (SImode);

  /* Load from src to regs.  */
  if (MEM_ALIGN (src) >= BITS_PER_WORD)
    {
      HOST_WIDE_INT offset = 0;
      for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
        emit_move_insn (regs[i], adjust_address (src, SImode, offset));
    }
  else if (reg_count >= 1)
    {
      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));

      emit_insn (gen_move_lcb (src_reg, src_reg));
      for (i = 0; i < (reg_count - 1); i++)
        emit_insn (gen_move_lcw (src_reg, src_reg, regs[i]));
      emit_insn (gen_move_lce (src_reg, src_reg, regs[i]));
    }

  /* Store regs to dest.  */
  if (MEM_ALIGN (dst) >= BITS_PER_WORD)
    {
      HOST_WIDE_INT offset = 0;
      for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
        emit_move_insn (adjust_address (dst, SImode, offset), regs[i]);
    }
  else if (reg_count >= 1)
    {
      rtx dst_reg = copy_addr_to_reg (XEXP (dst, 0));

      emit_insn (gen_move_scb (dst_reg, dst_reg, regs[0]));
      for (i = 1; i < reg_count; i++)
        emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i]));
      emit_insn (gen_move_sce (dst_reg, dst_reg));
    }

  /* Mop up any left-over bytes.  */
  if (leftover > 0)
    {
      src = adjust_address (src, BLKmode, length);
      dst = adjust_address (dst, BLKmode, length);
      move_by_pieces (dst, src, leftover,
                      MIN (MEM_ALIGN (src), MEM_ALIGN (dst)), 0);
    }
}

/* Generate loop head when dst or src is unaligned.  */
static void
mdx_block_move_loop_head (rtx dst_reg, HOST_WIDE_INT dst_align,
                          rtx src_reg, HOST_WIDE_INT src_align,
                          HOST_WIDE_INT length)
{
  bool src_unaligned = (src_align < BITS_PER_WORD);
  bool dst_unaligned = (dst_align < BITS_PER_WORD);

  rtx temp = gen_reg_rtx (SImode);

  gcc_assert (length == UNITS_PER_WORD);

  if (src_unaligned)
    {
      emit_insn (gen_move_lcb (src_reg, src_reg));
      emit_insn (gen_move_lcw (src_reg, src_reg, temp));
    }
  else
    emit_insn (gen_move_lw_a (src_reg,
                              src_reg, gen_int_mode (4, SImode), temp));

  if (dst_unaligned)
    emit_insn (gen_move_scb (dst_reg, dst_reg, temp));
  else
    emit_insn (gen_move_sw_a (dst_reg,
                              dst_reg, gen_int_mode (4, SImode), temp));
}

/* Generate loop body, copy length bytes per iteration.  */
static void
mdx_block_move_loop_body (rtx dst_reg, HOST_WIDE_INT dst_align,
                          rtx src_reg, HOST_WIDE_INT src_align,
                          HOST_WIDE_INT length)
{
  int reg_count = length / UNITS_PER_WORD;
  rtx *regs = alloca (sizeof (rtx) * reg_count);
  int i;
  bool src_unaligned = (src_align < BITS_PER_WORD);
  bool dst_unaligned = (dst_align < BITS_PER_WORD);

  for (i = 0; i < reg_count; i++)
    regs[i] = gen_reg_rtx (SImode);

  if (src_unaligned)
    {
      for (i = 0; i < reg_count; i++)
        emit_insn (gen_move_lcw (src_reg, src_reg, regs[i]));
    }
  else
    {
      for (i = 0; i < reg_count; i++)
        emit_insn (gen_move_lw_a (src_reg,
                                  src_reg, gen_int_mode (4, SImode), regs[i]));
    }

  if (dst_unaligned)
    {
      for (i = 0; i < reg_count; i++)
        emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i]));
    }
  else
    {
      for (i = 0; i < reg_count; i++)
        emit_insn (gen_move_sw_a (dst_reg,
                                  dst_reg, gen_int_mode (4, SImode), regs[i]));
    }
}

/* Generate loop foot, copy the leftover bytes.  */
static void
mdx_block_move_loop_foot (rtx dst_reg, HOST_WIDE_INT dst_align,
                          rtx src_reg, HOST_WIDE_INT src_align,
                          HOST_WIDE_INT length)
{
  bool src_unaligned = (src_align < BITS_PER_WORD);
  bool dst_unaligned = (dst_align < BITS_PER_WORD);

  HOST_WIDE_INT leftover;

  leftover = length % UNITS_PER_WORD;
  length -= leftover;

  if (length > 0)
    mdx_block_move_loop_body (dst_reg, dst_align,
                              src_reg, src_align, length);

  if (dst_unaligned)
    emit_insn (gen_move_sce (dst_reg, dst_reg));

  if (leftover > 0)
    {
      HOST_WIDE_INT src_adj = src_unaligned ? -4 : 0;
      HOST_WIDE_INT dst_adj = dst_unaligned ? -4 : 0;
      rtx temp;

      gcc_assert (leftover < UNITS_PER_WORD);

      if (leftover >= UNITS_PER_WORD / 2
          && src_align >= BITS_PER_WORD / 2
          && dst_align >= BITS_PER_WORD / 2)
        {
          temp = gen_reg_rtx (HImode);
          emit_insn (gen_move_lhu_b (src_reg, src_reg,
                                     gen_int_mode (src_adj, SImode), temp));
          emit_insn (gen_move_sh_b (dst_reg, dst_reg,
                                    gen_int_mode (dst_adj, SImode), temp));
          leftover -= UNITS_PER_WORD / 2;
          src_adj = UNITS_PER_WORD / 2;
          dst_adj = UNITS_PER_WORD / 2;
        }

      while (leftover > 0)
        {
          temp = gen_reg_rtx (QImode);
          emit_insn (gen_move_lbu_b (src_reg, src_reg,
                                     gen_int_mode (src_adj, SImode), temp));
          emit_insn (gen_move_sb_b (dst_reg, dst_reg,
                                    gen_int_mode (dst_adj, SImode), temp));
          leftover--;
          src_adj = 1;
          dst_adj = 1;
        }
    }
}

#define MIN_MOVE_REGS 3
#define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
#define MAX_MOVE_REGS 4
#define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)

/* The length is large, so generate a loop if profitable.
   The loop consists of a head, a body and a foot.  */
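/* With 4-byte words the body copies MAX_MOVE_BYTES (16) per
   iteration, stepping down towards MIN_MOVE_BYTES (12) until at least
   two iterations are possible.  For example, an aligned 100-byte copy
   uses a 16-byte body executed six times, and the remaining 4 bytes
   are handled by the loop foot.  */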
static void
mdx_block_move_loop (rtx dst, rtx src, HOST_WIDE_INT length)
{
  HOST_WIDE_INT src_align = MEM_ALIGN (src);
  HOST_WIDE_INT dst_align = MEM_ALIGN (dst);
  HOST_WIDE_INT loop_mov_bytes;
  HOST_WIDE_INT iteration = 0;
  HOST_WIDE_INT head_length = 0, leftover;
  rtx label, src_reg, dst_reg, final_dst;

  bool gen_loop_head = (src_align < BITS_PER_WORD
                        || dst_align < BITS_PER_WORD);

  if (gen_loop_head)
    head_length += UNITS_PER_WORD;

  for (loop_mov_bytes = MAX_MOVE_BYTES;
       loop_mov_bytes >= MIN_MOVE_BYTES;
       loop_mov_bytes -= UNITS_PER_WORD)
    {
      iteration = (length - head_length) / loop_mov_bytes;
      if (iteration > 1)
        break;
    }

  if (iteration <= 1)
    {
      mdx_block_move_straight (dst, src, length);
      return;
    }

  leftover = (length - head_length) % loop_mov_bytes;
  length -= leftover;

  src_reg = copy_addr_to_reg (XEXP (src, 0));
  dst_reg = copy_addr_to_reg (XEXP (dst, 0));
  final_dst = expand_simple_binop (Pmode, PLUS, dst_reg, GEN_INT (length),
                                   0, 0, OPTAB_WIDEN);

  if (gen_loop_head)
    mdx_block_move_loop_head (dst_reg, dst_align,
                              src_reg, src_align, head_length);

  label = gen_label_rtx ();
  emit_label (label);

  mdx_block_move_loop_body (dst_reg, dst_align,
                            src_reg, src_align, loop_mov_bytes);

  emit_insn (gen_cmpsi (dst_reg, final_dst));
  emit_jump_insn (gen_bne (label));

  mdx_block_move_loop_foot (dst_reg, dst_align,
                            src_reg, src_align, leftover);
}

/* Generate block move, for misc.md: "movmemsi".  */
bool
mdx_block_move (rtx *ops)
{
  rtx dst = ops[0];
  rtx src = ops[1];
  rtx length = ops[2];

  if (TARGET_LITTLE_ENDIAN
      && (MEM_ALIGN (src) < BITS_PER_WORD || MEM_ALIGN (dst) < BITS_PER_WORD)
      && INTVAL (length) >= UNITS_PER_WORD)
    return false;

  if (GET_CODE (length) == CONST_INT)
    {
      if (INTVAL (length) <= 2 * MAX_MOVE_BYTES)
        {
          mdx_block_move_straight (dst, src, INTVAL (length));
          return true;
        }
      else if (optimize &&
               !(flag_unroll_loops || flag_unroll_all_loops))
        {
          mdx_block_move_loop (dst, src, INTVAL (length));
          return true;
        }
    }

  return false;
}

/* Generate add insn.  */
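/* "addei!"/"subei!" add or subtract a power of two and encode only
   its bit index, so ops[2] is rewritten to ffs(v)-1 before the
   template is returned; other immediates use "addi.c"/"addi"
   depending on whether the condition code is set.  */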
const char *
mdp_select_add_imm (rtx *ops, bool set_cc)
{
  HOST_WIDE_INT v = INTVAL (ops[2]);

  gcc_assert (GET_CODE (ops[2]) == CONST_INT);
  gcc_assert (REGNO (ops[0]) == REGNO (ops[1]));

  if (set_cc && G16_REG_P (REGNO (ops[0])))
    {
      if (v > 0 && IMM_IS_POW_OF_2 ((unsigned HOST_WIDE_INT) v, 0, 15))
        {
          ops[2] = GEN_INT (ffs (v) - 1);
          return "addei! %0, %c2";
        }

      if (v < 0 && IMM_IS_POW_OF_2 ((unsigned HOST_WIDE_INT) (-v), 0, 15))
        {
          ops[2] = GEN_INT (ffs (-v) - 1);
          return "subei! %0, %c2";
        }
    }

  if (set_cc)
    return "addi.c %0, %c2";
  else
    return "addi %0, %c2";
}

/* Output arith insn.  */
const char *
mdp_select (rtx *ops, const char *inst_pre,
            bool commu, const char *letter, bool set_cc)
{
  gcc_assert (GET_CODE (ops[0]) == REG);
  gcc_assert (GET_CODE (ops[1]) == REG);

  if (set_cc && G16_REG_P (REGNO (ops[0]))
      && (GET_CODE (ops[2]) == REG ? G16_REG_P (REGNO (ops[2])) : 1)
      && REGNO (ops[0]) == REGNO (ops[1]))
    {
      snprintf (ins, INS_BUF_SZ, "%s! %%0, %%%s2", inst_pre, letter);
      return ins;
    }

  if (commu && set_cc && G16_REG_P (REGNO (ops[0]))
      && G16_REG_P (REGNO (ops[1]))
      && REGNO (ops[0]) == REGNO (ops[2]))
    {
      gcc_assert (GET_CODE (ops[2]) == REG);
      snprintf (ins, INS_BUF_SZ, "%s! %%0, %%%s1", inst_pre, letter);
      return ins;
    }

  if (set_cc)
    snprintf (ins, INS_BUF_SZ, "%s.c %%0, %%1, %%%s2", inst_pre, letter);
  else
    snprintf (ins, INS_BUF_SZ, "%s %%0, %%1, %%%s2", inst_pre, letter);
  return ins;
}