* config/bfin/bfin.c (bfin_delegitimize_address): New.
[official-gcc/alias-decl.git] / gcc / config / bfin / bfin.c
blob288e32703a3f16ab115ac62b73bceb3946fadfc5
1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "integrate.h"
49 #include "cgraph.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
52 #include "tm-preds.h"
53 #include "gt-bfin.h"
55 /* Test and compare insns in bfin.md store the information needed to
56 generate branch and scc insns here. */
57 rtx bfin_compare_op0, bfin_compare_op1;
59 /* RTX for condition code flag register and RETS register */
60 extern GTY(()) rtx bfin_cc_rtx;
61 extern GTY(()) rtx bfin_rets_rtx;
62 rtx bfin_cc_rtx, bfin_rets_rtx;
64 int max_arg_registers = 0;
66 /* Arrays used when emitting register names. */
67 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
68 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
69 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
70 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
72 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
74 /* Nonzero if -mshared-library-id was given. */
75 static int bfin_lib_id_given;
77 static void
78 bfin_globalize_label (FILE *stream, const char *name)
80 fputs (".global ", stream);
81 assemble_name (stream, name);
82 fputc (';',stream);
83 fputc ('\n',stream);
86 static void
87 output_file_start (void)
89 FILE *file = asm_out_file;
90 int i;
92 fprintf (file, ".file \"%s\";\n", input_filename);
94 for (i = 0; arg_regs[i] >= 0; i++)
96 max_arg_registers = i; /* how many arg reg used */
99 /* Called early in the compilation to conditionally modify
100 fixed_regs/call_used_regs. */
102 void
103 conditional_register_usage (void)
105 /* initialize condition code flag register rtx */
106 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
107 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
110 /* Examine machine-dependent attributes of function type FUNTYPE and return its
111 type. See the definition of E_FUNKIND. */
113 static e_funkind funkind (tree funtype)
115 tree attrs = TYPE_ATTRIBUTES (funtype);
116 if (lookup_attribute ("interrupt_handler", attrs))
117 return INTERRUPT_HANDLER;
118 else if (lookup_attribute ("exception_handler", attrs))
119 return EXCPT_HANDLER;
120 else if (lookup_attribute ("nmi_handler", attrs))
121 return NMI_HANDLER;
122 else
123 return SUBROUTINE;
126 /* Legitimize PIC addresses. If the address is already position-independent,
127 we return ORIG. Newly generated position-independent addresses go into a
128 reg. This is REG if nonzero, otherwise we allocate register(s) as
129 necessary. PICREG is the register holding the pointer to the PIC offset
130 table. */
132 static rtx
133 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
135 rtx addr = orig;
136 rtx new = orig;
138 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
140 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
141 reg = new = orig;
142 else
144 int unspec;
145 rtx tmp;
147 if (TARGET_ID_SHARED_LIBRARY)
148 unspec = UNSPEC_MOVE_PIC;
149 else if (GET_CODE (addr) == SYMBOL_REF
150 && SYMBOL_REF_FUNCTION_P (addr))
152 unspec = UNSPEC_FUNCDESC_GOT17M4;
154 else
156 unspec = UNSPEC_MOVE_FDPIC;
159 if (reg == 0)
161 gcc_assert (!no_new_pseudos);
162 reg = gen_reg_rtx (Pmode);
165 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
166 new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
168 emit_move_insn (reg, new);
170 if (picreg == pic_offset_table_rtx)
171 current_function_uses_pic_offset_table = 1;
172 return reg;
175 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
177 rtx base;
179 if (GET_CODE (addr) == CONST)
181 addr = XEXP (addr, 0);
182 gcc_assert (GET_CODE (addr) == PLUS);
185 if (XEXP (addr, 0) == picreg)
186 return orig;
188 if (reg == 0)
190 gcc_assert (!no_new_pseudos);
191 reg = gen_reg_rtx (Pmode);
194 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
195 addr = legitimize_pic_address (XEXP (addr, 1),
196 base == reg ? NULL_RTX : reg,
197 picreg);
199 if (GET_CODE (addr) == CONST_INT)
201 gcc_assert (! reload_in_progress && ! reload_completed);
202 addr = force_reg (Pmode, addr);
205 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
207 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
208 addr = XEXP (addr, 1);
211 return gen_rtx_PLUS (Pmode, base, addr);
214 return new;
217 /* Stack frame layout. */
219 /* Compute the number of DREGS to save with a push_multiple operation.
220 This could include registers that aren't modified in the function,
221 since push_multiple only takes a range of registers.
222 If IS_INTHANDLER, then everything that is live must be saved, even
223 if normally call-clobbered. */
225 static int
226 n_dregs_to_save (bool is_inthandler)
228 unsigned i;
230 for (i = REG_R0; i <= REG_R7; i++)
232 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
233 return REG_R7 - i + 1;
235 if (current_function_calls_eh_return)
237 unsigned j;
238 for (j = 0; ; j++)
240 unsigned test = EH_RETURN_DATA_REGNO (j);
241 if (test == INVALID_REGNUM)
242 break;
243 if (test == i)
244 return REG_R7 - i + 1;
249 return 0;
252 /* Like n_dregs_to_save, but compute number of PREGS to save. */
254 static int
255 n_pregs_to_save (bool is_inthandler)
257 unsigned i;
259 for (i = REG_P0; i <= REG_P5; i++)
260 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
261 || (!TARGET_FDPIC
262 && i == PIC_OFFSET_TABLE_REGNUM
263 && (current_function_uses_pic_offset_table
264 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
265 return REG_P5 - i + 1;
266 return 0;
269 /* Determine if we are going to save the frame pointer in the prologue. */
271 static bool
272 must_save_fp_p (void)
274 return frame_pointer_needed || regs_ever_live[REG_FP];
277 static bool
278 stack_frame_needed_p (void)
280 /* EH return puts a new return address into the frame using an
281 address relative to the frame pointer. */
282 if (current_function_calls_eh_return)
283 return true;
284 return frame_pointer_needed;
287 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
288 must save all registers; this is used for interrupt handlers.
289 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
290 this for an interrupt (or exception) handler. */
292 static void
293 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
295 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
296 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
297 int dregno = REG_R7 + 1 - ndregs;
298 int pregno = REG_P5 + 1 - npregs;
299 int total = ndregs + npregs;
300 int i;
301 rtx pat, insn, val;
303 if (total == 0)
304 return;
306 val = GEN_INT (-total * 4);
307 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
308 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
309 UNSPEC_PUSH_MULTIPLE);
310 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
311 gen_rtx_PLUS (Pmode, spreg,
312 val));
313 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
314 for (i = 0; i < total; i++)
316 rtx memref = gen_rtx_MEM (word_mode,
317 gen_rtx_PLUS (Pmode, spreg,
318 GEN_INT (- i * 4 - 4)));
319 rtx subpat;
320 if (ndregs > 0)
322 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
323 dregno++));
324 ndregs--;
326 else
328 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
329 pregno++));
330 npregs++;
332 XVECEXP (pat, 0, i + 1) = subpat;
333 RTX_FRAME_RELATED_P (subpat) = 1;
335 insn = emit_insn (pat);
336 RTX_FRAME_RELATED_P (insn) = 1;
339 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
340 must save all registers; this is used for interrupt handlers.
341 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
342 this for an interrupt (or exception) handler. */
344 static void
345 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
347 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
348 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
349 int total = ndregs + npregs;
350 int i, regno;
351 rtx pat, insn;
353 if (total == 0)
354 return;
356 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
357 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
358 gen_rtx_PLUS (Pmode, spreg,
359 GEN_INT (total * 4)));
361 if (npregs > 0)
362 regno = REG_P5 + 1;
363 else
364 regno = REG_R7 + 1;
366 for (i = 0; i < total; i++)
368 rtx addr = (i > 0
369 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
370 : spreg);
371 rtx memref = gen_rtx_MEM (word_mode, addr);
373 regno--;
374 XVECEXP (pat, 0, i + 1)
375 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
377 if (npregs > 0)
379 if (--npregs == 0)
380 regno = REG_R7 + 1;
384 insn = emit_insn (pat);
385 RTX_FRAME_RELATED_P (insn) = 1;
388 /* Perform any needed actions needed for a function that is receiving a
389 variable number of arguments.
391 CUM is as above.
393 MODE and TYPE are the mode and type of the current parameter.
395 PRETEND_SIZE is a variable that should be set to the amount of stack
396 that must be pushed by the prolog to pretend that our caller pushed
399 Normally, this macro will push all remaining incoming registers on the
400 stack and set PRETEND_SIZE to the length of the registers pushed.
402 Blackfin specific :
403 - VDSP C compiler manual (our ABI) says that a variable args function
404 should save the R0, R1 and R2 registers in the stack.
405 - The caller will always leave space on the stack for the
406 arguments that are passed in registers, so we dont have
407 to leave any extra space.
408 - now, the vastart pointer can access all arguments from the stack. */
410 static void
411 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
412 enum machine_mode mode ATTRIBUTE_UNUSED,
413 tree type ATTRIBUTE_UNUSED, int *pretend_size,
414 int no_rtl)
416 rtx mem;
417 int i;
419 if (no_rtl)
420 return;
422 /* The move for named arguments will be generated automatically by the
423 compiler. We need to generate the move rtx for the unnamed arguments
424 if they are in the first 3 words. We assume at least 1 named argument
425 exists, so we never generate [ARGP] = R0 here. */
427 for (i = cum->words + 1; i < max_arg_registers; i++)
429 mem = gen_rtx_MEM (Pmode,
430 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
431 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
434 *pretend_size = 0;
437 /* Value should be nonzero if functions must have frame pointers.
438 Zero means the frame pointer need not be set up (and parms may
439 be accessed via the stack pointer) in functions that seem suitable. */
442 bfin_frame_pointer_required (void)
444 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
446 if (fkind != SUBROUTINE)
447 return 1;
449 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
450 so we have to override it for non-leaf functions. */
451 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
452 return 1;
454 return 0;
457 /* Return the number of registers pushed during the prologue. */
459 static int
460 n_regs_saved_by_prologue (void)
462 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
463 bool is_inthandler = fkind != SUBROUTINE;
464 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
465 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
466 || (is_inthandler && !current_function_is_leaf));
467 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
468 int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
469 int n = ndregs + npregs;
471 if (all || stack_frame_needed_p ())
472 /* We use a LINK instruction in this case. */
473 n += 2;
474 else
476 if (must_save_fp_p ())
477 n++;
478 if (! current_function_is_leaf)
479 n++;
482 if (fkind != SUBROUTINE)
484 int i;
486 /* Increment once for ASTAT. */
487 n++;
489 /* RETE/X/N. */
490 if (lookup_attribute ("nesting", attrs))
491 n++;
493 for (i = REG_P7 + 1; i < REG_CC; i++)
494 if (all
495 || regs_ever_live[i]
496 || (!leaf_function_p () && call_used_regs[i]))
497 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
499 return n;
502 /* Return the offset between two registers, one to be eliminated, and the other
503 its replacement, at the start of a routine. */
505 HOST_WIDE_INT
506 bfin_initial_elimination_offset (int from, int to)
508 HOST_WIDE_INT offset = 0;
510 if (from == ARG_POINTER_REGNUM)
511 offset = n_regs_saved_by_prologue () * 4;
513 if (to == STACK_POINTER_REGNUM)
515 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
516 offset += current_function_outgoing_args_size;
517 else if (current_function_outgoing_args_size)
518 offset += FIXED_STACK_AREA;
520 offset += get_frame_size ();
523 return offset;
526 /* Emit code to load a constant CONSTANT into register REG; setting
527 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
528 Make sure that the insns we generate need not be split. */
530 static void
531 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
533 rtx insn;
534 rtx cst = GEN_INT (constant);
536 if (constant >= -32768 && constant < 65536)
537 insn = emit_move_insn (reg, cst);
538 else
540 /* We don't call split_load_immediate here, since dwarf2out.c can get
541 confused about some of the more clever sequences it can generate. */
542 insn = emit_insn (gen_movsi_high (reg, cst));
543 if (related)
544 RTX_FRAME_RELATED_P (insn) = 1;
545 insn = emit_insn (gen_movsi_low (reg, reg, cst));
547 if (related)
548 RTX_FRAME_RELATED_P (insn) = 1;
551 /* Generate efficient code to add a value to the frame pointer. We
552 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
553 generated insns if FRAME is nonzero. */
555 static void
556 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
558 if (value == 0)
559 return;
561 /* Choose whether to use a sequence using a temporary register, or
562 a sequence with multiple adds. We can add a signed 7 bit value
563 in one instruction. */
564 if (value > 120 || value < -120)
566 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
567 rtx insn;
569 if (frame)
570 frame_related_constant_load (tmpreg, value, TRUE);
571 else
573 insn = emit_move_insn (tmpreg, GEN_INT (value));
574 if (frame)
575 RTX_FRAME_RELATED_P (insn) = 1;
578 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
579 if (frame)
580 RTX_FRAME_RELATED_P (insn) = 1;
582 else
585 int size = value;
586 rtx insn;
588 if (size > 60)
589 size = 60;
590 else if (size < -60)
591 /* We could use -62, but that would leave the stack unaligned, so
592 it's no good. */
593 size = -60;
595 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
596 if (frame)
597 RTX_FRAME_RELATED_P (insn) = 1;
598 value -= size;
600 while (value != 0);
603 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
604 is too large, generate a sequence of insns that has the same effect.
605 SPREG contains (reg:SI REG_SP). */
607 static void
608 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
610 HOST_WIDE_INT link_size = frame_size;
611 rtx insn;
612 int i;
614 if (link_size > 262140)
615 link_size = 262140;
617 /* Use a LINK insn with as big a constant as possible, then subtract
618 any remaining size from the SP. */
619 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
620 RTX_FRAME_RELATED_P (insn) = 1;
622 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
624 rtx set = XVECEXP (PATTERN (insn), 0, i);
625 gcc_assert (GET_CODE (set) == SET);
626 RTX_FRAME_RELATED_P (set) = 1;
629 frame_size -= link_size;
631 if (frame_size > 0)
633 /* Must use a call-clobbered PREG that isn't the static chain. */
634 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
636 frame_related_constant_load (tmpreg, -frame_size, TRUE);
637 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
638 RTX_FRAME_RELATED_P (insn) = 1;
642 /* Return the number of bytes we must reserve for outgoing arguments
643 in the current function's stack frame. */
645 static HOST_WIDE_INT
646 arg_area_size (void)
648 if (current_function_outgoing_args_size)
650 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
651 return current_function_outgoing_args_size;
652 else
653 return FIXED_STACK_AREA;
655 return 0;
658 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
659 function must save all its registers (true only for certain interrupt
660 handlers). */
662 static void
663 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
665 frame_size += arg_area_size ();
667 if (all || stack_frame_needed_p ()
668 || (must_save_fp_p () && ! current_function_is_leaf))
669 emit_link_insn (spreg, frame_size);
670 else
672 if (! current_function_is_leaf)
674 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
675 gen_rtx_PRE_DEC (Pmode, spreg)),
676 bfin_rets_rtx);
677 rtx insn = emit_insn (pat);
678 RTX_FRAME_RELATED_P (insn) = 1;
680 if (must_save_fp_p ())
682 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
683 gen_rtx_PRE_DEC (Pmode, spreg)),
684 gen_rtx_REG (Pmode, REG_FP));
685 rtx insn = emit_insn (pat);
686 RTX_FRAME_RELATED_P (insn) = 1;
688 add_to_sp (spreg, -frame_size, 1);
692 /* Like do_link, but used for epilogues to deallocate the stack frame. */
694 static void
695 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
697 frame_size += arg_area_size ();
699 if (all || stack_frame_needed_p ())
700 emit_insn (gen_unlink ());
701 else
703 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
705 add_to_sp (spreg, frame_size, 0);
706 if (must_save_fp_p ())
708 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
709 emit_move_insn (fpreg, postinc);
710 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
712 if (! current_function_is_leaf)
714 emit_move_insn (bfin_rets_rtx, postinc);
715 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
720 /* Generate a prologue suitable for a function of kind FKIND. This is
721 called for interrupt and exception handler prologues.
722 SPREG contains (reg:SI REG_SP). */
724 static void
725 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
727 int i;
728 HOST_WIDE_INT frame_size = get_frame_size ();
729 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
730 rtx predec = gen_rtx_MEM (SImode, predec1);
731 rtx insn;
732 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
733 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
734 tree kspisusp = lookup_attribute ("kspisusp", attrs);
736 if (kspisusp)
738 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
739 RTX_FRAME_RELATED_P (insn) = 1;
742 /* We need space on the stack in case we need to save the argument
743 registers. */
744 if (fkind == EXCPT_HANDLER)
746 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
747 RTX_FRAME_RELATED_P (insn) = 1;
750 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
751 RTX_FRAME_RELATED_P (insn) = 1;
753 /* If we're calling other functions, they won't save their call-clobbered
754 registers, so we must save everything here. */
755 if (!current_function_is_leaf)
756 all = true;
757 expand_prologue_reg_save (spreg, all, true);
759 for (i = REG_P7 + 1; i < REG_CC; i++)
760 if (all
761 || regs_ever_live[i]
762 || (!leaf_function_p () && call_used_regs[i]))
764 if (i == REG_A0 || i == REG_A1)
765 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
766 gen_rtx_REG (PDImode, i));
767 else
768 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
769 RTX_FRAME_RELATED_P (insn) = 1;
772 if (lookup_attribute ("nesting", attrs))
774 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
775 : fkind == NMI_HANDLER ? REG_RETN
776 : REG_RETI));
777 insn = emit_move_insn (predec, srcreg);
778 RTX_FRAME_RELATED_P (insn) = 1;
781 do_link (spreg, frame_size, all);
783 if (fkind == EXCPT_HANDLER)
785 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
786 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
787 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
788 rtx insn;
790 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
791 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
792 NULL_RTX);
793 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
794 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
795 NULL_RTX);
796 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
797 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
798 NULL_RTX);
799 insn = emit_move_insn (r1reg, spreg);
800 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
801 NULL_RTX);
802 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
803 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
804 NULL_RTX);
805 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
806 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
807 NULL_RTX);
811 /* Generate an epilogue suitable for a function of kind FKIND. This is
812 called for interrupt and exception handler epilogues.
813 SPREG contains (reg:SI REG_SP). */
815 static void
816 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
818 int i;
819 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
820 rtx postinc = gen_rtx_MEM (SImode, postinc1);
821 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
822 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
824 /* A slightly crude technique to stop flow from trying to delete "dead"
825 insns. */
826 MEM_VOLATILE_P (postinc) = 1;
828 do_unlink (spreg, get_frame_size (), all);
830 if (lookup_attribute ("nesting", attrs))
832 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
833 : fkind == NMI_HANDLER ? REG_RETN
834 : REG_RETI));
835 emit_move_insn (srcreg, postinc);
838 /* If we're calling other functions, they won't save their call-clobbered
839 registers, so we must save (and restore) everything here. */
840 if (!current_function_is_leaf)
841 all = true;
843 for (i = REG_CC - 1; i > REG_P7; i--)
844 if (all
845 || regs_ever_live[i]
846 || (!leaf_function_p () && call_used_regs[i]))
848 if (i == REG_A0 || i == REG_A1)
850 rtx mem = gen_rtx_MEM (PDImode, postinc1);
851 MEM_VOLATILE_P (mem) = 1;
852 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
854 else
855 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
858 expand_epilogue_reg_restore (spreg, all, true);
860 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
862 /* Deallocate any space we left on the stack in case we needed to save the
863 argument registers. */
864 if (fkind == EXCPT_HANDLER)
865 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
867 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
870 /* Used while emitting the prologue to generate code to load the correct value
871 into the PIC register, which is passed in DEST. */
873 static rtx
874 bfin_load_pic_reg (rtx dest)
876 struct cgraph_local_info *i = NULL;
877 rtx addr, insn;
879 if (flag_unit_at_a_time)
880 i = cgraph_local_info (current_function_decl);
882 /* Functions local to the translation unit don't need to reload the
883 pic reg, since the caller always passes a usable one. */
884 if (i && i->local)
885 return pic_offset_table_rtx;
887 if (bfin_lib_id_given)
888 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
889 else
890 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
891 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
892 UNSPEC_LIBRARY_OFFSET));
893 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
894 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
895 return dest;
898 /* Generate RTL for the prologue of the current function. */
900 void
901 bfin_expand_prologue (void)
903 rtx insn;
904 HOST_WIDE_INT frame_size = get_frame_size ();
905 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
906 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
907 rtx pic_reg_loaded = NULL_RTX;
909 if (fkind != SUBROUTINE)
911 expand_interrupt_handler_prologue (spreg, fkind);
912 return;
915 if (current_function_limit_stack)
917 HOST_WIDE_INT offset
918 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
919 STACK_POINTER_REGNUM);
920 rtx lim = stack_limit_rtx;
922 if (GET_CODE (lim) == SYMBOL_REF)
924 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
925 if (TARGET_ID_SHARED_LIBRARY)
927 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
928 rtx val;
929 pic_reg_loaded = bfin_load_pic_reg (p2reg);
930 val = legitimize_pic_address (stack_limit_rtx, p1reg,
931 pic_reg_loaded);
932 emit_move_insn (p1reg, val);
933 frame_related_constant_load (p2reg, offset, FALSE);
934 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
935 lim = p2reg;
937 else
939 rtx limit = plus_constant (stack_limit_rtx, offset);
940 emit_move_insn (p2reg, limit);
941 lim = p2reg;
944 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
945 emit_insn (gen_trapifcc ());
947 expand_prologue_reg_save (spreg, 0, false);
949 do_link (spreg, frame_size, false);
951 if (TARGET_ID_SHARED_LIBRARY
952 && (current_function_uses_pic_offset_table
953 || !current_function_is_leaf))
954 bfin_load_pic_reg (pic_offset_table_rtx);
957 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
958 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
959 eh_return pattern. */
961 void
962 bfin_expand_epilogue (int need_return, int eh_return)
964 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
965 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
967 if (fkind != SUBROUTINE)
969 expand_interrupt_handler_epilogue (spreg, fkind);
970 return;
973 do_unlink (spreg, get_frame_size (), false);
975 expand_epilogue_reg_restore (spreg, false, false);
977 /* Omit the return insn if this is for a sibcall. */
978 if (! need_return)
979 return;
981 if (eh_return)
982 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
984 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
987 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
990 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
991 unsigned int new_reg)
993 /* Interrupt functions can only use registers that have already been
994 saved by the prologue, even if they would normally be
995 call-clobbered. */
997 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
998 && !regs_ever_live[new_reg])
999 return 0;
1001 return 1;
1004 /* Return the value of the return address for the frame COUNT steps up
1005 from the current frame, after the prologue.
1006 We punt for everything but the current frame by returning const0_rtx. */
1009 bfin_return_addr_rtx (int count)
1011 if (count != 0)
1012 return const0_rtx;
1014 return get_hard_reg_initial_val (Pmode, REG_RETS);
1017 /* Try machine-dependent ways of modifying an illegitimate address X
1018 to be legitimate. If we find one, return the new, valid address,
1019 otherwise return NULL_RTX.
1021 OLDX is the address as it was before break_out_memory_refs was called.
1022 In some cases it is useful to look at this to decide what needs to be done.
1024 MODE is the mode of the memory reference. */
1027 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1028 enum machine_mode mode ATTRIBUTE_UNUSED)
1030 return NULL_RTX;
1033 static rtx
1034 bfin_delegitimize_address (rtx orig_x)
1036 rtx x = orig_x, y;
1038 if (GET_CODE (x) != MEM)
1039 return orig_x;
1041 x = XEXP (x, 0);
1042 if (GET_CODE (x) == PLUS
1043 && GET_CODE (XEXP (x, 1)) == UNSPEC
1044 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1045 && GET_CODE (XEXP (x, 0)) == REG
1046 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1047 return XVECEXP (XEXP (x, 1), 0, 0);
1049 return orig_x;
1052 /* This predicate is used to compute the length of a load/store insn.
1053 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1054 32 bit instruction. */
1057 effective_address_32bit_p (rtx op, enum machine_mode mode)
1059 HOST_WIDE_INT offset;
1061 mode = GET_MODE (op);
1062 op = XEXP (op, 0);
1064 if (GET_CODE (op) != PLUS)
1066 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1067 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1068 return 0;
1071 offset = INTVAL (XEXP (op, 1));
1073 /* All byte loads use a 16 bit offset. */
1074 if (GET_MODE_SIZE (mode) == 1)
1075 return 1;
1077 if (GET_MODE_SIZE (mode) == 4)
1079 /* Frame pointer relative loads can use a negative offset, all others
1080 are restricted to a small positive one. */
1081 if (XEXP (op, 0) == frame_pointer_rtx)
1082 return offset < -128 || offset > 60;
1083 return offset < 0 || offset > 60;
1086 /* Must be HImode now. */
1087 return offset < 0 || offset > 30;
1090 /* Returns true if X is a memory reference using an I register. */
1091 bool
1092 bfin_dsp_memref_p (rtx x)
1094 if (! MEM_P (x))
1095 return false;
1096 x = XEXP (x, 0);
1097 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1098 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1099 x = XEXP (x, 0);
1100 return IREG_P (x);
1103 /* Return cost of the memory address ADDR.
1104 All addressing modes are equally cheap on the Blackfin. */
1106 static int
1107 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1109 return 1;
1112 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1114 void
1115 print_address_operand (FILE *file, rtx x)
1117 switch (GET_CODE (x))
1119 case PLUS:
1120 output_address (XEXP (x, 0));
1121 fprintf (file, "+");
1122 output_address (XEXP (x, 1));
1123 break;
1125 case PRE_DEC:
1126 fprintf (file, "--");
1127 output_address (XEXP (x, 0));
1128 break;
1129 case POST_INC:
1130 output_address (XEXP (x, 0));
1131 fprintf (file, "++");
1132 break;
1133 case POST_DEC:
1134 output_address (XEXP (x, 0));
1135 fprintf (file, "--");
1136 break;
1138 default:
1139 gcc_assert (GET_CODE (x) != MEM);
1140 print_operand (file, x, 0);
1141 break;
/* Print operand X to FILE, modified by the single-letter CODE.
   Codes handled here include:
     j/J - condition code of a comparison (J is the reversed condition)
     h/d - low/high half of a register or constant
     w/x - the .w / .x part of an accumulator register
     D   - register pair name
     H   - second register of a DImode/DFmode register pair
     T   - byte register name
     M   - MAC flag suffix for a CONST_INT
     b   - "+=" / "-=" selected by a 0/1 CONST_INT
     X/Y - log2 of the (complemented) constant
     Z   - offset adjustment for LINK insns
   (Adding intp DImode support by Tony
    -- Q: (low word)
    -- R: (high word))  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Print the assembler condition suffix for comparison X.  Note that
	 unsigned comparisons print the same suffix as signed ones; the
	 distinction is made elsewhere.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      /* Like 'j', but print the suffix for the inverse condition.  */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      /* Low half of a data register.  */
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      /*fprintf (file, "\n%d\n ", REGNO (x));*/
	      break;
	    }
	  else if (code == 'd')
	    {
	      /* High half of a data register.  */
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      /* Accumulator .w part; only valid for A0/A1.  */
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      /* Accumulator .x part; only valid for A0/A1.  */
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      /* Second register of a 64-bit register pair.  */
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      /* MAC flag: print the parenthesized mode suffix.  */
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      /* 0 selects "+=", 1 selects "-=".  */
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FDPIC relocation operators.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1395 /* Argument support functions. */
1397 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1398 for a call to a function whose data type is FNTYPE.
1399 For a library call, FNTYPE is 0.
1400 VDSP C Compiler manual, our ABI says that
1401 first 3 words of arguments will use R0, R1 and R2.
1404 void
1405 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1406 rtx libname ATTRIBUTE_UNUSED)
1408 static CUMULATIVE_ARGS zero_cum;
1410 *cum = zero_cum;
1412 /* Set up the number of registers to use for passing arguments. */
1414 cum->nregs = max_arg_registers;
1415 cum->arg_regs = arg_regs;
1417 cum->call_cookie = CALL_NORMAL;
1418 /* Check for a longcall attribute. */
1419 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1420 cum->call_cookie |= CALL_SHORT;
1421 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1422 cum->call_cookie |= CALL_LONG;
1424 return;
1427 /* Update the data in CUM to advance over an argument
1428 of mode MODE and data type TYPE.
1429 (TYPE is null for libcalls where that information may not be available.) */
1431 void
1432 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1433 int named ATTRIBUTE_UNUSED)
1435 int count, bytes, words;
1437 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1438 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1440 cum->words += words;
1441 cum->nregs -= words;
1443 if (cum->nregs <= 0)
1445 cum->nregs = 0;
1446 cum->arg_regs = NULL;
1448 else
1450 for (count = 1; count <= words; count++)
1451 cum->arg_regs++;
1454 return;
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  /* Variable-sized object (int_size_in_bytes returned -1): pass on the
     stack.  */
  if (bytes == -1)
    return NULL_RTX;

  /* Registers remain; the next register in cum->arg_regs takes this arg.  */
  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
1490 /* For an arg passed partly in registers and partly in memory,
1491 this is the number of bytes passed in registers.
1492 For args passed entirely in registers or entirely in memory, zero.
1494 Refer VDSP C Compiler manual, our ABI.
1495 First 3 words are in registers. So, if a an argument is larger
1496 than the registers available, it will span the register and
1497 stack. */
1499 static int
1500 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1501 tree type ATTRIBUTE_UNUSED,
1502 bool named ATTRIBUTE_UNUSED)
1504 int bytes
1505 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1506 int bytes_left = cum->nregs * UNITS_PER_WORD;
1508 if (bytes == -1)
1509 return 0;
1511 if (bytes_left == 0)
1512 return 0;
1513 if (bytes > bytes_left)
1514 return bytes_left;
1515 return 0;
/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type, bool named ATTRIBUTE_UNUSED)
{
  /* A type whose size is not a compile-time constant is passed by
     reference; libcalls (null TYPE) never are.  */
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
bfin_return_in_memory (tree type)
{
  /* Values wider than two words, or of variable size (-1), are returned
     in memory.  */
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}
/* Register in which address to store a structure value
   is passed to a function.  */

static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  /* The aggregate-return pointer always travels in P0.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
1548 /* Return true when register may be used to pass function parameters. */
1550 bool
1551 function_arg_regno_p (int n)
1553 int i;
1554 for (i = 0; arg_regs[i] != -1; i++)
1555 if (n == arg_regs[i])
1556 return true;
1557 return false;
1560 /* Returns 1 if OP contains a symbol reference */
1563 symbolic_reference_mentioned_p (rtx op)
1565 register const char *fmt;
1566 register int i;
1568 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1569 return 1;
1571 fmt = GET_RTX_FORMAT (GET_CODE (op));
1572 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1574 if (fmt[i] == 'E')
1576 register int j;
1578 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1579 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1580 return 1;
1583 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1584 return 1;
1587 return 0;
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  /* Only ordinary subroutines may sibcall; interrupt/exception handlers
     have different epilogues.  Note the check is on the CURRENT function,
     not the callee.  */
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  return fkind == SUBROUTINE;
}
1602 /* Emit RTL insns to initialize the variable parts of a trampoline at
1603 TRAMP. FNADDR is an RTX for the address of the function's pure
1604 code. CXT is an RTX for the static chain value for the function. */
1606 void
1607 initialize_trampoline (tramp, fnaddr, cxt)
1608 rtx tramp, fnaddr, cxt;
1610 rtx t1 = copy_to_reg (fnaddr);
1611 rtx t2 = copy_to_reg (cxt);
1612 rtx addr;
1613 int i = 0;
1615 if (TARGET_FDPIC)
1617 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1618 addr = memory_address (Pmode, tramp);
1619 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1620 i = 8;
1623 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1624 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1625 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1626 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1627 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1629 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1630 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1631 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1632 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1633 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
/* Emit insns to move operands[1] into operands[0].  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we must not create new pseudos; reuse the destination.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    /* Symbolic stores must go through a register first.  */
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.  */

void
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  /* Symbolic constants need PIC treatment when building shared code.  */
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  /* Don't generate memory->memory or constant->memory moves, go through a
     register.  (Bitwise | is deliberate: both flags are 0/1 ints.)  */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuse to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  /* Low word at offset 0, high word at offset 4 (little-endian).  */
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  /* Constants may have VOIDmode; treat them as DImode.  */
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
1701 bool
1702 bfin_longcall_p (rtx op, int call_cookie)
1704 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1705 if (call_cookie & CALL_SHORT)
1706 return 0;
1707 if (call_cookie & CALL_LONG)
1708 return 1;
1709 if (TARGET_LONG_CALLS)
1710 return 1;
1711 return 0;
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* PARALLEL slots: call + cookie USE, plus RETURN for sibcalls and a
     picreg USE for FDPIC (added below).  */
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      /* Indirect or long calls go through a function descriptor: load the
	 code address and the callee's GOT pointer from it.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie)))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && (flag_pic
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register for an indirect call.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the PARALLEL in the order the patterns expect.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class class = REGNO_REG_CLASS (regno);

  /* Comparison results live in BImode CC, never CCmode.  */
  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (class == CCREGS)
    return mode == BImode;
  /* Accumulator modes only fit in A0/A1.  */
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  /* V2HI is the only vector mode the Blackfin supports natively.  */
  return mode == V2HImode;
}
/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

int
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			 enum reg_class class1, enum reg_class class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && class2 != DREGS)
      || (class1 != DREGS && class2 == CCREGS))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  /* There are some stalls involved when moving from a DREG to a different
     class reg, and using the value in one of the following instructions.
     Attempt to model this by slightly discouraging such moves.  */
  if (class1 == DREGS && class2 != DREGS)
    return 2 * 2;

  return 2;
}
/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       enum reg_class class,
		       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (class, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through a SUBREG to the underlying register.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in its stack slot; treat as MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
/* Implement TARGET_HANDLE_OPTION.  */

static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      /* Diagnose an out-of-range id, but still record that one was given
	 so override_options can check for -mid-shared-library.  */
      if (value > MAX_LIBRARY_ID)
	error ("-mshared-library-id=%s is not between 0 and %d",
	       arg, MAX_LIBRARY_ID);
      bfin_lib_id_given = 1;
      return true;

    default:
      /* All other options need no special processing.  */
      return true;
    }
}
/* Implement the macro OVERRIDE_OPTIONS.  */

void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* ID shared libraries imply PIC.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  /* NOTE(review): insn scheduling is unconditionally disabled here —
     presumably because the port lacks a scheduling description; confirm.  */
  flag_schedule_insns = 0;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  /* Some branch patterns are PARALLELs with the jump set first.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* Conditional jumps: the label is the "then" arm.  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      /* Taken iff the recorded probability is at least 50%.  */
      int pred_val = INTVAL (XEXP (x, 0));

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  /* No note: assume not taken.  */
  return 0;
}
/* Templates for use by asm_conditional_branch.  Indexed first by
   (bp << 1) | (EQ ? BRF : BRT) — i.e. predicted-taken flag and branch
   sense — then by length class (0 = short, 1 = jump.s, 2 = jump.l).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)  */
  /* Length class: 0 = short branch, 1 = jump.s sequence, 2 = jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* bp selects a predicted-taken template.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nop padding and (bp) are alternative mitigations; never both.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.
   Returns a BImode test of the CC register (or of the existing BImode
   value) suitable for use in a conditional branch or scc pattern.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Unsupported condition: compute its reverse and test for EQ
	   (i.e. CC clear) instead.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2113 /* Return nonzero iff C has exactly one bit set if it is interpreted
2114 as a 32 bit constant. */
2117 log2constp (unsigned HOST_WIDE_INT c)
2119 c &= 0xFFFFFFFF;
2120 return c != 0 && (c & (c-1)) == 0;
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Remember the sign bit so we can shift arithmetically using an
     unsigned (well-defined) right shift.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      /* Logical shift, then re-insert the sign bit.  */
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);
  /* NOTE(review): class1 appears unused below — confirm against upstream.  */
  enum reg_class class1 = REGNO_REG_CLASS (regno);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
	  || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      /* Load the shifted-down value, then shift it back up.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  /* Sign-extend the low 16 bits of VAL into TMP.  */
  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
	  emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
	  return 1;
	}
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
	  emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
	  return 1;
	}
    }

  if (D_REGNO_P (regno))
    {
      /* Small low half: load it, then patch the high half in place.  */
      if (CONST_7BIT_IMM_P (tmp))
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
	  emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
	  return 1;
	}

      /* Zero high half: clear, then load the low half.  */
      if ((val & 0xFFFF0000) == 0)
	{
	  emit_insn (gen_movsi (operands[0], const0_rtx));
	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
	  return 1;
	}

      /* All-ones high half: load -1, then the low half.  */
      if ((val & 0xFFFF0000) == 0xFFFF0000)
	{
	  emit_insn (gen_movsi (operands[0], constm1_rtx));
	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
	  return 1;
	}
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
    {
      /* If optimizing for size, generate a sequence that has more instructions
	 but is shorter.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
			      GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
/* Return true if VALUE is a legitimate constant offset for a memory operand
   of mode MODE (i.e. fits the scaled signed displacement range).
   Return false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  /* Offsets are scaled by the access size.  */
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
  return (v & ~(mask << shift)) == 0;
}
2253 static bool
2254 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2255 enum rtx_code outer_code)
2257 if (strict)
2258 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2259 else
2260 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
/* Return true if X is a legitimate memory address for mode MODE.
   STRICT selects strict register checking.  */

bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    /* Plain base register.  */
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus a constant (or GOT-style UNSPEC) offset.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): no break here — a POST_INC/POST_DEC that fails the
       test above falls through into the PRE_DEC check.  That check also
       requires the base to be the stack pointer, so it looks intentional
       (SP auto-dec semantics), but confirm this fallthrough is deliberate.  */
  case PRE_DEC:
    /* Pre-decrement is only allowed on the stack pointer (pushes).  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete cost
   has been computed in *TOTAL, and false if subexpressions should be
   scanned.  OUTER_CODE is the rtx code of the enclosing expression.
   Implements TARGET_RTX_COSTS.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* Constants that can be encoded directly in the enclosing
	 instruction cost nothing; otherwise they need a separate load.  */
      if (outer_code == SET || outer_code == PLUS)
	*total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
	*total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
	*total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
	*total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
	*total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
	*total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      /* reg + reg*{2,4} is a single addressing-style add.  */
      if (GET_MODE (x) == Pmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* 64-bit arithmetic is expanded into several insns.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
2384 static void
2385 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2387 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;

/* Predicate for a multi-register push PARALLEL.  Return nonzero if OP
   is a valid push-multiple pattern: a run of consecutive D-registers
   (R0..R7) optionally followed by a run of consecutive P-registers
   (P0..P7), each stored at decreasing offsets from SP.  As a side
   effect, records the first register of each class to be saved in
   first_dreg_to_save / first_preg_to_save (8 / 6 mean "none").  */
int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* Element 0 of the PARALLEL is the SP adjustment; the stores follow.
     group tracks which register class we are currently matching:
     0 = none yet, 1 = D-registers, 2 = P-registers.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must store a register into memory.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The address must be SP plus a matching negative offset.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First store decides whether we start in the D or P group.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either continue the consecutive D-register run, or switch
	     over to the P-register group.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* P-registers must stay consecutive to the end.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
/* Predicate for a multi-register pop PARALLEL, the mirror image of
   push_multiple_operation: loads from increasing SP offsets, first the
   P-register run downwards from P5, then the D-register run downwards
   from R7.  Also records first_dreg_to_save / first_preg_to_save for
   the output functions below.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* group: 0 = matching P-registers, 1 = matching D-registers.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must load a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* The first load is from [SP] directly.  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the switch from the P-register run to the
	     D-register run; otherwise P-registers count down.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* D-registers count down consecutively.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2523 /* Emit assembly code for one multi-register push described by INSN, with
2524 operands in OPERANDS. */
2526 void
2527 output_push_multiple (rtx insn, rtx *operands)
2529 char buf[80];
2530 int ok;
2532 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2533 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2534 gcc_assert (ok);
2536 if (first_dreg_to_save == 8)
2537 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2538 else if (first_preg_to_save == 6)
2539 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2540 else
2541 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2542 first_dreg_to_save, first_preg_to_save);
2544 output_asm_insn (buf, operands);
2547 /* Emit assembly code for one multi-register pop described by INSN, with
2548 operands in OPERANDS. */
2550 void
2551 output_pop_multiple (rtx insn, rtx *operands)
2553 char buf[80];
2554 int ok;
2556 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2557 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2558 gcc_assert (ok);
2560 if (first_dreg_to_save == 8)
2561 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2562 else if (first_preg_to_save == 6)
2563 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2564 else
2565 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2566 first_dreg_to_save, first_preg_to_save);
2568 output_asm_insn (buf, operands);
2571 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2573 static void
2574 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2576 rtx scratch = gen_reg_rtx (mode);
2577 rtx srcmem, dstmem;
2579 srcmem = adjust_address_nv (src, mode, offset);
2580 dstmem = adjust_address_nv (dst, mode, offset);
2581 emit_move_insn (scratch, srcmem);
2582 emit_move_insn (dstmem, scratch);
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  /* We can only expand inline when both size and alignment are
     compile-time constants.  */
  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      /* NOTE(review): disabled size cutoff kept from an earlier
	 revision; there is currently no upper bound on inline copies.  */
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers so the loop insns can use and
     post-increment them.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: bulk-copy in SImode, then mop up a trailing
	     halfword and/or byte.  A single word is moved directly
	     instead of through the loop insn.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      /* rep_movsi advances destreg/srcreg itself, so the
		 leftover pieces below use offset 0 from the updated
		 pointers.  */
	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: same structure in HImode.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      /* Trailing odd byte, if any.  */
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
/* Implement the ADJUST_COST scheduler hook.  Return the adjusted cost of
   the dependence between INSN and DEP_INSN linked by LINK, given base
   cost COST.  Loads into address registers from data registers incur
   extra latency before the address can be used.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      /* NOTE(review): this assumes every TYPE_MOVE/TYPE_MCLD pattern is
	 a plain SET with REG destination and source — confirm against
	 bfin.md before extending those insn types.  */
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      /* A move into a P-register from a D-register stalls subsequent
	 users of the P-register; loads stall a cycle less.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Number of insns seen since the last predicted-false branch;
     INT_MAX means "no such branch is live".  */
  int cycles_since_jump = INT_MAX;

  /* NOTE(review): this gate bails out unless BOTH anomaly workarounds
     are enabled; given that the passes below each guard on their own
     TARGET_*_ANOMALY flag, this looks like it may have been intended as
     `&&' — confirm against the anomaly workaround requirements.  */
  if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      /* Skip non-executable patterns; they occupy no pipeline slots.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* Start counting insns in the speculation shadow of this
		 predicted-false branch.  */
	      last_condjump = insn;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (INSN_P (insn))
	{
	  enum attr_type type = get_attr_type (insn);
	  int delay_needed = 0;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Loads that can trap need 3 slots of separation from the
	     branch; CSYNC/SSYNC need 4.  */
	  if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
	    {
	      rtx pat = single_set (insn);
	      if (may_trap_p (SET_SRC (pat)))
		delay_needed = 3;
	    }
	  else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
	    delay_needed = 4;

	  if (delay_needed > cycles_since_jump)
	    {
	      rtx pat;
	      int num_clobbers;
	      rtx *op = recog_data.operand;

	      delay_needed -= cycles_since_jump;

	      /* Rewrite the offending branch in place: either force it
		 predicted-taken (small) or attach NOP padding (fast).  */
	      extract_insn (last_condjump);
	      if (optimize_size)
		{
		  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						     op[3]);
		  cycles_since_jump = INT_MAX;
		}
	      else
		/* Do not adjust cycles_since_jump in this case, so that
		   we'll increase the number of NOPs for a subsequent insn
		   if necessary.  */
		pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					     GEN_INT (delay_needed));
	      PATTERN (last_condjump) = pat;
	      INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
	    }
	}
    }
  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  cycles_since_jump = 0;
	  /* Scan the first few real insns at the branch target.  */
	  for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
	    {
	      rtx pat;

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (INSN_P (target))
		{
		  enum attr_type type = get_attr_type (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If we fall through to the label from a branch
			 already padded with NOPs, shift the padding to
			 after the label instead of emitting it twice.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
2875 /* Handle interrupt_handler, exception_handler and nmi_handler function
2876 attributes; arguments as in struct attribute_spec.handler. */
2878 static tree
2879 handle_int_attribute (tree *node, tree name,
2880 tree args ATTRIBUTE_UNUSED,
2881 int flags ATTRIBUTE_UNUSED,
2882 bool *no_add_attrs)
2884 tree x = *node;
2885 if (TREE_CODE (x) == FUNCTION_DECL)
2886 x = TREE_TYPE (x);
2888 if (TREE_CODE (x) != FUNCTION_TYPE)
2890 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2891 IDENTIFIER_POINTER (name));
2892 *no_add_attrs = true;
2894 else if (funkind (x) != SUBROUTINE)
2895 error ("multiple function type attributes specified");
2897 return NULL_TREE;
2900 /* Return 0 if the attributes for two types are incompatible, 1 if they
2901 are compatible, and 2 if they are nearly compatible (which causes a
2902 warning to be generated). */
2904 static int
2905 bfin_comp_type_attributes (tree type1, tree type2)
2907 e_funkind kind1, kind2;
2909 if (TREE_CODE (type1) != FUNCTION_TYPE)
2910 return 1;
2912 kind1 = funkind (type1);
2913 kind2 = funkind (type2);
2915 if (kind1 != kind2)
2916 return 0;
2918 /* Check for mismatched modifiers */
2919 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2920 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2921 return 0;
2923 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2924 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2925 return 0;
2927 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2928 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2929 return 0;
2931 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2932 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2933 return 0;
2935 return 1;
2938 /* Handle a "longcall" or "shortcall" attribute; arguments as in
2939 struct attribute_spec.handler. */
2941 static tree
2942 bfin_handle_longcall_attribute (tree *node, tree name,
2943 tree args ATTRIBUTE_UNUSED,
2944 int flags ATTRIBUTE_UNUSED,
2945 bool *no_add_attrs)
2947 if (TREE_CODE (*node) != FUNCTION_TYPE
2948 && TREE_CODE (*node) != FIELD_DECL
2949 && TREE_CODE (*node) != TYPE_DECL)
2951 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2952 IDENTIFIER_POINTER (name));
2953 *no_add_attrs = true;
2956 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2957 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2958 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2959 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2961 warning (OPT_Wattributes,
2962 "can't apply both longcall and shortcall attributes to the same function");
2963 *no_add_attrs = true;
2966 return NULL_TREE;
/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Interrupt-class attributes: mark a function as an interrupt,
     exception or NMI handler (mutually exclusive, see
     handle_int_attribute).  */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  /* Modifier attributes for handlers; no handler function needed, they
     are only queried via lookup_attribute.  */
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  /* Call-distance attributes (mutually exclusive, see
     bfin_handle_longcall_attribute).  */
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2984 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
2985 tell the assembler to generate pointers to function descriptors in
2986 some cases. */
2988 static bool
2989 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
2991 if (TARGET_FDPIC && size == UNITS_PER_WORD)
2993 if (GET_CODE (value) == SYMBOL_REF
2994 && SYMBOL_REF_FUNCTION_P (value))
2996 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
2997 output_addr_const (asm_out_file, value);
2998 fputs (")\n", asm_out_file);
2999 return true;
3001 if (!aligned_p)
3003 /* We've set the unaligned SI op to NULL, so we always have to
3004 handle the unaligned case here. */
3005 assemble_integer_with_op ("\t.4byte\t", value);
3006 return true;
3009 return default_assemble_integer (value, size, aligned_p);
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      /* P-register adds take a 7-bit signed immediate (-64..63); larger
	 deltas up to +/-~128 are split into two adds, and anything
	 bigger is materialized in R3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* P2 = *this (the vtable pointer).  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a load displacement: materialize it in
	     P1 and add it to P2 first.
	     NOTE(review): this reassigns xops[1] to P1, so the final
	     "%1 = %0; %2 = %2 + %1;" below uses a different scratch
	     register on this path (P1 rather than R2) — confirm this is
	     intentional.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      /* this += *(vtable + vcall_offset).  */
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* The "1 ||" disables the PIC path: a direct jump is always emitted.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  /* Pipeline synchronization.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  /* V2HI construction and extraction.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Paired 16-bit (V2HI) fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  /* Cross-half differences.  */
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  /* 16x16 -> 32 multiplies selecting halves of each operand.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts (logical and saturating arithmetic).  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  /* Complex 16-bit multiply / multiply-accumulate.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  /* Sentinel: number of builtin codes.  */
  BFIN_BUILTIN_MAX
};

/* Register one machine-specific builtin named NAME with function type
   TYPE and code CODE.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			       NULL, NULL_TREE);			\
} while (0)
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  /* Function-type nodes shared by the registrations below; named
     result_ftype_args.  */
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);

  /* Add the remaining MMX insns with somewhat more complicated types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  /* NOTE(review): min_fr1x16/max_fr1x16 and min_fr1x32/max_fr1x32 have
     enum codes and bdesc_2arg entries below, but no def_builtin call
     here — confirm whether those builtins were meant to be exposed.  */
  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);

  /* Shifts. */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);

  /* Complex numbers. */
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
}
/* Describes one builtin that expands directly to a named insn pattern;
   used by the bdesc_1arg / bdesc_2arg tables below.  */
struct builtin_description
{
  const enum insn_code icode;	/* Insn pattern to emit.  */
  const char *const name;	/* Builtin's source-level name.  */
  const enum bfin_builtins code;	/* Enum code from bfin_builtins.  */
  int macflag;			/* MACFLAG_xxx operand, or -1 if the
				   pattern takes none.  */
};
/* Two-operand builtins, mapped to their insn patterns.  A macflag of -1
   means the pattern has no MACFLAG operand.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  /* Shifts.  */
  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  /* Scalar 16-bit min/max and saturating add/sub.  */
  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  /* Scalar 32-bit min/max and saturating add/sub.  */
  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  /* V2HI vector operations.  */
  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Fractional multiplies; the MACFLAG distinguishes truncating from
     rounding variants.  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
/* One-operand builtins, mapped to their insn patterns.  The macflag
   field is unused (0) for all of these.  */
static const struct builtin_description bdesc_1arg[] =
{
  /* Scalar 16-bit unary operations.  */
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  /* Scalar 32-bit unary operations.  */
  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  /* V2HI extraction and unary operations.  */
  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
3341 /* Errors in the source file can cause expand_expr to return const0_rtx
3342 where we expect a vector. To avoid crashing, use one of the vector
3343 clear instructions. */
3344 static rtx
3345 safe_vector_operand (rtx x, enum machine_mode mode)
3347 if (x != const0_rtx)
3348 return x;
3349 x = gen_reg_rtx (SImode);
3351 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
3352 return gen_lowpart (mode, x);
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against error-recovery const0_rtx operands (see
     safe_vector_operand).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  /* Reuse TARGET only if it suits the insn's output operand.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Builtins declared with int arguments may feed HImode insn
     operands; narrow the rtx accordingly.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Force operands into forms the insn predicates accept.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Patterns with a MACFLAG take it as an extra constant operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
3414 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
3416 static rtx
3417 bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
3418 rtx target)
3420 rtx pat;
3421 tree arg0 = TREE_VALUE (arglist);
3422 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3423 enum machine_mode op0mode = GET_MODE (op0);
3424 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3425 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3427 if (! target
3428 || GET_MODE (target) != tmode
3429 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3430 target = gen_reg_rtx (tmode);
3432 if (VECTOR_MODE_P (mode0))
3433 op0 = safe_vector_operand (op0, mode0);
3435 if (op0mode == SImode && mode0 == HImode)
3437 op0mode = HImode;
3438 op0 = gen_lowpart (HImode, op0);
3440 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
3442 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3443 op0 = copy_to_mode_reg (mode0, op0);
3445 pat = GEN_FCN (icode) (target, op0);
3446 if (! pat)
3447 return 0;
3448 emit_insn (pat);
3449 return target;
3452 /* Expand an expression EXP that calls a built-in function,
3453 with result going to TARGET if that's convenient
3454 (and in mode MODE if that's convenient).
3455 SUBTARGET may be used as the target for computing one of EXP's operands.
3456 IGNORE is nonzero if the value is to be ignored. */
3458 static rtx
3459 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3460 rtx subtarget ATTRIBUTE_UNUSED,
3461 enum machine_mode mode ATTRIBUTE_UNUSED,
3462 int ignore ATTRIBUTE_UNUSED)
3464 size_t i;
3465 enum insn_code icode;
3466 const struct builtin_description *d;
3467 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3468 tree arglist = TREE_OPERAND (exp, 1);
3469 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3470 tree arg0, arg1, arg2;
3471 rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
3472 enum machine_mode tmode, mode0;
3474 switch (fcode)
3476 case BFIN_BUILTIN_CSYNC:
3477 emit_insn (gen_csync ());
3478 return 0;
3479 case BFIN_BUILTIN_SSYNC:
3480 emit_insn (gen_ssync ());
3481 return 0;
3483 case BFIN_BUILTIN_DIFFHL_2X16:
3484 case BFIN_BUILTIN_DIFFLH_2X16:
3485 arg0 = TREE_VALUE (arglist);
3486 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3487 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
3488 ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
3489 tmode = insn_data[icode].operand[0].mode;
3490 mode0 = insn_data[icode].operand[1].mode;
3492 if (! target
3493 || GET_MODE (target) != tmode
3494 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3495 target = gen_reg_rtx (tmode);
3497 if (VECTOR_MODE_P (mode0))
3498 op0 = safe_vector_operand (op0, mode0);
3500 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3501 op0 = copy_to_mode_reg (mode0, op0);
3503 pat = GEN_FCN (icode) (target, op0, op0);
3504 if (! pat)
3505 return 0;
3506 emit_insn (pat);
3507 return target;
3509 case BFIN_BUILTIN_CPLX_MUL_16:
3510 arg0 = TREE_VALUE (arglist);
3511 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3512 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3513 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3514 accvec = gen_reg_rtx (V2PDImode);
3516 if (! target
3517 || GET_MODE (target) != V2HImode
3518 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
3519 target = gen_reg_rtx (tmode);
3520 if (! register_operand (op0, GET_MODE (op0)))
3521 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
3522 if (! register_operand (op1, GET_MODE (op1)))
3523 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
3525 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
3526 const0_rtx, const0_rtx,
3527 const1_rtx, GEN_INT (MACFLAG_NONE)));
3528 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
3529 const1_rtx, const1_rtx,
3530 const0_rtx, accvec, const1_rtx, const0_rtx,
3531 GEN_INT (MACFLAG_NONE), accvec));
3533 return target;
3535 case BFIN_BUILTIN_CPLX_MAC_16:
3536 case BFIN_BUILTIN_CPLX_MSU_16:
3537 arg0 = TREE_VALUE (arglist);
3538 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3539 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3540 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3541 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3542 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3543 accvec = gen_reg_rtx (V2PDImode);
3545 if (! target
3546 || GET_MODE (target) != V2HImode
3547 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
3548 target = gen_reg_rtx (tmode);
3549 if (! register_operand (op0, GET_MODE (op0)))
3550 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
3551 if (! register_operand (op1, GET_MODE (op1)))
3552 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
3554 tmp1 = gen_reg_rtx (SImode);
3555 tmp2 = gen_reg_rtx (SImode);
3556 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
3557 emit_move_insn (tmp2, gen_lowpart (SImode, op2));
3558 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
3559 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
3560 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
3561 const0_rtx, const0_rtx,
3562 const1_rtx, accvec, const0_rtx,
3563 const0_rtx,
3564 GEN_INT (MACFLAG_W32)));
3565 tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
3566 tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
3567 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
3568 const1_rtx, const1_rtx,
3569 const0_rtx, accvec, tmp1, tmp2,
3570 GEN_INT (MACFLAG_NONE), accvec));
3572 return target;
3574 default:
3575 break;
3578 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
3579 if (d->code == fcode)
3580 return bfin_expand_binop_builtin (d->icode, arglist, target,
3581 d->macflag);
3583 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
3584 if (d->code == fcode)
3585 return bfin_expand_unop_builtin (d->icode, arglist, target);
3587 gcc_unreachable ();
/* Initialize the GCC target structure: override the default hooks with
   the Blackfin-specific implementations defined in this file, then
   instantiate the target vector.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

/* Small-integer arguments and return values are promoted to full
   register width.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

struct gcc_target targetm = TARGET_INITIALIZER;