Remove old autovect-branch by moving to "dead" directory.
[official-gcc.git] / old-autovect-branch / gcc / config / bfin / bfin.c
blobb60716d9f97d9aec04b80044b362731181fdf5eb
1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "integrate.h"
48 #include "cgraph.h"
49 #include "langhooks.h"
50 #include "bfin-protos.h"
51 #include "tm-preds.h"
52 #include "gt-bfin.h"
54 /* Test and compare insns in bfin.md store the information needed to
55 generate branch and scc insns here. */
56 rtx bfin_compare_op0, bfin_compare_op1;
58 /* RTX for condition code flag register and RETS register */
59 extern GTY(()) rtx bfin_cc_rtx;
60 extern GTY(()) rtx bfin_rets_rtx;
61 rtx bfin_cc_rtx, bfin_rets_rtx;
63 int max_arg_registers = 0;
65 /* Arrays used when emitting register names. */
66 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
67 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
68 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
69 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
71 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
73 /* Nonzero if -mshared-library-id was given. */
74 static int bfin_lib_id_given;
/* Emit an assembler directive making NAME global, using the Blackfin
   ".global name;" syntax, to STREAM.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  /* Blackfin assembly statements are terminated with a semicolon.  */
  fputs (";\n", stream);
}
85 static void
86 output_file_start (void)
88 FILE *file = asm_out_file;
89 int i;
91 fprintf (file, ".file \"%s\";\n", input_filename);
93 for (i = 0; arg_regs[i] >= 0; i++)
95 max_arg_registers = i; /* how many arg reg used */
98 /* Called early in the compilation to conditionally modify
99 fixed_regs/call_used_regs. */
101 void
102 conditional_register_usage (void)
104 /* initialize condition code flag register rtx */
105 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
106 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
109 /* Examine machine-dependent attributes of function type FUNTYPE and return its
110 type. See the definition of E_FUNKIND. */
112 static e_funkind funkind (tree funtype)
114 tree attrs = TYPE_ATTRIBUTES (funtype);
115 if (lookup_attribute ("interrupt_handler", attrs))
116 return INTERRUPT_HANDLER;
117 else if (lookup_attribute ("exception_handler", attrs))
118 return EXCPT_HANDLER;
119 else if (lookup_attribute ("nmi_handler", attrs))
120 return NMI_HANDLER;
121 else
122 return SUBROUTINE;
125 /* Legitimize PIC addresses. If the address is already position-independent,
126 we return ORIG. Newly generated position-independent addresses go into a
127 reg. This is REG if nonzero, otherwise we allocate register(s) as
128 necessary. PICREG is the register holding the pointer to the PIC offset
129 table. */
132 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
134 rtx addr = orig;
135 rtx new = orig;
137 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
139 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
140 reg = new = orig;
141 else
143 if (reg == 0)
145 gcc_assert (!no_new_pseudos);
146 reg = gen_reg_rtx (Pmode);
149 if (flag_pic == 2)
151 emit_insn (gen_movsi_high_pic (reg, addr));
152 emit_insn (gen_movsi_low_pic (reg, reg, addr));
153 emit_insn (gen_addsi3 (reg, reg, picreg));
154 new = gen_const_mem (Pmode, reg);
156 else
158 rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
159 UNSPEC_MOVE_PIC);
160 new = gen_const_mem (Pmode,
161 gen_rtx_PLUS (Pmode, picreg, tmp));
163 emit_move_insn (reg, new);
165 if (picreg == pic_offset_table_rtx)
166 current_function_uses_pic_offset_table = 1;
167 return reg;
170 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
172 rtx base;
174 if (GET_CODE (addr) == CONST)
176 addr = XEXP (addr, 0);
177 gcc_assert (GET_CODE (addr) == PLUS);
180 if (XEXP (addr, 0) == picreg)
181 return orig;
183 if (reg == 0)
185 gcc_assert (!no_new_pseudos);
186 reg = gen_reg_rtx (Pmode);
189 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
190 addr = legitimize_pic_address (XEXP (addr, 1),
191 base == reg ? NULL_RTX : reg,
192 picreg);
194 if (GET_CODE (addr) == CONST_INT)
196 gcc_assert (! reload_in_progress && ! reload_completed);
197 addr = force_reg (Pmode, addr);
200 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
202 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
203 addr = XEXP (addr, 1);
206 return gen_rtx_PLUS (Pmode, base, addr);
209 return new;
212 /* Stack frame layout. */
214 /* Compute the number of DREGS to save with a push_multiple operation.
215 This could include registers that aren't modified in the function,
216 since push_multiple only takes a range of registers.
217 If IS_INTHANDLER, then everything that is live must be saved, even
218 if normally call-clobbered. */
220 static int
221 n_dregs_to_save (bool is_inthandler)
223 unsigned i;
225 for (i = REG_R0; i <= REG_R7; i++)
227 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
228 return REG_R7 - i + 1;
230 if (current_function_calls_eh_return)
232 unsigned j;
233 for (j = 0; ; j++)
235 unsigned test = EH_RETURN_DATA_REGNO (j);
236 if (test == INVALID_REGNUM)
237 break;
238 if (test == i)
239 return REG_R7 - i + 1;
244 return 0;
247 /* Like n_dregs_to_save, but compute number of PREGS to save. */
249 static int
250 n_pregs_to_save (bool is_inthandler)
252 unsigned i;
254 for (i = REG_P0; i <= REG_P5; i++)
255 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
256 || (i == PIC_OFFSET_TABLE_REGNUM
257 && (current_function_uses_pic_offset_table
258 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
259 return REG_P5 - i + 1;
260 return 0;
263 /* Determine if we are going to save the frame pointer in the prologue. */
265 static bool
266 must_save_fp_p (void)
268 return frame_pointer_needed || regs_ever_live[REG_FP];
271 static bool
272 stack_frame_needed_p (void)
274 /* EH return puts a new return address into the frame using an
275 address relative to the frame pointer. */
276 if (current_function_calls_eh_return)
277 return true;
278 return frame_pointer_needed;
281 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
282 must save all registers; this is used for interrupt handlers.
283 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
284 this for an interrupt (or exception) handler. */
286 static void
287 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
289 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
290 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
291 int dregno = REG_R7 + 1 - ndregs;
292 int pregno = REG_P5 + 1 - npregs;
293 int total = ndregs + npregs;
294 int i;
295 rtx pat, insn, val;
297 if (total == 0)
298 return;
300 val = GEN_INT (-total * 4);
301 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
302 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
303 UNSPEC_PUSH_MULTIPLE);
304 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
305 gen_rtx_PLUS (Pmode, spreg,
306 val));
307 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
308 for (i = 0; i < total; i++)
310 rtx memref = gen_rtx_MEM (word_mode,
311 gen_rtx_PLUS (Pmode, spreg,
312 GEN_INT (- i * 4 - 4)));
313 rtx subpat;
314 if (ndregs > 0)
316 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
317 dregno++));
318 ndregs--;
320 else
322 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
323 pregno++));
324 npregs++;
326 XVECEXP (pat, 0, i + 1) = subpat;
327 RTX_FRAME_RELATED_P (subpat) = 1;
329 insn = emit_insn (pat);
330 RTX_FRAME_RELATED_P (insn) = 1;
333 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
334 must save all registers; this is used for interrupt handlers.
335 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
336 this for an interrupt (or exception) handler. */
338 static void
339 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
341 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
342 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
343 int total = ndregs + npregs;
344 int i, regno;
345 rtx pat, insn;
347 if (total == 0)
348 return;
350 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
351 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
352 gen_rtx_PLUS (Pmode, spreg,
353 GEN_INT (total * 4)));
355 if (npregs > 0)
356 regno = REG_P5 + 1;
357 else
358 regno = REG_R7 + 1;
360 for (i = 0; i < total; i++)
362 rtx addr = (i > 0
363 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
364 : spreg);
365 rtx memref = gen_rtx_MEM (word_mode, addr);
367 regno--;
368 XVECEXP (pat, 0, i + 1)
369 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
371 if (npregs > 0)
373 if (--npregs == 0)
374 regno = REG_R7 + 1;
378 insn = emit_insn (pat);
379 RTX_FRAME_RELATED_P (insn) = 1;
382 /* Perform any needed actions needed for a function that is receiving a
383 variable number of arguments.
385 CUM is as above.
387 MODE and TYPE are the mode and type of the current parameter.
389 PRETEND_SIZE is a variable that should be set to the amount of stack
390 that must be pushed by the prolog to pretend that our caller pushed
393 Normally, this macro will push all remaining incoming registers on the
394 stack and set PRETEND_SIZE to the length of the registers pushed.
396 Blackfin specific :
397 - VDSP C compiler manual (our ABI) says that a variable args function
398 should save the R0, R1 and R2 registers in the stack.
399 - The caller will always leave space on the stack for the
400 arguments that are passed in registers, so we dont have
401 to leave any extra space.
402 - now, the vastart pointer can access all arguments from the stack. */
404 static void
405 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
406 enum machine_mode mode ATTRIBUTE_UNUSED,
407 tree type ATTRIBUTE_UNUSED, int *pretend_size,
408 int no_rtl)
410 rtx mem;
411 int i;
413 if (no_rtl)
414 return;
416 /* The move for named arguments will be generated automatically by the
417 compiler. We need to generate the move rtx for the unnamed arguments
418 if they are in the first 3 words. We assume at least 1 named argument
419 exists, so we never generate [ARGP] = R0 here. */
421 for (i = cum->words + 1; i < max_arg_registers; i++)
423 mem = gen_rtx_MEM (Pmode,
424 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
425 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
428 *pretend_size = 0;
431 /* Value should be nonzero if functions must have frame pointers.
432 Zero means the frame pointer need not be set up (and parms may
433 be accessed via the stack pointer) in functions that seem suitable. */
436 bfin_frame_pointer_required (void)
438 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
440 if (fkind != SUBROUTINE)
441 return 1;
443 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
444 so we have to override it for non-leaf functions. */
445 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
446 return 1;
448 return 0;
451 /* Return the number of registers pushed during the prologue. */
453 static int
454 n_regs_saved_by_prologue (void)
456 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
457 bool is_inthandler = fkind != SUBROUTINE;
458 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
459 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
460 || (is_inthandler && !current_function_is_leaf));
461 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
462 int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
463 int n = ndregs + npregs;
465 if (all || stack_frame_needed_p ())
466 /* We use a LINK instruction in this case. */
467 n += 2;
468 else
470 if (must_save_fp_p ())
471 n++;
472 if (! current_function_is_leaf)
473 n++;
476 if (fkind != SUBROUTINE)
478 int i;
480 /* Increment once for ASTAT. */
481 n++;
483 /* RETE/X/N. */
484 if (lookup_attribute ("nesting", attrs))
485 n++;
487 for (i = REG_P7 + 1; i < REG_CC; i++)
488 if (all
489 || regs_ever_live[i]
490 || (!leaf_function_p () && call_used_regs[i]))
491 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
493 return n;
496 /* Return the offset between two registers, one to be eliminated, and the other
497 its replacement, at the start of a routine. */
499 HOST_WIDE_INT
500 bfin_initial_elimination_offset (int from, int to)
502 HOST_WIDE_INT offset = 0;
504 if (from == ARG_POINTER_REGNUM)
505 offset = n_regs_saved_by_prologue () * 4;
507 if (to == STACK_POINTER_REGNUM)
509 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
510 offset += current_function_outgoing_args_size;
511 else if (current_function_outgoing_args_size)
512 offset += FIXED_STACK_AREA;
514 offset += get_frame_size ();
517 return offset;
520 /* Emit code to load a constant CONSTANT into register REG; setting
521 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
522 Make sure that the insns we generate need not be split. */
524 static void
525 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
527 rtx insn;
528 rtx cst = GEN_INT (constant);
530 if (constant >= -32768 && constant < 65536)
531 insn = emit_move_insn (reg, cst);
532 else
534 /* We don't call split_load_immediate here, since dwarf2out.c can get
535 confused about some of the more clever sequences it can generate. */
536 insn = emit_insn (gen_movsi_high (reg, cst));
537 if (related)
538 RTX_FRAME_RELATED_P (insn) = 1;
539 insn = emit_insn (gen_movsi_low (reg, reg, cst));
541 if (related)
542 RTX_FRAME_RELATED_P (insn) = 1;
545 /* Generate efficient code to add a value to the frame pointer. We
546 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
547 generated insns if FRAME is nonzero. */
549 static void
550 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
552 if (value == 0)
553 return;
555 /* Choose whether to use a sequence using a temporary register, or
556 a sequence with multiple adds. We can add a signed 7 bit value
557 in one instruction. */
558 if (value > 120 || value < -120)
560 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
561 rtx insn;
563 if (frame)
564 frame_related_constant_load (tmpreg, value, TRUE);
565 else
567 insn = emit_move_insn (tmpreg, GEN_INT (value));
568 if (frame)
569 RTX_FRAME_RELATED_P (insn) = 1;
572 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
573 if (frame)
574 RTX_FRAME_RELATED_P (insn) = 1;
576 else
579 int size = value;
580 rtx insn;
582 if (size > 60)
583 size = 60;
584 else if (size < -60)
585 /* We could use -62, but that would leave the stack unaligned, so
586 it's no good. */
587 size = -60;
589 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
590 if (frame)
591 RTX_FRAME_RELATED_P (insn) = 1;
592 value -= size;
594 while (value != 0);
597 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
598 is too large, generate a sequence of insns that has the same effect.
599 SPREG contains (reg:SI REG_SP). */
601 static void
602 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
604 HOST_WIDE_INT link_size = frame_size;
605 rtx insn;
606 int i;
608 if (link_size > 262140)
609 link_size = 262140;
611 /* Use a LINK insn with as big a constant as possible, then subtract
612 any remaining size from the SP. */
613 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
614 RTX_FRAME_RELATED_P (insn) = 1;
616 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
618 rtx set = XVECEXP (PATTERN (insn), 0, i);
619 gcc_assert (GET_CODE (set) == SET);
620 RTX_FRAME_RELATED_P (set) = 1;
623 frame_size -= link_size;
625 if (frame_size > 0)
627 /* Must use a call-clobbered PREG that isn't the static chain. */
628 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
630 frame_related_constant_load (tmpreg, -frame_size, TRUE);
631 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
632 RTX_FRAME_RELATED_P (insn) = 1;
636 /* Return the number of bytes we must reserve for outgoing arguments
637 in the current function's stack frame. */
639 static HOST_WIDE_INT
640 arg_area_size (void)
642 if (current_function_outgoing_args_size)
644 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
645 return current_function_outgoing_args_size;
646 else
647 return FIXED_STACK_AREA;
649 return 0;
652 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
653 function must save all its registers (true only for certain interrupt
654 handlers). */
656 static void
657 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
659 frame_size += arg_area_size ();
661 if (all || stack_frame_needed_p ()
662 || (must_save_fp_p () && ! current_function_is_leaf))
663 emit_link_insn (spreg, frame_size);
664 else
666 if (! current_function_is_leaf)
668 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
669 gen_rtx_PRE_DEC (Pmode, spreg)),
670 bfin_rets_rtx);
671 rtx insn = emit_insn (pat);
672 RTX_FRAME_RELATED_P (insn) = 1;
674 if (must_save_fp_p ())
676 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
677 gen_rtx_PRE_DEC (Pmode, spreg)),
678 gen_rtx_REG (Pmode, REG_FP));
679 rtx insn = emit_insn (pat);
680 RTX_FRAME_RELATED_P (insn) = 1;
682 add_to_sp (spreg, -frame_size, 1);
686 /* Like do_link, but used for epilogues to deallocate the stack frame. */
688 static void
689 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
691 frame_size += arg_area_size ();
693 if (all || stack_frame_needed_p ())
694 emit_insn (gen_unlink ());
695 else
697 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
699 add_to_sp (spreg, frame_size, 0);
700 if (must_save_fp_p ())
702 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
703 emit_move_insn (fpreg, postinc);
704 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
706 if (! current_function_is_leaf)
708 emit_move_insn (bfin_rets_rtx, postinc);
709 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
714 /* Generate a prologue suitable for a function of kind FKIND. This is
715 called for interrupt and exception handler prologues.
716 SPREG contains (reg:SI REG_SP). */
718 static void
719 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
721 int i;
722 HOST_WIDE_INT frame_size = get_frame_size ();
723 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
724 rtx predec = gen_rtx_MEM (SImode, predec1);
725 rtx insn;
726 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
727 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
728 tree kspisusp = lookup_attribute ("kspisusp", attrs);
730 if (kspisusp)
732 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
733 RTX_FRAME_RELATED_P (insn) = 1;
736 /* We need space on the stack in case we need to save the argument
737 registers. */
738 if (fkind == EXCPT_HANDLER)
740 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
741 RTX_FRAME_RELATED_P (insn) = 1;
744 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
745 RTX_FRAME_RELATED_P (insn) = 1;
747 /* If we're calling other functions, they won't save their call-clobbered
748 registers, so we must save everything here. */
749 if (!current_function_is_leaf)
750 all = true;
751 expand_prologue_reg_save (spreg, all, true);
753 for (i = REG_P7 + 1; i < REG_CC; i++)
754 if (all
755 || regs_ever_live[i]
756 || (!leaf_function_p () && call_used_regs[i]))
758 if (i == REG_A0 || i == REG_A1)
759 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
760 gen_rtx_REG (PDImode, i));
761 else
762 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
763 RTX_FRAME_RELATED_P (insn) = 1;
766 if (lookup_attribute ("nesting", attrs))
768 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
769 : fkind == NMI_HANDLER ? REG_RETN
770 : REG_RETI));
771 insn = emit_move_insn (predec, srcreg);
772 RTX_FRAME_RELATED_P (insn) = 1;
775 do_link (spreg, frame_size, all);
777 if (fkind == EXCPT_HANDLER)
779 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
780 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
781 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
782 rtx insn;
784 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
785 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
786 NULL_RTX);
787 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
788 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
789 NULL_RTX);
790 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
791 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
792 NULL_RTX);
793 insn = emit_move_insn (r1reg, spreg);
794 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
795 NULL_RTX);
796 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
797 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
798 NULL_RTX);
799 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
800 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
801 NULL_RTX);
805 /* Generate an epilogue suitable for a function of kind FKIND. This is
806 called for interrupt and exception handler epilogues.
807 SPREG contains (reg:SI REG_SP). */
809 static void
810 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
812 int i;
813 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
814 rtx postinc = gen_rtx_MEM (SImode, postinc1);
815 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
816 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
818 /* A slightly crude technique to stop flow from trying to delete "dead"
819 insns. */
820 MEM_VOLATILE_P (postinc) = 1;
822 do_unlink (spreg, get_frame_size (), all);
824 if (lookup_attribute ("nesting", attrs))
826 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
827 : fkind == NMI_HANDLER ? REG_RETN
828 : REG_RETI));
829 emit_move_insn (srcreg, postinc);
832 /* If we're calling other functions, they won't save their call-clobbered
833 registers, so we must save (and restore) everything here. */
834 if (!current_function_is_leaf)
835 all = true;
837 for (i = REG_CC - 1; i > REG_P7; i--)
838 if (all
839 || regs_ever_live[i]
840 || (!leaf_function_p () && call_used_regs[i]))
842 if (i == REG_A0 || i == REG_A1)
844 rtx mem = gen_rtx_MEM (PDImode, postinc1);
845 MEM_VOLATILE_P (mem) = 1;
846 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
848 else
849 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
852 expand_epilogue_reg_restore (spreg, all, true);
854 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
856 /* Deallocate any space we left on the stack in case we needed to save the
857 argument registers. */
858 if (fkind == EXCPT_HANDLER)
859 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
861 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
864 /* Used while emitting the prologue to generate code to load the correct value
865 into the PIC register, which is passed in DEST. */
867 static rtx
868 bfin_load_pic_reg (rtx dest)
870 struct cgraph_local_info *i = NULL;
871 rtx addr, insn;
873 if (flag_unit_at_a_time)
874 i = cgraph_local_info (current_function_decl);
876 /* Functions local to the translation unit don't need to reload the
877 pic reg, since the caller always passes a usable one. */
878 if (i && i->local)
879 return pic_offset_table_rtx;
881 if (bfin_lib_id_given)
882 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
883 else
884 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
885 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
886 UNSPEC_LIBRARY_OFFSET));
887 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
888 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
889 return dest;
892 /* Generate RTL for the prologue of the current function. */
894 void
895 bfin_expand_prologue (void)
897 rtx insn;
898 HOST_WIDE_INT frame_size = get_frame_size ();
899 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
900 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
901 rtx pic_reg_loaded = NULL_RTX;
903 if (fkind != SUBROUTINE)
905 expand_interrupt_handler_prologue (spreg, fkind);
906 return;
909 if (current_function_limit_stack)
911 HOST_WIDE_INT offset
912 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
913 STACK_POINTER_REGNUM);
914 rtx lim = stack_limit_rtx;
916 if (GET_CODE (lim) == SYMBOL_REF)
918 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
919 if (TARGET_ID_SHARED_LIBRARY)
921 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
922 rtx val;
923 pic_reg_loaded = bfin_load_pic_reg (p2reg);
924 val = legitimize_pic_address (stack_limit_rtx, p1reg,
925 pic_reg_loaded);
926 emit_move_insn (p1reg, val);
927 frame_related_constant_load (p2reg, offset, FALSE);
928 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
929 lim = p2reg;
931 else
933 rtx limit = plus_constant (stack_limit_rtx, offset);
934 emit_move_insn (p2reg, limit);
935 lim = p2reg;
938 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
939 emit_insn (gen_trapifcc ());
941 expand_prologue_reg_save (spreg, 0, false);
943 do_link (spreg, frame_size, false);
945 if (TARGET_ID_SHARED_LIBRARY
946 && (current_function_uses_pic_offset_table
947 || !current_function_is_leaf))
948 bfin_load_pic_reg (pic_offset_table_rtx);
951 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
952 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
953 eh_return pattern. */
955 void
956 bfin_expand_epilogue (int need_return, int eh_return)
958 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
959 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
961 if (fkind != SUBROUTINE)
963 expand_interrupt_handler_epilogue (spreg, fkind);
964 return;
967 do_unlink (spreg, get_frame_size (), false);
969 expand_epilogue_reg_restore (spreg, false, false);
971 /* Omit the return insn if this is for a sibcall. */
972 if (! need_return)
973 return;
975 if (eh_return)
976 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
978 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
981 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
984 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
985 unsigned int new_reg)
987 /* Interrupt functions can only use registers that have already been
988 saved by the prologue, even if they would normally be
989 call-clobbered. */
991 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
992 && !regs_ever_live[new_reg])
993 return 0;
995 return 1;
998 /* Return the value of the return address for the frame COUNT steps up
999 from the current frame, after the prologue.
1000 We punt for everything but the current frame by returning const0_rtx. */
1003 bfin_return_addr_rtx (int count)
1005 if (count != 0)
1006 return const0_rtx;
1008 return get_hard_reg_initial_val (Pmode, REG_RETS);
1011 /* Try machine-dependent ways of modifying an illegitimate address X
1012 to be legitimate. If we find one, return the new, valid address,
1013 otherwise return NULL_RTX.
1015 OLDX is the address as it was before break_out_memory_refs was called.
1016 In some cases it is useful to look at this to decide what needs to be done.
1018 MODE is the mode of the memory reference. */
1021 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1022 enum machine_mode mode ATTRIBUTE_UNUSED)
1024 return NULL_RTX;
1027 /* This predicate is used to compute the length of a load/store insn.
1028 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1029 32 bit instruction. */
1032 effective_address_32bit_p (rtx op, enum machine_mode mode)
1034 HOST_WIDE_INT offset;
1036 mode = GET_MODE (op);
1037 op = XEXP (op, 0);
1039 if (GET_CODE (op) != PLUS)
1041 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1042 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1043 return 0;
1046 offset = INTVAL (XEXP (op, 1));
1048 /* All byte loads use a 16 bit offset. */
1049 if (GET_MODE_SIZE (mode) == 1)
1050 return 1;
1052 if (GET_MODE_SIZE (mode) == 4)
1054 /* Frame pointer relative loads can use a negative offset, all others
1055 are restricted to a small positive one. */
1056 if (XEXP (op, 0) == frame_pointer_rtx)
1057 return offset < -128 || offset > 60;
1058 return offset < 0 || offset > 60;
1061 /* Must be HImode now. */
1062 return offset < 0 || offset > 30;
1065 /* Return cost of the memory address ADDR.
1066 All addressing modes are equally cheap on the Blackfin. */
1068 static int
1069 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1071 return 1;
1074 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1076 void
1077 print_address_operand (FILE *file, rtx x)
1079 switch (GET_CODE (x))
1081 case PLUS:
1082 output_address (XEXP (x, 0));
1083 fprintf (file, "+");
1084 output_address (XEXP (x, 1));
1085 break;
1087 case PRE_DEC:
1088 fprintf (file, "--");
1089 output_address (XEXP (x, 0));
1090 break;
1091 case POST_INC:
1092 output_address (XEXP (x, 0));
1093 fprintf (file, "++");
1094 break;
1095 case POST_DEC:
1096 output_address (XEXP (x, 0));
1097 fprintf (file, "--");
1098 break;
1100 default:
1101 gcc_assert (GET_CODE (x) != MEM);
1102 print_operand (file, x, 0);
1103 break;
1107 /* Adding intp DImode support by Tony
1108 * -- Q: (low word)
1109 * -- R: (high word)
/* Print operand X to FILE, modified by the single-letter operand code
   CODE from an assembler template.  'j' prints the condition suffix for
   the comparison X; 'J' prints the reversed suffix.  All other codes are
   handled per rtx code of X below (sub-register selection, halves of
   constants, PIC decoration, ...).  */
1112 void
1113 print_operand (FILE *file, rtx x, char code)
1115 enum machine_mode mode = GET_MODE (x);
1117 switch (code)
/* Condition suffix for a branch on X's comparison code.
   NOTE(review): the unsigned codes (GTU/LTU/GEU/LEU) print the same
   suffixes as their signed counterparts here -- presumably signedness
   was already folded in when CC was set; confirm against bfin.md.  */
1119 case 'j':
1120 switch (GET_CODE (x))
1122 case EQ:
1123 fprintf (file, "e");
1124 break;
1125 case NE:
1126 fprintf (file, "ne");
1127 break;
1128 case GT:
1129 fprintf (file, "g");
1130 break;
1131 case LT:
1132 fprintf (file, "l");
1133 break;
1134 case GE:
1135 fprintf (file, "ge");
1136 break;
1137 case LE:
1138 fprintf (file, "le");
1139 break;
1140 case GTU:
1141 fprintf (file, "g");
1142 break;
1143 case LTU:
1144 fprintf (file, "l");
1145 break;
1146 case GEU:
1147 fprintf (file, "ge");
1148 break;
1149 case LEU:
1150 fprintf (file, "le");
1151 break;
1152 default:
1153 output_operand_lossage ("invalid %%j value");
1155 break;
/* Same as 'j' but with the condition logically inverted.  */
1157 case 'J': /* reverse logic */
1158 switch (GET_CODE(x))
1160 case EQ:
1161 fprintf (file, "ne");
1162 break;
1163 case NE:
1164 fprintf (file, "e");
1165 break;
1166 case GT:
1167 fprintf (file, "le");
1168 break;
1169 case LT:
1170 fprintf (file, "ge");
1171 break;
1172 case GE:
1173 fprintf (file, "l");
1174 break;
1175 case LE:
1176 fprintf (file, "g");
1177 break;
1178 case GTU:
1179 fprintf (file, "le");
1180 break;
1181 case LTU:
1182 fprintf (file, "ge");
1183 break;
1184 case GEU:
1185 fprintf (file, "l");
1186 break;
1187 case LEU:
1188 fprintf (file, "g");
1189 break;
1190 default:
1191 output_operand_lossage ("invalid %%J value");
1193 break;
/* Everything else dispatches on the rtx code of X; CODE then selects a
   particular view of the operand (half register, register pair, etc.).  */
1195 default:
1196 switch (GET_CODE (x))
1198 case REG:
/* 'h': low (short) half of a data/pointer register.  */
1199 if (code == 'h')
1201 gcc_assert (REGNO (x) < 32);
1202 fprintf (file, "%s", short_reg_names[REGNO (x)]);
1203 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1204 break;
/* 'd': high half of the register.  */
1206 else if (code == 'd')
1208 gcc_assert (REGNO (x) < 32);
1209 fprintf (file, "%s", high_reg_names[REGNO (x)]);
1210 break;
/* 'w'/'x': the .w / .x parts of an accumulator (A0/A1 only).  */
1212 else if (code == 'w')
1214 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1215 fprintf (file, "%s.w", reg_names[REGNO (x)]);
1217 else if (code == 'x')
1219 gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
1220 fprintf (file, "%s.x", reg_names[REGNO (x)]);
/* 'D': name of the register pair starting at REGNO.  */
1222 else if (code == 'D')
1224 fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
/* 'H': the second (high) register of a DImode/DFmode value.  */
1226 else if (code == 'H')
1228 gcc_assert (mode == DImode || mode == DFmode);
1229 gcc_assert (REG_P (x));
1230 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
/* 'T': byte view of a data register.  */
1232 else if (code == 'T')
1234 gcc_assert (D_REGNO_P (REGNO (x)));
1235 fprintf (file, "%s", byte_reg_names[REGNO (x)]);
1237 else
1238 fprintf (file, "%s", reg_names[REGNO (x)]);
1239 break;
1241 case MEM:
1242 fputc ('[', file);
1243 x = XEXP (x,0);
1244 print_address_operand (file, x);
1245 fputc (']', file);
1246 break;
1248 case CONST_INT:
1249 /* Moves to half registers with d or h modifiers always use unsigned
1250 constants. */
1251 if (code == 'd')
1252 x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
1253 else if (code == 'h')
1254 x = GEN_INT (INTVAL (x) & 0xffff);
/* 'X'/'Y': log2 of the (complemented) constant, for BITSET/BITCLR.  */
1255 else if (code == 'X')
1256 x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
1257 else if (code == 'Y')
1258 x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
1259 else if (code == 'Z')
1260 /* Used for LINK insns. */
1261 x = GEN_INT (-8 - INTVAL (x));
1263 /* fall through */
1265 case SYMBOL_REF:
1266 output_addr_const (file, x);
1267 if (code == 'G' && flag_pic)
1268 fprintf (file, "@GOT");
1269 break;
1271 case CONST_DOUBLE:
1272 output_operand_lossage ("invalid const_double operand");
1273 break;
1275 case UNSPEC:
1276 switch (XINT (x, 1))
1278 case UNSPEC_MOVE_PIC:
1279 output_addr_const (file, XVECEXP (x, 0, 0));
1280 fprintf (file, "@GOT");
1281 break;
1283 case UNSPEC_LIBRARY_OFFSET:
1284 fprintf (file, "_current_shared_library_p5_offset_");
1285 break;
1287 default:
1288 gcc_unreachable ();
1290 break;
1292 default:
1293 output_addr_const (file, x);
1298 /* Argument support functions. */
1300 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1301 for a call to a function whose data type is FNTYPE.
1302 For a library call, FNTYPE is 0.
1303 VDSP C Compiler manual, our ABI says that
1304 first 3 words of arguments will use R0, R1 and R2.
1307 void
1308 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1309 rtx libname ATTRIBUTE_UNUSED)
1311 static CUMULATIVE_ARGS zero_cum;
1313 *cum = zero_cum;
1315 /* Set up the number of registers to use for passing arguments. */
1317 cum->nregs = max_arg_registers;
1318 cum->arg_regs = arg_regs;
1320 cum->call_cookie = CALL_NORMAL;
1321 /* Check for a longcall attribute. */
1322 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1323 cum->call_cookie |= CALL_SHORT;
1324 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1325 cum->call_cookie |= CALL_LONG;
1327 return;
1330 /* Update the data in CUM to advance over an argument
1331 of mode MODE and data type TYPE.
1332 (TYPE is null for libcalls where that information may not be available.) */
1334 void
1335 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1336 int named ATTRIBUTE_UNUSED)
1338 int count, bytes, words;
1340 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1341 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1343 cum->words += words;
1344 cum->nregs -= words;
1346 if (cum->nregs <= 0)
1348 cum->nregs = 0;
1349 cum->arg_regs = NULL;
1351 else
1353 for (count = 1; count <= words; count++)
1354 cum->arg_regs++;
1357 return;
1360 /* Define where to put the arguments to a function.
1361 Value is zero to push the argument on the stack,
1362 or a hard register in which to store the argument.
1364 MODE is the argument's machine mode.
1365 TYPE is the data type of the argument (as a tree).
1366 This is null for libcalls where that information may
1367 not be available.
1368 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1369 the preceding args and about the function being called.
1370 NAMED is nonzero if this argument is a named parameter
1371 (otherwise it is an extra parameter matching an ellipsis). */
1373 struct rtx_def *
1374 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1375 int named ATTRIBUTE_UNUSED)
1377 int bytes
1378 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1380 if (mode == VOIDmode)
1381 /* Compute operand 2 of the call insn. */
1382 return GEN_INT (cum->call_cookie);
1384 if (bytes == -1)
1385 return NULL_RTX;
1387 if (cum->nregs)
1388 return gen_rtx_REG (mode, *(cum->arg_regs));
1390 return NULL_RTX;
1393 /* For an arg passed partly in registers and partly in memory,
1394 this is the number of bytes passed in registers.
1395 For args passed entirely in registers or entirely in memory, zero.
1397 Refer VDSP C Compiler manual, our ABI.
1398 First 3 words are in registers. So, if a an argument is larger
1399 than the registers available, it will span the register and
1400 stack. */
1402 static int
1403 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1404 tree type ATTRIBUTE_UNUSED,
1405 bool named ATTRIBUTE_UNUSED)
1407 int bytes
1408 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1409 int bytes_left = cum->nregs * UNITS_PER_WORD;
1411 if (bytes == -1)
1412 return 0;
1414 if (bytes_left == 0)
1415 return 0;
1416 if (bytes > bytes_left)
1417 return bytes_left;
1418 return 0;
1421 /* Variable sized types are passed by reference. */
1423 static bool
1424 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1425 enum machine_mode mode ATTRIBUTE_UNUSED,
1426 tree type, bool named ATTRIBUTE_UNUSED)
1428 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1431 /* Decide whether a type should be returned in memory (true)
1432 or in a register (false). This is called by the macro
1433 RETURN_IN_MEMORY. */
1436 bfin_return_in_memory (tree type)
1438 int size = int_size_in_bytes (type);
1439 return size > 2 * UNITS_PER_WORD || size == -1;
1442 /* Register in which address to store a structure value
1443 is passed to a function. */
1444 static rtx
1445 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1446 int incoming ATTRIBUTE_UNUSED)
1448 return gen_rtx_REG (Pmode, REG_P0);
1451 /* Return true when register may be used to pass function parameters. */
1453 bool
1454 function_arg_regno_p (int n)
1456 int i;
1457 for (i = 0; arg_regs[i] != -1; i++)
1458 if (n == arg_regs[i])
1459 return true;
1460 return false;
1463 /* Returns 1 if OP contains a symbol reference */
/* Recursively walks OP's rtx structure: returns 1 as soon as a
   SYMBOL_REF or LABEL_REF is found anywhere inside, 0 otherwise.  */
1466 symbolic_reference_mentioned_p (rtx op)
1468 register const char *fmt;
1469 register int i;
1471 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1472 return 1;
/* Walk every operand slot of OP: 'E' slots are rtx vectors, 'e' slots
   are single sub-expressions; other slot kinds cannot hold rtxes.  */
1474 fmt = GET_RTX_FORMAT (GET_CODE (op));
1475 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1477 if (fmt[i] == 'E')
1479 register int j;
1481 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1482 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1483 return 1;
1486 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1487 return 1;
1490 return 0;
1493 /* Decide whether we can make a sibling call to a function. DECL is the
1494 declaration of the function being targeted by the call and EXP is the
1495 CALL_EXPR representing the call. */
1497 static bool
1498 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1499 tree exp ATTRIBUTE_UNUSED)
1501 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1502 return fkind == SUBROUTINE;
1505 /* Emit RTL insns to initialize the variable parts of a trampoline at
1506 TRAMP. FNADDR is an RTX for the address of the function's pure
1507 code. CXT is an RTX for the static chain value for the function. */
1509 void
1510 initialize_trampoline (tramp, fnaddr, cxt)
1511 rtx tramp, fnaddr, cxt;
1513 rtx t1 = copy_to_reg (fnaddr);
1514 rtx t2 = copy_to_reg (cxt);
1515 rtx addr;
1517 addr = memory_address (Pmode, plus_constant (tramp, 2));
1518 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1519 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1520 addr = memory_address (Pmode, plus_constant (tramp, 6));
1521 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1523 addr = memory_address (Pmode, plus_constant (tramp, 10));
1524 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1525 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1526 addr = memory_address (Pmode, plus_constant (tramp, 14));
1527 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1530 /* Emit insns to move operands[1] into operands[0]. */
/* MODE is currently unused; operands[1] is rewritten in place to a
   PIC-legitimate form (or forced into a register for a store).  */
1532 void
1533 emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
/* During reload no new pseudos may be created, so reuse the destination
   as the scratch register for legitimization.  */
1535 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* Storing a symbolic constant directly to memory: just force the source
   into a register; TEMP is not needed on this path.  */
1537 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1538 operands[1] = force_reg (SImode, operands[1]);
1539 else
1540 operands[1] = legitimize_pic_address (operands[1], temp,
1541 pic_offset_table_rtx);
1544 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1546 void
1547 expand_move (rtx *operands, enum machine_mode mode)
1549 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1550 emit_pic_move (operands, mode);
1552 /* Don't generate memory->memory or constant->memory moves, go through a
1553 register */
1554 else if ((reload_in_progress | reload_completed) == 0
1555 && GET_CODE (operands[0]) == MEM
1556 && GET_CODE (operands[1]) != REG)
1557 operands[1] = force_reg (mode, operands[1]);
1560 /* Split one or more DImode RTL references into pairs of SImode
1561 references. The RTL can be REG, offsettable MEM, integer constant, or
1562 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1563 split and "num" is its length. lo_half and hi_half are output arrays
1564 that parallel "operands". */
1566 void
1567 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1569 while (num--)
1571 rtx op = operands[num];
1573 /* simplify_subreg refuse to split volatile memory addresses,
1574 but we still have to handle it. */
1575 if (GET_CODE (op) == MEM)
/* Low word at offset 0, high word at offset 4 (little-endian layout).  */
1577 lo_half[num] = adjust_address (op, SImode, 0);
1578 hi_half[num] = adjust_address (op, SImode, 4);
1580 else
/* Constants have VOIDmode; treat them as DImode so the subreg
   machinery picks the right word.  */
1582 lo_half[num] = simplify_gen_subreg (SImode, op,
1583 GET_MODE (op) == VOIDmode
1584 ? DImode : GET_MODE (op), 0);
1585 hi_half[num] = simplify_gen_subreg (SImode, op,
1586 GET_MODE (op) == VOIDmode
1587 ? DImode : GET_MODE (op), 4);
1592 bool
1593 bfin_longcall_p (rtx op, int call_cookie)
1595 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1596 if (call_cookie & CALL_SHORT)
1597 return 0;
1598 if (call_cookie & CALL_LONG)
1599 return 1;
1600 if (TARGET_LONG_CALLS)
1601 return 1;
1602 return 0;
1605 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1606 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1607 SIBCALL is nonzero if this is a sibling call. */
1609 void
1610 bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
1612 rtx use = NULL, call;
1613 rtx callee = XEXP (fnaddr, 0);
/* The call pattern is a PARALLEL of [SET/CALL, USE cookie] plus a
   RETURN element for sibcalls.  */
1614 rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));
1616 /* In an untyped call, we can get NULL for operand 2. */
1617 if (cookie == NULL_RTX)
1618 cookie = const0_rtx;
1620 /* Static functions and indirect calls don't need the pic register. */
1621 if (flag_pic
1622 && GET_CODE (callee) == SYMBOL_REF
1623 && !SYMBOL_REF_LOCAL_P (callee))
1624 use_reg (&use, pic_offset_table_rtx);
/* Force the target into a register when it is neither a usable register
   nor a SYMBOL_REF callable directly (PIC or long-call symbols).  */
1626 if ((!register_no_elim_operand (callee, Pmode)
1627 && GET_CODE (callee) != SYMBOL_REF)
1628 || (GET_CODE (callee) == SYMBOL_REF
1629 && (flag_pic
1630 || bfin_longcall_p (callee, INTVAL (cookie)))))
1632 callee = copy_to_mode_reg (Pmode, callee);
1633 fnaddr = gen_rtx_MEM (Pmode, callee);
1635 call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
1637 if (retval)
1638 call = gen_rtx_SET (VOIDmode, retval, call);
1640 XVECEXP (pat, 0, 0) = call;
1641 XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
1642 if (sibcall)
1643 XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
/* Attach the accumulated hard-register USE list (pic register) to the
   emitted call insn.  */
1644 call = emit_call_insn (pat);
1645 if (use)
1646 CALL_INSN_FUNCTION_USAGE (call) = use;
1649 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1652 hard_regno_mode_ok (int regno, enum machine_mode mode)
1654 /* Allow only dregs to store value of mode HI or QI */
1655 enum reg_class class = REGNO_REG_CLASS (regno);
/* CCmode values never live in a general register; the condition code
   register uses BImode (see the CCREGS test below).  */
1657 if (mode == CCmode)
1658 return 0;
1660 if (mode == V2HImode)
1661 return D_REGNO_P (regno);
1662 if (class == CCREGS)
1663 return mode == BImode;
/* PDImode (40-bit) only fits the accumulators.  */
1664 if (mode == PDImode)
1665 return regno == REG_A0 || regno == REG_A1;
1666 if (mode == SImode
1667 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1668 return 1;
/* Everything else: any register in MOST_REGS.  */
1670 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1673 /* Implements target hook vector_mode_supported_p. */
1675 static bool
1676 bfin_vector_mode_supported_p (enum machine_mode mode)
1678 return mode == V2HImode;
1681 /* Return the cost of moving data from a register in class CLASS1 to
1682 one in class CLASS2. A cost of 2 is the default. */
1685 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1686 enum reg_class class1, enum reg_class class2)
1688 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1689 if (optimize_size)
1690 return 2;
1692 /* There are some stalls involved when moving from a DREG to a different
1693 class reg, and using the value in one of the following instructions.
1694 Attempt to model this by slightly discouraging such moves. */
1695 if (class1 == DREGS && class2 != DREGS)
1696 return 2 * 2;
1698 return 2;
1701 /* Return the cost of moving data of mode M between a
1702 register and memory. A value of 2 is the default; this cost is
1703 relative to those in `REGISTER_MOVE_COST'.
1705 ??? In theory L1 memory has single-cycle latency. We should add a switch
1706 that tells the compiler whether we expect to use only L1 memory for the
1707 program; it'll make the costs more accurate. */
1710 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1711 enum reg_class class,
1712 int in ATTRIBUTE_UNUSED)
1714 /* Make memory accesses slightly more expensive than any register-register
1715 move. Also, penalize non-DP registers, since they need secondary
1716 reloads to load and store. */
1717 if (! reg_class_subset_p (class, DPREGS))
1718 return 10;
1720 return 8;
1723 /* Inform reload about cases where moving X with a mode MODE to a register in
1724 CLASS requires an extra scratch register. Return the class needed for the
1725 scratch register. */
1727 static enum reg_class
1728 bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
1729 enum machine_mode mode, secondary_reload_info *sri)
1731 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1732 in most other cases we can also use PREGS. */
1733 enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
1734 enum reg_class x_class = NO_REGS;
1735 enum rtx_code code = GET_CODE (x);
/* Look through subregs; a pseudo without a hard register behaves like
   memory for classification purposes.  */
1737 if (code == SUBREG)
1738 x = SUBREG_REG (x), code = GET_CODE (x);
1739 if (REG_P (x))
1741 int regno = REGNO (x);
1742 if (regno >= FIRST_PSEUDO_REGISTER)
1743 regno = reg_renumber[regno];
1745 if (regno == -1)
1746 code = MEM;
1747 else
1748 x_class = REGNO_REG_CLASS (regno);
1751 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1752 This happens as a side effect of register elimination, and we need
1753 a scratch register to do it. */
1754 if (fp_plus_const_operand (x, mode))
1756 rtx op2 = XEXP (x, 1);
1757 int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));
/* PREGS can add arbitrary constants directly: no scratch needed.  */
1759 if (class == PREGS || class == PREGS_CLOBBERED)
1760 return NO_REGS;
1761 /* If destination is a DREG, we can do this without a scratch register
1762 if the constant is valid for an add instruction. */
1763 if ((class == DREGS || class == DPREGS)
1764 && ! large_constant_p)
1765 return NO_REGS;
1766 /* Reloading to anything other than a DREG? Use a PREG scratch
1767 register. */
/* The scratch is requested via the reload_insi pattern rather than by
   returning a class.  */
1768 sri->icode = CODE_FOR_reload_insi;
1769 return NO_REGS;
1772 /* Data can usually be moved freely between registers of most classes.
1773 AREGS are an exception; they can only move to or from another register
1774 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1775 if (x_class == AREGS)
1776 return class == DREGS || class == AREGS ? NO_REGS : DREGS;
1778 if (class == AREGS)
1780 if (x != const0_rtx && x_class != DREGS)
1781 return DREGS;
1782 else
1783 return NO_REGS;
1786 /* CCREGS can only be moved from/to DREGS. */
1787 if (class == CCREGS && x_class != DREGS)
1788 return DREGS;
1789 if (x_class == CCREGS && class != DREGS)
1790 return DREGS;
1791 /* All registers other than AREGS can load arbitrary constants. The only
1792 case that remains is MEM. */
1793 if (code == MEM)
1794 if (! reg_class_subset_p (class, default_class))
1795 return default_class;
1796 return NO_REGS;
1799 /* Implement TARGET_HANDLE_OPTION. */
/* Returns true to accept the option.  Only -mshared-library-id= needs
   per-option work; everything else is handled by the generic
   machinery.  */
1801 static bool
1802 bfin_handle_option (size_t code, const char *arg, int value)
1804 switch (code)
1806 case OPT_mshared_library_id_:
/* NOTE(review): an out-of-range id only reports an error; the flag
   below is still set and the (invalid) value is kept by the generic
   option code.  Negative values are not rejected here -- presumably the
   option is declared UInteger in bfin.opt; confirm.  */
1807 if (value > MAX_LIBRARY_ID)
1808 error ("-mshared-library-id=%s is not between 0 and %d",
1809 arg, MAX_LIBRARY_ID);
1810 bfin_lib_id_given = 1;
1811 return true;
1813 default:
1814 return true;
1818 /* Implement the macro OVERRIDE_OPTIONS. */
1820 void
1821 override_options (void)
1823 if (TARGET_OMIT_LEAF_FRAME_POINTER)
1824 flag_omit_frame_pointer = 1;
1826 /* Library identification */
1827 if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1828 error ("-mshared-library-id= specified without -mid-shared-library");
1830 if (TARGET_ID_SHARED_LIBRARY)
1831 /* ??? Provide a way to use a bigger GOT. */
1832 flag_pic = 1;
1834 flag_schedule_insns = 0;
1837 /* Return the destination address of BRANCH.
1838 We need to use this instead of get_attr_length, because the
1839 cbranch_with_nops pattern conservatively sets its length to 6, and
1840 we still prefer to use shorter sequences. */
1842 static int
1843 branch_dest (rtx branch)
1845 rtx dest;
1846 int dest_uid;
1847 rtx pat = PATTERN (branch);
/* For parallel patterns the branch SET is the first element.  */
1848 if (GET_CODE (pat) == PARALLEL)
1849 pat = XVECEXP (pat, 0, 0);
1850 dest = SET_SRC (pat);
/* Conditional branch: the label is the "then" arm of the
   IF_THEN_ELSE.  */
1851 if (GET_CODE (dest) == IF_THEN_ELSE)
1852 dest = XEXP (dest, 1);
/* Strip the LABEL_REF wrapper to get the CODE_LABEL itself.  */
1853 dest = XEXP (dest, 0);
1854 dest_uid = INSN_UID (dest);
1855 return INSN_ADDRESSES (dest_uid);
1858 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1859 it's a branch that's predicted taken. */
1861 static int
1862 cbranch_predicted_taken_p (rtx insn)
1864 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1866 if (x)
1868 int pred_val = INTVAL (XEXP (x, 0));
1870 return pred_val >= REG_BR_PROB_BASE / 2;
1873 return 0;
1876 /* Templates for use by asm_conditional_branch. */
/* Rows are indexed by (predicted-taken << 1) | (BRF or BRT for the
   condition); columns by branch length class 0/1/2 (short branch,
   jump.s, jump.l) -- see asm_conditional_branch.  */
1878 static const char *ccbranch_templates[][3] = {
1879 { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
1880 { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
1881 { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
1882 { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
1885 /* Output INSN, which is a conditional branch instruction with operands
1886 OPERANDS.
1888 We deal with the various forms of conditional branches that can be generated
1889 by bfin_reorg to prevent the hardware from doing speculative loads, by
1890 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1891 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1892 Either of these is only necessary if the branch is short, otherwise the
1893 template we use ends in an unconditional jump which flushes the pipeline
1894 anyway. */
1896 void
1897 asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
1899 int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
1900 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
1901 is to be taken from start of if cc rather than jump.
1902 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
/* Length class: 0 = short conditional branch, 1 = jump.s sequence,
   2 = jump.l sequence.  */
1904 int len = (offset >= -1024 && offset <= 1022 ? 0
1905 : offset >= -4094 && offset <= 4096 ? 1
1906 : 2);
/* BP selects the predicted-taken row; EQ branches use the "if !cc"
   (BRF) templates, everything else the "if cc" (BRT) ones.  */
1907 int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
1908 int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
1909 output_asm_insn (ccbranch_templates[idx][len], operands);
/* Nops are only ever requested for not-predicted short branches.  */
1910 gcc_assert (n_nops == 0 || !bp);
1911 if (len == 0)
1912 while (n_nops-- > 0)
1913 output_asm_insn ("nop;", NULL);
1916 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
1917 stored in bfin_compare_op0 and bfin_compare_op1 already. */
/* Returns a BImode comparison of the CC register against zero that the
   caller can use as a branch/scc condition.  */
1920 bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
1922 enum rtx_code code1, code2;
1923 rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
1924 rtx tem = bfin_cc_rtx;
1925 enum rtx_code code = GET_CODE (cmp);
1927 /* If we have a BImode input, then we already have a compare result, and
1928 do not need to emit another comparison. */
1929 if (GET_MODE (op0) == BImode)
1931 gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
1932 tem = op0, code2 = code;
1934 else
/* CODE1 is the comparison to compute into CC; CODE2 is how the result
   is then tested.  Conditions the hardware lacks are computed
   reversed and tested with EQ instead of NE.  */
1936 switch (code) {
1937 /* bfin has these conditions */
1938 case EQ:
1939 case LT:
1940 case LE:
1941 case LEU:
1942 case LTU:
1943 code1 = code;
1944 code2 = NE;
1945 break;
1946 default:
1947 code1 = reverse_condition (code);
1948 code2 = EQ;
1949 break;
1951 emit_insn (gen_rtx_SET (BImode, tem,
1952 gen_rtx_fmt_ee (code1, BImode, op0, op1)));
1955 return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
1958 /* Return nonzero iff C has exactly one bit set if it is interpreted
1959 as a 32 bit constant. */
1962 log2constp (unsigned HOST_WIDE_INT c)
1964 c &= 0xFFFFFFFF;
1965 return c != 0 && (c & (c-1)) == 0;
1968 /* Returns the number of consecutive least significant zeros in the binary
1969 representation of *V.
1970 We modify *V to contain the original value arithmetically shifted right by
1971 the number of zeroes. */
1973 static int
1974 shiftr_zero (HOST_WIDE_INT *v)
1976 unsigned HOST_WIDE_INT tmp = *v;
1977 unsigned HOST_WIDE_INT sgn;
1978 int n = 0;
1980 if (tmp == 0)
1981 return 0;
/* Emulate an arithmetic right shift on the unsigned copy by re-ORing
   the original sign bit after each step.  */
1983 sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
/* The n <= 32 bound guards against looping forever when only bits at
   or above the sign position are set.  */
1984 while ((tmp & 0x1) == 0 && n <= 32)
1986 tmp = (tmp >> 1) | sgn;
1987 n++;
1989 *v = tmp;
1990 return n;
1993 /* After reload, split the load of an immediate constant. OPERANDS are the
1994 operands of the movsi_insn pattern which we are splitting. We return
1995 nonzero if we emitted a sequence to load the constant, zero if we emitted
1996 nothing because we want to use the splitter's default sequence. */
1999 split_load_immediate (rtx operands[])
2001 HOST_WIDE_INT val = INTVAL (operands[1]);
2002 HOST_WIDE_INT tmp;
2003 HOST_WIDE_INT shifted = val;
2004 HOST_WIDE_INT shifted_compl = ~val;
2005 int num_zero = shiftr_zero (&shifted);
2006 int num_compl_zero = shiftr_zero (&shifted_compl);
2007 unsigned int regno = REGNO (operands[0]);
2008 enum reg_class class1 = REGNO_REG_CLASS (regno);
2010 /* This case takes care of single-bit set/clear constants, which we could
2011 also implement with BITSET/BITCLR. */
2012 if (num_zero
2013 && shifted >= -32768 && shifted < 65536
2014 && (D_REGNO_P (regno)
2015 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2017 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2018 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2019 return 1;
2022 tmp = val & 0xFFFF;
2023 tmp |= -(tmp & 0x8000);
2025 /* If high word has one bit set or clear, try to use a bit operation. */
2026 if (D_REGNO_P (regno))
2028 if (log2constp (val & 0xFFFF0000))
2030 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2031 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2032 return 1;
2034 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2036 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2037 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2041 if (D_REGNO_P (regno))
2043 if (CONST_7BIT_IMM_P (tmp))
2045 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2046 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2047 return 1;
2050 if ((val & 0xFFFF0000) == 0)
2052 emit_insn (gen_movsi (operands[0], const0_rtx));
2053 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2054 return 1;
2057 if ((val & 0xFFFF0000) == 0xFFFF0000)
2059 emit_insn (gen_movsi (operands[0], constm1_rtx));
2060 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2061 return 1;
2065 /* Need DREGs for the remaining case. */
2066 if (regno > REG_R7)
2067 return 0;
2069 if (optimize_size
2070 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2072 /* If optimizing for size, generate a sequence that has more instructions
2073 but is shorter. */
2074 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2075 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2076 GEN_INT (num_compl_zero)));
2077 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2078 return 1;
2080 return 0;
2083 /* Return true if the legitimate memory address for a memory operand of mode
2084 MODE. Return false if not. */
2086 static bool
2087 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2089 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2090 int sz = GET_MODE_SIZE (mode);
2091 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2092 /* The usual offsettable_memref machinery doesn't work so well for this
2093 port, so we deal with the problem here. */
2094 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2095 return (v & ~(mask << shift)) == 0;
2098 static bool
2099 bfin_valid_reg_p (unsigned int regno, int strict)
2101 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2102 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
/* Return true if X is a legitimate address of mode MODE; STRICT selects
   strict base-register checking.  */
2105 bool
2106 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2108 switch (GET_CODE (x)) {
2109 case REG:
2110 if (bfin_valid_reg_p (REGNO (x), strict))
2111 return true;
2112 break;
/* reg + UNSPEC or reg + small scaled constant.  */
2113 case PLUS:
2114 if (REG_P (XEXP (x, 0))
2115 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2116 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2117 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2118 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2119 return true;
2120 break;
2121 case POST_INC:
2122 case POST_DEC:
2123 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2124 && REG_P (XEXP (x, 0))
2125 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2126 return true;
/* NOTE(review): no `break' here -- POST_INC/POST_DEC fall through into
   the PRE_DEC test.  Harmless in practice (the PRE_DEC condition is
   strictly stronger than the one just checked), but an explicit break
   or fall-through comment would make the intent clear.  */
/* PRE_DEC only on the stack pointer ([--sp]).  */
2127 case PRE_DEC:
2128 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2129 && XEXP (x, 0) == stack_pointer_rtx
2130 && REG_P (XEXP (x, 0))
2131 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2132 return true;
2133 break;
2134 default:
2135 break;
2137 return false;
/* Implement TARGET_RTX_COSTS.  Fill in *TOTAL with the cost of X
   appearing as OUTER_CODE's operand; return true when the cost is
   final, false to let the generic code refine subexpressions.  */
2140 static bool
2141 bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
2143 int cost2 = COSTS_N_INSNS (1);
2145 switch (code)
/* Constants are free when they fit the immediate field of the
   containing operation, otherwise cost one insn to materialize.  */
2147 case CONST_INT:
2148 if (outer_code == SET || outer_code == PLUS)
2149 *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
2150 else if (outer_code == AND)
2151 *total = log2constp (~INTVAL (x)) ? 0 : cost2;
2152 else if (outer_code == LE || outer_code == LT || outer_code == EQ)
2153 *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
2154 else if (outer_code == LEU || outer_code == LTU)
2155 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
2156 else if (outer_code == MULT)
2157 *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
2158 else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
2159 *total = 0;
2160 else if (outer_code == ASHIFT || outer_code == ASHIFTRT
2161 || outer_code == LSHIFTRT)
2162 *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
2163 else if (outer_code == IOR || outer_code == XOR)
2164 *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
2165 else
2166 *total = cost2;
2167 return true;
2169 case CONST:
2170 case LABEL_REF:
2171 case SYMBOL_REF:
2172 case CONST_DOUBLE:
2173 *total = COSTS_N_INSNS (2);
2174 return true;
2176 case PLUS:
/* reg + reg*{2,4} matches an addressing-style add: cost one insn
   plus the cost of the two value operands.  */
2177 if (GET_MODE (x) == Pmode)
2179 if (GET_CODE (XEXP (x, 0)) == MULT
2180 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2182 HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
2183 if (val == 2 || val == 4)
2185 *total = cost2;
2186 *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
2187 *total += rtx_cost (XEXP (x, 1), outer_code);
2188 return true;
2193 /* fall through */
/* DImode arithmetic is synthesized from word operations: scale the
   cost accordingly, but let subexpressions be costed generically.  */
2195 case MINUS:
2196 case ASHIFT:
2197 case ASHIFTRT:
2198 case LSHIFTRT:
2199 if (GET_MODE (x) == DImode)
2200 *total = 6 * cost2;
2201 return false;
2203 case AND:
2204 case IOR:
2205 case XOR:
2206 if (GET_MODE (x) == DImode)
2207 *total = 2 * cost2;
2208 return false;
2210 case MULT:
2211 if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
2212 *total = COSTS_N_INSNS (3);
2213 return false;
2215 default:
2216 return false;
2220 static void
2221 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2223 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2226 /* Used for communication between {push,pop}_multiple_operation (which
2227 we use not only as a predicate) and the corresponding output functions. */
/* Values are offsets from REG_P0 / REG_R0 respectively; 6 and 8 mean
   "no pregs / no dregs to save" (see push_multiple_operation).  */
2228 static int first_preg_to_save, first_dreg_to_save;
/* Predicate: return 1 iff OP is a PARALLEL describing a valid multi-
   register push ([--sp] = (r7:RD, p5:RP)): consecutive dregs ending at
   R7 followed by consecutive pregs ending at P5, stored at decreasing
   offsets from SP.  As a side effect records first_dreg_to_save and
   first_preg_to_save for the output functions.  */
2231 push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2233 int lastdreg = 8, lastpreg = 6;
2234 int i, group;
2236 first_preg_to_save = lastpreg;
2237 first_dreg_to_save = lastdreg;
/* GROUP: 0 = nothing seen yet, 1 = scanning dregs, 2 = scanning
   pregs.  Element 0 of the PARALLEL and the final element are not
   register stores and are skipped.  */
2238 for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
2240 rtx t = XVECEXP (op, 0, i);
2241 rtx src, dest;
2242 int regno;
2244 if (GET_CODE (t) != SET)
2245 return 0;
2247 src = SET_SRC (t);
2248 dest = SET_DEST (t);
2249 if (GET_CODE (dest) != MEM || ! REG_P (src))
2250 return 0;
/* Each store must be to (SP + -i*4): decreasing stack slots.  */
2251 dest = XEXP (dest, 0);
2252 if (GET_CODE (dest) != PLUS
2253 || ! REG_P (XEXP (dest, 0))
2254 || REGNO (XEXP (dest, 0)) != REG_SP
2255 || GET_CODE (XEXP (dest, 1)) != CONST_INT
2256 || INTVAL (XEXP (dest, 1)) != -i * 4
2257 return 0;
2259 regno = REGNO (src);
/* First register decides whether we start in the dreg or preg
   group.  */
2260 if (group == 0)
2262 if (D_REGNO_P (regno))
2264 group = 1;
2265 first_dreg_to_save = lastdreg = regno - REG_R0;
2267 else if (regno >= REG_P0 && regno <= REG_P7)
2269 group = 2;
2270 first_preg_to_save = lastpreg = regno - REG_P0;
2272 else
2273 return 0;
2275 continue;
/* Within a group registers must be consecutive; a preg may follow
   the dreg group but never the other way around.  */
2278 if (group == 1)
2280 if (regno >= REG_P0 && regno <= REG_P7)
2282 group = 2;
2283 first_preg_to_save = lastpreg = regno - REG_P0;
2285 else if (regno != REG_R0 + lastdreg + 1)
2286 return 0;
2287 else
2288 lastdreg++;
2290 else if (group == 2)
2292 if (regno != REG_P0 + lastpreg + 1)
2293 return 0;
2294 lastpreg++;
2297 return 1;
/* Predicate: return 1 iff OP is a PARALLEL describing a valid multi-
   register pop ((r7:RD, p5:RP) = [sp++]) -- the mirror image of
   push_multiple_operation: pregs are restored first (down from P5),
   then dregs (down from R7), from increasing SP offsets.  Records
   first_dreg_to_save / first_preg_to_save as a side effect.  */
2301 pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2303 int lastdreg = 8, lastpreg = 6;
2304 int i, group;
/* GROUP: 0 = restoring pregs, 1 = restoring dregs.  */
2306 for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
2308 rtx t = XVECEXP (op, 0, i);
2309 rtx src, dest;
2310 int regno;
2312 if (GET_CODE (t) != SET)
2313 return 0;
2315 src = SET_SRC (t);
2316 dest = SET_DEST (t);
2317 if (GET_CODE (src) != MEM || ! REG_P (dest))
2318 return 0;
2319 src = XEXP (src, 0);
/* First load is from [SP]; the rest from SP + (i-1)*4.  */
2321 if (i == 1)
2323 if (! REG_P (src) || REGNO (src) != REG_SP)
2324 return 0;
2326 else if (GET_CODE (src) != PLUS
2327 || ! REG_P (XEXP (src, 0))
2328 || REGNO (XEXP (src, 0)) != REG_SP
2329 || GET_CODE (XEXP (src, 1)) != CONST_INT
2330 || INTVAL (XEXP (src, 1)) != (i - 1) * 4
2331 return 0;
2333 regno = REGNO (dest);
/* R7 marks the switch from the preg group to the dreg group;
   otherwise registers must descend consecutively.  */
2334 if (group == 0)
2336 if (regno == REG_R7)
2338 group = 1;
2339 lastdreg = 7;
2341 else if (regno != REG_P0 + lastpreg - 1)
2342 return 0;
2343 else
2344 lastpreg--;
2346 else if (group == 1)
2348 if (regno != REG_R0 + lastdreg - 1)
2349 return 0;
2350 else
2351 lastdreg--;
2354 first_dreg_to_save = lastdreg;
2355 first_preg_to_save = lastpreg;
2356 return 1;
2359 /* Emit assembly code for one multi-register push described by INSN, with
2360 operands in OPERANDS. */
2362 void
2363 output_push_multiple (rtx insn, rtx *operands)
2365 char buf[80];
2366 int ok;
2368 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2369 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2370 gcc_assert (ok);
2372 if (first_dreg_to_save == 8)
2373 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2374 else if (first_preg_to_save == 6)
2375 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2376 else
2377 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2378 first_dreg_to_save, first_preg_to_save);
2380 output_asm_insn (buf, operands);
2383 /* Emit assembly code for one multi-register pop described by INSN, with
2384 operands in OPERANDS. */
2386 void
2387 output_pop_multiple (rtx insn, rtx *operands)
2389 char buf[80];
2390 int ok;
2392 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2393 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2394 gcc_assert (ok);
2396 if (first_dreg_to_save == 8)
2397 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2398 else if (first_preg_to_save == 6)
2399 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2400 else
2401 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2402 first_dreg_to_save, first_preg_to_save);
2404 output_asm_insn (buf, operands);
2407 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2409 static void
2410 single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2412 rtx scratch = gen_reg_rtx (mode);
2413 rtx srcmem, dstmem;
2415 srcmem = adjust_address_nv (src, mode, offset);
2416 dstmem = adjust_address_nv (dst, mode, offset);
2417 emit_move_insn (scratch, srcmem);
2418 emit_move_insn (dstmem, scratch);
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  /* align/count stay 0 when the corresponding rtx is not a CONST_INT,
     i.e. when the value is unknown at compile time.  */
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  /* Unaligned multi-byte copies are not handled here (count == 0 means
     an unknown count and also bails out via this test unless align >= 2).  */
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers and rewrite the MEMs to use
     them, so the rep insns below can post-increment the pointers.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      /* Byte offset of the next uncopied piece, relative to the
	 original (or, after a rep insn, the updated) pointers.  */
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy words, then a trailing halfword/byte.  */
	  if ((count & ~3) == 4)
	    {
	      /* Exactly one word: a single move beats a loop.  */
	      single_move_for_strmov (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      /* count/4 - 1 because rep_movsi presumably copies
		 countreg+1 words -- TODO confirm against bfin.md.  */
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      /* NOTE: destreg/srcreg are left pointing past the copied
		 region, so offset stays 0 for the tail moves below.  */
	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg,
					destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_strmov (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords, then a trailing byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_strmov (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg,
					destreg, srcreg));
	    }
	}
      if (count & 1)
	{
	  single_move_for_strmov (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
/* Scheduler hook (TARGET_SCHED_ADJUST_COST): adjust COST of the
   dependency LINK between INSN and DEP_INSN.  Adds extra latency when
   DEP_INSN produces a value in an address register from a D-register
   (4 extra cycles for a register move, 3 for a load).  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      /* NOTE(review): PATTERN is assumed to be a single SET here;
	 and for TYPE_MCLD the SET_SRC is presumably a MEM, in which
	 case REGNO (src) below looks questionable -- confirm against
	 the mcld patterns in bfin.md.  */
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2544 /* We use the machine specific reorg pass for emitting CSYNC instructions
2545 after conditional branches as needed.
2547 The Blackfin is unusual in that a code sequence like
2548 if cc jump label
2549 r0 = (p0)
2550 may speculatively perform the load even if the condition isn't true. This
2551 happens for a branch that is predicted not taken, because the pipeline
2552 isn't flushed or stalled, so the early stages of the following instructions,
2553 which perform the memory reference, are allowed to execute before the
2554 jump condition is evaluated.
2555 Therefore, we must insert additional instructions in all places where this
2556 could lead to incorrect behavior. The manual recommends CSYNC, while
2557 VDSP seems to use NOPs (even though its corresponding compiler option is
2558 named CSYNC).
2560 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2561 When optimizing for size, we turn the branch into a predicted taken one.
2562 This may be slower due to mispredicts, but saves code size. */
2564 static void
2565 bfin_reorg (void)
2567 rtx insn, last_condjump = NULL_RTX;
2568 int cycles_since_jump = INT_MAX;
2570 if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
2571 return;
2573 /* First pass: find predicted-false branches; if something after them
2574 needs nops, insert them or change the branch to predict true. */
2575 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2577 rtx pat;
2579 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2580 continue;
2582 pat = PATTERN (insn);
2583 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2584 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2585 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2586 continue;
2588 if (JUMP_P (insn))
2590 if (any_condjump_p (insn)
2591 && ! cbranch_predicted_taken_p (insn))
2593 last_condjump = insn;
2594 cycles_since_jump = 0;
2596 else
2597 cycles_since_jump = INT_MAX;
2599 else if (INSN_P (insn))
2601 enum attr_type type = get_attr_type (insn);
2602 int delay_needed = 0;
2603 if (cycles_since_jump < INT_MAX)
2604 cycles_since_jump++;
2606 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2608 rtx pat = single_set (insn);
2609 if (may_trap_p (SET_SRC (pat)))
2610 delay_needed = 3;
2612 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2613 delay_needed = 4;
2615 if (delay_needed > cycles_since_jump)
2617 rtx pat;
2618 int num_clobbers;
2619 rtx *op = recog_data.operand;
2621 delay_needed -= cycles_since_jump;
2623 extract_insn (last_condjump);
2624 if (optimize_size)
2626 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2627 op[3]);
2628 cycles_since_jump = INT_MAX;
2630 else
2631 /* Do not adjust cycles_since_jump in this case, so that
2632 we'll increase the number of NOPs for a subsequent insn
2633 if necessary. */
2634 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2635 GEN_INT (delay_needed));
2636 PATTERN (last_condjump) = pat;
2637 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2641 /* Second pass: for predicted-true branches, see if anything at the
2642 branch destination needs extra nops. */
2643 if (! TARGET_CSYNC_ANOMALY)
2644 return;
2646 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2648 if (JUMP_P (insn)
2649 && any_condjump_p (insn)
2650 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2651 || cbranch_predicted_taken_p (insn)))
2653 rtx target = JUMP_LABEL (insn);
2654 rtx label = target;
2655 cycles_since_jump = 0;
2656 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2658 rtx pat;
2660 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2661 continue;
2663 pat = PATTERN (target);
2664 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2665 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2666 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2667 continue;
2669 if (INSN_P (target))
2671 enum attr_type type = get_attr_type (target);
2672 int delay_needed = 0;
2673 if (cycles_since_jump < INT_MAX)
2674 cycles_since_jump++;
2676 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2677 delay_needed = 2;
2679 if (delay_needed > cycles_since_jump)
2681 rtx prev = prev_real_insn (label);
2682 delay_needed -= cycles_since_jump;
2683 if (dump_file)
2684 fprintf (dump_file, "Adding %d nops after %d\n",
2685 delay_needed, INSN_UID (label));
2686 if (JUMP_P (prev)
2687 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2689 rtx x;
2690 HOST_WIDE_INT v;
2692 if (dump_file)
2693 fprintf (dump_file,
2694 "Reducing nops on insn %d.\n",
2695 INSN_UID (prev));
2696 x = PATTERN (prev);
2697 x = XVECEXP (x, 0, 1);
2698 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2699 XVECEXP (x, 0, 0) = GEN_INT (v);
2701 while (delay_needed-- > 0)
2702 emit_insn_after (gen_nop (), label);
2703 break;
2711 /* Handle interrupt_handler, exception_handler and nmi_handler function
2712 attributes; arguments as in struct attribute_spec.handler. */
2714 static tree
2715 handle_int_attribute (tree *node, tree name,
2716 tree args ATTRIBUTE_UNUSED,
2717 int flags ATTRIBUTE_UNUSED,
2718 bool *no_add_attrs)
2720 tree x = *node;
2721 if (TREE_CODE (x) == FUNCTION_DECL)
2722 x = TREE_TYPE (x);
2724 if (TREE_CODE (x) != FUNCTION_TYPE)
2726 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2727 IDENTIFIER_POINTER (name));
2728 *no_add_attrs = true;
2730 else if (funkind (x) != SUBROUTINE)
2731 error ("multiple function type attributes specified");
2733 return NULL_TREE;
2736 /* Return 0 if the attributes for two types are incompatible, 1 if they
2737 are compatible, and 2 if they are nearly compatible (which causes a
2738 warning to be generated). */
2740 static int
2741 bfin_comp_type_attributes (tree type1, tree type2)
2743 e_funkind kind1, kind2;
2745 if (TREE_CODE (type1) != FUNCTION_TYPE)
2746 return 1;
2748 kind1 = funkind (type1);
2749 kind2 = funkind (type2);
2751 if (kind1 != kind2)
2752 return 0;
2754 /* Check for mismatched modifiers */
2755 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2756 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2757 return 0;
2759 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2760 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2761 return 0;
2763 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2764 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2765 return 0;
2767 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2768 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2769 return 0;
2771 return 1;
2774 /* Handle a "longcall" or "shortcall" attribute; arguments as in
2775 struct attribute_spec.handler. */
2777 static tree
2778 bfin_handle_longcall_attribute (tree *node, tree name,
2779 tree args ATTRIBUTE_UNUSED,
2780 int flags ATTRIBUTE_UNUSED,
2781 bool *no_add_attrs)
2783 if (TREE_CODE (*node) != FUNCTION_TYPE
2784 && TREE_CODE (*node) != FIELD_DECL
2785 && TREE_CODE (*node) != TYPE_DECL)
2787 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2788 IDENTIFIER_POINTER (name));
2789 *no_add_attrs = true;
2792 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2793 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2794 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2795 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2797 warning (OPT_Wattributes,
2798 "can't apply both longcall and shortcall attributes to the same function");
2799 *no_add_attrs = true;
2802 return NULL_TREE;
/* Table of valid machine attributes.  The handler-kind attributes share
   handle_int_attribute, which also rejects combining them; the modifier
   attributes (nesting, kspisusp, saveall) need no handler and are read
   elsewhere via lookup_attribute.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      if (delta >= -64 && delta <= 63)
	{
	  /* Fits in one add-immediate (7-bit signed, presumably the
	     range of the += form -- confirm against the ISA).  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  /* Split into two adds that each fit the immediate range.  */
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* General case: materialize the constant in R3 halfword by
	     halfword, then add.  R3 is clobbered here; presumably it is
	     free in a thunk's calling convention -- confirm.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* P2 = *this (load the vtable pointer).  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset too large for a direct load: compute the slot
	     address in P1/P2 first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      /* this += vtable[vcall_offset].  */
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" disables the PIC/binds-local distinction,
     so a direct jump is always emitted -- confirm this is intentional
     (no PIC thunk support yet?).  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  /* Sentinel: number of builtins.  */
  BFIN_BUILTIN_MAX
};

/* Register builtin NAME with function type TYPE and code CODE with the
   front end.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			       NULL, NULL_TREE);			\
} while (0)
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);

  /* Register the csync/ssync barrier builtins.  (The old comment about
     "remaining MMX insns" was copied from another backend.)  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
}
2916 /* Expand an expression EXP that calls a built-in function,
2917 with result going to TARGET if that's convenient
2918 (and in mode MODE if that's convenient).
2919 SUBTARGET may be used as the target for computing one of EXP's operands.
2920 IGNORE is nonzero if the value is to be ignored. */
2922 static rtx
2923 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2924 rtx subtarget ATTRIBUTE_UNUSED,
2925 enum machine_mode mode ATTRIBUTE_UNUSED,
2926 int ignore ATTRIBUTE_UNUSED)
2928 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2929 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2931 switch (fcode)
2933 case BFIN_BUILTIN_CSYNC:
2934 emit_insn (gen_csync ());
2935 return 0;
2936 case BFIN_BUILTIN_SSYNC:
2937 emit_insn (gen_ssync ());
2938 return 0;
2940 default:
2941 gcc_unreachable ();
/* Initialize the GCC target structure: bind the bfin implementations to
   the generic target hooks, then instantiate targetm.  */

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

/* Assembly output.  */
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

/* Attributes.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Costs.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

/* Thunks.  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

/* Calling convention.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

/* Options and reload.  */
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

struct gcc_target targetm = TARGET_INITIALIZER;