* config/bfin/bfin.md (doloop_end): Fail for loops that can iterate
[official-gcc.git] / gcc / config / bfin / bfin.c
blobe71305a7eb003d4e2d0c2af2f0398f58d7513c56
1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "integrate.h"
49 #include "cgraph.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
52 #include "tm-preds.h"
53 #include "gt-bfin.h"
54 #include "basic-block.h"
55 #include "timevar.h"
57 /* A C structure for machine-specific, per-function data.
58 This is added to the cfun structure. */
59 struct machine_function GTY(())
61 int has_hardware_loops;
64 /* Test and compare insns in bfin.md store the information needed to
65 generate branch and scc insns here. */
66 rtx bfin_compare_op0, bfin_compare_op1;
68 /* RTX for condition code flag register and RETS register */
69 extern GTY(()) rtx bfin_cc_rtx;
70 extern GTY(()) rtx bfin_rets_rtx;
71 rtx bfin_cc_rtx, bfin_rets_rtx;
73 int max_arg_registers = 0;
75 /* Arrays used when emitting register names. */
76 const char *short_reg_names[] = SHORT_REGISTER_NAMES;
77 const char *high_reg_names[] = HIGH_REGISTER_NAMES;
78 const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
79 const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
81 static int arg_regs[] = FUNCTION_ARG_REGISTERS;
83 /* Nonzero if -mshared-library-id was given. */
84 static int bfin_lib_id_given;
86 /* Nonzero if -fschedule-insns2 was given. We override it and
87 call the scheduler ourselves during reorg. */
88 static int bfin_flag_schedule_insns2;
90 /* Determines whether we run variable tracking in machine dependent
91 reorganization. */
92 static int bfin_flag_var_tracking;
94 int splitting_for_sched;
/* Emit a ".global NAME;" directive for NAME to STREAM.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  /* Blackfin assembler statements are terminated by a semicolon.  */
  fputs (";\n", stream);
}
105 static void
106 output_file_start (void)
108 FILE *file = asm_out_file;
109 int i;
111 /* Variable tracking should be run after all optimizations which change order
112 of insns. It also needs a valid CFG. This can't be done in
113 override_options, because flag_var_tracking is finalized after
114 that. */
115 bfin_flag_var_tracking = flag_var_tracking;
116 flag_var_tracking = 0;
118 fprintf (file, ".file \"%s\";\n", input_filename);
120 for (i = 0; arg_regs[i] >= 0; i++)
122 max_arg_registers = i; /* how many arg reg used */
125 /* Called early in the compilation to conditionally modify
126 fixed_regs/call_used_regs. */
128 void
129 conditional_register_usage (void)
131 /* initialize condition code flag register rtx */
132 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
133 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
136 /* Examine machine-dependent attributes of function type FUNTYPE and return its
137 type. See the definition of E_FUNKIND. */
139 static e_funkind funkind (tree funtype)
141 tree attrs = TYPE_ATTRIBUTES (funtype);
142 if (lookup_attribute ("interrupt_handler", attrs))
143 return INTERRUPT_HANDLER;
144 else if (lookup_attribute ("exception_handler", attrs))
145 return EXCPT_HANDLER;
146 else if (lookup_attribute ("nmi_handler", attrs))
147 return NMI_HANDLER;
148 else
149 return SUBROUTINE;
152 /* Legitimize PIC addresses. If the address is already position-independent,
153 we return ORIG. Newly generated position-independent addresses go into a
154 reg. This is REG if nonzero, otherwise we allocate register(s) as
155 necessary. PICREG is the register holding the pointer to the PIC offset
156 table. */
158 static rtx
159 legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
161 rtx addr = orig;
162 rtx new = orig;
164 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
166 int unspec;
167 rtx tmp;
169 if (TARGET_ID_SHARED_LIBRARY)
170 unspec = UNSPEC_MOVE_PIC;
171 else if (GET_CODE (addr) == SYMBOL_REF
172 && SYMBOL_REF_FUNCTION_P (addr))
173 unspec = UNSPEC_FUNCDESC_GOT17M4;
174 else
175 unspec = UNSPEC_MOVE_FDPIC;
177 if (reg == 0)
179 gcc_assert (!no_new_pseudos);
180 reg = gen_reg_rtx (Pmode);
183 tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
184 new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
186 emit_move_insn (reg, new);
187 if (picreg == pic_offset_table_rtx)
188 current_function_uses_pic_offset_table = 1;
189 return reg;
192 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
194 rtx base;
196 if (GET_CODE (addr) == CONST)
198 addr = XEXP (addr, 0);
199 gcc_assert (GET_CODE (addr) == PLUS);
202 if (XEXP (addr, 0) == picreg)
203 return orig;
205 if (reg == 0)
207 gcc_assert (!no_new_pseudos);
208 reg = gen_reg_rtx (Pmode);
211 base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
212 addr = legitimize_pic_address (XEXP (addr, 1),
213 base == reg ? NULL_RTX : reg,
214 picreg);
216 if (GET_CODE (addr) == CONST_INT)
218 gcc_assert (! reload_in_progress && ! reload_completed);
219 addr = force_reg (Pmode, addr);
222 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
224 base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
225 addr = XEXP (addr, 1);
228 return gen_rtx_PLUS (Pmode, base, addr);
231 return new;
234 /* Stack frame layout. */
236 /* Compute the number of DREGS to save with a push_multiple operation.
237 This could include registers that aren't modified in the function,
238 since push_multiple only takes a range of registers.
239 If IS_INTHANDLER, then everything that is live must be saved, even
240 if normally call-clobbered. */
242 static int
243 n_dregs_to_save (bool is_inthandler)
245 unsigned i;
247 for (i = REG_R0; i <= REG_R7; i++)
249 if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
250 return REG_R7 - i + 1;
252 if (current_function_calls_eh_return)
254 unsigned j;
255 for (j = 0; ; j++)
257 unsigned test = EH_RETURN_DATA_REGNO (j);
258 if (test == INVALID_REGNUM)
259 break;
260 if (test == i)
261 return REG_R7 - i + 1;
266 return 0;
269 /* Like n_dregs_to_save, but compute number of PREGS to save. */
271 static int
272 n_pregs_to_save (bool is_inthandler)
274 unsigned i;
276 for (i = REG_P0; i <= REG_P5; i++)
277 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
278 || (!TARGET_FDPIC
279 && i == PIC_OFFSET_TABLE_REGNUM
280 && (current_function_uses_pic_offset_table
281 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
282 return REG_P5 - i + 1;
283 return 0;
286 /* Determine if we are going to save the frame pointer in the prologue. */
288 static bool
289 must_save_fp_p (void)
291 return frame_pointer_needed || regs_ever_live[REG_FP];
294 static bool
295 stack_frame_needed_p (void)
297 /* EH return puts a new return address into the frame using an
298 address relative to the frame pointer. */
299 if (current_function_calls_eh_return)
300 return true;
301 return frame_pointer_needed;
304 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
305 must save all registers; this is used for interrupt handlers.
306 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
307 this for an interrupt (or exception) handler. */
309 static void
310 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
312 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
313 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
314 int dregno = REG_R7 + 1 - ndregs;
315 int pregno = REG_P5 + 1 - npregs;
316 int total = ndregs + npregs;
317 int i;
318 rtx pat, insn, val;
320 if (total == 0)
321 return;
323 val = GEN_INT (-total * 4);
324 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
325 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
326 UNSPEC_PUSH_MULTIPLE);
327 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
328 gen_rtx_PLUS (Pmode, spreg,
329 val));
330 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
331 for (i = 0; i < total; i++)
333 rtx memref = gen_rtx_MEM (word_mode,
334 gen_rtx_PLUS (Pmode, spreg,
335 GEN_INT (- i * 4 - 4)));
336 rtx subpat;
337 if (ndregs > 0)
339 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
340 dregno++));
341 ndregs--;
343 else
345 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
346 pregno++));
347 npregs++;
349 XVECEXP (pat, 0, i + 1) = subpat;
350 RTX_FRAME_RELATED_P (subpat) = 1;
352 insn = emit_insn (pat);
353 RTX_FRAME_RELATED_P (insn) = 1;
356 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
357 must save all registers; this is used for interrupt handlers.
358 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
359 this for an interrupt (or exception) handler. */
361 static void
362 expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
364 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
365 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
366 int total = ndregs + npregs;
367 int i, regno;
368 rtx pat, insn;
370 if (total == 0)
371 return;
373 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
374 XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
375 gen_rtx_PLUS (Pmode, spreg,
376 GEN_INT (total * 4)));
378 if (npregs > 0)
379 regno = REG_P5 + 1;
380 else
381 regno = REG_R7 + 1;
383 for (i = 0; i < total; i++)
385 rtx addr = (i > 0
386 ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
387 : spreg);
388 rtx memref = gen_rtx_MEM (word_mode, addr);
390 regno--;
391 XVECEXP (pat, 0, i + 1)
392 = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
394 if (npregs > 0)
396 if (--npregs == 0)
397 regno = REG_R7 + 1;
401 insn = emit_insn (pat);
402 RTX_FRAME_RELATED_P (insn) = 1;
405 /* Perform any needed actions needed for a function that is receiving a
406 variable number of arguments.
408 CUM is as above.
410 MODE and TYPE are the mode and type of the current parameter.
412 PRETEND_SIZE is a variable that should be set to the amount of stack
413 that must be pushed by the prolog to pretend that our caller pushed
416 Normally, this macro will push all remaining incoming registers on the
417 stack and set PRETEND_SIZE to the length of the registers pushed.
419 Blackfin specific :
420 - VDSP C compiler manual (our ABI) says that a variable args function
421 should save the R0, R1 and R2 registers in the stack.
422 - The caller will always leave space on the stack for the
423 arguments that are passed in registers, so we dont have
424 to leave any extra space.
425 - now, the vastart pointer can access all arguments from the stack. */
427 static void
428 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
429 enum machine_mode mode ATTRIBUTE_UNUSED,
430 tree type ATTRIBUTE_UNUSED, int *pretend_size,
431 int no_rtl)
433 rtx mem;
434 int i;
436 if (no_rtl)
437 return;
439 /* The move for named arguments will be generated automatically by the
440 compiler. We need to generate the move rtx for the unnamed arguments
441 if they are in the first 3 words. We assume at least 1 named argument
442 exists, so we never generate [ARGP] = R0 here. */
444 for (i = cum->words + 1; i < max_arg_registers; i++)
446 mem = gen_rtx_MEM (Pmode,
447 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
448 emit_move_insn (mem, gen_rtx_REG (Pmode, i));
451 *pretend_size = 0;
454 /* Value should be nonzero if functions must have frame pointers.
455 Zero means the frame pointer need not be set up (and parms may
456 be accessed via the stack pointer) in functions that seem suitable. */
459 bfin_frame_pointer_required (void)
461 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
463 if (fkind != SUBROUTINE)
464 return 1;
466 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
467 so we have to override it for non-leaf functions. */
468 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
469 return 1;
471 return 0;
474 /* Return the number of registers pushed during the prologue. */
476 static int
477 n_regs_saved_by_prologue (void)
479 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
480 bool is_inthandler = fkind != SUBROUTINE;
481 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
482 bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
483 || (is_inthandler && !current_function_is_leaf));
484 int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
485 int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
486 int n = ndregs + npregs;
488 if (all || stack_frame_needed_p ())
489 /* We use a LINK instruction in this case. */
490 n += 2;
491 else
493 if (must_save_fp_p ())
494 n++;
495 if (! current_function_is_leaf)
496 n++;
499 if (fkind != SUBROUTINE)
501 int i;
503 /* Increment once for ASTAT. */
504 n++;
506 /* RETE/X/N. */
507 if (lookup_attribute ("nesting", attrs))
508 n++;
510 for (i = REG_P7 + 1; i < REG_CC; i++)
511 if (all
512 || regs_ever_live[i]
513 || (!leaf_function_p () && call_used_regs[i]))
514 n += i == REG_A0 || i == REG_A1 ? 2 : 1;
516 return n;
519 /* Return the offset between two registers, one to be eliminated, and the other
520 its replacement, at the start of a routine. */
522 HOST_WIDE_INT
523 bfin_initial_elimination_offset (int from, int to)
525 HOST_WIDE_INT offset = 0;
527 if (from == ARG_POINTER_REGNUM)
528 offset = n_regs_saved_by_prologue () * 4;
530 if (to == STACK_POINTER_REGNUM)
532 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
533 offset += current_function_outgoing_args_size;
534 else if (current_function_outgoing_args_size)
535 offset += FIXED_STACK_AREA;
537 offset += get_frame_size ();
540 return offset;
543 /* Emit code to load a constant CONSTANT into register REG; setting
544 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
545 Make sure that the insns we generate need not be split. */
547 static void
548 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
550 rtx insn;
551 rtx cst = GEN_INT (constant);
553 if (constant >= -32768 && constant < 65536)
554 insn = emit_move_insn (reg, cst);
555 else
557 /* We don't call split_load_immediate here, since dwarf2out.c can get
558 confused about some of the more clever sequences it can generate. */
559 insn = emit_insn (gen_movsi_high (reg, cst));
560 if (related)
561 RTX_FRAME_RELATED_P (insn) = 1;
562 insn = emit_insn (gen_movsi_low (reg, reg, cst));
564 if (related)
565 RTX_FRAME_RELATED_P (insn) = 1;
568 /* Generate efficient code to add a value to a P register. We can use
569 P1 as a scratch register. Set RTX_FRAME_RELATED_P on the generated
570 insns if FRAME is nonzero. */
572 static void
573 add_to_reg (rtx reg, HOST_WIDE_INT value, int frame)
575 if (value == 0)
576 return;
578 /* Choose whether to use a sequence using a temporary register, or
579 a sequence with multiple adds. We can add a signed 7-bit value
580 in one instruction. */
581 if (value > 120 || value < -120)
583 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
584 rtx insn;
586 if (frame)
587 frame_related_constant_load (tmpreg, value, TRUE);
588 else
589 insn = emit_move_insn (tmpreg, GEN_INT (value));
591 insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
592 if (frame)
593 RTX_FRAME_RELATED_P (insn) = 1;
595 else
598 int size = value;
599 rtx insn;
601 if (size > 60)
602 size = 60;
603 else if (size < -60)
604 /* We could use -62, but that would leave the stack unaligned, so
605 it's no good. */
606 size = -60;
608 insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
609 if (frame)
610 RTX_FRAME_RELATED_P (insn) = 1;
611 value -= size;
613 while (value != 0);
616 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
617 is too large, generate a sequence of insns that has the same effect.
618 SPREG contains (reg:SI REG_SP). */
620 static void
621 emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
623 HOST_WIDE_INT link_size = frame_size;
624 rtx insn;
625 int i;
627 if (link_size > 262140)
628 link_size = 262140;
630 /* Use a LINK insn with as big a constant as possible, then subtract
631 any remaining size from the SP. */
632 insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
633 RTX_FRAME_RELATED_P (insn) = 1;
635 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
637 rtx set = XVECEXP (PATTERN (insn), 0, i);
638 gcc_assert (GET_CODE (set) == SET);
639 RTX_FRAME_RELATED_P (set) = 1;
642 frame_size -= link_size;
644 if (frame_size > 0)
646 /* Must use a call-clobbered PREG that isn't the static chain. */
647 rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
649 frame_related_constant_load (tmpreg, -frame_size, TRUE);
650 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
651 RTX_FRAME_RELATED_P (insn) = 1;
655 /* Return the number of bytes we must reserve for outgoing arguments
656 in the current function's stack frame. */
658 static HOST_WIDE_INT
659 arg_area_size (void)
661 if (current_function_outgoing_args_size)
663 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
664 return current_function_outgoing_args_size;
665 else
666 return FIXED_STACK_AREA;
668 return 0;
671 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
672 function must save all its registers (true only for certain interrupt
673 handlers). */
675 static void
676 do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
678 frame_size += arg_area_size ();
680 if (all || stack_frame_needed_p ()
681 || (must_save_fp_p () && ! current_function_is_leaf))
682 emit_link_insn (spreg, frame_size);
683 else
685 if (! current_function_is_leaf)
687 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
688 gen_rtx_PRE_DEC (Pmode, spreg)),
689 bfin_rets_rtx);
690 rtx insn = emit_insn (pat);
691 RTX_FRAME_RELATED_P (insn) = 1;
693 if (must_save_fp_p ())
695 rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
696 gen_rtx_PRE_DEC (Pmode, spreg)),
697 gen_rtx_REG (Pmode, REG_FP));
698 rtx insn = emit_insn (pat);
699 RTX_FRAME_RELATED_P (insn) = 1;
701 add_to_reg (spreg, -frame_size, 1);
705 /* Like do_link, but used for epilogues to deallocate the stack frame. */
707 static void
708 do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
710 frame_size += arg_area_size ();
712 if (all || stack_frame_needed_p ())
713 emit_insn (gen_unlink ());
714 else
716 rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
718 add_to_reg (spreg, frame_size, 0);
719 if (must_save_fp_p ())
721 rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
722 emit_move_insn (fpreg, postinc);
723 emit_insn (gen_rtx_USE (VOIDmode, fpreg));
725 if (! current_function_is_leaf)
727 emit_move_insn (bfin_rets_rtx, postinc);
728 emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
733 /* Generate a prologue suitable for a function of kind FKIND. This is
734 called for interrupt and exception handler prologues.
735 SPREG contains (reg:SI REG_SP). */
737 static void
738 expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
740 int i;
741 HOST_WIDE_INT frame_size = get_frame_size ();
742 rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
743 rtx predec = gen_rtx_MEM (SImode, predec1);
744 rtx insn;
745 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
746 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
747 tree kspisusp = lookup_attribute ("kspisusp", attrs);
749 if (kspisusp)
751 insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
752 RTX_FRAME_RELATED_P (insn) = 1;
755 /* We need space on the stack in case we need to save the argument
756 registers. */
757 if (fkind == EXCPT_HANDLER)
759 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
760 RTX_FRAME_RELATED_P (insn) = 1;
763 insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
764 RTX_FRAME_RELATED_P (insn) = 1;
766 /* If we're calling other functions, they won't save their call-clobbered
767 registers, so we must save everything here. */
768 if (!current_function_is_leaf)
769 all = true;
770 expand_prologue_reg_save (spreg, all, true);
772 for (i = REG_P7 + 1; i < REG_CC; i++)
773 if (all
774 || regs_ever_live[i]
775 || (!leaf_function_p () && call_used_regs[i]))
777 if (i == REG_A0 || i == REG_A1)
778 insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
779 gen_rtx_REG (PDImode, i));
780 else
781 insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
782 RTX_FRAME_RELATED_P (insn) = 1;
785 if (lookup_attribute ("nesting", attrs))
787 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
788 : fkind == NMI_HANDLER ? REG_RETN
789 : REG_RETI));
790 insn = emit_move_insn (predec, srcreg);
791 RTX_FRAME_RELATED_P (insn) = 1;
794 do_link (spreg, frame_size, all);
796 if (fkind == EXCPT_HANDLER)
798 rtx r0reg = gen_rtx_REG (SImode, REG_R0);
799 rtx r1reg = gen_rtx_REG (SImode, REG_R1);
800 rtx r2reg = gen_rtx_REG (SImode, REG_R2);
801 rtx insn;
803 insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
804 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
805 NULL_RTX);
806 insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
807 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
808 NULL_RTX);
809 insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
810 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
811 NULL_RTX);
812 insn = emit_move_insn (r1reg, spreg);
813 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
814 NULL_RTX);
815 insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
816 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
817 NULL_RTX);
818 insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
819 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
820 NULL_RTX);
824 /* Generate an epilogue suitable for a function of kind FKIND. This is
825 called for interrupt and exception handler epilogues.
826 SPREG contains (reg:SI REG_SP). */
828 static void
829 expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
831 int i;
832 rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
833 rtx postinc = gen_rtx_MEM (SImode, postinc1);
834 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
835 bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
837 /* A slightly crude technique to stop flow from trying to delete "dead"
838 insns. */
839 MEM_VOLATILE_P (postinc) = 1;
841 do_unlink (spreg, get_frame_size (), all);
843 if (lookup_attribute ("nesting", attrs))
845 rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
846 : fkind == NMI_HANDLER ? REG_RETN
847 : REG_RETI));
848 emit_move_insn (srcreg, postinc);
851 /* If we're calling other functions, they won't save their call-clobbered
852 registers, so we must save (and restore) everything here. */
853 if (!current_function_is_leaf)
854 all = true;
856 for (i = REG_CC - 1; i > REG_P7; i--)
857 if (all
858 || regs_ever_live[i]
859 || (!leaf_function_p () && call_used_regs[i]))
861 if (i == REG_A0 || i == REG_A1)
863 rtx mem = gen_rtx_MEM (PDImode, postinc1);
864 MEM_VOLATILE_P (mem) = 1;
865 emit_move_insn (gen_rtx_REG (PDImode, i), mem);
867 else
868 emit_move_insn (gen_rtx_REG (SImode, i), postinc);
871 expand_epilogue_reg_restore (spreg, all, true);
873 emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
875 /* Deallocate any space we left on the stack in case we needed to save the
876 argument registers. */
877 if (fkind == EXCPT_HANDLER)
878 emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
880 emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
883 /* Used while emitting the prologue to generate code to load the correct value
884 into the PIC register, which is passed in DEST. */
886 static rtx
887 bfin_load_pic_reg (rtx dest)
889 struct cgraph_local_info *i = NULL;
890 rtx addr, insn;
892 if (flag_unit_at_a_time)
893 i = cgraph_local_info (current_function_decl);
895 /* Functions local to the translation unit don't need to reload the
896 pic reg, since the caller always passes a usable one. */
897 if (i && i->local)
898 return pic_offset_table_rtx;
900 if (bfin_lib_id_given)
901 addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
902 else
903 addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
904 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
905 UNSPEC_LIBRARY_OFFSET));
906 insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
907 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
908 return dest;
911 /* Generate RTL for the prologue of the current function. */
913 void
914 bfin_expand_prologue (void)
916 rtx insn;
917 HOST_WIDE_INT frame_size = get_frame_size ();
918 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
919 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
920 rtx pic_reg_loaded = NULL_RTX;
922 if (fkind != SUBROUTINE)
924 expand_interrupt_handler_prologue (spreg, fkind);
925 return;
928 if (current_function_limit_stack
929 || TARGET_STACK_CHECK_L1)
931 HOST_WIDE_INT offset
932 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
933 STACK_POINTER_REGNUM);
934 rtx lim = current_function_limit_stack ? stack_limit_rtx : NULL_RTX;
935 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
937 if (!lim)
939 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
940 emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
941 emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
942 lim = p2reg;
944 if (GET_CODE (lim) == SYMBOL_REF)
946 if (TARGET_ID_SHARED_LIBRARY)
948 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
949 rtx val;
950 pic_reg_loaded = bfin_load_pic_reg (p2reg);
951 val = legitimize_pic_address (stack_limit_rtx, p1reg,
952 pic_reg_loaded);
953 emit_move_insn (p1reg, val);
954 frame_related_constant_load (p2reg, offset, FALSE);
955 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
956 lim = p2reg;
958 else
960 rtx limit = plus_constant (lim, offset);
961 emit_move_insn (p2reg, limit);
962 lim = p2reg;
965 else
967 if (lim != p2reg)
968 emit_move_insn (p2reg, lim);
969 add_to_reg (p2reg, offset, 0);
970 lim = p2reg;
972 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
973 emit_insn (gen_trapifcc ());
975 expand_prologue_reg_save (spreg, 0, false);
977 do_link (spreg, frame_size, false);
979 if (TARGET_ID_SHARED_LIBRARY
980 && !TARGET_SEP_DATA
981 && (current_function_uses_pic_offset_table
982 || !current_function_is_leaf))
983 bfin_load_pic_reg (pic_offset_table_rtx);
986 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
987 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
988 eh_return pattern. */
990 void
991 bfin_expand_epilogue (int need_return, int eh_return)
993 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
994 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
996 if (fkind != SUBROUTINE)
998 expand_interrupt_handler_epilogue (spreg, fkind);
999 return;
1002 do_unlink (spreg, get_frame_size (), false);
1004 expand_epilogue_reg_restore (spreg, false, false);
1006 /* Omit the return insn if this is for a sibcall. */
1007 if (! need_return)
1008 return;
1010 if (eh_return)
1011 emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
1013 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
1016 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
1019 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
1020 unsigned int new_reg)
1022 /* Interrupt functions can only use registers that have already been
1023 saved by the prologue, even if they would normally be
1024 call-clobbered. */
1026 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
1027 && !regs_ever_live[new_reg])
1028 return 0;
1030 return 1;
1033 /* Return the value of the return address for the frame COUNT steps up
1034 from the current frame, after the prologue.
1035 We punt for everything but the current frame by returning const0_rtx. */
1038 bfin_return_addr_rtx (int count)
1040 if (count != 0)
1041 return const0_rtx;
1043 return get_hard_reg_initial_val (Pmode, REG_RETS);
1046 /* Try machine-dependent ways of modifying an illegitimate address X
1047 to be legitimate. If we find one, return the new, valid address,
1048 otherwise return NULL_RTX.
1050 OLDX is the address as it was before break_out_memory_refs was called.
1051 In some cases it is useful to look at this to decide what needs to be done.
1053 MODE is the mode of the memory reference. */
1056 legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
1057 enum machine_mode mode ATTRIBUTE_UNUSED)
1059 return NULL_RTX;
1062 static rtx
1063 bfin_delegitimize_address (rtx orig_x)
1065 rtx x = orig_x, y;
1067 if (GET_CODE (x) != MEM)
1068 return orig_x;
1070 x = XEXP (x, 0);
1071 if (GET_CODE (x) == PLUS
1072 && GET_CODE (XEXP (x, 1)) == UNSPEC
1073 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1074 && GET_CODE (XEXP (x, 0)) == REG
1075 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1076 return XVECEXP (XEXP (x, 1), 0, 0);
1078 return orig_x;
1081 /* This predicate is used to compute the length of a load/store insn.
1082 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1083 32-bit instruction. */
1086 effective_address_32bit_p (rtx op, enum machine_mode mode)
1088 HOST_WIDE_INT offset;
1090 mode = GET_MODE (op);
1091 op = XEXP (op, 0);
1093 if (GET_CODE (op) != PLUS)
1095 gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
1096 || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
1097 return 0;
1100 if (GET_CODE (XEXP (op, 1)) == UNSPEC)
1101 return 1;
1103 offset = INTVAL (XEXP (op, 1));
1105 /* All byte loads use a 16-bit offset. */
1106 if (GET_MODE_SIZE (mode) == 1)
1107 return 1;
1109 if (GET_MODE_SIZE (mode) == 4)
1111 /* Frame pointer relative loads can use a negative offset, all others
1112 are restricted to a small positive one. */
1113 if (XEXP (op, 0) == frame_pointer_rtx)
1114 return offset < -128 || offset > 60;
1115 return offset < 0 || offset > 60;
1118 /* Must be HImode now. */
1119 return offset < 0 || offset > 30;
1122 /* Returns true if X is a memory reference using an I register. */
1123 bool
1124 bfin_dsp_memref_p (rtx x)
1126 if (! MEM_P (x))
1127 return false;
1128 x = XEXP (x, 0);
1129 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1130 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1131 x = XEXP (x, 0);
1132 return IREG_P (x);
1135 /* Return cost of the memory address ADDR.
1136 All addressing modes are equally cheap on the Blackfin. */
1138 static int
1139 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
1141 return 1;
/* Subroutine of print_operand; used to print a memory reference X to FILE.
   Handles base+offset, pre-decrement and post-inc/dec forms; anything else
   is printed as an ordinary operand.  */

void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      /* Nested MEMs are not valid addresses here.  */
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

/* Print operand X to FILE, using the modifier letter CODE to select among
   the various assembler spellings (condition suffixes, register halves,
   accumulator parts, MAC flags, etc.).  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode;

  /* '%!' separates insns: " ||" inside a parallel group (SImode insn),
     ";" otherwise.  X is not examined for this modifier.  */
  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Condition suffix of comparison code X.  Signed and unsigned
	 comparisons print the same suffix here.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* 'h'/'d' select low/high register halves; 'w'/'x' select
	     accumulator sub-parts; 'D' a dreg pair; 'H' the second word
	     of a 64-bit register pair; 'T' the byte register name.  */
	  if (code == 'h')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'd')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  /* 'M' prints a multiply/accumulate flag suffix.  */
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  /* 'b' selects "+=" for 0 and "-=" for 1 (auto-inc direction).  */
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FDPIC references get the appropriate relocation suffix.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1440 /* Argument support functions. */
1442 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1443 for a call to a function whose data type is FNTYPE.
1444 For a library call, FNTYPE is 0.
1445 VDSP C Compiler manual, our ABI says that
1446 first 3 words of arguments will use R0, R1 and R2.
1449 void
1450 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1451 rtx libname ATTRIBUTE_UNUSED)
1453 static CUMULATIVE_ARGS zero_cum;
1455 *cum = zero_cum;
1457 /* Set up the number of registers to use for passing arguments. */
1459 cum->nregs = max_arg_registers;
1460 cum->arg_regs = arg_regs;
1462 cum->call_cookie = CALL_NORMAL;
1463 /* Check for a longcall attribute. */
1464 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1465 cum->call_cookie |= CALL_SHORT;
1466 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1467 cum->call_cookie |= CALL_LONG;
1469 return;
1472 /* Update the data in CUM to advance over an argument
1473 of mode MODE and data type TYPE.
1474 (TYPE is null for libcalls where that information may not be available.) */
1476 void
1477 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1478 int named ATTRIBUTE_UNUSED)
1480 int count, bytes, words;
1482 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1483 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1485 cum->words += words;
1486 cum->nregs -= words;
1488 if (cum->nregs <= 0)
1490 cum->nregs = 0;
1491 cum->arg_regs = NULL;
1493 else
1495 for (count = 1; count <= words; count++)
1496 cum->arg_regs++;
1499 return;
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  /* int_size_in_bytes returns -1 for a variable-sized type; such
     arguments are not passed in registers.  */
  if (bytes == -1)
    return NULL_RTX;

  /* Next available argument register, if any remain.  */
  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers. So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

static int
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  /* Variable-sized type: nothing is passed in registers.  */
  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  /* Argument larger than the remaining registers: it is split.  */
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
1563 /* Variable sized types are passed by reference. */
1565 static bool
1566 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1567 enum machine_mode mode ATTRIBUTE_UNUSED,
1568 tree type, bool named ATTRIBUTE_UNUSED)
1570 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1573 /* Decide whether a type should be returned in memory (true)
1574 or in a register (false). This is called by the macro
1575 RETURN_IN_MEMORY. */
1578 bfin_return_in_memory (tree type)
1580 int size = int_size_in_bytes (type);
1581 return size > 2 * UNITS_PER_WORD || size == -1;
/* Register in which address to store a structure value
   is passed to a function.  P0 carries the hidden struct-return
   pointer in this ABI.  */

static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
1593 /* Return true when register may be used to pass function parameters. */
1595 bool
1596 function_arg_regno_p (int n)
1598 int i;
1599 for (i = 0; arg_regs[i] != -1; i++)
1600 if (n == arg_regs[i])
1601 return true;
1602 return false;
1605 /* Returns 1 if OP contains a symbol reference */
1608 symbolic_reference_mentioned_p (rtx op)
1610 register const char *fmt;
1611 register int i;
1613 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1614 return 1;
1616 fmt = GET_RTX_FORMAT (GET_CODE (op));
1617 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1619 if (fmt[i] == 'E')
1621 register int j;
1623 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1624 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1625 return 1;
1628 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1629 return 1;
1632 return 0;
1635 /* Decide whether we can make a sibling call to a function. DECL is the
1636 declaration of the function being targeted by the call and EXP is the
1637 CALL_EXPR representing the call. */
1639 static bool
1640 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1641 tree exp ATTRIBUTE_UNUSED)
1643 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1644 if (fkind != SUBROUTINE)
1645 return false;
1646 if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
1647 return true;
1649 /* When compiling for ID shared libraries, can't sibcall a local function
1650 from a non-local function, because the local function thinks it does
1651 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1652 sibcall epilogue, and we end up with the wrong value in P5. */
1654 if (!flag_unit_at_a_time || decl == NULL)
1655 /* Not enough information. */
1656 return false;
1659 struct cgraph_local_info *this_func, *called_func;
1660 rtx addr, insn;
1662 this_func = cgraph_local_info (current_function_decl);
1663 called_func = cgraph_local_info (decl);
1664 return !called_func->local || this_func->local;
1668 /* Emit RTL insns to initialize the variable parts of a trampoline at
1669 TRAMP. FNADDR is an RTX for the address of the function's pure
1670 code. CXT is an RTX for the static chain value for the function. */
1672 void
1673 initialize_trampoline (tramp, fnaddr, cxt)
1674 rtx tramp, fnaddr, cxt;
1676 rtx t1 = copy_to_reg (fnaddr);
1677 rtx t2 = copy_to_reg (cxt);
1678 rtx addr;
1679 int i = 0;
1681 if (TARGET_FDPIC)
1683 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1684 addr = memory_address (Pmode, tramp);
1685 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1686 i = 8;
1689 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1690 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1691 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1692 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1693 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1695 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1696 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1697 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1698 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1699 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
/* Emit insns to move operands[1] into operands[0], legitimizing any
   PIC/FDPIC symbolic reference along the way.  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we cannot create pseudos; reuse the destination.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  /* A (const (plus (symbol_ref ...) ...)) that is not a legitimate
     constant: compute it as symbol + offset with an explicit add.  */
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !bfin_legitimate_constant_p (op))
    {
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      /* Never add directly into memory; go through a fresh register.  */
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      /* If the add already produced the final destination we are done;
	 otherwise let the caller emit the final (reg or mem) store.  */
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuse to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  /* VOIDmode covers bare CONST_INT/CONST_DOUBLE operands.  */
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
1790 bool
1791 bfin_longcall_p (rtx op, int call_cookie)
1793 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1794 if (call_cookie & CALL_SHORT)
1795 return 0;
1796 if (call_cookie & CALL_LONG)
1797 return 1;
1798 if (TARGET_LONG_CALLS)
1799 return 1;
1800 return 0;
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* The PARALLEL holds: the call, a USE of the cookie, and for sibcalls
     a RETURN; FDPIC adds one more USE below.  */
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      /* Indirect or long calls go through a function descriptor: load
	 the code address and the callee's GOT pointer from it.  */
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie)))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register for an indirect call.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class class = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  /* The condition-code register only holds BImode values.  */
  if (class == CCREGS)
    return mode == BImode;
  /* Partial-DI (accumulator) modes live only in A0/A1.  */
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  The only vector mode
   this port handles is V2HImode (paired 16-bit halves of a dreg).  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}
/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

int
bfin_register_move_cost (enum machine_mode mode,
			 enum reg_class class1, enum reg_class class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && class2 != DREGS)
      || (class1 != DREGS && class2 == CCREGS))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  /* There are some stalls involved when moving from a DREG to a different
     class reg, and using the value in one of the following instructions.
     Attempt to model this by slightly discouraging such moves.  */
  if (class1 == DREGS && class2 != DREGS)
    return 2 * 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
	return 20;
    }
  return 2;
}
/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       enum reg_class class,
		       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (class, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in memory; treat it as a MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
/* Implement TARGET_HANDLE_OPTION.  Returning true means the option was
   accepted (even if we diagnosed its argument).  */

static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      if (value > MAX_LIBRARY_ID)
	error ("-mshared-library-id=%s is not between 0 and %d",
	       arg, MAX_LIBRARY_ID);
      /* Remember that an ID was given so override_options can check it
	 is used together with -mid-shared-library.  */
      bfin_lib_id_given = 1;
      return true;

    default:
      return true;
    }
}
2064 static struct machine_function *
2065 bfin_init_machine_status (void)
2067 struct machine_function *f;
2069 f = ggc_alloc_cleared (sizeof (struct machine_function));
2071 return f;
/* Implement the macro OVERRIDE_OPTIONS.  Validate option combinations and
   set derived flags after all command-line options have been processed.  */

void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("Can't use multiple stack checking methods together.");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  flag_schedule_insns = 0;

  /* Passes after sched2 can break the helpful TImode annotations that
     haifa-sched puts on every insn.  Just do scheduling in reorg.  */
  bfin_flag_schedule_insns2 = flag_schedule_insns_after_reload;
  flag_schedule_insns_after_reload = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  /* Some branch patterns are PARALLELs; the jump SET is element 0.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* For a conditional branch, the label is the "then" arm.  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
2145 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2146 it's a branch that's predicted taken. */
2148 static int
2149 cbranch_predicted_taken_p (rtx insn)
2151 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2153 if (x)
2155 int pred_val = INTVAL (XEXP (x, 0));
2157 return pred_val >= REG_BR_PROB_BASE / 2;
2160 return 0;
/* Templates for use by asm_conditional_branch.
   Row index is (bp << 1) | (BRF for EQ, BRT otherwise); column index is
   the branch length class computed there (0 = short conditional jump,
   1 = jump.s sequence, 2 = jump.l sequence).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* The forced-taken form is only available for the short branch.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nop padding and the (bp) form are mutually exclusive.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.
   Returns a BImode comparison (NE or EQ against zero) of the CC result,
   suitable for use in a conditional branch or set pattern.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
	/* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Other conditions are obtained by reversing the comparison and
	   testing the CC bit with EQ instead of NE.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2245 /* Return nonzero iff C has exactly one bit set if it is interpreted
2246 as a 32-bit constant. */
2249 log2constp (unsigned HOST_WIDE_INT c)
2251 c &= 0xFFFFFFFF;
2252 return c != 0 && (c & (c-1)) == 0;
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Re-insert the sign bit on each step so the shift is arithmetic even
     though we operate on an unsigned copy.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
2280 /* After reload, split the load of an immediate constant. OPERANDS are the
2281 operands of the movsi_insn pattern which we are splitting. We return
2282 nonzero if we emitted a sequence to load the constant, zero if we emitted
2283 nothing because we want to use the splitter's default sequence. */
2286 split_load_immediate (rtx operands[])
2288 HOST_WIDE_INT val = INTVAL (operands[1]);
2289 HOST_WIDE_INT tmp;
2290 HOST_WIDE_INT shifted = val;
2291 HOST_WIDE_INT shifted_compl = ~val;
2292 int num_zero = shiftr_zero (&shifted);
2293 int num_compl_zero = shiftr_zero (&shifted_compl);
2294 unsigned int regno = REGNO (operands[0]);
2295 enum reg_class class1 = REGNO_REG_CLASS (regno);
2297 /* This case takes care of single-bit set/clear constants, which we could
2298 also implement with BITSET/BITCLR. */
2299 if (num_zero
2300 && shifted >= -32768 && shifted < 65536
2301 && (D_REGNO_P (regno)
2302 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2304 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2305 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2306 return 1;
2309 tmp = val & 0xFFFF;
2310 tmp |= -(tmp & 0x8000);
2312 /* If high word has one bit set or clear, try to use a bit operation. */
2313 if (D_REGNO_P (regno))
2315 if (log2constp (val & 0xFFFF0000))
2317 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2318 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2319 return 1;
2321 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2323 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2324 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2328 if (D_REGNO_P (regno))
2330 if (CONST_7BIT_IMM_P (tmp))
2332 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2333 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2334 return 1;
2337 if ((val & 0xFFFF0000) == 0)
2339 emit_insn (gen_movsi (operands[0], const0_rtx));
2340 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2341 return 1;
2344 if ((val & 0xFFFF0000) == 0xFFFF0000)
2346 emit_insn (gen_movsi (operands[0], constm1_rtx));
2347 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2348 return 1;
2352 /* Need DREGs for the remaining case. */
2353 if (regno > REG_R7)
2354 return 0;
2356 if (optimize_size
2357 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2359 /* If optimizing for size, generate a sequence that has more instructions
2360 but is shorter. */
2361 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2362 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2363 GEN_INT (num_compl_zero)));
2364 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2365 return 1;
2367 return 0;
2370 /* Return true if the legitimate memory address for a memory operand of mode
2371 MODE. Return false if not. */
2373 static bool
2374 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2376 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2377 int sz = GET_MODE_SIZE (mode);
2378 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2379 /* The usual offsettable_memref machinery doesn't work so well for this
2380 port, so we deal with the problem here. */
2381 if (value > 0 && sz == 8)
2382 v += 4;
2383 return (v & ~(0x7fff << shift)) == 0;
2386 static bool
2387 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2388 enum rtx_code outer_code)
2390 if (strict)
2391 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2392 else
2393 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
/* Return true if X is a legitimate memory address for mode MODE, false
   otherwise.  STRICT nonzero requests the stricter, post-reload check of
   base registers.  */

bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    /* A plain base register.  */
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either an UNSPEC (SImode only) or a small
       constant displacement validated by bfin_valid_add.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* Fall through.  Harmless: the PRE_DEC test below additionally
       requires the stack pointer, so it cannot accept an address the
       test above rejected for other reasons than the rtx code.  */
  case PRE_DEC:
    /* Pre-decrement is accepted only on the stack pointer.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  Implements TARGET_CANNOT_FORCE_CONST_MEM; X is the
   constant the middle end wants to push to the constant pool.  */

static bool
bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
2445 /* Ensure that for any constant of the form symbol + offset, the offset
2446 remains within the object. Any other constants are ok.
2447 This ensures that flat binaries never have to deal with relocations
2448 crossing section boundaries. */
2450 bool
2451 bfin_legitimate_constant_p (rtx x)
2453 rtx sym;
2454 HOST_WIDE_INT offset;
2456 if (GET_CODE (x) != CONST)
2457 return true;
2459 x = XEXP (x, 0);
2460 gcc_assert (GET_CODE (x) == PLUS);
2462 sym = XEXP (x, 0);
2463 x = XEXP (x, 1);
2464 if (GET_CODE (sym) != SYMBOL_REF
2465 || GET_CODE (x) != CONST_INT)
2466 return true;
2467 offset = INTVAL (x);
2469 if (SYMBOL_REF_DECL (sym) == 0)
2470 return true;
2471 if (offset < 0
2472 || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
2473 return false;
2475 return true;
/* Compute the cost of rtx X (whose code is CODE, appearing in context
   OUTER_CODE) and store it in *TOTAL.  Return true when *TOTAL has been
   set and the subexpressions need not be recursed into by the caller.
   Presumably registered as the TARGET_RTX_COSTS hook -- confirm at the
   TARGET initializer, which is outside this chunk.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      /* Constants that fit the corresponding instruction's immediate
	 field are free; others need an extra insn to load.  */
      if (outer_code == SET || outer_code == PLUS)
	*total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
	*total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
	*total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
	*total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
	*total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
	*total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
	{
	  /* reg + reg * {2,4} matches a scaled-index address/add form.  */
	  if (GET_CODE (op0) == MULT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (op0, 0), outer_code);
		  *total += rtx_cost (op1, outer_code);
		  return true;
		}
	    }
	  *total = cost2;
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, SET);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
	 towards creating too many induction variables.  */
	  if (!reg_or_7bit_operand (op1, SImode))
	    *total += rtx_cost (op1, SET);
#endif
	}
      else if (GET_MODE (x) == DImode)
	{
	  *total = 6 * cost2;
	  if (GET_CODE (op1) != CONST_INT
	      || !CONST_7BIT_IMM_P (INTVAL (op1)))
	    *total += rtx_cost (op1, PLUS);
	  if (GET_CODE (op0) != REG
	      && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	    *total += rtx_cost (op0, PLUS);
	}
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      else
	*total = cost2;

      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
	{
	  if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
	      || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	      || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
	    {
	      *total = cost2;
	      return true;
	    }
	}

      /* NOTE(review): the accumulation below is overwritten by the
	 unconditional assignments to *TOTAL that follow, so op0's cost
	 is effectively discarded -- looks like a latent bug; confirm
	 against a later version of this file before changing.  */
      if (GET_CODE (op0) != REG
	  && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	*total += rtx_cost (op0, code);

      if (GET_MODE (x) == DImode)
	{
	  *total = 2 * cost2;
	  return true;
	}
      *total = cost2;
      if (GET_MODE (x) != SImode)
	return true;

      if (code == AND)
	{
	  if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code);
	}
      else
	{
	  if (! regorlog2_operand (XEXP (x, 1), SImode))
	    *total += rtx_cost (XEXP (x, 1), code);
	}

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Single-bit extracts assigned from are cheap bit operations.  */
      if (outer_code == SET
	  && XEXP (x, 1) == const1_rtx
	  && GET_CODE (XEXP (x, 2)) == CONST_INT)
	{
	  *total = 2 * cost2;
	  return true;
	}
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
      {
	op0 = XEXP (x, 0);
	op1 = XEXP (x, 1);
	/* A widening multiply of two extended operands maps onto a
	   single multiply insn.  */
	if (GET_CODE (op0) == GET_CODE (op1)
	    && (GET_CODE (op0) == ZERO_EXTEND
		|| GET_CODE (op0) == SIGN_EXTEND))
	  {
	    *total = COSTS_N_INSNS (1);
	    op0 = XEXP (op0, 0);
	    op1 = XEXP (op1, 0);
	  }
	else if (optimize_size)
	  *total = COSTS_N_INSNS (1);
	else
	  *total = COSTS_N_INSNS (3);

	if (GET_CODE (op0) != REG
	    && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
	  *total += rtx_cost (op0, MULT);
	if (GET_CODE (op1) != REG
	    && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
	  *total += rtx_cost (op1, MULT);
      }
      return true;

    case UDIV:
    case UMOD:
      /* No divide instruction; division is a long library sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
2680 static void
2681 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2683 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They hold the lowest D-register (R-number) and P-register index found
   in the most recently validated multi-register push/pop pattern.  */
static int first_preg_to_save, first_dreg_to_save;
/* Predicate (also called directly) that validates a multi-register push
   parallel OP: consecutive stores of D-registers followed by P-registers
   at decreasing offsets from SP.  Returns nonzero on success and records
   the first registers of each class in first_{d,p}reg_to_save.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* 8 and 6 are the "none saved" sentinels for D- and P-regs.  */
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* group tracks which register class we are currently matching:
     0 = none yet, 1 = D-registers, 2 = P-registers.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a store of a register.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The address must be SP plus the expected negative offset for
	 element I.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First register decides which class the run starts in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch to the P-register run, or continue the
	     consecutive D-register run.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* P-registers must stay consecutive.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
/* Predicate (also called directly) that validates a multi-register pop
   parallel OP: loads of P-registers counting down, optionally followed by
   D-registers counting down from R7, at increasing offsets from SP.
   Returns nonzero on success and records the lowest registers restored
   in first_{d,p}reg_to_save.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* 8 and 6 are the "none restored" sentinels for D- and P-regs.  */
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* group: 0 = still in the P-register run, 1 = in the D-register run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a load into a register.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* The first load reads directly from SP.  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the switch from P-registers to D-registers.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* D-registers must count down consecutively.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2819 /* Emit assembly code for one multi-register push described by INSN, with
2820 operands in OPERANDS. */
2822 void
2823 output_push_multiple (rtx insn, rtx *operands)
2825 char buf[80];
2826 int ok;
2828 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2829 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2830 gcc_assert (ok);
2832 if (first_dreg_to_save == 8)
2833 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2834 else if (first_preg_to_save == 6)
2835 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2836 else
2837 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2838 first_dreg_to_save, first_preg_to_save);
2840 output_asm_insn (buf, operands);
2843 /* Emit assembly code for one multi-register pop described by INSN, with
2844 operands in OPERANDS. */
2846 void
2847 output_pop_multiple (rtx insn, rtx *operands)
2849 char buf[80];
2850 int ok;
2852 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2853 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2854 gcc_assert (ok);
2856 if (first_dreg_to_save == 8)
2857 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2858 else if (first_preg_to_save == 6)
2859 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2860 else
2861 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2862 first_dreg_to_save, first_preg_to_save);
2864 output_asm_insn (buf, operands);
2867 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2869 static void
2870 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2872 rtx scratch = gen_reg_rtx (mode);
2873 rtx srcmem, dstmem;
2875 srcmem = adjust_address_nv (src, mode, offset);
2876 dstmem = adjust_address_nv (dst, mode, offset);
2877 emit_move_insn (scratch, srcmem);
2878 emit_move_insn (dstmem, scratch);
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  /* Only constant alignments and counts are usable; otherwise the
     values stay 0 and we bail out below.  */
  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers so we can form auto-increment
     accesses against them.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy SImode words (single move, or a rep_movsi
	     loop), then mop up a trailing halfword/byte.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      /* rep_movsi iterates new_count+1 times, hence the -1.  */
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy HImode units, then a trailing byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
/* Implement TARGET_SCHED_ISSUE_RATE.  The scheduler may issue up to
   this many instructions in one cycle.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
/* Adjust the scheduling cost COST of the dependence LINK between DEP_INSN
   (the producer) and INSN (the consumer).  Presumably the
   TARGET_SCHED_ADJUST_COST hook -- the registration is outside this chunk.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  /* NOTE(review): insn_type is computed but never used below.  */
  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      /* Only moves/loads into an address register from memory or a
	 D-register incur the extra latency.  */
      if (! ADDRESS_REGNO_P (REGNO (dest))
	  || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
/* Increment the counter for the number of loop instructions in the
   current function.  Called from insn output when a hardware loop
   setup is emitted.  */

void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}
/* Maximum loop nesting depth.  The hardware provides two sets of loop
   registers (LC/LT/LB 0 and 1), so at most two nested hardware loops.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop, in bytes of instructions.  Presumably derived
   from the reach of the hardware loop-end offset -- confirm against the
   Blackfin LSETUP encoding.  */
#define MAX_LOOP_LENGTH 2042

/* We need to keep a vector of loops */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);
/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into.  */
  basic_block successor;

  /* The last instruction in the tail.  */
  rtx last_insn;

  /* The loop_end insn.  */
  rtx loop_end;

  /* The iteration register.  */
  rtx iter_reg;

  /* The new initialization insn (loads the count into LC when the
     iteration register is a D-register).  */
  rtx init;

  /* The new initialization instruction (the LSETUP insn).  */
  rtx loop_init;

  /* The new label placed at the beginning of the loop.  */
  rtx start_label;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The length of the loop, in bytes (conservative estimate).  */
  int length;

  /* The nesting depth of the loop.  */
  int depth;

  /* Nonzero if we can't optimize this loop.  */
  int bad;

  /* True if we have visited this loop.  */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0.  */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1.  */
  int clobber_loop1;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Immediate outer loop of this loop.  */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops.  */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap.  */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop */
  VEC (loop_info,heap) *loops;
};
3111 static void
3112 bfin_dump_loops (loop_info loops)
3114 loop_info loop;
3116 for (loop = loops; loop; loop = loop->next)
3118 loop_info i;
3119 basic_block b;
3120 unsigned ix;
3122 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
3123 if (loop->bad)
3124 fprintf (dump_file, "(bad) ");
3125 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
3127 fprintf (dump_file, " blocks: [ ");
3128 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
3129 fprintf (dump_file, "%d ", b->index);
3130 fprintf (dump_file, "] ");
3132 fprintf (dump_file, " inner loops: [ ");
3133 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
3134 fprintf (dump_file, "%d ", i->loop_no);
3135 fprintf (dump_file, "]\n");
3137 fprintf (dump_file, "\n");
/* Scan the blocks of LOOP (and its inferiors) looking for basic block
   BB.  Return true, if we find it.  Membership is answered from the
   loop's block bitmap, so this is a constant-time test.  */

static bool
bfin_bb_in_loop (loop_info loop, basic_block bb)
{
  return bitmap_bit_p (loop->block_bitmap, bb->index);
}
3149 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
3150 REG. Return true, if we find any. Don't count the loop's loop_end
3151 insn if it matches LOOP_END. */
3153 static bool
3154 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
3156 unsigned ix;
3157 basic_block bb;
3159 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
3161 rtx insn;
3163 for (insn = BB_HEAD (bb);
3164 insn != NEXT_INSN (BB_END (bb));
3165 insn = NEXT_INSN (insn))
3167 if (!INSN_P (insn))
3168 continue;
3169 if (insn == loop_end)
3170 continue;
3171 if (reg_mentioned_p (reg, PATTERN (insn)))
3172 return true;
3175 return false;
/* Optimize LOOP.  Try to turn its counted-loop form into a hardware loop
   (LSETUP with LC/LT/LB registers).  Inner loops are processed first.
   If the loop cannot be converted, fall back to an explicit
   decrement / compare / conditional-branch sequence at bad_loop.  */

static void
bfin_optimize_loop (loop_info loop)
{
  basic_block bb;
  loop_info inner;
  rtx insn, init_insn, last_insn, nop_insn;
  rtx loop_init, start_label, end_label;
  rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
  rtx iter_reg;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq;
  int length;
  unsigned ix;
  int inner_depth = 0;

  if (loop->visited)
    return;

  loop->visited = 1;

  if (loop->bad)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
      goto bad_loop;
    }

  /* Every loop contains in its list of inner loops every loop nested inside
     it, even if there are intermediate loops.  This works because we're doing
     a depth-first search here and never visit a loop more than once.  */
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    {
      bfin_optimize_loop (inner);

      if (!inner->bad && inner_depth < inner->depth)
	{
	  inner_depth = inner->depth;

	  /* Inherit which loop register sets the inner loops consumed.  */
	  loop->clobber_loop0 |= inner->clobber_loop0;
	  loop->clobber_loop1 |= inner->clobber_loop1;
	}
    }

  loop->depth = inner_depth + 1;
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      goto bad_loop;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  if (!DPREG_P (iter_reg))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    {
      /* Conditional branches and labels may later grow workaround code
	 for the CSYNC/speculative-load anomalies; budget for it.  */
      if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 8;
	  else if (TARGET_SPECLD_ANOMALY)
	    length += 6;
	}
      else if (LABEL_P (insn))
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 4;
	}

      if (INSN_P (insn))
	length += get_attr_length (insn);
    }

  /* INSN is NULL iff we ran off the insn chain without meeting loop_end,
     i.e. start_label is not before loop_end.  */
  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      goto bad_loop;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the insns to see if the loop body clobber
     any hardware loop registers. */

  reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
  reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
  reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
  reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
  reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
  reg_lb1 = gen_rtx_REG (SImode, REG_LB1);

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;

	  if (reg_set_p (reg_lc0, insn)
	      || reg_set_p (reg_lt0, insn)
	      || reg_set_p (reg_lb0, insn))
	    loop->clobber_loop0 = 1;

	  if (reg_set_p (reg_lc1, insn)
	      || reg_set_p (reg_lt1, insn)
	      || reg_set_p (reg_lb1, insn))
	    loop->clobber_loop1 |= 1;
	}
    }

  /* An outermost (depth == MAX_LOOP_DEPTH) loop must use set 0, so a
     clobber of set 0 is as fatal there as clobbering both sets.  */
  if ((loop->clobber_loop0 && loop->clobber_loop1)
      || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
    {
      loop->depth = MAX_LOOP_DEPTH + 1;
      if (dump_file)
	fprintf (dump_file, ";; loop %d no loop reg available\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = PREV_INSN (loop->loop_end);

  /* Walk backwards (possibly through single-predecessor blocks) to the
     last real insn of the loop body.  */
  while (1)
    {
      for (; last_insn != PREV_INSN (BB_HEAD (bb));
	   last_insn = PREV_INSN (last_insn))
	if (INSN_P (last_insn))
	  break;

      if (last_insn != PREV_INSN (BB_HEAD (bb)))
	break;

      if (single_pred_p (bb)
	  && single_pred (bb) != ENTRY_BLOCK_PTR)
	{
	  bb = single_pred (bb);
	  last_insn = BB_END (bb);
	  continue;
	}
      else
	{
	  last_insn = NULL_RTX;
	  break;
	}
    }

  if (!last_insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has no last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  if (JUMP_P (last_insn))
    {
      loop_info inner = bb->aux;
      if (inner
	  && inner->outer == loop
	  && inner->loop_end == last_insn
	  && inner->depth == 1)
	/* This jump_insn is the exact loop_end of an inner loop
	   and to be optimized away. So use the inner's last_insn. */
	last_insn = inner->last_insn;
      else
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has bad last instruction\n",
		     loop->loop_no);
	  goto bad_loop;
	}
    }
  else if (CALL_P (last_insn)
	   || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
	       && get_attr_type (last_insn) == TYPE_SYNC)
	   || recog_memoized (last_insn) == CODE_FOR_return_internal)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* If the last insn is an asm or a multi-insn sequence, append a NOP so
     the loop-end label lands on a simple single instruction.  */
  if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
      || asm_noperands (PATTERN (last_insn)) >= 0
      || (GET_CODE (PATTERN (last_insn)) != SEQUENCE
	  && get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI))
    {
      nop_insn = emit_insn_after (gen_nop (), last_insn);
      last_insn = nop_insn;
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Prefer register set 1 for an innermost loop so set 0 stays free for
     an enclosing loop.  */
  if (loop->depth == 1 && !loop->clobber_loop1)
    {
      lc_reg = reg_lc1;
      lt_reg = reg_lt1;
      lb_reg = reg_lb1;
      loop->clobber_loop1 = 1;
    }
  else
    {
      lc_reg = reg_lc0;
      lt_reg = reg_lt0;
      lb_reg = reg_lb0;
      loop->clobber_loop0 = 1;
    }

  /* If iter_reg is a DREG, we need generate an instruction to load
     the loop count into LC register. */
  if (D_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = gen_movsi (lc_reg, iter_reg);
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
					       lb_reg, end_label,
					       lc_reg);
    }
  else if (P_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = NULL_RTX;
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, iter_reg);
    }
  else
    gcc_unreachable ();

  loop->init = init_insn;
  loop->end_label = end_label;
  loop->loop_init = loop_init;

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* Emit [init_insn;] LSETUP; start_label at the end of the predecessor
     block, and delete the old loop_end branch.  */
  start_sequence ();

  if (loop->init != NULL_RTX)
    emit_insn (loop->init);
  emit_insn (loop->loop_init);
  emit_label (loop->start_label);

  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, BB_END (loop->predecessor));
  delete_insn (loop->loop_end);

  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return;

 bad_loop:

  if (dump_file)
    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);

  loop->bad = 1;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
	 without scratch register.  Replace the loop_end pattern with
	 an explicit decrement, compare and branch.  */
      rtx insn;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
				    loop->iter_reg,
				    constm1_rtx),
			loop->loop_end);

      emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
			loop->loop_end);

      insn = emit_jump_insn_before (gen_bne (loop->start_label),
				    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
}
/* Called from bfin_reorg_loops when a potential loop end is found.  LOOP is
   a newly set up structure describing the loop, it is this function's
   responsibility to fill most of it.  TAIL_BB and TAIL_INSN point to the
   loop_end insn and its enclosing basic block.  */

static void
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
{
  unsigned dwork = 0;
  basic_block bb;
  /* Worklist of blocks still to be scanned; DWORK indexes the next entry.  */
  VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);

  /* Initialize every field of LOOP; callers rely on this function to
     leave no field undefined.  */
  loop->tail = tail_bb;
  loop->head = BRANCH_EDGE (tail_bb)->dest;
  loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
  loop->predecessor = NULL;
  loop->loop_end = tail_insn;
  loop->last_insn = NULL_RTX;
  /* The loop_end pattern is a PARALLEL; element 1 sets the iteration
     counter register.  */
  loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
  loop->depth = loop->length = 0;
  loop->visited = 0;
  loop->clobber_loop0 = loop->clobber_loop1 = 0;
  loop->outer = NULL;
  loop->loops = NULL;

  loop->init = loop->loop_init = NULL_RTX;
  /* Element 0 of the PARALLEL is the conditional branch; dig the loop's
     start label out of its IF_THEN_ELSE.  */
  loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
  loop->end_label = NULL_RTX;
  loop->bad = 0;

  /* Breadth-first walk from the loop head, collecting every block that
     belongs to the loop.  */
  VEC_safe_push (basic_block, heap, works, loop->head);

  while (VEC_iterate (basic_block, works, dwork++, bb))
    {
      edge e;
      edge_iterator ei;
      if (bb == EXIT_BLOCK_PTR)
        {
          /* We've reached the exit block.  The loop must be bad. */
          if (dump_file)
            fprintf (dump_file,
                     ";; Loop is bad - reached exit block while scanning\n");
          loop->bad = 1;
          break;
        }

      if (bitmap_bit_p (loop->block_bitmap, bb->index))
        continue;

      /* We've not seen this block before.  Add it to the loop's
         list and then add each successor to the work list.  */

      VEC_safe_push (basic_block, heap, loop->blocks, bb);
      bitmap_set_bit (loop->block_bitmap, bb->index);

      if (bb != tail_bb)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
              /* Only follow edges along which the iteration counter is
                 still live; other successors are outside the loop.  */
              if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
                                    REGNO (loop->iter_reg)))
                continue;
              if (!VEC_space (basic_block, works, 1))
                {
                  /* The vector is full.  Compact away the DWORK entries
                     already consumed before growing, to avoid needless
                     reallocation.  */
                  if (dwork)
                    {
                      VEC_block_remove (basic_block, works, 0, dwork);
                      dwork = 0;
                    }
                  else
                    VEC_reserve (basic_block, heap, works, 1);
                }
              VEC_quick_push (basic_block, works, succ);
            }
        }
    }

  if (!loop->bad)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
        {
          loop->predecessor = EDGE_PRED (loop->head, 0)->src;
          if (loop->predecessor == loop->tail)
            /* We wanted the other predecessor.  */
            loop->predecessor = EDGE_PRED (loop->head, 1)->src;

          /* We can only place a loop insn on a fall through edge of a
             single exit block.  */
          if (EDGE_COUNT (loop->predecessor->succs) != 1
              || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
              /* If loop->predecessor is in loop, loop->head is not really
                 the head of the loop.  */
              || bfin_bb_in_loop (loop, loop->predecessor))
            loop->predecessor = NULL;
        }

      if (loop->predecessor == NULL)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop has bad predecessor\n");
          loop->bad = 1;
        }
    }

#ifdef ENABLE_CHECKING
  /* Make sure nothing jumps into this loop.  This shouldn't happen as we
     wouldn't have generated the counted loop patterns in such a case.
     However, this test must be done after the test above to detect loops
     with invalid headers.  */
  if (!loop->bad)
    for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
      {
        edge e;
        edge_iterator ei;
        if (bb == loop->head)
          continue;
        FOR_EACH_EDGE (e, ei, bb->preds)
          {
            basic_block pred = EDGE_PRED (bb, ei.index)->src;
            if (!bfin_bb_in_loop (loop, pred))
              abort ();
          }
      }
#endif
  VEC_free (basic_block, heap, works);
}
/* Top-level driver for the hardware-loop optimization: discover every
   potential counted loop, compute loop nesting, run the per-loop
   optimization, and release all per-loop data.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  bitmap_obstack stack;
  bitmap tmp_bitmap;
  basic_block bb;
  loop_info loops = NULL;   /* Singly-linked list of all discovered loops.  */
  loop_info loop;
  int nloops = 0;

  bitmap_obstack_initialize (&stack);

  /* Find all the possible loop tails.  This means searching for every
     loop_end instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      /* Skip trailing notes to find the real last insn of the block.  */
      while (GET_CODE (tail) == NOTE)
        tail = PREV_INSN (tail);

      bb->aux = NULL;

      if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
        {
          /* A possible loop end */

          loop = XNEW (struct loop_info);
          loop->next = loops;
          loops = loop;
          loop->loop_no = nloops++;
          loop->blocks = VEC_alloc (basic_block, heap, 20);
          loop->block_bitmap = BITMAP_ALLOC (&stack);
          bb->aux = loop;

          if (dump_file)
            {
              fprintf (dump_file, ";; potential loop %d ending at\n",
                       loop->loop_no);
              print_rtl_single (dump_file, tail);
            }

          bfin_discover_loop (loop, bb, tail);
        }
    }

  tmp_bitmap = BITMAP_ALLOC (&stack);
  /* Compute loop nestings.  For each pair of loops, intersect their
     block bitmaps: disjoint means unrelated, one contained in the other
     means nesting, and partial overlap means both are unusable.  */
  for (loop = loops; loop; loop = loop->next)
    {
      loop_info other;
      if (loop->bad)
        continue;

      for (other = loop->next; other; other = other->next)
        {
          if (other->bad)
            continue;

          bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
          if (bitmap_empty_p (tmp_bitmap))
            continue;
          if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
            {
              /* OTHER is nested inside LOOP.  */
              other->outer = loop;
              VEC_safe_push (loop_info, heap, loop->loops, other);
            }
          else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
            {
              /* LOOP is nested inside OTHER.  */
              loop->outer = other;
              VEC_safe_push (loop_info, heap, other->loops, loop);
            }
          else
            /* Partial overlap: neither contains the other.  */
            loop->bad = other->bad = 1;
        }
    }
  BITMAP_FREE (tmp_bitmap);

  if (dump_file)
    {
      fprintf (dump_file, ";; All loops found:\n\n");
      bfin_dump_loops (loops);
    }

  /* Now apply the optimizations.  */
  for (loop = loops; loop; loop = loop->next)
    bfin_optimize_loop (loop);

  if (dump_file)
    {
      fprintf (dump_file, ";; After hardware loops optimization:\n\n");
      bfin_dump_loops (loops);
    }

  /* Free up the loop structures */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      BITMAP_FREE (loop->block_bitmap);
      XDELETE (loop);
    }

  if (dump_file)
    print_rtl (dump_file, get_insns ());

  /* Clear the bb->aux pointers we used to map blocks to loops.  */
  FOR_EACH_BB (bb)
    bb->aux = NULL;
}
/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.
   SLOT[0] is the 32-bit slot (filled with an MNOP if empty), SLOT[1] is
   mandatory, and SLOT[2] is filled with a NOP if empty.  */
static bool
gen_one_bundle (rtx slot[3])
{
  rtx bundle;

  gcc_assert (slot[1] != NULL_RTX);

  /* Verify that we really can do the multi-issue: nothing but deleted-insn
     notes may sit between consecutive slot members.  */
  if (slot[0])
    {
      rtx t = NEXT_INSN (slot[0]);
      while (t != slot[1])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }
  if (slot[2])
    {
      rtx t = NEXT_INSN (slot[1]);
      while (t != slot[2])
        {
          if (GET_CODE (t) != NOTE
              || NOTE_LINE_NUMBER (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }

  /* Pad empty slots so the bundle always has three members.  */
  if (slot[0] == NULL_RTX)
    slot[0] = emit_insn_before (gen_mnop (), slot[1]);
  if (slot[2] == NULL_RTX)
    slot[2] = emit_insn_after (gen_nop (), slot[1]);

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATOR (slot[1])
      && INSN_LOCATOR (slot[1]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[1]) = INSN_LOCATOR (slot[0]);
  if (INSN_LOCATOR (slot[2])
      && INSN_LOCATOR (slot[2]) != INSN_LOCATOR (slot[0]))
    INSN_LOCATOR (slot[2]) = INSN_LOCATOR (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);

  /* This is a cheat to avoid emit_insn's special handling of SEQUENCEs.
     Generating a PARALLEL first and changing its code later is the
     easiest way to emit a SEQUENCE insn.  */
  bundle = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (3, slot[0], slot[1], slot[2]));
  emit_insn_before (bundle, slot[0]);
  remove_insn (slot[0]);
  remove_insn (slot[1]);
  remove_insn (slot[2]);
  PUT_CODE (bundle, SEQUENCE);

  return true;
}
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      rtx insn, next;
      rtx slot[3];          /* Candidate members of the current bundle.  */
      int n_filled = 0;     /* How many of the slots are occupied.  */

      slot[0] = slot[1] = slot[2] = NULL_RTX;
      for (insn = BB_HEAD (bb);; insn = next)
        {
          int at_end;
          if (INSN_P (insn))
            {
              /* DSP32 insns go in slot 0; others fill slots 1 then 2.  */
              if (get_attr_type (insn) == TYPE_DSP32)
                slot[0] = insn;
              else if (slot[1] == NULL_RTX)
                slot[1] = insn;
              else
                slot[2] = insn;
              n_filled++;
            }

          /* Advance past notes, USEs and CLOBBERs to the next real insn
             (or to the end of the block).  */
          next = NEXT_INSN (insn);
          while (next && insn != BB_END (bb)
                 && !(INSN_P (next)
                      && GET_CODE (PATTERN (next)) != USE
                      && GET_CODE (PATTERN (next)) != CLOBBER))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }

          /* BB_END can change due to emitting extra NOPs, so check here.  */
          at_end = insn == BB_END (bb);
          /* TImode on the next insn marks a cycle boundary set by the
             scheduler; close the current bundle there.  */
          if (at_end || GET_MODE (next) == TImode)
            {
              if ((n_filled < 2
                   || !gen_one_bundle (slot))
                  && slot[0] != NULL_RTX)
                {
                  /* The bundle wasn't formed; strip the UNSPEC_32BIT
                     wrapper from a lone DSP32 insn so it can be emitted
                     as an ordinary insn, and force re-recognition.  */
                  rtx pat = PATTERN (slot[0]);
                  if (GET_CODE (pat) == SET
                      && GET_CODE (SET_SRC (pat)) == UNSPEC
                      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
                    {
                      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
                      INSN_CODE (slot[0]) = -1;
                    }
                }
              n_filled = 0;
              slot[0] = slot[1] = slot[2] = NULL_RTX;
            }
          if (at_end)
            break;
        }
    }
}
3891 /* Return an insn type for INSN that can be used by the caller for anomaly
3892 workarounds. This differs from plain get_attr_type in that it handles
3893 SEQUENCEs. */
3895 static enum attr_type
3896 type_for_anomaly (rtx insn)
3898 rtx pat = PATTERN (insn);
3899 if (GET_CODE (pat) == SEQUENCE)
3901 enum attr_type t;
3902 t = get_attr_type (XVECEXP (pat, 0, 1));
3903 if (t == TYPE_MCLD)
3904 return t;
3905 t = get_attr_type (XVECEXP (pat, 0, 2));
3906 if (t == TYPE_MCLD)
3907 return t;
3908 return TYPE_MCST;
3910 else
3911 return get_attr_type (insn);
3914 /* Return nonzero if INSN contains any loads that may trap. It handles
3915 SEQUENCEs correctly. */
3917 static bool
3918 trapping_loads_p (rtx insn)
3920 rtx pat = PATTERN (insn);
3921 if (GET_CODE (pat) == SEQUENCE)
3923 enum attr_type t;
3924 t = get_attr_type (XVECEXP (pat, 0, 1));
3925 if (t == TYPE_MCLD
3926 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 1)))))
3927 return true;
3928 t = get_attr_type (XVECEXP (pat, 0, 2));
3929 if (t == TYPE_MCLD
3930 && may_trap_p (SET_SRC (PATTERN (XVECEXP (pat, 0, 2)))))
3931 return true;
3932 return false;
3934 else
3935 return may_trap_p (SET_SRC (single_set (insn)));
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Distance (in insns) since the last predicted-false conditional
     branch; INT_MAX means "no branch in range".  */
  int cycles_since_jump = INT_MAX;

  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (bfin_flag_schedule_insns2)
    {
      splitting_for_sched = 1;
      split_all_insns (0);
      splitting_for_sched = 0;

      update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
         instructions.  */
      bfin_gen_bundles ();
    }

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
          || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              /* Start counting insns after this predicted-false branch.  */
              last_condjump = insn;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (INSN_P (insn))
        {
          enum attr_type type = type_for_anomaly (insn);
          int delay_needed = 0;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
            {
              if (trapping_loads_p (insn))
                delay_needed = 3;
            }
          else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
            delay_needed = 4;

          if (delay_needed > cycles_since_jump)
            {
              rtx pat;
              int num_clobbers;
              rtx *op = recog_data.operand;

              delay_needed -= cycles_since_jump;

              /* Rewrite the branch in place: either force it to be
                 predicted taken (smaller) or attach the needed number
                 of NOPs to it (faster).  */
              extract_insn (last_condjump);
              if (optimize_size)
                {
                  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                     op[3]);
                  cycles_since_jump = INT_MAX;
                }
              else
                /* Do not adjust cycles_since_jump in this case, so that
                   we'll increase the number of NOPs for a subsequent insn
                   if necessary.  */
                pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                             GEN_INT (delay_needed));
              PATTERN (last_condjump) = pat;
              INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
            }
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx label = target;
          cycles_since_jump = 0;
          /* Scan the first few insns at the branch target.  */
          for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
            {
              rtx pat;

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
                  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
                continue;

              if (INSN_P (target))
                {
                  enum attr_type type = type_for_anomaly (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      rtx prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));
                      /* If the insn falling through to LABEL already
                         carries NOPs from the first pass, the new NOPs
                         also cover the fallthrough path; reduce its count
                         to avoid doubling the delay.  */
                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x;
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = PATTERN (prev);
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      while (delay_needed-- > 0)
                        emit_insn_after (gen_nop (), label);
                      break;
                    }
                }
            }
        }
    }

  if (bfin_flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      timevar_pop (TV_VAR_TRACKING);
    }
}
4136 /* Handle interrupt_handler, exception_handler and nmi_handler function
4137 attributes; arguments as in struct attribute_spec.handler. */
4139 static tree
4140 handle_int_attribute (tree *node, tree name,
4141 tree args ATTRIBUTE_UNUSED,
4142 int flags ATTRIBUTE_UNUSED,
4143 bool *no_add_attrs)
4145 tree x = *node;
4146 if (TREE_CODE (x) == FUNCTION_DECL)
4147 x = TREE_TYPE (x);
4149 if (TREE_CODE (x) != FUNCTION_TYPE)
4151 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4152 IDENTIFIER_POINTER (name));
4153 *no_add_attrs = true;
4155 else if (funkind (x) != SUBROUTINE)
4156 error ("multiple function type attributes specified");
4158 return NULL_TREE;
4161 /* Return 0 if the attributes for two types are incompatible, 1 if they
4162 are compatible, and 2 if they are nearly compatible (which causes a
4163 warning to be generated). */
4165 static int
4166 bfin_comp_type_attributes (tree type1, tree type2)
4168 e_funkind kind1, kind2;
4170 if (TREE_CODE (type1) != FUNCTION_TYPE)
4171 return 1;
4173 kind1 = funkind (type1);
4174 kind2 = funkind (type2);
4176 if (kind1 != kind2)
4177 return 0;
4179 /* Check for mismatched modifiers */
4180 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
4181 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
4182 return 0;
4184 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
4185 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
4186 return 0;
4188 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
4189 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
4190 return 0;
4192 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
4193 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
4194 return 0;
4196 return 1;
4199 /* Handle a "longcall" or "shortcall" attribute; arguments as in
4200 struct attribute_spec.handler. */
4202 static tree
4203 bfin_handle_longcall_attribute (tree *node, tree name,
4204 tree args ATTRIBUTE_UNUSED,
4205 int flags ATTRIBUTE_UNUSED,
4206 bool *no_add_attrs)
4208 if (TREE_CODE (*node) != FUNCTION_TYPE
4209 && TREE_CODE (*node) != FIELD_DECL
4210 && TREE_CODE (*node) != TYPE_DECL)
4212 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
4213 IDENTIFIER_POINTER (name));
4214 *no_add_attrs = true;
4217 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
4218 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
4219 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
4220 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
4222 warning (OPT_Wattributes,
4223 "can't apply both longcall and shortcall attributes to the same function");
4224 *no_add_attrs = true;
4227 return NULL_TREE;
/* Table of valid machine attributes.  All entries require a function
   type (type_req and fn_type_req are true); none takes arguments.
   "nesting", "kspisusp" and "saveall" are pure markers with no handler;
   the handler entries validate attribute placement and exclusivity.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true, true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
  { "nesting", 0, 0, false, true, true, NULL },
  { "kspisusp", 0, 0, false, true, true, NULL },
  { "saveall", 0, 0, false, true, true, NULL },
  { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
4245 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
4246 tell the assembler to generate pointers to function descriptors in
4247 some cases. */
4249 static bool
4250 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
4252 if (TARGET_FDPIC && size == UNITS_PER_WORD)
4254 if (GET_CODE (value) == SYMBOL_REF
4255 && SYMBOL_REF_FUNCTION_P (value))
4257 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
4258 output_addr_const (asm_out_file, value);
4259 fputs (")\n", asm_out_file);
4260 return true;
4262 if (!aligned_p)
4264 /* We've set the unaligned SI op to NULL, so we always have to
4265 handle the unaligned case here. */
4266 assemble_integer_with_op ("\t.4byte\t", value);
4267 return true;
4270 return default_assemble_integer (value, size, aligned_p);
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
                      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
                      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      if (delta >= -64 && delta <= 63)
        {
          /* Fits in a single add-immediate.  */
          xops[0] = GEN_INT (delta);
          output_asm_insn ("%1 += %0;", xops);
        }
      else if (delta >= -128 && delta < -64)
        {
          /* Split into two add-immediates, each within [-64, 63].  */
          xops[0] = GEN_INT (delta + 64);
          output_asm_insn ("%1 += -64; %1 += %0;", xops);
        }
      else if (delta > 63 && delta <= 126)
        {
          /* Likewise for the positive out-of-range cases.  */
          xops[0] = GEN_INT (delta - 63);
          output_asm_insn ("%1 += 63; %1 += %0;", xops);
        }
      else
        {
          /* Too large for add-immediates; build the constant in R3.  */
          xops[0] = GEN_INT (delta);
          output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
        }
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* Load the vtable pointer (*this) into P2.  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
        {
          /* The offset doesn't fit a load displacement; materialize it
             in P1 and add it to the base first.  */
          rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
          xops[0] = GEN_INT (vcall_offset);
          xops[1] = tmp2;
          output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
          xops[0] = gen_rtx_MEM (Pmode, p2tmp);
        }
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the leading "1 ||" makes this condition always true,
     so the jump is emitted unconditionally -- confirm whether a PIC
     call sequence was intended for the disabled case.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
/* Codes for all the Blackfin builtins.  Used as the function codes
   passed to def_builtin and dispatched on in the expansion tables
   (bdesc_2arg / bdesc_1arg) below.  */
enum bfin_builtins
{
  /* Synchronization.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  /* V2HI construction/extraction.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Paired 16-bit (V2HI) arithmetic.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit arithmetic.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit arithmetic.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  /* Half-word multiplies producing SImode results.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  /* Complex (packed 16-bit) multiply/MAC.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_MAX
};
/* Register one machine-specific builtin: NAME with function type TYPE
   and function code CODE, in the BUILT_IN_MD class, with no assembler
   alias and no attributes.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
/* Set up all builtin functions for this target.  Builds the function
   type nodes needed, then registers each __builtin_bfin_* name with its
   type and BFIN_BUILTIN_* code via def_builtin.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
                                NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
                                NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
                                NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
                                short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
                                NULL_TREE);

  /* Synchronization builtins.  (The old comment here referred to "MMX
     insns" -- a leftover from the i386 backend this was modeled on.)  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  /* V2HI construction and extraction.  */
  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
               BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
               BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MAX_2X16);

  /* Paired 16-bit fractional arithmetic.  */
  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
               BFIN_BUILTIN_ABS_2X16);

  /* Scalar 16-bit fractional arithmetic.  */
  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
               BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
               BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
               BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
               BFIN_BUILTIN_DIFFLH_2X16);

  /* Half-word multiplies with SImode results.  */
  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
               BFIN_BUILTIN_MULHISIHH);

  /* Scalar 32-bit fractional arithmetic.  */
  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
               BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
               BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
               BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
               BFIN_BUILTIN_MULT_1X32);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
               BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
               BFIN_BUILTIN_LSHIFT_2X16);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
               BFIN_BUILTIN_CPLX_MSU_16);
}
/* One entry in the builtin expansion tables (bdesc_2arg / bdesc_1arg)
   used by the builtin expander.  */
struct builtin_description
{
  const enum insn_code icode;     /* Insn pattern to expand to.  */
  const char *const name;         /* User-visible builtin name.  */
  const enum bfin_builtins code;  /* The builtin's function code.  */
  int macflag;                    /* MACFLAG_* operand for the multiply
                                     patterns; -1 (or 0 in bdesc_1arg)
                                     when no flag operand is used.  */
};
/* Two-operand builtins, mapping each builtin code to its insn pattern.
   The last five entries are multiplies carrying a MACFLAG_* operand.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
/* One-operand builtins; the macflag column is unused here (0).  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
4602 /* Errors in the source file can cause expand_expr to return const0_rtx
4603 where we expect a vector. To avoid crashing, use one of the vector
4604 clear instructions. */
4605 static rtx
4606 safe_vector_operand (rtx x, enum machine_mode mode)
4608 if (x != const0_rtx)
4609 return x;
4610 x = gen_reg_rtx (SImode);
4612 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
4613 return gen_lowpart (mode, x);
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.
   Expands the two arguments of call EXP, coerces them to the modes the
   insn ICODE expects, and emits the insn.  Returns the result register
   (TARGET if usable, else a fresh pseudo), or 0 if generation failed.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against const0_rtx arriving where a vector is expected
     (can happen after source errors).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* 16-bit operands may have been expanded in SImode (or as VOIDmode
     constants); narrow them to the HImode the insn expects.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Copy operands into registers if they don't already satisfy the
     insn's operand predicates.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* MAC-style insns take an extra flag operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
4675 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
4677 static rtx
4678 bfin_expand_unop_builtin (enum insn_code icode, tree exp,
4679 rtx target)
4681 rtx pat;
4682 tree arg0 = CALL_EXPR_ARG (exp, 0);
4683 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4684 enum machine_mode op0mode = GET_MODE (op0);
4685 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4686 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4688 if (! target
4689 || GET_MODE (target) != tmode
4690 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4691 target = gen_reg_rtx (tmode);
4693 if (VECTOR_MODE_P (mode0))
4694 op0 = safe_vector_operand (op0, mode0);
4696 if (op0mode == SImode && mode0 == HImode)
4698 op0mode = HImode;
4699 op0 = gen_lowpart (HImode, op0);
4701 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
4703 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4704 op0 = copy_to_mode_reg (mode0, op0);
4706 pat = GEN_FCN (icode) (target, op0);
4707 if (! pat)
4708 return 0;
4709 emit_insn (pat);
4710 return target;
4713 /* Expand an expression EXP that calls a built-in function,
4714 with result going to TARGET if that's convenient
4715 (and in mode MODE if that's convenient).
4716 SUBTARGET may be used as the target for computing one of EXP's operands.
4717 IGNORE is nonzero if the value is to be ignored. */
4719 static rtx
4720 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
4721 rtx subtarget ATTRIBUTE_UNUSED,
4722 enum machine_mode mode ATTRIBUTE_UNUSED,
4723 int ignore ATTRIBUTE_UNUSED)
4725 size_t i;
4726 enum insn_code icode;
4727 const struct builtin_description *d;
4728 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
4729 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4730 tree arg0, arg1, arg2;
4731 rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
4732 enum machine_mode tmode, mode0;
4734 switch (fcode)
4736 case BFIN_BUILTIN_CSYNC:
4737 emit_insn (gen_csync ());
4738 return 0;
4739 case BFIN_BUILTIN_SSYNC:
4740 emit_insn (gen_ssync ());
4741 return 0;
4743 case BFIN_BUILTIN_DIFFHL_2X16:
4744 case BFIN_BUILTIN_DIFFLH_2X16:
4745 arg0 = CALL_EXPR_ARG (exp, 0);
4746 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4747 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
4748 ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
4749 tmode = insn_data[icode].operand[0].mode;
4750 mode0 = insn_data[icode].operand[1].mode;
4752 if (! target
4753 || GET_MODE (target) != tmode
4754 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4755 target = gen_reg_rtx (tmode);
4757 if (VECTOR_MODE_P (mode0))
4758 op0 = safe_vector_operand (op0, mode0);
4760 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4761 op0 = copy_to_mode_reg (mode0, op0);
4763 pat = GEN_FCN (icode) (target, op0, op0);
4764 if (! pat)
4765 return 0;
4766 emit_insn (pat);
4767 return target;
4769 case BFIN_BUILTIN_CPLX_MUL_16:
4770 arg0 = CALL_EXPR_ARG (exp, 0);
4771 arg1 = CALL_EXPR_ARG (exp, 1);
4772 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4773 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4774 accvec = gen_reg_rtx (V2PDImode);
4776 if (! target
4777 || GET_MODE (target) != V2HImode
4778 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4779 target = gen_reg_rtx (tmode);
4780 if (! register_operand (op0, GET_MODE (op0)))
4781 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4782 if (! register_operand (op1, GET_MODE (op1)))
4783 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4785 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
4786 const0_rtx, const0_rtx,
4787 const1_rtx, GEN_INT (MACFLAG_NONE)));
4788 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4789 const1_rtx, const1_rtx,
4790 const0_rtx, accvec, const1_rtx, const0_rtx,
4791 GEN_INT (MACFLAG_NONE), accvec));
4793 return target;
4795 case BFIN_BUILTIN_CPLX_MAC_16:
4796 case BFIN_BUILTIN_CPLX_MSU_16:
4797 arg0 = CALL_EXPR_ARG (exp, 0);
4798 arg1 = CALL_EXPR_ARG (exp, 1);
4799 arg2 = CALL_EXPR_ARG (exp, 2);
4800 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4801 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4802 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4803 accvec = gen_reg_rtx (V2PDImode);
4805 if (! target
4806 || GET_MODE (target) != V2HImode
4807 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4808 target = gen_reg_rtx (tmode);
4809 if (! register_operand (op0, GET_MODE (op0)))
4810 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4811 if (! register_operand (op1, GET_MODE (op1)))
4812 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4814 tmp1 = gen_reg_rtx (SImode);
4815 tmp2 = gen_reg_rtx (SImode);
4816 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
4817 emit_move_insn (tmp2, gen_lowpart (SImode, op2));
4818 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
4819 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
4820 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
4821 const0_rtx, const0_rtx,
4822 const1_rtx, accvec, const0_rtx,
4823 const0_rtx,
4824 GEN_INT (MACFLAG_W32)));
4825 tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
4826 tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
4827 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4828 const1_rtx, const1_rtx,
4829 const0_rtx, accvec, tmp1, tmp2,
4830 GEN_INT (MACFLAG_NONE), accvec));
4832 return target;
4834 default:
4835 break;
4838 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4839 if (d->code == fcode)
4840 return bfin_expand_binop_builtin (d->icode, exp, target,
4841 d->macflag);
4843 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4844 if (d->code == fcode)
4845 return bfin_expand_unop_builtin (d->icode, exp, target);
4847 gcc_unreachable ();
/* Initialize the GCC target structure: point each target hook at the
   Blackfin-specific implementation defined in this file, then define
   TARGETM from the resulting macro set.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

/* Promote small arguments and return values to word size.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

/* The target vector consumed by the rest of the compiler.  */
struct gcc_target targetm = TARGET_INITIALIZER;