/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "target-def.h"
#include "diagnostic-core.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-constrs.h"
#include "sel-sched.h"
#include "hw-doloop.h"
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct GTY(()) machine_function
{
  /* Set if we are notified by the doloop pass that a hardware loop
     was created.  */
  int has_hardware_loops;

  /* Set if we create a memcpy pattern that uses loop registers.  */
  int has_loopreg_clobber;
};
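/* Explanatory note (added; not part of the original source): both flags
   are tested through cfun->machine in the prologue/epilogue code below,
   for example

     if (cfun->machine->has_hardware_loops
         || cfun->machine->has_loopreg_clobber)
       ... save/restore LT0/LT1, LB0/LB1, LC0/LC1 ...

   so the hardware loop registers are only spilled when a hardware loop
   or a loop-register-clobbering memcpy pattern actually exists.  */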
/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

static int arg_regs[] = FUNCTION_ARG_REGISTERS;
static int ret_regs[] = FUNCTION_RETURN_REGISTERS;

int splitting_for_sched, splitting_loops;
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
static void
output_file_start (void)
{
  FILE *file = asm_out_file;
  int i;

  fprintf (file, ".file \"%s\";\n", LOCATION_FILE (input_location));

  for (i = 0; arg_regs[i] >= 0; i++)
    ;
  max_arg_registers = i;	/* how many arg reg used  */
}
/* Examine machine-dependent attributes of function type FUNTYPE and return its
   type.  See the definition of E_FUNKIND.  */

static e_funkind
funkind (const_tree funtype)
{
  tree attrs = TYPE_ATTRIBUTES (funtype);
  if (lookup_attribute ("interrupt_handler", attrs))
    return INTERRUPT_HANDLER;
  else if (lookup_attribute ("exception_handler", attrs))
    return EXCPT_HANDLER;
  else if (lookup_attribute ("nmi_handler", attrs))
    return NMI_HANDLER;
  else
    return SUBROUTINE;
}
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new_rtx = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new_rtx = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new_rtx);
      if (picreg == pic_offset_table_rtx)
	crtl->uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new_rtx;
}
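/* Illustrative sketch (added commentary, not from the original source):
   for a global symbol `foo' under -mid-shared-library, the code above
   builds RTL of the shape

     (set (reg Pn)
          (mem (plus (reg pic) (unspec [foo] UNSPEC_MOVE_PIC))))

   which print_operand later renders as a GOT load such as
   "Pn = [P5 + foo@GOT];".  Register numbers here are illustrative.  */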
/* Stack frame layout. */

/* For a given REGNO, determine whether it must be saved in the function
   prologue.  IS_INTHANDLER specifies whether we're generating a normal
   prologue or an interrupt/exception one.  */
static bool
must_save_p (bool is_inthandler, unsigned regno)
{
  if (D_REGNO_P (regno))
    {
      bool is_eh_return_reg = false;
      if (crtl->calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == regno)
		is_eh_return_reg = true;
	    }
	}

      return (is_eh_return_reg
	      || (df_regs_ever_live_p (regno)
		  && !fixed_regs[regno]
		  && (is_inthandler || !call_used_regs[regno])));
    }
  else if (P_REGNO_P (regno))
    {
      return ((df_regs_ever_live_p (regno)
	       && !fixed_regs[regno]
	       && (is_inthandler || !call_used_regs[regno]))
	      || (is_inthandler
		  && (ENABLE_WA_05000283 || ENABLE_WA_05000315)
		  && regno == REG_P5)
	      || (!TARGET_FDPIC
		  && regno == PIC_OFFSET_TABLE_REGNUM
		  && (crtl->uses_pic_offset_table
		      || (TARGET_ID_SHARED_LIBRARY && !crtl->is_leaf))));
    }
  else
    return ((is_inthandler || !call_used_regs[regno])
	    && (df_regs_ever_live_p (regno)
		|| (!leaf_function_p () && call_used_regs[regno])));
}
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.
   If CONSECUTIVE, return the number of registers we can save in one
   instruction with a push/pop multiple instruction.  */

static int
n_dregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_R7 + 1; i-- != REG_R0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}
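/* Added example: the push-multiple instruction can only name a block of
   registers that ends at R7 (P5 for the preg variant below).  With
   CONSECUTIVE true, the scan above starts at R7 and stops at the first
   register that need not be saved; e.g. if R5, R6 and R7 must be saved
   but R4 need not be, the result is 3, and any lower registers still
   needing a save are pushed individually by expand_prologue_reg_save.  */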
/* Like n_dregs_to_save, but compute number of PREGS to save.  */

static int
n_pregs_to_save (bool is_inthandler, bool consecutive)
{
  int count = 0;
  unsigned i;

  for (i = REG_P5 + 1; i-- != REG_P0;)
    {
      if (must_save_p (is_inthandler, i))
	count++;
      else if (consecutive)
	return count;
    }
  return count;
}
/* Determine if we are going to save the frame pointer in the prologue.  */

static bool
must_save_fp_p (void)
{
  return df_regs_ever_live_p (REG_FP);
}

/* Determine if we are going to save the RETS register.  */
static bool
must_save_rets_p (void)
{
  return df_regs_ever_live_p (REG_RETS);
}

static bool
stack_frame_needed_p (void)
{
  /* EH return puts a new return address into the frame using an
     address relative to the frame pointer.  */
  if (crtl->calls_eh_return)
    return true;
  return frame_pointer_needed;
}
/* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
{
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int dregno, pregno;
  int total_consec = ndregs_consec + npregs_consec;
  int i, d_to_save;

  if (saveall || is_inthandler)
    {
      rtx_insn *insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));

      RTX_FRAME_RELATED_P (insn) = 1;
      for (dregno = REG_LT0; dregno <= REG_LB1; dregno++)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257
		&& (dregno == REG_LC0 || dregno == REG_LC1)))
	  {
	    insn = emit_move_insn (predec, gen_rtx_REG (SImode, dregno));
	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  if (total_consec != 0)
    {
      rtx_insn *insn;
      rtx val = GEN_INT (-total_consec * 4);
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 2));

      XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
					    UNSPEC_PUSH_MULTIPLE);
      XVECEXP (pat, 0, total_consec + 1) = gen_rtx_SET (VOIDmode, spreg,
							gen_rtx_PLUS (Pmode,
								      spreg,
								      val));
      RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total_consec + 1)) = 1;
      d_to_save = ndregs_consec;
      dregno = REG_R7 + 1 - ndregs_consec;
      pregno = REG_P5 + 1 - npregs_consec;
      for (i = 0; i < total_consec; i++)
	{
	  rtx memref = gen_rtx_MEM (word_mode,
				    gen_rtx_PLUS (Pmode, spreg,
						  GEN_INT (- i * 4 - 4)));
	  rtx subpat;
	  if (d_to_save > 0)
	    {
	      subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								   dregno++));
	      d_to_save--;
	    }
	  else
	    subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
								 pregno++));
	  XVECEXP (pat, 0, i + 1) = subpat;
	  RTX_FRAME_RELATED_P (subpat) = 1;
	}
      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  for (dregno = REG_R0; ndregs != ndregs_consec; dregno++)
    {
      if (must_save_p (is_inthandler, dregno))
	{
	  rtx_insn *insn =
	    emit_move_insn (predec, gen_rtx_REG (word_mode, dregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  ndregs--;
	}
    }
  for (pregno = REG_P0; npregs != npregs_consec; pregno++)
    {
      if (must_save_p (is_inthandler, pregno))
	{
	  rtx_insn *insn =
	    emit_move_insn (predec, gen_rtx_REG (word_mode, pregno));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  npregs--;
	}
    }
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	rtx_insn *insn;
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }
}
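/* Added commentary: the PARALLEL built above matches the push-multiple
   patterns in bfin.md and assembles to a single instruction such as

     [--SP] = (R7:5, P5:4);

   (operands illustrative).  The UNSPEC_PUSH_MULTIPLE element carries the
   total adjustment and the final SET records the new SP value for the
   unwind information.  */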
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler, false);
  int ndregs_consec = saveall ? 8 : n_dregs_to_save (is_inthandler, true);
  int npregs_consec = saveall ? 6 : n_pregs_to_save (is_inthandler, true);
  int total_consec = ndregs_consec + npregs_consec;
  int i, regno;
  rtx_insn *insn;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  for (i = REG_CC - 1; i > REG_P7; i--)
    if (saveall
	|| (is_inthandler
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  regno = REG_P5 - npregs_consec;
  for (; npregs != npregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  npregs--;
	}
    }
  regno = REG_R7 - ndregs_consec;
  for (; ndregs != ndregs_consec; regno--)
    {
      if (must_save_p (is_inthandler, regno))
	{
	  emit_move_insn (gen_rtx_REG (word_mode, regno), postinc);
	  ndregs--;
	}
    }

  if (total_consec != 0)
    {
      rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_consec + 1));
      XVECEXP (pat, 0, 0)
	= gen_rtx_SET (VOIDmode, spreg,
		       gen_rtx_PLUS (Pmode, spreg,
				     GEN_INT (total_consec * 4)));

      if (npregs_consec > 0)
	regno = REG_P5 + 1;
      else
	regno = REG_R7 + 1;

      for (i = 0; i < total_consec; i++)
	{
	  rtx addr = (i > 0
		      ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		      : spreg);
	  rtx memref = gen_rtx_MEM (word_mode, addr);

	  regno--;
	  XVECEXP (pat, 0, i + 1)
	    = gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

	  if (npregs_consec > 0)
	    {
	      if (--npregs_consec == 0)
		regno = REG_R7 + 1;
	    }
	}

      insn = emit_insn (pat);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  if (saveall || is_inthandler)
    {
      for (regno = REG_LB1; regno >= REG_LT0; regno--)
	if (! crtl->is_leaf
	    || cfun->machine->has_hardware_loops
	    || cfun->machine->has_loopreg_clobber
	    || (ENABLE_WA_05000257 && (regno == REG_LC0 || regno == REG_LC1)))
	  emit_move_insn (gen_rtx_REG (SImode, regno), postinc);

      emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
    }
}
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we don't have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (cumulative_args_t cum,
			machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
    {
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (Pmode, arg_pointer_rtx,
					(i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  *pretend_size = 0;
}
/* Value should be nonzero if functions must have frame pointers.
   Zero means the frame pointer need not be set up (and parms may
   be accessed via the stack pointer) in functions that seem suitable.  */

static bool
bfin_frame_pointer_required (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    return true;

  /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
     so we have to override it for non-leaf functions.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER && ! crtl->is_leaf)
    return true;

  return false;
}
/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !crtl->is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler, false);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler, false);
  int n = ndregs + npregs;
  int i;

  if (all || stack_frame_needed_p ())
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (must_save_rets_p ())
	n++;
    }

  if (fkind != SUBROUTINE || all)
    {
      /* Increment once for ASTAT.  */
      n++;
      if (! crtl->is_leaf
	  || cfun->machine->has_hardware_loops
	  || cfun->machine->has_loopreg_clobber)
	n += 6;
    }

  if (fkind != SUBROUTINE)
    {
      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;
    }

  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| (fkind != SUBROUTINE
	    && (df_regs_ever_live_p (i)
		|| (!leaf_function_p () && call_used_regs[i]))))
      n += i == REG_A0 || i == REG_A1 ? 2 : 1;

  return n;
}
/* Given FROM and TO register numbers, say whether this elimination is
   allowed.  Frame pointer elimination is automatically handled.

   All other eliminations are valid.  */

static bool
bfin_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
}

/* Return the offset between two registers, one to be eliminated, and the other
   its replacement, at the start of a routine.  */

HOST_WIDE_INT
bfin_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  if (from == ARG_POINTER_REGNUM)
    offset = n_regs_saved_by_prologue () * 4;

  if (to == STACK_POINTER_REGNUM)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	offset += crtl->outgoing_args_size;
      else if (crtl->outgoing_args_size)
	offset += FIXED_STACK_AREA;

      offset += get_frame_size ();
    }

  return offset;
}
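/* Added worked example (numbers illustrative): if the prologue saves
   three registers, the function has 16 bytes of locals and no outgoing
   arguments, then eliminating ARG_POINTER_REGNUM into
   STACK_POINTER_REGNUM yields 3 * 4 + 16 = 28 bytes; any nonzero
   outgoing-args area is rounded up to at least FIXED_STACK_AREA first.  */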
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx_insn *insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
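/* Added note: constants outside the single-move range handled above are
   materialized as two half-word moves, e.g. (value illustrative)

     reg.h = 0x1234;
     reg.l = 0x5678;

   via the movsi_high/movsi_low patterns, so no later splitting is
   required.  */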
/* Generate efficient code to add a value to a P register.
   Set RTX_FRAME_RELATED_P on the generated insns if FRAME is nonzero.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
add_to_reg (rtx reg, HOST_WIDE_INT value, int frame, int epilogue_p)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7-bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      rtx tmpreg;
      rtx tmpreg2;
      rtx_insn *insn;

      tmpreg2 = NULL_RTX;

      /* For prologue or normal epilogue, P1 can be safely used
	 as the temporary register.  For sibcall epilogue, we try to find
	 a call used P register, which will be restored in epilogue.
	 If we cannot find such a P register, we have to use one I register
	 to help us.  */

      if (epilogue_p >= 0)
	tmpreg = gen_rtx_REG (SImode, REG_P1);
      else
	{
	  int i;
	  for (i = REG_P0; i <= REG_P5; i++)
	    if ((df_regs_ever_live_p (i) && ! call_used_regs[i])
		|| (!TARGET_FDPIC
		    && i == PIC_OFFSET_TABLE_REGNUM
		    && (crtl->uses_pic_offset_table
			|| (TARGET_ID_SHARED_LIBRARY
			    && ! crtl->is_leaf))))
	      break;
	  if (i <= REG_P5)
	    tmpreg = gen_rtx_REG (SImode, i);
	  else
	    {
	      tmpreg = gen_rtx_REG (SImode, REG_P1);
	      tmpreg2 = gen_rtx_REG (SImode, REG_I0);
	      emit_move_insn (tmpreg2, tmpreg);
	    }
	}

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	insn = emit_move_insn (tmpreg, GEN_INT (value));

      insn = emit_insn (gen_addsi3 (reg, reg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;

      if (tmpreg2 != NULL_RTX)
	emit_move_insn (tmpreg, tmpreg2);
    }
  else
    do
      {
	int size = value;
	rtx_insn *insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx_insn *insn;
  int i;

  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
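/* Added note: the "-8 - link_size" above folds in the 8 bytes of RETS
   and FP that the LINK instruction itself pushes; 262140 (0x3FFFC) is
   the largest frame allocation the link pattern accepts, so any
   remainder is subtracted from SP separately through the P1 scratch.  */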
/* Return the number of bytes we must reserve for outgoing arguments
   in the current function's stack frame.  */

static HOST_WIDE_INT
arg_area_size (void)
{
  if (crtl->outgoing_args_size)
    {
      if (crtl->outgoing_args_size >= FIXED_STACK_AREA)
	return crtl->outgoing_args_size;
      else
	return FIXED_STACK_AREA;
    }
  return 0;
}
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all
      || stack_frame_needed_p ()
      || (must_save_rets_p () && must_save_fp_p ()))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (must_save_rets_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx_insn *insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx_insn *insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_reg (spreg, -frame_size, 1, 0);
    }
}
/* Like do_link, but used for epilogues to deallocate the stack frame.
   EPILOGUE_P is zero if this function is called for prologue,
   otherwise it's nonzero.  And it's less than zero if this is for
   sibcall epilogue.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all, int epilogue_p)
{
  frame_size += arg_area_size ();

  if (stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_reg (spreg, frame_size, 0, epilogue_p);
      if (all || must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  emit_use (fpreg);
	}
      if (all || must_save_rets_p ())
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_use (bfin_rets_rtx);
	}
    }
}
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind, bool all)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx_insn *insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!crtl->is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  if (ENABLE_WA_05000283 || ENABLE_WA_05000315)
    {
      rtx chipid = GEN_INT (trunc_int_for_mode (0xFFC00014, SImode));
      rtx p5reg = gen_rtx_REG (Pmode, REG_P5);
      emit_insn (gen_movbi (bfin_cc_rtx, const1_rtx));
      emit_insn (gen_movsi_high (p5reg, chipid));
      emit_insn (gen_movsi_low (p5reg, p5reg, chipid));
      emit_insn (gen_dummy_load (p5reg, bfin_cc_rtx));
    }

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);

      emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      emit_move_insn (r1reg, spreg);
      emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
    }
}
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind, bool all)
{
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all, 1);

  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, ret_regs[fkind]);
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!crtl->is_leaf)
    all = true;

  expand_epilogue_reg_restore (spreg, all, true);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, ret_regs[fkind])));
}
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr;

  i = cgraph_node::local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  if (global_options_set.x_bfin_library_id)
    addr = plus_constant (Pmode, pic_offset_table_rtx,
			  -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  return dest;
}
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind, all);
      return;
    }

  if (crtl->limit_stack
      || (TARGET_STACK_CHECK_L1
	  && !DECL_NO_LIMIT_STACK (current_function_decl)))
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = crtl->limit_stack ? stack_limit_rtx : NULL_RTX;
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);
      rtx p2reg = gen_rtx_REG (Pmode, REG_P2);

      emit_move_insn (tmp, p2reg);
      if (!lim)
	{
	  emit_move_insn (p2reg, gen_int_mode (0xFFB00000, SImode));
	  emit_move_insn (p2reg, gen_rtx_MEM (Pmode, p2reg));
	  lim = p2reg;
	}
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (Pmode, lim, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      else
	{
	  if (lim != p2reg)
	    emit_move_insn (p2reg, lim);
	  add_to_reg (p2reg, offset, 0, 0);
	  lim = p2reg;
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
      emit_move_insn (p2reg, tmp);
    }
  expand_prologue_reg_save (spreg, all, false);

  do_link (spreg, frame_size, all);

  if (TARGET_ID_SHARED_LIBRARY
      && !TARGET_SEP_DATA
      && (crtl->uses_pic_offset_table
	  || !crtl->is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  SIBCALL_P is true if this is a sibcall epilogue,
   false otherwise.  */

void
bfin_expand_epilogue (int need_return, int eh_return, bool sibcall_p)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  int e = sibcall_p ? -1 : 1;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind, all);
      return;
    }

  do_unlink (spreg, get_frame_size (), all, e);

  expand_epilogue_reg_restore (spreg, all, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, REG_RETS)));
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
			   unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */
  if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
      && !df_regs_ever_live_p (new_reg))
    return 0;

  return 1;
}
/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  */
static void
bfin_extra_live_on_entry (bitmap regs)
{
  if (TARGET_FDPIC)
    bitmap_set_bit (regs, FDPIC_REGNO);
}
/* Return the value of the return address for the frame COUNT steps up
   from the current frame, after the prologue.
   We punt for everything but the current frame by returning const0_rtx.  */

rtx
bfin_return_addr_rtx (int count)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, REG_RETS);
}
static rtx
bfin_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == UNSPEC
      && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    return XVECEXP (XEXP (x, 1), 0, 0);

  return orig_x;
}
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32-bit instruction.  */

int
effective_address_32bit_p (rtx op, machine_mode mode)
{
  HOST_WIDE_INT offset;

  mode = GET_MODE (op);
  op = XEXP (op, 0);

  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  if (GET_CODE (XEXP (op, 1)) == UNSPEC)
    return 1;

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16-bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
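/* Added examples (illustrative): "R0 = [P0 + 40];" fits the short
   16-bit encoding, while "R0 = [P0 + 64];" needs the 32-bit form
   because the SImode offset exceeds 60; an HImode access with an offset
   greater than 30, or a negative offset off anything but FP, likewise
   forces the long form.  */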
/* Returns true if X is a memory reference using an I register.  */
bool
bfin_dsp_memref_p (rtx x)
{
  if (! MEM_P (x))
    return false;
  x = XEXP (x, 0);
  if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
      || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);
  return IREG_P (x);
}
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Subroutine of print_operand; used to print a memory reference X to FILE.  */

void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
/* Adding intp DImode support by Tony
 * -- Q: (low  word)
 * -- R: (high word)
 */

void
print_operand (FILE *file, rtx x, char code)
{
  machine_mode mode;

  if (code == '!')
    {
      if (GET_MODE (current_output_insn) == SImode)
	fprintf (file, " ||");
      else
	fprintf (file, ";");
      return;
    }

  mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					/* reverse logic */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  if (code == 'h')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'd')
	    {
	      if (REGNO (x) < 32)
		fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'w')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.w", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'x')
	    {
	      if (REGNO (x) == REG_A0 || REGNO (x) == REG_A1)
		fprintf (file, "%s.x", reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'v')
	    {
	      if (REGNO (x) == REG_A0)
		fprintf (file, "AV0");
	      else if (REGNO (x) == REG_A1)
		fprintf (file, "AV1");
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'D')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'H')
	    {
	      if ((mode == DImode || mode == DFmode) && REG_P (x))
		fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else if (code == 'T')
	    {
	      if (D_REGNO_P (REGNO (x)))
		fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	      else
		output_operand_lossage ("invalid operand for code '%c'", code);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x, 0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_IS_M:
		  fputs ("(IS,M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'N')
	    x = GEN_INT (-INTVAL (x));
	  else if (code == 'X')
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
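/* Added usage note: in bfin.md templates these codes select operand
   pieces, e.g. "%h0" prints the low half and "%d0" the high half of a
   data register (R0.L/R0.H style names from the tables above), and
   "%H0" names the second word of a DImode register pair.  Examples are
   illustrative.  */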
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   VDSP C Compiler manual, our ABI says that
   first 3 words of arguments will use R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
		      rtx libname ATTRIBUTE_UNUSED)
{
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
bfin_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      for (count = 1; count <= words; count++)
	cum->arg_regs++;
    }

  return;
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
bfin_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

static int
bfin_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;

  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   TARGET_RETURN_IN_MEMORY.  */

static bool
bfin_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}
/* Register in which address to store a structure value
   is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
/* Return true when register may be used to pass function parameters.  */

bool
function_arg_regno_p (int n)
{
  int i;
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return true;
  return false;
}
/* Returns 1 if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  struct cgraph_local_info *this_func, *called_func;
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  if (fkind != SUBROUTINE)
    return false;
  if (!TARGET_ID_SHARED_LIBRARY || TARGET_SEP_DATA)
    return true;

  /* When compiling for ID shared libraries, can't sibcall a local function
     from a non-local function, because the local function thinks it does
     not need to reload P5 in the prologue, but the sibcall will pop P5 in the
     sibcall epilogue, and we end up with the wrong value in P5.  */

  if (!decl)
    /* Not enough information.  */
    return false;

  this_func = cgraph_node::local_info (current_function_decl);
  called_func = cgraph_node::local_info (decl);
  if (!called_func)
    return false;
  return !called_func->local || this_func->local;
}
/* Write a template for a trampoline to F.  */

static void
bfin_asm_trampoline_template (FILE *f)
{
  if (TARGET_FDPIC)
    {
      fprintf (f, "\t.dd\t0x00000000\n");	/* 0 */
      fprintf (f, "\t.dd\t0x00000000\n");	/* 0 */
      fprintf (f, "\t.dd\t0x0000e109\n");	/* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");	/* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");	/* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");	/* p2.h = sc high */
      fprintf (f, "\t.dw\t0xac4b\n");		/* p3 = [p1 + 4] */
      fprintf (f, "\t.dw\t0x9149\n");		/* p1 = [p1] */
      fprintf (f, "\t.dw\t0x0051\n");		/* jump (p1)*/
    }
  else
    {
      fprintf (f, "\t.dd\t0x0000e109\n");	/* p1.l = fn low */
      fprintf (f, "\t.dd\t0x0000e149\n");	/* p1.h = fn high */
      fprintf (f, "\t.dd\t0x0000e10a\n");	/* p2.l = sc low */
      fprintf (f, "\t.dd\t0x0000e14a\n");	/* p2.h = sc high */
      fprintf (f, "\t.dw\t0x0051\n");		/* jump (p1)*/
    }
}
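/* Added commentary: the .dd/.dw words above are Blackfin opcodes whose
   16-bit immediate fields are left zero in the template;
   bfin_trampoline_init below patches the halves of the function address
   into the "p1.l"/"p1.h" slots and the static chain value into the
   "p2.l"/"p2.h" slots at the offsets it computes.  */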
/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is the target function.  CHAIN_VALUE is an RTX for
   the static chain value for the function.  */

static void
bfin_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx t1 = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  rtx t2 = copy_to_reg (chain_value);
  rtx mem;
  int i = 0;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_FDPIC)
    {
      rtx a = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0), 8));
      mem = adjust_address (m_tramp, Pmode, 0);
      emit_move_insn (mem, a);
      i = 8;
    }

  mem = adjust_address (m_tramp, HImode, i + 2);
  emit_move_insn (mem, gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 6);
  emit_move_insn (mem, gen_lowpart (HImode, t1));

  mem = adjust_address (m_tramp, HImode, i + 10);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  mem = adjust_address (m_tramp, HImode, i + 14);
  emit_move_insn (mem, gen_lowpart (HImode, t2));
}
/* Emit insns to move operands[1] into operands[0].  */

void
emit_pic_move (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.
   Returns true if no further code must be generated, false if the caller
   should generate an insn to move OPERANDS[1] to OPERANDS[0].  */

bool
expand_move (rtx *operands, machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  else if (mode == SImode && GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && !targetm.legitimate_constant_p (mode, op))
    {
      rtx dest = operands[0];
      rtx op0, op1;
      gcc_assert (!reload_in_progress && !reload_completed);
      op = XEXP (op, 0);
      op0 = force_reg (mode, XEXP (op, 0));
      op1 = XEXP (op, 1);
      if (!insn_data[CODE_FOR_addsi3].operand[2].predicate (op1, mode))
	op1 = force_reg (mode, op1);
      if (GET_CODE (dest) == MEM)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_addsi3 (dest, op0, op1));
      if (dest == operands[0])
	return true;
      operands[1] = dest;
    }
  /* Don't generate memory->memory or constant->memory moves, go through a
     register.  */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
  return false;
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (SYMBOL_REF_WEAK (op))
    return 1;
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  int nelts = 3;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  rtx retsreg = gen_rtx_REG (Pmode, REG_RETS);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      int caller_in_sram, callee_in_sram;

      /* 0 is not in sram, 1 is in L1 sram, 2 is in L2 sram.  */
      caller_in_sram = callee_in_sram = 0;

      if (lookup_attribute ("l1_text",
			    DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 1;
      else if (lookup_attribute ("l2",
				 DECL_ATTRIBUTES (cfun->decl)) != NULL_TREE)
	caller_in_sram = 2;

      if (GET_CODE (callee) == SYMBOL_REF
	  && SYMBOL_REF_DECL (callee) && DECL_P (SYMBOL_REF_DECL (callee)))
	{
	  if (lookup_attribute
	      ("l1_text",
	       DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 1;
	  else if (lookup_attribute
		   ("l2",
		    DECL_ATTRIBUTES (SYMBOL_REF_DECL (callee))) != NULL_TREE)
	    callee_in_sram = 2;
	}

      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie))
	  || (GET_CODE (callee) == SYMBOL_REF
	      && !SYMBOL_REF_LOCAL_P (callee)
	      && TARGET_INLINE_PLT)
	  || caller_in_sram != callee_in_sram
	  || (caller_in_sram && callee_in_sram
	      && (GET_CODE (callee) != SYMBOL_REF
		  || !SYMBOL_REF_LOCAL_P (callee))))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (Pmode, addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && ((TARGET_ID_SHARED_LIBRARY && !TARGET_LEAF_ID_SHARED_LIBRARY)
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = ret_rtx;
  else
    XVECEXP (pat, 0, n++) = gen_rtx_CLOBBER (VOIDmode, retsreg);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class rclass = REGNO_REG_CLASS (regno);

  if (mode == VOIDmode)
    return 1;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (rclass == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;

  /* Allow all normal 32-bit regs, except REG_M3, in case regclass ever comes
     up with a bad register class (such as ALL_REGS) for DImode.  */
  if (mode == DImode)
    return regno < REG_M3;

  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (machine_mode mode)
{
  return mode == V2HImode;
}
/* Worker function for TARGET_REGISTER_MOVE_COST.  */

static int
bfin_register_move_cost (machine_mode mode,
			 reg_class_t class1, reg_class_t class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && !reg_class_subset_p (class2, DREGS))
      || (class2 == CCREGS && !reg_class_subset_p (class1, DREGS)))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* Discourage trying to use the accumulators.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class1], REG_A1)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A0)
	  || TEST_HARD_REG_BIT (reg_class_contents[class2], REG_A1))
	return 20;
    }
  return 2;
}
/* Worker function for TARGET_MEMORY_MOVE_COST.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

static int
bfin_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
		       reg_class_t rclass,
		       bool in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (rclass, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static reg_class_t
bfin_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);
  enum reg_class rclass = (enum reg_class) rclass_i;

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! satisfies_constraint_Ks7 (op2);

      if (rclass == PREGS || rclass == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((rclass == DREGS || rclass == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS || x_class == EVEN_AREGS || x_class == ODD_AREGS)
    return (rclass == DREGS || rclass == AREGS || rclass == EVEN_AREGS
	    || rclass == ODD_AREGS
	    ? NO_REGS : DREGS);

  if (rclass == AREGS || rclass == EVEN_AREGS || rclass == ODD_AREGS)
    {
      if (code == MEM)
	{
	  sri->icode = in_p ? CODE_FOR_reload_inpdi : CODE_FOR_reload_outpdi;
	  return NO_REGS;
	}

      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (rclass == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && rclass != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (rclass, default_class))
      return default_class;

  return NO_REGS;
}
/* Implement TARGET_CLASS_LIKELY_SPILLED_P.  */

static bool
bfin_class_likely_spilled_p (reg_class_t rclass)
{
  switch (rclass)
    {
      case PREGS_CLOBBERED:
      case PROLOGUE_REGS:
      case P0REGS:
      case D0REGS:
      case D1REGS:
      case D2REGS:
      case CCREGS:
	return true;

      default:
	break;
    }

  return false;
}
static struct machine_function *
bfin_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
bfin_option_override (void)
{
  /* If processor type is not specified, enable all workarounds.  */
  if (bfin_cpu_type == BFIN_CPU_UNKNOWN)
    {
      int i;

      for (i = 0; bfin_cpus[i].name != NULL; i++)
	bfin_workarounds |= bfin_cpus[i].workarounds;

      bfin_si_revision = 0xffff;
    }

  if (bfin_csync_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_SYNCS;
  else if (bfin_csync_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_SYNCS;

  if (bfin_specld_anomaly == 1)
    bfin_workarounds |= WA_SPECULATIVE_LOADS;
  else if (bfin_specld_anomaly == 0)
    bfin_workarounds &= ~WA_SPECULATIVE_LOADS;

  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

#ifdef SUBTARGET_FDPIC_NOT_SUPPORTED
  if (TARGET_FDPIC)
    error ("-mfdpic is not supported, please use a bfin-linux-uclibc target");
#endif

  /* Library identification */
  if (global_options_set.x_bfin_library_id && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (stack_limit_rtx && TARGET_FDPIC)
    {
      warning (0, "-fstack-limit- options are ignored with -mfdpic; use -mstack-check-l1");
      stack_limit_rtx = NULL_RTX;
    }

  if (stack_limit_rtx && TARGET_STACK_CHECK_L1)
    error ("can%'t use multiple stack checking methods together");

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can%'t be used together");

  /* Don't allow the user to specify -mid-shared-library and -msep-data
     together, as it makes little sense from a user's point of view...  */
  if (TARGET_SEP_DATA && TARGET_ID_SHARED_LIBRARY)
    error ("cannot specify both -msep-data and -mid-shared-library");
  /* ... internally, however, it's nearly the same.  */
  if (TARGET_SEP_DATA)
    target_flags |= MASK_ID_SHARED_LIBRARY | MASK_LEAF_ID_SHARED_LIBRARY;

  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  if (TARGET_MULTICORE && bfin_cpu_type != BFIN_CPU_BF561)
    error ("-mmulticore can only be used with BF561");

  if (TARGET_COREA && !TARGET_MULTICORE)
    error ("-mcorea should be used with -mmulticore");

  if (TARGET_COREB && !TARGET_MULTICORE)
    error ("-mcoreb should be used with -mmulticore");

  if (TARGET_COREA && TARGET_COREB)
    error ("-mcorea and -mcoreb can%'t be used together");

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx_insn *branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}
/* Templates for use by asm_conditional_branch.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
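/* For reference: asm_conditional_branch below selects the row as
   idx = (bp << 1) | (EQ ? BRF : BRT), and the column by branch length:
   0 for a short conditional jump, 1 for a jump.s sequence, 2 for a
   jump.l sequence.  */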
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx_insn *insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094).  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
             : offset >= -4094 && offset <= 4096 ? 1
             : 2);
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

rtx
bfin_gen_compare (rtx cmp, machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = XEXP (cmp, 0), op1 = XEXP (cmp, 1);
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
        /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
        code1 = code;
        code2 = NE;
        break;
      default:
        code1 = reverse_condition (code);
        code2 = EQ;
        break;
      }
      emit_insn (gen_rtx_SET (VOIDmode, tem,
                              gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
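/* For example, GT is not one of the conditions the hardware provides;
   it is emitted above as CC = op0 <= op1 (the reversed condition), and
   the returned rtx then tests CC == 0.  */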
/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32-bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c - 1)) == 0;
}
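/* For example, log2constp (0x00010000) is true, since exactly one bit
   is set; log2constp (0x00018000) is false, since two bits are set.  */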
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
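/* For example, with *v == 0x50 (binary 1010000), shiftr_zero returns 4
   and leaves *v == 0x5.  */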
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
          || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
          emit_insn (gen_iorsi3 (operands[0], operands[0],
                                 gen_int_mode (val & 0xFFFF0000, SImode)));
          return 1;
        }
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_andsi3 (operands[0], operands[0],
                                 gen_int_mode (val | 0xFFFF, SImode)));
          return 1;
        }
    }

  if (D_REGNO_P (regno))
    {
      if (tmp >= -64 && tmp <= 63)
        {
          emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
          emit_insn (gen_movstricthi_high (operands[0],
                                           gen_int_mode (val & -65536,
                                                         SImode)));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0)
        {
          emit_insn (gen_movsi (operands[0], const0_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }

      if ((val & 0xFFFF0000) == 0xFFFF0000)
        {
          emit_insn (gen_movsi (operands[0], constm1_rtx));
          emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
          return 1;
        }
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && shifted_compl >= -64 && shifted_compl <= 63)
    {
      /* If optimizing for size, generate a sequence that has more instructions
         but is shorter.  */
      emit_insn (gen_movsi (operands[0], gen_int_mode (shifted_compl, SImode)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
                              GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
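/* For example, loading 0x50000 into a D register takes the first case
   above: shiftr_zero reduces it to 5 with num_zero == 16, so we emit a
   move of 5 followed by a left shift by 16 instead of a full 32-bit
   constant load.  */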
/* Return true if VALUE is a legitimate constant offset for a memory operand
   of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  if (value > 0 && sz == 8)
    v += 0x10000;
  return (v & ~(0x7fff << shift)) == 0;
}
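/* For example, for SImode (sz == 4, shift == 2), valid offsets are
   multiples of 4 with magnitude at most 0x7fff << 2.  */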
static bool
bfin_valid_reg_p (unsigned int regno, int strict, machine_mode mode,
                  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}
/* Recognize an RTL expression that is a valid memory address for an
   instruction.  The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   Blackfin addressing modes include, among others:

      W [ Preg + uimm16m2 ]

   plus register, register-plus-offset, post-increment, post-decrement
   and [--SP] forms, as validated below.  */

static bool
bfin_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    if (REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
        && ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
            || (GET_CODE (XEXP (x, 1)) == CONST_INT
                && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    break;
  case PRE_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
        && XEXP (x, 0) == stack_pointer_rtx
        && REG_P (XEXP (x, 0))
        && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.  */

static bool
bfin_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx x ATTRIBUTE_UNUSED)
{
  /* We have only one class of non-legitimate constants, and our movsi
     expander knows how to handle them.  Dropping these constants into the
     data section would only shift the problem - we'd still get relocs
     outside the object, in the data section rather than the text section.  */
  return true;
}
/* Ensure that for any constant of the form symbol + offset, the offset
   remains within the object.  Any other constants are ok.
   This ensures that flat binaries never have to deal with relocations
   crossing section boundaries.  */

static bool
bfin_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  rtx sym;
  HOST_WIDE_INT offset;

  if (GET_CODE (x) != CONST)
    return true;

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == PLUS);

  sym = XEXP (x, 0);
  x = XEXP (x, 1);
  if (GET_CODE (sym) != SYMBOL_REF
      || GET_CODE (x) != CONST_INT)
    return true;
  offset = INTVAL (x);

  if (SYMBOL_REF_DECL (sym) == 0)
    return true;
  if (offset < 0
      || offset >= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym))))
    return false;

  return true;
}
static bool
bfin_rtx_costs (rtx x, int code_i, int outer_code_i, int opno, int *total,
                bool speed)
{
  enum rtx_code code = (enum rtx_code) code_i;
  enum rtx_code outer_code = (enum rtx_code) outer_code_i;
  int cost2 = COSTS_N_INSNS (1);
  rtx op0, op1;

  switch (code)
    {
    case CONST_INT:
      if (outer_code == SET || outer_code == PLUS)
        *total = satisfies_constraint_Ks7 (x) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
               || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
        *total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_MODE (x) == SImode)
        {
          if (GET_CODE (op0) == MULT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              HOST_WIDE_INT val = INTVAL (XEXP (op0, 1));
              if (val == 2 || val == 4)
                {
                  *total = cost2;
                  *total += rtx_cost (XEXP (op0, 0), outer_code, opno, speed);
                  *total += rtx_cost (op1, outer_code, opno, speed);
                  return true;
                }
            }
          *total = cost2;
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += set_src_cost (op0, speed);
#if 0 /* We'd like to do this for accuracy, but it biases the loop optimizer
         towards creating too many induction variables.  */
          if (!reg_or_7bit_operand (op1, SImode))
            *total += set_src_cost (op1, speed);
#endif
        }
      else if (GET_MODE (x) == DImode)
        {
          *total = 6 * cost2;
          if (GET_CODE (op1) != CONST_INT
              || !satisfies_constraint_Ks7 (op1))
            *total += rtx_cost (op1, PLUS, 1, speed);
          if (GET_CODE (op0) != REG
              && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
            *total += rtx_cost (op0, PLUS, 0, speed);
        }
      return true;

    case MINUS:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (GET_MODE (x) == DImode)
        *total = 6 * cost2;
      else
        *total = cost2;

      op0 = XEXP (x, 0);
      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      return true;

    case IOR:
    case AND:
    case XOR:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);

      /* Handle special cases of IOR: rotates, ALIGN insns, movstricthi_high.  */
      if (code == IOR)
        {
          if ((GET_CODE (op0) == LSHIFTRT && GET_CODE (op1) == ASHIFT)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == ZERO_EXTEND)
              || (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
              || (GET_CODE (op0) == AND && GET_CODE (op1) == CONST_INT))
            {
              *total = cost2;
              return true;
            }
        }

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, code, 0, speed);

      if (GET_MODE (x) == DImode)
        {
          *total = 2 * cost2;
          return true;
        }
      *total = cost2;
      if (GET_MODE (x) != SImode)
        return true;

      if (code == AND)
        {
          if (! rhs_andsi3_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }
      else
        {
          if (! regorlog2_operand (XEXP (x, 1), SImode))
            *total += rtx_cost (XEXP (x, 1), code, 1, speed);
        }

      return true;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      if (outer_code == SET
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (x, 2)) == CONST_INT)
        {
          *total = 2 * cost2;
          return true;
        }
      /* fall through */

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      *total = cost2;
      return true;

    case MULT:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      if (GET_CODE (op0) == GET_CODE (op1)
          && (GET_CODE (op0) == ZERO_EXTEND
              || GET_CODE (op0) == SIGN_EXTEND))
        {
          *total = COSTS_N_INSNS (1);
          op0 = XEXP (op0, 0);
          op1 = XEXP (op1, 0);
        }
      else if (!speed)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (3);

      if (GET_CODE (op0) != REG
          && (GET_CODE (op0) != SUBREG || GET_CODE (SUBREG_REG (op0)) != REG))
        *total += rtx_cost (op0, MULT, 0, speed);
      if (GET_CODE (op1) != REG
          && (GET_CODE (op1) != SUBREG || GET_CODE (SUBREG_REG (op1)) != REG))
        *total += rtx_cost (op1, MULT, 1, speed);
      return true;

    case UDIV:
    case UMOD:
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
        *total = cost2;
      return true;

    default:
      return false;
    }
}
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
static int n_regs_to_save;
static bool
analyze_push_multiple_operation (rtx op)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return false;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
        return false;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
          || ! REG_P (XEXP (dest, 0))
          || REGNO (XEXP (dest, 0)) != REG_SP
          || GET_CODE (XEXP (dest, 1)) != CONST_INT
          || INTVAL (XEXP (dest, 1)) != -i * 4)
        return false;

      regno = REGNO (src);
      if (group == 0)
        {
          if (D_REGNO_P (regno))
            {
              group = 1;
              first_dreg_to_save = lastdreg = regno - REG_R0;
            }
          else if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else
            return false;
        }
      else if (group == 1)
        {
          if (regno >= REG_P0 && regno <= REG_P7)
            {
              group = 2;
              first_preg_to_save = lastpreg = regno - REG_P0;
            }
          else if (regno != REG_R0 + lastdreg + 1)
            return false;
          else
            lastdreg++;
        }
      else if (group == 2)
        {
          if (regno != REG_P0 + lastpreg + 1)
            return false;
          lastpreg++;
        }
    }
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return true;
}
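/* For example, a multiple push saving R5-R7 and P4-P5 is a PARALLEL of
   SETs storing R5, R6, R7, P4, P5 at SP-4, SP-8, ..., SP-20, and is
   printed by output_push_multiple below as "[--sp] = ( r7:5, p5:4 );".  */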
static bool
analyze_pop_multiple_operation (rtx op)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
        return false;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
        return false;
      src = XEXP (src, 0);

      if (i == 1)
        {
          if (! REG_P (src) || REGNO (src) != REG_SP)
            return false;
        }
      else if (GET_CODE (src) != PLUS
               || ! REG_P (XEXP (src, 0))
               || REGNO (XEXP (src, 0)) != REG_SP
               || GET_CODE (XEXP (src, 1)) != CONST_INT
               || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
        return false;

      regno = REGNO (dest);
      if (group == 0)
        {
          if (regno == REG_R7)
            {
              group = 1;
              lastdreg = 7;
            }
          else if (regno != REG_P0 + lastpreg - 1)
            return false;
          else
            lastpreg--;
        }
      else if (group == 1)
        {
          if (regno != REG_R0 + lastdreg - 1)
            return false;
          else
            lastdreg--;
        }
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  n_regs_to_save = 8 - first_dreg_to_save + 6 - first_preg_to_save;
  return true;
}
/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_push_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save.  */
  ok = analyze_pop_multiple_operation (PATTERN (insn));
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
             first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);

      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
        return false;
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
        return false;
      if (count == 4 && align < 4)
        return false;
      if (count != 1 && count != 2 && count != 4)
        return false;
    }
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
        {
          if ((count & ~3) == 4)
            {
              single_move_for_movmem (dst, src, SImode, offset);
              offset = 4;
            }
          else if (count & ~3)
            {
              HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
          if (count & 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset += 2;
            }
        }
      else
        {
          if ((count & ~1) == 2)
            {
              single_move_for_movmem (dst, src, HImode, offset);
              offset = 2;
            }
          else if (count & ~1)
            {
              HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
              countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

              emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
              cfun->machine->has_loopreg_clobber = true;
            }
        }
      if (count & 1)
        single_move_for_movmem (dst, src, QImode, offset);
      return true;
    }
  return false;
}
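/* A sketch of the expansion: for a 10-byte copy with 4-byte alignment,
   a rep_movsi loop copies the first 8 bytes (its count operand is
   (10 >> 2) - 1, as computed above) and one HImode move copies the
   remaining 2 bytes.  */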
/* Compute the alignment for a local variable.
   TYPE is the data type, and ALIGN is the alignment that
   the object would ordinarily have.  The value of this macro is used
   instead of that alignment to align the object.  */

unsigned
bfin_local_alignment (tree type, unsigned align)
{
  /* Increasing alignment for (relatively) big types allows the builtin
     memcpy to use 32-bit loads/stores.  */
  if (TYPE_SIZE (type)
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && wi::gtu_p (TYPE_SIZE (type), 8)
      && align < 32)
    return 32;
  return align;
}
/* Implement TARGET_SCHED_ISSUE_RATE.  */

static int
bfin_issue_rate (void)
{
  return 3;
}
static int
bfin_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  enum attr_type dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest, src;

      if (GET_CODE (pat) == PARALLEL)
        pat = XVECEXP (pat, 0, 0);
      dest = SET_DEST (pat);
      src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest))
          || ! (MEM_P (src) || D_REGNO_P (REGNO (src))))
        return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
/* This function acts like NEXT_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a bundle.  */

static rtx_insn *
find_next_insn_start (rtx_insn *insn)
{
  if (GET_MODE (insn) == SImode)
    {
      while (GET_MODE (insn) != QImode)
        insn = NEXT_INSN (insn);
    }
  return NEXT_INSN (insn);
}
/* This function acts like PREV_INSN, but is aware of three-insn bundles and
   skips all subsequent parallel instructions if INSN is the start of such
   a bundle.  */

static rtx_insn *
find_prev_insn_start (rtx_insn *insn)
{
  insn = PREV_INSN (insn);
  gcc_assert (GET_MODE (insn) != SImode);
  if (GET_MODE (insn) == QImode)
    {
      while (GET_MODE (PREV_INSN (insn)) == SImode)
        insn = PREV_INSN (insn);
    }
  return insn;
}
/* Implement TARGET_CAN_USE_DOLOOP_P.  */

static bool
bfin_can_use_doloop_p (const widest_int &, const widest_int &iterations_max,
                       unsigned int, bool)
{
  /* Due to limitations in the hardware (an initial loop count of 0
     does not loop 2^32 times), we must avoid generating a hardware
     loop when we cannot rule out this case.  */
  if (!flag_unsafe_loop_optimizations
      && wi::geu_p (iterations_max, 0xFFFFFFFF))
    return false;

  return true;
}
/* Increment the counter for the number of loop instructions in the
   current function.  */

void
bfin_hardware_loop (void)
{
  cfun->machine->has_hardware_loops++;
}
/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop.  */
#define MAX_LOOP_LENGTH 2042

/* Maximum distance of the LSETUP instruction from the loop start.  */
#define MAX_LSETUP_DISTANCE 30
/* Estimate the length of INSN conservatively.  */

static int
length_for_loop (rtx_insn *insn)
{
  int length = 0;
  if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 8;
      else if (ENABLE_WA_SPECULATIVE_LOADS)
        length = 6;
    }
  else if (LABEL_P (insn))
    {
      if (ENABLE_WA_SPECULATIVE_SYNCS)
        length = 4;
    }

  if (NONDEBUG_INSN_P (insn))
    length += get_attr_length (insn);

  return length;
}
/* Optimize LOOP.  */

static bool
hwloop_optimize (hwloop_info loop)
{
  basic_block bb;
  rtx_insn *insn, *last_insn;
  rtx loop_init, start_label, end_label;
  rtx iter_reg, scratchreg, scratch_init, scratch_init_insn;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq_end;
  rtx_insn *seq;
  int length;
  bool clobber0, clobber1;

  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      return false;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  gcc_assert (REG_P (iter_reg));

  scratchreg = NULL_RTX;
  scratch_init = iter_reg;
  scratch_init_insn = NULL_RTX;
  if (!PREG_P (iter_reg) && loop->incoming_src)
    {
      basic_block bb_in = loop->incoming_src;
      int i;
      for (i = REG_P0; i <= REG_P5; i++)
        if ((df_regs_ever_live_p (i)
             || (funkind (TREE_TYPE (current_function_decl)) == SUBROUTINE
                 && call_used_regs[i]))
            && !REGNO_REG_SET_P (df_get_live_out (bb_in), i))
          {
            scratchreg = gen_rtx_REG (SImode, i);
            break;
          }
      for (insn = BB_END (bb_in); insn != BB_HEAD (bb_in);
           insn = PREV_INSN (insn))
        {
          rtx set;
          if (NOTE_P (insn) || BARRIER_P (insn))
            continue;
          set = single_set (insn);
          if (set && rtx_equal_p (SET_DEST (set), iter_reg))
            {
              if (CONSTANT_P (SET_SRC (set)))
                {
                  scratch_init = SET_SRC (set);
                  scratch_init_insn = insn;
                }
              break;
            }
          else if (reg_mentioned_p (iter_reg, PATTERN (insn)))
            break;
        }
    }

  if (loop->incoming_src)
    {
      /* Make sure the predecessor is before the loop start label, as required by
         the LSETUP instruction.  */
      length = 0;
      insn = BB_END (loop->incoming_src);
      /* If we have to insert the LSETUP before a jump, count that jump in the
         length.  */
      if (vec_safe_length (loop->incoming) > 1
          || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (insn));
          insn = PREV_INSN (insn);
        }

      for (; insn && insn != loop->start_label; insn = NEXT_INSN (insn))
        length += length_for_loop (insn);

      if (!insn)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup not before loop_start\n",
                     loop->loop_no);
          return false;
        }

      /* Account for the pop of a scratch register where necessary.  */
      if (!PREG_P (iter_reg) && scratchreg == NULL_RTX
          && ENABLE_WA_LOAD_LCREGS)
        length += 2;

      if (length > MAX_LSETUP_DISTANCE)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d lsetup too far away\n", loop->loop_no);
          return false;
        }
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    length += length_for_loop (insn);

  if (!insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
                 loop->loop_no);
      return false;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      return false;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (loop->iter_reg_used || loop->iter_reg_used_outside)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      return false;
    }

  clobber0 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB0)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT0));
  clobber1 = (TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LB1)
              || TEST_HARD_REG_BIT (loop->regs_set_in_loop, REG_LT1));
  if (clobber0 && clobber1)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d no loop reg available\n",
                 loop->loop_no);
      return false;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block.  And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = find_prev_insn_start (loop->loop_end);

  while (1)
    {
      for (; last_insn != BB_HEAD (bb);
           last_insn = find_prev_insn_start (last_insn))
        if (NONDEBUG_INSN_P (last_insn))
          break;

      if (last_insn != BB_HEAD (bb))
        break;

      if (single_pred_p (bb)
          && single_pred_edge (bb)->flags & EDGE_FALLTHRU
          && single_pred (bb) != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
          continue;
        }
      else
        {
          last_insn = NULL;
          break;
        }
    }

  if (!last_insn)
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has no last instruction\n",
                 loop->loop_no);
      return false;
    }

  if (JUMP_P (last_insn) && !any_condjump_p (last_insn))
    {
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last instruction\n",
                 loop->loop_no);
      return false;
    }
  /* In all other cases, try to replace a bad last insn with a nop.  */
  else if (JUMP_P (last_insn)
           || CALL_P (last_insn)
           || get_attr_type (last_insn) == TYPE_SYNC
           || get_attr_type (last_insn) == TYPE_CALL
           || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI
           || recog_memoized (last_insn) == CODE_FOR_return_internal
           || GET_CODE (PATTERN (last_insn)) == ASM_INPUT
           || asm_noperands (PATTERN (last_insn)) >= 0)
    {
      if (loop->length + 2 > MAX_LOOP_LENGTH)
        {
          if (dump_file)
            fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, ";; loop %d has bad last insn; replace with nop\n",
                 loop->loop_no);

      last_insn = emit_insn_after (gen_forced_nop (), last_insn);
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  if (loop->depth == 1 && !clobber1)
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC1);
      lb_reg = gen_rtx_REG (SImode, REG_LB1);
      lt_reg = gen_rtx_REG (SImode, REG_LT1);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC1);
    }
  else
    {
      lc_reg = gen_rtx_REG (SImode, REG_LC0);
      lb_reg = gen_rtx_REG (SImode, REG_LB0);
      lt_reg = gen_rtx_REG (SImode, REG_LT0);
      SET_HARD_REG_BIT (loop->regs_set_in_loop, REG_LC0);
    }

  loop->end_label = end_label;

  /* Create a sequence containing the loop setup.  */
  start_sequence ();

  /* LSETUP only accepts P registers.  If we have one, we can use it,
     otherwise there are several ways of working around the problem.
     If we're not affected by anomaly 312, we can load the LC register
     from any iteration register, and use LSETUP without initialization.
     If we've found a P scratch register that's not live here, we can
     instead copy the iter_reg into that and use an initializing LSETUP.
     If all else fails, push and pop P0 and use it as a scratch.  */
  if (P_REGNO_P (REGNO (iter_reg)))
    {
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, iter_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (!ENABLE_WA_LOAD_LCREGS && DPREG_P (iter_reg))
    {
      emit_insn (gen_movsi (lc_reg, iter_reg));
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
                                               lb_reg, end_label,
                                               lc_reg);
      seq_end = emit_insn (loop_init);
    }
  else if (scratchreg != NULL_RTX)
    {
      emit_insn (gen_movsi (scratchreg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, scratchreg);
      seq_end = emit_insn (loop_init);
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }
  else
    {
      rtx p0reg = gen_rtx_REG (SImode, REG_P0);
      rtx push = gen_frame_mem (SImode,
                                gen_rtx_PRE_DEC (SImode, stack_pointer_rtx));
      rtx pop = gen_frame_mem (SImode,
                               gen_rtx_POST_INC (SImode, stack_pointer_rtx));
      emit_insn (gen_movsi (push, p0reg));
      emit_insn (gen_movsi (p0reg, scratch_init));
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
                                            lb_reg, end_label,
                                            lc_reg, p0reg);
      emit_insn (loop_init);
      seq_end = emit_insn (gen_movsi (p0reg, pop));
      if (scratch_init_insn != NULL_RTX)
        delete_insn (scratch_init_insn);
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
               loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* If the loop isn't entered at the top, also create a jump to the entry
     point.  */
  if (!loop->incoming_src && loop->head != loop->incoming_dest)
    {
      rtx label = BB_HEAD (loop->incoming_dest);
      /* If we're jumping to the final basic block in the loop, and there's
         only one cheap instruction before the end (typically an increment of
         an induction variable), we can just emit a copy here instead of a
         jump.  */
      if (loop->incoming_dest == loop->tail
          && next_real_insn (label) == last_insn
          && asm_noperands (last_insn) < 0
          && GET_CODE (PATTERN (last_insn)) == SET)
        seq_end = emit_insn (copy_rtx (PATTERN (last_insn)));
      else
        {
          emit_jump_insn (gen_jump (label));
          seq_end = emit_barrier ();
        }
    }

  seq = get_insns ();
  end_sequence ();

  if (loop->incoming_src)
    {
      rtx_insn *prev = BB_END (loop->incoming_src);
      if (vec_safe_length (loop->incoming) > 1
          || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
        {
          gcc_assert (JUMP_P (prev));
          prev = PREV_INSN (prev);
        }
      emit_insn_after (seq, prev);
    }
  else
    {
      basic_block new_bb;
      edge e;
      edge_iterator ei;

#ifdef ENABLE_CHECKING
      if (loop->head != loop->incoming_dest)
        {
          /* We aren't entering the loop at the top.  Since we've established
             that the loop is entered only at one point, this means there
             can't be fallthru edges into the head.  Any such fallthru edges
             would become invalid when we insert the new block, so verify
             that this does not in fact happen.  */
          FOR_EACH_EDGE (e, ei, loop->head->preds)
            gcc_assert (!(e->flags & EDGE_FALLTHRU));
        }
#endif

      emit_insn_before (seq, BB_HEAD (loop->head));
      seq = emit_label_before (gen_label_rtx (), seq);

      new_bb = create_basic_block (seq, seq_end, loop->head->prev_bb);
      FOR_EACH_EDGE (e, ei, loop->incoming)
        {
          if (!(e->flags & EDGE_FALLTHRU)
              || e->dest != loop->head)
            redirect_edge_and_branch_force (e, new_bb);
          else
            redirect_edge_succ (e, new_bb);
        }
      e = make_edge (new_bb, loop->head, 0);
    }

  delete_insn (loop->loop_end);
  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return true;
}
/* A callback for the hw-doloop pass.  Called when a loop we have discovered
   turns out not to be optimizable; we have to split the doloop_end pattern
   into a subtract and a test.  */

static void
hwloop_fail (hwloop_info loop)
{
  rtx insn = loop->loop_end;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
         without scratch register.  */
      rtx test;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
                                    loop->iter_reg,
                                    constm1_rtx),
                        loop->loop_end);

      test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
      insn = emit_jump_insn_before (gen_cbranchsi4 (test,
                                                    loop->iter_reg, const0_rtx,
                                                    loop->start_label),
                                    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
  else
    {
      splitting_loops = 1;
      try_split (PATTERN (insn), insn, 1);
      splitting_loops = 0;
    }
}
/* A callback for the hw-doloop pass.  This function examines INSN; if
   it is a loop_end pattern we recognize, return the reg rtx for the
   loop counter.  Otherwise, return NULL_RTX.  */

static rtx
hwloop_pattern_reg (rtx_insn *insn)
{
  rtx reg;

  if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
    return NULL_RTX;

  reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
  if (!REG_P (reg))
    return NULL_RTX;
  return reg;
}
static struct hw_doloop_hooks bfin_doloop_hooks =
{
  hwloop_pattern_reg,
  hwloop_optimize,
  hwloop_fail
};

/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
   and tries to rewrite the RTL of these loops so that proper Blackfin
   hardware loops are generated.  */

static void
bfin_reorg_loops (void)
{
  reorg_loops (true, &bfin_doloop_hooks);
}
/* Possibly generate a SEQUENCE out of three insns found in SLOT.
   Returns true if we modified the insn chain, false otherwise.  */

static bool
gen_one_bundle (rtx_insn *slot[3])
{
  gcc_assert (slot[1] != NULL_RTX);

  /* Don't add extra NOPs if optimizing for size.  */
  if (optimize_size
      && (slot[0] == NULL_RTX || slot[2] == NULL_RTX))
    return false;

  /* Verify that we really can do the multi-issue.  */
  if (slot[0])
    {
      rtx_insn *t = NEXT_INSN (slot[0]);
      while (t != slot[1])
        {
          if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }
  if (slot[2])
    {
      rtx_insn *t = NEXT_INSN (slot[1]);
      while (t != slot[2])
        {
          if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
            return false;
          t = NEXT_INSN (t);
        }
    }

  if (slot[0] == NULL_RTX)
    {
      slot[0] = emit_insn_before (gen_mnop (), slot[1]);
      df_insn_rescan (slot[0]);
    }
  if (slot[2] == NULL_RTX)
    {
      slot[2] = emit_insn_after (gen_forced_nop (), slot[1]);
      df_insn_rescan (slot[2]);
    }

  /* Avoid line number information being printed inside one bundle.  */
  if (INSN_LOCATION (slot[1])
      && INSN_LOCATION (slot[1]) != INSN_LOCATION (slot[0]))
    INSN_LOCATION (slot[1]) = INSN_LOCATION (slot[0]);
  if (INSN_LOCATION (slot[2])
      && INSN_LOCATION (slot[2]) != INSN_LOCATION (slot[0]))
    INSN_LOCATION (slot[2]) = INSN_LOCATION (slot[0]);

  /* Terminate them with "|| " instead of ";" in the output.  */
  PUT_MODE (slot[0], SImode);
  PUT_MODE (slot[1], SImode);
  /* Terminate the bundle, for the benefit of reorder_var_tracking_notes.  */
  PUT_MODE (slot[2], QImode);
  return true;
}
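/* Note: within these passes, GET_MODE on an insn encodes bundling state:
   SImode marks an insn followed by more insns of the same bundle, QImode
   marks the final insn of a bundle, and TImode (set by the scheduler)
   marks the first insn of a new issue group.  */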
/* Go through all insns, and use the information generated during scheduling
   to generate SEQUENCEs to represent bundles of instructions issued
   simultaneously.  */

static void
bfin_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *slot[3];
      int n_filled = 0;

      slot[0] = slot[1] = slot[2] = NULL;
      for (insn = BB_HEAD (bb);; insn = next)
        {
          int at_end;
          rtx delete_this = NULL_RTX;

          if (NONDEBUG_INSN_P (insn))
            {
              enum attr_type type = get_attr_type (insn);

              if (type == TYPE_STALL)
                {
                  gcc_assert (n_filled == 0);
                  delete_this = insn;
                }
              else
                {
                  if (type == TYPE_DSP32 || type == TYPE_DSP32SHIFTIMM)
                    slot[0] = insn;
                  else if (slot[1] == NULL_RTX)
                    slot[1] = insn;
                  else
                    slot[2] = insn;
                  n_filled++;
                }
            }

          next = NEXT_INSN (insn);
          while (next && insn != BB_END (bb)
                 && !(NONDEBUG_INSN_P (next)
                      && GET_CODE (PATTERN (next)) != USE
                      && GET_CODE (PATTERN (next)) != CLOBBER))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }

          /* BB_END can change due to emitting extra NOPs, so check here.  */
          at_end = insn == BB_END (bb);
          if (delete_this == NULL_RTX && (at_end || GET_MODE (next) == TImode))
            {
              if ((n_filled < 2
                   || !gen_one_bundle (slot))
                  && slot[0] != NULL_RTX)
                {
                  rtx pat = PATTERN (slot[0]);
                  if (GET_CODE (pat) == SET
                      && GET_CODE (SET_SRC (pat)) == UNSPEC
                      && XINT (SET_SRC (pat), 1) == UNSPEC_32BIT)
                    {
                      SET_SRC (pat) = XVECEXP (SET_SRC (pat), 0, 0);
                      INSN_CODE (slot[0]) = -1;
                      df_insn_rescan (slot[0]);
                    }
                }
              n_filled = 0;
              slot[0] = slot[1] = slot[2] = NULL;
            }
          if (delete_this != NULL_RTX)
            delete_insn (delete_this);
          if (at_end)
            break;
        }
    }
}
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */

static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *queue = NULL;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (NONDEBUG_INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx_insn *next_queue = PREV_INSN (queue);
                      SET_PREV_INSN (NEXT_INSN (insn)) = queue;
                      SET_NEXT_INSN (queue) = NEXT_INSN (insn);
                      SET_NEXT_INSN (insn) = queue;
                      SET_PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx_insn *prev = PREV_INSN (insn);
                  SET_PREV_INSN (next) = prev;
                  SET_NEXT_INSN (prev) = next;

                  SET_PREV_INSN (insn) = queue;
                  queue = insn;
                }
            }
        }
    }
}
/* On some silicon revisions, functions shorter than a certain number of cycles
   can cause unpredictable behaviour.  Work around this by adding NOPs as
   needed.  */

static void
workaround_rts_anomaly (void)
{
  rtx_insn *insn, *first_insn = NULL;
  int cycles = 4;

  if (! ENABLE_WA_RETS)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (BARRIER_P (insn))
        return;

      if (NOTE_P (insn) || LABEL_P (insn))
        continue;

      if (JUMP_TABLE_DATA_P (insn))
        continue;

      if (first_insn == NULL_RTX)
        first_insn = insn;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT
          || asm_noperands (pat) >= 0)
        continue;

      if (CALL_P (insn))
        return;

      if (JUMP_P (insn))
        {
          if (recog_memoized (insn) == CODE_FOR_return_internal)
            break;

          /* Nothing to worry about for direct jumps.  */
          if (!any_condjump_p (insn))
            return;
          if (cycles <= 1)
            return;
          cycles--;
        }
      else if (INSN_P (insn))
        {
          rtx pat = PATTERN (insn);
          int this_cycles = 1;

          if (GET_CODE (pat) == PARALLEL)
            {
              if (analyze_push_multiple_operation (pat)
                  || analyze_pop_multiple_operation (pat))
                this_cycles = n_regs_to_save;
            }
          else
            {
              int icode = recog_memoized (insn);

              if (icode == CODE_FOR_link)
                this_cycles = 4;
              else if (icode == CODE_FOR_unlink)
                this_cycles = 3;
              else if (icode == CODE_FOR_mulsi3)
                this_cycles = 5;
            }
          if (this_cycles >= cycles)
            return;

          cycles -= this_cycles;
        }
    }
  while (cycles > 0)
    {
      emit_insn_before (gen_nop (), first_insn);
      cycles--;
    }
}
/* Return an insn type for INSN that can be used by the caller for anomaly
   workarounds.  This differs from plain get_attr_type in that it handles
   SEQUENCEs.  */

static enum attr_type
type_for_anomaly (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
    {
      enum attr_type t;
      t = get_attr_type (seq->insn (1));
      if (t == TYPE_MCLD)
        return t;
      t = get_attr_type (seq->insn (2));
      if (t == TYPE_MCLD)
        return t;
      return TYPE_MCST;
    }
  else
    return get_attr_type (insn);
}
/* Return true iff the address found in MEM is based on the register
   NP_REG and optionally has a positive offset.  */

static bool
harmless_null_pointer_p (rtx mem, int np_reg)
{
  mem = XEXP (mem, 0);
  if (GET_CODE (mem) == POST_INC || GET_CODE (mem) == POST_DEC)
    mem = XEXP (mem, 0);
  if (REG_P (mem) && (int) REGNO (mem) == np_reg)
    return true;
  if (GET_CODE (mem) == PLUS
      && REG_P (XEXP (mem, 0)) && (int) REGNO (XEXP (mem, 0)) == np_reg)
    {
      mem = XEXP (mem, 1);
      if (GET_CODE (mem) == CONST_INT && INTVAL (mem) > 0)
        return true;
    }
  return false;
}
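/* For example, with NP_REG being the number of P2, addresses such as
   [P2], [P2++] or [P2 + 8] are accepted, while [P2 + -4] or [P3] are
   not; a speculative access at or just above address 0 is harmless.  */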
/* Return nonzero if INSN contains any loads that may trap.  */

static bool
trapping_loads_p (rtx_insn *insn, int np_reg, bool after_np_branch)
{
  rtx mem = SET_SRC (single_set (insn));

  if (!after_np_branch)
    np_reg = -1;
  return ((np_reg == -1 || !harmless_null_pointer_p (mem, np_reg))
          && may_trap_p (mem));
}
/* Return INSN if it is of TYPE_MCLD.  Alternatively, if INSN is the start of
   a three-insn bundle, see if one of them is a load and return that if so.
   Return NULL if the insn does not contain loads.  */

static rtx_insn *
find_load (rtx_insn *insn)
{
  if (!NONDEBUG_INSN_P (insn))
    return NULL;
  if (get_attr_type (insn) == TYPE_MCLD)
    return insn;
  if (GET_MODE (insn) != SImode)
    return NULL;
  do {
    insn = NEXT_INSN (insn);
    if ((GET_MODE (insn) == SImode || GET_MODE (insn) == QImode)
        && get_attr_type (insn) == TYPE_MCLD)
      return insn;
  } while (GET_MODE (insn) != QImode);
  return NULL;
}
/* Determine whether PAT is an indirect call pattern.  */

static bool
indirect_call_p (rtx pat)
{
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET)
    pat = SET_SRC (pat);
  gcc_assert (GET_CODE (pat) == CALL);
  pat = XEXP (pat, 0);
  gcc_assert (GET_CODE (pat) == MEM);
  pat = XEXP (pat, 0);

  return REG_P (pat);
}
/* During workaround_speculation, track whether we're in the shadow of a
   conditional branch that tests a P register for NULL.  If so, we can omit
   emitting NOPs if we see a load from that P register, since a speculative
   access at address 0 isn't a problem, and the load is executed in all other
   cases anyway.
   Global for communication with note_np_check_stores through note_stores.  */
int np_check_regno = -1;
bool np_after_branch = false;

/* Subroutine of workaround_speculation, called through note_stores.  */

static void
note_np_check_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (x) && (REGNO (x) == REG_CC || (int) REGNO (x) == np_check_regno))
    np_check_regno = -1;
}
static void
workaround_speculation (void)
{
  rtx_insn *insn, *next;
  rtx_insn *last_condjump = NULL;
  int cycles_since_jump = INT_MAX;
  int delay_added = 0;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = next)
    {
      rtx pat;
      int delay_needed = 0;

      next = find_next_insn_start (insn);

      if (NOTE_P (insn) || BARRIER_P (insn))
        continue;
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      if (LABEL_P (insn))
        {
          np_check_regno = -1;
          continue;
        }

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == ASM_INPUT || asm_noperands (pat) >= 0)
        {
          np_check_regno = -1;
          continue;
        }

      if (JUMP_P (insn))
        {
          /* Is this a condjump based on a null pointer comparison we saw
             earlier?  */
          if (np_check_regno != -1
              && recog_memoized (insn) == CODE_FOR_cbranchbi4)
            {
              rtx op = XEXP (SET_SRC (PATTERN (insn)), 0);
              gcc_assert (GET_CODE (op) == EQ || GET_CODE (op) == NE);
              if (GET_CODE (op) == NE)
                np_after_branch = true;
            }
          if (any_condjump_p (insn)
              && ! cbranch_predicted_taken_p (insn))
            {
              last_condjump = insn;
              delay_added = 0;
              cycles_since_jump = 0;
            }
          else
            cycles_since_jump = INT_MAX;
        }
      else if (CALL_P (insn))
        {
          np_check_regno = -1;
          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;
          if (indirect_call_p (pat) && ENABLE_WA_INDIRECT_CALLS)
            delay_needed = 3;
        }
      else if (NONDEBUG_INSN_P (insn))
        {
          rtx_insn *load_insn = find_load (insn);
          enum attr_type type = type_for_anomaly (insn);

          if (cycles_since_jump < INT_MAX)
            cycles_since_jump++;

          /* Detect a comparison of a P register with zero.  If we later
             see a condjump based on it, we have found a null pointer
             check.  */
          if (recog_memoized (insn) == CODE_FOR_compare_eq)
            {
              rtx src = SET_SRC (PATTERN (insn));
              if (REG_P (XEXP (src, 0))
                  && P_REGNO_P (REGNO (XEXP (src, 0)))
                  && XEXP (src, 1) == const0_rtx)
                {
                  np_check_regno = REGNO (XEXP (src, 0));
                  np_after_branch = false;
                }
              else
                np_check_regno = -1;
            }

          if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
            {
              if (trapping_loads_p (load_insn, np_check_regno,
                                    np_after_branch))
                delay_needed = 4;
            }
          else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
            delay_needed = 3;

          /* See if we need to forget about a null pointer comparison
             we found earlier.  */
          if (recog_memoized (insn) != CODE_FOR_compare_eq)
            {
              note_stores (PATTERN (insn), note_np_check_stores, NULL);
              if (np_check_regno != -1)
                {
                  if (find_regno_note (insn, REG_INC, np_check_regno))
                    np_check_regno = -1;
                }
            }
        }

      if (delay_needed > cycles_since_jump
          && (delay_needed - cycles_since_jump) > delay_added)
        {
          rtx pat1;
          int num_clobbers;
          rtx *op = recog_data.operand;

          delay_needed -= cycles_since_jump;

          extract_insn (last_condjump);
          if (optimize_size)
            {
              pat1 = gen_cbranch_predicted_taken (op[0], op[1], op[2],
                                                  op[3]);
              cycles_since_jump = INT_MAX;
            }
          else
            {
              /* Do not adjust cycles_since_jump in this case, so that
                 we'll increase the number of NOPs for a subsequent insn
                 if necessary.  */
              pat1 = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
                                            GEN_INT (delay_needed));
              delay_added = delay_needed;
            }
          PATTERN (last_condjump) = pat1;
          INSN_CODE (last_condjump) = recog (pat1, insn, &num_clobbers);
        }
      if (CALL_P (insn))
        cycles_since_jump = INT_MAX;
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      int cycles_since_jump;
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
              || cbranch_predicted_taken_p (insn)))
        {
          rtx_insn *target = JUMP_LABEL_AS_INSN (insn);
          rtx_insn *label = target;
          rtx_insn *next_tgt;

          cycles_since_jump = 0;
          for (; target && cycles_since_jump < 3; target = next_tgt)
            {
              rtx pat;

              next_tgt = find_next_insn_start (target);

              if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
                continue;

              if (JUMP_TABLE_DATA_P (target))
                continue;

              pat = PATTERN (target);
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                  || GET_CODE (pat) == ASM_INPUT
                  || asm_noperands (pat) >= 0)
                continue;

              if (NONDEBUG_INSN_P (target))
                {
                  rtx_insn *load_insn = find_load (target);
                  enum attr_type type = type_for_anomaly (target);
                  int delay_needed = 0;
                  if (cycles_since_jump < INT_MAX)
                    cycles_since_jump++;

                  if (load_insn && ENABLE_WA_SPECULATIVE_LOADS)
                    {
                      if (trapping_loads_p (load_insn, -1, false))
                        delay_needed = 2;
                    }
                  else if (type == TYPE_SYNC && ENABLE_WA_SPECULATIVE_SYNCS)
                    delay_needed = 2;

                  if (delay_needed > cycles_since_jump)
                    {
                      rtx prev = prev_real_insn (label);
                      delay_needed -= cycles_since_jump;
                      if (dump_file)
                        fprintf (dump_file, "Adding %d nops after %d\n",
                                 delay_needed, INSN_UID (label));

                      if (JUMP_P (prev)
                          && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
                        {
                          rtx x;
                          HOST_WIDE_INT v;

                          if (dump_file)
                            fprintf (dump_file,
                                     "Reducing nops on insn %d.\n",
                                     INSN_UID (prev));
                          x = PATTERN (prev);
                          x = XVECEXP (x, 0, 1);
                          v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
                          XVECEXP (x, 0, 0) = GEN_INT (v);
                        }
                      while (delay_needed-- > 0)
                        emit_insn_after (gen_nop (), label);
                      break;
                    }
                }
            }
        }
    }
}
/* Called just before the final scheduling pass.  If we need to insert NOPs
   later on to work around speculative loads, insert special placeholder
   insns that cause loads to be delayed for as many cycles as necessary
   (and possible).  This reduces the number of NOPs we need to add.
   The dummy insns we generate are later removed by bfin_gen_bundles.  */

static void
add_sched_insns_for_speculation (void)
{
  rtx_insn *insn;

  if (! ENABLE_WA_SPECULATIVE_LOADS && ! ENABLE_WA_SPECULATIVE_SYNCS
      && ! ENABLE_WA_INDIRECT_CALLS)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
        continue;
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
          || GET_CODE (pat) == ASM_INPUT
          || asm_noperands (pat) >= 0)
        continue;

      if (JUMP_P (insn))
        {
          if (any_condjump_p (insn)
              && !cbranch_predicted_taken_p (insn))
            {
              rtx n = next_real_insn (insn);
              emit_insn_before (gen_stall (GEN_INT (3)), n);
            }
        }
    }

  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
          && any_condjump_p (insn)
          && (cbranch_predicted_taken_p (insn)))
        {
          rtx target = JUMP_LABEL (insn);
          rtx_insn *next = next_real_insn (target);

          if (GET_CODE (PATTERN (next)) == UNSPEC_VOLATILE
              && get_attr_type (next) == TYPE_STALL)
            continue;
          emit_insn_before (gen_stall (GEN_INT (1)), next);
        }
    }
}
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */

static void
bfin_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it now.  */
  compute_bb_for_insn ();

  if (flag_schedule_insns_after_reload)
    {
      splitting_for_sched = 1;
      split_all_insns ();
      splitting_for_sched = 0;

      add_sched_insns_for_speculation ();

      timevar_push (TV_SCHED2);
      if (flag_selective_scheduling2
          && !maybe_skip_selective_scheduling ())
        run_selective_scheduling ();
      else
        schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule and insert nops as necessary for 64-bit parallel
         issue.  */
      bfin_gen_bundles ();
    }

  df_analyze ();

  /* Doloop optimization.  */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops ();

  workaround_speculation ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);

  workaround_rts_anomaly ();
}
/* Handle interrupt_handler, exception_handler and nmi_handler function
   attributes; arguments as in struct attribute_spec.handler.  */

static tree
handle_int_type_attribute (tree *node, tree name,
                           tree args ATTRIBUTE_UNUSED,
                           int flags ATTRIBUTE_UNUSED,
                           bool *no_add_attrs)
{
  tree x = *node;

  if (TREE_CODE (x) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  else if (funkind (x) != SUBROUTINE)
    error ("multiple function type attributes specified");

  return NULL_TREE;
}

static tree
handle_int_decl_attribute (tree *node, tree name, tree args, int flags,
                           bool *no_add_attrs)
{
  tree x = *node;

  if (TREE_CODE (x) == FUNCTION_DECL)
    return handle_int_type_attribute (&TREE_TYPE (x), name, args, flags,
                                      no_add_attrs);

  warning (OPT_Wattributes, "%qE attribute only applies to functions", name);
  *no_add_attrs = true;
  return NULL_TREE;
}
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
bfin_comp_type_attributes (const_tree type1, const_tree type2)
{
  e_funkind kind1, kind2;

  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  kind1 = funkind (type1);
  kind2 = funkind (type2);

  if (kind1 != kind2)
    return 0;

  /* Check for mismatched modifiers.  */
  if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
    return 0;

  if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
      != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
    return 0;

  return 1;
}
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_longcall_attribute (tree *node, tree name,
                                tree args ATTRIBUTE_UNUSED,
                                int flags ATTRIBUTE_UNUSED,
                                bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }
  else if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
            && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
           || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
               && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
    {
      warning (OPT_Wattributes,
               "can%'t apply both longcall and shortcall attributes to the same function");
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "l1_text" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
bfin_handle_l1_text_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                               int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error ("%qE attribute only applies to functions",
             name);
      *no_add_attrs = true;
    }

  /* The decl may have already been given a section attribute
     from a previous declaration.  Ensure they match.  */
  else if (DECL_SECTION_NAME (decl) != NULL
           && strcmp (DECL_SECTION_NAME (decl),
                      ".l1.text") != 0)
    {
      error ("section of %q+D conflicts with previous declaration",
             decl);
      *no_add_attrs = true;
    }
  else
    set_decl_section_name (decl, ".l1.text");

  return NULL_TREE;
}
/* Handle a "l1_data", "l1_data_A" or "l1_data_B" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l1_data_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) != VAR_DECL)
    {
      error ("%qE attribute only applies to variables",
	     name);
      *no_add_attrs = true;
    }
  else if (current_function_decl != NULL_TREE
	   && !TREE_STATIC (decl))
    {
      error ("%qE attribute cannot be specified for local variables",
	     name);
      *no_add_attrs = true;
    }
  else
    {
      const char *section_name;

      if (strcmp (IDENTIFIER_POINTER (name), "l1_data") == 0)
	section_name = ".l1.data";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_A") == 0)
	section_name = ".l1.data.A";
      else if (strcmp (IDENTIFIER_POINTER (name), "l1_data_B") == 0)
	section_name = ".l1.data.B";
      else
	gcc_unreachable ();

      /* The decl may have already been given a section attribute
	 from a previous declaration.  Ensure they match.  */
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     section_name) != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, section_name);
    }

  return NULL_TREE;
}
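/* Usage sketch (illustrative, not from the original source):

     int lut[256] __attribute__ ((l1_data_A));

   places the variable in section ".l1.data.A" (bank A of L1 data
   SRAM); plain "l1_data" uses ".l1.data" and "l1_data_B" uses
   ".l1.data.B".  The variable must have static storage duration,
   as enforced above.  */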
/* Handle a "l2" attribute; arguments as in struct attribute_spec.handler.  */

static tree
bfin_handle_l2_attribute (tree *node, tree ARG_UNUSED (name),
			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree decl = *node;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     ".l2.text") != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, ".l2.text");
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      if (DECL_SECTION_NAME (decl) != NULL
	  && strcmp (DECL_SECTION_NAME (decl),
		     ".l2.data") != 0)
	{
	  error ("section of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      else
	set_decl_section_name (decl, ".l2.data");
    }

  return NULL_TREE;
}
/* Table of valid machine attributes.  */
static const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, decl_handler,
       type_handler, affects_type_identity } */
  { "interrupt_handler", 0, 0, false, true, true, handle_int_decl_attribute,
    handle_int_type_attribute, false },
  { "exception_handler", 0, 0, false, true, true, handle_int_decl_attribute,
    handle_int_type_attribute, false },
  { "nmi_handler", 0, 0, false, true, true, handle_int_decl_attribute,
    handle_int_type_attribute, false },
  { "nesting", 0, 0, false, true, true, NULL, NULL, false },
  { "kspisusp", 0, 0, false, true, true, NULL, NULL, false },
  { "saveall", 0, 0, false, true, true, NULL, NULL, false },
  { "longcall", 0, 0, false, true, true, NULL,
    bfin_handle_longcall_attribute, false },
  { "shortcall", 0, 0, false, true, true, NULL,
    bfin_handle_longcall_attribute, false },
  { "l1_text", 0, 0, true, false, false, bfin_handle_l1_text_attribute, NULL,
    false },
  { "l1_data", 0, 0, true, false, false, bfin_handle_l1_data_attribute, NULL,
    false },
  { "l1_data_A", 0, 0, true, false, false, bfin_handle_l1_data_attribute, NULL,
    false },
  { "l1_data_B", 0, 0, true, false, false, bfin_handle_l1_data_attribute, NULL,
    false },
  { "l2", 0, 0, true, false, false, bfin_handle_l2_attribute, NULL, false },
  { NULL, 0, 0, false, false, false, NULL, NULL, false }
};
/* Implementation of TARGET_ASM_INTEGER.  When using FD-PIC, we need to
   tell the assembler to generate pointers to function descriptors in
   some cases.  */

static bool
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
{
  if (TARGET_FDPIC && size == UNITS_PER_WORD)
    {
      if (GET_CODE (value) == SYMBOL_REF
	  && SYMBOL_REF_FUNCTION_P (value))
	{
	  fputs ("\t.picptr\tfuncdesc(", asm_out_file);
	  output_addr_const (asm_out_file, value);
	  fputs (")\n", asm_out_file);
	  return true;
	}
      else
	{
	  /* We've set the unaligned SI op to NULL, so we always have to
	     handle the unaligned case here.  */
	  assemble_integer_with_op ("\t.4byte\t", value);
	  return true;
	}
    }
  return default_assemble_integer (value, size, aligned_p);
}
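/* For instance (illustrative): under FD-PIC, emitting the address of
   a function "foo" as initializer data produces

	.picptr	funcdesc(foo)

   so the linker builds a function descriptor for it, while
   non-function words fall through to the plain ".4byte" directive
   above.  */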
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this_rtx = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this_rtx;
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R3);

      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (Pmode, p2tmp,
						   vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this_rtx;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
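/* In effect the emitted thunk computes (pseudo-C sketch, added for
   illustration):

     this += delta;
     if (vcall_offset)
       this += *(*this + vcall_offset);
     goto function;

   Small deltas use one or two add-immediate instructions; larger ones
   go through R3.  The "1 ||" in the final condition means the direct
   jump.l form is always used.  */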
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_ONES,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_SUM_2X16,
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_ROUND_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_ABS_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,
  BFIN_BUILTIN_MULT_1X32X32,
  BFIN_BUILTIN_MULT_1X32X32NS,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X32,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_CPLX_MUL_16_S40,
  BFIN_BUILTIN_CPLX_MAC_16_S40,
  BFIN_BUILTIN_CPLX_MSU_16_S40,

  BFIN_BUILTIN_CPLX_SQU,

  BFIN_BUILTIN_LOADBYTES,

  BFIN_BUILTIN_MAX
};
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);
  tree int_ftype_pint
    = build_function_type_list (integer_type_node,
				build_pointer_type (integer_type_node),
				NULL_TREE);

  /* Add the remaining MMX insns with somewhat more complicated types.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  def_builtin ("__builtin_bfin_ones", short_ftype_int, BFIN_BUILTIN_ONES);

  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_min_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X16);
  def_builtin ("__builtin_bfin_max_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_sum_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_SUM_2X16);
  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_min_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MIN_1X32);
  def_builtin ("__builtin_bfin_max_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_MAX_1X32);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_abs_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_ABS_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_round_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_ROUND_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32);
  def_builtin ("__builtin_bfin_mult_fr1x32x32NS", int_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X32X32NS);

  /* Shifts.  */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);
  def_builtin ("__builtin_bfin_shl_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X32);

  /* Complex numbers.  */
  def_builtin ("__builtin_bfin_cmplx_add", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_cmplx_sub", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
  def_builtin ("__builtin_bfin_cmplx_mul_s40", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16_S40);
  def_builtin ("__builtin_bfin_cmplx_mac_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16_S40);
  def_builtin ("__builtin_bfin_cmplx_msu_s40", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16_S40);
  def_builtin ("__builtin_bfin_csqu_fr16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_CPLX_SQU);

  /* "Unaligned" load.  */
  def_builtin ("__builtin_bfin_loadbytes", int_ftype_pint,
	       BFIN_BUILTIN_LOADBYTES);
}
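/* Usage sketch (illustrative, not from the original source):

     typedef short v2hi __attribute__ ((vector_size (4)));

     v2hi sum (v2hi a, v2hi b)
     {
       return __builtin_bfin_add_fr2x16 (a, b);
     }

   performs two saturating 16-bit additions in one operation; the
   v2hi type corresponds to the V2HI_type_node built above.  */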
struct builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum bfin_builtins code;
  int macflag;
};
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },
  { CODE_FOR_ssashiftsi3, "__builtin_bfin_shl_fr1x32", BFIN_BUILTIN_SSASHIFT_1X32, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE },

  { CODE_FOR_mulhisi_ll, "__builtin_bfin_mulhisill", BFIN_BUILTIN_MULHISILL, -1 },
  { CODE_FOR_mulhisi_lh, "__builtin_bfin_mulhisilh", BFIN_BUILTIN_MULHISILH, -1 },
  { CODE_FOR_mulhisi_hl, "__builtin_bfin_mulhisihl", BFIN_BUILTIN_MULHISIHL, -1 },
  { CODE_FOR_mulhisi_hh, "__builtin_bfin_mulhisihh", BFIN_BUILTIN_MULHISIHH, -1 }
};
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_loadbytes, "__builtin_bfin_loadbytes", BFIN_BUILTIN_LOADBYTES, 0 },

  { CODE_FOR_ones, "__builtin_bfin_ones", BFIN_BUILTIN_ONES, 0 },

  { CODE_FOR_clrsbhi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_clrsbsi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssroundsi2, "__builtin_bfin_round_fr1x32", BFIN_BUILTIN_ROUND_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },
  { CODE_FOR_ssabssi2, "__builtin_bfin_abs_fr1x32", BFIN_BUILTIN_ABS_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_ssabsv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
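/* A note on the tables above (explanatory, added here): the final
   field is the MACFLAG_xxx value handed to the expander, or -1 when
   the pattern takes no MAC flag.  For example,
   "__builtin_bfin_mult_fr2x16" and "__builtin_bfin_multr_fr2x16"
   share CODE_FOR_flag_mulv2hi and differ only in that flag
   (MACFLAG_T versus MACFLAG_NONE).  */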
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */
static rtx
safe_vector_operand (rtx x, machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (SImode);

  emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
  return gen_lowpart (mode, x);
}
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree exp, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode op1mode = GET_MODE (op1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
/* Subroutine of bfin_expand_builtin to take care of unop insns.  */

static rtx
bfin_expand_unop_builtin (enum insn_code icode, tree exp,
			  rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  machine_mode op0mode = GET_MODE (op0);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);

  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  enum insn_code icode;
  const struct builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  rtx op0, op1, op2, accvec, pat, tmp1, tmp2, a0reg, a1reg;
  machine_mode tmode, mode0;

  switch (fcode)
    {
    case BFIN_BUILTIN_CSYNC:
      emit_insn (gen_csync ());
      return 0;
    case BFIN_BUILTIN_SSYNC:
      emit_insn (gen_ssync ());
      return 0;

    case BFIN_BUILTIN_DIFFHL_2X16:
    case BFIN_BUILTIN_DIFFLH_2X16:
    case BFIN_BUILTIN_SUM_2X16:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16 ? CODE_FOR_subhilov2hi3
	       : fcode == BFIN_BUILTIN_DIFFLH_2X16 ? CODE_FOR_sublohiv2hi3
	       : CODE_FOR_ssaddhilov2hi3);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! target
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (target, op0, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case BFIN_BUILTIN_MULT_1X32X32:
    case BFIN_BUILTIN_MULT_1X32X32NS:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      if (! target
	  || !register_operand (target, SImode))
	target = gen_reg_rtx (SImode);
      if (! register_operand (op0, SImode))
	op0 = copy_to_mode_reg (SImode, op0);
      if (! register_operand (op1, SImode))
	op1 = copy_to_mode_reg (SImode, op1);

      a1reg = gen_rtx_REG (PDImode, REG_A1);
      a0reg = gen_rtx_REG (PDImode, REG_A0);
      tmp1 = gen_lowpart (V2HImode, op0);
      tmp2 = gen_lowpart (V2HImode, op1);
      emit_insn (gen_flag_macinit1hi (a1reg,
				      gen_lowpart (HImode, op0),
				      gen_lowpart (HImode, op1),
				      GEN_INT (MACFLAG_FU)));
      emit_insn (gen_lshrpdi3 (a1reg, a1reg, GEN_INT (16)));

      if (fcode == BFIN_BUILTIN_MULT_1X32X32)
	emit_insn (gen_flag_mul_macv2hi_parts_acconly (a0reg, a1reg, tmp1, tmp2,
						       const1_rtx, const1_rtx,
						       const1_rtx, const0_rtx, a1reg,
						       const0_rtx, GEN_INT (MACFLAG_NONE),
						       GEN_INT (MACFLAG_M)));
      else
	{
	  /* For saturating multiplication, there's exactly one special case
	     to be handled: multiplying the smallest negative value with
	     itself.  Due to shift correction in fractional multiplies, this
	     can overflow.  Iff this happens, OP2 will contain 1, which, when
	     added in 32 bits to the smallest negative, wraps to the largest
	     positive, which is the result we want.  */
	  op2 = gen_reg_rtx (V2HImode);
	  emit_insn (gen_packv2hi (op2, tmp1, tmp2, const0_rtx, const0_rtx));
	  emit_insn (gen_movsibi (gen_rtx_REG (BImode, REG_CC),
				  gen_lowpart (SImode, op2)));
	  emit_insn (gen_flag_mul_macv2hi_parts_acconly_andcc0 (a0reg, a1reg, tmp1, tmp2,
								const1_rtx, const1_rtx,
								const1_rtx, const0_rtx, a1reg,
								const0_rtx, GEN_INT (MACFLAG_NONE),
								GEN_INT (MACFLAG_M)));
	  op2 = gen_reg_rtx (SImode);
	  emit_insn (gen_movbisi (op2, gen_rtx_REG (BImode, REG_CC)));
	}
      emit_insn (gen_flag_machi_parts_acconly (a1reg, tmp2, tmp1,
					       const1_rtx, const0_rtx,
					       a1reg, const0_rtx, GEN_INT (MACFLAG_M)));
      emit_insn (gen_ashrpdi3 (a1reg, a1reg, GEN_INT (15)));
      emit_insn (gen_sum_of_accumulators (target, a0reg, a0reg, a1reg));
      if (fcode == BFIN_BUILTIN_MULT_1X32X32NS)
	emit_insn (gen_addsi3 (target, target, op2));
      return target;

    case BFIN_BUILTIN_CPLX_MUL_16:
    case BFIN_BUILTIN_CPLX_MUL_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);

      if (fcode == BFIN_BUILTIN_CPLX_MUL_16)
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
						const0_rtx, const0_rtx,
						const1_rtx, GEN_INT (MACFLAG_NONE)));
      emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, const1_rtx, const0_rtx,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_MAC_16:
    case BFIN_BUILTIN_CPLX_MSU_16:
    case BFIN_BUILTIN_CPLX_MAC_16_S40:
    case BFIN_BUILTIN_CPLX_MSU_16_S40:
      arg0 = CALL_EXPR_ARG (exp, 0);
      arg1 = CALL_EXPR_ARG (exp, 1);
      arg2 = CALL_EXPR_ARG (exp, 2);
      op0 = expand_normal (arg0);
      op1 = expand_normal (arg1);
      op2 = expand_normal (arg2);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_macv2hi_parts;
      tmode = insn_data[icode].operand[0].mode;

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (tmode);
      if (! register_operand (op1, GET_MODE (op1)))
	op1 = copy_to_mode_reg (GET_MODE (op1), op1);
      if (! register_operand (op2, GET_MODE (op2)))
	op2 = copy_to_mode_reg (GET_MODE (op2), op2);

      tmp1 = gen_reg_rtx (SImode);
      tmp2 = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op0), GEN_INT (16)));
      emit_move_insn (tmp2, gen_lowpart (SImode, op0));
      emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
      emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MSU_16)
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx, accvec, const0_rtx,
						   GEN_INT (MACFLAG_W32)));
      else
	emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op1, op2, const0_rtx,
						   const0_rtx, const0_rtx,
						   const1_rtx, accvec, const0_rtx,
						   const0_rtx, accvec, const0_rtx,
						   GEN_INT (MACFLAG_NONE)));
      if (fcode == BFIN_BUILTIN_CPLX_MAC_16
	  || fcode == BFIN_BUILTIN_CPLX_MAC_16_S40)
	{
	  tmp1 = const1_rtx;
	  tmp2 = const0_rtx;
	}
      else
	{
	  tmp1 = const0_rtx;
	  tmp2 = const1_rtx;
	}
      emit_insn (gen_flag_macv2hi_parts (target, op1, op2, const1_rtx,
					 const1_rtx, const1_rtx,
					 const0_rtx, accvec, tmp1, tmp2,
					 GEN_INT (MACFLAG_NONE), accvec));

      return target;

    case BFIN_BUILTIN_CPLX_SQU:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      accvec = gen_reg_rtx (V2PDImode);
      icode = CODE_FOR_flag_mulv2hi;
      tmp1 = gen_reg_rtx (V2HImode);
      tmp2 = gen_reg_rtx (V2HImode);

      if (! target
	  || GET_MODE (target) != V2HImode
	  || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
	target = gen_reg_rtx (V2HImode);
      if (! register_operand (op0, GET_MODE (op0)))
	op0 = copy_to_mode_reg (GET_MODE (op0), op0);

      emit_insn (gen_flag_mulv2hi (tmp1, op0, op0, GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_flag_mulhi_parts (gen_lowpart (HImode, tmp2), op0, op0,
				       const0_rtx, const1_rtx,
				       GEN_INT (MACFLAG_NONE)));

      emit_insn (gen_ssaddhi3_high_parts (target, tmp2, tmp2, tmp2, const0_rtx,
					  const1_rtx));
      emit_insn (gen_sssubhi3_low_parts (target, target, tmp1, tmp1,
					 const0_rtx, const1_rtx));

      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_binop_builtin (d->icode, exp, target,
					d->macflag);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return bfin_expand_unop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
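/* Matching sketch (explanatory, added here): a call such as

     short n = __builtin_bfin_ones (x);

   has no special case in the switch above, so it falls through to the
   bdesc_1arg scan and is expanded via bfin_expand_unop_builtin with
   CODE_FOR_ones.  Only builtins needing multi-insn sequences (the
   32x32 multiplies and the complex-arithmetic ones) are handled
   explicitly in the switch.  */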
static void
bfin_conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
  if (TARGET_FDPIC)
    call_used_regs[FDPIC_REGNO] = 1;
  if (!TARGET_FDPIC && flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST bfin_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST bfin_memory_move_cost

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE bfin_issue_rate

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG bfin_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE bfin_function_arg_advance

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE bfin_option_override

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P bfin_class_likely_spilled_p

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P bfin_legitimate_constant_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY bfin_return_in_memory

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P bfin_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED bfin_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE bfin_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE bfin_conditional_register_usage

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE bfin_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT bfin_trampoline_init

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY bfin_extra_live_on_entry

/* Passes after sched2 can break the helpful TImode annotations that
   haifa-sched puts on every insn.  Just do scheduling in reorg.  */
#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

/* Variable tracking should be run after all optimizations which
   change order of insns.  It also needs a valid CFG.  */
#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P bfin_can_use_doloop_p

struct gcc_target targetm = TARGET_INITIALIZER;