/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

/*
 * ASMMACRO(name, code) defines a hazard-barrier sequence usable from both
 * assembly and C.  In assembly it becomes a plain gas macro; in C it emits
 * the same gas macro at file scope and wraps its invocation in a static
 * inline function of the same name.
 */
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif /* __ASSEMBLY__ */

/*
 * _ssnop: superscalar no-op; _ehb: execution hazard barrier.  Both are
 * encoded as sll of $0 (architecturally a nop) so they are safe on CPUs
 * that do not implement them.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb)
ASMMACRO(tlbw_use_hazard,
	 _ehb)
ASMMACRO(tlb_probe_hazard,
	 _ehb)
ASMMACRO(irq_enable_hazard,
	 _ehb)
ASMMACRO(irq_disable_hazard,
	 _ehb)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard so this is nice trick to have an optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;)
#define instruction_hazard() do { } while (0)

#endif

/*
 * FPU hazards
 */

#if defined(CONFIG_CPU_SB1)

/* SB1 needs a delay (with interlock via the untaken bnezl) after CP1 enable. */
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop)
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)

ASMMACRO(enable_fpu_hazard,
	 _ehb)
ASMMACRO(disable_fpu_hazard,
	 _ehb)

#else

ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop)
ASMMACRO(disable_fpu_hazard,
	 _ehb)

#endif

#endif /* _ASM_HAZARDS_H */