/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

#endif

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
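/*
 * Illustration: given the definitions above, an invocation such as
 *
 *	ASMMACRO(_ssnop, sll $0, $0, 1)
 *
 * emits the assembler macro ".macro _ssnop; sll $0, $0, 1; .endm" and,
 * when included from C, additionally a "static inline void _ssnop(void)"
 * wrapper whose inline asm expands to that macro, so the same hazard
 * barriers are usable from both assembler and C code.
 */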
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
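/*
 * Usage sketch (illustrative, mirroring the kernel's TLB handling code,
 * not part of this file): a TLB update typically looks like
 *
 *	write_c0_entryhi(entryhi);
 *	mtc0_tlbw_hazard();
 *	tlb_write_indexed();
 *	tlbw_use_hazard();
 *
 * with a hazard barrier between the coprocessor 0 write and the TLB write
 * and another before any dependent address translation.
 */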
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit and 64-bit code respectively, so neither can be used
 * without conditional compilation.  The alternative is switching the
 * assembler to 64-bit code, which happens to work right even for 32-bit
 * code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
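/*
 * jr.hb is the MIPS R2 jump-register-with-hazard-barrier: jumping through
 * it to the immediately following label clears any pending instruction
 * hazard before execution continues at label 1.
 */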

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.  The four _ssnops
 * below cover the longer, 4-cycle instruction-side window.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif
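/*
 * Illustrative note: back_to_back_c0_hazard() separates two consecutive
 * coprocessor 0 accesses where the second depends on the first, e.g. an
 * mtc0 followed by an mfc0 of the same register, as in the cp0 timer code.
 */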

/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
)
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#endif
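/*
 * Illustrative note: enable_fpu_hazard() is intended to run between setting
 * the CU1 bit in the c0 Status register and the first FPU instruction, and
 * disable_fpu_hazard() likewise after clearing the bit, e.g.:
 *
 *	set_c0_status(ST0_CU1);
 *	enable_fpu_hazard();
 */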

#endif /* _ASM_HAZARDS_H */