/* $Id: spitfire.h,v 1.9 1998/04/28 08:23:33 davem Exp $
 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
7 #ifndef _SPARC64_SPITFIRE_H
8 #define _SPARC64_SPITFIRE_H
12 /* The following register addresses are accessible via ASI_DMMU
13 * and ASI_IMMU, that is there is a distinct and unique copy of
14 * each these registers for each TLB.
16 #define TSB_TAG_TARGET 0x0000000000000000
17 #define TLB_SFSR 0x0000000000000018
18 #define TSB_REG 0x0000000000000028
19 #define TLB_TAG_ACCESS 0x0000000000000030
21 /* These registers only exist as one entity, and are accessed
24 #define PRIMARY_CONTEXT 0x0000000000000008
25 #define SECONDARY_CONTEXT 0x0000000000000010
26 #define DMMU_SFAR 0x0000000000000020
27 #define VIRT_WATCHPOINT 0x0000000000000038
28 #define PHYS_WATCHPOINT 0x0000000000000040
32 extern __inline__
unsigned long spitfire_get_isfsr(void)
36 __asm__
__volatile__("ldxa [%1] %2, %0"
38 : "r" (TLB_SFSR
), "i" (ASI_IMMU
));
42 extern __inline__
unsigned long spitfire_get_dsfsr(void)
46 __asm__
__volatile__("ldxa [%1] %2, %0"
48 : "r" (TLB_SFSR
), "i" (ASI_DMMU
));
52 extern __inline__
unsigned long spitfire_get_sfar(void)
56 __asm__
__volatile__("ldxa [%1] %2, %0"
58 : "r" (DMMU_SFAR
), "i" (ASI_DMMU
));
62 extern __inline__
void spitfire_put_isfsr(unsigned long sfsr
)
64 __asm__
__volatile__("stxa %0, [%1] %2" :
65 : "r" (sfsr
), "r" (TLB_SFSR
), "i" (ASI_IMMU
));
68 extern __inline__
void spitfire_put_dsfsr(unsigned long sfsr
)
70 __asm__
__volatile__("stxa %0, [%1] %2" :
71 : "r" (sfsr
), "r" (TLB_SFSR
), "i" (ASI_DMMU
));
74 extern __inline__
unsigned long spitfire_get_primary_context(void)
78 __asm__
__volatile__("ldxa [%1] %2, %0"
80 : "r" (PRIMARY_CONTEXT
), "i" (ASI_DMMU
));
84 extern __inline__
void spitfire_set_primary_context(unsigned long ctx
)
86 __asm__
__volatile__("stxa %0, [%1] %2"
89 "r" (PRIMARY_CONTEXT
), "i" (ASI_DMMU
));
93 extern __inline__
unsigned long spitfire_get_secondary_context(void)
97 __asm__
__volatile__("ldxa [%1] %2, %0"
99 : "r" (SECONDARY_CONTEXT
), "i" (ASI_DMMU
));
103 extern __inline__
void spitfire_set_secondary_context(unsigned long ctx
)
105 __asm__
__volatile__("stxa %0, [%1] %2"
108 "r" (SECONDARY_CONTEXT
), "i" (ASI_DMMU
));
/* The data cache is write through, so this just invalidates the
 * specified line.
 */
115 extern __inline__
void spitfire_put_dcache_tag(unsigned long addr
, unsigned long tag
)
117 __asm__
__volatile__("stxa %0, [%1] %2"
119 : "r" (tag
), "r" (addr
), "i" (ASI_DCACHE_TAG
));
/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline,
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
128 extern __inline__
void spitfire_put_icache_tag(unsigned long addr
, unsigned long tag
)
130 __asm__
__volatile__("stxa %0, [%1] %2"
132 : "r" (tag
), "r" (addr
), "i" (ASI_IC_TAG
));
135 extern __inline__
unsigned long spitfire_get_dtlb_data(int entry
)
139 __asm__
__volatile__("ldxa [%1] %2, %0"
141 : "r" (entry
<< 3), "i" (ASI_DTLB_DATA_ACCESS
));
145 extern __inline__
unsigned long spitfire_get_dtlb_tag(int entry
)
149 __asm__
__volatile__("ldxa [%1] %2, %0"
151 : "r" (entry
<< 3), "i" (ASI_DTLB_TAG_READ
));
155 extern __inline__
void spitfire_put_dtlb_data(int entry
, unsigned long data
)
157 __asm__
__volatile__("stxa %0, [%1] %2"
159 : "r" (data
), "r" (entry
<< 3),
160 "i" (ASI_DTLB_DATA_ACCESS
));
163 extern __inline__
unsigned long spitfire_get_itlb_data(int entry
)
167 __asm__
__volatile__("ldxa [%1] %2, %0"
169 : "r" (entry
<< 3), "i" (ASI_ITLB_DATA_ACCESS
));
173 extern __inline__
unsigned long spitfire_get_itlb_tag(int entry
)
177 __asm__
__volatile__("ldxa [%1] %2, %0"
179 : "r" (entry
<< 3), "i" (ASI_ITLB_TAG_READ
));
183 extern __inline__
void spitfire_put_itlb_data(int entry
, unsigned long data
)
185 __asm__
__volatile__("stxa %0, [%1] %2"
187 : "r" (data
), "r" (entry
<< 3),
188 "i" (ASI_ITLB_DATA_ACCESS
));
191 /* Spitfire hardware assisted TLB flushes. */
193 /* Context level flushes. */
194 extern __inline__
void spitfire_flush_dtlb_primary_context(void)
196 __asm__
__volatile__("stxa %%g0, [%0] %1"
198 : "r" (0x40), "i" (ASI_DMMU_DEMAP
));
201 extern __inline__
void spitfire_flush_itlb_primary_context(void)
203 __asm__
__volatile__("stxa %%g0, [%0] %1"
205 : "r" (0x40), "i" (ASI_IMMU_DEMAP
));
208 extern __inline__
void spitfire_flush_dtlb_secondary_context(void)
210 __asm__
__volatile__("stxa %%g0, [%0] %1"
212 : "r" (0x50), "i" (ASI_DMMU_DEMAP
));
215 extern __inline__
void spitfire_flush_itlb_secondary_context(void)
217 __asm__
__volatile__("stxa %%g0, [%0] %1"
219 : "r" (0x50), "i" (ASI_IMMU_DEMAP
));
222 extern __inline__
void spitfire_flush_dtlb_nucleus_context(void)
224 __asm__
__volatile__("stxa %%g0, [%0] %1"
226 : "r" (0x60), "i" (ASI_DMMU_DEMAP
));
229 extern __inline__
void spitfire_flush_itlb_nucleus_context(void)
231 __asm__
__volatile__("stxa %%g0, [%0] %1"
233 : "r" (0x60), "i" (ASI_IMMU_DEMAP
));
236 /* Page level flushes. */
237 extern __inline__
void spitfire_flush_dtlb_primary_page(unsigned long page
)
239 __asm__
__volatile__("stxa %%g0, [%0] %1"
241 : "r" (page
), "i" (ASI_DMMU_DEMAP
));
244 extern __inline__
void spitfire_flush_itlb_primary_page(unsigned long page
)
246 __asm__
__volatile__("stxa %%g0, [%0] %1"
248 : "r" (page
), "i" (ASI_IMMU_DEMAP
));
251 extern __inline__
void spitfire_flush_dtlb_secondary_page(unsigned long page
)
253 __asm__
__volatile__("stxa %%g0, [%0] %1"
255 : "r" (page
| 0x10), "i" (ASI_DMMU_DEMAP
));
258 extern __inline__
void spitfire_flush_itlb_secondary_page(unsigned long page
)
260 __asm__
__volatile__("stxa %%g0, [%0] %1"
262 : "r" (page
| 0x10), "i" (ASI_IMMU_DEMAP
));
265 extern __inline__
void spitfire_flush_dtlb_nucleus_page(unsigned long page
)
267 __asm__
__volatile__("stxa %%g0, [%0] %1"
269 : "r" (page
| 0x20), "i" (ASI_DMMU_DEMAP
));
272 extern __inline__
void spitfire_flush_itlb_nucleus_page(unsigned long page
)
274 __asm__
__volatile__("stxa %%g0, [%0] %1"
276 : "r" (page
| 0x20), "i" (ASI_IMMU_DEMAP
));
279 #endif /* !(__ASSEMBLY__) */
281 #endif /* !(_SPARC64_SPITFIRE_H) */