/* $Id: spitfire.h,v 1.10 2000/10/06 13:10:29 anton Exp $
 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
7 #ifndef _SPARC64_SPITFIRE_H
8 #define _SPARC64_SPITFIRE_H
/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is there is a distinct and unique copy of
 * each of these registers for each TLB.
 */
16 #define TSB_TAG_TARGET 0x0000000000000000
17 #define TLB_SFSR 0x0000000000000018
18 #define TSB_REG 0x0000000000000028
19 #define TLB_TAG_ACCESS 0x0000000000000030
/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
24 #define PRIMARY_CONTEXT 0x0000000000000008
25 #define SECONDARY_CONTEXT 0x0000000000000010
26 #define DMMU_SFAR 0x0000000000000020
27 #define VIRT_WATCHPOINT 0x0000000000000038
28 #define PHYS_WATCHPOINT 0x0000000000000040
32 extern __inline__
unsigned long spitfire_get_isfsr(void)
36 __asm__
__volatile__("ldxa [%1] %2, %0"
38 : "r" (TLB_SFSR
), "i" (ASI_IMMU
));
42 extern __inline__
unsigned long spitfire_get_dsfsr(void)
46 __asm__
__volatile__("ldxa [%1] %2, %0"
48 : "r" (TLB_SFSR
), "i" (ASI_DMMU
));
52 extern __inline__
unsigned long spitfire_get_sfar(void)
56 __asm__
__volatile__("ldxa [%1] %2, %0"
58 : "r" (DMMU_SFAR
), "i" (ASI_DMMU
));
62 extern __inline__
void spitfire_put_isfsr(unsigned long sfsr
)
64 __asm__
__volatile__("stxa %0, [%1] %2" :
65 : "r" (sfsr
), "r" (TLB_SFSR
), "i" (ASI_IMMU
));
68 extern __inline__
void spitfire_put_dsfsr(unsigned long sfsr
)
70 __asm__
__volatile__("stxa %0, [%1] %2" :
71 : "r" (sfsr
), "r" (TLB_SFSR
), "i" (ASI_DMMU
));
74 extern __inline__
unsigned long spitfire_get_primary_context(void)
78 __asm__
__volatile__("ldxa [%1] %2, %0"
80 : "r" (PRIMARY_CONTEXT
), "i" (ASI_DMMU
));
84 extern __inline__
void spitfire_set_primary_context(unsigned long ctx
)
86 __asm__
__volatile__("stxa %0, [%1] %2"
89 "r" (PRIMARY_CONTEXT
), "i" (ASI_DMMU
));
93 extern __inline__
unsigned long spitfire_get_secondary_context(void)
97 __asm__
__volatile__("ldxa [%1] %2, %0"
99 : "r" (SECONDARY_CONTEXT
), "i" (ASI_DMMU
));
103 extern __inline__
void spitfire_set_secondary_context(unsigned long ctx
)
105 __asm__
__volatile__("stxa %0, [%1] %2"
108 "r" (SECONDARY_CONTEXT
), "i" (ASI_DMMU
));
/* The data cache is write through, so this just invalidates the
 * specified line.
 */
115 extern __inline__
void spitfire_put_dcache_tag(unsigned long addr
, unsigned long tag
)
117 __asm__
__volatile__("stxa %0, [%1] %2"
119 : "r" (tag
), "r" (addr
), "i" (ASI_DCACHE_TAG
));
/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline,
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
129 extern __inline__
void spitfire_put_icache_tag(unsigned long addr
, unsigned long tag
)
131 __asm__
__volatile__("stxa %0, [%1] %2"
133 : "r" (tag
), "r" (addr
), "i" (ASI_IC_TAG
));
136 extern __inline__
unsigned long spitfire_get_dtlb_data(int entry
)
140 __asm__
__volatile__("ldxa [%1] %2, %0"
142 : "r" (entry
<< 3), "i" (ASI_DTLB_DATA_ACCESS
));
146 extern __inline__
unsigned long spitfire_get_dtlb_tag(int entry
)
150 __asm__
__volatile__("ldxa [%1] %2, %0"
152 : "r" (entry
<< 3), "i" (ASI_DTLB_TAG_READ
));
156 extern __inline__
void spitfire_put_dtlb_data(int entry
, unsigned long data
)
158 __asm__
__volatile__("stxa %0, [%1] %2"
160 : "r" (data
), "r" (entry
<< 3),
161 "i" (ASI_DTLB_DATA_ACCESS
));
164 extern __inline__
unsigned long spitfire_get_itlb_data(int entry
)
168 __asm__
__volatile__("ldxa [%1] %2, %0"
170 : "r" (entry
<< 3), "i" (ASI_ITLB_DATA_ACCESS
));
174 extern __inline__
unsigned long spitfire_get_itlb_tag(int entry
)
178 __asm__
__volatile__("ldxa [%1] %2, %0"
180 : "r" (entry
<< 3), "i" (ASI_ITLB_TAG_READ
));
184 extern __inline__
void spitfire_put_itlb_data(int entry
, unsigned long data
)
186 __asm__
__volatile__("stxa %0, [%1] %2"
188 : "r" (data
), "r" (entry
<< 3),
189 "i" (ASI_ITLB_DATA_ACCESS
));
192 /* Spitfire hardware assisted TLB flushes. */
194 /* Context level flushes. */
195 extern __inline__
void spitfire_flush_dtlb_primary_context(void)
197 __asm__
__volatile__("stxa %%g0, [%0] %1"
199 : "r" (0x40), "i" (ASI_DMMU_DEMAP
));
202 extern __inline__
void spitfire_flush_itlb_primary_context(void)
204 __asm__
__volatile__("stxa %%g0, [%0] %1"
206 : "r" (0x40), "i" (ASI_IMMU_DEMAP
));
209 extern __inline__
void spitfire_flush_dtlb_secondary_context(void)
211 __asm__
__volatile__("stxa %%g0, [%0] %1"
213 : "r" (0x50), "i" (ASI_DMMU_DEMAP
));
216 extern __inline__
void spitfire_flush_itlb_secondary_context(void)
218 __asm__
__volatile__("stxa %%g0, [%0] %1"
220 : "r" (0x50), "i" (ASI_IMMU_DEMAP
));
223 extern __inline__
void spitfire_flush_dtlb_nucleus_context(void)
225 __asm__
__volatile__("stxa %%g0, [%0] %1"
227 : "r" (0x60), "i" (ASI_DMMU_DEMAP
));
230 extern __inline__
void spitfire_flush_itlb_nucleus_context(void)
232 __asm__
__volatile__("stxa %%g0, [%0] %1"
234 : "r" (0x60), "i" (ASI_IMMU_DEMAP
));
237 /* Page level flushes. */
238 extern __inline__
void spitfire_flush_dtlb_primary_page(unsigned long page
)
240 __asm__
__volatile__("stxa %%g0, [%0] %1"
242 : "r" (page
), "i" (ASI_DMMU_DEMAP
));
245 extern __inline__
void spitfire_flush_itlb_primary_page(unsigned long page
)
247 __asm__
__volatile__("stxa %%g0, [%0] %1"
249 : "r" (page
), "i" (ASI_IMMU_DEMAP
));
252 extern __inline__
void spitfire_flush_dtlb_secondary_page(unsigned long page
)
254 __asm__
__volatile__("stxa %%g0, [%0] %1"
256 : "r" (page
| 0x10), "i" (ASI_DMMU_DEMAP
));
259 extern __inline__
void spitfire_flush_itlb_secondary_page(unsigned long page
)
261 __asm__
__volatile__("stxa %%g0, [%0] %1"
263 : "r" (page
| 0x10), "i" (ASI_IMMU_DEMAP
));
266 extern __inline__
void spitfire_flush_dtlb_nucleus_page(unsigned long page
)
268 __asm__
__volatile__("stxa %%g0, [%0] %1"
270 : "r" (page
| 0x20), "i" (ASI_DMMU_DEMAP
));
273 extern __inline__
void spitfire_flush_itlb_nucleus_page(unsigned long page
)
275 __asm__
__volatile__("stxa %%g0, [%0] %1"
277 : "r" (page
| 0x20), "i" (ASI_IMMU_DEMAP
));
280 #endif /* !(__ASSEMBLY__) */
282 #endif /* !(_SPARC64_SPITFIRE_H) */