ppc64: Don't set Kp bit on SLB
[openbios.git] / arch / sparc64 / spitfire.h
blob4a951b1fbd148024e6726724ebfa4be8a4907b40
1 /* $Id: spitfire.h,v 1.18 2001/11/29 16:42:10 kanoj Exp $
2 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
4 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
5 */
7 #ifndef _SPARC64_SPITFIRE_H
8 #define _SPARC64_SPITFIRE_H
10 #include <asm/asi.h>
/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is there is a distinct and unique copy of
 * each of these registers for each TLB.
 */
16 #define TSB_TAG_TARGET 0x0000000000000000 /* All chips */
17 #define TLB_SFSR 0x0000000000000018 /* All chips */
18 #define TSB_REG 0x0000000000000028 /* All chips */
19 #define TLB_TAG_ACCESS 0x0000000000000030 /* All chips */
20 #define VIRT_WATCHPOINT 0x0000000000000038 /* All chips */
21 #define PHYS_WATCHPOINT 0x0000000000000040 /* All chips */
22 #define TSB_EXTENSION_P 0x0000000000000048 /* Ultra-III and later */
23 #define TSB_EXTENSION_S 0x0000000000000050 /* Ultra-III and later, D-TLB only */
24 #define TSB_EXTENSION_N 0x0000000000000058 /* Ultra-III and later */
25 #define TLB_TAG_ACCESS_EXT 0x0000000000000060 /* Ultra-III+ and later */
/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
30 #define PRIMARY_CONTEXT 0x0000000000000008
31 #define SECONDARY_CONTEXT 0x0000000000000010
32 #define DMMU_SFAR 0x0000000000000020
33 #define VIRT_WATCHPOINT 0x0000000000000038
34 #define PHYS_WATCHPOINT 0x0000000000000040
36 #define SPITFIRE_HIGHEST_LOCKED_TLBENT (64 - 1)
38 /* translation table entry bits */
39 #define SPITFIRE_TTE_WRITABLE 0x02
40 #define SPITFIRE_TTE_PRIVILEGED 0x04
41 #define SPITFIRE_TTE_CV 0x10
42 #define SPITFIRE_TTE_CP 0x20
43 #define SPITFIRE_TTE_LOCKED 0x40
44 #define SPITFIRE_TTE_VALID 0x8000000000000000ULL
46 #ifndef __ASSEMBLY__
/* Which UltraSPARC TLB organization the running CPU implements;
 * probed early and consulted by sparc64_highest_locked_tlbent().
 */
enum ultra_tlb_layout {
	spitfire = 0,
	cheetah = 1,
	cheetah_plus = 2,
};

extern enum ultra_tlb_layout tlb_type;
/* Cheetah's locked TLB holds 16 entries (vs. 64 on Spitfire). */
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE 0x4000

/* Highest TLB entry index usable for locked translations on this CPU. */
#define sparc64_highest_locked_tlbent()	\
	(tlb_type == spitfire ? \
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)
65 static __inline__ unsigned long spitfire_get_isfsr(void)
67 unsigned long ret;
69 __asm__ __volatile__("ldxa [%1] %2, %0"
70 : "=r" (ret)
71 : "r" (TLB_SFSR), "i" (ASI_IMMU));
72 return ret;
75 static __inline__ unsigned long spitfire_get_dsfsr(void)
77 unsigned long ret;
79 __asm__ __volatile__("ldxa [%1] %2, %0"
80 : "=r" (ret)
81 : "r" (TLB_SFSR), "i" (ASI_DMMU));
82 return ret;
85 static __inline__ unsigned long spitfire_get_sfar(void)
87 unsigned long ret;
89 __asm__ __volatile__("ldxa [%1] %2, %0"
90 : "=r" (ret)
91 : "r" (DMMU_SFAR), "i" (ASI_DMMU));
92 return ret;
95 static __inline__ void spitfire_put_isfsr(unsigned long sfsr)
97 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
98 "membar #Sync"
99 : /* no outputs */
100 : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
103 static __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
105 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
106 "membar #Sync"
107 : /* no outputs */
108 : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
111 static __inline__ unsigned long spitfire_get_primary_context(void)
113 unsigned long ctx;
115 __asm__ __volatile__("ldxa [%1] %2, %0"
116 : "=r" (ctx)
117 : "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
118 return ctx;
121 static __inline__ void spitfire_set_primary_context(unsigned long ctx)
123 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
124 "membar #Sync"
125 : /* No outputs */
126 : "r" (ctx & 0x3ff),
127 "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
128 __asm__ __volatile__ ("membar #Sync" : : : "memory");
131 static __inline__ unsigned long spitfire_get_secondary_context(void)
133 unsigned long ctx;
135 __asm__ __volatile__("ldxa [%1] %2, %0"
136 : "=r" (ctx)
137 : "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
138 return ctx;
141 static __inline__ void spitfire_set_secondary_context(unsigned long ctx)
143 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
144 "membar #Sync"
145 : /* No outputs */
146 : "r" (ctx & 0x3ff),
147 "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
148 __asm__ __volatile__ ("membar #Sync" : : : "memory");
151 /* The data cache is write through, so this just invalidates the
152 * specified line.
154 static __inline__ void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
156 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
157 "membar #Sync"
158 : /* No outputs */
159 : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
160 __asm__ __volatile__ ("membar #Sync" : : : "memory");
163 /* The instruction cache lines are flushed with this, but note that
164 * this does not flush the pipeline. It is possible for a line to
165 * get flushed but stale instructions to still be in the pipeline,
166 * a flush instruction (to any address) is sufficient to handle
167 * this issue after the line is invalidated.
169 static __inline__ void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
171 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
172 "membar #Sync"
173 : /* No outputs */
174 : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
177 static __inline__ unsigned long spitfire_get_dtlb_data(int entry)
179 unsigned long data;
181 __asm__ __volatile__("ldxa [%1] %2, %0"
182 : "=r" (data)
183 : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));
185 /* Clear TTE diag bits. */
186 data &= ~0x0003fe0000000000UL;
188 return data;
191 static __inline__ unsigned long spitfire_get_dtlb_tag(int entry)
193 unsigned long tag;
195 __asm__ __volatile__("ldxa [%1] %2, %0"
196 : "=r" (tag)
197 : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
198 return tag;
201 static __inline__ void spitfire_put_dtlb_data(int entry, unsigned long data)
203 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
204 "membar #Sync"
205 : /* No outputs */
206 : "r" (data), "r" (entry << 3),
207 "i" (ASI_DTLB_DATA_ACCESS));
210 static __inline__ unsigned long spitfire_get_itlb_data(int entry)
212 unsigned long data;
214 __asm__ __volatile__("ldxa [%1] %2, %0"
215 : "=r" (data)
216 : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));
218 /* Clear TTE diag bits. */
219 data &= ~0x0003fe0000000000UL;
221 return data;
224 static __inline__ unsigned long spitfire_get_itlb_tag(int entry)
226 unsigned long tag;
228 __asm__ __volatile__("ldxa [%1] %2, %0"
229 : "=r" (tag)
230 : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
231 return tag;
234 static __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
236 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
237 "membar #Sync"
238 : /* No outputs */
239 : "r" (data), "r" (entry << 3),
240 "i" (ASI_ITLB_DATA_ACCESS));
243 /* Spitfire hardware assisted TLB flushes. */
245 /* Context level flushes. */
246 static __inline__ void spitfire_flush_dtlb_primary_context(void)
248 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
249 "membar #Sync"
250 : /* No outputs */
251 : "r" (0x40), "i" (ASI_DMMU_DEMAP));
254 static __inline__ void spitfire_flush_itlb_primary_context(void)
256 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
257 "membar #Sync"
258 : /* No outputs */
259 : "r" (0x40), "i" (ASI_IMMU_DEMAP));
262 static __inline__ void spitfire_flush_dtlb_secondary_context(void)
264 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
265 "membar #Sync"
266 : /* No outputs */
267 : "r" (0x50), "i" (ASI_DMMU_DEMAP));
270 static __inline__ void spitfire_flush_itlb_secondary_context(void)
272 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
273 "membar #Sync"
274 : /* No outputs */
275 : "r" (0x50), "i" (ASI_IMMU_DEMAP));
278 static __inline__ void spitfire_flush_dtlb_nucleus_context(void)
280 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
281 "membar #Sync"
282 : /* No outputs */
283 : "r" (0x60), "i" (ASI_DMMU_DEMAP));
286 static __inline__ void spitfire_flush_itlb_nucleus_context(void)
288 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
289 "membar #Sync"
290 : /* No outputs */
291 : "r" (0x60), "i" (ASI_IMMU_DEMAP));
294 /* Page level flushes. */
295 static __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
297 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
298 "membar #Sync"
299 : /* No outputs */
300 : "r" (page), "i" (ASI_DMMU_DEMAP));
303 static __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
305 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
306 "membar #Sync"
307 : /* No outputs */
308 : "r" (page), "i" (ASI_IMMU_DEMAP));
311 static __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
313 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
314 "membar #Sync"
315 : /* No outputs */
316 : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
319 static __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
321 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
322 "membar #Sync"
323 : /* No outputs */
324 : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
327 static __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
329 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
330 "membar #Sync"
331 : /* No outputs */
332 : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
335 static __inline__ void spitfire_flush_itlb_nucleus_page(unsigned long page)
337 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
338 "membar #Sync"
339 : /* No outputs */
340 : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
343 /* Cheetah has "all non-locked" tlb flushes. */
344 static __inline__ void cheetah_flush_dtlb_all(void)
346 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
347 "membar #Sync"
348 : /* No outputs */
349 : "r" (0x80), "i" (ASI_DMMU_DEMAP));
352 static __inline__ void cheetah_flush_itlb_all(void)
354 __asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
355 "membar #Sync"
356 : /* No outputs */
357 : "r" (0x80), "i" (ASI_IMMU_DEMAP));
360 /* Cheetah has a 4-tlb layout so direct access is a bit different.
361 * The first two TLBs are fully assosciative, hold 16 entries, and are
362 * used only for locked and >8K sized translations. One exists for
363 * data accesses and one for instruction accesses.
365 * The third TLB is for data accesses to 8K non-locked translations, is
366 * 2 way assosciative, and holds 512 entries. The fourth TLB is for
367 * instruction accesses to 8K non-locked translations, is 2 way
368 * assosciative, and holds 128 entries.
370 * Cheetah has some bug where bogus data can be returned from
371 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
372 * the problem for me. -DaveM
374 static __inline__ unsigned long cheetah_get_ldtlb_data(int entry)
376 unsigned long data;
378 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
379 "ldxa [%1] %2, %0"
380 : "=r" (data)
381 : "r" ((0 << 16) | (entry << 3)),
382 "i" (ASI_DTLB_DATA_ACCESS));
384 return data;
387 static __inline__ unsigned long cheetah_get_litlb_data(int entry)
389 unsigned long data;
391 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
392 "ldxa [%1] %2, %0"
393 : "=r" (data)
394 : "r" ((0 << 16) | (entry << 3)),
395 "i" (ASI_ITLB_DATA_ACCESS));
397 return data;
400 static __inline__ unsigned long cheetah_get_ldtlb_tag(int entry)
402 unsigned long tag;
404 __asm__ __volatile__("ldxa [%1] %2, %0"
405 : "=r" (tag)
406 : "r" ((0 << 16) | (entry << 3)),
407 "i" (ASI_DTLB_TAG_READ));
409 return tag;
412 static __inline__ unsigned long cheetah_get_litlb_tag(int entry)
414 unsigned long tag;
416 __asm__ __volatile__("ldxa [%1] %2, %0"
417 : "=r" (tag)
418 : "r" ((0 << 16) | (entry << 3)),
419 "i" (ASI_ITLB_TAG_READ));
421 return tag;
424 static __inline__ void cheetah_put_ldtlb_data(int entry, unsigned long data)
426 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
427 "membar #Sync"
428 : /* No outputs */
429 : "r" (data),
430 "r" ((0 << 16) | (entry << 3)),
431 "i" (ASI_DTLB_DATA_ACCESS));
434 static __inline__ void cheetah_put_litlb_data(int entry, unsigned long data)
436 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
437 "membar #Sync"
438 : /* No outputs */
439 : "r" (data),
440 "r" ((0 << 16) | (entry << 3)),
441 "i" (ASI_ITLB_DATA_ACCESS));
444 static __inline__ unsigned long cheetah_get_dtlb_data(int entry, int tlb)
446 unsigned long data;
448 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
449 "ldxa [%1] %2, %0"
450 : "=r" (data)
451 : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));
453 return data;
456 static __inline__ unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
458 unsigned long tag;
460 __asm__ __volatile__("ldxa [%1] %2, %0"
461 : "=r" (tag)
462 : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
463 return tag;
466 static __inline__ void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
468 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
469 "membar #Sync"
470 : /* No outputs */
471 : "r" (data),
472 "r" ((tlb << 16) | (entry << 3)),
473 "i" (ASI_DTLB_DATA_ACCESS));
476 static __inline__ unsigned long cheetah_get_itlb_data(int entry)
478 unsigned long data;
480 __asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
481 "ldxa [%1] %2, %0"
482 : "=r" (data)
483 : "r" ((2 << 16) | (entry << 3)),
484 "i" (ASI_ITLB_DATA_ACCESS));
486 return data;
489 static __inline__ unsigned long cheetah_get_itlb_tag(int entry)
491 unsigned long tag;
493 __asm__ __volatile__("ldxa [%1] %2, %0"
494 : "=r" (tag)
495 : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
496 return tag;
499 static __inline__ void cheetah_put_itlb_data(int entry, unsigned long data)
501 __asm__ __volatile__("stxa %0, [%1] %2\n\t"
502 "membar #Sync"
503 : /* No outputs */
504 : "r" (data), "r" ((2 << 16) | (entry << 3)),
505 "i" (ASI_ITLB_DATA_ACCESS));
508 #endif /* !(__ASSEMBLY__) */
510 #endif /* !(_SPARC64_SPITFIRE_H) */