#ifndef _ASM_M32R_TLBFLUSH_H
#define _ASM_M32R_TLBFLUSH_H

#include <asm/m32r.h>
/*
 * TLB flushing:
 *
 *  - flush_tlb() flushes the current mm struct TLBs
 *  - flush_tlb_all() flushes all processes' TLBs
 *  - flush_tlb_mm(mm) flushes the specified mm context TLBs
 *  - flush_tlb_page(vma, vmaddr) flushes one page
 *  - flush_tlb_range(vma, start, end) flushes a range of pages
 *  - flush_tlb_kernel_range(start, end) flushes a range of kernel pages
 *  - flush_tlb_pgtables(mm, start, end) flushes a range of page tables
 */
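
/*
 * Typical usage (illustrative sketch only; the vma, addr and ptep names
 * are assumed, not part of this interface): after a PTE for a user
 * mapping has been modified, drop the stale translation for that single
 * address rather than flushing the whole TLB:
 *
 *	set_pte(ptep, pteval);
 *	flush_tlb_page(vma, addr);
 */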

extern void local_flush_tlb_all(void);
extern void local_flush_tlb_mm(struct mm_struct *);
extern void local_flush_tlb_page(struct vm_area_struct *, unsigned long);
extern void local_flush_tlb_range(struct vm_area_struct *, unsigned long,
	unsigned long);

#ifndef CONFIG_SMP
#ifdef CONFIG_MMU
#define flush_tlb_all()			local_flush_tlb_all()
#define flush_tlb_mm(mm)		local_flush_tlb_mm(mm)
#define flush_tlb_page(vma, page)	local_flush_tlb_page(vma, page)
#define flush_tlb_range(vma, start, end)	\
	local_flush_tlb_range(vma, start, end)
#define flush_tlb_kernel_range(start, end)	local_flush_tlb_all()
#else	/* CONFIG_MMU */
#define flush_tlb_all()			do { } while (0)
#define flush_tlb_mm(mm)		do { } while (0)
#define flush_tlb_page(vma, vmaddr)	do { } while (0)
#define flush_tlb_range(vma, start, end)	do { } while (0)
#endif	/* CONFIG_MMU */
#else	/* CONFIG_SMP */
extern void smp_flush_tlb_all(void);
extern void smp_flush_tlb_mm(struct mm_struct *);
extern void smp_flush_tlb_page(struct vm_area_struct *, unsigned long);
extern void smp_flush_tlb_range(struct vm_area_struct *, unsigned long,
	unsigned long);

#define flush_tlb_all()			smp_flush_tlb_all()
#define flush_tlb_mm(mm)		smp_flush_tlb_mm(mm)
#define flush_tlb_page(vma, page)	smp_flush_tlb_page(vma, page)
#define flush_tlb_range(vma, start, end)	\
	smp_flush_tlb_range(vma, start, end)
#define flush_tlb_kernel_range(start, end)	smp_flush_tlb_all()
#endif	/* CONFIG_SMP */

/*
 * Flush the TLB entry for a single virtual address on the local CPU:
 * the address is written to the MMU's MSVA register and the operation
 * is driven through the MTOP/MIDXI registers.
 */
static __inline__ void __flush_tlb_page(unsigned long page)
{
	unsigned int tmpreg0, tmpreg1, tmpreg2;

	__asm__ __volatile__ (
		"seth	%0, #high(%4)	\n\t"
		"st	%3, @(%5, %0)	\n\t"
		"st	%1, @(%6, %0)	\n\t"
		"add3	%1, %0, %7	\n\t"
		"ld	%2, @(%6, %0)	\n\t"
		: "=&r" (tmpreg0), "=&r" (tmpreg1), "=&r" (tmpreg2)
		: "r" (page), "i" (MMU_REG_BASE), "i" (MSVA_offset),
		  "i" (MTOP_offset), "i" (MIDXI_offset)
		: "memory"
	);
}
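
/*
 * Illustrative sketch only (not the actual arch/m32r implementation; the
 * names below are assumed): a per-page range flush can be layered on
 * __flush_tlb_page() by walking the range at PAGE_SIZE granularity:
 *
 *	for (addr = start & PAGE_MASK; addr < end; addr += PAGE_SIZE)
 *		__flush_tlb_page(addr);
 */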

/*
 * Flush the entire TLB on the local CPU via the MMU's MTOP register.
 */
static __inline__ void __flush_tlb_all(void)
{
	unsigned int tmpreg0, tmpreg1;

	__asm__ __volatile__ (
		"seth	%0, #high(%2)	\n\t"
		"or3	%0, %0, #low(%2)	\n\t"
		: "=&r" (tmpreg0), "=&r" (tmpreg1)
		: "i" (MTOP) : "memory"
	);
}
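
/*
 * __flush_tlb_page() and __flush_tlb_all() are the CPU-local primitives;
 * the local_flush_tlb_*() routines declared above are presumably built on
 * top of them in the arch mm code, e.g. (illustrative sketch only):
 *
 *	void local_flush_tlb_all(void)
 *	{
 *		__flush_tlb_all();
 *	}
 */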

#define flush_tlb_pgtables(mm, start, end)	do { } while (0)

extern void update_mmu_cache(struct vm_area_struct *, unsigned long, pte_t);

#endif	/* _ASM_M32R_TLBFLUSH_H */