#ifndef __ASM_SH64_CACHEFLUSH_H
#define __ASM_SH64_CACHEFLUSH_H

#ifndef __ASSEMBLY__

#include <asm/page.h>

struct vm_area_struct;
struct page;
struct mm_struct;
extern void flush_cache_all(void);
extern void flush_cache_mm(struct mm_struct *mm);
extern void flush_cache_sigtramp(unsigned long start, unsigned long end);
extern void flush_cache_range(struct vm_area_struct *vma, unsigned long start,
			      unsigned long end);
extern void flush_cache_page(struct vm_area_struct *vma, unsigned long addr, unsigned long pfn);
extern void flush_dcache_page(struct page *pg);
extern void flush_icache_range(unsigned long start, unsigned long end);
extern void flush_icache_user_range(struct vm_area_struct *vma,
				    struct page *page, unsigned long addr,
				    int len);
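
/*
 * The remaining operations are provided as macros: flush_cache_dup_mm()
 * simply reuses flush_cache_mm(), the dcache mmap lock hooks are no-ops
 * on sh64, and the vmap/vunmap hooks fall back to a full cache flush.
 */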
#define flush_cache_dup_mm(mm)	flush_cache_mm(mm)

#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)

#define flush_cache_vmap(start, end)		flush_cache_all()
#define flush_cache_vunmap(start, end)		flush_cache_all()

#define flush_icache_page(vma, page)	do { } while (0)
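
/*
 * copy_to_user_page()/copy_from_user_page() keep the caches coherent when
 * the kernel reads or writes a user-mapped page on another task's behalf
 * (e.g. access_process_vm() for ptrace): flush the cache lines for the
 * target page, do the copy, and for the to-user direction also flush the
 * icache in case the copied data contains instructions.
 */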
#define copy_to_user_page(vma, page, vaddr, dst, src, len)	\
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
		flush_icache_user_range(vma, page, vaddr, len);	\
	} while (0)

#define copy_from_user_page(vma, page, vaddr, dst, src, len)	\
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
	} while (0)
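
/*
 * Illustrative use (a sketch, not a definition in this header): a caller
 * such as access_process_vm() maps the target page and lets the macro do
 * the cache maintenance around the copy, roughly:
 *
 *	maddr = kmap(page);
 *	copy_to_user_page(vma, page, addr, maddr + offset, buf, len);
 *	set_page_dirty_lock(page);
 *	kunmap(page);
 */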

#endif /* __ASSEMBLY__ */

#endif /* __ASM_SH64_CACHEFLUSH_H */