#ifndef __ASM_SH64_CACHEFLUSH_H
#define __ASM_SH64_CACHEFLUSH_H

#ifndef __ASSEMBLY__
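
/*
 * Cache flushing primitives for sh64.  The calling conventions follow the
 * generic hooks described in Documentation/cachetlb.txt.
 *
 * Forward declarations so the prototypes below stand alone; in-tree users
 * normally pick these types up via other includes.
 */
struct mm_struct;
struct vm_area_struct;
struct page;
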
extern void flush_cache_all(void);
extern void flush_cache_mm(struct mm_struct *mm);
extern void flush_cache_sigtramp(unsigned long start, unsigned long end);
extern void flush_cache_range(struct vm_area_struct *vma, unsigned long start,
			      unsigned long end);
extern void flush_cache_page(struct vm_area_struct *vma, unsigned long addr,
			     unsigned long pfn);
extern void flush_dcache_page(struct page *pg);
extern void flush_icache_range(unsigned long start, unsigned long end);
extern void flush_icache_user_range(struct vm_area_struct *vma,
				    struct page *page, unsigned long addr,
				    int len);
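
/*
 * The remaining hooks need no per-call work on sh64: the mmap lock/unlock
 * hooks and flush_icache_page() are no-ops, while the vmalloc-range helpers
 * simply fall back to a full cache flush.
 */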
#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)

#define flush_cache_vmap(start, end)		flush_cache_all()
#define flush_cache_vunmap(start, end)		flush_cache_all()

#define flush_icache_page(vma, page)		do { } while (0)
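
/*
 * copy_to_user_page() and copy_from_user_page() are used when the kernel
 * reads or writes a user-space page on another process's behalf (e.g. the
 * ptrace and access_process_vm() paths).  The target mapping is flushed
 * before the copy, and for the to-user direction the instruction cache is
 * flushed afterwards so newly written code becomes visible.
 */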
#define copy_to_user_page(vma, page, vaddr, dst, src, len)	\
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
		flush_icache_user_range(vma, page, vaddr, len); \
	} while (0)

#define copy_from_user_page(vma, page, vaddr, dst, src, len)	\
	do {							\
		flush_cache_page(vma, vaddr, page_to_pfn(page));\
		memcpy(dst, src, len);				\
	} while (0)
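
/*
 * A minimal usage sketch (illustrative only, not part of this header),
 * assuming the caller has already looked up the vma and page and mapped
 * the page with kmap(); "buf", "offset" and "len" are hypothetical locals.
 * This mirrors the access_process_vm()-style write path:
 *
 *	void *maddr = kmap(page);
 *	copy_to_user_page(vma, page, addr, maddr + offset, buf, len);
 *	kunmap(page);
 */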

#endif /* __ASSEMBLY__ */

#endif /* __ASM_SH64_CACHEFLUSH_H */