sparc: move the duplication in dma-mapping_{32|64}.h to dma-mapping.h
[linux-2.6/verdex.git] / arch/sparc/include/asm/dma-mapping_64.h
blob 017ae706ac642fba88662e21517adfdc337403fc
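The commit title above refers to hoisting the wrappers duplicated between dma-mapping_32.h and the file shown below into a common asm/dma-mapping.h. For context, here is a minimal sketch of what such a combined header could look like; the guard macro, the __sparc__/__arch64__ test and the hoisted helpers are assumptions for illustration, not contents of this blob.

/* Sketch only: a combined asm/dma-mapping.h that picks the 32- or 64-bit
 * implementation at compile time.  Names here are assumed, not taken from
 * this blob. */
#ifndef ___ASM_SPARC_DMA_MAPPING_H
#define ___ASM_SPARC_DMA_MAPPING_H

#if defined(__sparc__) && defined(__arch64__)
#include <asm/dma-mapping_64.h>		/* the file shown below */
#else
#include <asm/dma-mapping_32.h>
#endif

/* Pieces identical on sparc32 and sparc64 can then live here once,
 * e.g. an error cookie and the check against it. */
#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return (dma_addr == DMA_ERROR_CODE);
}

#endif /* ___ASM_SPARC_DMA_MAPPING_H */

The blob itself follows.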
#ifndef _ASM_SPARC64_DMA_MAPPING_H
#define _ASM_SPARC64_DMA_MAPPING_H

#include <linux/scatterlist.h>
#include <linux/mm.h>

/* Indirection table for the platform DMA implementation; the sparc64
 * IOMMU code points dma_ops at the ops it provides. */
struct dma_ops {
	void *(*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t flag);
	void (*free_coherent)(struct device *dev, size_t size,
			      void *cpu_addr, dma_addr_t dma_handle);
	dma_addr_t (*map_single)(struct device *dev, void *cpu_addr,
				 size_t size,
				 enum dma_data_direction direction);
	void (*unmap_single)(struct device *dev, dma_addr_t dma_addr,
			     size_t size,
			     enum dma_data_direction direction);
	int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents,
		      enum dma_data_direction direction);
	void (*unmap_sg)(struct device *dev, struct scatterlist *sg,
			 int nhwentries,
			 enum dma_data_direction direction);
	void (*sync_single_for_cpu)(struct device *dev,
				    dma_addr_t dma_handle, size_t size,
				    enum dma_data_direction direction);
	void (*sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg,
				int nelems,
				enum dma_data_direction direction);
};
extern const struct dma_ops *dma_ops;

/* The generic DMA API entry points below are thin wrappers that simply
 * forward to the installed dma_ops. */
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	return dma_ops->alloc_coherent(dev, size, dma_handle, flag);
}

static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	dma_ops->free_coherent(dev, size, cpu_addr, dma_handle);
}

static inline dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
					size_t size,
					enum dma_data_direction direction)
{
	return dma_ops->map_single(dev, cpu_addr, size, direction);
}

static inline void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
				    size_t size,
				    enum dma_data_direction direction)
{
	dma_ops->unmap_single(dev, dma_addr, size, direction);
}

/* Page mappings reuse map_single on the page's kernel virtual address. */
static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
				      unsigned long offset, size_t size,
				      enum dma_data_direction direction)
{
	return dma_ops->map_single(dev, page_address(page) + offset,
				   size, direction);
}

static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
				  size_t size,
				  enum dma_data_direction direction)
{
	dma_ops->unmap_single(dev, dma_address, size, direction);
}

static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
			     int nents, enum dma_data_direction direction)
{
	return dma_ops->map_sg(dev, sg, nents, direction);
}

static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
				int nents, enum dma_data_direction direction)
{
	dma_ops->unmap_sg(dev, sg, nents, direction);
}

static inline void dma_sync_single_for_cpu(struct device *dev,
					   dma_addr_t dma_handle, size_t size,
					   enum dma_data_direction direction)
{
	dma_ops->sync_single_for_cpu(dev, dma_handle, size, direction);
}

static inline void dma_sync_single_for_device(struct device *dev,
					      dma_addr_t dma_handle,
					      size_t size,
					      enum dma_data_direction direction)
{
	/* No flushing needed to sync cpu writes to the device.  */
}

static inline void dma_sync_sg_for_cpu(struct device *dev,
				       struct scatterlist *sg, int nelems,
				       enum dma_data_direction direction)
{
	dma_ops->sync_sg_for_cpu(dev, sg, nelems, direction);
}

static inline void dma_sync_sg_for_device(struct device *dev,
					  struct scatterlist *sg, int nelems,
					  enum dma_data_direction direction)
{
	/* No flushing needed to sync cpu writes to the device.  */
}

#endif /* _ASM_SPARC64_DMA_MAPPING_H */
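To make the intended call pattern of the inlines above concrete, here is a hedged driver-side sketch. The function my_start_io() and its arguments are hypothetical, it uses only helpers defined in this header, and error handling is kept to the bare minimum.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Hypothetical example: one coherent descriptor page plus one streaming
 * payload mapping for a CPU-to-device transfer. */
static int my_start_io(struct device *dev, void *buf, size_t len)
{
	dma_addr_t desc_dma, buf_dma;
	void *desc;

	/* Coherent memory: usable by CPU and device without explicit syncs. */
	desc = dma_alloc_coherent(dev, PAGE_SIZE, &desc_dma, GFP_KERNEL);
	if (!desc)
		return -ENOMEM;

	/* Streaming mapping: ownership of buf passes to the device. */
	buf_dma = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

	/* ... program desc_dma/buf_dma into the hardware and start the DMA ... */

	/* On completion, return ownership to the CPU and release resources. */
	dma_unmap_single(dev, buf_dma, len, DMA_TO_DEVICE);
	dma_free_coherent(dev, PAGE_SIZE, desc, desc_dma);
	return 0;
}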