#ifndef _ASM_DMA_MAPPING_H_
#define _ASM_DMA_MAPPING_H_

/*
 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
 * documentation.
 */

#include <linux/scatterlist.h>
#include <asm/io.h>
#include <asm/swiotlb.h>

extern dma_addr_t bad_dma_address;
extern int iommu_merge;
extern struct device fallback_dev;
extern int panic_on_overflow;
extern int forbid_dac;
extern int force_iommu;

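/*
 * bad_dma_address is the sentinel returned by backends that cannot map
 * a buffer; dma_mapping_error() below compares against it whenever the
 * active dma_mapping_ops does not supply its own mapping_error hook.
 */
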
struct dma_mapping_ops {
	int		(*mapping_error)(dma_addr_t dma_addr);
	void		*(*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp);
	void		(*free_coherent)(struct device *dev, size_t size,
				void *vaddr, dma_addr_t dma_handle);
	dma_addr_t	(*map_single)(struct device *hwdev, phys_addr_t ptr,
				size_t size, int direction);
	/* like map_single, but doesn't check the device mask */
	dma_addr_t	(*map_simple)(struct device *hwdev, phys_addr_t ptr,
				size_t size, int direction);
	void		(*unmap_single)(struct device *dev, dma_addr_t addr,
				size_t size, int direction);
	void		(*sync_single_for_cpu)(struct device *hwdev,
				dma_addr_t dma_handle, size_t size,
				int direction);
	void		(*sync_single_for_device)(struct device *hwdev,
				dma_addr_t dma_handle, size_t size,
				int direction);
	void		(*sync_single_range_for_cpu)(struct device *hwdev,
				dma_addr_t dma_handle, unsigned long offset,
				size_t size, int direction);
	void		(*sync_single_range_for_device)(struct device *hwdev,
				dma_addr_t dma_handle, unsigned long offset,
				size_t size, int direction);
	void		(*sync_sg_for_cpu)(struct device *hwdev,
				struct scatterlist *sg, int nelems,
				int direction);
	void		(*sync_sg_for_device)(struct device *hwdev,
				struct scatterlist *sg, int nelems,
				int direction);
	int		(*map_sg)(struct device *hwdev, struct scatterlist *sg,
				int nents, int direction);
	void		(*unmap_sg)(struct device *hwdev,
				struct scatterlist *sg, int nents,
				int direction);
	int		(*dma_supported)(struct device *hwdev, u64 mask);
	int		is_phys;
};

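/*
 * Each IOMMU backend fills in one of these tables and installs it as
 * dma_ops.  A minimal sketch; the backend name and helper functions
 * below are illustrative, not an in-tree implementation:
 *
 *	static const struct dma_mapping_ops example_dma_ops = {
 *		.map_single	= example_map_single,
 *		.unmap_single	= example_unmap_single,
 *		.map_sg		= example_map_sg,
 *		.is_phys	= 1,
 *	};
 *
 *	dma_ops = &example_dma_ops;
 *
 * The inline wrappers below treat a NULL hook as a no-op, except
 * map_single and map_sg, which are called unconditionally.
 */
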
extern const struct dma_mapping_ops *dma_ops;

static inline int dma_mapping_error(dma_addr_t dma_addr)
{
	if (dma_ops->mapping_error)
		return dma_ops->mapping_error(dma_addr);

	return (dma_addr == bad_dma_address);
}

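/*
 * The DMA API requires the address returned by dma_map_single() and
 * dma_map_page() to be checked with dma_mapping_error() before use;
 * see the usage sketch after dma_unmap_single() below.
 */
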
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

void *dma_alloc_coherent(struct device *dev, size_t size,
			 dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
		       void *vaddr, dma_addr_t dma_handle);

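/*
 * Coherent allocations suit long-lived structures that the CPU and the
 * device both touch, such as descriptor rings.  Illustrative use (the
 * ring size is made up for the example):
 *
 *	dma_addr_t ring_bus;
 *	void *ring = dma_alloc_coherent(dev, PAGE_SIZE, &ring_bus,
 *					GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(dev, PAGE_SIZE, ring, ring_bus);
 */
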
extern int dma_supported(struct device *hwdev, u64 mask);
extern int dma_set_mask(struct device *dev, u64 mask);

static inline dma_addr_t
dma_map_single(struct device *hwdev, void *ptr, size_t size,
	       int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_single(hwdev, virt_to_phys(ptr), size, direction);
}

static inline void
dma_unmap_single(struct device *dev, dma_addr_t addr, size_t size,
		 int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->unmap_single)
		dma_ops->unmap_single(dev, addr, size, direction);
}

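/*
 * Streaming-mapping sketch; "buf" and "len" stand in for a real driver
 * buffer:
 *
 *	dma_addr_t bus = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(bus))
 *		return -EIO;
 *	... let the device read the buffer ...
 *	dma_unmap_single(dev, bus, len, DMA_TO_DEVICE);
 */
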
static inline int
dma_map_sg(struct device *hwdev, struct scatterlist *sg,
	   int nents, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_sg(hwdev, sg, nents, direction);
}

static inline void
dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
	     int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->unmap_sg)
		dma_ops->unmap_sg(hwdev, sg, nents, direction);
}

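/*
 * dma_map_sg() may return fewer entries than it was handed when the
 * backend merges adjacent entries (see iommu_merge); dma_unmap_sg()
 * still takes the original nents.  Sketch, assuming a caller-built
 * sglist[] and a hypothetical program_hw() helper:
 *
 *	int i, count = dma_map_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 *	struct scatterlist *s;
 *
 *	for_each_sg(sglist, s, count, i)
 *		program_hw(i, sg_dma_address(s), sg_dma_len(s));
 *	...
 *	dma_unmap_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 */
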
static inline void
dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
			size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_for_cpu)
		dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
					     direction);
	flush_write_buffers();
}

static inline void
dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
			   size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_for_device)
		dma_ops->sync_single_for_device(hwdev, dma_handle, size,
						direction);
	flush_write_buffers();
}

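/*
 * The sync_*_for_cpu/for_device pairs hand ownership of a streaming
 * mapping back and forth without remapping it: sync for the CPU before
 * the CPU reads a DMA_FROM_DEVICE buffer, then sync for the device
 * before handing the same mapping back to the hardware.
 */
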
static inline void
dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
			      unsigned long offset, size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_range_for_cpu)
		dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset,
						   size, direction);

	flush_write_buffers();
}

static inline void
dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
				 unsigned long offset, size_t size,
				 int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_range_for_device)
		dma_ops->sync_single_range_for_device(hwdev, dma_handle,
						      offset, size, direction);

	flush_write_buffers();
}

static inline void
dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
		    int nelems, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_sg_for_cpu)
		dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);
	flush_write_buffers();
}

static inline void
dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
		       int nelems, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_sg_for_device)
		dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);

	flush_write_buffers();
}

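/*
 * Every sync variant above finishes with flush_write_buffers(), even
 * when the active backend installs no hook, so pending CPU stores are
 * drained before the device is allowed to look at the memory.
 */
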
static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
				      size_t offset, size_t size,
				      int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_single(dev, page_to_phys(page) + offset,
				   size, direction);
}

static inline void dma_unmap_page(struct device *dev, dma_addr_t addr,
				  size_t size, int direction)
{
	dma_unmap_single(dev, addr, size, direction);
}

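/*
 * Page mappings reuse the backend's map_single hook at
 * page_to_phys(page) + offset, so backends need no separate page-level
 * operation; unmapping likewise goes through dma_unmap_single().
 */
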
static inline void
dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction dir)
{
	flush_write_buffers();
}

static inline int dma_get_cache_alignment(void)
{
	/* no easy way to get cache size on all x86, so return the
	 * maximum possible, to be safe */
	return boot_cpu_data.x86_clflush_size;
}

#define dma_is_consistent(d, h)	(1)

#ifdef CONFIG_X86_32
# define ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
struct dma_coherent_mem {
	void		*virt_base;
	u32		device_base;
	int		size;
	int		flags;
	unsigned long	*bitmap;
};

extern int
dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
			    dma_addr_t device_addr, size_t size, int flags);

extern void
dma_release_declared_memory(struct device *dev);

extern void *
dma_mark_declared_memory_occupied(struct device *dev,
				  dma_addr_t device_addr, size_t size);

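/*
 * Illustrative use of the declare-coherent API; bus_addr, device_addr
 * and size are placeholders, and the return-value check reflects the
 * convention that 0 means failure:
 *
 *	if (!dma_declare_coherent_memory(dev, bus_addr, device_addr,
 *					 size, DMA_MEMORY_MAP))
 *		return -ENXIO;
 *	...
 *	dma_release_declared_memory(dev);
 */
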
#endif /* CONFIG_X86_32 */

#endif