1 #ifndef _ASM_DMA_MAPPING_H_
2 #define _ASM_DMA_MAPPING_H_
/*
 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
 * documentation.
 */
9 #include <linux/scatterlist.h>
11 #include <asm/swiotlb.h>
13 struct dma_mapping_ops
{
14 int (*mapping_error
)(dma_addr_t dma_addr
);
15 void* (*alloc_coherent
)(struct device
*dev
, size_t size
,
16 dma_addr_t
*dma_handle
, gfp_t gfp
);
17 void (*free_coherent
)(struct device
*dev
, size_t size
,
18 void *vaddr
, dma_addr_t dma_handle
);
19 dma_addr_t (*map_single
)(struct device
*hwdev
, void *ptr
,
20 size_t size
, int direction
);
21 /* like map_single, but doesn't check the device mask */
22 dma_addr_t (*map_simple
)(struct device
*hwdev
, char *ptr
,
23 size_t size
, int direction
);
24 void (*unmap_single
)(struct device
*dev
, dma_addr_t addr
,
25 size_t size
, int direction
);
26 void (*sync_single_for_cpu
)(struct device
*hwdev
,
27 dma_addr_t dma_handle
, size_t size
,
29 void (*sync_single_for_device
)(struct device
*hwdev
,
30 dma_addr_t dma_handle
, size_t size
,
32 void (*sync_single_range_for_cpu
)(struct device
*hwdev
,
33 dma_addr_t dma_handle
, unsigned long offset
,
34 size_t size
, int direction
);
35 void (*sync_single_range_for_device
)(struct device
*hwdev
,
36 dma_addr_t dma_handle
, unsigned long offset
,
37 size_t size
, int direction
);
38 void (*sync_sg_for_cpu
)(struct device
*hwdev
,
39 struct scatterlist
*sg
, int nelems
,
41 void (*sync_sg_for_device
)(struct device
*hwdev
,
42 struct scatterlist
*sg
, int nelems
,
44 int (*map_sg
)(struct device
*hwdev
, struct scatterlist
*sg
,
45 int nents
, int direction
);
46 void (*unmap_sg
)(struct device
*hwdev
,
47 struct scatterlist
*sg
, int nents
,
49 int (*dma_supported
)(struct device
*hwdev
, u64 mask
);
53 extern const struct dma_mapping_ops
*dma_ops
;
56 # include "dma-mapping_32.h"
58 # include "dma-mapping_64.h"
61 static inline dma_addr_t
62 dma_map_single(struct device
*hwdev
, void *ptr
, size_t size
,
65 BUG_ON(!valid_dma_direction(direction
));
66 return dma_ops
->map_single(hwdev
, ptr
, size
, direction
);
70 dma_unmap_single(struct device
*dev
, dma_addr_t addr
, size_t size
,
73 BUG_ON(!valid_dma_direction(direction
));
74 if (dma_ops
->unmap_single
)
75 dma_ops
->unmap_single(dev
, addr
, size
, direction
);
79 dma_map_sg(struct device
*hwdev
, struct scatterlist
*sg
,
80 int nents
, int direction
)
82 BUG_ON(!valid_dma_direction(direction
));
83 return dma_ops
->map_sg(hwdev
, sg
, nents
, direction
);
87 dma_unmap_sg(struct device
*hwdev
, struct scatterlist
*sg
, int nents
,
90 BUG_ON(!valid_dma_direction(direction
));
91 if (dma_ops
->unmap_sg
)
92 dma_ops
->unmap_sg(hwdev
, sg
, nents
, direction
);
96 dma_sync_single_for_cpu(struct device
*hwdev
, dma_addr_t dma_handle
,
97 size_t size
, int direction
)
99 BUG_ON(!valid_dma_direction(direction
));
100 if (dma_ops
->sync_single_for_cpu
)
101 dma_ops
->sync_single_for_cpu(hwdev
, dma_handle
, size
,
103 flush_write_buffers();