1 #ifndef _LINUX_DMA_MAPPING_H
2 #define _LINUX_DMA_MAPPING_H
4 #include <linux/device.h>
6 #include <linux/dma-attrs.h>
7 #include <linux/dma-direction.h>
8 #include <linux/scatterlist.h>
11 void* (*alloc_coherent
)(struct device
*dev
, size_t size
,
12 dma_addr_t
*dma_handle
, gfp_t gfp
);
13 void (*free_coherent
)(struct device
*dev
, size_t size
,
14 void *vaddr
, dma_addr_t dma_handle
);
15 dma_addr_t (*map_page
)(struct device
*dev
, struct page
*page
,
16 unsigned long offset
, size_t size
,
17 enum dma_data_direction dir
,
18 struct dma_attrs
*attrs
);
19 void (*unmap_page
)(struct device
*dev
, dma_addr_t dma_handle
,
20 size_t size
, enum dma_data_direction dir
,
21 struct dma_attrs
*attrs
);
22 int (*map_sg
)(struct device
*dev
, struct scatterlist
*sg
,
23 int nents
, enum dma_data_direction dir
,
24 struct dma_attrs
*attrs
);
25 void (*unmap_sg
)(struct device
*dev
,
26 struct scatterlist
*sg
, int nents
,
27 enum dma_data_direction dir
,
28 struct dma_attrs
*attrs
);
29 void (*sync_single_for_cpu
)(struct device
*dev
,
30 dma_addr_t dma_handle
, size_t size
,
31 enum dma_data_direction dir
);
32 void (*sync_single_for_device
)(struct device
*dev
,
33 dma_addr_t dma_handle
, size_t size
,
34 enum dma_data_direction dir
);
35 void (*sync_sg_for_cpu
)(struct device
*dev
,
36 struct scatterlist
*sg
, int nents
,
37 enum dma_data_direction dir
);
38 void (*sync_sg_for_device
)(struct device
*dev
,
39 struct scatterlist
*sg
, int nents
,
40 enum dma_data_direction dir
);
41 int (*mapping_error
)(struct device
*dev
, dma_addr_t dma_addr
);
42 int (*dma_supported
)(struct device
*dev
, u64 mask
);
43 int (*set_dma_mask
)(struct device
*dev
, u64 mask
);
47 #define DMA_BIT_MASK(n) (((n) == 64) ? ~0ULL : ((1ULL<<(n))-1))
/* Carrier type for the legacy DMA_nnBIT_MASK macros below; tagged
 * __deprecated so each cast through it can warn at compile time. */
typedef u64 DMA_nnBIT_MASK __deprecated;
/*
 * NOTE: do not use the below macros in new code and do not add new
 * definitions here.
 *
 * Instead, just open-code DMA_BIT_MASK(n) within your driver.
 */
/* Legacy fixed-width masks.  The cast to the __deprecated DMA_nnBIT_MASK
 * typedef is presumably what triggers a deprecation warning on each use;
 * prefer DMA_BIT_MASK(n) directly. */
#define DMA_64BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(64)
#define DMA_48BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(48)
#define DMA_47BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(47)
#define DMA_40BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(40)
#define DMA_39BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(39)
#define DMA_35BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(35)
#define DMA_32BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(32)
#define DMA_31BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(31)
#define DMA_30BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(30)
#define DMA_29BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(29)
#define DMA_28BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(28)
#define DMA_24BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(24)

/* "Device cannot do DMA" sentinel (see is_device_dma_capable()). */
#define DMA_MASK_NONE	0x0ULL
72 static inline int valid_dma_direction(int dma_direction
)
74 return ((dma_direction
== DMA_BIDIRECTIONAL
) ||
75 (dma_direction
== DMA_TO_DEVICE
) ||
76 (dma_direction
== DMA_FROM_DEVICE
));
79 static inline int is_device_dma_capable(struct device
*dev
)
81 return dev
->dma_mask
!= NULL
&& *dev
->dma_mask
!= DMA_MASK_NONE
;
85 #include <asm/dma-mapping.h>
87 #include <asm-generic/dma-mapping-broken.h>
90 static inline u64
dma_get_mask(struct device
*dev
)
92 if (dev
&& dev
->dma_mask
&& *dev
->dma_mask
)
93 return *dev
->dma_mask
;
94 return DMA_BIT_MASK(32);
97 #ifdef ARCH_HAS_DMA_SET_COHERENT_MASK
98 int dma_set_coherent_mask(struct device
*dev
, u64 mask
);
100 static inline int dma_set_coherent_mask(struct device
*dev
, u64 mask
)
102 if (!dma_supported(dev
, mask
))
104 dev
->coherent_dma_mask
= mask
;
/* Mask required to address all memory the platform could hand this
 * device; defined elsewhere (presumably per-arch/bus -- confirm). */
extern u64 dma_get_required_mask(struct device *dev);
111 static inline unsigned int dma_get_max_seg_size(struct device
*dev
)
113 return dev
->dma_parms
? dev
->dma_parms
->max_segment_size
: 65536;
116 static inline unsigned int dma_set_max_seg_size(struct device
*dev
,
119 if (dev
->dma_parms
) {
120 dev
->dma_parms
->max_segment_size
= size
;
126 static inline unsigned long dma_get_seg_boundary(struct device
*dev
)
128 return dev
->dma_parms
?
129 dev
->dma_parms
->segment_boundary_mask
: 0xffffffff;
132 static inline int dma_set_seg_boundary(struct device
*dev
, unsigned long mask
)
134 if (dev
->dma_parms
) {
135 dev
->dma_parms
->segment_boundary_mask
= mask
;
/*
 * dma_get_cache_alignment - alignment DMA buffers need to avoid cache
 * line sharing: ARCH_DMA_MINALIGN when the architecture defines it,
 * otherwise 1 (no special alignment needed).
 *
 * NOTE(review): the body braces, the inner #endif, the "return 1;"
 * fallback and the closing #endif were missing from this chunk; restored
 * per the upstream header.
 */
#ifdef CONFIG_HAS_DMA
static inline int dma_get_cache_alignment(void)
{
#ifdef ARCH_DMA_MINALIGN
	return ARCH_DMA_MINALIGN;
#endif
	return 1;
}
#endif
/* flags for the coherent memory api */
#define DMA_MEMORY_MAP			0x01	/* region is directly CPU-mappable */
#define DMA_MEMORY_IO			0x02	/* region must be accessed as I/O */
#define DMA_MEMORY_INCLUDES_CHILDREN	0x04	/* declaration also covers child devices */
#define DMA_MEMORY_EXCLUSIVE		0x08	/* allocate only from this region */
157 #ifndef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
159 dma_declare_coherent_memory(struct device
*dev
, dma_addr_t bus_addr
,
160 dma_addr_t device_addr
, size_t size
, int flags
)
166 dma_release_declared_memory(struct device
*dev
)
171 dma_mark_declared_memory_occupied(struct device
*dev
,
172 dma_addr_t device_addr
, size_t size
)
174 return ERR_PTR(-EBUSY
);
/*
 * Managed ("dmam_") allocation helpers -- presumably devres-backed
 * counterparts of dma_alloc_coherent() and friends, released
 * automatically on driver detach (confirm in drivers/base/dma-mapping.c).
 */
extern void *dmam_alloc_coherent(struct device *dev, size_t size,
				 dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
			       dma_addr_t dma_handle);
extern void *dmam_alloc_noncoherent(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_noncoherent(struct device *dev, size_t size, void *vaddr,
				  dma_addr_t dma_handle);
189 #ifdef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
190 extern int dmam_declare_coherent_memory(struct device
*dev
, dma_addr_t bus_addr
,
191 dma_addr_t device_addr
, size_t size
,
193 extern void dmam_release_declared_memory(struct device
*dev
);
194 #else /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
195 static inline int dmam_declare_coherent_memory(struct device
*dev
,
196 dma_addr_t bus_addr
, dma_addr_t device_addr
,
197 size_t size
, gfp_t gfp
)
202 static inline void dmam_release_declared_memory(struct device
*dev
)
205 #endif /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
#ifndef CONFIG_HAVE_DMA_ATTRS
/* Architectures without dma_attrs support: forward the *_attrs variants
 * to the plain calls, silently dropping the attrs argument. */
#define dma_map_single_attrs(dev, cpu_addr, size, dir, attrs) \
	dma_map_single(dev, cpu_addr, size, dir)

#define dma_unmap_single_attrs(dev, dma_addr, size, dir, attrs) \
	dma_unmap_single(dev, dma_addr, size, dir)

#define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_map_sg(dev, sgl, nents, dir)

#define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_unmap_sg(dev, sgl, nents, dir)

#endif /* CONFIG_HAVE_DMA_ATTRS */
224 #ifdef CONFIG_NEED_DMA_MAP_STATE
225 #define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME) dma_addr_t ADDR_NAME
226 #define DEFINE_DMA_UNMAP_LEN(LEN_NAME) __u32 LEN_NAME
227 #define dma_unmap_addr(PTR, ADDR_NAME) ((PTR)->ADDR_NAME)
228 #define dma_unmap_addr_set(PTR, ADDR_NAME, VAL) (((PTR)->ADDR_NAME) = (VAL))
229 #define dma_unmap_len(PTR, LEN_NAME) ((PTR)->LEN_NAME)
230 #define dma_unmap_len_set(PTR, LEN_NAME, VAL) (((PTR)->LEN_NAME) = (VAL))
232 #define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME)
233 #define DEFINE_DMA_UNMAP_LEN(LEN_NAME)
234 #define dma_unmap_addr(PTR, ADDR_NAME) (0)
235 #define dma_unmap_addr_set(PTR, ADDR_NAME, VAL) do { } while (0)
236 #define dma_unmap_len(PTR, LEN_NAME) (0)
237 #define dma_unmap_len_set(PTR, LEN_NAME, VAL) do { } while (0)