DMA-API: sound: fix dma mask handling in a lot of drivers
[linux-2.6.git] / sound / soc / samsung / dma.c
blob fe2748b494d4cd38c6db6ae4409d38f56819ff3f
/*
 * dma.c  --  ALSA SoC Audio Layer
 *
 * (c) 2006 Wolfson Microelectronics PLC.
 * Graeme Gregory graeme.gregory@wolfsonmicro.com or linux@wolfsonmicro.com
 *
 * Copyright 2004-2005 Simtec Electronics
 *	http://armlinux.simtec.co.uk/
 *	Ben Dooks <ben@simtec.co.uk>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */

#include <linux/slab.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>

#include <sound/soc.h>
#include <sound/pcm_params.h>

#include <asm/dma.h>
#include <mach/hardware.h>
#include <mach/dma.h>

#include "dma.h"

#define ST_RUNNING		(1<<0)
#define ST_OPENED		(1<<1)

static const struct snd_pcm_hardware dma_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_U16_LE |
		SNDRV_PCM_FMTBIT_U8 |
		SNDRV_PCM_FMTBIT_S8,
	.channels_min = 2,
	.channels_max = 2,
	.buffer_bytes_max = 128*1024,
	.period_bytes_min = PAGE_SIZE,
	.period_bytes_max = PAGE_SIZE*2,
	.periods_min = 2,
	.periods_max = 128,
	.fifo_size = 32,
};

struct runtime_data {
	spinlock_t lock;
	int state;
	unsigned int dma_loaded;
	unsigned int dma_period;
	dma_addr_t dma_start;
	dma_addr_t dma_pos;
	dma_addr_t dma_end;
	struct s3c_dma_params *params;
};

static void audio_buffdone(void *data);

/* dma_enqueue
 *
 * place a dma buffer onto the queue for the dma system
 * to handle.
 */
static void dma_enqueue(struct snd_pcm_substream *substream)
{
	struct runtime_data *prtd = substream->runtime->private_data;
	dma_addr_t pos = prtd->dma_pos;
	unsigned int limit;
	struct samsung_dma_prep dma_info;

	pr_debug("Entered %s\n", __func__);

	limit = (prtd->dma_end - prtd->dma_start) / prtd->dma_period;

	pr_debug("%s: loaded %d, limit %d\n",
			__func__, prtd->dma_loaded, limit);

	dma_info.cap = (samsung_dma_has_circular() ? DMA_CYCLIC : DMA_SLAVE);
	dma_info.direction =
		(substream->stream == SNDRV_PCM_STREAM_PLAYBACK
		? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM);
	dma_info.fp = audio_buffdone;
	dma_info.fp_param = substream;
	dma_info.period = prtd->dma_period;
	dma_info.len = prtd->dma_period*limit;

	if (dma_info.cap == DMA_CYCLIC) {
		dma_info.buf = pos;
		prtd->params->ops->prepare(prtd->params->ch, &dma_info);
		prtd->dma_loaded += limit;
		return;
	}

	while (prtd->dma_loaded < limit) {
		pr_debug("dma_loaded: %d\n", prtd->dma_loaded);

		if ((pos + dma_info.period) > prtd->dma_end) {
			dma_info.period = prtd->dma_end - pos;
			pr_debug("%s: corrected dma len %ld\n",
					__func__, dma_info.period);
		}

		dma_info.buf = pos;
		prtd->params->ops->prepare(prtd->params->ch, &dma_info);

		prtd->dma_loaded++;
		pos += prtd->dma_period;
		if (pos >= prtd->dma_end)
			pos = prtd->dma_start;
	}

	prtd->dma_pos = pos;
}
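
/* audio_buffdone
 *
 * DMA completion callback: advance the software pointer by one period
 * (wrapping at the buffer end), report the elapsed period to ALSA and,
 * when the controller has no cyclic support, queue the next buffer by
 * hand under the runtime_data lock.
 */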
static void audio_buffdone(void *data)
{
	struct snd_pcm_substream *substream = data;
	struct runtime_data *prtd = substream->runtime->private_data;

	pr_debug("Entered %s\n", __func__);

	if (prtd->state & ST_RUNNING) {
		prtd->dma_pos += prtd->dma_period;
		if (prtd->dma_pos >= prtd->dma_end)
			prtd->dma_pos = prtd->dma_start;

		if (substream)
			snd_pcm_period_elapsed(substream);

		spin_lock(&prtd->lock);
		if (!samsung_dma_has_circular()) {
			prtd->dma_loaded--;
			dma_enqueue(substream);
		}
		spin_unlock(&prtd->lock);
	}
}
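
/* dma_hw_params
 *
 * hw_params callback: on the first call, look up the cpu_dai's DMA data,
 * request and configure a DMA channel for the stream direction, then
 * record the buffer start/end and period size used by dma_enqueue().
 */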
static int dma_hw_params(struct snd_pcm_substream *substream,
	struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct runtime_data *prtd = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	unsigned long totbytes = params_buffer_bytes(params);
	struct s3c_dma_params *dma =
		snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);
	struct samsung_dma_req req;
	struct samsung_dma_config config;

	pr_debug("Entered %s\n", __func__);

	/* return if this is a bufferless transfer e.g.
	 * codec <--> BT codec or GSM modem -- lg FIXME */
	if (!dma)
		return 0;

	/* this may get called several times by oss emulation
	 * with different params -HW */
	if (prtd->params == NULL) {
		/* prepare DMA */
		prtd->params = dma;

		pr_debug("params %p, client %p, channel %d\n", prtd->params,
			prtd->params->client, prtd->params->channel);

		prtd->params->ops = samsung_dma_get_ops();

		req.cap = (samsung_dma_has_circular() ?
			DMA_CYCLIC : DMA_SLAVE);
		req.client = prtd->params->client;
		config.direction =
			(substream->stream == SNDRV_PCM_STREAM_PLAYBACK
			? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM);
		config.width = prtd->params->dma_size;
		config.fifo = prtd->params->dma_addr;
		prtd->params->ch = prtd->params->ops->request(
				prtd->params->channel, &req, rtd->cpu_dai->dev,
				prtd->params->ch_name);
		if (!prtd->params->ch) {
			pr_err("Failed to allocate DMA channel\n");
			return -ENXIO;
		}
		prtd->params->ops->config(prtd->params->ch, &config);
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	runtime->dma_bytes = totbytes;

	spin_lock_irq(&prtd->lock);
	prtd->dma_loaded = 0;
	prtd->dma_period = params_period_bytes(params);
	prtd->dma_start = runtime->dma_addr;
	prtd->dma_pos = prtd->dma_start;
	prtd->dma_end = prtd->dma_start + totbytes;
	spin_unlock_irq(&prtd->lock);

	return 0;
}
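
/* dma_hw_free
 *
 * hw_free callback: drop the runtime buffer reference and, if a DMA
 * channel was requested in hw_params, flush and release it.
 */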
static int dma_hw_free(struct snd_pcm_substream *substream)
{
	struct runtime_data *prtd = substream->runtime->private_data;

	pr_debug("Entered %s\n", __func__);

	snd_pcm_set_runtime_buffer(substream, NULL);

	if (prtd->params) {
		prtd->params->ops->flush(prtd->params->ch);
		prtd->params->ops->release(prtd->params->ch,
					prtd->params->client);
		prtd->params = NULL;
	}

	return 0;
}
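
/* dma_prepare
 *
 * prepare callback: flush the DMA channel, rewind the software pointer
 * to the start of the buffer and (re)queue the DMA buffers.
 */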
static int dma_prepare(struct snd_pcm_substream *substream)
{
	struct runtime_data *prtd = substream->runtime->private_data;
	int ret = 0;

	pr_debug("Entered %s\n", __func__);

	/* return if this is a bufferless transfer e.g.
	 * codec <--> BT codec or GSM modem -- lg FIXME */
	if (!prtd->params)
		return 0;

	/* flush the DMA channel */
	prtd->params->ops->flush(prtd->params->ch);

	prtd->dma_loaded = 0;
	prtd->dma_pos = prtd->dma_start;

	/* enqueue dma buffers */
	dma_enqueue(substream);

	return ret;
}
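
/* dma_trigger
 *
 * trigger callback: start or stop the DMA channel and track the running
 * state under the runtime_data spinlock.
 */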
static int dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct runtime_data *prtd = substream->runtime->private_data;
	int ret = 0;

	pr_debug("Entered %s\n", __func__);

	spin_lock(&prtd->lock);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		prtd->state |= ST_RUNNING;
		prtd->params->ops->trigger(prtd->params->ch);
		break;

	case SNDRV_PCM_TRIGGER_STOP:
		prtd->state &= ~ST_RUNNING;
		prtd->params->ops->stop(prtd->params->ch);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	spin_unlock(&prtd->lock);

	return ret;
}
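
/* dma_pointer
 *
 * pointer callback: report the current position as the offset of dma_pos
 * from the start of the buffer, converted to frames.
 */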
static snd_pcm_uframes_t
dma_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct runtime_data *prtd = runtime->private_data;
	unsigned long res;

	pr_debug("Entered %s\n", __func__);

	res = prtd->dma_pos - prtd->dma_start;

	pr_debug("Pointer offset: %lu\n", res);

	/* we seem to be getting the odd error from the pcm library due
	 * to out-of-bounds pointers. this is maybe due to the dma engine
	 * not having loaded the new values for the channel before being
	 * called... (todo - fix )
	 */

	if (res >= snd_pcm_lib_buffer_bytes(substream)) {
		if (res == snd_pcm_lib_buffer_bytes(substream))
			res = 0;
	}

	return bytes_to_frames(substream->runtime, res);
}
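
/* dma_open
 *
 * open callback: apply the dma_hardware capabilities and the
 * integer-periods constraint, then allocate the per-stream runtime_data.
 */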
static int dma_open(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct runtime_data *prtd;

	pr_debug("Entered %s\n", __func__);

	snd_pcm_hw_constraint_integer(runtime, SNDRV_PCM_HW_PARAM_PERIODS);
	snd_soc_set_runtime_hwparams(substream, &dma_hardware);

	prtd = kzalloc(sizeof(struct runtime_data), GFP_KERNEL);
	if (prtd == NULL)
		return -ENOMEM;

	spin_lock_init(&prtd->lock);

	runtime->private_data = prtd;
	return 0;
}
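
/* dma_close
 *
 * close callback: free the per-stream runtime_data allocated in dma_open().
 */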
static int dma_close(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct runtime_data *prtd = runtime->private_data;

	pr_debug("Entered %s\n", __func__);

	if (!prtd)
		pr_debug("dma_close called with prtd == NULL\n");

	kfree(prtd);

	return 0;
}
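
/* dma_mmap
 *
 * mmap callback: map the write-combined DMA buffer into userspace.
 */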
static int dma_mmap(struct snd_pcm_substream *substream,
	struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	pr_debug("Entered %s\n", __func__);

	return dma_mmap_writecombine(substream->pcm->card->dev, vma,
				     runtime->dma_area,
				     runtime->dma_addr,
				     runtime->dma_bytes);
}

static struct snd_pcm_ops dma_ops = {
	.open = dma_open,
	.close = dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = dma_hw_params,
	.hw_free = dma_hw_free,
	.prepare = dma_prepare,
	.trigger = dma_trigger,
	.pointer = dma_pointer,
	.mmap = dma_mmap,
};
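
/* preallocate_dma_buffer
 *
 * allocate a write-combined DMA buffer of buffer_bytes_max for the given
 * stream and attach it to the substream as its preallocated buffer.
 */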
static int preallocate_dma_buffer(struct snd_pcm *pcm, int stream)
{
	struct snd_pcm_substream *substream = pcm->streams[stream].substream;
	struct snd_dma_buffer *buf = &substream->dma_buffer;
	size_t size = dma_hardware.buffer_bytes_max;

	pr_debug("Entered %s\n", __func__);

	buf->dev.type = SNDRV_DMA_TYPE_DEV;
	buf->dev.dev = pcm->card->dev;
	buf->private_data = NULL;
	buf->area = dma_alloc_writecombine(pcm->card->dev, size,
					   &buf->addr, GFP_KERNEL);
	if (!buf->area)
		return -ENOMEM;
	buf->bytes = size;
	return 0;
}
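
/* dma_free_dma_buffers
 *
 * pcm_free callback: release the preallocated playback and capture
 * buffers when the PCM device is freed.
 */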
static void dma_free_dma_buffers(struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	struct snd_dma_buffer *buf;
	int stream;

	pr_debug("Entered %s\n", __func__);

	for (stream = 0; stream < 2; stream++) {
		substream = pcm->streams[stream].substream;
		if (!substream)
			continue;

		buf = &substream->dma_buffer;
		if (!buf->area)
			continue;

		dma_free_writecombine(pcm->card->dev, buf->bytes,
				      buf->area, buf->addr);
		buf->area = NULL;
	}
}
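
/* dma_new
 *
 * pcm_new callback: coerce the card device's streaming and coherent DMA
 * masks to 32 bits, then preallocate buffers for whichever playback and
 * capture substreams exist.
 */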
static int dma_new(struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	int ret;

	pr_debug("Entered %s\n", __func__);

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) {
		ret = preallocate_dma_buffer(pcm,
			SNDRV_PCM_STREAM_PLAYBACK);
		if (ret)
			goto out;
	}

	if (pcm->streams[SNDRV_PCM_STREAM_CAPTURE].substream) {
		ret = preallocate_dma_buffer(pcm,
			SNDRV_PCM_STREAM_CAPTURE);
		if (ret)
			goto out;
	}
out:
	return ret;
}

static struct snd_soc_platform_driver samsung_asoc_platform = {
	.ops = &dma_ops,
	.pcm_new = dma_new,
	.pcm_free = dma_free_dma_buffers,
};

int samsung_asoc_dma_platform_register(struct device *dev)
{
	return snd_soc_register_platform(dev, &samsung_asoc_platform);
}
EXPORT_SYMBOL_GPL(samsung_asoc_dma_platform_register);

void samsung_asoc_dma_platform_unregister(struct device *dev)
{
	snd_soc_unregister_platform(dev);
}
EXPORT_SYMBOL_GPL(samsung_asoc_dma_platform_unregister);

MODULE_AUTHOR("Ben Dooks, <ben@simtec.co.uk>");
MODULE_DESCRIPTION("Samsung ASoC DMA Driver");
MODULE_LICENSE("GPL");