Blackfin arch: implement support for /proc/dma
/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/proc_fs.h>
#include <linux/sched.h>
#include <linux/seq_file.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>
/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
/*------------------------------------------------------------------------------
 * Set the Buffer Clear bit in the Configuration register of a specific DMA
 * channel. This will stop the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
	dma_ch[channel].regs->cfg |= RESTART;
	SSYNC();
	dma_ch[channel].regs->cfg &= ~RESTART;
}
static int __init blackfin_dma_init(void)
{
	int i;

	printk(KERN_INFO "Blackfin DMA Controller\n");

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
		dma_ch[i].chan_status = DMA_CHANNEL_FREE;
		dma_ch[i].regs = dma_io_base_addr[i];
		mutex_init(&(dma_ch[i].dmalock));
	}
	/* Mark MEMDMA Channel 0 as requested since we're using it internally */
	request_dma(CH_MEM_STREAM0_DEST, "Blackfin dma_memcpy");
	request_dma(CH_MEM_STREAM0_SRC, "Blackfin dma_memcpy");

#if defined(CONFIG_DEB_DMA_URGENT)
	bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
			 | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif

	return 0;
}
arch_initcall(blackfin_dma_init);
#ifdef CONFIG_PROC_FS

static int proc_dma_show(struct seq_file *m, void *v)
{
	int i;

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; ++i)
		if (dma_ch[i].chan_status != DMA_CHANNEL_FREE)
			seq_printf(m, "%2d: %s\n", i, dma_ch[i].device_id);

	return 0;
}

static int proc_dma_open(struct inode *inode, struct file *file)
{
	return single_open(file, proc_dma_show, NULL);
}

static const struct file_operations proc_dma_operations = {
	.open		= proc_dma_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= single_release,
};

static int __init proc_dma_init(void)
{
	return proc_create("dma", 0, NULL, &proc_dma_operations) != NULL;
}
late_initcall(proc_dma_init);
#endif
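
/*
 * Reading /proc/dma lists every channel that is currently requested or
 * enabled, one "number: device_id" line per entry (see proc_dma_show above).
 * With only the internal memcpy channels claimed, the output would look
 * roughly like this; the channel numbers vary by part:
 *
 *   $ cat /proc/dma
 *   12: Blackfin dma_memcpy
 *   13: Blackfin dma_memcpy
 */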
/*------------------------------------------------------------------------------
 * Request the specified DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
	pr_debug("request_dma() : BEGIN \n");

	if (device_id == NULL)
		printk(KERN_WARNING "request_dma(%u): no device_id given\n", channel);

#if defined(CONFIG_BF561) && ANOMALY_05000182
	if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
		if (get_cclk() > 500000000) {
			printk(KERN_WARNING
			       "Request IMDMA failed due to ANOMALY 05000182\n");
			return -EFAULT;
		}
	}
#endif

	mutex_lock(&(dma_ch[channel].dmalock));

	if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
	    || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
		mutex_unlock(&(dma_ch[channel].dmalock));
		pr_debug("DMA CHANNEL IN USE \n");
		return -EBUSY;
	} else {
		dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
		pr_debug("DMA CHANNEL IS ALLOCATED \n");
	}

	mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
	if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
		unsigned int per_map;
		per_map = dma_ch[channel].regs->peripheral_map & 0xFFF;
		if (strncmp(device_id, "BFIN_UART", 9) == 0)
			dma_ch[channel].regs->peripheral_map = per_map |
				((channel - CH_UART2_RX + 0xC) << 12);
		else
			dma_ch[channel].regs->peripheral_map = per_map |
				((channel - CH_UART2_RX + 0x6) << 12);
	}
#endif

	dma_ch[channel].device_id = device_id;
	dma_ch[channel].irq_callback = NULL;

	/* This is enforced as a restriction: a driver has to request the DMA
	 * channel before doing any operations on the descriptor/channel.
	 */
	pr_debug("request_dma() : END \n");
	return channel;
}
EXPORT_SYMBOL(request_dma);
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	if (callback != NULL) {
		int ret_val;
		dma_ch[channel].irq = channel2irq(channel);
		dma_ch[channel].data = data;

		ret_val =
		    request_irq(dma_ch[channel].irq, callback, IRQF_DISABLED,
				dma_ch[channel].device_id, data);
		if (ret_val) {
			printk(KERN_NOTICE
			       "Request irq in DMA engine failed.\n");
			return -EPERM;
		}
		dma_ch[channel].irq_callback = callback;
	}
	return 0;
}
EXPORT_SYMBOL(set_dma_callback);
void free_dma(unsigned int channel)
{
	pr_debug("freedma() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	/* Halt the DMA */
	disable_dma(channel);
	clear_dma_buffer(channel);

	if (dma_ch[channel].irq_callback != NULL)
		free_irq(dma_ch[channel].irq, dma_ch[channel].data);

	/* Clear the DMA Variable in the Channel */
	mutex_lock(&(dma_ch[channel].dmalock));
	dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
	mutex_unlock(&(dma_ch[channel].dmalock));

	pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);
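
/*
 * Illustrative sketch (not compiled): how a peripheral driver might claim a
 * channel, attach a completion callback and release it again.  The channel
 * CH_SPORT0_RX, the handler and the "example driver" id are assumptions for
 * the example only; the callback uses the usual irq-handler signature that
 * set_dma_callback() passes straight to request_irq().
 */
#if 0
static irqreturn_t example_dma_handler(int irq, void *dev_id)
{
	clear_dma_irqstat(CH_SPORT0_RX);	/* ack DMA_DONE/DMA_ERR */
	return IRQ_HANDLED;
}

static int example_claim_channel(void)
{
	int ret = request_dma(CH_SPORT0_RX, "example driver");
	if (ret < 0)
		return ret;		/* -EBUSY if the channel is taken */

	ret = set_dma_callback(CH_SPORT0_RX, example_dma_handler, NULL);
	if (ret) {
		free_dma(CH_SPORT0_RX);
		return ret;
	}
	return 0;
}

static void example_release_channel(void)
{
	free_dma(CH_SPORT0_RX);		/* halts the channel and frees the IRQ */
}
#endif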
void dma_enable_irq(unsigned int channel)
{
	pr_debug("dma_enable_irq() : BEGIN \n");
	enable_irq(dma_ch[channel].irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
	pr_debug("dma_disable_irq() : BEGIN \n");
	disable_irq(dma_ch[channel].irq);
}
EXPORT_SYMBOL(dma_disable_irq);
int dma_channel_active(unsigned int channel)
{
	if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
		return 0;
	} else {
		return 1;
	}
}
EXPORT_SYMBOL(dma_channel_active);
/*------------------------------------------------------------------------------
 * Stop the specified DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
	pr_debug("stop_dma() : BEGIN \n");
	dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clear the enable bit */
	SSYNC();
	dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
	/* Needs to be enabled later */
	pr_debug("stop_dma() : END \n");
	return;
}
EXPORT_SYMBOL(disable_dma);
void enable_dma(unsigned int channel)
{
	pr_debug("enable_dma() : BEGIN \n");
	dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;

	dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
	pr_debug("enable_dma() : END \n");
	return;
}
EXPORT_SYMBOL(enable_dma);
/*------------------------------------------------------------------------------
 * Set the Start Address register for the specified DMA channel.
 * This function can be used for register-based DMA to set up the start address.
 * addr: starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_start_addr() : BEGIN \n");
	dma_ch[channel].regs->start_addr = addr;
	pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);
void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_next_desc_addr() : BEGIN \n");
	dma_ch[channel].regs->next_desc_ptr = addr;
	pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_curr_desc_addr() : BEGIN \n");
	dma_ch[channel].regs->curr_desc_ptr = addr;
	pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);
void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	dma_ch[channel].regs->x_count = x_count;
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	dma_ch[channel].regs->y_count = y_count;
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
	dma_ch[channel].regs->x_modify = x_modify;
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
	dma_ch[channel].regs->y_modify = y_modify;
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
	dma_ch[channel].regs->cfg = config;
}
EXPORT_SYMBOL(set_dma_config);
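
/*
 * Illustrative sketch (not compiled): a minimal register-based (non-descriptor)
 * receive setup using the setters above.  The channel, buffer and word count
 * are hypothetical; the config bits are the ones used elsewhere in this file.
 */
#if 0
static void example_start_rx(unsigned int channel, void *buf, unsigned short nr_words)
{
	set_dma_start_addr(channel, (unsigned long)buf);
	set_dma_x_count(channel, nr_words);		/* number of 16-bit elements */
	set_dma_x_modify(channel, 2);			/* advance 2 bytes per element */
	set_dma_config(channel, WNR | WDSIZE_16 | DI_EN); /* write memory, irq on done */
	enable_dma(channel);				/* sets DMAEN, marks channel enabled */
}
#endif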
unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	unsigned short config;

	config =
	    ((direction << 1) | (width << 2) | (dma_mode << 4) |
	     (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
	return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
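
/*
 * For reference, the shifts above place the arguments in the CONFIG word as:
 * direction at bit 1, width at bits 2-3, dma_mode at bit 4, syncmode at bit 5,
 * intr_mode at bits 6-7 and flow_mode at bits 12 and up.  Callers can equally
 * build the value directly from bit macros such as WNR, WDSIZE_16 and DI_EN,
 * as the memcpy helpers below do.
 */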
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
	dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
	dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->curr_addr_ptr = addr;
}
EXPORT_SYMBOL(set_dma_curr_addr);
/*------------------------------------------------------------------------------
 * Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 * Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
	dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);
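
/*
 * Illustrative sketch (not compiled): polling for completion instead of using
 * an interrupt callback, mirroring the busy-wait done by the memcpy helpers
 * below.  The channel is hypothetical.
 */
#if 0
static void example_wait_for_done(unsigned int channel)
{
	while (!(get_dma_curr_irqstat(channel) & DMA_DONE))
		cpu_relax();

	clear_dma_irqstat(channel);	/* write-1-to-clear DMA_DONE/DMA_ERR */
	disable_dma(channel);
}
#endif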
/*------------------------------------------------------------------------------
 * Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 * Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);
unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
	int i;

#ifdef CONFIG_BF561	/* IMDMA channels don't have a PERIPHERAL_MAP */
	for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
		if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
			printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
			return -EBUSY;
		}

		dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
	}

	return 0;
}

void blackfin_dma_resume(void)
{
	int i;

#ifdef CONFIG_BF561	/* IMDMA channels don't have a PERIPHERAL_MAP */
	for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
		dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned,  0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
	unsigned long flags;

	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	local_irq_restore(flags);

	return dest;
}
void *dma_memcpy(void *dest, const void *src, size_t size)
{
	size_t bulk;
	size_t rest;
	void *addr;

	bulk = (size >> 16) << 16;
	rest = size - bulk;
	if (bulk)
		__dma_memcpy(dest, src, bulk);
	addr = __dma_memcpy(dest + bulk, src + bulk, rest);
	return addr;
}
EXPORT_SYMBOL(dma_memcpy);
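
/*
 * Illustrative sketch (not compiled): dma_memcpy() has memcpy() semantics but
 * performs the copy over MDMA stream 0 with interrupts disabled; anything
 * above 64K is split into a 2D bulk transfer plus a remainder, as above.
 * The 128K size here is arbitrary.
 */
#if 0
static void example_copy_frame(void *dst, const void *src)
{
	dma_memcpy(dst, src, 128 * 1024);	/* returns dst, like memcpy() */
}
#endif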
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
	void *addr;
	addr = dma_memcpy(dest, src, size);
	return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);
void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
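
/*
 * Illustrative sketch (not compiled): the dma_outs{b,w,l} and dma_ins{b,w,l}
 * helpers stream a buffer to or from a fixed peripheral address (note the
 * x_modify of 0 on the peripheral side above).  The FIFO address and buffer
 * here are hypothetical.
 */
#if 0
static void example_fill_fifo(unsigned long fifo_addr, const unsigned char *buf,
			      unsigned short len)
{
	dma_outsb(fifo_addr, buf, len);		/* push len bytes into the FIFO */
}
#endif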
void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len);

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);
void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(short));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);
void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(short));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);
void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(long));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);
void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(long));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);