/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
/*------------------------------------------------------------------------------
 * Set the Buffer Clear bit in the Configuration register of specific DMA
 * channel. This will stop the descriptor based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
	dma_ch[channel].regs->cfg |= RESTART;
	SSYNC();
	dma_ch[channel].regs->cfg &= ~RESTART;
	SSYNC();
}

static int __init blackfin_dma_init(void)
{
	int i;

	printk(KERN_INFO "Blackfin DMA Controller\n");

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
		dma_ch[i].chan_status = DMA_CHANNEL_FREE;
		dma_ch[i].regs = dma_io_base_addr[i];
		mutex_init(&(dma_ch[i].dmalock));
	}
	/* Mark MEMDMA Channel 0 as requested since we're using it internally */
	dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
	dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
	bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
			       | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
	return 0;
}

arch_initcall(blackfin_dma_init);
/*------------------------------------------------------------------------------
 * Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
	pr_debug("request_dma() : BEGIN \n");

#if defined(CONFIG_BF561) && ANOMALY_05000182
	if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
		if (get_cclk() > 500000000) {
			printk(KERN_WARNING
			       "Request IMDMA failed due to ANOMALY 05000182\n");
			return -EFAULT;
		}
	}
#endif

	mutex_lock(&(dma_ch[channel].dmalock));

	if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
	    || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
		mutex_unlock(&(dma_ch[channel].dmalock));
		pr_debug("DMA CHANNEL IN USE \n");
		return -EBUSY;
	} else {
		dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
		pr_debug("DMA CHANNEL IS ALLOCATED \n");
	}

	mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
	if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
		unsigned int per_map;
		per_map = dma_ch[channel].regs->peripheral_map & 0xFFF;
		if (strncmp(device_id, "BFIN_UART", 9) == 0)
			dma_ch[channel].regs->peripheral_map = per_map |
				((channel - CH_UART2_RX + 0xC) << 12);
		else
			dma_ch[channel].regs->peripheral_map = per_map |
				((channel - CH_UART2_RX + 0x6) << 12);
	}
#endif

	dma_ch[channel].device_id = device_id;
	dma_ch[channel].irq_callback = NULL;

	/* This is to be enabled by putting a restriction -
	 * you have to request DMA, before doing any operations on
	 * descriptor/channel
	 */
	pr_debug("request_dma() : END \n");
	return channel;
}
EXPORT_SYMBOL(request_dma);
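
/*
 * Example (a minimal sketch, not part of this file): a driver claims its
 * peripheral channel before touching any of the set_dma_* helpers below.
 * The channel name CH_SPORT0_RX and the error handling are illustrative
 * assumptions only.
 *
 *	if (request_dma(CH_SPORT0_RX, "MY_SPORT_DRIVER") < 0) {
 *		printk(KERN_ERR "my_sport: DMA channel in use\n");
 *		return -EBUSY;
 *	}
 */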
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
	int ret_irq = 0;

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	if (callback != NULL) {
		int ret_val;
		ret_irq = channel2irq(channel);

		dma_ch[channel].data = data;

		ret_val =
		    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
				dma_ch[channel].device_id, data);
		if (ret_val) {
			printk(KERN_NOTICE
			       "Request irq in DMA engine failed.\n");
			return -EPERM;
		}
		dma_ch[channel].irq_callback = callback;
	}
	return 0;
}
EXPORT_SYMBOL(set_dma_callback);
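
/*
 * Example (sketch): registering a completion handler.  The callback is handed
 * straight to request_irq() above, so an ordinary (int irq, void *dev_id)
 * handler is assumed to match dma_interrupt_t; my_dma_done, my_dev and the
 * channel name are hypothetical.
 *
 *	static irqreturn_t my_dma_done(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_SPORT0_RX);
 *		return IRQ_HANDLED;
 *	}
 *
 *	set_dma_callback(CH_SPORT0_RX, my_dma_done, my_dev);
 */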
void free_dma(unsigned int channel)
{
	int ret_irq;

	pr_debug("freedma() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	/* Halt the DMA */
	disable_dma(channel);
	clear_dma_buffer(channel);

	if (dma_ch[channel].irq_callback != NULL) {
		ret_irq = channel2irq(channel);
		free_irq(ret_irq, dma_ch[channel].data);
	}

	/* Clear the DMA Variable in the Channel */
	mutex_lock(&(dma_ch[channel].dmalock));
	dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
	mutex_unlock(&(dma_ch[channel].dmalock));

	pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_enable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_disable_irq() : BEGIN \n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
	if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
		return 0;
	} else {
		return 1;
	}
}
EXPORT_SYMBOL(dma_channel_active);
/*------------------------------------------------------------------------------
 * Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
	pr_debug("stop_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clear the enable bit */
	SSYNC();
	dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
	/* Needs to be enabled later */
	pr_debug("stop_dma() : END \n");
	return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
	pr_debug("enable_dma() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;

	dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
	SSYNC();
	pr_debug("enable_dma() : END \n");
	return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 * Set the Start Address register for the specific DMA channel.
 * This function can be used for register based DMA
 * to set up the start address.
 * addr: Starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_start_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->start_addr = addr;
	SSYNC();
	pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_next_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->next_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_count = x_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_count = y_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_modify = x_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_modify = y_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg = config;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_config);
unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	unsigned short config;

	config =
	    ((direction << 1) | (width << 2) | (dma_mode << 4) |
	     (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
	return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
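
/*
 * Example (sketch): composing a config word and programming a plain
 * register-based 1D receive.  The symbolic argument values (DIR_READ,
 * DMA_FLOW_STOP, INTR_ON_BUF, DIMENSION_LINEAR, DATA_SIZE_16,
 * DMA_SYNC_RESTART) plus the channel and buffer names are assumptions taken
 * from the usual <asm/dma.h> conventions - verify them against your tree
 * before copying.  The x_modify of 2 matches the 16-bit element width.
 *
 *	unsigned short cfg;
 *
 *	cfg = set_bfin_dma_config(DIR_READ, DMA_FLOW_STOP, INTR_ON_BUF,
 *				  DIMENSION_LINEAR, DATA_SIZE_16,
 *				  DMA_SYNC_RESTART);
 *	set_dma_start_addr(CH_SPORT0_RX, (unsigned long)rx_buf);
 *	set_dma_x_count(CH_SPORT0_RX, nr_samples);
 *	set_dma_x_modify(CH_SPORT0_RX, 2);
 *	set_dma_config(CH_SPORT0_RX, cfg);
 *	enable_dma(CH_SPORT0_RX);
 */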
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

	dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

	SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
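
/*
 * Example (sketch): descriptor (scatter-gather) mode.  set_dma_sg() above ORs
 * the element count into bits 8-11 of the channel's CONFIG register and points
 * NEXT_DESC_PTR at the array.  The struct dmasg field names, the two-entry
 * list, the buffers and the channel below are illustrative assumptions -
 * check them against <asm/dma.h> before relying on them.
 *
 *	static struct dmasg desc[2];
 *
 *	desc[0].start_addr = (unsigned long)buf0;
 *	desc[0].cfg = cfg_word;
 *	desc[0].x_count = count0;
 *	desc[0].x_modify = 1;
 *	desc[0].next_desc_addr = (unsigned long)&desc[1];
 *	desc[1].start_addr = (unsigned long)buf1;
 *	desc[1].cfg = cfg_word | DI_EN;
 *	desc[1].x_count = count1;
 *	desc[1].x_modify = 1;
 *
 *	set_dma_sg(CH_SPORT0_TX, desc, 2);
 */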
void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->curr_addr_ptr = addr;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 * Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 * Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));
	dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);
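
/*
 * Example (sketch): polling for completion instead of taking an interrupt.
 * DMA_DONE and DMA_ERR are the same IRQ_STATUS bits the MDMA helpers below
 * use; the channel name is an illustrative assumption.
 *
 *	while (!(get_dma_curr_irqstat(CH_SPORT0_RX) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(CH_SPORT0_RX);
 */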
/*------------------------------------------------------------------------------
 * Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 * Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
	       && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
	int i;

#ifdef CONFIG_BF561	/* IMDMA channels don't have a PERIPHERAL_MAP */
	for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
		if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
			printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
			return -EBUSY;
		}

		dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
	}

	return 0;
}

void blackfin_dma_resume(void)
{
	int i;

#ifdef CONFIG_BF561	/* IMDMA channels don't have a PERIPHERAL_MAP */
	for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
		dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif
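
/*
 * __dma_memcpy() drives MDMA stream 0 directly (the channel pair reserved in
 * blackfin_dma_init()).  It copies backwards when src < dest so overlapping
 * regions survive, uses 16-bit element moves when both pointers and the size
 * are 2-byte aligned, and falls back to a 2D transfer (1024-byte rows,
 * size >> 10 rows) once the size exceeds 64K, since X_COUNT is only 16 bits
 * wide.
 */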
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned,  0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed,    0 - 1D DMA needed */
	unsigned long flags;

	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	local_irq_restore(flags);

	return dest;
}
void *dma_memcpy(void *dest, const void *src, size_t size)
{
	size_t bulk;
	size_t rest;
	void *addr;

	bulk = (size >> 16) << 16;
	rest = size - bulk;
	if (bulk)
		__dma_memcpy(dest, src, bulk);
	addr = __dma_memcpy(dest + bulk, src + bulk, rest);
	return addr;
}
EXPORT_SYMBOL(dma_memcpy);
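
/*
 * dma_memcpy() splits the copy into a bulk part that is a multiple of 64K and
 * a remainder below 64K, each handed to __dma_memcpy() separately.  Usage
 * sketch - the buffer names are illustrative, and the helper busy-waits with
 * interrupts disabled until the MDMA transfer finishes, so keep it away from
 * latency-critical paths:
 *
 *	dma_memcpy(dst_buf, src_buf, nbytes);
 */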
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
	void *addr;
	addr = dma_memcpy(dest, src, size);
	return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);
void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
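
/*
 * Example (sketch): dma_outsb()/dma_insb() and the 16/32-bit variants below
 * stream a buffer to or from a fixed device address - the device side is
 * programmed with an X_MODIFY of 0 so every element hits the same register,
 * which suits a memory-mapped FIFO.  The address, buffers and lengths are
 * illustrative assumptions:
 *
 *	dma_outsb(fifo_reg_addr, tx_buf, tx_len);
 *	dma_insb(fifo_reg_addr, rx_buf, rx_len);
 */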
void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len);

	local_irq_save(flags);
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(short));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(short));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf,
				    (unsigned int)(buf) + len * sizeof(long));

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
	unsigned long flags;

	blackfin_dcache_invalidate_range((unsigned int)buf,
					 (unsigned int)(buf) + len * sizeof(long));

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	SSYNC();

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);