allow coexistence of N build and AC build.
[tomato.git] / release / src-rt-6.x / linux / linux-2.6 / arch / blackfin / kernel / bfin_dma_5xx.c
blob069a896a8f260e67c7354066b6c05020811383cc
1 /*
2 * File: arch/blackfin/kernel/bfin_dma_5xx.c
3 * Based on:
4 * Author:
6 * Created:
7 * Description: This file contains the simple DMA Implementation for Blackfin
9 * Modified:
10 * Copyright 2004-2006 Analog Devices Inc.
12 * Bugs: Enter bugs at http://blackfin.uclinux.org/
14 * This program is free software; you can redistribute it and/or modify
15 * it under the terms of the GNU General Public License as published by
16 * the Free Software Foundation; either version 2 of the License, or
17 * (at your option) any later version.
19 * This program is distributed in the hope that it will be useful,
20 * but WITHOUT ANY WARRANTY; without even the implied warranty of
21 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 * GNU General Public License for more details.
24 * You should have received a copy of the GNU General Public License
25 * along with this program; if not, see the file COPYING, or write
26 * to the Free Software Foundation, Inc.,
27 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
30 #include <linux/errno.h>
31 #include <linux/module.h>
32 #include <linux/sched.h>
33 #include <linux/interrupt.h>
34 #include <linux/kernel.h>
35 #include <linux/param.h>
37 #include <asm/dma.h>
38 #include <asm/cacheflush.h>
40 /* Remove unused code not exported by symbol or internally called */
41 #define REMOVE_DEAD_CODE
43 /**************************************************************************
44 * Global Variables
45 ***************************************************************************/
47 static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
48 #if defined (CONFIG_BF561)
49 static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
50 (struct dma_register *) DMA1_0_NEXT_DESC_PTR,
51 (struct dma_register *) DMA1_1_NEXT_DESC_PTR,
52 (struct dma_register *) DMA1_2_NEXT_DESC_PTR,
53 (struct dma_register *) DMA1_3_NEXT_DESC_PTR,
54 (struct dma_register *) DMA1_4_NEXT_DESC_PTR,
55 (struct dma_register *) DMA1_5_NEXT_DESC_PTR,
56 (struct dma_register *) DMA1_6_NEXT_DESC_PTR,
57 (struct dma_register *) DMA1_7_NEXT_DESC_PTR,
58 (struct dma_register *) DMA1_8_NEXT_DESC_PTR,
59 (struct dma_register *) DMA1_9_NEXT_DESC_PTR,
60 (struct dma_register *) DMA1_10_NEXT_DESC_PTR,
61 (struct dma_register *) DMA1_11_NEXT_DESC_PTR,
62 (struct dma_register *) DMA2_0_NEXT_DESC_PTR,
63 (struct dma_register *) DMA2_1_NEXT_DESC_PTR,
64 (struct dma_register *) DMA2_2_NEXT_DESC_PTR,
65 (struct dma_register *) DMA2_3_NEXT_DESC_PTR,
66 (struct dma_register *) DMA2_4_NEXT_DESC_PTR,
67 (struct dma_register *) DMA2_5_NEXT_DESC_PTR,
68 (struct dma_register *) DMA2_6_NEXT_DESC_PTR,
69 (struct dma_register *) DMA2_7_NEXT_DESC_PTR,
70 (struct dma_register *) DMA2_8_NEXT_DESC_PTR,
71 (struct dma_register *) DMA2_9_NEXT_DESC_PTR,
72 (struct dma_register *) DMA2_10_NEXT_DESC_PTR,
73 (struct dma_register *) DMA2_11_NEXT_DESC_PTR,
74 (struct dma_register *) MDMA1_D0_NEXT_DESC_PTR,
75 (struct dma_register *) MDMA1_S0_NEXT_DESC_PTR,
76 (struct dma_register *) MDMA1_D1_NEXT_DESC_PTR,
77 (struct dma_register *) MDMA1_S1_NEXT_DESC_PTR,
78 (struct dma_register *) MDMA2_D0_NEXT_DESC_PTR,
79 (struct dma_register *) MDMA2_S0_NEXT_DESC_PTR,
80 (struct dma_register *) MDMA2_D1_NEXT_DESC_PTR,
81 (struct dma_register *) MDMA2_S1_NEXT_DESC_PTR,
82 (struct dma_register *) IMDMA_D0_NEXT_DESC_PTR,
83 (struct dma_register *) IMDMA_S0_NEXT_DESC_PTR,
84 (struct dma_register *) IMDMA_D1_NEXT_DESC_PTR,
85 (struct dma_register *) IMDMA_S1_NEXT_DESC_PTR,
87 #else
88 static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
89 (struct dma_register *) DMA0_NEXT_DESC_PTR,
90 (struct dma_register *) DMA1_NEXT_DESC_PTR,
91 (struct dma_register *) DMA2_NEXT_DESC_PTR,
92 (struct dma_register *) DMA3_NEXT_DESC_PTR,
93 (struct dma_register *) DMA4_NEXT_DESC_PTR,
94 (struct dma_register *) DMA5_NEXT_DESC_PTR,
95 (struct dma_register *) DMA6_NEXT_DESC_PTR,
96 (struct dma_register *) DMA7_NEXT_DESC_PTR,
97 #if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
98 (struct dma_register *) DMA8_NEXT_DESC_PTR,
99 (struct dma_register *) DMA9_NEXT_DESC_PTR,
100 (struct dma_register *) DMA10_NEXT_DESC_PTR,
101 (struct dma_register *) DMA11_NEXT_DESC_PTR,
102 #endif
103 (struct dma_register *) MDMA_D0_NEXT_DESC_PTR,
104 (struct dma_register *) MDMA_S0_NEXT_DESC_PTR,
105 (struct dma_register *) MDMA_D1_NEXT_DESC_PTR,
106 (struct dma_register *) MDMA_S1_NEXT_DESC_PTR,
108 #endif
110 /*------------------------------------------------------------------------------
111 * Set the Buffer Clear bit in the Configuration register of specific DMA
112 * channel. This will stop the descriptor based DMA operation.
113 *-----------------------------------------------------------------------------*/
114 static void clear_dma_buffer(unsigned int channel)
116 dma_ch[channel].regs->cfg |= RESTART;
117 SSYNC();
118 dma_ch[channel].regs->cfg &= ~RESTART;
119 SSYNC();
122 static int __init blackfin_dma_init(void)
124 int i;
126 printk(KERN_INFO "Blackfin DMA Controller\n");
128 for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
129 dma_ch[i].chan_status = DMA_CHANNEL_FREE;
130 dma_ch[i].regs = base_addr[i];
131 mutex_init(&(dma_ch[i].dmalock));
133 /* Mark MEMDMA Channel 0 as requested since we're using it internally */
134 dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
135 dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;
136 return 0;
139 arch_initcall(blackfin_dma_init);
142 * Form the channel find the irq number for that channel.
144 #if !defined(CONFIG_BF561)
146 static int bf533_channel2irq(unsigned int channel)
148 int ret_irq = -1;
150 switch (channel) {
151 case CH_PPI:
152 ret_irq = IRQ_PPI;
153 break;
155 #if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
156 case CH_EMAC_RX:
157 ret_irq = IRQ_MAC_RX;
158 break;
160 case CH_EMAC_TX:
161 ret_irq = IRQ_MAC_TX;
162 break;
164 case CH_UART1_RX:
165 ret_irq = IRQ_UART1_RX;
166 break;
168 case CH_UART1_TX:
169 ret_irq = IRQ_UART1_TX;
170 break;
171 #endif
173 case CH_SPORT0_RX:
174 ret_irq = IRQ_SPORT0_RX;
175 break;
177 case CH_SPORT0_TX:
178 ret_irq = IRQ_SPORT0_TX;
179 break;
181 case CH_SPORT1_RX:
182 ret_irq = IRQ_SPORT1_RX;
183 break;
185 case CH_SPORT1_TX:
186 ret_irq = IRQ_SPORT1_TX;
187 break;
189 case CH_SPI:
190 ret_irq = IRQ_SPI;
191 break;
193 case CH_UART_RX:
194 ret_irq = IRQ_UART_RX;
195 break;
197 case CH_UART_TX:
198 ret_irq = IRQ_UART_TX;
199 break;
201 case CH_MEM_STREAM0_SRC:
202 case CH_MEM_STREAM0_DEST:
203 ret_irq = IRQ_MEM_DMA0;
204 break;
206 case CH_MEM_STREAM1_SRC:
207 case CH_MEM_STREAM1_DEST:
208 ret_irq = IRQ_MEM_DMA1;
209 break;
211 return ret_irq;
214 # define channel2irq(channel) bf533_channel2irq(channel)
216 #else
218 static int bf561_channel2irq(unsigned int channel)
220 int ret_irq = -1;
222 switch (channel) {
223 case CH_PPI0:
224 ret_irq = IRQ_PPI0;
225 break;
226 case CH_PPI1:
227 ret_irq = IRQ_PPI1;
228 break;
229 case CH_SPORT0_RX:
230 ret_irq = IRQ_SPORT0_RX;
231 break;
232 case CH_SPORT0_TX:
233 ret_irq = IRQ_SPORT0_TX;
234 break;
235 case CH_SPORT1_RX:
236 ret_irq = IRQ_SPORT1_RX;
237 break;
238 case CH_SPORT1_TX:
239 ret_irq = IRQ_SPORT1_TX;
240 break;
241 case CH_SPI:
242 ret_irq = IRQ_SPI;
243 break;
244 case CH_UART_RX:
245 ret_irq = IRQ_UART_RX;
246 break;
247 case CH_UART_TX:
248 ret_irq = IRQ_UART_TX;
249 break;
251 case CH_MEM_STREAM0_SRC:
252 case CH_MEM_STREAM0_DEST:
253 ret_irq = IRQ_MEM_DMA0;
254 break;
255 case CH_MEM_STREAM1_SRC:
256 case CH_MEM_STREAM1_DEST:
257 ret_irq = IRQ_MEM_DMA1;
258 break;
259 case CH_MEM_STREAM2_SRC:
260 case CH_MEM_STREAM2_DEST:
261 ret_irq = IRQ_MEM_DMA2;
262 break;
263 case CH_MEM_STREAM3_SRC:
264 case CH_MEM_STREAM3_DEST:
265 ret_irq = IRQ_MEM_DMA3;
266 break;
268 case CH_IMEM_STREAM0_SRC:
269 case CH_IMEM_STREAM0_DEST:
270 ret_irq = IRQ_IMEM_DMA0;
271 break;
272 case CH_IMEM_STREAM1_SRC:
273 case CH_IMEM_STREAM1_DEST:
274 ret_irq = IRQ_IMEM_DMA1;
275 break;
277 return ret_irq;
280 # define channel2irq(channel) bf561_channel2irq(channel)
282 #endif
284 /*------------------------------------------------------------------------------
285 * Request the specific DMA channel from the system.
286 *-----------------------------------------------------------------------------*/
287 int request_dma(unsigned int channel, char *device_id)
290 pr_debug("request_dma() : BEGIN \n");
291 mutex_lock(&(dma_ch[channel].dmalock));
293 if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
294 || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
295 mutex_unlock(&(dma_ch[channel].dmalock));
296 pr_debug("DMA CHANNEL IN USE \n");
297 return -EBUSY;
298 } else {
299 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
300 pr_debug("DMA CHANNEL IS ALLOCATED \n");
303 mutex_unlock(&(dma_ch[channel].dmalock));
305 dma_ch[channel].device_id = device_id;
306 dma_ch[channel].irq_callback = NULL;
308 /* This is to be enabled by putting a restriction -
309 * you have to request DMA, before doing any operations on
310 * descriptor/channel
312 pr_debug("request_dma() : END \n");
313 return channel;
315 EXPORT_SYMBOL(request_dma);
317 int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
319 int ret_irq = 0;
321 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
322 && channel < MAX_BLACKFIN_DMA_CHANNEL));
324 if (callback != NULL) {
325 int ret_val;
326 ret_irq = channel2irq(channel);
328 dma_ch[channel].data = data;
330 ret_val =
331 request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
332 dma_ch[channel].device_id, data);
333 if (ret_val) {
334 printk(KERN_NOTICE
335 "Request irq in DMA engine failed.\n");
336 return -EPERM;
338 dma_ch[channel].irq_callback = callback;
340 return 0;
342 EXPORT_SYMBOL(set_dma_callback);
344 void free_dma(unsigned int channel)
346 int ret_irq;
348 pr_debug("freedma() : BEGIN \n");
349 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
350 && channel < MAX_BLACKFIN_DMA_CHANNEL));
352 /* Halt the DMA */
353 disable_dma(channel);
354 clear_dma_buffer(channel);
356 if (dma_ch[channel].irq_callback != NULL) {
357 ret_irq = channel2irq(channel);
358 free_irq(ret_irq, dma_ch[channel].data);
361 /* Clear the DMA Variable in the Channel */
362 mutex_lock(&(dma_ch[channel].dmalock));
363 dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
364 mutex_unlock(&(dma_ch[channel].dmalock));
366 pr_debug("freedma() : END \n");
368 EXPORT_SYMBOL(free_dma);
370 void dma_enable_irq(unsigned int channel)
372 int ret_irq;
374 pr_debug("dma_enable_irq() : BEGIN \n");
375 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
376 && channel < MAX_BLACKFIN_DMA_CHANNEL));
378 ret_irq = channel2irq(channel);
379 enable_irq(ret_irq);
381 EXPORT_SYMBOL(dma_enable_irq);
383 void dma_disable_irq(unsigned int channel)
385 int ret_irq;
387 pr_debug("dma_disable_irq() : BEGIN \n");
388 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
389 && channel < MAX_BLACKFIN_DMA_CHANNEL));
391 ret_irq = channel2irq(channel);
392 disable_irq(ret_irq);
394 EXPORT_SYMBOL(dma_disable_irq);
396 int dma_channel_active(unsigned int channel)
398 if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
399 return 0;
400 } else {
401 return 1;
404 EXPORT_SYMBOL(dma_channel_active);
406 /*------------------------------------------------------------------------------
407 * stop the specific DMA channel.
408 *-----------------------------------------------------------------------------*/
409 void disable_dma(unsigned int channel)
411 pr_debug("stop_dma() : BEGIN \n");
413 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
414 && channel < MAX_BLACKFIN_DMA_CHANNEL));
416 dma_ch[channel].regs->cfg &= ~DMAEN; /* Clean the enable bit */
417 SSYNC();
418 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
419 /* Needs to be enabled Later */
420 pr_debug("stop_dma() : END \n");
421 return;
423 EXPORT_SYMBOL(disable_dma);
425 void enable_dma(unsigned int channel)
427 pr_debug("enable_dma() : BEGIN \n");
429 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
430 && channel < MAX_BLACKFIN_DMA_CHANNEL));
432 dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
433 dma_ch[channel].regs->curr_x_count = 0;
434 dma_ch[channel].regs->curr_y_count = 0;
436 dma_ch[channel].regs->cfg |= DMAEN; /* Set the enable bit */
437 SSYNC();
438 pr_debug("enable_dma() : END \n");
439 return;
441 EXPORT_SYMBOL(enable_dma);
443 /*------------------------------------------------------------------------------
444 * Set the Start Address register for the specific DMA channel
445 * This function can be used for register based DMA,
446 * to setup the start address
447 * addr: Starting address of the DMA Data to be transferred.
448 *-----------------------------------------------------------------------------*/
449 void set_dma_start_addr(unsigned int channel, unsigned long addr)
451 pr_debug("set_dma_start_addr() : BEGIN \n");
453 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
454 && channel < MAX_BLACKFIN_DMA_CHANNEL));
456 dma_ch[channel].regs->start_addr = addr;
457 SSYNC();
458 pr_debug("set_dma_start_addr() : END\n");
460 EXPORT_SYMBOL(set_dma_start_addr);
462 void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
464 pr_debug("set_dma_next_desc_addr() : BEGIN \n");
466 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
467 && channel < MAX_BLACKFIN_DMA_CHANNEL));
469 dma_ch[channel].regs->next_desc_ptr = addr;
470 SSYNC();
471 pr_debug("set_dma_start_addr() : END\n");
473 EXPORT_SYMBOL(set_dma_next_desc_addr);
475 void set_dma_x_count(unsigned int channel, unsigned short x_count)
477 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
478 && channel < MAX_BLACKFIN_DMA_CHANNEL));
480 dma_ch[channel].regs->x_count = x_count;
481 SSYNC();
483 EXPORT_SYMBOL(set_dma_x_count);
485 void set_dma_y_count(unsigned int channel, unsigned short y_count)
487 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
488 && channel < MAX_BLACKFIN_DMA_CHANNEL));
490 dma_ch[channel].regs->y_count = y_count;
491 SSYNC();
493 EXPORT_SYMBOL(set_dma_y_count);
495 void set_dma_x_modify(unsigned int channel, short x_modify)
497 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
498 && channel < MAX_BLACKFIN_DMA_CHANNEL));
500 dma_ch[channel].regs->x_modify = x_modify;
501 SSYNC();
503 EXPORT_SYMBOL(set_dma_x_modify);
505 void set_dma_y_modify(unsigned int channel, short y_modify)
507 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
508 && channel < MAX_BLACKFIN_DMA_CHANNEL));
510 dma_ch[channel].regs->y_modify = y_modify;
511 SSYNC();
513 EXPORT_SYMBOL(set_dma_y_modify);
515 void set_dma_config(unsigned int channel, unsigned short config)
517 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
518 && channel < MAX_BLACKFIN_DMA_CHANNEL));
520 dma_ch[channel].regs->cfg = config;
521 SSYNC();
523 EXPORT_SYMBOL(set_dma_config);
525 unsigned short
526 set_bfin_dma_config(char direction, char flow_mode,
527 char intr_mode, char dma_mode, char width)
529 unsigned short config;
531 config =
532 ((direction << 1) | (width << 2) | (dma_mode << 4) |
533 (intr_mode << 6) | (flow_mode << 12) | RESTART);
534 return config;
536 EXPORT_SYMBOL(set_bfin_dma_config);
538 void set_dma_sg(unsigned int channel, struct dmasg * sg, int nr_sg)
540 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
541 && channel < MAX_BLACKFIN_DMA_CHANNEL));
543 dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
545 dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
547 SSYNC();
549 EXPORT_SYMBOL(set_dma_sg);
551 /*------------------------------------------------------------------------------
552 * Get the DMA status of a specific DMA channel from the system.
553 *-----------------------------------------------------------------------------*/
554 unsigned short get_dma_curr_irqstat(unsigned int channel)
556 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
557 && channel < MAX_BLACKFIN_DMA_CHANNEL));
559 return dma_ch[channel].regs->irq_status;
561 EXPORT_SYMBOL(get_dma_curr_irqstat);
563 /*------------------------------------------------------------------------------
564 * Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
565 *-----------------------------------------------------------------------------*/
566 void clear_dma_irqstat(unsigned int channel)
568 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
569 && channel < MAX_BLACKFIN_DMA_CHANNEL));
570 dma_ch[channel].regs->irq_status |= 3;
572 EXPORT_SYMBOL(clear_dma_irqstat);
574 /*------------------------------------------------------------------------------
575 * Get current DMA xcount of a specific DMA channel from the system.
576 *-----------------------------------------------------------------------------*/
577 unsigned short get_dma_curr_xcount(unsigned int channel)
579 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
580 && channel < MAX_BLACKFIN_DMA_CHANNEL));
582 return dma_ch[channel].regs->curr_x_count;
584 EXPORT_SYMBOL(get_dma_curr_xcount);
586 /*------------------------------------------------------------------------------
587 * Get current DMA ycount of a specific DMA channel from the system.
588 *-----------------------------------------------------------------------------*/
589 unsigned short get_dma_curr_ycount(unsigned int channel)
591 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
592 && channel < MAX_BLACKFIN_DMA_CHANNEL));
594 return dma_ch[channel].regs->curr_y_count;
596 EXPORT_SYMBOL(get_dma_curr_ycount);
598 static void *__dma_memcpy(void *dest, const void *src, size_t size)
600 int direction; /* 1 - address decrease, 0 - address increase */
601 int flag_align; /* 1 - address aligned, 0 - address unaligned */
602 int flag_2D; /* 1 - 2D DMA needed, 0 - 1D DMA needed */
603 unsigned long flags;
605 if (size <= 0)
606 return NULL;
608 local_irq_save(flags);
610 if ((unsigned long)src < memory_end)
611 blackfin_dcache_flush_range((unsigned int)src,
612 (unsigned int)(src + size));
614 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
616 if ((unsigned long)src < (unsigned long)dest)
617 direction = 1;
618 else
619 direction = 0;
621 if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
622 && ((size % 2) == 0))
623 flag_align = 1;
624 else
625 flag_align = 0;
627 if (size > 0x10000) /* size > 64K */
628 flag_2D = 1;
629 else
630 flag_2D = 0;
632 /* Setup destination and source start address */
633 if (direction) {
634 if (flag_align) {
635 bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
636 bfin_write_MDMA_S0_START_ADDR(src + size - 2);
637 } else {
638 bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
639 bfin_write_MDMA_S0_START_ADDR(src + size - 1);
641 } else {
642 bfin_write_MDMA_D0_START_ADDR(dest);
643 bfin_write_MDMA_S0_START_ADDR(src);
646 /* Setup destination and source xcount */
647 if (flag_2D) {
648 if (flag_align) {
649 bfin_write_MDMA_D0_X_COUNT(1024 / 2);
650 bfin_write_MDMA_S0_X_COUNT(1024 / 2);
651 } else {
652 bfin_write_MDMA_D0_X_COUNT(1024);
653 bfin_write_MDMA_S0_X_COUNT(1024);
655 bfin_write_MDMA_D0_Y_COUNT(size >> 10);
656 bfin_write_MDMA_S0_Y_COUNT(size >> 10);
657 } else {
658 if (flag_align) {
659 bfin_write_MDMA_D0_X_COUNT(size / 2);
660 bfin_write_MDMA_S0_X_COUNT(size / 2);
661 } else {
662 bfin_write_MDMA_D0_X_COUNT(size);
663 bfin_write_MDMA_S0_X_COUNT(size);
667 /* Setup destination and source xmodify and ymodify */
668 if (direction) {
669 if (flag_align) {
670 bfin_write_MDMA_D0_X_MODIFY(-2);
671 bfin_write_MDMA_S0_X_MODIFY(-2);
672 if (flag_2D) {
673 bfin_write_MDMA_D0_Y_MODIFY(-2);
674 bfin_write_MDMA_S0_Y_MODIFY(-2);
676 } else {
677 bfin_write_MDMA_D0_X_MODIFY(-1);
678 bfin_write_MDMA_S0_X_MODIFY(-1);
679 if (flag_2D) {
680 bfin_write_MDMA_D0_Y_MODIFY(-1);
681 bfin_write_MDMA_S0_Y_MODIFY(-1);
684 } else {
685 if (flag_align) {
686 bfin_write_MDMA_D0_X_MODIFY(2);
687 bfin_write_MDMA_S0_X_MODIFY(2);
688 if (flag_2D) {
689 bfin_write_MDMA_D0_Y_MODIFY(2);
690 bfin_write_MDMA_S0_Y_MODIFY(2);
692 } else {
693 bfin_write_MDMA_D0_X_MODIFY(1);
694 bfin_write_MDMA_S0_X_MODIFY(1);
695 if (flag_2D) {
696 bfin_write_MDMA_D0_Y_MODIFY(1);
697 bfin_write_MDMA_S0_Y_MODIFY(1);
702 /* Enable source DMA */
703 if (flag_2D) {
704 if (flag_align) {
705 bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
706 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
707 } else {
708 bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
709 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
711 } else {
712 if (flag_align) {
713 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
714 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
715 } else {
716 bfin_write_MDMA_S0_CONFIG(DMAEN);
717 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
721 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
724 bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
725 (DMA_DONE | DMA_ERR));
727 bfin_write_MDMA_S0_CONFIG(0);
728 bfin_write_MDMA_D0_CONFIG(0);
730 if ((unsigned long)dest < memory_end)
731 blackfin_dcache_invalidate_range((unsigned int)dest,
732 (unsigned int)(dest + size));
733 local_irq_restore(flags);
735 return dest;
738 void *dma_memcpy(void *dest, const void *src, size_t size)
740 size_t bulk;
741 size_t rest;
742 void * addr;
744 bulk = (size >> 16) << 16;
745 rest = size - bulk;
746 if (bulk)
747 __dma_memcpy(dest, src, bulk);
748 addr = __dma_memcpy(dest+bulk, src+bulk, rest);
749 return addr;
752 EXPORT_SYMBOL(dma_memcpy);
754 void *safe_dma_memcpy(void *dest, const void *src, size_t size)
756 void *addr;
757 addr = dma_memcpy(dest, src, size);
758 return addr;
760 EXPORT_SYMBOL(safe_dma_memcpy);
762 void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
765 unsigned long flags;
767 local_irq_save(flags);
769 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
771 bfin_write_MDMA_D0_START_ADDR(addr);
772 bfin_write_MDMA_D0_X_COUNT(len);
773 bfin_write_MDMA_D0_X_MODIFY(0);
774 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
776 bfin_write_MDMA_S0_START_ADDR(buf);
777 bfin_write_MDMA_S0_X_COUNT(len);
778 bfin_write_MDMA_S0_X_MODIFY(1);
779 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
781 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
782 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);
784 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
786 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
788 bfin_write_MDMA_S0_CONFIG(0);
789 bfin_write_MDMA_D0_CONFIG(0);
790 local_irq_restore(flags);
793 EXPORT_SYMBOL(dma_outsb);
796 void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
798 unsigned long flags;
800 local_irq_save(flags);
801 bfin_write_MDMA_D0_START_ADDR(buf);
802 bfin_write_MDMA_D0_X_COUNT(len);
803 bfin_write_MDMA_D0_X_MODIFY(1);
804 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
806 bfin_write_MDMA_S0_START_ADDR(addr);
807 bfin_write_MDMA_S0_X_COUNT(len);
808 bfin_write_MDMA_S0_X_MODIFY(0);
809 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
811 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
812 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);
814 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
816 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
818 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
820 bfin_write_MDMA_S0_CONFIG(0);
821 bfin_write_MDMA_D0_CONFIG(0);
822 local_irq_restore(flags);
825 EXPORT_SYMBOL(dma_insb);
827 void dma_outsw(void __iomem *addr, const void *buf, unsigned short len)
829 unsigned long flags;
831 local_irq_save(flags);
833 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
835 bfin_write_MDMA_D0_START_ADDR(addr);
836 bfin_write_MDMA_D0_X_COUNT(len);
837 bfin_write_MDMA_D0_X_MODIFY(0);
838 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
840 bfin_write_MDMA_S0_START_ADDR(buf);
841 bfin_write_MDMA_S0_X_COUNT(len);
842 bfin_write_MDMA_S0_X_MODIFY(2);
843 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
845 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
846 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
848 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
850 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
852 bfin_write_MDMA_S0_CONFIG(0);
853 bfin_write_MDMA_D0_CONFIG(0);
854 local_irq_restore(flags);
857 EXPORT_SYMBOL(dma_outsw);
859 void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
861 unsigned long flags;
863 local_irq_save(flags);
865 bfin_write_MDMA_D0_START_ADDR(buf);
866 bfin_write_MDMA_D0_X_COUNT(len);
867 bfin_write_MDMA_D0_X_MODIFY(2);
868 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
870 bfin_write_MDMA_S0_START_ADDR(addr);
871 bfin_write_MDMA_S0_X_COUNT(len);
872 bfin_write_MDMA_S0_X_MODIFY(0);
873 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
875 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
876 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
878 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
880 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
882 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
884 bfin_write_MDMA_S0_CONFIG(0);
885 bfin_write_MDMA_D0_CONFIG(0);
886 local_irq_restore(flags);
889 EXPORT_SYMBOL(dma_insw);
891 void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
893 unsigned long flags;
895 local_irq_save(flags);
897 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
899 bfin_write_MDMA_D0_START_ADDR(addr);
900 bfin_write_MDMA_D0_X_COUNT(len);
901 bfin_write_MDMA_D0_X_MODIFY(0);
902 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
904 bfin_write_MDMA_S0_START_ADDR(buf);
905 bfin_write_MDMA_S0_X_COUNT(len);
906 bfin_write_MDMA_S0_X_MODIFY(4);
907 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
909 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
910 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);
912 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
914 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
916 bfin_write_MDMA_S0_CONFIG(0);
917 bfin_write_MDMA_D0_CONFIG(0);
918 local_irq_restore(flags);
921 EXPORT_SYMBOL(dma_outsl);
923 void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
925 unsigned long flags;
927 local_irq_save(flags);
929 bfin_write_MDMA_D0_START_ADDR(buf);
930 bfin_write_MDMA_D0_X_COUNT(len);
931 bfin_write_MDMA_D0_X_MODIFY(4);
932 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
934 bfin_write_MDMA_S0_START_ADDR(addr);
935 bfin_write_MDMA_S0_X_COUNT(len);
936 bfin_write_MDMA_S0_X_MODIFY(0);
937 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
939 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
940 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);
942 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
944 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
946 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
948 bfin_write_MDMA_S0_CONFIG(0);
949 bfin_write_MDMA_D0_CONFIG(0);
950 local_irq_restore(flags);
953 EXPORT_SYMBOL(dma_insl);