/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Author: Vitaly Wool
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>

#include <asm/system.h>
#include <asm/hardware.h>
#include <asm/dma.h>
#include <asm/dma-mapping.h>
#include <asm/io.h>
#include <asm/mach/dma.h>
#include <asm/arch/clock.h>
static struct dma_channel {
	char *name;
	void (*irq_handler) (int, int, void *, struct pt_regs *);
	void *data;
	struct pnx4008_dma_ll *ll;
	u32 ll_dma;
	void *target_addr;
	int target_id;
} dma_channels[MAX_DMA_CHANNELS];
static struct ll_pool {
	void *vaddr;
	void *cur;
	dma_addr_t dma_addr;
	int count;
} ll_pool;

static spinlock_t ll_lock = SPIN_LOCK_UNLOCKED;
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
{
	struct pnx4008_dma_ll *ll = NULL;
	unsigned long flags;

	spin_lock_irqsave(&ll_lock, flags);
	if (ll_pool.count > 4) { /* can give one more */
		ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
		*ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
		*(void **)ll_pool.cur = **(void ***)ll_pool.cur;
		memset(ll, 0, sizeof(*ll));
		ll_pool.count--;
	}
	spin_unlock_irqrestore(&ll_lock, flags);

	return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);
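
/*
 * Illustrative note (not part of the original file): ll_pool is a small
 * fixed-size allocator carved out of one dma_alloc_coherent() block.  Free
 * entries are chained through their own first word, with the slot at
 * ll_pool.cur acting as the list head, so the casts above amount to a
 * plain "pop the first free entry":
 *
 *	struct pnx4008_dma_ll **head = ll_pool.cur;
 *	struct pnx4008_dma_ll *ll = *head;
 *	*head = *(struct pnx4008_dma_ll **)ll;
 *
 * The bus address handed back is ll_pool.dma_addr plus the entry's byte
 * offset inside that coherent block, and the "count > 4" test keeps a few
 * entries in reserve.
 */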
void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
{
	unsigned long flags;

	if (ll) {
		if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
			printk(KERN_ERR "Trying to free entry not allocated by DMA\n");
			BUG();
		}

		if (ll->flags & DMA_BUFFER_ALLOCATED)
			ll->free(ll->alloc_data);

		spin_lock_irqsave(&ll_lock, flags);
		*(long *)ll = *(long *)ll_pool.cur;
		*(long *)ll_pool.cur = (long)ll;
		ll_pool.count++;
		spin_unlock_irqrestore(&ll_lock, flags);
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
{
	struct pnx4008_dma_ll *ptr;
	u32 dma;

	while (ll) {
		dma = ll->next_dma;
		ptr = ll->next;
		pnx4008_free_ll_entry(ll, ll_dma);

		ll_dma = dma;
		ll = ptr;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);
static int dma_channels_requested = 0;

static inline void dma_increment_usage(void)
{
	if (!dma_channels_requested++) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 1);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 1);
	}
}

static inline void dma_decrement_usage(void)
{
	if (!--dma_channels_requested) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 0);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 0);
	}
}
static spinlock_t dma_lock = SPIN_LOCK_UNLOCKED;

static inline void pnx4008_dma_lock(void)
{
	spin_lock_irq(&dma_lock);
}

static inline void pnx4008_dma_unlock(void)
{
	spin_unlock_irq(&dma_lock);
}

#define VALID_CHANNEL(c)	(((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
int pnx4008_request_channel(char *name, int ch,
			    void (*irq_handler) (int, int, void *,
						 struct pt_regs *), void *data)
{
	int i, found = 0;

	/* basic sanity checks */
	if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
		return -EINVAL;

	pnx4008_dma_lock();

	/* try grabbing a DMA channel with the requested priority */
	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		if (!dma_channels[i].name && (ch == -1 || ch == i)) {
			found = 1;
			break;
		}
	}

	if (found) {
		dma_increment_usage();
		dma_channels[i].name = name;
		dma_channels[i].irq_handler = irq_handler;
		dma_channels[i].data = data;
		dma_channels[i].ll = NULL;
		dma_channels[i].ll_dma = 0;
	} else {
		printk(KERN_WARNING "No more available DMA channels for %s\n",
		       name);
		i = -ENODEV;
	}

	pnx4008_dma_unlock();
	return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);
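
/*
 * Usage sketch (hypothetical client code, not part of this file): passing
 * ch == -1 asks for any currently free channel; the return value is the
 * channel number or a negative errno.  "my_dma_irq" and "my-driver" are
 * made-up names for illustration:
 *
 *	static void my_dma_irq(int ch, int cause, void *data,
 *			       struct pt_regs *regs)
 *	{
 *		if (cause & DMA_ERR_INT)
 *			printk(KERN_ERR "DMA error on channel %d\n", ch);
 *	}
 *
 *	int ch = pnx4008_request_channel("my-driver", -1, my_dma_irq, NULL);
 *	if (ch < 0)
 *		return ch;
 *	...
 *	pnx4008_free_channel(ch);
 */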
void pnx4008_free_channel(int ch)
{
	if (!dma_channels[ch].name) {
		printk(KERN_CRIT
		       "%s: trying to free channel %d which is already freed\n",
		       __FUNCTION__, ch);
		return;
	}

	pnx4008_dma_lock();
	pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
	dma_channels[ch].ll = NULL;
	dma_decrement_usage();

	dma_channels[ch].name = NULL;
	pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
	unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

	switch (ahb_m1_be) {
	case 0:
		dma_cfg &= ~(1 << 1);
		break;
	case 1:
		dma_cfg |= (1 << 1);
		break;
	default:
		break;
	}

	switch (ahb_m2_be) {
	case 0:
		dma_cfg &= ~(1 << 2);
		break;
	case 1:
		dma_cfg |= (1 << 2);
		break;
	default:
		break;
	}

	switch (enable) {
	case 0:
		dma_cfg &= ~(1 << 0);
		break;
	case 1:
		dma_cfg |= (1 << 0);
		break;
	default:
		break;
	}

	pnx4008_dma_lock();
	__raw_writel(dma_cfg, DMAC_CONFIG);
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);
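
/*
 * Note (derived from the code above, not in the original file): in
 * DMAC_CONFIG bit 0 is the controller enable and bits 1 and 2 select
 * big-endian mode for AHB master 1 and 2.  Passing -1 for an argument
 * leaves that field untouched, so
 *
 *	pnx4008_config_dma(-1, -1, 1);
 *
 * as issued by dma_increment_usage() merely switches the controller on.
 */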
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
			     unsigned long *ctrl)
{
	int i = 0, dbsize, sbsize, err = 0;

	if (!ctrl || !ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	*ctrl = 0;

	switch (ch_ctrl->tc_mask) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 31);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->cacheable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 30);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->bufferable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 29);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->priv_mode) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 28);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->di) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 27);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->si) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 26);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->dest_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 25);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->src_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 24);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->dwidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 21);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (1 << 21);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (2 << 21);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->swidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 18);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (1 << 18);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (2 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	dbsize = ch_ctrl->dbsize;
	while (!(dbsize & 1)) {
		i++;
		dbsize >>= 1;
	}
	if (ch_ctrl->dbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 15);
	*ctrl |= (i << 15);

	i = 0;
	sbsize = ch_ctrl->sbsize;
	while (!(sbsize & 1)) {
		i++;
		sbsize >>= 1;
	}
	if (ch_ctrl->sbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 12);
	*ctrl |= (i << 12);

	if (ch_ctrl->tr_size > 0x7ff) {
		err = -E2BIG;
		goto out;
	}
	*ctrl &= ~0x7ff;
	*ctrl |= ch_ctrl->tr_size & 0x7ff;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);
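
/*
 * Usage sketch (hypothetical values, not part of this file): packing the
 * control word for a 512-word transfer with both addresses incrementing
 * and the terminal-count bit (tc_mask) set.  The flag fields left out of
 * the initializer default to 0, which the switches above accept; the
 * burst sizes must be 1 or a power of two between 4 and 256:
 *
 *	struct pnx4008_dma_ch_ctrl ch_ctrl = {
 *		.tc_mask = 1,
 *		.si      = 1,
 *		.di      = 1,
 *		.swidth  = WIDTH_WORD,
 *		.dwidth  = WIDTH_WORD,
 *		.sbsize  = 1,
 *		.dbsize  = 1,
 *		.tr_size = 512,
 *	};
 *	unsigned long ctrl;
 *
 *	if (pnx4008_dma_pack_control(&ch_ctrl, &ctrl) != 0)
 *		return -EINVAL;
 *
 * tr_size is limited to 0x7ff; anything larger fails with -E2BIG and has
 * to be split, see pnx4008_dma_split_head_entry() below.
 */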
int pnx4008_dma_parse_control(unsigned long ctrl,
			      struct pnx4008_dma_ch_ctrl * ch_ctrl)
{
	int err = 0;

	if (!ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	ch_ctrl->tr_size = ctrl & 0x7ff;
	ctrl >>= 12;

	ch_ctrl->sbsize = 1 << (ctrl & 7);
	if (ch_ctrl->sbsize > 1)
		ch_ctrl->sbsize <<= 1;
	ctrl >>= 3;

	ch_ctrl->dbsize = 1 << (ctrl & 7);
	if (ch_ctrl->dbsize > 1)
		ch_ctrl->dbsize <<= 1;
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->swidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->swidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->swidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->dwidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->dwidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->dwidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	ch_ctrl->src_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->dest_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->si = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->di = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->priv_mode = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->bufferable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->cacheable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->tc_mask = ctrl & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);
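
/*
 * Note (not in the original file): this is the inverse of
 * pnx4008_dma_pack_control(), so a read-modify-write of a channel's
 * control word can be done on the decoded form.  Sketch, with "ch" and
 * "new_size" as placeholders and error checking omitted:
 *
 *	struct pnx4008_dma_config cfg;
 *	struct pnx4008_dma_ch_ctrl ch_ctrl;
 *	unsigned long ctrl;
 *
 *	pnx4008_channel_get_config(ch, &cfg);
 *	pnx4008_dma_parse_control(cfg.ch_ctrl, &ch_ctrl);
 *	ch_ctrl.tr_size = new_size;
 *	pnx4008_dma_pack_control(&ch_ctrl, &ctrl);
 *	cfg.ch_ctrl = ctrl;
 *	pnx4008_config_channel(ch, &cfg);
 */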
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
			    unsigned long *cfg)
{
	int err = 0;

	if (!cfg || !ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	*cfg = 0;

	switch (ch_cfg->halt) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->active) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 17);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->lock) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 16);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->itc) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 15);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->ie) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 14);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->flow_cntrl) {
	case FC_MEM2MEM_DMA:
		*cfg &= ~(7 << 11);
		break;
	case FC_MEM2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (1 << 11);
		break;
	case FC_PER2MEM_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (2 << 11);
		break;
	case FC_PER2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (3 << 11);
		break;
	case FC_PER2PER_DPER:
		*cfg &= ~(7 << 11);
		*cfg |= (4 << 11);
		break;
	case FC_MEM2PER_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (5 << 11);
		break;
	case FC_PER2MEM_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (6 << 11);
		break;
	case FC_PER2PER_SPER:
		*cfg |= (7 << 11);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	*cfg &= ~(0x1f << 6);
	*cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

	*cfg &= ~(0x1f << 1);
	*cfg |= ((ch_cfg->src_per & 0x1f) << 1);

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);
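
/*
 * Usage sketch (hypothetical values, not part of this file): a
 * memory-to-peripheral configuration word with both interrupt bits set.
 * "MY_PER_ID" stands in for whatever request line the board assigns:
 *
 *	struct pnx4008_dma_ch_config ch_cfg = {
 *		.flow_cntrl = FC_MEM2PER_DMA,
 *		.dest_per   = MY_PER_ID,
 *		.ie         = 1,
 *		.itc        = 1,
 *	};
 *	unsigned long cfg;
 *
 *	if (pnx4008_dma_pack_config(&ch_cfg, &cfg) != 0)
 *		return -EINVAL;
 */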
int pnx4008_dma_parse_config(unsigned long cfg,
			     struct pnx4008_dma_ch_config * ch_cfg)
{
	int err = 0;

	if (!ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	cfg >>= 1;

	ch_cfg->src_per = cfg & 0x1f;
	cfg >>= 5;

	ch_cfg->dest_per = cfg & 0x1f;
	cfg >>= 5;

	switch (cfg & 7) {
	case 0:
		ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
		break;
	case 1:
		ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
		break;
	case 2:
		ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
		break;
	case 3:
		ch_cfg->flow_cntrl = FC_PER2PER_DMA;
		break;
	case 4:
		ch_cfg->flow_cntrl = FC_PER2PER_DPER;
		break;
	case 5:
		ch_cfg->flow_cntrl = FC_MEM2PER_PER;
		break;
	case 6:
		ch_cfg->flow_cntrl = FC_PER2MEM_PER;
		break;
	case 7:
		ch_cfg->flow_cntrl = FC_PER2PER_SPER;
	}
	cfg >>= 3;

	ch_cfg->ie = cfg & 1;
	cfg >>= 1;

	ch_cfg->itc = cfg & 1;
	cfg >>= 1;

	ch_cfg->lock = cfg & 1;
	cfg >>= 1;

	ch_cfg->active = cfg & 1;
	cfg >>= 1;

	ch_cfg->halt = cfg & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
				  struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		config->ch_ctrl &= ~0x7ff;
		config->ch_ctrl |= new_len;
		if (!config->is_ll) {
			config->is_ll = 1;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->
								   ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->
								   next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = config->ll;
			unsigned long ll_dma_old = config->ll_dma;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->
								   ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->
								   next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		config->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);
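
/*
 * Worked example (not in the original file): the transfer-size field is
 * 11 bits, so a single descriptor can move at most 0x7ff (2047) units.
 * For ctrl->tr_size == 5000 the loop above settles on num_entries == 2
 * and new_len == (5000 + 2) / 3 == 1667: the head entry keeps 1667 units,
 * the first linked-list entry carries another 1667, and the last entry
 * gets the remainder, 5000 - 2 * 1667 == 1666.  Source and destination
 * addresses advance by new_len times the element width per chunk only
 * when the corresponding si/di increment bit is set.
 */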
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
				struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		cur_ll->ch_ctrl &= ~0x7ff;
		cur_ll->ch_ctrl |= new_len;
		if (!cur_ll->next) {
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->
								   next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->
								   next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = cur_ll->next;
			unsigned long ll_dma_old = cur_ll->next_dma;
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->
								   next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->
								   next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}

			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		cur_ll->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);
int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	__raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
	__raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

	if (config->is_ll)
		__raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
	else
		__raw_writel(0, DMAC_Cx_LLI(ch));

	__raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
	__raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);
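
/*
 * Putting it together (hypothetical driver code, not part of this file;
 * "ch", "ch_ctrl", "ch_cfg", "src_dma" and "dst_dma" are placeholders and
 * error handling is omitted):
 *
 *	struct pnx4008_dma_config dma_cfg = {
 *		.src_addr  = src_dma,
 *		.dest_addr = dst_dma,
 *	};
 *	unsigned long ctrl, cfg;
 *
 *	pnx4008_dma_pack_control(&ch_ctrl, &ctrl);
 *	pnx4008_dma_pack_config(&ch_cfg, &cfg);
 *	dma_cfg.ch_ctrl = ctrl;
 *	dma_cfg.ch_cfg = cfg;
 *	pnx4008_dma_split_head_entry(&dma_cfg, &ch_ctrl);
 *	pnx4008_config_channel(ch, &dma_cfg);
 *	pnx4008_dma_ch_enable(ch);
 *
 * where ch comes from pnx4008_request_channel() and src_dma/dst_dma are
 * bus addresses obtained from the DMA mapping API.
 */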
int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
		return -EINVAL;

	pnx4008_dma_lock();
	config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

	config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
	config->is_ll = config->ll_dma ? 1 : 0;

	config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
	config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);
int pnx4008_dma_ch_enable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg |= 1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);
int pnx4008_dma_ch_disable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg &= ~1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);
int pnx4008_dma_ch_enabled(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);
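
/*
 * Note (not in the original file): pnx4008_dma_ch_enabled() returns the
 * raw enable bit (0 or 1) on success and -EINVAL for an invalid or
 * unclaimed channel, so callers should test the sign before treating the
 * result as a boolean:
 *
 *	int st = pnx4008_dma_ch_enabled(ch);
 *	if (st < 0)
 *		return st;
 *	if (st)
 *		pnx4008_dma_ch_disable(ch);
 */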
static irqreturn_t dma_irq_handler(int irq, void *dev_id, struct pt_regs *regs)
{
	int i;
	unsigned long dint = __raw_readl(DMAC_INT_STAT);
	unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
	unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
	unsigned long i_bit;

	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		i_bit = 1 << i;
		if (dint & i_bit) {
			struct dma_channel *channel = &dma_channels[i];

			if (channel->name && channel->irq_handler) {
				int cause = 0;

				if (eint & i_bit)
					cause |= DMA_ERR_INT;
				if (tcint & i_bit)
					cause |= DMA_TC_INT;
				channel->irq_handler(i, cause, channel->data,
						     regs);
			} else {
				/*
				 * IRQ for an unregistered DMA channel
				 */
				printk(KERN_WARNING
				       "spurious IRQ for DMA channel %d\n", i);
			}
			if (tcint & i_bit)
				__raw_writel(i_bit, DMAC_INT_TC_CLEAR);
			if (eint & i_bit)
				__raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
		}
	}
	return IRQ_HANDLED;
}
static int __init pnx4008_dma_init(void)
{
	int ret, i;

	ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
	if (ret) {
		printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
		goto out;
	}

	ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
	ll_pool.cur = ll_pool.vaddr =
	    dma_alloc_coherent(NULL, ll_pool.count * sizeof(struct pnx4008_dma_ll),
			       &ll_pool.dma_addr, GFP_KERNEL);

	if (!ll_pool.vaddr) {
		ret = -ENOMEM;
		free_irq(DMA_INT, NULL);
		goto out;
	}

	for (i = 0; i < ll_pool.count - 1; i++) {
		void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
		*addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
	}
	*(long *)(ll_pool.vaddr +
		  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
	    (long)ll_pool.vaddr;

	__raw_writel(1, DMAC_CONFIG);

out:
	return ret;
}

arch_initcall(pnx4008_dma_init);
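
/*
 * Note (not in the original file): pnx4008_dma_init() carves a 16 KiB
 * (0x4000 byte) coherent block into ll_pool.count descriptor-sized slots
 * and threads the free list through them: each slot's first word points
 * at the next slot and the last one points back at the start, which is
 * the layout pnx4008_alloc_ll_entry() and pnx4008_free_ll_entry() expect.
 * The final __raw_writel(1, DMAC_CONFIG) enables the controller with both
 * AHB masters left in little-endian mode.
 */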