/*
 * linux/arch/arm/mach-pnx4008/dma.c
 *
 * PNX4008 DMA registration and IRQ dispatching
 *
 * Copyright: MontaVista Software Inc. (c) 2005
 *
 * Based on the code from Nicolas Pitre
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>

#include <asm/system.h>
#include <asm/hardware.h>
#include <asm/dma-mapping.h>

#include <asm/mach/dma.h>
#include <asm/arch/clock.h>
static struct dma_channel {
        char *name;
        void (*irq_handler) (int, int, void *, struct pt_regs *);
        void *data;
        struct pnx4008_dma_ll *ll;
        u32 ll_dma;
} dma_channels[MAX_DMA_CHANNELS];
static struct ll_pool {
        void *vaddr;
        void *cur;
        dma_addr_t dma_addr;
        int count;
} ll_pool;
static spinlock_t ll_lock = SPIN_LOCK_UNLOCKED;
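
/*
 * The linked-list entries handed out by pnx4008_alloc_ll_entry() come from a
 * single coherent DMA allocation (set up in pnx4008_dma_init() below).  Free
 * entries are threaded into a simple free list through the entries
 * themselves: ll_pool.cur points at the first free entry and the first word
 * of each free entry points at the next one.  ll_lock protects the list.
 */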
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
{
        struct pnx4008_dma_ll *ll = NULL;
        unsigned long flags;

        spin_lock_irqsave(&ll_lock, flags);
        if (ll_pool.count > 4) {        /* can give one more */
                /* unlink the head of the free list and hand it out */
                ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
                *ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
                *(void **)ll_pool.cur = **(void ***)ll_pool.cur;
                memset(ll, 0, sizeof(*ll));
                ll_pool.count--;
        }
        spin_unlock_irqrestore(&ll_lock, flags);

        return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);
void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
{
        unsigned long flags;

        if (!ll)
                return;

        if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
                printk(KERN_ERR "Trying to free entry not allocated by DMA\n");
                BUG();
        }

        if (ll->flags & DMA_BUFFER_ALLOCATED)
                ll->free(ll->alloc_data);

        spin_lock_irqsave(&ll_lock, flags);
        /* push the entry back onto the free list */
        *(long *)ll = *(long *)ll_pool.cur;
        *(long *)ll_pool.cur = (long)ll;
        ll_pool.count++;
        spin_unlock_irqrestore(&ll_lock, flags);
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
{
        struct pnx4008_dma_ll *ptr;
        u32 dma;

        /* walk the chain, releasing each entry back to the pool */
        while (ll) {
                dma = ll->next_dma;
                ptr = ll->next;
                pnx4008_free_ll_entry(ll, ll_dma);

                ll_dma = dma;
                ll = ptr;
        }
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);
static int dma_channels_requested = 0;
static inline void dma_increment_usage(void)
{
        if (!dma_channels_requested++) {
                struct clk *clk = clk_get(0, "dma_ck");

                if (!IS_ERR(clk)) {
                        clk_set_rate(clk, 1);
                        clk_put(clk);
                }
                pnx4008_config_dma(-1, -1, 1);
        }
}
static inline void dma_decrement_usage(void)
{
        if (!--dma_channels_requested) {
                struct clk *clk = clk_get(0, "dma_ck");

                if (!IS_ERR(clk)) {
                        clk_set_rate(clk, 0);
                        clk_put(clk);
                }
                pnx4008_config_dma(-1, -1, 0);
        }
}
static spinlock_t dma_lock = SPIN_LOCK_UNLOCKED;
static inline void pnx4008_dma_lock(void)
{
        spin_lock_irq(&dma_lock);
}

static inline void pnx4008_dma_unlock(void)
{
        spin_unlock_irq(&dma_lock);
}
#define VALID_CHANNEL(c) (((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
int pnx4008_request_channel(char *name, int ch,
                            void (*irq_handler) (int, int, void *,
                                                 struct pt_regs *), void *data)
{
        int i, found = 0;

        /* basic sanity checks */
        if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
                return -EINVAL;

        pnx4008_dma_lock();

        /* try grabbing a DMA channel with the requested priority */
        for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
                if (!dma_channels[i].name && (ch == -1 || ch == i)) {
                        found = 1;
                        break;
                }
        }

        if (found) {
                dma_increment_usage();
                dma_channels[i].name = name;
                dma_channels[i].irq_handler = irq_handler;
                dma_channels[i].data = data;
                dma_channels[i].ll = NULL;
                dma_channels[i].ll_dma = 0;
        } else {
                printk(KERN_WARNING "No more available DMA channels for %s\n",
                       name);
                i = -ENODEV;
        }

        pnx4008_dma_unlock();

        return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);
void pnx4008_free_channel(int ch)
{
        if (!dma_channels[ch].name) {
                printk(KERN_ERR
                       "%s: trying to free channel %d which is already freed\n",
                       __FUNCTION__, ch);
                return;
        }

        pnx4008_dma_lock();
        if (dma_channels[ch].ll) {
                pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
                dma_channels[ch].ll = NULL;
        }

        dma_decrement_usage();
        dma_channels[ch].name = NULL;
        pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
        unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

        /* -1 leaves a field untouched; 0/1 clears/sets it */
        if (ahb_m1_be == 0)
                dma_cfg &= ~(1 << 1);
        else if (ahb_m1_be == 1)
                dma_cfg |= (1 << 1);

        if (ahb_m2_be == 0)
                dma_cfg &= ~(1 << 2);
        else if (ahb_m2_be == 1)
                dma_cfg |= (1 << 2);

        if (enable == 0)
                dma_cfg &= ~(1 << 0);
        else if (enable == 1)
                dma_cfg |= (1 << 0);

        pnx4008_dma_lock();
        __raw_writel(dma_cfg, DMAC_CONFIG);
        pnx4008_dma_unlock();

        return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
                             unsigned long *ctrl)
{
        int i = 0, dbsize, sbsize, err = 0;

        if (!ctrl || !ch_ctrl) {

        switch (ch_ctrl->tc_mask) {
        switch (ch_ctrl->cacheable) {
        switch (ch_ctrl->bufferable) {
        switch (ch_ctrl->priv_mode) {
        switch (ch_ctrl->di) {
        switch (ch_ctrl->si) {
        switch (ch_ctrl->dest_ahb1) {
        switch (ch_ctrl->src_ahb1) {
        switch (ch_ctrl->dwidth) {
        switch (ch_ctrl->swidth) {

        dbsize = ch_ctrl->dbsize;
        while (!(dbsize & 1)) {
        if (ch_ctrl->dbsize != 1 || i > 8 || i == 1) {

        sbsize = ch_ctrl->sbsize;
        while (!(sbsize & 1)) {
        if (ch_ctrl->sbsize != 1 || i > 8 || i == 1) {

        if (ch_ctrl->tr_size > 0x7ff) {

        *ctrl |= ch_ctrl->tr_size & 0x7ff;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);
int pnx4008_dma_parse_control(unsigned long ctrl,
                              struct pnx4008_dma_ch_ctrl * ch_ctrl)
{
        ch_ctrl->tr_size = ctrl & 0x7ff;

        ch_ctrl->sbsize = 1 << (ctrl & 7);
        if (ch_ctrl->sbsize > 1)
                ch_ctrl->sbsize <<= 1;

        ch_ctrl->dbsize = 1 << (ctrl & 7);
        if (ch_ctrl->dbsize > 1)
                ch_ctrl->dbsize <<= 1;

        ch_ctrl->swidth = WIDTH_BYTE;
        ch_ctrl->swidth = WIDTH_HWORD;
        ch_ctrl->swidth = WIDTH_WORD;

        ch_ctrl->dwidth = WIDTH_BYTE;
        ch_ctrl->dwidth = WIDTH_HWORD;
        ch_ctrl->dwidth = WIDTH_WORD;

        ch_ctrl->src_ahb1 = ctrl & 1;
        ch_ctrl->dest_ahb1 = ctrl & 1;
        ch_ctrl->si = ctrl & 1;
        ch_ctrl->di = ctrl & 1;
        ch_ctrl->priv_mode = ctrl & 1;
        ch_ctrl->bufferable = ctrl & 1;
        ch_ctrl->cacheable = ctrl & 1;
        ch_ctrl->tc_mask = ctrl & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
                            unsigned long *cfg)
{
        if (!cfg || !ch_cfg) {

        switch (ch_cfg->halt) {
        switch (ch_cfg->active) {
        switch (ch_cfg->lock) {
        switch (ch_cfg->itc) {
        switch (ch_cfg->ie) {
        switch (ch_cfg->flow_cntrl) {
        case FC_PER2PER_DPER:
        case FC_PER2PER_SPER:

        *cfg &= ~(0x1f << 6);
        *cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

        *cfg &= ~(0x1f << 1);
        *cfg |= ((ch_cfg->src_per & 0x1f) << 1);
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);
int pnx4008_dma_parse_config(unsigned long cfg,
                             struct pnx4008_dma_ch_config * ch_cfg)
{
        ch_cfg->src_per = cfg & 0x1f;
        ch_cfg->dest_per = cfg & 0x1f;

        ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
        ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
        ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
        ch_cfg->flow_cntrl = FC_PER2PER_DMA;
        ch_cfg->flow_cntrl = FC_PER2PER_DPER;
        ch_cfg->flow_cntrl = FC_MEM2PER_PER;
        ch_cfg->flow_cntrl = FC_PER2MEM_PER;
        ch_cfg->flow_cntrl = FC_PER2PER_SPER;

        ch_cfg->ie = cfg & 1;
        ch_cfg->itc = cfg & 1;
        ch_cfg->lock = cfg & 1;
        ch_cfg->active = cfg & 1;
        ch_cfg->halt = cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
                                  struct pnx4008_dma_ch_ctrl * ctrl)
{
        int new_len = ctrl->tr_size, num_entries = 0;
        int old_len = new_len;
        int src_width, dest_width, count = 1;

        switch (ctrl->swidth) {
        switch (ctrl->dwidth) {

        while (new_len > 0x7FF) {
                new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);

        if (num_entries != 0) {
                struct pnx4008_dma_ll *ll = NULL;
                config->ch_ctrl &= ~0x7ff;
                config->ch_ctrl |= new_len;
                if (!config->is_ll) {
                        while (num_entries) {
                                pnx4008_alloc_ll_entry(&config->
                                pnx4008_alloc_ll_entry(&ll->
                                src_width * new_len * count;
                                ll->src_addr = config->src_addr;
                                dest_width * new_len * count;
                                ll->dest_addr = config->dest_addr;
                                ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;

                        struct pnx4008_dma_ll *ll_old = config->ll;
                        unsigned long ll_dma_old = config->ll_dma;
                        while (num_entries) {
                                pnx4008_alloc_ll_entry(&config->
                                pnx4008_alloc_ll_entry(&ll->
                                src_width * new_len * count;
                                ll->src_addr = config->src_addr;
                                dest_width * new_len * count;
                                ll->dest_addr = config->dest_addr;
                                ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;

                        ll->next_dma = ll_dma_old;

                /* adjust last length/tc */
                ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
                ll->ch_ctrl |= old_len - new_len * (count - 1);
                config->ch_ctrl &= 0x7fffffff;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
                                struct pnx4008_dma_ch_ctrl * ctrl)
{
        int new_len = ctrl->tr_size, num_entries = 0;
        int old_len = new_len;
        int src_width, dest_width, count = 1;

        switch (ctrl->swidth) {
        switch (ctrl->dwidth) {

        while (new_len > 0x7FF) {
                new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);

        if (num_entries != 0) {
                struct pnx4008_dma_ll *ll = NULL;
                cur_ll->ch_ctrl &= ~0x7ff;
                cur_ll->ch_ctrl |= new_len;

                while (num_entries) {
                        pnx4008_alloc_ll_entry(&cur_ll->
                        pnx4008_alloc_ll_entry(&ll->
                        src_width * new_len * count;
                        ll->src_addr = cur_ll->src_addr;
                        dest_width * new_len * count;
                        ll->dest_addr = cur_ll->dest_addr;
                        ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;

                struct pnx4008_dma_ll *ll_old = cur_ll->next;
                unsigned long ll_dma_old = cur_ll->next_dma;
                while (num_entries) {
                        pnx4008_alloc_ll_entry(&cur_ll->
                        pnx4008_alloc_ll_entry(&ll->
                        src_width * new_len * count;
                        ll->src_addr = cur_ll->src_addr;
                        dest_width * new_len * count;
                        ll->dest_addr = cur_ll->dest_addr;
                        ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;

                ll->next_dma = ll_dma_old;

                /* adjust last length/tc */
                ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
                ll->ch_ctrl |= old_len - new_len * (count - 1);
                cur_ll->ch_ctrl &= 0x7fffffff;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);
int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
{
        if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
                return -EINVAL;

        pnx4008_dma_lock();
        __raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
        __raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

        if (config->is_ll)
                __raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
        else
                __raw_writel(0, DMAC_Cx_LLI(ch));

        __raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
        __raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
        pnx4008_dma_unlock();

        return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);
int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
{
        if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
                return -EINVAL;

        pnx4008_dma_lock();
        config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
        config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

        config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
        config->is_ll = config->ll_dma ? 1 : 0;

        config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
        config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
        pnx4008_dma_unlock();

        return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);
int pnx4008_dma_ch_enable(int ch)
{
        unsigned long ch_cfg;

        if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
                return -EINVAL;

        pnx4008_dma_lock();
        ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
        ch_cfg |= 1;
        __raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
        pnx4008_dma_unlock();

        return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);
int pnx4008_dma_ch_disable(int ch)
{
        unsigned long ch_cfg;

        if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
                return -EINVAL;

        pnx4008_dma_lock();
        ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
        ch_cfg &= ~1;
        __raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
        pnx4008_dma_unlock();

        return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);
int pnx4008_dma_ch_enabled(int ch)
{
        unsigned long ch_cfg;

        if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
                return -EINVAL;

        pnx4008_dma_lock();
        ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
        pnx4008_dma_unlock();

        return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);
static irqreturn_t dma_irq_handler(int irq, void *dev_id, struct pt_regs *regs)
{
        int i;
        unsigned long dint = __raw_readl(DMAC_INT_STAT);
        unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
        unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
        unsigned long i_bit;

        for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
                i_bit = 1 << i;
                if (dint & i_bit) {
                        struct dma_channel *channel = &dma_channels[i];

                        if (channel->name && channel->irq_handler) {
                                int cause = 0;

                                if (eint & i_bit)
                                        cause |= DMA_ERR_INT;
                                if (tcint & i_bit)
                                        cause |= DMA_TC_INT;
                                channel->irq_handler(i, cause, channel->data,
                                                     regs);
                        } else {
                                /*
                                 * IRQ for an unregistered DMA channel
                                 */
                                printk(KERN_WARNING
                                       "spurious IRQ for DMA channel %d\n", i);
                        }
                        if (tcint & i_bit)
                                __raw_writel(i_bit, DMAC_INT_TC_CLEAR);
                        if (eint & i_bit)
                                __raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
                }
        }

        return IRQ_HANDLED;
}
static int __init pnx4008_dma_init(void)
{
        int ret, i;

        ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
        if (ret) {
                printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
                goto out;
        }

        ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
        ll_pool.cur = ll_pool.vaddr =
            dma_alloc_coherent(NULL,
                               ll_pool.count * sizeof(struct pnx4008_dma_ll),
                               &ll_pool.dma_addr, GFP_KERNEL);

        if (!ll_pool.vaddr) {
                ret = -ENOMEM;
                free_irq(DMA_INT, NULL);
                goto out;
        }

        /* chain the pool entries into the initial free list */
        for (i = 0; i < ll_pool.count - 1; i++) {
                void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
                *addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
        }
        *(long *)(ll_pool.vaddr +
                  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
            (long)ll_pool.vaddr;

        __raw_writel(1, DMAC_CONFIG);

out:
        return ret;
}

arch_initcall(pnx4008_dma_init);
);