/*
 * include/asm-ppc/ppc4xx_dma.h
 *
 * IBM PPC4xx DMA engine library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up a bit more, Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#ifdef __KERNEL__
#ifndef __ASMPPC_PPC4xx_DMA_H
#define __ASMPPC_PPC4xx_DMA_H

#include <linux/config.h>
#include <linux/types.h>
#include <asm/ibm4xx.h>

#define MAX_PPC4xx_DMA_CHANNELS		4

/* in arch/ppc/kernel/setup.c -- Cort */
extern unsigned long DMA_MODE_WRITE, DMA_MODE_READ;
/*
 * Function return status codes
 * These values are used to indicate whether or not the function
 * call was successful, or a bad/invalid parameter was passed.
 */
#define DMA_STATUS_GOOD			0
#define DMA_STATUS_BAD_CHANNEL		1
#define DMA_STATUS_BAD_HANDLE		2
#define DMA_STATUS_BAD_MODE		3
#define DMA_STATUS_NULL_POINTER		4
#define DMA_STATUS_OUT_OF_MEMORY	5
#define DMA_STATUS_SGL_LIST_EMPTY	6
#define DMA_STATUS_GENERAL_ERROR	7
#define DMA_STATUS_CHANNEL_NOTFREE	8

#define DMA_CHANNEL_BUSY		0x80000000
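/*
 * Usage note (illustrative, not from the original header): the ppc4xx_*
 * routines declared later in this file return one of the codes above, so a
 * typical caller simply tests for DMA_STATUS_GOOD, e.g.
 *
 *	if (ppc4xx_init_dma_channel(dmanr, &p_init) != DMA_STATUS_GOOD)
 *		goto err;
 */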
/*
 * These indicate status as returned from the DMA Status Register.
 */
#define DMA_STATUS_NO_ERROR	0
#define DMA_STATUS_CS		1	/* Count Status        */
#define DMA_STATUS_TS		2	/* Transfer Status     */
#define DMA_STATUS_DMA_ERROR	3	/* DMA Error Occurred  */
#define DMA_STATUS_DMA_BUSY	4	/* The channel is busy */
/*
 * DMA Channel Control Registers
 */

#ifdef CONFIG_44x
#define PPC4xx_DMA_64BIT
#define DMA_CR_OFFSET 1
#else
#define DMA_CR_OFFSET 0
#endif
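/*
 * Note: the "(n - DMA_CR_OFFSET)" shift amounts used below let the same
 * accessor macros serve both control-register layouts; on parts that define
 * PPC4xx_DMA_64BIT the affected fields sit one bit position lower than on
 * the other 4xx parts.
 */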
#define DMA_CE_ENABLE (1<<31)	/* DMA Channel Enable */
#define SET_DMA_CE_ENABLE(x) (((x)&0x1)<<31)
#define GET_DMA_CE_ENABLE(x) (((x)&DMA_CE_ENABLE)>>31)

#define DMA_CIE_ENABLE (1<<30)	/* DMA Channel Interrupt Enable */
#define SET_DMA_CIE_ENABLE(x) (((x)&0x1)<<30)
#define GET_DMA_CIE_ENABLE(x) (((x)&DMA_CIE_ENABLE)>>30)

#define DMA_TD (1<<29)		/* Transfer Direction */
#define SET_DMA_TD(x) (((x)&0x1)<<29)
#define GET_DMA_TD(x) (((x)&DMA_TD)>>29)

#define DMA_PL (1<<28)		/* Peripheral Location */
#define SET_DMA_PL(x) (((x)&0x1)<<28)
#define GET_DMA_PL(x) (((x)&DMA_PL)>>28)

#define EXTERNAL_PERIPHERAL	0
#define INTERNAL_PERIPHERAL	1
#define SET_DMA_PW(x) (((x)&0x3)<<(26-DMA_CR_OFFSET))	/* Peripheral Width */
#define DMA_PW_MASK SET_DMA_PW(3)
#define PW_8	0
#define PW_16	1
#define PW_32	2
#define PW_64	3
/* FIXME: Add PW_128 support for 440GP DMA block */
#define GET_DMA_PW(x) (((x)&DMA_PW_MASK)>>(26-DMA_CR_OFFSET))
#define DMA_DAI (1<<(25-DMA_CR_OFFSET))	/* Destination Address Increment */
#define SET_DMA_DAI(x) (((x)&0x1)<<(25-DMA_CR_OFFSET))

#define DMA_SAI (1<<(24-DMA_CR_OFFSET))	/* Source Address Increment */
#define SET_DMA_SAI(x) (((x)&0x1)<<(24-DMA_CR_OFFSET))

#define DMA_BEN (1<<(23-DMA_CR_OFFSET))	/* Buffer Enable */
#define SET_DMA_BEN(x) (((x)&0x1)<<(23-DMA_CR_OFFSET))

#define SET_DMA_TM(x) (((x)&0x3)<<(21-DMA_CR_OFFSET))	/* Transfer Mode */
#define DMA_TM_MASK SET_DMA_TM(3)
#define TM_PERIPHERAL	0	/* Peripheral */
#define TM_RESERVED	1	/* Reserved */
#define TM_S_MM		2	/* Memory to Memory */
#define TM_D_MM		3	/* Device Paced Memory to Memory */
#define GET_DMA_TM(x) (((x)&DMA_TM_MASK)>>(21-DMA_CR_OFFSET))

#define SET_DMA_PSC(x) (((x)&0x3)<<(19-DMA_CR_OFFSET))	/* Peripheral Setup Cycles */
#define DMA_PSC_MASK SET_DMA_PSC(3)
#define GET_DMA_PSC(x) (((x)&DMA_PSC_MASK)>>(19-DMA_CR_OFFSET))

#define SET_DMA_PWC(x) (((x)&0x3F)<<(13-DMA_CR_OFFSET))	/* Peripheral Wait Cycles */
#define DMA_PWC_MASK SET_DMA_PWC(0x3F)
#define GET_DMA_PWC(x) (((x)&DMA_PWC_MASK)>>(13-DMA_CR_OFFSET))

#define SET_DMA_PHC(x) (((x)&0x7)<<(10-DMA_CR_OFFSET))	/* Peripheral Hold Cycles */
#define DMA_PHC_MASK SET_DMA_PHC(0x7)
#define GET_DMA_PHC(x) (((x)&DMA_PHC_MASK)>>(10-DMA_CR_OFFSET))

#define DMA_ETD_OUTPUT (1<<(9-DMA_CR_OFFSET))	/* EOT pin is a TC output */
#define SET_DMA_ETD(x) (((x)&0x1)<<(9-DMA_CR_OFFSET))

#define DMA_TCE_ENABLE (1<<(8-DMA_CR_OFFSET))	/* Terminal Count Enable */
#define SET_DMA_TCE(x) (((x)&0x1)<<(8-DMA_CR_OFFSET))

#define DMA_DEC (1<<2)		/* Address Decrement */
#define SET_DMA_DEC(x) (((x)&0x1)<<2)
#define GET_DMA_DEC(x) (((x)&DMA_DEC)>>2)
/*
 * These modes are defined in a way that makes it possible to
 * simply "or" in the value in the control register.
 */

#define DMA_MODE_MM		(SET_DMA_TM(TM_S_MM))	/* memory to memory */

/* Device-paced memory to memory, */
/* device is at source address    */
#define DMA_MODE_MM_DEVATSRC	(DMA_TD | SET_DMA_TM(TM_D_MM))

/* Device-paced memory to memory,   */
/* device is at destination address */
#define DMA_MODE_MM_DEVATDST	(SET_DMA_TM(TM_D_MM))
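/*
 * Example (illustrative, not from the original header): a driver builds the
 * rest of the control word first and then ORs in one of the modes above,
 * e.g. for a device-paced copy with the device at the source address:
 *
 *	u32 cr;
 *
 *	cr  = SET_DMA_CE_ENABLE(1) | SET_DMA_CIE_ENABLE(1) | SET_DMA_PW(PW_32);
 *	cr |= DMA_MODE_MM_DEVATSRC;
 */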
#define SET_DMA_PREFETCH(x) (((x)&0x3)<<(4-DMA_CR_OFFSET))	/* Memory Read Prefetch */
#define DMA_PREFETCH_MASK SET_DMA_PREFETCH(3)
#define PREFETCH_1	0	/* Prefetch 1 Double Word  */
#define PREFETCH_2	1	/* Prefetch 2 Double Words */
#define PREFETCH_4	2	/* Prefetch 4 Double Words */
#define GET_DMA_PREFETCH(x) (((x)&DMA_PREFETCH_MASK)>>(4-DMA_CR_OFFSET))
#define DMA_PCE (1<<(3-DMA_CR_OFFSET))	/* Parity Check Enable */
#define SET_DMA_PCE(x) (((x)&0x1)<<(3-DMA_CR_OFFSET))
#define GET_DMA_PCE(x) (((x)&DMA_PCE)>>(3-DMA_CR_OFFSET))
#define DMA_ECE_ENABLE (1<<5)	/* ECE chain mode enable */
#define SET_DMA_ECE(x) (((x)&0x1)<<5)
#define GET_DMA_ECE(x) (((x)&DMA_ECE_ENABLE)>>5)

#define DMA_TCD_DISABLE (1<<4)	/* TC chain mode disable */
#define SET_DMA_TCD(x) (((x)&0x1)<<4)
#define GET_DMA_TCD(x) (((x)&DMA_TCD_DISABLE)>>4)
typedef uint32_t sgl_handle_t;
#ifdef CONFIG_PPC4xx_EDMA

#define SGL_LIST_SIZE	4096
#define DMA_PPC4xx_SIZE	SGL_LIST_SIZE
#define SET_DMA_PRIORITY(x) (((x)&0x3)<<(6-DMA_CR_OFFSET))	/* DMA Channel Priority */
#define DMA_PRIORITY_MASK SET_DMA_PRIORITY(3)
#define PRIORITY_LOW		0
#define PRIORITY_MID_LOW	1
#define PRIORITY_MID_HIGH	2
#define PRIORITY_HIGH		3
#define GET_DMA_PRIORITY(x) (((x)&DMA_PRIORITY_MASK)>>(6-DMA_CR_OFFSET))
/*
 * DMA Polarity Configuration Register
 */
#define DMAReq_ActiveLow(chan)	(1<<(31-(chan*3)))
#define DMAAck_ActiveLow(chan)	(1<<(30-(chan*3)))
#define EOT_ActiveLow(chan)	(1<<(29-(chan*3)))	/* End of Transfer */
/*
 * DMA Sleep Mode Register
 */
#define SLEEP_MODE_ENABLE (1<<21)
/*
 * DMA Status Register
 */
#define DMA_CS0	(1<<31)	/* Terminal Count has been reached */
#define DMA_CS1	(1<<30)
#define DMA_CS2	(1<<29)
#define DMA_CS3	(1<<28)

#define DMA_TS0	(1<<27)	/* End of Transfer has been requested */
#define DMA_TS1	(1<<26)
#define DMA_TS2	(1<<25)
#define DMA_TS3	(1<<24)

#define DMA_CH0_ERR	(1<<23)	/* DMA Channel 0 Error */
#define DMA_CH1_ERR	(1<<22)
#define DMA_CH2_ERR	(1<<21)
#define DMA_CH3_ERR	(1<<20)

#define DMA_IN_DMA_REQ0	(1<<19)	/* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1	(1<<18)
#define DMA_IN_DMA_REQ2	(1<<17)
#define DMA_IN_DMA_REQ3	(1<<16)

#define DMA_EXT_DMA_REQ0	(1<<15)	/* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1	(1<<14)
#define DMA_EXT_DMA_REQ2	(1<<13)
#define DMA_EXT_DMA_REQ3	(1<<12)

#define DMA_CH0_BUSY	(1<<11)	/* DMA Channel 0 Busy */
#define DMA_CH1_BUSY	(1<<10)
#define DMA_CH2_BUSY	(1<<9)
#define DMA_CH3_BUSY	(1<<8)

#define DMA_SG0	(1<<7)	/* DMA Channel 0 Scatter/Gather in progress */
#define DMA_SG1	(1<<6)
#define DMA_SG2	(1<<5)
#define DMA_SG3	(1<<4)
/*
 * DMA SG Command Register
 */
#define SSG_ENABLE(chan)	(1<<(31-chan))	/* Start Scatter Gather */
#define SSG_MASK_ENABLE(chan)	(1<<(15-chan))	/* Enable writing to SSG0 bit */
/*
 * DMA Scatter/Gather Descriptor Bit fields
 */
#define SG_LINK		(1<<31)	/* Link */
#define SG_TCI_ENABLE	(1<<29)	/* Enable Terminal Count Interrupt */
#define SG_ETI_ENABLE	(1<<28)	/* Enable End of Transfer Interrupt */
#define SG_ERI_ENABLE	(1<<27)	/* Enable Error Interrupt */
#define SG_COUNT_MASK	0xFFFF	/* Count Field */
#define SET_DMA_CONTROL \
	(SET_DMA_CIE_ENABLE(p_init->int_enable) |	/* interrupt enable         */ \
	 SET_DMA_BEN(p_init->buffer_enable) |		/* buffer enable            */ \
	 SET_DMA_ETD(p_init->etd_output) |		/* end of transfer pin      */ \
	 SET_DMA_TCE(p_init->tce_enable) |		/* terminal count enable    */ \
	 SET_DMA_PL(p_init->pl) |			/* peripheral location      */ \
	 SET_DMA_DAI(p_init->dai) |			/* dest addr increment      */ \
	 SET_DMA_SAI(p_init->sai) |			/* src addr increment       */ \
	 SET_DMA_PRIORITY(p_init->cp) |			/* channel priority         */ \
	 SET_DMA_PW(p_init->pwidth) |			/* peripheral/bus width     */ \
	 SET_DMA_PSC(p_init->psc) |			/* peripheral setup cycles  */ \
	 SET_DMA_PWC(p_init->pwc) |			/* peripheral wait cycles   */ \
	 SET_DMA_PHC(p_init->phc) |			/* peripheral hold cycles   */ \
	 SET_DMA_PREFETCH(p_init->pf)			/* read prefetch            */)
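/*
 * Note: SET_DMA_CONTROL expands to an expression that reads the fields of a
 * local "ppc_dma_ch_t *p_init" (the channel configuration being programmed),
 * so it can only be used where such a variable is in scope; it is not a
 * general-purpose function-like macro.
 */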
#define GET_DMA_POLARITY(chan) (DMAReq_ActiveLow(chan) | DMAAck_ActiveLow(chan) | EOT_ActiveLow(chan))
#elif defined(CONFIG_STBXXX_DMA)	/* stb03xxx */

#define DMA_PPC4xx_SIZE	4096
/*
 * DMA Status Register
 */

#define SET_DMA_PRIORITY(x)	(((x)&0x00800001))	/* DMA Channel Priority */
#define DMA_PRIORITY_MASK	0x00800001
#define PRIORITY_LOW		0x00000000
#define PRIORITY_MID_LOW	0x00000001
#define PRIORITY_MID_HIGH	0x00800000
#define PRIORITY_HIGH		0x00800001
#define GET_DMA_PRIORITY(x) (((((x)&DMA_PRIORITY_MASK)&0x00800000)>>22) | (((x)&DMA_PRIORITY_MASK)&0x00000001))
#define DMA_CS0	(1<<31)	/* Terminal Count has been reached */
#define DMA_CS1	(1<<30)
#define DMA_CS2	(1<<29)
#define DMA_CS3	(1<<28)

#define DMA_TS0	(1<<27)	/* End of Transfer has been requested */
#define DMA_TS1	(1<<26)
#define DMA_TS2	(1<<25)
#define DMA_TS3	(1<<24)

#define DMA_CH0_ERR	(1<<23)	/* DMA Channel 0 Error */
#define DMA_CH1_ERR	(1<<22)
#define DMA_CH2_ERR	(1<<21)
#define DMA_CH3_ERR	(1<<20)

#define DMA_CT0	(1<<19)	/* Chained transfer */

#define DMA_IN_DMA_REQ0	(1<<18)	/* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1	(1<<17)
#define DMA_IN_DMA_REQ2	(1<<16)
#define DMA_IN_DMA_REQ3	(1<<15)

#define DMA_EXT_DMA_REQ0	(1<<14)	/* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1	(1<<13)
#define DMA_EXT_DMA_REQ2	(1<<12)
#define DMA_EXT_DMA_REQ3	(1<<11)

#define DMA_CH0_BUSY	(1<<10)	/* DMA Channel 0 Busy */
#define DMA_CH1_BUSY	(1<<9)
#define DMA_CH2_BUSY	(1<<8)
#define DMA_CH3_BUSY	(1<<7)

#define DMA_CT1	(1<<6)	/* Chained transfer */
#define DMA_CT2	(1<<5)
#define DMA_CT3	(1<<4)
#define DMA_CH_ENABLE	(1<<7)	/* Chain enable */
#define SET_DMA_CH(x)	(((x)&0x1)<<7)
#define GET_DMA_CH(x)	(((x)&DMA_CH_ENABLE)>>7)
/* STBx25xxx dma unique */
/* enable device port on a dma channel
 * example ext 0 on dma 1
 */
#define SERIAL2_XMIT	7
#define SERIAL2_RECV	6
#define SERIAL1_XMIT	3
#define SERIAL1_RECV	2
#define SERIAL0_XMIT	1
#define SERIAL0_RECV	0
/*
 * Bit 30 must be one for Redwoods, otherwise transfers may receive errors.
 */
#define DMA_CR_MB0	0x2
#define SET_DMA_CONTROL \
	(SET_DMA_CIE_ENABLE(p_init->int_enable) |	/* interrupt enable         */ \
	 SET_DMA_ETD(p_init->etd_output) |		/* end of transfer pin      */ \
	 SET_DMA_TCE(p_init->tce_enable) |		/* terminal count enable    */ \
	 SET_DMA_PL(p_init->pl) |			/* peripheral location      */ \
	 SET_DMA_DAI(p_init->dai) |			/* dest addr increment      */ \
	 SET_DMA_SAI(p_init->sai) |			/* src addr increment       */ \
	 SET_DMA_PRIORITY(p_init->cp) |			/* channel priority         */ \
	 SET_DMA_PW(p_init->pwidth) |			/* peripheral/bus width     */ \
	 SET_DMA_PSC(p_init->psc) |			/* peripheral setup cycles  */ \
	 SET_DMA_PWC(p_init->pwc) |			/* peripheral wait cycles   */ \
	 SET_DMA_PHC(p_init->phc) |			/* peripheral hold cycles   */ \
	 SET_DMA_TCD(p_init->tcd_disable) |		/* TC chain mode disable    */ \
	 SET_DMA_ECE(p_init->ece_enable) |		/* ECE chain mode enable    */ \
	 SET_DMA_CH(p_init->ch_enable) |		/* Chain enable             */ \
	 DMA_CR_MB0					/* must be one              */)
#define GET_DMA_POLARITY(chan) chan

#endif

typedef struct {
	unsigned short in_use;	/* set when channel is being used, clr when
				 * available.
				 */
	/*
	 * Valid polarity settings:
	 *   DMAReq_ActiveLow(n)
	 *   DMAAck_ActiveLow(n)
	 *   EOT_ActiveLow(n)
	 *
	 *   n is 0 to max dma chans
	 */
	unsigned int polarity;

	char buffer_enable;	/* Boolean: buffer enable            */
	char tce_enable;	/* Boolean: terminal count enable    */
	char etd_output;	/* Boolean: eot pin is a tc output   */
	char pce;		/* Boolean: parity check enable      */

	/*
	 * Peripheral location:
	 * INTERNAL_PERIPHERAL (UART0 on the 405GP)
	 * EXTERNAL_PERIPHERAL
	 */
	char pl;		/* internal/external peripheral      */

	/*
	 * Valid pwidth settings:
	 *   PW_8
	 *   PW_16
	 *   PW_32
	 *   PW_64
	 */
	unsigned int pwidth;

	char dai;		/* Boolean: dst address increment    */
	char sai;		/* Boolean: src address increment    */

	/*
	 * Valid psc settings: 0-3
	 */
	unsigned int psc;	/* Peripheral Setup Cycles           */

	/*
	 * Valid pwc settings:
	 * 0-63
	 */
	unsigned int pwc;	/* Peripheral Wait Cycles            */

	/*
	 * Valid phc settings:
	 * 0-7
	 */
	unsigned int phc;	/* Peripheral Hold Cycles            */

	/*
	 * Valid cp (channel priority) settings:
	 *   PRIORITY_LOW
	 *   PRIORITY_MID_LOW
	 *   PRIORITY_MID_HIGH
	 *   PRIORITY_HIGH
	 */
	unsigned int cp;	/* channel priority                  */

	/*
	 * Valid pf (memory read prefetch) settings:
	 *   PREFETCH_1
	 *   PREFETCH_2
	 *   PREFETCH_4
	 */
	unsigned int pf;	/* memory read prefetch              */

	/*
	 * Boolean: channel interrupt enable
	 * NOTE: for sgl transfers, only the last descriptor will be setup to
	 * interrupt.
	 */
	char int_enable;

	char shift;		/* easy access to byte_count shift, based on */
				/* the width of the channel                  */

	uint32_t control;	/* channel control word                      */

	/* These variables are used ONLY in single dma transfers */
	unsigned int mode;	/* transfer mode                     */
	char ce;		/* channel enable                    */
#ifdef CONFIG_STB03xxx
	char ch_enable;
	char tcd_disable;
	char ece_enable;
	char td;		/* transfer direction                */
#endif
} ppc_dma_ch_t;

/*
 * PPC44x DMA implementations have a slightly different
 * descriptor layout.  Probably moved about due to the
 * change to 64-bit addresses and link pointer.  I don't
 * know why they didn't just leave control_count after
 * the next pointer.
 */
#ifdef PPC4xx_DMA_64BIT
typedef struct {
	uint32_t control;
	uint32_t control_count;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	phys_addr_t next;
} ppc_sgl_t;
#else
typedef struct {
	uint32_t control;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	uint32_t control_count;
	uint32_t next;
} ppc_sgl_t;
#endif

typedef struct {
	uint32_t control;	/* channel ctrl word; loaded from each descriptor */
	uint32_t sgl_control;	/* LK, TCI, ETI, and ERI bits in sgl descriptor   */
	dma_addr_t dma_addr;	/* dma (physical) address of this list            */
	ppc_sgl_t *phead;
	dma_addr_t phead_dma;
	ppc_sgl_t *ptail;
	dma_addr_t ptail_dma;
} sgl_list_info_t;

typedef struct {
	phys_addr_t *src_addr;
	phys_addr_t *dst_addr;
	phys_addr_t dma_src_addr;
	phys_addr_t dma_dst_addr;
} pci_alloc_desc_t;

extern ppc_dma_ch_t dma_channels[];

/*
 * The DMA API is implemented in ppc4xx_dma.c and ppc4xx_sgdma.c
 */
extern int ppc4xx_init_dma_channel(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_get_channel_config(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_set_channel_priority(unsigned int, unsigned int);
extern unsigned int ppc4xx_get_peripheral_width(unsigned int);
extern void ppc4xx_set_sg_addr(int, phys_addr_t);
extern int ppc4xx_add_dma_sgl(sgl_handle_t, phys_addr_t, phys_addr_t, unsigned int);
extern void ppc4xx_enable_dma_sgl(sgl_handle_t);
extern void ppc4xx_disable_dma_sgl(sgl_handle_t);
extern int ppc4xx_get_dma_sgl_residue(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_delete_dma_sgl_element(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
extern void ppc4xx_free_dma_handle(sgl_handle_t);
extern int ppc4xx_get_dma_status(void);
extern void ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr);
extern void ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr);
extern void ppc4xx_enable_dma(unsigned int dmanr);
extern void ppc4xx_disable_dma(unsigned int dmanr);
extern void ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count);
extern int ppc4xx_get_dma_residue(unsigned int dmanr);
extern void ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
				 phys_addr_t dst_dma_addr);
extern int ppc4xx_enable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_disable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_clr_dma_status(unsigned int dmanr);
extern int ppc4xx_map_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_disable_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode);
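/*
 * Illustrative sketch (not from the original header) of how the single-
 * transfer API above is typically strung together for a memory-to-memory
 * copy; dmanr, src_phys, dst_phys and nwords are placeholder variables and
 * the flag choices are board and driver specific:
 *
 *	ppc_dma_ch_t p_init = {
 *		.pwidth = PW_32,
 *		.dai    = 1,
 *		.sai    = 1,
 *		.cp     = PRIORITY_LOW,
 *	};
 *
 *	if (ppc4xx_init_dma_channel(dmanr, &p_init) != DMA_STATUS_GOOD)
 *		return -EBUSY;
 *	ppc4xx_set_dma_mode(dmanr, DMA_MODE_MM);
 *	ppc4xx_set_src_addr(dmanr, src_phys);
 *	ppc4xx_set_dst_addr(dmanr, dst_phys);
 *	ppc4xx_set_dma_count(dmanr, nwords);
 *	ppc4xx_enable_dma(dmanr);
 *
 * Scatter/gather transfers instead allocate a handle with
 * ppc4xx_alloc_dma_handle(), append src/dst/count entries with
 * ppc4xx_add_dma_sgl(), and start the list with ppc4xx_enable_dma_sgl().
 */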

/* These are in kernel/dma.c: */

/* reserve a DMA channel */
extern int request_dma(unsigned int dmanr, const char *device_id);

/* release it again */
extern void free_dma(unsigned int dmanr);

#endif				/* __ASMPPC_PPC4xx_DMA_H */
#endif				/* __KERNEL__ */