/*
 * Copyright (c) 2010 Broadcom Corporation
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef	_bcmdma_h_
#define	_bcmdma_h_

#include "wlc_types.h"		/* forward structure declarations */

#ifndef _dma_pub_
#define _dma_pub_
struct dma_pub;			/* forward declaration; full definition below */
#endif				/* _dma_pub_ */

/* DMA structure:
 *  supports two DMA engines: 32-bit or 64-bit addressing
 *  the basic DMA register set is per channel (transmit or receive)
 *  a pair of channels is defined for convenience
 */

/* 32-bit addressing */

typedef volatile struct {	/* diag access */
	u32 fifoaddr;		/* diag address */
	u32 fifodatalow;	/* low 32 bits of data */
	u32 fifodatahigh;	/* high 32 bits of data */
	u32 pad;		/* reserved */
} dma32diag_t;

/* 64-bit addressing */

/* dma registers per channel (xmt or rcv) */
typedef volatile struct {
	u32 control;		/* enable, et al */
	u32 ptr;		/* last descriptor posted to chip */
	u32 addrlow;		/* descriptor ring base address, low 32 bits (8K aligned) */
	u32 addrhigh;		/* descriptor ring base address, bits 63:32 (8K aligned) */
	u32 status0;		/* current descriptor, xmt state */
	u32 status1;		/* active descriptor, xmt error */
} dma64regs_t;

/* map/unmap direction */
#define	DMA_TX	1		/* TX direction for DMA */
#define	DMA_RX	2		/* RX direction for DMA */
#define	BUS_SWAP32(v)		(v)

/* range param for dma_getnexttxp() and dma_txreclaim */
typedef enum txd_range {
	HNDDMA_RANGE_ALL = 1,
	HNDDMA_RANGE_TRANSMITTED,
	HNDDMA_RANGE_TRANSFERED
} txd_range_t;
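
/*
 * Illustrative sketch (not part of this interface): with a 'struct dma_pub *di'
 * obtained from dma_attach() (declared below), the range argument selects which
 * of the posted tx descriptors a call considers, e.g.
 *
 *	dma_txreclaim(di, HNDDMA_RANGE_TRANSMITTED);
 *
 * or one at a time via dma_getnexttxp(di, range); see the dispatch macros below.
 */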

/* dma function type */
typedef void (*di_detach_t) (struct dma_pub *dmah);
typedef bool (*di_txreset_t) (struct dma_pub *dmah);
typedef bool (*di_rxreset_t) (struct dma_pub *dmah);
typedef bool (*di_rxidle_t) (struct dma_pub *dmah);
typedef void (*di_txinit_t) (struct dma_pub *dmah);
typedef bool (*di_txenabled_t) (struct dma_pub *dmah);
typedef void (*di_rxinit_t) (struct dma_pub *dmah);
typedef void (*di_txsuspend_t) (struct dma_pub *dmah);
typedef void (*di_txresume_t) (struct dma_pub *dmah);
typedef bool (*di_txsuspended_t) (struct dma_pub *dmah);
typedef bool (*di_txsuspendedidle_t) (struct dma_pub *dmah);
typedef int (*di_txfast_t) (struct dma_pub *dmah, struct sk_buff *p,
			    bool commit);
typedef int (*di_txunframed_t) (struct dma_pub *dmah, void *p, uint len,
				bool commit);
typedef void *(*di_getpos_t) (struct dma_pub *di, bool direction);
typedef void (*di_fifoloopbackenable_t) (struct dma_pub *dmah);
typedef bool (*di_txstopped_t) (struct dma_pub *dmah);
typedef bool (*di_rxstopped_t) (struct dma_pub *dmah);
typedef bool (*di_rxenable_t) (struct dma_pub *dmah);
typedef bool (*di_rxenabled_t) (struct dma_pub *dmah);
typedef void *(*di_rx_t) (struct dma_pub *dmah);
typedef bool (*di_rxfill_t) (struct dma_pub *dmah);
typedef void (*di_txreclaim_t) (struct dma_pub *dmah, txd_range_t range);
typedef void (*di_rxreclaim_t) (struct dma_pub *dmah);
typedef unsigned long (*di_getvar_t) (struct dma_pub *dmah,
				      const char *name);
typedef void *(*di_getnexttxp_t) (struct dma_pub *dmah, txd_range_t range);
typedef void *(*di_getnextrxp_t) (struct dma_pub *dmah, bool forceall);
typedef void *(*di_peeknexttxp_t) (struct dma_pub *dmah);
typedef void *(*di_peeknextrxp_t) (struct dma_pub *dmah);
typedef void (*di_rxparam_get_t) (struct dma_pub *dmah, u16 *rxoffset,
				  u16 *rxbufsize);
typedef void (*di_txblock_t) (struct dma_pub *dmah);
typedef void (*di_txunblock_t) (struct dma_pub *dmah);
typedef uint (*di_txactive_t) (struct dma_pub *dmah);
typedef void (*di_txrotate_t) (struct dma_pub *dmah);
typedef void (*di_counterreset_t) (struct dma_pub *dmah);
typedef uint (*di_ctrlflags_t) (struct dma_pub *dmah, uint mask, uint flags);
typedef char *(*di_dump_t) (struct dma_pub *dmah, struct bcmstrbuf *b,
			    bool dumpring);
typedef char *(*di_dumptx_t) (struct dma_pub *dmah, struct bcmstrbuf *b,
			      bool dumpring);
typedef char *(*di_dumprx_t) (struct dma_pub *dmah, struct bcmstrbuf *b,
			      bool dumpring);
typedef uint (*di_rxactive_t) (struct dma_pub *dmah);
typedef uint (*di_txpending_t) (struct dma_pub *dmah);
typedef uint (*di_txcommitted_t) (struct dma_pub *dmah);

typedef struct di_fcn_s {
	di_detach_t detach;
	di_txinit_t txinit;
	di_txreset_t txreset;
	di_txenabled_t txenabled;
	di_txsuspend_t txsuspend;
	di_txresume_t txresume;
	di_txsuspended_t txsuspended;
	di_txsuspendedidle_t txsuspendedidle;
	di_txfast_t txfast;
	di_txunframed_t txunframed;
	di_getpos_t getpos;
	di_txstopped_t txstopped;
	di_txreclaim_t txreclaim;
	di_getnexttxp_t getnexttxp;
	di_peeknexttxp_t peeknexttxp;
	di_txblock_t txblock;
	di_txunblock_t txunblock;
	di_txactive_t txactive;
	di_txrotate_t txrotate;

	di_rxinit_t rxinit;
	di_rxreset_t rxreset;
	di_rxidle_t rxidle;
	di_rxstopped_t rxstopped;
	di_rxenable_t rxenable;
	di_rxenabled_t rxenabled;
	di_rx_t rx;
	di_rxfill_t rxfill;
	di_rxreclaim_t rxreclaim;
	di_getnextrxp_t getnextrxp;
	di_peeknextrxp_t peeknextrxp;
	di_rxparam_get_t rxparam_get;

	di_fifoloopbackenable_t fifoloopbackenable;
	di_getvar_t d_getvar;
	di_counterreset_t counterreset;
	di_ctrlflags_t ctrlflags;
	di_dump_t dump;
	di_dumptx_t dumptx;
	di_dumprx_t dumprx;
	di_rxactive_t rxactive;
	di_txpending_t txpending;
	di_txcommitted_t txcommitted;
} di_fcn_t;

/*
 * Exported data structure (read-only)
 */
/* export structure */
struct dma_pub {
	const di_fcn_t *di_fn;	/* DMA function pointers */
	uint txavail;		/* # free tx descriptors */
	uint dmactrlflags;	/* dma control flags */

	/* rx error counters */
	uint rxgiants;		/* rx giant frames */
	uint rxnobuf;		/* rx out of dma descriptors */
	/* tx error counters */
	uint txnobuf;		/* tx out of dma descriptors */
};
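
/*
 * Illustrative sketch (not part of this interface): the fields above are
 * intended to be read by the caller, e.g. for back-pressure or statistics.
 * 'di' and stop_queue() are assumptions of this sketch, not part of the API.
 *
 *	if (di->txavail == 0)
 *		stop_queue();		(hypothetical flow-control hook)
 *	pr_debug("rx no-buf drops: %u\n", di->rxnobuf);
 */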

extern struct dma_pub *dma_attach(char *name, struct si_pub *sih,
				  void *dmaregstx, void *dmaregsrx,
				  uint ntxd, uint nrxd,
				  uint rxbufsize, int rxextheadroom,
				  uint nrxpost, uint rxoffset,
				  uint *msg_level);

extern const di_fcn_t dma64proc;
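
/*
 * Illustrative sketch (not part of this interface): bringing up a DMA channel
 * pair.  All variable names and argument values here are assumptions made for
 * the example, not values mandated by this header.
 *
 *	struct dma_pub *di;
 *
 *	di = dma_attach("wl0", sih, txregs, rxregs, ntxd, nrxd,
 *			rxbufsize, rxextheadroom, nrxpost, rxoffset,
 *			&msg_level);
 *	if (di == NULL)
 *		return -ENOMEM;		(hypothetical error handling)
 *
 * Subsequent operations go through the dma64proc function table via the
 * dma_xxx() dispatch macros below.
 */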

#define dma_detach(di)			(dma64proc.detach(di))
#define dma_txreset(di)			(dma64proc.txreset(di))
#define dma_rxreset(di)			(dma64proc.rxreset(di))
#define dma_rxidle(di)			(dma64proc.rxidle(di))
#define dma_txinit(di)			(dma64proc.txinit(di))
#define dma_txenabled(di)		(dma64proc.txenabled(di))
#define dma_rxinit(di)			(dma64proc.rxinit(di))
#define dma_txsuspend(di)		(dma64proc.txsuspend(di))
#define dma_txresume(di)		(dma64proc.txresume(di))
#define dma_txsuspended(di)		(dma64proc.txsuspended(di))
#define dma_txsuspendedidle(di)		(dma64proc.txsuspendedidle(di))
#define dma_txfast(di, p, commit)	(dma64proc.txfast(di, p, commit))
#define dma_txunframed(di, p, l, commit) (dma64proc.txunframed(di, p, l, commit))
#define dma_getpos(di, dir)		(dma64proc.getpos(di, dir))
#define dma_fifoloopbackenable(di)	(dma64proc.fifoloopbackenable(di))
#define dma_txstopped(di)		(dma64proc.txstopped(di))
#define dma_rxstopped(di)		(dma64proc.rxstopped(di))
#define dma_rxenable(di)		(dma64proc.rxenable(di))
#define dma_rxenabled(di)		(dma64proc.rxenabled(di))
#define dma_rx(di)			(dma64proc.rx(di))
#define dma_rxfill(di)			(dma64proc.rxfill(di))
#define dma_txreclaim(di, range)	(dma64proc.txreclaim(di, range))
#define dma_rxreclaim(di)		(dma64proc.rxreclaim(di))
#define dma_getvar(di, name)		(dma64proc.d_getvar(di, name))
#define dma_getnexttxp(di, range)	(dma64proc.getnexttxp(di, range))
#define dma_getnextrxp(di, forceall)	(dma64proc.getnextrxp(di, forceall))
#define dma_peeknexttxp(di)		(dma64proc.peeknexttxp(di))
#define dma_peeknextrxp(di)		(dma64proc.peeknextrxp(di))
#define dma_rxparam_get(di, off, bufs)	(dma64proc.rxparam_get(di, off, bufs))

#define dma_txblock(di)			(dma64proc.txblock(di))
#define dma_txunblock(di)		(dma64proc.txunblock(di))
#define dma_txactive(di)		(dma64proc.txactive(di))
#define dma_rxactive(di)		(dma64proc.rxactive(di))
#define dma_txrotate(di)		(dma64proc.txrotate(di))
#define dma_counterreset(di)		(dma64proc.counterreset(di))
#define dma_ctrlflags(di, mask, flags)	(dma64proc.ctrlflags((di), (mask), (flags)))
#define dma_txpending(di)		(dma64proc.txpending(di))
#define dma_txcommitted(di)		(dma64proc.txcommitted(di))
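
/*
 * Illustrative sketch (not part of this interface): a minimal transmit path
 * built on the dispatch macros above.  'di' and 'skb' are assumed to exist
 * in the caller.
 *
 *	dma_txinit(di);
 *	...
 *	if (dma_txfast(di, skb, true) < 0)
 *		return -ENOSPC;		(ring full; hypothetical handling)
 *
 * and later, when the device signals tx completion:
 *
 *	struct sk_buff *p;
 *
 *	while ((p = dma_getnexttxp(di, HNDDMA_RANGE_TRANSMITTED)) != NULL)
 *		dev_kfree_skb(p);
 */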

/* Return the allowed DMA address width.
 * This needs to be done after SB attach but before dma attach.
 * SB attach provides the ability to probe backplane and dma core capabilities;
 * this info is needed by DMA_ALLOC_CONSISTENT in dma attach.
 */
extern uint dma_addrwidth(struct si_pub *sih, void *dmaregs);
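
/*
 * Illustrative ordering sketch (assumed names, not part of this interface):
 * per the comment above, dma_addrwidth() runs after the backplane (SB) attach
 * and before dma_attach().
 *
 *	sih = <backplane/SB attach>;
 *	addrwidth = dma_addrwidth(sih, dmaregs);
 *	di = dma_attach(...);
 */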

void dma_walk_packets(struct dma_pub *dmah, void (*callback_fnc)
		      (void *pkt, void *arg_a), void *arg_a);
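
/*
 * Illustrative sketch (not part of this interface): a hypothetical callback
 * for dma_walk_packets(); it is invoked with each packet the walk visits plus
 * the caller-supplied 'arg_a'.
 *
 *	static void count_pkt(void *pkt, void *arg_a)
 *	{
 *		(*(uint *)arg_a)++;
 *	}
 *
 *	uint n = 0;
 *	dma_walk_packets(di, count_pkt, &n);
 */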

/*
 * DMA(Bug): on some chips the hardware seems to declare that the packet is
 * ready, but the packet length is not yet updated (by DMA) at the expected
 * time.  The workaround is to hold the processor until DMA updates the
 * length, staying off the bus so DMA can update the length in the buffer.
 */
static inline void dma_spin_for_len(uint len, struct sk_buff *head)
{
#if defined(__mips__)
	if (!len) {
		while (!(len = *(u16 *) KSEG1ADDR(head->data)))
			udelay(1);

		*(u16 *) (head->data) = cpu_to_le16((u16) len);
	}
#endif				/* defined(__mips__) */
}

#endif				/* _bcmdma_h_ */