/* $Id: netjet.c,v 1.29.2.4 2004/02/11 13:21:34 keil Exp $
 *
 * low level stuff for Traverse Technologies NETJet ISDN cards
 *
 * Author	Karsten Keil
 * Copyright	by Karsten Keil      <keil@isdn4linux.de>
 *
 * This software may be used and distributed according to the terms
 * of the GNU General Public License, incorporated herein by reference.
 *
 * Thanks to Traverse Technologies Australia for documents and information
 *
 * 16-Apr-2002 - led code added - Guy Ellis (guy@traverse.com.au)
 *
 */
#include <linux/init.h>
#include "hisax.h"
#include "isac.h"
#include "hscx.h"
#include "isdnl1.h"
#include <linux/interrupt.h>
#include <linux/ppp_defs.h>
#include <asm/io.h>
#include "netjet.h"
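
/*
 * On these cards the Tiger DMA engine only moves raw B-channel bytes
 * between the line and two ring buffers in host memory; there is no
 * hardware HDLC support used here.  Flag generation/detection, zero-bit
 * stuffing and FCS handling are all done in software by the routines
 * below (make_raw_data*, read_raw, write_raw).  Both B channels share one
 * TX and one RX ring: channel 0 lives in the low byte and channel 1 in
 * the next byte of every 32-bit ring word.
 */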
/* Interface functions */

u_char
NETjet_ReadIC(struct IsdnCardState *cs, u_char offset)
{
	u_char ret;

	cs->hw.njet.auxd &= 0xfc;
	cs->hw.njet.auxd |= (offset>>4) & 3;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	ret = bytein(cs->hw.njet.isac + ((offset & 0xf)<<2));
	return(ret);
}
void
NETjet_WriteIC(struct IsdnCardState *cs, u_char offset, u_char value)
{
	cs->hw.njet.auxd &= 0xfc;
	cs->hw.njet.auxd |= (offset>>4) & 3;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	byteout(cs->hw.njet.isac + ((offset & 0xf)<<2), value);
}
void
NETjet_ReadICfifo(struct IsdnCardState *cs, u_char *data, int size)
{
	cs->hw.njet.auxd &= 0xfc;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	insb(cs->hw.njet.isac, data, size);
}
void
NETjet_WriteICfifo(struct IsdnCardState *cs, u_char *data, int size)
{
	cs->hw.njet.auxd &= 0xfc;
	byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
	outsb(cs->hw.njet.isac, data, size);
}
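
/*
 * Write the idle pattern 'fill' into this channel's byte lane of 'cnt'
 * ring words starting at 'pos', wrapping at the end of the TX ring.
 */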
static void fill_mem(struct BCState *bcs, u_int *pos, u_int cnt, int chan, u_char fill)
{
	u_int mask = 0x000000ff, val = 0, *p = pos;
	u_int i;

	val |= fill;
	if (chan) {
		val <<= 8;
		mask <<= 8;
	}
	mask ^= 0xffffffff;
	for (i=0; i<cnt; i++) {
		*p &= mask;
		*p++ |= val;
		if (p > bcs->hw.tiger.s_end)
			p = bcs->hw.tiger.send;
	}
}
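
/*
 * Switch a B channel between L1_MODE_NULL, L1_MODE_TRANS and the two
 * software HDLC modes.  Entering an HDLC mode resets the receive state
 * machine and (re)starts the shared DMA engine; when the last channel is
 * set back to NULL the DMA engine is stopped and its interrupts masked.
 * On NETJet-S cards the per-channel LED is driven via the auxiliary data
 * register as well.
 */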
static void
mode_tiger(struct BCState *bcs, int mode, int bc)
{
	struct IsdnCardState *cs = bcs->cs;
	u_char led;

	if (cs->debug & L1_DEB_HSCX)
		debugl1(cs, "Tiger mode %d bchan %d/%d",
			mode, bc, bcs->channel);
	bcs->mode = mode;
	bcs->channel = bc;
	switch (mode) {
		case (L1_MODE_NULL):
			fill_mem(bcs, bcs->hw.tiger.send,
				NETJET_DMA_TXSIZE, bc, 0xff);
			if (cs->debug & L1_DEB_HSCX)
				debugl1(cs, "Tiger stat rec %d/%d send %d",
					bcs->hw.tiger.r_tot, bcs->hw.tiger.r_err,
					bcs->hw.tiger.s_tot);
			if ((cs->bcs[0].mode == L1_MODE_NULL) &&
				(cs->bcs[1].mode == L1_MODE_NULL)) {
				cs->hw.njet.dmactrl = 0;
				byteout(cs->hw.njet.base + NETJET_DMACTRL,
					cs->hw.njet.dmactrl);
				byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
			}
			if (cs->typ == ISDN_CTYPE_NETJET_S)
			{
				// led off
				led = bc & 0x01;
				led = 0x01 << (6 + led); // convert to mask
				led = ~led;
				cs->hw.njet.auxd &= led;
				byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
			}
			break;
		case (L1_MODE_TRANS):
			break;
		case (L1_MODE_HDLC_56K):
		case (L1_MODE_HDLC):
			fill_mem(bcs, bcs->hw.tiger.send,
				NETJET_DMA_TXSIZE, bc, 0xff);
			bcs->hw.tiger.r_state = HDLC_ZERO_SEARCH;
			bcs->hw.tiger.r_tot = 0;
			bcs->hw.tiger.r_bitcnt = 0;
			bcs->hw.tiger.r_one = 0;
			bcs->hw.tiger.r_err = 0;
			bcs->hw.tiger.s_tot = 0;
			if (! cs->hw.njet.dmactrl) {
				fill_mem(bcs, bcs->hw.tiger.send,
					NETJET_DMA_TXSIZE, !bc, 0xff);
				cs->hw.njet.dmactrl = 1;
				byteout(cs->hw.njet.base + NETJET_DMACTRL,
					cs->hw.njet.dmactrl);
				byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0x0f);
				/* was 0x3f now 0x0f for TJ300 and TJ320 GE 13/07/00 */
			}
			bcs->hw.tiger.sendp = bcs->hw.tiger.send;
			bcs->hw.tiger.free = NETJET_DMA_TXSIZE;
			test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
			if (cs->typ == ISDN_CTYPE_NETJET_S)
			{
				// led on
				led = bc & 0x01;
				led = 0x01 << (6 + led); // convert to mask
				cs->hw.njet.auxd |= led;
				byteout(cs->hw.njet.auxa, cs->hw.njet.auxd);
			}
			break;
	}
	if (cs->debug & L1_DEB_HSCX)
		debugl1(cs, "tiger: set %x %x %x %x/%x pulse=%d",
			bytein(cs->hw.njet.base + NETJET_DMACTRL),
			bytein(cs->hw.njet.base + NETJET_IRQMASK0),
			bytein(cs->hw.njet.base + NETJET_IRQSTAT0),
			inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
			inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
			bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
}
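
/* Dump a send/receive frame as hex to the HiSax debug log, 16 bytes per line. */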
static void printframe(struct IsdnCardState *cs, u_char *buf, int count, char *s) {
	char tmp[128];
	char *t = tmp;
	int i = count, j;
	u_char *p = buf;

	t += sprintf(t, "tiger %s(%4d)", s, count);
	while (i>0) {
		if (i>16)
			j=16;
		else
			j=i;
		QuickHex(t, p, j);
		debugl1(cs, tmp);
		p += j;
		i -= j;
		t = tmp;
		t += sprintf(t, "tiger %s ", s);
	}
}
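
/*
 * MAKE_RAW_BYTE feeds one payload byte (LSB first) into the transmit
 * bitstream and performs HDLC zero-bit stuffing: after five consecutive
 * 1 bits a 0 bit is inserted so that payload data can never look like the
 * 0x7e flag.  A byte of 0xff therefore leaves the packer as nine bits,
 * 11111 0 111.  Completed output bytes are appended to tiger.sendbuf.
 */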
// macro for 64k

#define MAKE_RAW_BYTE for (j=0; j<8; j++) { \
			bitcnt++;\
			s_val >>= 1;\
			if (val & 1) {\
				s_one++;\
				s_val |= 0x80;\
			} else {\
				s_one = 0;\
				s_val &= 0x7f;\
			}\
			if (bitcnt==8) {\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			if (s_one == 5) {\
				s_val >>= 1;\
				s_val &= 0x7f;\
				bitcnt++;\
				s_one = 0;\
			}\
			if (bitcnt==8) {\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			val >>= 1;\
		}
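
/*
 * Build the complete raw 64k bitstream for the current tx_skb in
 * tiger.sendbuf: opening 0x7e flag, bit-stuffed payload, bit-stuffed
 * PPP FCS (low byte first), closing flag and 1-bit padding up to the
 * next byte boundary.  Returns 1 if there is no skb to send.
 */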
static int make_raw_data(struct BCState *bcs) {
	// this make_raw is for 64k
	register u_int i, s_cnt = 0;
	register u_char j;
	register u_char val;
	register u_char s_one = 0;
	register u_char s_val = 0;
	register u_char bitcnt = 0;
	u_int fcs;

	if (!bcs->tx_skb) {
		debugl1(bcs->cs, "tiger make_raw: NULL skb");
		return(1);
	}
	bcs->hw.tiger.sendbuf[s_cnt++] = HDLC_FLAG_VALUE;
	fcs = PPP_INITFCS;
	for (i=0; i<bcs->tx_skb->len; i++) {
		val = bcs->tx_skb->data[i];
		fcs = PPP_FCS (fcs, val);
		MAKE_RAW_BYTE;
	}
	fcs ^= 0xffff;
	val = fcs & 0xff;
	MAKE_RAW_BYTE;
	val = (fcs>>8) & 0xff;
	MAKE_RAW_BYTE;
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==8) {
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger make_raw: in %ld out %d.%d",
			bcs->tx_skb->len, s_cnt, bitcnt);
	if (bitcnt) {
		while (8>bitcnt++) {
			s_val >>= 1;
			s_val |= 0x80;
		}
		bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
		bcs->hw.tiger.sendbuf[s_cnt++] = 0xff;	// NJ<->NJ throughput bug fix
	}
	bcs->hw.tiger.sendcnt = s_cnt;
	bcs->tx_cnt -= bcs->tx_skb->len;
	bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
	return(0);
}
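
/*
 * The 56k variants below do the same job as MAKE_RAW_BYTE/make_raw_data
 * above, but only seven bits of every transmitted byte carry data: after
 * each group of seven bits the packer forces the top bit to 1 and flushes
 * the byte, as needed for 56k restricted B channels.
 */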
// macro for 56k

#define MAKE_RAW_BYTE_56K for (j=0; j<8; j++) { \
			bitcnt++;\
			s_val >>= 1;\
			if (val & 1) {\
				s_one++;\
				s_val |= 0x80;\
			} else {\
				s_one = 0;\
				s_val &= 0x7f;\
			}\
			if (bitcnt==7) {\
				s_val >>= 1;\
				s_val |= 0x80;\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			if (s_one == 5) {\
				s_val >>= 1;\
				s_val &= 0x7f;\
				bitcnt++;\
				s_one = 0;\
			}\
			if (bitcnt==7) {\
				s_val >>= 1;\
				s_val |= 0x80;\
				bcs->hw.tiger.sendbuf[s_cnt++] = s_val;\
				bitcnt = 0;\
			}\
			val >>= 1;\
		}
static int make_raw_data_56k(struct BCState *bcs) {
	// this make_raw is for 56k
	register u_int i, s_cnt = 0;
	register u_char j;
	register u_char val;
	register u_char s_one = 0;
	register u_char s_val = 0;
	register u_char bitcnt = 0;
	u_int fcs;

	if (!bcs->tx_skb) {
		debugl1(bcs->cs, "tiger make_raw_56k: NULL skb");
		return(1);
	}
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==7) {
			s_val >>= 1;
			s_val |= 0x80;
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	fcs = PPP_INITFCS;
	for (i=0; i<bcs->tx_skb->len; i++) {
		val = bcs->tx_skb->data[i];
		fcs = PPP_FCS (fcs, val);
		MAKE_RAW_BYTE_56K;
	}
	fcs ^= 0xffff;
	val = fcs & 0xff;
	MAKE_RAW_BYTE_56K;
	val = (fcs>>8) & 0xff;
	MAKE_RAW_BYTE_56K;
	val = HDLC_FLAG_VALUE;
	for (j=0; j<8; j++) {
		bitcnt++;
		s_val >>= 1;
		if (val & 1)
			s_val |= 0x80;
		else
			s_val &= 0x7f;
		if (bitcnt==7) {
			s_val >>= 1;
			s_val |= 0x80;
			bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
			bitcnt = 0;
		}
		val >>= 1;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger make_raw_56k: in %ld out %d.%d",
			bcs->tx_skb->len, s_cnt, bitcnt);
	if (bitcnt) {
		while (8>bitcnt++) {
			s_val >>= 1;
			s_val |= 0x80;
		}
		bcs->hw.tiger.sendbuf[s_cnt++] = s_val;
		bcs->hw.tiger.sendbuf[s_cnt++] = 0xff;	// NJ<->NJ throughput bug fix
	}
	bcs->hw.tiger.sendcnt = s_cnt;
	bcs->tx_cnt -= bcs->tx_skb->len;
	bcs->hw.tiger.sp = bcs->hw.tiger.sendbuf;
	return(0);
}
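
/*
 * Queue a completed, FCS-checked receive frame (already assembled in
 * tiger.rcvbuf) on the B-channel receive queue and kick the bottom half.
 */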
static void got_frame(struct BCState *bcs, int count) {
	struct sk_buff *skb;

	if (!(skb = dev_alloc_skb(count)))
		printk(KERN_WARNING "TIGER: receive out of memory\n");
	else {
		memcpy(skb_put(skb, count), bcs->hw.tiger.rcvbuf, count);
		skb_queue_tail(&bcs->rqueue, skb);
	}
	test_and_set_bit(B_RCVBUFREADY, &bcs->event);
	schedule_work(&bcs->tqueue);

	if (bcs->cs->debug & L1_DEB_RECEIVE_FRAME)
		printframe(bcs->cs, bcs->hw.tiger.rcvbuf, count, "rec");
}
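
/*
 * Software HDLC receiver.  Walks 'cnt' ring words, picks this channel's
 * byte lane (only 7 bits per byte in 56k mode) and runs the
 * HDLC_ZERO_SEARCH -> HDLC_FLAG_SEARCH -> HDLC_FLAG_FOUND ->
 * HDLC_FRAME_FOUND state machine over the bits: stuffed 0 bits are
 * dropped, the FCS is accumulated on the fly, and got_frame() is called
 * when a closing flag arrives with a good FCS.  The state is saved in
 * bcs->hw.tiger so a frame may span several DMA interrupts.
 */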
static void read_raw(struct BCState *bcs, u_int *buf, int cnt) {
	int i;
	register u_char j;
	register u_char val;
	u_int *pend = bcs->hw.tiger.rec + NETJET_DMA_RXSIZE - 1;
	register u_char state = bcs->hw.tiger.r_state;
	register u_char r_one = bcs->hw.tiger.r_one;
	register u_char r_val = bcs->hw.tiger.r_val;
	register u_int bitcnt = bcs->hw.tiger.r_bitcnt;
	u_int *p = buf;
	int bits;
	u_char mask;

	if (bcs->mode == L1_MODE_HDLC) { // it's 64k
		mask = 0xff;
		bits = 8;
	}
	else { // it's 56K
		mask = 0x7f;
		bits = 7;
	}
	for (i=0; i<cnt; i++) {
		val = bcs->channel ? ((*p>>8) & 0xff) : (*p & 0xff);
		p++;
		if (p > pend)
			p = bcs->hw.tiger.rec;
		if ((val & mask) == mask) {
			state = HDLC_ZERO_SEARCH;
			bcs->hw.tiger.r_tot++;
			bitcnt = 0;
			r_one = 0;
			continue;
		}
		for (j=0; j<bits; j++) {
			if (state == HDLC_ZERO_SEARCH) {
				if (val & 1) {
					r_one++;
				} else {
					r_one=0;
					state= HDLC_FLAG_SEARCH;
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs,"tiger read_raw: zBit(%d,%d,%d) %x",
							bcs->hw.tiger.r_tot,i,j,val);
				}
			} else if (state == HDLC_FLAG_SEARCH) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
					}
				} else {
					if (r_one==6) {
						bitcnt=0;
						r_val=0;
						state=HDLC_FLAG_FOUND;
						if (bcs->cs->debug & L1_DEB_HSCX)
							debugl1(bcs->cs,"tiger read_raw: flag(%d,%d,%d) %x",
								bcs->hw.tiger.r_tot,i,j,val);
					}
					r_one=0;
				}
			} else if (state == HDLC_FLAG_FOUND) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
					} else {
						r_val >>= 1;
						r_val |= 0x80;
						bitcnt++;
					}
				} else {
					if (r_one==6) {
						bitcnt=0;
						r_val=0;
						r_one=0;
						val >>= 1;
						continue;
					} else if (r_one!=5) {
						r_val >>= 1;
						r_val &= 0x7f;
						bitcnt++;
					}
					r_one=0;
				}
				if ((state != HDLC_ZERO_SEARCH) &&
					!(bitcnt & 7)) {
					state=HDLC_FRAME_FOUND;
					bcs->hw.tiger.r_fcs = PPP_INITFCS;
					bcs->hw.tiger.rcvbuf[0] = r_val;
					bcs->hw.tiger.r_fcs = PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs,"tiger read_raw: byte1(%d,%d,%d) rval %x val %x i %x",
							bcs->hw.tiger.r_tot,i,j,r_val,val,
							bcs->cs->hw.njet.irqstat0);
				}
			} else if (state == HDLC_FRAME_FOUND) {
				if (val & 1) {
					r_one++;
					if (r_one>6) {
						state=HDLC_ZERO_SEARCH;
						bitcnt=0;
					} else {
						r_val >>= 1;
						r_val |= 0x80;
						bitcnt++;
					}
				} else {
					if (r_one==6) {
						r_val=0;
						r_one=0;
						bitcnt++;
						if (bitcnt & 7) {
							debugl1(bcs->cs, "tiger: frame not byte aligned");
							state=HDLC_FLAG_SEARCH;
							bcs->hw.tiger.r_err++;
#ifdef ERROR_STATISTIC
							bcs->err_inv++;
#endif
						} else {
							if (bcs->cs->debug & L1_DEB_HSCX)
								debugl1(bcs->cs,"tiger frame end(%d,%d): fcs(%x) i %x",
									i,j,bcs->hw.tiger.r_fcs, bcs->cs->hw.njet.irqstat0);
							if (bcs->hw.tiger.r_fcs == PPP_GOODFCS) {
								got_frame(bcs, (bitcnt>>3)-3);
							} else {
								if (bcs->cs->debug) {
									debugl1(bcs->cs, "tiger FCS error");
									printframe(bcs->cs, bcs->hw.tiger.rcvbuf,
										(bitcnt>>3)-1, "rec");
									bcs->hw.tiger.r_err++;
								}
#ifdef ERROR_STATISTIC
								bcs->err_crc++;
#endif
							}
							state=HDLC_FLAG_FOUND;
						}
						bitcnt=0;
					} else if (r_one==5) {
						val >>= 1;
						r_one=0;
						continue;
					} else {
						r_val >>= 1;
						r_val &= 0x7f;
						bitcnt++;
					}
					r_one=0;
				}
				if ((state == HDLC_FRAME_FOUND) &&
					!(bitcnt & 7)) {
					if ((bitcnt>>3)>=HSCX_BUFMAX) {
						debugl1(bcs->cs, "tiger: frame too big");
						r_val=0;
						state=HDLC_FLAG_SEARCH;
						bcs->hw.tiger.r_err++;
#ifdef ERROR_STATISTIC
						bcs->err_inv++;
#endif
					} else {
						bcs->hw.tiger.rcvbuf[(bitcnt>>3)-1] = r_val;
						bcs->hw.tiger.r_fcs =
							PPP_FCS (bcs->hw.tiger.r_fcs, r_val);
					}
				}
			}
			val >>= 1;
		}
		bcs->hw.tiger.r_tot++;
	}
	bcs->hw.tiger.r_state = state;
	bcs->hw.tiger.r_one = r_one;
	bcs->hw.tiger.r_val = r_val;
	bcs->hw.tiger.r_bitcnt = bitcnt;
}
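
/*
 * RX DMA interrupt: the DMA engine has filled another half of the receive
 * ring.  Process half a ring's worth of data for every B channel that is
 * in an HDLC mode; the "read double dma" warning means the previous half
 * was not handled before the hardware flagged it again (overrun).
 */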
void read_tiger(struct IsdnCardState *cs) {
	u_int *p;
	int cnt = NETJET_DMA_RXSIZE/2;

	if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_READ) {
		debugl1(cs,"tiger warn read double dma %x/%x",
			cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
#ifdef ERROR_STATISTIC
		if (cs->bcs[0].mode)
			cs->bcs[0].err_rdo++;
		if (cs->bcs[1].mode)
			cs->bcs[1].err_rdo++;
#endif
		return;
	} else {
		cs->hw.njet.last_is0 &= ~NETJET_IRQM0_READ;
		cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ);
	}
	if (cs->hw.njet.irqstat0 & NETJET_IRQM0_READ_1)
		p = cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1;
	else
		p = cs->bcs[0].hw.tiger.rec + cnt - 1;
	if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
		read_raw(cs->bcs, p, cnt);

	if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
		read_raw(cs->bcs + 1, p, cnt);
	cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_READ;
}
static void write_raw(struct BCState *bcs, u_int *buf, int cnt);
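
/*
 * Start transmission of bcs->tx_skb: convert it to a raw bitstream with
 * make_raw_data()/make_raw_data_56k() and copy as much of it as possible
 * into the TX ring via write_raw().  Where the copy starts depends on how
 * much of the ring the DMA engine has already consumed (BC_FLG_NOFRAME,
 * BC_FLG_HALF or BC_FLG_EMPTY).
 */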
void netjet_fill_dma(struct BCState *bcs)
{
	register u_int *p, *sp;
	register int cnt;

	if (!bcs->tx_skb)
		return;
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma1: c%d %4x", bcs->channel,
			bcs->Flag);
	if (test_and_set_bit(BC_FLG_BUSY, &bcs->Flag))
		return;
	if (bcs->mode == L1_MODE_HDLC) { // it's 64k
		if (make_raw_data(bcs))
			return;
	}
	else { // it's 56k
		if (make_raw_data_56k(bcs))
			return;
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma2: c%d %4x", bcs->channel,
			bcs->Flag);
	if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
		write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
	} else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
		p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
		sp = bcs->hw.tiger.sendp;
		if (p == bcs->hw.tiger.s_end)
			p = bcs->hw.tiger.send - 1;
		if (sp == bcs->hw.tiger.s_end)
			sp = bcs->hw.tiger.send - 1;
		cnt = p - sp;
		if (cnt < 0) {
			write_raw(bcs, bcs->hw.tiger.sendp, bcs->hw.tiger.free);
		} else {
			p++;
			cnt++;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
			p++;
			cnt++;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
			write_raw(bcs, p, bcs->hw.tiger.free - cnt);
		}
	} else if (test_and_clear_bit(BC_FLG_EMPTY, &bcs->Flag)) {
		p = bus_to_virt(inl(bcs->cs->hw.njet.base + NETJET_DMA_READ_ADR));
		cnt = bcs->hw.tiger.s_end - p;
		if (cnt < 2) {
			p = bcs->hw.tiger.send + 1;
			cnt = NETJET_DMA_TXSIZE/2 - 2;
		} else {
			p++;
			p++;
			if (cnt <= (NETJET_DMA_TXSIZE/2))
				cnt += NETJET_DMA_TXSIZE/2;
			cnt--;
			cnt--;
		}
		write_raw(bcs, p, cnt);
	}
	if (bcs->cs->debug & L1_DEB_HSCX)
		debugl1(bcs->cs,"tiger fill_dma3: c%d %4x", bcs->channel,
			bcs->Flag);
}
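
/*
 * Copy up to 'cnt' bytes of the prepared bitstream into this channel's
 * byte lane of the TX ring starting at 'buf'.  When the frame has been
 * copied completely the skb is acknowledged and freed, the next frame is
 * pulled from the send queue, and any remaining ring space is filled with
 * the 0xff idle pattern.  With no frame in progress the ring section is
 * simply idle-filled and the NOFRAME/HALF/EMPTY flags are advanced.
 */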
static void write_raw(struct BCState *bcs, u_int *buf, int cnt) {
	u_int mask, val, *p = buf;
	u_int i, s_cnt;

	if (cnt <= 0)
		return;
	if (test_bit(BC_FLG_BUSY, &bcs->Flag)) {
		if (bcs->hw.tiger.sendcnt > cnt) {
			s_cnt = cnt;
			bcs->hw.tiger.sendcnt -= cnt;
		} else {
			s_cnt = bcs->hw.tiger.sendcnt;
			bcs->hw.tiger.sendcnt = 0;
		}
		if (bcs->channel)
			mask = 0xffff00ff;
		else
			mask = 0xffffff00;
		for (i=0; i<s_cnt; i++) {
			val = bcs->channel ? ((bcs->hw.tiger.sp[i] <<8) & 0xff00) :
				(bcs->hw.tiger.sp[i]);
			*p &= mask;
			*p++ |= val;
			if (p > bcs->hw.tiger.s_end)
				p = bcs->hw.tiger.send;
		}
		bcs->hw.tiger.s_tot += s_cnt;
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: c%d %p-%p %d/%d %d %x", bcs->channel,
				buf, p, s_cnt, cnt,
				bcs->hw.tiger.sendcnt, bcs->cs->hw.njet.irqstat0);
		if (bcs->cs->debug & L1_DEB_HSCX_FIFO)
			printframe(bcs->cs, bcs->hw.tiger.sp, s_cnt, "snd");
		bcs->hw.tiger.sp += s_cnt;
		bcs->hw.tiger.sendp = p;
		if (!bcs->hw.tiger.sendcnt) {
			if (!bcs->tx_skb) {
				debugl1(bcs->cs,"tiger write_raw: NULL skb s_cnt %d", s_cnt);
			} else {
				if (test_bit(FLG_LLI_L1WAKEUP,&bcs->st->lli.flag) &&
					(PACKET_NOACK != bcs->tx_skb->pkt_type)) {
					u_long flags;
					spin_lock_irqsave(&bcs->aclock, flags);
					bcs->ackcnt += bcs->tx_skb->len;
					spin_unlock_irqrestore(&bcs->aclock, flags);
					schedule_event(bcs, B_ACKPENDING);
				}
				dev_kfree_skb_any(bcs->tx_skb);
				bcs->tx_skb = NULL;
			}
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
			bcs->hw.tiger.free = cnt - s_cnt;
			if (bcs->hw.tiger.free > (NETJET_DMA_TXSIZE/2))
				test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
			else {
				test_and_clear_bit(BC_FLG_HALF, &bcs->Flag);
				test_and_set_bit(BC_FLG_NOFRAME, &bcs->Flag);
			}
			if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) {
				netjet_fill_dma(bcs);
			} else {
				mask ^= 0xffffffff;
				if (s_cnt < cnt) {
					for (i=s_cnt; i<cnt; i++) {
						*p++ |= mask;
						if (p > bcs->hw.tiger.s_end)
							p = bcs->hw.tiger.send;
					}
					if (bcs->cs->debug & L1_DEB_HSCX)
						debugl1(bcs->cs, "tiger write_raw: fill rest %d",
							cnt - s_cnt);
				}
				test_and_set_bit(B_XMTBUFREADY, &bcs->event);
				schedule_work(&bcs->tqueue);
			}
		}
	} else if (test_and_clear_bit(BC_FLG_NOFRAME, &bcs->Flag)) {
		test_and_set_bit(BC_FLG_HALF, &bcs->Flag);
		fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
		bcs->hw.tiger.free += cnt;
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: fill half");
	} else if (test_and_clear_bit(BC_FLG_HALF, &bcs->Flag)) {
		test_and_set_bit(BC_FLG_EMPTY, &bcs->Flag);
		fill_mem(bcs, buf, cnt, bcs->channel, 0xff);
		if (bcs->cs->debug & L1_DEB_HSCX)
			debugl1(bcs->cs,"tiger write_raw: fill full");
	}
}
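
/*
 * TX DMA interrupt: the DMA engine has drained another half of the send
 * ring, so refill it for every B channel that is in an HDLC mode (mirror
 * of read_tiger above).
 */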
void write_tiger(struct IsdnCardState *cs) {
	u_int *p, cnt = NETJET_DMA_TXSIZE/2;

	if ((cs->hw.njet.irqstat0 & cs->hw.njet.last_is0) & NETJET_IRQM0_WRITE) {
		debugl1(cs,"tiger warn write double dma %x/%x",
			cs->hw.njet.irqstat0, cs->hw.njet.last_is0);
#ifdef ERROR_STATISTIC
		if (cs->bcs[0].mode)
			cs->bcs[0].err_tx++;
		if (cs->bcs[1].mode)
			cs->bcs[1].err_tx++;
#endif
		return;
	} else {
		cs->hw.njet.last_is0 &= ~NETJET_IRQM0_WRITE;
		cs->hw.njet.last_is0 |= (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE);
	}
	if (cs->hw.njet.irqstat0 & NETJET_IRQM0_WRITE_1)
		p = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
	else
		p = cs->bcs[0].hw.tiger.send + cnt - 1;
	if ((cs->bcs[0].mode == L1_MODE_HDLC) || (cs->bcs[0].mode == L1_MODE_HDLC_56K))
		write_raw(cs->bcs, p, cnt);
	if ((cs->bcs[1].mode == L1_MODE_HDLC) || (cs->bcs[1].mode == L1_MODE_HDLC_56K))
		write_raw(cs->bcs + 1, p, cnt);
	cs->hw.njet.irqstat0 &= ~NETJET_IRQM0_WRITE;
}
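
/*
 * L2 -> L1 callback of the B-channel stack: queue or start transmission
 * of PH_DATA frames, handle PH_PULL, and forward channel (de)activation
 * to mode_tiger().
 */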
static void
tiger_l2l1(struct PStack *st, int pr, void *arg)
{
	struct BCState *bcs = st->l1.bcs;
	struct sk_buff *skb = arg;
	u_long flags;

	switch (pr) {
		case (PH_DATA | REQUEST):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			if (bcs->tx_skb) {
				skb_queue_tail(&bcs->squeue, skb);
			} else {
				bcs->tx_skb = skb;
				bcs->cs->BC_Send_Data(bcs);
			}
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			break;
		case (PH_PULL | INDICATION):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			if (bcs->tx_skb) {
				printk(KERN_WARNING "tiger_l2l1: this shouldn't happen\n");
			} else {
				bcs->tx_skb = skb;
				bcs->cs->BC_Send_Data(bcs);
			}
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			break;
		case (PH_PULL | REQUEST):
			if (!bcs->tx_skb) {
				test_and_clear_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
				st->l1.l1l2(st, PH_PULL | CONFIRM, NULL);
			} else
				test_and_set_bit(FLG_L1_PULL_REQ, &st->l1.Flags);
			break;
		case (PH_ACTIVATE | REQUEST):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag);
			mode_tiger(bcs, st->l1.mode, st->l1.bc);
			/* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			bcs->cs->cardmsg(bcs->cs, MDL_BC_ASSIGN, (void *)(&st->l1.bc));
			l1_msg_b(st, pr, arg);
			break;
		case (PH_DEACTIVATE | REQUEST):
			/* 2001/10/04 Christoph Ersfeld, Formula-n Europe AG */
			bcs->cs->cardmsg(bcs->cs, MDL_BC_RELEASE, (void *)(&st->l1.bc));
			l1_msg_b(st, pr, arg);
			break;
		case (PH_DEACTIVATE | CONFIRM):
			spin_lock_irqsave(&bcs->cs->lock, flags);
			test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag);
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
			mode_tiger(bcs, 0, st->l1.bc);
			spin_unlock_irqrestore(&bcs->cs->lock, flags);
			st->l1.l1l2(st, PH_DEACTIVATE | CONFIRM, NULL);
			break;
	}
}
static void
close_tigerstate(struct BCState *bcs)
{
	mode_tiger(bcs, 0, bcs->channel);
	if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) {
		kfree(bcs->hw.tiger.rcvbuf);
		bcs->hw.tiger.rcvbuf = NULL;
		kfree(bcs->hw.tiger.sendbuf);
		bcs->hw.tiger.sendbuf = NULL;
		skb_queue_purge(&bcs->rqueue);
		skb_queue_purge(&bcs->squeue);
		if (bcs->tx_skb) {
			dev_kfree_skb_any(bcs->tx_skb);
			bcs->tx_skb = NULL;
			test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
		}
	}
}
static int
open_tigerstate(struct IsdnCardState *cs, struct BCState *bcs)
{
	if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) {
		if (!(bcs->hw.tiger.rcvbuf = kmalloc(HSCX_BUFMAX, GFP_ATOMIC))) {
			printk(KERN_WARNING
			       "HiSax: No memory for tiger.rcvbuf\n");
			return (1);
		}
		if (!(bcs->hw.tiger.sendbuf = kmalloc(RAW_BUFMAX, GFP_ATOMIC))) {
			printk(KERN_WARNING
			       "HiSax: No memory for tiger.sendbuf\n");
			return (1);
		}
		skb_queue_head_init(&bcs->rqueue);
		skb_queue_head_init(&bcs->squeue);
	}
	bcs->tx_skb = NULL;
	bcs->hw.tiger.sendcnt = 0;
	test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
	bcs->event = 0;
	bcs->tx_cnt = 0;
	return (0);
}
static int
setstack_tiger(struct PStack *st, struct BCState *bcs)
{
	bcs->channel = st->l1.bc;
	if (open_tigerstate(st->l1.hardware, bcs))
		return (-1);
	st->l1.bcs = bcs;
	st->l2.l2l1 = tiger_l2l1;
	setstack_manager(st);
	bcs->st = st;
	setstack_l1_B(st);
	return (0);
}
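
/*
 * Allocate the TX and RX DMA rings (shared by both B channels), program
 * their start/IRQ/end addresses into the Tiger chip and hook up the
 * B-channel callbacks.  The rings are kmalloc'ed with GFP_DMA and handed
 * to the card via virt_to_bus(), so they must stay resident for the
 * lifetime of the card.
 */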
void
inittiger(struct IsdnCardState *cs)
{
	if (!(cs->bcs[0].hw.tiger.send = kmalloc(NETJET_DMA_TXSIZE * sizeof(unsigned int),
		GFP_KERNEL | GFP_DMA))) {
		printk(KERN_WARNING
		       "HiSax: No memory for tiger.send\n");
		return;
	}
	cs->bcs[0].hw.tiger.s_irq = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE/2 - 1;
	cs->bcs[0].hw.tiger.s_end = cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1;
	cs->bcs[1].hw.tiger.send = cs->bcs[0].hw.tiger.send;
	cs->bcs[1].hw.tiger.s_irq = cs->bcs[0].hw.tiger.s_irq;
	cs->bcs[1].hw.tiger.s_end = cs->bcs[0].hw.tiger.s_end;

	memset(cs->bcs[0].hw.tiger.send, 0xff, NETJET_DMA_TXSIZE * sizeof(unsigned int));
	debugl1(cs, "tiger: send buf %p - %p", cs->bcs[0].hw.tiger.send,
		cs->bcs[0].hw.tiger.send + NETJET_DMA_TXSIZE - 1);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.send),
		cs->hw.njet.base + NETJET_DMA_READ_START);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.s_irq),
		cs->hw.njet.base + NETJET_DMA_READ_IRQ);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.s_end),
		cs->hw.njet.base + NETJET_DMA_READ_END);
	if (!(cs->bcs[0].hw.tiger.rec = kmalloc(NETJET_DMA_RXSIZE * sizeof(unsigned int),
		GFP_KERNEL | GFP_DMA))) {
		printk(KERN_WARNING
		       "HiSax: No memory for tiger.rec\n");
		return;
	}
	debugl1(cs, "tiger: rec buf %p - %p", cs->bcs[0].hw.tiger.rec,
		cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1);
	cs->bcs[1].hw.tiger.rec = cs->bcs[0].hw.tiger.rec;
	memset(cs->bcs[0].hw.tiger.rec, 0xff, NETJET_DMA_RXSIZE * sizeof(unsigned int));
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec),
		cs->hw.njet.base + NETJET_DMA_WRITE_START);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE/2 - 1),
		cs->hw.njet.base + NETJET_DMA_WRITE_IRQ);
	outl(virt_to_bus(cs->bcs[0].hw.tiger.rec + NETJET_DMA_RXSIZE - 1),
		cs->hw.njet.base + NETJET_DMA_WRITE_END);
	debugl1(cs, "tiger: dmacfg %x/%x pulse=%d",
		inl(cs->hw.njet.base + NETJET_DMA_WRITE_ADR),
		inl(cs->hw.njet.base + NETJET_DMA_READ_ADR),
		bytein(cs->hw.njet.base + NETJET_PULSE_CNT));
	cs->hw.njet.last_is0 = 0;
	cs->bcs[0].BC_SetStack = setstack_tiger;
	cs->bcs[1].BC_SetStack = setstack_tiger;
	cs->bcs[0].BC_Close = close_tigerstate;
	cs->bcs[1].BC_Close = close_tigerstate;
}
static void
releasetiger(struct IsdnCardState *cs)
{
	kfree(cs->bcs[0].hw.tiger.send);
	cs->bcs[0].hw.tiger.send = NULL;
	cs->bcs[1].hw.tiger.send = NULL;
	kfree(cs->bcs[0].hw.tiger.rec);
	cs->bcs[0].hw.tiger.rec = NULL;
	cs->bcs[1].hw.tiger.rec = NULL;
}
void
release_io_netjet(struct IsdnCardState *cs)
{
	byteout(cs->hw.njet.base + NETJET_IRQMASK0, 0);
	byteout(cs->hw.njet.base + NETJET_IRQMASK1, 0);
	releasetiger(cs);
	release_region(cs->hw.njet.base, 256);
}