2 * Copyright (c) 2010 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
/*
 * Firmware image filenames for the supported chip families, and the
 * MODULE_FIRMWARE() declarations that advertise them to userspace
 * firmware-loading tooling.
 */
19 /* identify firmware images */
20 #define FIRMWARE_AR7010 "ar7010.fw"
21 #define FIRMWARE_AR7010_1_1 "ar7010_1_1.fw"
22 #define FIRMWARE_AR9271 "ar9271.fw"
24 MODULE_FIRMWARE(FIRMWARE_AR7010
);
25 MODULE_FIRMWARE(FIRMWARE_AR7010_1_1
);
26 MODULE_FIRMWARE(FIRMWARE_AR9271
);
/*
 * USB VID/PID match table for this driver, exported via
 * MODULE_DEVICE_TABLE() for hotplug/modalias matching.
 * NOTE(review): the terminating all-zero sentinel entry and the closing
 * "};" are not visible in this extraction (internal numbering jumps from
 * 45 to 49) — confirm against the complete source.
 */
28 static struct usb_device_id ath9k_hif_usb_ids
[] = {
29 { USB_DEVICE(0x0cf3, 0x9271) }, /* Atheros */
30 { USB_DEVICE(0x0cf3, 0x1006) }, /* Atheros */
31 { USB_DEVICE(0x0cf3, 0x7010) }, /* Atheros */
32 { USB_DEVICE(0x0cf3, 0x7015) }, /* Atheros */
33 { USB_DEVICE(0x0846, 0x9030) }, /* Netgear N150 */
34 { USB_DEVICE(0x0846, 0x9018) }, /* Netgear WNDA3200 */
35 { USB_DEVICE(0x07D1, 0x3A10) }, /* Dlink Wireless 150 */
36 { USB_DEVICE(0x13D3, 0x3327) }, /* Azurewave */
37 { USB_DEVICE(0x13D3, 0x3328) }, /* Azurewave */
38 { USB_DEVICE(0x13D3, 0x3346) }, /* IMC Networks */
39 { USB_DEVICE(0x13D3, 0x3348) }, /* Azurewave */
40 { USB_DEVICE(0x13D3, 0x3349) }, /* Azurewave */
41 { USB_DEVICE(0x13D3, 0x3350) }, /* Azurewave */
42 { USB_DEVICE(0x04CA, 0x4605) }, /* Liteon */
43 { USB_DEVICE(0x083A, 0xA704) }, /* SMC Networks */
44 { USB_DEVICE(0x040D, 0x3801) }, /* VIA */
45 { USB_DEVICE(0x1668, 0x1200) }, /* Verizon */
49 MODULE_DEVICE_TABLE(usb
, ath9k_hif_usb_ids
);
/*
 * Forward declaration: __hif_usb_tx() is called from the TX completion
 * callback below but defined later in the file.
 */
51 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
);
/*
 * hif_usb_regout_cb() - completion callback for register-out interrupt
 * URBs.  Recovers the cmd_buf from urb->context, switches on urb->status
 * and hands completion up via ath9k_htc_txcompletion_cb().
 * NOTE(review): interior lines are missing from this extraction (the
 * status cases and cleanup path are not visible) — verify upstream.
 */
53 static void hif_usb_regout_cb(struct urb
*urb
)
55 struct cmd_buf
*cmd
= (struct cmd_buf
*)urb
->context
;
57 switch (urb
->status
) {
70 ath9k_htc_txcompletion_cb(cmd
->hif_dev
->htc_handle
,
/*
 * hif_usb_send_regout() - queue a register write to the device.
 * Allocates a URB and a zeroed cmd_buf, fills an interrupt URB to
 * USB_REG_OUT_PIPE with hif_usb_regout_cb as completion, anchors it on
 * hif_dev->regout_submitted and submits it; on submit failure the URB is
 * unanchored.
 * NOTE(review): allocation-failure branches and the return path are not
 * visible in this extraction — verify upstream.
 */
81 static int hif_usb_send_regout(struct hif_device_usb
*hif_dev
,
88 urb
= usb_alloc_urb(0, GFP_KERNEL
);
92 cmd
= kzalloc(sizeof(*cmd
), GFP_KERNEL
);
99 cmd
->hif_dev
= hif_dev
;
101 usb_fill_int_urb(urb
, hif_dev
->udev
,
102 usb_sndintpipe(hif_dev
->udev
, USB_REG_OUT_PIPE
),
104 hif_usb_regout_cb
, cmd
, 1);
106 usb_anchor_urb(urb
, &hif_dev
->regout_submitted
);
107 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
109 usb_unanchor_urb(urb
);
/*
 * ath9k_skb_queue_purge() - drop every SKB on @list, freeing each with
 * dev_kfree_skb_any() (safe in any context) and counting it as dropped
 * in the TX statistics.
 */
117 static inline void ath9k_skb_queue_purge(struct hif_device_usb
*hif_dev
,
118 struct sk_buff_head
*list
)
122 while ((skb
= __skb_dequeue(list
)) != NULL
) {
123 dev_kfree_skb_any(skb
);
124 TX_STAT_INC(skb_dropped
);
/*
 * hif_usb_tx_cb() - completion callback for WLAN TX bulk URBs.
 * On error/kill the queued SKBs are purged; on success each queued SKB
 * is completed via ath9k_htc_txcompletion_cb(), the tx_buf is reset and
 * moved back to the free list under tx_lock, and __hif_usb_tx() is
 * kicked to drain any pending SKBs (unless TX has been stopped).
 * Locking: tx_lock guards tx.flags, tx.tx_buf and tx.tx_buf_cnt.
 * NOTE(review): interior lines (status cases, goto labels, returns) are
 * missing from this extraction — verify upstream before relying on it.
 */
128 static void hif_usb_tx_cb(struct urb
*urb
)
130 struct tx_buf
*tx_buf
= (struct tx_buf
*) urb
->context
;
131 struct hif_device_usb
*hif_dev
;
134 if (!tx_buf
|| !tx_buf
->hif_dev
)
137 hif_dev
= tx_buf
->hif_dev
;
139 switch (urb
->status
) {
147 * The URB has been killed, free the SKBs.
149 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
152 * If the URBs are being flushed, no need to add this
153 * URB to the free list.
155 spin_lock(&hif_dev
->tx
.tx_lock
);
156 if (hif_dev
->tx
.flags
& HIF_USB_TX_FLUSH
) {
157 spin_unlock(&hif_dev
->tx
.tx_lock
);
160 spin_unlock(&hif_dev
->tx
.tx_lock
);
163 * In the stop() case, this URB has to be added to
172 * Check if TX has been stopped, this is needed because
173 * this CB could have been invoked just after the TX lock
174 * was released in hif_stop() and kill_urb() hasn't been
177 spin_lock(&hif_dev
->tx
.tx_lock
);
178 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
179 spin_unlock(&hif_dev
->tx
.tx_lock
);
180 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
183 spin_unlock(&hif_dev
->tx
.tx_lock
);
185 /* Complete the queued SKBs. */
186 while ((skb
= __skb_dequeue(&tx_buf
->skb_queue
)) != NULL
) {
187 ath9k_htc_txcompletion_cb(hif_dev
->htc_handle
,
189 TX_STAT_INC(skb_completed
);
193 /* Re-initialize the SKB queue */
194 tx_buf
->len
= tx_buf
->offset
= 0;
195 __skb_queue_head_init(&tx_buf
->skb_queue
);
197 /* Add this TX buffer to the free list */
198 spin_lock(&hif_dev
->tx
.tx_lock
);
199 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
200 hif_dev
->tx
.tx_buf_cnt
++;
201 if (!(hif_dev
->tx
.flags
& HIF_USB_TX_STOP
))
202 __hif_usb_tx(hif_dev
); /* Check for pending SKBs */
203 TX_STAT_INC(buf_completed
);
204 spin_unlock(&hif_dev
->tx
.tx_lock
);
/*
 * __hif_usb_tx() - aggregate queued SKBs into a free tx_buf and submit
 * it as one bulk URB on USB_WLAN_TX_PIPE.
 * Takes the first free tx_buf, moves it to tx_pending, packs up to
 * MAX_TX_AGGR_NUM SKBs into its buffer (each prefixed by a 4-byte
 * stream-mode header and padded to a 4-byte boundary), then submits the
 * URB with hif_usb_tx_cb as completion.  On submit failure the tx_buf
 * is reset and returned to the free list.
 * Caller must hold tx.tx_lock (see the original comment below).
 * NOTE(review): interior lines (early returns, the length header write,
 * error-path returns) are missing from this extraction — verify upstream.
 */
207 /* TX lock has to be taken */
208 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
)
210 struct tx_buf
*tx_buf
= NULL
;
211 struct sk_buff
*nskb
= NULL
;
213 u16
*hdr
, tx_skb_cnt
= 0;
216 if (hif_dev
->tx
.tx_skb_cnt
== 0)
219 /* Check if a free TX buffer is available */
220 if (list_empty(&hif_dev
->tx
.tx_buf
))
223 tx_buf
= list_first_entry(&hif_dev
->tx
.tx_buf
, struct tx_buf
, list
);
224 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_pending
);
225 hif_dev
->tx
.tx_buf_cnt
--;
227 tx_skb_cnt
= min_t(u16
, hif_dev
->tx
.tx_skb_cnt
, MAX_TX_AGGR_NUM
);
229 for (i
= 0; i
< tx_skb_cnt
; i
++) {
230 nskb
= __skb_dequeue(&hif_dev
->tx
.tx_skb_queue
);
232 /* Should never be NULL */
235 hif_dev
->tx
.tx_skb_cnt
--;
238 buf
+= tx_buf
->offset
;
241 *hdr
++ = ATH_USB_TX_STREAM_MODE_TAG
;
243 memcpy(buf
, nskb
->data
, nskb
->len
);
244 tx_buf
->len
= nskb
->len
+ 4;
246 if (i
< (tx_skb_cnt
- 1))
247 tx_buf
->offset
+= (((tx_buf
->len
- 1) / 4) + 1) * 4;
249 if (i
== (tx_skb_cnt
- 1))
250 tx_buf
->len
+= tx_buf
->offset
;
252 __skb_queue_tail(&tx_buf
->skb_queue
, nskb
);
253 TX_STAT_INC(skb_queued
);
256 usb_fill_bulk_urb(tx_buf
->urb
, hif_dev
->udev
,
257 usb_sndbulkpipe(hif_dev
->udev
, USB_WLAN_TX_PIPE
),
258 tx_buf
->buf
, tx_buf
->len
,
259 hif_usb_tx_cb
, tx_buf
);
261 ret
= usb_submit_urb(tx_buf
->urb
, GFP_ATOMIC
);
263 tx_buf
->len
= tx_buf
->offset
= 0;
264 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
265 __skb_queue_head_init(&tx_buf
->skb_queue
);
266 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
267 hif_dev
->tx
.tx_buf_cnt
++;
271 TX_STAT_INC(buf_queued
);
/*
 * hif_usb_send_tx() - enqueue one SKB for WLAN TX.
 * Under tx_lock (irqsave): rejects the SKB if TX is stopped or the
 * queue exceeds MAX_TX_BUF_NUM, otherwise appends it to tx_skb_queue.
 * Normal frames trigger an immediate __hif_usb_tx(); AMPDU frames are
 * flushed immediately only when all URBs are free and fewer than two
 * SKBs are queued (lets aggregation build up otherwise).
 * NOTE(review): return statements are missing from this extraction —
 * verify upstream.
 */
276 static int hif_usb_send_tx(struct hif_device_usb
*hif_dev
, struct sk_buff
*skb
,
277 struct ath9k_htc_tx_ctl
*tx_ctl
)
281 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
283 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
284 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
288 /* Check if the max queue count has been reached */
289 if (hif_dev
->tx
.tx_skb_cnt
> MAX_TX_BUF_NUM
) {
290 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
294 __skb_queue_tail(&hif_dev
->tx
.tx_skb_queue
, skb
);
295 hif_dev
->tx
.tx_skb_cnt
++;
297 /* Send normal frames immediately */
298 if (!tx_ctl
|| (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_NORMAL
)))
299 __hif_usb_tx(hif_dev
);
301 /* Check if AMPDUs have to be sent immediately */
302 if (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_AMPDU
) &&
303 (hif_dev
->tx
.tx_buf_cnt
== MAX_TX_URB_NUM
) &&
304 (hif_dev
->tx
.tx_skb_cnt
< 2)) {
305 __hif_usb_tx(hif_dev
);
308 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
/*
 * hif_usb_start() - HIF .start callback.
 * Marks the device started (HIF_USB_START) and clears the TX-stop flag
 * under tx_lock so queued transmission can proceed.
 */
313 static void hif_usb_start(void *hif_handle
, u8 pipe_id
)
315 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
318 hif_dev
->flags
|= HIF_USB_START
;
320 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
321 hif_dev
->tx
.flags
&= ~HIF_USB_TX_STOP
;
322 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
/*
 * hif_usb_stop() - HIF .stop callback.
 * Under tx_lock: purges all queued TX SKBs, zeroes the count and sets
 * HIF_USB_TX_STOP so hif_usb_tx_cb/hif_usb_send_tx stop processing.
 * Then kills every URB still on the tx_pending list (outside the lock,
 * since usb_kill_urb() can sleep/invoke the completion).
 */
325 static void hif_usb_stop(void *hif_handle
, u8 pipe_id
)
327 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
328 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
331 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
332 ath9k_skb_queue_purge(hif_dev
, &hif_dev
->tx
.tx_skb_queue
);
333 hif_dev
->tx
.tx_skb_cnt
= 0;
334 hif_dev
->tx
.flags
|= HIF_USB_TX_STOP
;
335 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
337 /* The pending URBs have to be canceled. */
338 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
339 &hif_dev
->tx
.tx_pending
, list
) {
340 usb_kill_urb(tx_buf
->urb
);
/*
 * hif_usb_send() - HIF .send dispatch.
 * Routes the SKB by pipe: WLAN TX goes through hif_usb_send_tx(),
 * register writes through hif_usb_send_regout(); anything else is
 * logged as an invalid pipe.
 * NOTE(review): the break statements / default label and final return
 * are missing from this extraction — verify upstream.
 */
344 static int hif_usb_send(void *hif_handle
, u8 pipe_id
, struct sk_buff
*skb
,
345 struct ath9k_htc_tx_ctl
*tx_ctl
)
347 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
351 case USB_WLAN_TX_PIPE
:
352 ret
= hif_usb_send_tx(hif_dev
, skb
, tx_ctl
);
354 case USB_REG_OUT_PIPE
:
355 ret
= hif_usb_send_regout(hif_dev
, skb
);
358 dev_err(&hif_dev
->udev
->dev
,
359 "ath9k_htc: Invalid TX pipe: %d\n", pipe_id
);
/*
 * HIF operations table handed to the HTC layer: transport identity,
 * control pipe numbers and the start/stop/send callbacks defined above.
 */
367 static struct ath9k_htc_hif hif_usb
= {
368 .transport
= ATH9K_HIF_USB
,
369 .name
= "ath9k_hif_usb",
371 .control_ul_pipe
= USB_REG_OUT_PIPE
,
372 .control_dl_pipe
= USB_REG_IN_PIPE
,
374 .start
= hif_usb_start
,
375 .stop
= hif_usb_stop
,
376 .send
= hif_usb_send
,
/*
 * ath9k_hif_usb_rx_stream() - de-aggregate a stream-mode RX transfer.
 * Each packet in the USB transfer carries a 4-byte header: a 16-bit
 * little-endian length followed by a 16-bit tag that must equal
 * ATH_USB_RX_STREAM_MODE_TAG; packets are padded to 4 bytes.  A packet
 * may straddle two transfers: the leftover state (remain_skb,
 * rx_remain_len, rx_transfer_len, rx_pad_len) is kept under rx_lock and
 * stitched together at the start of the next call.  Completed packets
 * are collected in skb_pool[] and delivered via ath9k_htc_rx_msg().
 * NOTE(review): several interior lines (bounds checks, allocation-failure
 * branches, loop advances) are missing from this extraction — verify
 * upstream before relying on the control flow shown here.
 */
379 static void ath9k_hif_usb_rx_stream(struct hif_device_usb
*hif_dev
,
382 struct sk_buff
*nskb
, *skb_pool
[MAX_PKT_NUM_IN_TRANSFER
];
383 int index
= 0, i
= 0, chk_idx
, len
= skb
->len
;
384 int rx_remain_len
= 0, rx_pkt_len
= 0;
385 u16 pkt_len
, pkt_tag
, pool_index
= 0;
388 spin_lock(&hif_dev
->rx_lock
);
390 rx_remain_len
= hif_dev
->rx_remain_len
;
391 rx_pkt_len
= hif_dev
->rx_transfer_len
;
393 if (rx_remain_len
!= 0) {
394 struct sk_buff
*remain_skb
= hif_dev
->remain_skb
;
397 ptr
= (u8
*) remain_skb
->data
;
399 index
= rx_remain_len
;
400 rx_remain_len
-= hif_dev
->rx_pad_len
;
403 memcpy(ptr
, skb
->data
, rx_remain_len
);
405 rx_pkt_len
+= rx_remain_len
;
406 hif_dev
->rx_remain_len
= 0;
407 skb_put(remain_skb
, rx_pkt_len
);
409 skb_pool
[pool_index
++] = remain_skb
;
412 index
= rx_remain_len
;
416 spin_unlock(&hif_dev
->rx_lock
);
418 while (index
< len
) {
419 ptr
= (u8
*) skb
->data
;
421 pkt_len
= ptr
[index
] + (ptr
[index
+1] << 8);
422 pkt_tag
= ptr
[index
+2] + (ptr
[index
+3] << 8);
424 if (pkt_tag
== ATH_USB_RX_STREAM_MODE_TAG
) {
427 pad_len
= 4 - (pkt_len
& 0x3);
432 index
= index
+ 4 + pkt_len
+ pad_len
;
434 if (index
> MAX_RX_BUF_SIZE
) {
435 spin_lock(&hif_dev
->rx_lock
);
436 hif_dev
->rx_remain_len
= index
- MAX_RX_BUF_SIZE
;
437 hif_dev
->rx_transfer_len
=
438 MAX_RX_BUF_SIZE
- chk_idx
- 4;
439 hif_dev
->rx_pad_len
= pad_len
;
441 nskb
= __dev_alloc_skb(pkt_len
+ 32,
444 dev_err(&hif_dev
->udev
->dev
,
445 "ath9k_htc: RX memory allocation"
447 spin_unlock(&hif_dev
->rx_lock
);
450 skb_reserve(nskb
, 32);
451 RX_STAT_INC(skb_allocated
);
453 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]),
454 hif_dev
->rx_transfer_len
);
456 /* Record the buffer pointer */
457 hif_dev
->remain_skb
= nskb
;
458 spin_unlock(&hif_dev
->rx_lock
);
460 nskb
= __dev_alloc_skb(pkt_len
+ 32, GFP_ATOMIC
);
462 dev_err(&hif_dev
->udev
->dev
,
463 "ath9k_htc: RX memory allocation"
467 skb_reserve(nskb
, 32);
468 RX_STAT_INC(skb_allocated
);
470 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]), pkt_len
);
471 skb_put(nskb
, pkt_len
);
472 skb_pool
[pool_index
++] = nskb
;
475 RX_STAT_INC(skb_dropped
);
481 for (i
= 0; i
< pool_index
; i
++) {
482 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb_pool
[i
],
483 skb_pool
[i
]->len
, USB_WLAN_RX_PIPE
);
484 RX_STAT_INC(skb_completed
);
/*
 * ath9k_hif_usb_rx_cb() - completion callback for WLAN RX bulk URBs.
 * Recovers the SKB from urb->context and hif_dev from the interface
 * data, passes any received bytes to ath9k_hif_usb_rx_stream(), resets
 * the SKB tail and resubmits the same URB (anchored on rx_submitted);
 * the URB is unanchored if resubmission fails.
 * NOTE(review): the status cases and error/free paths are missing from
 * this extraction — verify upstream.
 */
488 static void ath9k_hif_usb_rx_cb(struct urb
*urb
)
490 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
491 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
492 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
501 switch (urb
->status
) {
513 if (likely(urb
->actual_length
!= 0)) {
514 skb_put(skb
, urb
->actual_length
);
515 ath9k_hif_usb_rx_stream(hif_dev
, skb
);
519 skb_reset_tail_pointer(skb
);
522 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
523 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
525 usb_unanchor_urb(urb
);
/*
 * ath9k_hif_usb_reg_in_cb() - completion callback for register-in
 * interrupt URBs.  Delivers the received command SKB to the HTC layer
 * via ath9k_htc_rx_msg(), allocates a fresh SKB as the next receive
 * buffer, refills the interrupt URB on USB_REG_IN_PIPE with itself as
 * completion and resubmits.  On a zero-length transfer the existing SKB
 * is reset and the URB resubmitted as-is.
 * NOTE(review): status cases, allocation-failure handling and the free
 * paths are missing from this extraction — verify upstream.
 */
534 static void ath9k_hif_usb_reg_in_cb(struct urb
*urb
)
536 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
537 struct sk_buff
*nskb
;
538 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
539 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
548 switch (urb
->status
) {
560 if (likely(urb
->actual_length
!= 0)) {
561 skb_put(skb
, urb
->actual_length
);
563 /* Process the command first */
564 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb
,
565 skb
->len
, USB_REG_IN_PIPE
);
568 nskb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_ATOMIC
);
570 dev_err(&hif_dev
->udev
->dev
,
571 "ath9k_htc: REG_IN memory allocation failure\n");
576 usb_fill_int_urb(urb
, hif_dev
->udev
,
577 usb_rcvintpipe(hif_dev
->udev
, USB_REG_IN_PIPE
),
578 nskb
->data
, MAX_REG_IN_BUF_SIZE
,
579 ath9k_hif_usb_reg_in_cb
, nskb
, 1);
581 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
591 skb_reset_tail_pointer(skb
);
594 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
/*
 * ath9k_hif_usb_dealloc_tx_urbs() - tear down all TX resources.
 * Kills, unlinks and frees every URB on the free (tx_buf) list, sets
 * HIF_USB_TX_FLUSH under tx_lock so in-flight completions skip the free
 * list, then kills/frees everything on tx_pending as well.
 * NOTE(review): the kfree of tx_buf->buf / tx_buf itself is not visible
 * in this extraction — verify upstream.
 */
604 static void ath9k_hif_usb_dealloc_tx_urbs(struct hif_device_usb
*hif_dev
)
606 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
609 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
610 &hif_dev
->tx
.tx_buf
, list
) {
611 usb_kill_urb(tx_buf
->urb
);
612 list_del(&tx_buf
->list
);
613 usb_free_urb(tx_buf
->urb
);
618 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
619 hif_dev
->tx
.flags
|= HIF_USB_TX_FLUSH
;
620 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
622 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
623 &hif_dev
->tx
.tx_pending
, list
) {
624 usb_kill_urb(tx_buf
->urb
);
625 list_del(&tx_buf
->list
);
626 usb_free_urb(tx_buf
->urb
);
/*
 * ath9k_hif_usb_alloc_tx_urbs() - set up the TX side.
 * Initializes the free/pending lists, lock and SKB queue, then
 * allocates MAX_TX_URB_NUM tx_bufs, each with a MAX_TX_BUF_SIZE data
 * buffer and a URB, and places them on the free list.  On any
 * allocation failure everything is unwound via
 * ath9k_hif_usb_dealloc_tx_urbs().
 * NOTE(review): the per-allocation failure gotos and return values are
 * missing from this extraction — verify upstream.
 */
632 static int ath9k_hif_usb_alloc_tx_urbs(struct hif_device_usb
*hif_dev
)
634 struct tx_buf
*tx_buf
;
637 INIT_LIST_HEAD(&hif_dev
->tx
.tx_buf
);
638 INIT_LIST_HEAD(&hif_dev
->tx
.tx_pending
);
639 spin_lock_init(&hif_dev
->tx
.tx_lock
);
640 __skb_queue_head_init(&hif_dev
->tx
.tx_skb_queue
);
642 for (i
= 0; i
< MAX_TX_URB_NUM
; i
++) {
643 tx_buf
= kzalloc(sizeof(struct tx_buf
), GFP_KERNEL
);
647 tx_buf
->buf
= kzalloc(MAX_TX_BUF_SIZE
, GFP_KERNEL
);
651 tx_buf
->urb
= usb_alloc_urb(0, GFP_KERNEL
);
655 tx_buf
->hif_dev
= hif_dev
;
656 __skb_queue_head_init(&tx_buf
->skb_queue
);
658 list_add_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
661 hif_dev
->tx
.tx_buf_cnt
= MAX_TX_URB_NUM
;
669 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
/*
 * ath9k_hif_usb_dealloc_rx_urbs() - kill all anchored RX URBs; the
 * anchor releases them once killed.
 */
673 static void ath9k_hif_usb_dealloc_rx_urbs(struct hif_device_usb
*hif_dev
)
675 usb_kill_anchored_urbs(&hif_dev
->rx_submitted
);
/*
 * ath9k_hif_usb_alloc_rx_urbs() - set up the WLAN RX side.
 * Initializes the rx_submitted anchor and rx_lock, then allocates
 * MAX_RX_URB_NUM URBs, each with a MAX_RX_BUF_SIZE SKB as its buffer,
 * fills them as bulk-in URBs with ath9k_hif_usb_rx_cb as completion,
 * anchors and submits them.  The URB reference is dropped after
 * anchoring so killing the anchor also frees the URBs (see the original
 * comment kept below).  Failure unwinds via dealloc_rx_urbs().
 * NOTE(review): the allocation-failure gotos and free statements are
 * missing from this extraction — verify upstream.
 */
678 static int ath9k_hif_usb_alloc_rx_urbs(struct hif_device_usb
*hif_dev
)
680 struct urb
*urb
= NULL
;
681 struct sk_buff
*skb
= NULL
;
684 init_usb_anchor(&hif_dev
->rx_submitted
);
685 spin_lock_init(&hif_dev
->rx_lock
);
687 for (i
= 0; i
< MAX_RX_URB_NUM
; i
++) {
690 urb
= usb_alloc_urb(0, GFP_KERNEL
);
696 /* Allocate buffer */
697 skb
= alloc_skb(MAX_RX_BUF_SIZE
, GFP_KERNEL
);
703 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
704 usb_rcvbulkpipe(hif_dev
->udev
,
706 skb
->data
, MAX_RX_BUF_SIZE
,
707 ath9k_hif_usb_rx_cb
, skb
);
710 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
713 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
715 usb_unanchor_urb(urb
);
720 * Drop reference count.
721 * This ensures that the URB is freed when killing them.
733 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
/*
 * ath9k_hif_usb_dealloc_reg_in_urb() - tear down the register-in URB:
 * kill it, free the SKB stashed in its context, free the URB and clear
 * the pointer so the teardown is idempotent.
 */
737 static void ath9k_hif_usb_dealloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
739 if (hif_dev
->reg_in_urb
) {
740 usb_kill_urb(hif_dev
->reg_in_urb
);
741 if (hif_dev
->reg_in_urb
->context
)
742 kfree_skb((void *)hif_dev
->reg_in_urb
->context
);
743 usb_free_urb(hif_dev
->reg_in_urb
);
744 hif_dev
->reg_in_urb
= NULL
;
/*
 * ath9k_hif_usb_alloc_reg_in_urb() - allocate the single register-in
 * interrupt URB plus its MAX_REG_IN_BUF_SIZE SKB, fill it on
 * USB_REG_IN_PIPE with ath9k_hif_usb_reg_in_cb as completion, and
 * submit it.  Failure unwinds via dealloc_reg_in_urb().
 * NOTE(review): error gotos/returns are missing from this extraction —
 * verify upstream.
 */
748 static int ath9k_hif_usb_alloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
752 hif_dev
->reg_in_urb
= usb_alloc_urb(0, GFP_KERNEL
);
753 if (hif_dev
->reg_in_urb
== NULL
)
756 skb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_KERNEL
);
760 usb_fill_int_urb(hif_dev
->reg_in_urb
, hif_dev
->udev
,
761 usb_rcvintpipe(hif_dev
->udev
, USB_REG_IN_PIPE
),
762 skb
->data
, MAX_REG_IN_BUF_SIZE
,
763 ath9k_hif_usb_reg_in_cb
, skb
, 1);
765 if (usb_submit_urb(hif_dev
->reg_in_urb
, GFP_KERNEL
) != 0)
771 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
/*
 * ath9k_hif_usb_alloc_urbs() - allocate all URB machinery in order:
 * regout anchor, TX URBs, RX URBs, register-in URB.  Uses goto-style
 * unwinding so a later failure frees what earlier steps allocated.
 */
775 static int ath9k_hif_usb_alloc_urbs(struct hif_device_usb
*hif_dev
)
778 init_usb_anchor(&hif_dev
->regout_submitted
);
781 if (ath9k_hif_usb_alloc_tx_urbs(hif_dev
) < 0)
785 if (ath9k_hif_usb_alloc_rx_urbs(hif_dev
) < 0)
789 if (ath9k_hif_usb_alloc_reg_in_urb(hif_dev
) < 0)
794 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
796 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
/*
 * ath9k_hif_usb_dealloc_urbs() - free everything alloc_urbs() created:
 * anchored regout URBs, the register-in URB, then TX and RX URBs.
 */
801 static void ath9k_hif_usb_dealloc_urbs(struct hif_device_usb
*hif_dev
)
803 usb_kill_anchored_urbs(&hif_dev
->regout_submitted
);
804 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
805 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
806 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
/*
 * ath9k_hif_usb_download_fw() - push the requested firmware image to
 * the device over the control pipe in 4 KiB chunks (vendor request
 * FIRMWARE_DOWNLOAD, target address in wValue as addr >> 8), then issue
 * FIRMWARE_DOWNLOAD_COMP with the device-specific text offset
 * (AR7010 vs AR9271) to tell the firmware the transfer is complete.
 * NOTE(review): the chunk-loop advance, error handling and kfree of the
 * bounce buffer are missing from this extraction — verify upstream.
 */
809 static int ath9k_hif_usb_download_fw(struct hif_device_usb
*hif_dev
)
812 const void *data
= hif_dev
->firmware
->data
;
813 size_t len
= hif_dev
->firmware
->size
;
814 u32 addr
= AR9271_FIRMWARE
;
815 u8
*buf
= kzalloc(4096, GFP_KERNEL
);
822 transfer
= min_t(int, len
, 4096);
823 memcpy(buf
, data
, transfer
);
825 err
= usb_control_msg(hif_dev
->udev
,
826 usb_sndctrlpipe(hif_dev
->udev
, 0),
827 FIRMWARE_DOWNLOAD
, 0x40 | USB_DIR_OUT
,
828 addr
>> 8, 0, buf
, transfer
, HZ
);
840 switch (hif_dev
->device_id
) {
846 firm_offset
= AR7010_FIRMWARE_TEXT
;
849 firm_offset
= AR9271_FIRMWARE_TEXT
;
854 * Issue FW download complete command to firmware.
856 err
= usb_control_msg(hif_dev
->udev
, usb_sndctrlpipe(hif_dev
->udev
, 0),
857 FIRMWARE_DOWNLOAD_COMP
,
859 firm_offset
>> 8, 0, NULL
, 0, HZ
);
863 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: Transferred FW: %s, size: %ld\n",
864 hif_dev
->fw_name
, (unsigned long) hif_dev
->firmware
->size
);
/*
 * ath9k_hif_usb_dev_init() - bring the device up: request the firmware
 * image named in hif_dev->fw_name, allocate all URBs, then download the
 * firmware.  Failure unwinds in reverse order (dealloc URBs, release
 * firmware, clear the pointer).
 * NOTE(review): the success return and error-label structure are
 * missing from this extraction — verify upstream.
 */
869 static int ath9k_hif_usb_dev_init(struct hif_device_usb
*hif_dev
)
873 /* Request firmware */
874 ret
= request_firmware(&hif_dev
->firmware
, hif_dev
->fw_name
,
875 &hif_dev
->udev
->dev
);
877 dev_err(&hif_dev
->udev
->dev
,
878 "ath9k_htc: Firmware - %s not found\n", hif_dev
->fw_name
);
883 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
885 dev_err(&hif_dev
->udev
->dev
,
886 "ath9k_htc: Unable to allocate URBs\n");
890 /* Download firmware */
891 ret
= ath9k_hif_usb_download_fw(hif_dev
);
893 dev_err(&hif_dev
->udev
->dev
,
894 "ath9k_htc: Firmware - %s download failed\n",
896 goto err_fw_download
;
902 ath9k_hif_usb_dealloc_urbs(hif_dev
);
904 release_firmware(hif_dev
->firmware
);
906 hif_dev
->firmware
= NULL
;
/*
 * ath9k_hif_usb_dev_deinit() - undo dev_init(): free all URBs and
 * release the firmware if it was loaded.
 */
910 static void ath9k_hif_usb_dev_deinit(struct hif_device_usb
*hif_dev
)
912 ath9k_hif_usb_dealloc_urbs(hif_dev
);
913 if (hif_dev
->firmware
)
914 release_firmware(hif_dev
->firmware
);
/*
 * ath9k_hif_usb_probe() - USB probe entry point.
 * Allocates the hif_device_usb, records udev/interface/device_id, marks
 * the device for reset_resume, attaches it as interface data, allocates
 * the HTC handle, picks the firmware image by device id (AR7010 1.1 is
 * distinguished by bcdDevice == 0x0202), then runs dev_init() and
 * ath9k_htc_hw_init().  Error labels unwind in reverse order.
 * NOTE(review): case labels in the firmware switch, the usb_get_dev/
 * put_dev refcounting and the kfree of hif_dev are missing from this
 * extraction — verify upstream.
 */
917 static int ath9k_hif_usb_probe(struct usb_interface
*interface
,
918 const struct usb_device_id
*id
)
920 struct usb_device
*udev
= interface_to_usbdev(interface
);
921 struct hif_device_usb
*hif_dev
;
924 hif_dev
= kzalloc(sizeof(struct hif_device_usb
), GFP_KERNEL
);
931 hif_dev
->udev
= udev
;
932 hif_dev
->interface
= interface
;
933 hif_dev
->device_id
= id
->idProduct
;
935 udev
->reset_resume
= 1;
937 usb_set_intfdata(interface
, hif_dev
);
939 hif_dev
->htc_handle
= ath9k_htc_hw_alloc(hif_dev
, &hif_usb
,
940 &hif_dev
->udev
->dev
);
941 if (hif_dev
->htc_handle
== NULL
) {
943 goto err_htc_hw_alloc
;
946 /* Find out which firmware to load */
948 switch(hif_dev
->device_id
) {
954 if (le16_to_cpu(udev
->descriptor
.bcdDevice
) == 0x0202)
955 hif_dev
->fw_name
= FIRMWARE_AR7010_1_1
;
957 hif_dev
->fw_name
= FIRMWARE_AR7010
;
960 hif_dev
->fw_name
= FIRMWARE_AR9271
;
964 ret
= ath9k_hif_usb_dev_init(hif_dev
);
967 goto err_hif_init_usb
;
970 ret
= ath9k_htc_hw_init(hif_dev
->htc_handle
,
971 &hif_dev
->udev
->dev
, hif_dev
->device_id
);
974 goto err_htc_hw_init
;
977 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: USB layer initialized\n");
982 ath9k_hif_usb_dev_deinit(hif_dev
);
984 ath9k_htc_hw_free(hif_dev
->htc_handle
);
986 usb_set_intfdata(interface
, NULL
);
/*
 * ath9k_hif_usb_reboot() - ask the firmware to reboot by writing the
 * magic value 0xffffffff to the register-out bulk pipe; logs on
 * failure.
 * NOTE(review): the kfree of the kmemdup'd buffer is not visible in
 * this extraction — verify upstream.
 */
993 static void ath9k_hif_usb_reboot(struct usb_device
*udev
)
995 u32 reboot_cmd
= 0xffffffff;
999 buf
= kmemdup(&reboot_cmd
, 4, GFP_KERNEL
);
1003 ret
= usb_bulk_msg(udev
, usb_sndbulkpipe(udev
, USB_REG_OUT_PIPE
),
1006 dev_err(&udev
->dev
, "ath9k_htc: USB reboot failed\n");
/*
 * ath9k_hif_usb_disconnect() - USB disconnect entry point.
 * Deinitializes HTC (flagging whether the device is physically gone via
 * USB_STATE_NOTATTACHED), frees the HTC handle, tears down the device
 * and clears interface data.  If the interface had been started, the
 * firmware is rebooted so a re-plug starts clean.
 * NOTE(review): a NULL check on hif_dev and the kfree/usb_put_dev are
 * not visible in this extraction — verify upstream.
 */
1011 static void ath9k_hif_usb_disconnect(struct usb_interface
*interface
)
1013 struct usb_device
*udev
= interface_to_usbdev(interface
);
1014 struct hif_device_usb
*hif_dev
=
1015 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1018 ath9k_htc_hw_deinit(hif_dev
->htc_handle
,
1019 (udev
->state
== USB_STATE_NOTATTACHED
) ? true : false);
1020 ath9k_htc_hw_free(hif_dev
->htc_handle
);
1021 ath9k_hif_usb_dev_deinit(hif_dev
);
1022 usb_set_intfdata(interface
, NULL
);
1025 if (hif_dev
->flags
& HIF_USB_START
)
1026 ath9k_hif_usb_reboot(udev
);
1029 dev_info(&udev
->dev
, "ath9k_htc: USB layer deinitialized\n");
/*
 * ath9k_hif_usb_suspend() - PM suspend callback: release all URBs so
 * nothing is in flight while the bus is suspended.
 */
1034 static int ath9k_hif_usb_suspend(struct usb_interface
*interface
,
1035 pm_message_t message
)
1037 struct hif_device_usb
*hif_dev
=
1038 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1040 ath9k_hif_usb_dealloc_urbs(hif_dev
);
/*
 * ath9k_hif_usb_resume() - PM resume (and reset_resume) callback.
 * Re-allocates the URBs, re-downloads the firmware if it is still held
 * (the device loses it across suspend), then resumes the HTC layer.
 * On any failure the URBs are deallocated again.
 * NOTE(review): error gotos and the success return are missing from
 * this extraction — verify upstream.
 */
1045 static int ath9k_hif_usb_resume(struct usb_interface
*interface
)
1047 struct hif_device_usb
*hif_dev
=
1048 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1051 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
1055 if (hif_dev
->firmware
) {
1056 ret
= ath9k_hif_usb_download_fw(hif_dev
);
1060 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1066 ret
= ath9k_htc_resume(hif_dev
->htc_handle
);
1074 ath9k_hif_usb_dealloc_urbs(hif_dev
);
/*
 * USB driver descriptor: probe/disconnect plus PM callbacks (resume is
 * reused for reset_resume since the device needs a full firmware
 * reload either way) and the device id table above.
 */
1080 static struct usb_driver ath9k_hif_usb_driver
= {
1081 .name
= "ath9k_hif_usb",
1082 .probe
= ath9k_hif_usb_probe
,
1083 .disconnect
= ath9k_hif_usb_disconnect
,
1085 .suspend
= ath9k_hif_usb_suspend
,
1086 .resume
= ath9k_hif_usb_resume
,
1087 .reset_resume
= ath9k_hif_usb_resume
,
1089 .id_table
= ath9k_hif_usb_ids
,
/*
 * Module-level entry points: register/deregister the USB driver with
 * the USB core.  Called from the ath9k_htc module init/exit paths.
 */
1093 int ath9k_hif_usb_init(void)
1095 return usb_register(&ath9k_hif_usb_driver
);
1098 void ath9k_hif_usb_exit(void)
1100 usb_deregister(&ath9k_hif_usb_driver
);