/*
2 * Copyright (c) 2010 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
/*
 * Firmware image names for the supported chip families; MODULE_FIRMWARE
 * advertises them so userspace/initramfs tooling can bundle the files.
 * NOTE(review): this file is a lossy extract — statements are split across
 * lines and some original lines are missing throughout.
 */
19 /* identify firmware images */
20 #define FIRMWARE_AR7010 "ar7010.fw"
21 #define FIRMWARE_AR7010_1_1 "ar7010_1_1.fw"
22 #define FIRMWARE_AR9271 "ar9271.fw"
24 MODULE_FIRMWARE(FIRMWARE_AR7010
);
25 MODULE_FIRMWARE(FIRMWARE_AR7010_1_1
);
26 MODULE_FIRMWARE(FIRMWARE_AR9271
);
/*
 * USB vendor/product IDs this driver binds to; exported via
 * MODULE_DEVICE_TABLE for automatic module loading.
 * NOTE(review): the array's terminating entry and closing brace were
 * lost in extraction.
 */
28 static struct usb_device_id ath9k_hif_usb_ids
[] = {
29 { USB_DEVICE(0x0cf3, 0x9271) }, /* Atheros */
30 { USB_DEVICE(0x0cf3, 0x1006) }, /* Atheros */
31 { USB_DEVICE(0x0cf3, 0x7010) }, /* Atheros */
32 { USB_DEVICE(0x0cf3, 0x7015) }, /* Atheros */
33 { USB_DEVICE(0x0846, 0x9030) }, /* Netgear N150 */
34 { USB_DEVICE(0x0846, 0x9018) }, /* Netgear WNDA3200 */
35 { USB_DEVICE(0x07D1, 0x3A10) }, /* Dlink Wireless 150 */
36 { USB_DEVICE(0x13D3, 0x3327) }, /* Azurewave */
37 { USB_DEVICE(0x13D3, 0x3328) }, /* Azurewave */
38 { USB_DEVICE(0x13D3, 0x3346) }, /* IMC Networks */
39 { USB_DEVICE(0x13D3, 0x3348) }, /* Azurewave */
40 { USB_DEVICE(0x13D3, 0x3349) }, /* Azurewave */
41 { USB_DEVICE(0x13D3, 0x3350) }, /* Azurewave */
42 { USB_DEVICE(0x04CA, 0x4605) }, /* Liteon */
43 { USB_DEVICE(0x083A, 0xA704) }, /* SMC Networks */
44 { USB_DEVICE(0x040D, 0x3801) }, /* VIA */
45 { USB_DEVICE(0x1668, 0x1200) }, /* Verizon */
49 MODULE_DEVICE_TABLE(usb
, ath9k_hif_usb_ids
);
/* Forward declaration: the TX work function; per the comment at its
 * definition below it must be called with tx_lock held. */
51 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
);
/*
 * Completion callback for register-out (control path) URBs. Recovers the
 * cmd_buf from urb->context, inspects urb->status, and reports completion
 * to the HTC layer via ath9k_htc_txcompletion_cb().
 * NOTE(review): the switch cases and cleanup lines are missing from this
 * extract.
 */
53 static void hif_usb_regout_cb(struct urb
*urb
)
55 struct cmd_buf
*cmd
= (struct cmd_buf
*)urb
->context
;
57 switch (urb
->status
) {
70 ath9k_htc_txcompletion_cb(cmd
->hif_dev
->htc_handle
,
/*
 * Queue a register-out transfer: allocate a URB and a cmd_buf, fill a bulk
 * URB towards USB_REG_OUT_PIPE with hif_usb_regout_cb as completion,
 * anchor it on regout_submitted and submit it; on submit failure the URB
 * is unanchored.
 * NOTE(review): allocation-failure checks and the return path are missing
 * from this extract.
 */
81 static int hif_usb_send_regout(struct hif_device_usb
*hif_dev
,
88 urb
= usb_alloc_urb(0, GFP_KERNEL
);
92 cmd
= kzalloc(sizeof(*cmd
), GFP_KERNEL
);
99 cmd
->hif_dev
= hif_dev
;
101 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
102 usb_sndbulkpipe(hif_dev
->udev
, USB_REG_OUT_PIPE
),
104 hif_usb_regout_cb
, cmd
);
106 usb_anchor_urb(urb
, &hif_dev
->regout_submitted
);
107 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
109 usb_unanchor_urb(urb
);
/*
 * Drop every SKB on 'list': dequeue each one, free it with
 * dev_kfree_skb_any() (safe in any context), and count it as dropped
 * in the TX statistics.
 */
117 static inline void ath9k_skb_queue_purge(struct hif_device_usb
*hif_dev
,
118 struct sk_buff_head
*list
)
122 while ((skb
= __skb_dequeue(list
)) != NULL
) {
123 dev_kfree_skb_any(skb
);
124 TX_STAT_INC(skb_dropped
);
/*
 * Completion callback for WLAN TX URBs. Depending on urb->status it either
 * purges the aggregated SKBs (killed/flushed URB) or completes each queued
 * SKB to HTC, then resets the tx_buf and returns it to the free list; if TX
 * has not been stopped it kicks __hif_usb_tx() to drain pending SKBs.
 * The HIF_USB_TX_FLUSH / HIF_USB_TX_STOP flags are checked under tx_lock
 * to close the race with hif_stop()/flush described in the inline comments.
 * NOTE(review): case labels, braces and some statements are missing from
 * this extract.
 */
128 static void hif_usb_tx_cb(struct urb
*urb
)
130 struct tx_buf
*tx_buf
= (struct tx_buf
*) urb
->context
;
131 struct hif_device_usb
*hif_dev
;
134 if (!tx_buf
|| !tx_buf
->hif_dev
)
137 hif_dev
= tx_buf
->hif_dev
;
139 switch (urb
->status
) {
147 * The URB has been killed, free the SKBs.
149 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
152 * If the URBs are being flushed, no need to add this
153 * URB to the free list.
155 spin_lock(&hif_dev
->tx
.tx_lock
);
156 if (hif_dev
->tx
.flags
& HIF_USB_TX_FLUSH
) {
157 spin_unlock(&hif_dev
->tx
.tx_lock
);
160 spin_unlock(&hif_dev
->tx
.tx_lock
);
163 * In the stop() case, this URB has to be added to
172 * Check if TX has been stopped, this is needed because
173 * this CB could have been invoked just after the TX lock
174 * was released in hif_stop() and kill_urb() hasn't been
177 spin_lock(&hif_dev
->tx
.tx_lock
);
178 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
179 spin_unlock(&hif_dev
->tx
.tx_lock
);
180 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
183 spin_unlock(&hif_dev
->tx
.tx_lock
);
185 /* Complete the queued SKBs. */
186 while ((skb
= __skb_dequeue(&tx_buf
->skb_queue
)) != NULL
) {
187 ath9k_htc_txcompletion_cb(hif_dev
->htc_handle
,
189 TX_STAT_INC(skb_completed
);
193 /* Re-initialize the SKB queue */
194 tx_buf
->len
= tx_buf
->offset
= 0;
195 __skb_queue_head_init(&tx_buf
->skb_queue
);
197 /* Add this TX buffer to the free list */
198 spin_lock(&hif_dev
->tx
.tx_lock
);
199 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
200 hif_dev
->tx
.tx_buf_cnt
++;
201 if (!(hif_dev
->tx
.flags
& HIF_USB_TX_STOP
))
202 __hif_usb_tx(hif_dev
); /* Check for pending SKBs */
203 TX_STAT_INC(buf_completed
);
204 spin_unlock(&hif_dev
->tx
.tx_lock
);
/*
 * Aggregate up to MAX_TX_AGGR_NUM pending SKBs into one free tx_buf and
 * submit it as a single bulk URB on USB_WLAN_TX_PIPE. Each SKB is prefixed
 * with a 4-byte stream header (little-endian length + stream-mode tag) and
 * padded to a 4-byte boundary except for the last one. On submit failure
 * the SKBs are purged and the tx_buf is returned to the free list.
 * Caller must hold tx_lock (see comment on the original line 207).
 * NOTE(review): early-return bodies, the 'buf'/'hdr' setup lines and the
 * final return are missing from this extract.
 */
207 /* TX lock has to be taken */
208 static int __hif_usb_tx(struct hif_device_usb
*hif_dev
)
210 struct tx_buf
*tx_buf
= NULL
;
211 struct sk_buff
*nskb
= NULL
;
217 if (hif_dev
->tx
.tx_skb_cnt
== 0)
220 /* Check if a free TX buffer is available */
221 if (list_empty(&hif_dev
->tx
.tx_buf
))
224 tx_buf
= list_first_entry(&hif_dev
->tx
.tx_buf
, struct tx_buf
, list
);
225 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_pending
);
226 hif_dev
->tx
.tx_buf_cnt
--;
228 tx_skb_cnt
= min_t(u16
, hif_dev
->tx
.tx_skb_cnt
, MAX_TX_AGGR_NUM
);
230 for (i
= 0; i
< tx_skb_cnt
; i
++) {
231 nskb
= __skb_dequeue(&hif_dev
->tx
.tx_skb_queue
);
233 /* Should never be NULL */
236 hif_dev
->tx
.tx_skb_cnt
--;
239 buf
+= tx_buf
->offset
;
241 *hdr
++ = cpu_to_le16(nskb
->len
);
242 *hdr
++ = cpu_to_le16(ATH_USB_TX_STREAM_MODE_TAG
);
244 memcpy(buf
, nskb
->data
, nskb
->len
);
245 tx_buf
->len
= nskb
->len
+ 4;
247 if (i
< (tx_skb_cnt
- 1))
248 tx_buf
->offset
+= (((tx_buf
->len
- 1) / 4) + 1) * 4;
250 if (i
== (tx_skb_cnt
- 1))
251 tx_buf
->len
+= tx_buf
->offset
;
253 __skb_queue_tail(&tx_buf
->skb_queue
, nskb
);
254 TX_STAT_INC(skb_queued
);
257 usb_fill_bulk_urb(tx_buf
->urb
, hif_dev
->udev
,
258 usb_sndbulkpipe(hif_dev
->udev
, USB_WLAN_TX_PIPE
),
259 tx_buf
->buf
, tx_buf
->len
,
260 hif_usb_tx_cb
, tx_buf
);
262 ret
= usb_submit_urb(tx_buf
->urb
, GFP_ATOMIC
);
264 tx_buf
->len
= tx_buf
->offset
= 0;
265 ath9k_skb_queue_purge(hif_dev
, &tx_buf
->skb_queue
);
266 __skb_queue_head_init(&tx_buf
->skb_queue
);
267 list_move_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
268 hif_dev
->tx
.tx_buf_cnt
++;
272 TX_STAT_INC(buf_queued
);
/*
 * Enqueue one SKB for WLAN TX under tx_lock (irqsave). Rejects the frame
 * when TX is stopped or tx_skb_cnt exceeds MAX_TX_BUF_NUM; otherwise
 * queues it and kicks __hif_usb_tx() immediately for normal frames, or for
 * AMPDUs when all URBs are free (tx_buf_cnt == MAX_TX_URB_NUM) and fewer
 * than two SKBs are pending.
 * NOTE(review): the error-return statements inside the early-exit branches
 * are missing from this extract.
 */
277 static int hif_usb_send_tx(struct hif_device_usb
*hif_dev
, struct sk_buff
*skb
,
278 struct ath9k_htc_tx_ctl
*tx_ctl
)
282 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
284 if (hif_dev
->tx
.flags
& HIF_USB_TX_STOP
) {
285 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
289 /* Check if the max queue count has been reached */
290 if (hif_dev
->tx
.tx_skb_cnt
> MAX_TX_BUF_NUM
) {
291 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
295 __skb_queue_tail(&hif_dev
->tx
.tx_skb_queue
, skb
);
296 hif_dev
->tx
.tx_skb_cnt
++;
298 /* Send normal frames immediately */
299 if (!tx_ctl
|| (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_NORMAL
)))
300 __hif_usb_tx(hif_dev
);
302 /* Check if AMPDUs have to be sent immediately */
303 if (tx_ctl
&& (tx_ctl
->type
== ATH9K_HTC_AMPDU
) &&
304 (hif_dev
->tx
.tx_buf_cnt
== MAX_TX_URB_NUM
) &&
305 (hif_dev
->tx
.tx_skb_cnt
< 2)) {
306 __hif_usb_tx(hif_dev
);
309 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
/*
 * HIF .start hook: mark the device started (HIF_USB_START) and clear the
 * TX stop flag under tx_lock so queued transmission may resume.
 */
314 static void hif_usb_start(void *hif_handle
, u8 pipe_id
)
316 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
319 hif_dev
->flags
|= HIF_USB_START
;
321 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
322 hif_dev
->tx
.flags
&= ~HIF_USB_TX_STOP
;
323 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
/*
 * HIF .stop hook: under tx_lock, purge the queued TX SKBs, zero the count
 * and set HIF_USB_TX_STOP; then, outside the lock, kill every URB still on
 * the tx_pending list (their completion callbacks see the STOP flag and
 * purge their SKBs — see hif_usb_tx_cb).
 */
326 static void hif_usb_stop(void *hif_handle
, u8 pipe_id
)
328 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
329 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
332 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
333 ath9k_skb_queue_purge(hif_dev
, &hif_dev
->tx
.tx_skb_queue
);
334 hif_dev
->tx
.tx_skb_cnt
= 0;
335 hif_dev
->tx
.flags
|= HIF_USB_TX_STOP
;
336 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
338 /* The pending URBs have to be canceled. */
339 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
340 &hif_dev
->tx
.tx_pending
, list
) {
341 usb_kill_urb(tx_buf
->urb
);
/*
 * HIF .send hook: dispatch by pipe — WLAN TX goes through
 * hif_usb_send_tx(), register writes through hif_usb_send_regout();
 * anything else is logged as an invalid pipe.
 * NOTE(review): the switch statement, break/default lines and the return
 * are missing from this extract.
 */
345 static int hif_usb_send(void *hif_handle
, u8 pipe_id
, struct sk_buff
*skb
,
346 struct ath9k_htc_tx_ctl
*tx_ctl
)
348 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)hif_handle
;
352 case USB_WLAN_TX_PIPE
:
353 ret
= hif_usb_send_tx(hif_dev
, skb
, tx_ctl
);
355 case USB_REG_OUT_PIPE
:
356 ret
= hif_usb_send_regout(hif_dev
, skb
);
359 dev_err(&hif_dev
->udev
->dev
,
360 "ath9k_htc: Invalid TX pipe: %d\n", pipe_id
);
/*
 * HIF operations table handed to the HTC layer: identifies the transport
 * as USB, names the control pipes, and wires the start/stop/send hooks
 * defined above.
 * NOTE(review): the closing brace of the initializer is missing from this
 * extract.
 */
368 static struct ath9k_htc_hif hif_usb
= {
369 .transport
= ATH9K_HIF_USB
,
370 .name
= "ath9k_hif_usb",
372 .control_ul_pipe
= USB_REG_OUT_PIPE
,
373 .control_dl_pipe
= USB_REG_IN_PIPE
,
375 .start
= hif_usb_start
,
376 .stop
= hif_usb_stop
,
377 .send
= hif_usb_send
,
/*
 * De-aggregate a received USB transfer into individual packets. Stream
 * format appears to be: per-packet 4-byte header (little-endian length +
 * ATH_USB_RX_STREAM_MODE_TAG), payload padded to 4 bytes. Handles a packet
 * split across two transfers via remain_skb / rx_remain_len / rx_pad_len
 * state protected by rx_lock, allocates a fresh SKB per packet (32-byte
 * headroom) and finally hands each pooled SKB to ath9k_htc_rx_msg() on
 * USB_WLAN_RX_PIPE.
 * NOTE(review): many control-flow lines (braces, gotos, the chk_idx
 * assignment, allocation-failure branches) are missing from this extract,
 * so the exact split-packet bookkeeping cannot be fully verified here.
 */
380 static void ath9k_hif_usb_rx_stream(struct hif_device_usb
*hif_dev
,
383 struct sk_buff
*nskb
, *skb_pool
[MAX_PKT_NUM_IN_TRANSFER
];
384 int index
= 0, i
= 0, chk_idx
, len
= skb
->len
;
385 int rx_remain_len
= 0, rx_pkt_len
= 0;
386 u16 pkt_len
, pkt_tag
, pool_index
= 0;
389 spin_lock(&hif_dev
->rx_lock
);
391 rx_remain_len
= hif_dev
->rx_remain_len
;
392 rx_pkt_len
= hif_dev
->rx_transfer_len
;
394 if (rx_remain_len
!= 0) {
395 struct sk_buff
*remain_skb
= hif_dev
->remain_skb
;
398 ptr
= (u8
*) remain_skb
->data
;
400 index
= rx_remain_len
;
401 rx_remain_len
-= hif_dev
->rx_pad_len
;
404 memcpy(ptr
, skb
->data
, rx_remain_len
);
406 rx_pkt_len
+= rx_remain_len
;
407 hif_dev
->rx_remain_len
= 0;
408 skb_put(remain_skb
, rx_pkt_len
);
410 skb_pool
[pool_index
++] = remain_skb
;
413 index
= rx_remain_len
;
417 spin_unlock(&hif_dev
->rx_lock
);
419 while (index
< len
) {
420 ptr
= (u8
*) skb
->data
;
422 pkt_len
= ptr
[index
] + (ptr
[index
+1] << 8);
423 pkt_tag
= ptr
[index
+2] + (ptr
[index
+3] << 8);
425 if (pkt_tag
== ATH_USB_RX_STREAM_MODE_TAG
) {
428 pad_len
= 4 - (pkt_len
& 0x3);
433 index
= index
+ 4 + pkt_len
+ pad_len
;
435 if (index
> MAX_RX_BUF_SIZE
) {
436 spin_lock(&hif_dev
->rx_lock
);
437 hif_dev
->rx_remain_len
= index
- MAX_RX_BUF_SIZE
;
438 hif_dev
->rx_transfer_len
=
439 MAX_RX_BUF_SIZE
- chk_idx
- 4;
440 hif_dev
->rx_pad_len
= pad_len
;
442 nskb
= __dev_alloc_skb(pkt_len
+ 32,
445 dev_err(&hif_dev
->udev
->dev
,
446 "ath9k_htc: RX memory allocation"
448 spin_unlock(&hif_dev
->rx_lock
);
451 skb_reserve(nskb
, 32);
452 RX_STAT_INC(skb_allocated
);
454 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]),
455 hif_dev
->rx_transfer_len
);
457 /* Record the buffer pointer */
458 hif_dev
->remain_skb
= nskb
;
459 spin_unlock(&hif_dev
->rx_lock
);
461 nskb
= __dev_alloc_skb(pkt_len
+ 32, GFP_ATOMIC
);
463 dev_err(&hif_dev
->udev
->dev
,
464 "ath9k_htc: RX memory allocation"
468 skb_reserve(nskb
, 32);
469 RX_STAT_INC(skb_allocated
);
471 memcpy(nskb
->data
, &(skb
->data
[chk_idx
+4]), pkt_len
);
472 skb_put(nskb
, pkt_len
);
473 skb_pool
[pool_index
++] = nskb
;
476 RX_STAT_INC(skb_dropped
);
482 for (i
= 0; i
< pool_index
; i
++) {
483 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb_pool
[i
],
484 skb_pool
[i
]->len
, USB_WLAN_RX_PIPE
);
485 RX_STAT_INC(skb_completed
);
/*
 * Completion callback for WLAN RX URBs. Retrieves the SKB from
 * urb->context and the hif_dev from interface data; on success it sizes
 * the SKB to actual_length and feeds it through the stream de-aggregator,
 * then re-arms the same URB (anchored on rx_submitted); submit failure
 * unanchors it.
 * NOTE(review): the status-switch cases and error/free paths are missing
 * from this extract.
 */
489 static void ath9k_hif_usb_rx_cb(struct urb
*urb
)
491 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
492 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
493 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
502 switch (urb
->status
) {
514 if (likely(urb
->actual_length
!= 0)) {
515 skb_put(skb
, urb
->actual_length
);
516 ath9k_hif_usb_rx_stream(hif_dev
, skb
);
520 skb_reset_tail_pointer(skb
);
523 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
524 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
526 usb_unanchor_urb(urb
);
/*
 * Completion callback for the register-in (control read) URB. Passes the
 * received SKB to ath9k_htc_rx_msg() on USB_REG_IN_PIPE, then allocates a
 * fresh MAX_REG_IN_BUF_SIZE SKB and resubmits the URB with it; if the
 * fresh allocation fails the old SKB is reset and the URB resubmitted
 * as-is.
 * NOTE(review): the status-switch cases, error labels and free paths are
 * missing from this extract.
 */
535 static void ath9k_hif_usb_reg_in_cb(struct urb
*urb
)
537 struct sk_buff
*skb
= (struct sk_buff
*) urb
->context
;
538 struct sk_buff
*nskb
;
539 struct hif_device_usb
*hif_dev
= (struct hif_device_usb
*)
540 usb_get_intfdata(usb_ifnum_to_if(urb
->dev
, 0));
549 switch (urb
->status
) {
561 if (likely(urb
->actual_length
!= 0)) {
562 skb_put(skb
, urb
->actual_length
);
564 /* Process the command first */
565 ath9k_htc_rx_msg(hif_dev
->htc_handle
, skb
,
566 skb
->len
, USB_REG_IN_PIPE
);
569 nskb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_ATOMIC
);
571 dev_err(&hif_dev
->udev
->dev
,
572 "ath9k_htc: REG_IN memory allocation failure\n");
577 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
578 usb_rcvbulkpipe(hif_dev
->udev
,
580 nskb
->data
, MAX_REG_IN_BUF_SIZE
,
581 ath9k_hif_usb_reg_in_cb
, nskb
);
583 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
593 skb_reset_tail_pointer(skb
);
596 ret
= usb_submit_urb(urb
, GFP_ATOMIC
);
/*
 * Tear down TX URBs: kill/unlink/free everything on the free list, set
 * HIF_USB_TX_FLUSH under tx_lock so in-flight completions don't requeue
 * (see hif_usb_tx_cb), then kill/unlink/free the pending list too.
 * NOTE(review): the kfree() of each tx_buf and its buffer appears to be
 * among the lines missing from this extract.
 */
606 static void ath9k_hif_usb_dealloc_tx_urbs(struct hif_device_usb
*hif_dev
)
608 struct tx_buf
*tx_buf
= NULL
, *tx_buf_tmp
= NULL
;
611 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
612 &hif_dev
->tx
.tx_buf
, list
) {
613 usb_kill_urb(tx_buf
->urb
);
614 list_del(&tx_buf
->list
);
615 usb_free_urb(tx_buf
->urb
);
620 spin_lock_irqsave(&hif_dev
->tx
.tx_lock
, flags
);
621 hif_dev
->tx
.flags
|= HIF_USB_TX_FLUSH
;
622 spin_unlock_irqrestore(&hif_dev
->tx
.tx_lock
, flags
);
624 list_for_each_entry_safe(tx_buf
, tx_buf_tmp
,
625 &hif_dev
->tx
.tx_pending
, list
) {
626 usb_kill_urb(tx_buf
->urb
);
627 list_del(&tx_buf
->list
);
628 usb_free_urb(tx_buf
->urb
);
/*
 * Initialize TX state (free/pending lists, tx_lock, SKB queue) and
 * preallocate MAX_TX_URB_NUM tx_bufs, each with a MAX_TX_BUF_SIZE staging
 * buffer and a URB, linked onto the free list; on failure the error path
 * unwinds via ath9k_hif_usb_dealloc_tx_urbs().
 * NOTE(review): the allocation-failure checks and the return statements
 * are missing from this extract.
 */
634 static int ath9k_hif_usb_alloc_tx_urbs(struct hif_device_usb
*hif_dev
)
636 struct tx_buf
*tx_buf
;
639 INIT_LIST_HEAD(&hif_dev
->tx
.tx_buf
);
640 INIT_LIST_HEAD(&hif_dev
->tx
.tx_pending
);
641 spin_lock_init(&hif_dev
->tx
.tx_lock
);
642 __skb_queue_head_init(&hif_dev
->tx
.tx_skb_queue
);
644 for (i
= 0; i
< MAX_TX_URB_NUM
; i
++) {
645 tx_buf
= kzalloc(sizeof(struct tx_buf
), GFP_KERNEL
);
649 tx_buf
->buf
= kzalloc(MAX_TX_BUF_SIZE
, GFP_KERNEL
);
653 tx_buf
->urb
= usb_alloc_urb(0, GFP_KERNEL
);
657 tx_buf
->hif_dev
= hif_dev
;
658 __skb_queue_head_init(&tx_buf
->skb_queue
);
660 list_add_tail(&tx_buf
->list
, &hif_dev
->tx
.tx_buf
);
663 hif_dev
->tx
.tx_buf_cnt
= MAX_TX_URB_NUM
;
671 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
/* Kill all anchored RX URBs; each was submitted with its reference
 * dropped (see alloc path), so killing also frees them. */
675 static void ath9k_hif_usb_dealloc_rx_urbs(struct hif_device_usb
*hif_dev
)
677 usb_kill_anchored_urbs(&hif_dev
->rx_submitted
);
/*
 * Set up the RX path: init the rx_submitted anchor and rx_lock, then for
 * each of MAX_RX_URB_NUM slots allocate a URB plus a MAX_RX_BUF_SIZE SKB,
 * fill a bulk-in URB completing in ath9k_hif_usb_rx_cb, anchor and submit
 * it. The per-URB reference is dropped after submission so that killing
 * the anchored URBs also frees them (see comment at original lines
 * 722-723). Errors unwind through ath9k_hif_usb_dealloc_rx_urbs().
 * NOTE(review): failure checks, usb_free_urb on the error path and return
 * statements are missing from this extract.
 */
680 static int ath9k_hif_usb_alloc_rx_urbs(struct hif_device_usb
*hif_dev
)
682 struct urb
*urb
= NULL
;
683 struct sk_buff
*skb
= NULL
;
686 init_usb_anchor(&hif_dev
->rx_submitted
);
687 spin_lock_init(&hif_dev
->rx_lock
);
689 for (i
= 0; i
< MAX_RX_URB_NUM
; i
++) {
692 urb
= usb_alloc_urb(0, GFP_KERNEL
);
698 /* Allocate buffer */
699 skb
= alloc_skb(MAX_RX_BUF_SIZE
, GFP_KERNEL
);
705 usb_fill_bulk_urb(urb
, hif_dev
->udev
,
706 usb_rcvbulkpipe(hif_dev
->udev
,
708 skb
->data
, MAX_RX_BUF_SIZE
,
709 ath9k_hif_usb_rx_cb
, skb
);
712 usb_anchor_urb(urb
, &hif_dev
->rx_submitted
);
715 ret
= usb_submit_urb(urb
, GFP_KERNEL
);
717 usb_unanchor_urb(urb
);
722 * Drop reference count.
723 * This ensures that the URB is freed when killing them.
735 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
/*
 * Tear down the register-in URB: kill it, free the SKB stored in its
 * context (if any), free the URB and clear the pointer so the teardown
 * is idempotent.
 */
739 static void ath9k_hif_usb_dealloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
741 if (hif_dev
->reg_in_urb
) {
742 usb_kill_urb(hif_dev
->reg_in_urb
);
743 if (hif_dev
->reg_in_urb
->context
)
744 kfree_skb((void *)hif_dev
->reg_in_urb
->context
);
745 usb_free_urb(hif_dev
->reg_in_urb
);
746 hif_dev
->reg_in_urb
= NULL
;
/*
 * Allocate the single register-in URB and its MAX_REG_IN_BUF_SIZE SKB,
 * fill it as a bulk-in transfer completing in ath9k_hif_usb_reg_in_cb,
 * and submit it; failure unwinds via the dealloc helper above.
 * NOTE(review): the SKB NULL check, pipe endpoint argument and return
 * statements are missing from this extract.
 */
750 static int ath9k_hif_usb_alloc_reg_in_urb(struct hif_device_usb
*hif_dev
)
754 hif_dev
->reg_in_urb
= usb_alloc_urb(0, GFP_KERNEL
);
755 if (hif_dev
->reg_in_urb
== NULL
)
758 skb
= alloc_skb(MAX_REG_IN_BUF_SIZE
, GFP_KERNEL
);
762 usb_fill_bulk_urb(hif_dev
->reg_in_urb
, hif_dev
->udev
,
763 usb_rcvbulkpipe(hif_dev
->udev
,
765 skb
->data
, MAX_REG_IN_BUF_SIZE
,
766 ath9k_hif_usb_reg_in_cb
, skb
);
768 if (usb_submit_urb(hif_dev
->reg_in_urb
, GFP_KERNEL
) != 0)
774 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
/*
 * Allocate all URB resources in order (regout anchor, TX, RX, register-in)
 * with reverse-order unwinding on failure.
 */
778 static int ath9k_hif_usb_alloc_urbs(struct hif_device_usb
*hif_dev
)
781 init_usb_anchor(&hif_dev
->regout_submitted
);
784 if (ath9k_hif_usb_alloc_tx_urbs(hif_dev
) < 0)
788 if (ath9k_hif_usb_alloc_rx_urbs(hif_dev
) < 0)
792 if (ath9k_hif_usb_alloc_reg_in_urb(hif_dev
) < 0)
797 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
799 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
/*
 * Release every URB resource: kill anchored regout URBs, then tear down
 * the register-in, TX and RX paths.
 */
804 static void ath9k_hif_usb_dealloc_urbs(struct hif_device_usb
*hif_dev
)
806 usb_kill_anchored_urbs(&hif_dev
->regout_submitted
);
807 ath9k_hif_usb_dealloc_reg_in_urb(hif_dev
);
808 ath9k_hif_usb_dealloc_tx_urbs(hif_dev
);
809 ath9k_hif_usb_dealloc_rx_urbs(hif_dev
);
/*
 * Push the requested firmware image to the target in 4 KiB control
 * transfers (FIRMWARE_DOWNLOAD, vendor request 0x40|OUT, address in the
 * high wValue bits), then issue FIRMWARE_DOWNLOAD_COMP with the per-chip
 * text offset (AR7010 vs AR9271) to start the firmware.
 * NOTE(review): the transfer loop body, error handling and kfree(buf) are
 * missing from this extract. The HZ timeout argument predates the
 * millisecond-based usb_control_msg timeout convention — worth confirming
 * against the kernel version this targets.
 */
812 static int ath9k_hif_usb_download_fw(struct hif_device_usb
*hif_dev
)
815 const void *data
= hif_dev
->firmware
->data
;
816 size_t len
= hif_dev
->firmware
->size
;
817 u32 addr
= AR9271_FIRMWARE
;
818 u8
*buf
= kzalloc(4096, GFP_KERNEL
);
825 transfer
= min_t(int, len
, 4096);
826 memcpy(buf
, data
, transfer
);
828 err
= usb_control_msg(hif_dev
->udev
,
829 usb_sndctrlpipe(hif_dev
->udev
, 0),
830 FIRMWARE_DOWNLOAD
, 0x40 | USB_DIR_OUT
,
831 addr
>> 8, 0, buf
, transfer
, HZ
);
843 switch (hif_dev
->device_id
) {
849 firm_offset
= AR7010_FIRMWARE_TEXT
;
852 firm_offset
= AR9271_FIRMWARE_TEXT
;
857 * Issue FW download complete command to firmware.
859 err
= usb_control_msg(hif_dev
->udev
, usb_sndctrlpipe(hif_dev
->udev
, 0),
860 FIRMWARE_DOWNLOAD_COMP
,
862 firm_offset
>> 8, 0, NULL
, 0, HZ
);
866 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: Transferred FW: %s, size: %ld\n",
867 hif_dev
->fw_name
, (unsigned long) hif_dev
->firmware
->size
);
/*
 * Device bring-up: request the firmware named in hif_dev->fw_name,
 * download it, patch any interrupt endpoint descriptors in altsetting 0 to
 * bulk (the firmware changes EP4's type — see the inline comment), and
 * allocate all URBs. Error labels release the firmware and URBs and clear
 * hif_dev->firmware.
 * NOTE(review): the goto targets, braces and return statements are
 * missing from this extract.
 */
872 static int ath9k_hif_usb_dev_init(struct hif_device_usb
*hif_dev
)
875 struct usb_host_interface
*alt
= &hif_dev
->interface
->altsetting
[0];
876 struct usb_endpoint_descriptor
*endp
;
878 /* Request firmware */
879 ret
= request_firmware(&hif_dev
->firmware
, hif_dev
->fw_name
,
880 &hif_dev
->udev
->dev
);
882 dev_err(&hif_dev
->udev
->dev
,
883 "ath9k_htc: Firmware - %s not found\n", hif_dev
->fw_name
);
887 /* Download firmware */
888 ret
= ath9k_hif_usb_download_fw(hif_dev
);
890 dev_err(&hif_dev
->udev
->dev
,
891 "ath9k_htc: Firmware - %s download failed\n",
893 goto err_fw_download
;
896 /* On downloading the firmware to the target, the USB descriptor of EP4
897 * is 'patched' to change the type of the endpoint to Bulk. This will
898 * bring down CPU usage during the scan period.
900 for (idx
= 0; idx
< alt
->desc
.bNumEndpoints
; idx
++) {
901 endp
= &alt
->endpoint
[idx
].desc
;
902 if ((endp
->bmAttributes
& USB_ENDPOINT_XFERTYPE_MASK
)
903 == USB_ENDPOINT_XFER_INT
) {
904 endp
->bmAttributes
&= ~USB_ENDPOINT_XFERTYPE_MASK
;
905 endp
->bmAttributes
|= USB_ENDPOINT_XFER_BULK
;
911 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
913 dev_err(&hif_dev
->udev
->dev
,
914 "ath9k_htc: Unable to allocate URBs\n");
921 ath9k_hif_usb_dealloc_urbs(hif_dev
);
923 release_firmware(hif_dev
->firmware
);
925 hif_dev
->firmware
= NULL
;
/*
 * Device teardown: free all URBs and release the firmware if it was
 * loaded.
 */
929 static void ath9k_hif_usb_dev_deinit(struct hif_device_usb
*hif_dev
)
931 ath9k_hif_usb_dealloc_urbs(hif_dev
);
932 if (hif_dev
->firmware
)
933 release_firmware(hif_dev
->firmware
);
/*
 * USB probe: allocate hif_dev state, record udev/interface/device id,
 * enable reset_resume, allocate the HTC handle, pick a firmware image by
 * product id (AR7010 1.1 selected by bcdDevice == 0x0202), then run
 * ath9k_hif_usb_dev_init() and ath9k_htc_hw_init(). Error labels unwind
 * dev_deinit, HTC free and intfdata in reverse order.
 * NOTE(review): the switch cases for the device id, kfree(hif_dev) and
 * the return statements are missing from this extract.
 */
936 static int ath9k_hif_usb_probe(struct usb_interface
*interface
,
937 const struct usb_device_id
*id
)
939 struct usb_device
*udev
= interface_to_usbdev(interface
);
940 struct hif_device_usb
*hif_dev
;
943 hif_dev
= kzalloc(sizeof(struct hif_device_usb
), GFP_KERNEL
);
950 hif_dev
->udev
= udev
;
951 hif_dev
->interface
= interface
;
952 hif_dev
->device_id
= id
->idProduct
;
954 udev
->reset_resume
= 1;
956 usb_set_intfdata(interface
, hif_dev
);
958 hif_dev
->htc_handle
= ath9k_htc_hw_alloc(hif_dev
, &hif_usb
,
959 &hif_dev
->udev
->dev
);
960 if (hif_dev
->htc_handle
== NULL
) {
962 goto err_htc_hw_alloc
;
965 /* Find out which firmware to load */
967 switch(hif_dev
->device_id
) {
973 if (le16_to_cpu(udev
->descriptor
.bcdDevice
) == 0x0202)
974 hif_dev
->fw_name
= FIRMWARE_AR7010_1_1
;
976 hif_dev
->fw_name
= FIRMWARE_AR7010
;
979 hif_dev
->fw_name
= FIRMWARE_AR9271
;
983 ret
= ath9k_hif_usb_dev_init(hif_dev
);
986 goto err_hif_init_usb
;
989 ret
= ath9k_htc_hw_init(hif_dev
->htc_handle
,
990 &hif_dev
->udev
->dev
, hif_dev
->device_id
,
991 hif_dev
->udev
->product
);
994 goto err_htc_hw_init
;
997 dev_info(&hif_dev
->udev
->dev
, "ath9k_htc: USB layer initialized\n");
1002 ath9k_hif_usb_dev_deinit(hif_dev
);
1004 ath9k_htc_hw_free(hif_dev
->htc_handle
);
1006 usb_set_intfdata(interface
, NULL
);
/*
 * Send the magic 0xffffffff reboot word to the device's register-out bulk
 * pipe so the firmware reboots when the driver detaches. The command is
 * copied to a heap buffer (USB stacks require DMA-able memory, not stack
 * data).
 * NOTE(review): the NULL check on kmemdup and kfree(buf) are missing from
 * this extract.
 */
1013 static void ath9k_hif_usb_reboot(struct usb_device
*udev
)
1015 u32 reboot_cmd
= 0xffffffff;
1019 buf
= kmemdup(&reboot_cmd
, 4, GFP_KERNEL
);
1023 ret
= usb_bulk_msg(udev
, usb_sndbulkpipe(udev
, USB_REG_OUT_PIPE
),
1026 dev_err(&udev
->dev
, "ath9k_htc: USB reboot failed\n");
/*
 * USB disconnect: deinit HTC (telling it whether the device is physically
 * gone via USB_STATE_NOTATTACHED), free the HTC handle, tear down URBs and
 * firmware, clear intfdata, and — if the interface had been started —
 * reboot the firmware so a reload finds it in a clean state.
 * NOTE(review): the NULL check on hif_dev and kfree are missing from this
 * extract.
 */
1031 static void ath9k_hif_usb_disconnect(struct usb_interface
*interface
)
1033 struct usb_device
*udev
= interface_to_usbdev(interface
);
1034 struct hif_device_usb
*hif_dev
=
1035 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1038 ath9k_htc_hw_deinit(hif_dev
->htc_handle
,
1039 (udev
->state
== USB_STATE_NOTATTACHED
) ? true : false);
1040 ath9k_htc_hw_free(hif_dev
->htc_handle
);
1041 ath9k_hif_usb_dev_deinit(hif_dev
);
1042 usb_set_intfdata(interface
, NULL
);
1045 if (hif_dev
->flags
& HIF_USB_START
)
1046 ath9k_hif_usb_reboot(udev
);
1049 dev_info(&udev
->dev
, "ath9k_htc: USB layer deinitialized\n");
/*
 * PM suspend: if the interface was never started, put the target into
 * FULLSLEEP via ath9k_htc_suspend() (per the inline comment at original
 * line 1061), then release all URBs; they are re-allocated in resume.
 */
1054 static int ath9k_hif_usb_suspend(struct usb_interface
*interface
,
1055 pm_message_t message
)
1057 struct hif_device_usb
*hif_dev
=
1058 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1061 * The device has to be set to FULLSLEEP mode in case no
1064 if (!(hif_dev
->flags
& HIF_USB_START
))
1065 ath9k_htc_suspend(hif_dev
->htc_handle
);
1067 ath9k_hif_usb_dealloc_urbs(hif_dev
);
/*
 * PM resume: re-allocate the URBs, re-download the firmware if it is
 * still held (reset_resume path), then resume HTC; any failure deallocates
 * the URBs again and propagates the error.
 * NOTE(review): the error-check branches and return statements are
 * missing from this extract.
 */
1072 static int ath9k_hif_usb_resume(struct usb_interface
*interface
)
1074 struct hif_device_usb
*hif_dev
=
1075 (struct hif_device_usb
*) usb_get_intfdata(interface
);
1078 ret
= ath9k_hif_usb_alloc_urbs(hif_dev
);
1082 if (hif_dev
->firmware
) {
1083 ret
= ath9k_hif_usb_download_fw(hif_dev
);
1087 ath9k_hif_usb_dealloc_urbs(hif_dev
);
1093 ret
= ath9k_htc_resume(hif_dev
->htc_handle
);
1101 ath9k_hif_usb_dealloc_urbs(hif_dev
);
/*
 * USB driver descriptor: probe/disconnect plus PM hooks; resume doubles
 * as reset_resume (firmware is re-downloaded there). Matches the device
 * table defined at the top of the file.
 * NOTE(review): the closing brace of the initializer is missing from this
 * extract.
 */
1107 static struct usb_driver ath9k_hif_usb_driver
= {
1108 .name
= "ath9k_hif_usb",
1109 .probe
= ath9k_hif_usb_probe
,
1110 .disconnect
= ath9k_hif_usb_disconnect
,
1112 .suspend
= ath9k_hif_usb_suspend
,
1113 .resume
= ath9k_hif_usb_resume
,
1114 .reset_resume
= ath9k_hif_usb_resume
,
1116 .id_table
= ath9k_hif_usb_ids
,
/* Module entry point for this HIF: register the USB driver with the
 * core; returns usb_register()'s status. */
1120 int ath9k_hif_usb_init(void)
1122 return usb_register(&ath9k_hif_usb_driver
);
/* Module exit counterpart: deregister the USB driver. */
1125 void ath9k_hif_usb_exit(void)
1127 usb_deregister(&ath9k_hif_usb_driver
);