ath9k_hw: add AR9271 srev and device ID to allow hw to support ar9271
drivers/net/wireless/ath/ath9k/hw.c
1 /*
2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 #include <linux/io.h>
18 #include <asm/unaligned.h>
20 #include "hw.h"
21 #include "rc.h"
22 #include "initvals.h"
24 #define ATH9K_CLOCK_RATE_CCK 22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
30 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
31 struct ar5416_eeprom_def *pEepData,
32 u32 reg, u32 value);
33 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
34 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 MODULE_AUTHOR("Atheros Communications");
37 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
38 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
39 MODULE_LICENSE("Dual BSD/GPL");
41 static int __init ath9k_init(void)
43 return 0;
45 module_init(ath9k_init);
47 static void __exit ath9k_exit(void)
49 return;
51 module_exit(ath9k_exit);
53 /********************/
54 /* Helper Functions */
55 /********************/
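/*
 * Clock/time conversion helpers: the MAC clock rate depends on the
 * current band (CCK vs. 2 GHz/5 GHz OFDM) and doubles on HT40 channels,
 * so all conversions between microseconds and MAC clocks go through
 * the routines below.
 */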
57 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
59 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
61 if (!ah->curchan) /* should really check for CCK instead */
62 return clks / ATH9K_CLOCK_RATE_CCK;
63 if (conf->channel->band == IEEE80211_BAND_2GHZ)
64 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
66 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
69 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
71 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
73 if (conf_is_ht40(conf))
74 return ath9k_hw_mac_usec(ah, clks) / 2;
75 else
76 return ath9k_hw_mac_usec(ah, clks);
79 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
81 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
83 if (!ah->curchan) /* should really check for CCK instead */
84 return usecs * ATH9K_CLOCK_RATE_CCK;
85 if (conf->channel->band == IEEE80211_BAND_2GHZ)
86 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
87 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
90 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
92 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
94 if (conf_is_ht40(conf))
95 return ath9k_hw_mac_clks(ah, usecs) * 2;
96 else
97 return ath9k_hw_mac_clks(ah, usecs);
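/*
 * Poll a register until (value & mask) == val, checking once every
 * AH_TIME_QUANTUM microseconds until the given timeout expires.
 */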
100 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
102 int i;
104 BUG_ON(timeout < AH_TIME_QUANTUM);
106 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
107 if ((REG_READ(ah, reg) & mask) == val)
108 return true;
110 udelay(AH_TIME_QUANTUM);
113 ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
114 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
115 timeout, reg, REG_READ(ah, reg), mask, val);
117 return false;
119 EXPORT_SYMBOL(ath9k_hw_wait);
121 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
123 u32 retval;
124 int i;
126 for (i = 0, retval = 0; i < n; i++) {
127 retval = (retval << 1) | (val & 1);
128 val >>= 1;
130 return retval;
133 bool ath9k_get_channel_edges(struct ath_hw *ah,
134 u16 flags, u16 *low,
135 u16 *high)
137 struct ath9k_hw_capabilities *pCap = &ah->caps;
139 if (flags & CHANNEL_5GHZ) {
140 *low = pCap->low_5ghz_chan;
141 *high = pCap->high_5ghz_chan;
142 return true;
144 if ((flags & CHANNEL_2GHZ)) {
145 *low = pCap->low_2ghz_chan;
146 *high = pCap->high_2ghz_chan;
147 return true;
149 return false;
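/*
 * Compute the transmit duration (in microseconds) of a frame of
 * frameLen bytes at the given rate index, including SIFS, preamble and
 * PLCP overhead, for CCK and for full/half/quarter-rate OFDM PHYs.
 */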
152 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
153 const struct ath_rate_table *rates,
154 u32 frameLen, u16 rateix,
155 bool shortPreamble)
157 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
158 u32 kbps;
160 kbps = rates->info[rateix].ratekbps;
162 if (kbps == 0)
163 return 0;
165 switch (rates->info[rateix].phy) {
166 case WLAN_RC_PHY_CCK:
167 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
168 if (shortPreamble && rates->info[rateix].short_preamble)
169 phyTime >>= 1;
170 numBits = frameLen << 3;
171 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
172 break;
173 case WLAN_RC_PHY_OFDM:
174 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
175 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
176 numBits = OFDM_PLCP_BITS + (frameLen << 3);
177 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
178 txTime = OFDM_SIFS_TIME_QUARTER
179 + OFDM_PREAMBLE_TIME_QUARTER
180 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
181 } else if (ah->curchan &&
182 IS_CHAN_HALF_RATE(ah->curchan)) {
183 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
184 numBits = OFDM_PLCP_BITS + (frameLen << 3);
185 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
186 txTime = OFDM_SIFS_TIME_HALF +
187 OFDM_PREAMBLE_TIME_HALF
188 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
189 } else {
190 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
191 numBits = OFDM_PLCP_BITS + (frameLen << 3);
192 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
193 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
194 + (numSymbols * OFDM_SYMBOL_TIME);
196 break;
197 default:
198 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
199 "Unknown phy %u (rate ix %u)\n",
200 rates->info[rateix].phy, rateix);
201 txTime = 0;
202 break;
205 return txTime;
207 EXPORT_SYMBOL(ath9k_hw_computetxtime);
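/*
 * Fill in the control, extension and synthesizer channel centers for a
 * channel. For HT40 the synthesizer center sits HT40_CHANNEL_CENTER_SHIFT
 * above (HT40+) or below (HT40-) the control channel.
 */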
209 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
210 struct ath9k_channel *chan,
211 struct chan_centers *centers)
213 int8_t extoff;
215 if (!IS_CHAN_HT40(chan)) {
216 centers->ctl_center = centers->ext_center =
217 centers->synth_center = chan->channel;
218 return;
221 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
222 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
223 centers->synth_center =
224 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
225 extoff = 1;
226 } else {
227 centers->synth_center =
228 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
229 extoff = -1;
232 centers->ctl_center =
233 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
234 /* 25 MHz spacing is supported by hw but not on upper layers */
235 centers->ext_center =
236 centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
239 /******************/
240 /* Chip Revisions */
241 /******************/
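/*
 * Read AR_SREV to determine the MAC version and revision and whether
 * the device is PCI-Express; newer parts report these through the
 * extended SREV register layout.
 */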
243 static void ath9k_hw_read_revisions(struct ath_hw *ah)
245 u32 val;
247 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
249 if (val == 0xFF) {
250 val = REG_READ(ah, AR_SREV);
251 ah->hw_version.macVersion =
252 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
253 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
254 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
255 } else {
256 if (!AR_SREV_9100(ah))
257 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
259 ah->hw_version.macRev = val & AR_SREV_REVISION;
261 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
262 ah->is_pciexpress = true;
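/*
 * Read the analog radio revision back from the baseband; the raw value
 * is nibble-swapped and bit-reversed before being returned.
 */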
266 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
268 u32 val;
269 int i;
271 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
273 for (i = 0; i < 8; i++)
274 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
275 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
276 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
278 return ath9k_hw_reverse_bits(val, 8);
281 /************************************/
282 /* HW Attach, Detach, Init Routines */
283 /************************************/
285 static void ath9k_hw_disablepcie(struct ath_hw *ah)
287 if (AR_SREV_9100(ah))
288 return;
290 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
291 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
292 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
293 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
294 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
295 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
296 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
297 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
298 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
300 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
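/*
 * Basic register sanity check: walk counter values and fixed test
 * patterns through one MAC and one baseband register, verify the
 * read-back values, and restore the original register contents.
 */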
303 static bool ath9k_hw_chip_test(struct ath_hw *ah)
305 struct ath_common *common = ath9k_hw_common(ah);
306 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
307 u32 regHold[2];
308 u32 patternData[4] = { 0x55555555,
309 0xaaaaaaaa,
310 0x66666666,
311 0x99999999 };
312 int i, j;
314 for (i = 0; i < 2; i++) {
315 u32 addr = regAddr[i];
316 u32 wrData, rdData;
318 regHold[i] = REG_READ(ah, addr);
319 for (j = 0; j < 0x100; j++) {
320 wrData = (j << 16) | j;
321 REG_WRITE(ah, addr, wrData);
322 rdData = REG_READ(ah, addr);
323 if (rdData != wrData) {
324 ath_print(common, ATH_DBG_FATAL,
325 "address test failed "
326 "addr: 0x%08x - wr:0x%08x != "
327 "rd:0x%08x\n",
328 addr, wrData, rdData);
329 return false;
332 for (j = 0; j < 4; j++) {
333 wrData = patternData[j];
334 REG_WRITE(ah, addr, wrData);
335 rdData = REG_READ(ah, addr);
336 if (wrData != rdData) {
337 ath_print(common, ATH_DBG_FATAL,
338 "address test failed "
339 "addr: 0x%08x - wr:0x%08x != "
340 "rd:0x%08x\n",
341 addr, wrData, rdData);
342 return false;
345 REG_WRITE(ah, regAddr[i], regHold[i]);
347 udelay(100);
349 return true;
352 static const char *ath9k_hw_devname(u16 devid)
354 switch (devid) {
355 case AR5416_DEVID_PCI:
356 return "Atheros 5416";
357 case AR5416_DEVID_PCIE:
358 return "Atheros 5418";
359 case AR9160_DEVID_PCI:
360 return "Atheros 9160";
361 case AR5416_AR9100_DEVID:
362 return "Atheros 9100";
363 case AR9280_DEVID_PCI:
364 case AR9280_DEVID_PCIE:
365 return "Atheros 9280";
366 case AR9285_DEVID_PCIE:
367 return "Atheros 9285";
368 case AR5416_DEVID_AR9287_PCI:
369 case AR5416_DEVID_AR9287_PCIE:
370 return "Atheros 9287";
373 return NULL;
376 static void ath9k_hw_init_config(struct ath_hw *ah)
378 int i;
380 ah->config.dma_beacon_response_time = 2;
381 ah->config.sw_beacon_response_time = 10;
382 ah->config.additional_swba_backoff = 0;
383 ah->config.ack_6mb = 0x0;
384 ah->config.cwm_ignore_extcca = 0;
385 ah->config.pcie_powersave_enable = 0;
386 ah->config.pcie_clock_req = 0;
387 ah->config.pcie_waen = 0;
388 ah->config.analog_shiftreg = 1;
389 ah->config.ht_enable = 1;
390 ah->config.ofdm_trig_low = 200;
391 ah->config.ofdm_trig_high = 500;
392 ah->config.cck_trig_high = 200;
393 ah->config.cck_trig_low = 100;
394 ah->config.enable_ani = 1;
395 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
396 ah->config.antenna_switch_swap = 0;
398 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
399 ah->config.spurchans[i][0] = AR_NO_SPUR;
400 ah->config.spurchans[i][1] = AR_NO_SPUR;
403 ah->config.intr_mitigation = true;
406 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
407 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
408 * This means we use it for all AR5416 devices, and the few
409 * minor PCI AR9280 devices out there.
411 * Serialization is required because these devices do not handle
412 * well the case of two concurrent reads/writes due to the latency
413 * involved. During one read/write another read/write can be issued
414 * on another CPU while the previous read/write may still be working
415 * on our hardware, if we hit this case the hardware poops in a loop.
416 * We prevent this by serializing reads and writes.
418 * This issue is not present on PCI-Express devices or pre-AR5416
419 * devices (legacy, 802.11abg).
421 if (num_possible_cpus() > 1)
422 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
424 EXPORT_SYMBOL(ath9k_hw_init);
426 static void ath9k_hw_init_defaults(struct ath_hw *ah)
428 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
430 regulatory->country_code = CTRY_DEFAULT;
431 regulatory->power_limit = MAX_RATE_POWER;
432 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
434 ah->hw_version.magic = AR5416_MAGIC;
435 ah->hw_version.subvendorid = 0;
437 ah->ah_flags = 0;
438 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
439 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
440 if (!AR_SREV_9100(ah))
441 ah->ah_flags = AH_USE_EEPROM;
443 ah->atim_window = 0;
444 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
445 ah->beacon_interval = 100;
446 ah->enable_32kHz_clock = DONT_USE_32KHZ;
447 ah->slottime = (u32) -1;
448 ah->acktimeout = (u32) -1;
449 ah->ctstimeout = (u32) -1;
450 ah->globaltxtimeout = (u32) -1;
452 ah->gbeacon_rate = 0;
454 ah->power_mode = ATH9K_PM_UNDEFINED;
457 static int ath9k_hw_rfattach(struct ath_hw *ah)
459 bool rfStatus = false;
460 int ecode = 0;
462 rfStatus = ath9k_hw_init_rf(ah, &ecode);
463 if (!rfStatus) {
464 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
465 "RF setup failed, status: %u\n", ecode);
466 return ecode;
469 return 0;
472 static int ath9k_hw_rf_claim(struct ath_hw *ah)
474 u32 val;
476 REG_WRITE(ah, AR_PHY(0), 0x00000007);
478 val = ath9k_hw_get_radiorev(ah);
479 switch (val & AR_RADIO_SREV_MAJOR) {
480 case 0:
481 val = AR_RAD5133_SREV_MAJOR;
482 break;
483 case AR_RAD5133_SREV_MAJOR:
484 case AR_RAD5122_SREV_MAJOR:
485 case AR_RAD2133_SREV_MAJOR:
486 case AR_RAD2122_SREV_MAJOR:
487 break;
488 default:
489 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
490 "Radio Chip Rev 0x%02X not supported\n",
491 val & AR_RADIO_SREV_MAJOR);
492 return -EOPNOTSUPP;
495 ah->hw_version.analog5GhzRev = val;
497 return 0;
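/*
 * Read the MAC address from the three EEPROM MAC words, rejecting the
 * all-zero and all-ones patterns as invalid.
 */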
500 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
502 struct ath_common *common = ath9k_hw_common(ah);
503 u32 sum;
504 int i;
505 u16 eeval;
507 sum = 0;
508 for (i = 0; i < 3; i++) {
509 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
510 sum += eeval;
511 common->macaddr[2 * i] = eeval >> 8;
512 common->macaddr[2 * i + 1] = eeval & 0xff;
514 if (sum == 0 || sum == 0xffff * 3)
515 return -EADDRNOTAVAIL;
517 return 0;
520 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
522 u32 rxgain_type;
524 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
525 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
527 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
528 INIT_INI_ARRAY(&ah->iniModesRxGain,
529 ar9280Modes_backoff_13db_rxgain_9280_2,
530 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
531 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
532 INIT_INI_ARRAY(&ah->iniModesRxGain,
533 ar9280Modes_backoff_23db_rxgain_9280_2,
534 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
535 else
536 INIT_INI_ARRAY(&ah->iniModesRxGain,
537 ar9280Modes_original_rxgain_9280_2,
538 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
539 } else {
540 INIT_INI_ARRAY(&ah->iniModesRxGain,
541 ar9280Modes_original_rxgain_9280_2,
542 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
546 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
548 u32 txgain_type;
550 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
551 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
553 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
554 INIT_INI_ARRAY(&ah->iniModesTxGain,
555 ar9280Modes_high_power_tx_gain_9280_2,
556 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
557 else
558 INIT_INI_ARRAY(&ah->iniModesTxGain,
559 ar9280Modes_original_tx_gain_9280_2,
560 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
561 } else {
562 INIT_INI_ARRAY(&ah->iniModesTxGain,
563 ar9280Modes_original_tx_gain_9280_2,
564 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
568 static int ath9k_hw_post_init(struct ath_hw *ah)
570 int ecode;
572 if (!ath9k_hw_chip_test(ah))
573 return -ENODEV;
575 ecode = ath9k_hw_rf_claim(ah);
576 if (ecode != 0)
577 return ecode;
579 ecode = ath9k_hw_eeprom_init(ah);
580 if (ecode != 0)
581 return ecode;
583 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
584 "Eeprom VER: %d, REV: %d\n",
585 ah->eep_ops->get_eeprom_ver(ah),
586 ah->eep_ops->get_eeprom_rev(ah));
588 ecode = ath9k_hw_rfattach(ah);
589 if (ecode != 0)
590 return ecode;
592 if (!AR_SREV_9100(ah)) {
593 ath9k_hw_ani_setup(ah);
594 ath9k_hw_ani_init(ah);
597 return 0;
600 static bool ath9k_hw_devid_supported(u16 devid)
602 switch (devid) {
603 case AR5416_DEVID_PCI:
604 case AR5416_DEVID_PCIE:
605 case AR5416_AR9100_DEVID:
606 case AR9160_DEVID_PCI:
607 case AR9280_DEVID_PCI:
608 case AR9280_DEVID_PCIE:
609 case AR9285_DEVID_PCIE:
610 case AR5416_DEVID_AR9287_PCI:
611 case AR5416_DEVID_AR9287_PCIE:
612 case AR9271_USB:
613 return true;
614 default:
615 break;
617 return false;
620 static bool ath9k_hw_macversion_supported(u32 macversion)
622 switch (macversion) {
623 case AR_SREV_VERSION_5416_PCI:
624 case AR_SREV_VERSION_5416_PCIE:
625 case AR_SREV_VERSION_9160:
626 case AR_SREV_VERSION_9100:
627 case AR_SREV_VERSION_9280:
628 case AR_SREV_VERSION_9285:
629 case AR_SREV_VERSION_9287:
630 case AR_SREV_VERSION_9271:
631 return true;
632 default:
633 break;
635 return false;
638 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
640 if (AR_SREV_9160_10_OR_LATER(ah)) {
641 if (AR_SREV_9280_10_OR_LATER(ah)) {
642 ah->iq_caldata.calData = &iq_cal_single_sample;
643 ah->adcgain_caldata.calData =
644 &adc_gain_cal_single_sample;
645 ah->adcdc_caldata.calData =
646 &adc_dc_cal_single_sample;
647 ah->adcdc_calinitdata.calData =
648 &adc_init_dc_cal;
649 } else {
650 ah->iq_caldata.calData = &iq_cal_multi_sample;
651 ah->adcgain_caldata.calData =
652 &adc_gain_cal_multi_sample;
653 ah->adcdc_caldata.calData =
654 &adc_dc_cal_multi_sample;
655 ah->adcdc_calinitdata.calData =
656 &adc_init_dc_cal;
658 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
662 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
664 if (AR_SREV_9271(ah)) {
665 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
666 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
667 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
668 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
669 return;
672 if (AR_SREV_9287_11_OR_LATER(ah)) {
673 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
674 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
675 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
676 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
677 if (ah->config.pcie_clock_req)
678 INIT_INI_ARRAY(&ah->iniPcieSerdes,
679 ar9287PciePhy_clkreq_off_L1_9287_1_1,
680 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
681 else
682 INIT_INI_ARRAY(&ah->iniPcieSerdes,
683 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
684 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
686 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
687 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
688 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
689 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
690 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
692 if (ah->config.pcie_clock_req)
693 INIT_INI_ARRAY(&ah->iniPcieSerdes,
694 ar9287PciePhy_clkreq_off_L1_9287_1_0,
695 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
696 else
697 INIT_INI_ARRAY(&ah->iniPcieSerdes,
698 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
699 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
701 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
704 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
705 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
706 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
707 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
709 if (ah->config.pcie_clock_req) {
710 INIT_INI_ARRAY(&ah->iniPcieSerdes,
711 ar9285PciePhy_clkreq_off_L1_9285_1_2,
712 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
713 } else {
714 INIT_INI_ARRAY(&ah->iniPcieSerdes,
715 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
716 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
719 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
720 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
721 ARRAY_SIZE(ar9285Modes_9285), 6);
722 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
723 ARRAY_SIZE(ar9285Common_9285), 2);
725 if (ah->config.pcie_clock_req) {
726 INIT_INI_ARRAY(&ah->iniPcieSerdes,
727 ar9285PciePhy_clkreq_off_L1_9285,
728 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
729 } else {
730 INIT_INI_ARRAY(&ah->iniPcieSerdes,
731 ar9285PciePhy_clkreq_always_on_L1_9285,
732 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
734 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
735 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
736 ARRAY_SIZE(ar9280Modes_9280_2), 6);
737 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
738 ARRAY_SIZE(ar9280Common_9280_2), 2);
740 if (ah->config.pcie_clock_req) {
741 INIT_INI_ARRAY(&ah->iniPcieSerdes,
742 ar9280PciePhy_clkreq_off_L1_9280,
743 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
744 } else {
745 INIT_INI_ARRAY(&ah->iniPcieSerdes,
746 ar9280PciePhy_clkreq_always_on_L1_9280,
747 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
749 INIT_INI_ARRAY(&ah->iniModesAdditional,
750 ar9280Modes_fast_clock_9280_2,
751 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
752 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
753 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
754 ARRAY_SIZE(ar9280Modes_9280), 6);
755 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
756 ARRAY_SIZE(ar9280Common_9280), 2);
757 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
758 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
759 ARRAY_SIZE(ar5416Modes_9160), 6);
760 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
761 ARRAY_SIZE(ar5416Common_9160), 2);
762 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
763 ARRAY_SIZE(ar5416Bank0_9160), 2);
764 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
765 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
766 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
767 ARRAY_SIZE(ar5416Bank1_9160), 2);
768 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
769 ARRAY_SIZE(ar5416Bank2_9160), 2);
770 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
771 ARRAY_SIZE(ar5416Bank3_9160), 3);
772 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
773 ARRAY_SIZE(ar5416Bank6_9160), 3);
774 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
775 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
776 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
777 ARRAY_SIZE(ar5416Bank7_9160), 2);
778 if (AR_SREV_9160_11(ah)) {
779 INIT_INI_ARRAY(&ah->iniAddac,
780 ar5416Addac_91601_1,
781 ARRAY_SIZE(ar5416Addac_91601_1), 2);
782 } else {
783 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
784 ARRAY_SIZE(ar5416Addac_9160), 2);
786 } else if (AR_SREV_9100_OR_LATER(ah)) {
787 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
788 ARRAY_SIZE(ar5416Modes_9100), 6);
789 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
790 ARRAY_SIZE(ar5416Common_9100), 2);
791 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
792 ARRAY_SIZE(ar5416Bank0_9100), 2);
793 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
794 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
795 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
796 ARRAY_SIZE(ar5416Bank1_9100), 2);
797 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
798 ARRAY_SIZE(ar5416Bank2_9100), 2);
799 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
800 ARRAY_SIZE(ar5416Bank3_9100), 3);
801 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
802 ARRAY_SIZE(ar5416Bank6_9100), 3);
803 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
804 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
805 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
806 ARRAY_SIZE(ar5416Bank7_9100), 2);
807 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
808 ARRAY_SIZE(ar5416Addac_9100), 2);
809 } else {
810 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
811 ARRAY_SIZE(ar5416Modes), 6);
812 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
813 ARRAY_SIZE(ar5416Common), 2);
814 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
815 ARRAY_SIZE(ar5416Bank0), 2);
816 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
817 ARRAY_SIZE(ar5416BB_RfGain), 3);
818 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
819 ARRAY_SIZE(ar5416Bank1), 2);
820 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
821 ARRAY_SIZE(ar5416Bank2), 2);
822 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
823 ARRAY_SIZE(ar5416Bank3), 3);
824 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
825 ARRAY_SIZE(ar5416Bank6), 3);
826 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
827 ARRAY_SIZE(ar5416Bank6TPC), 3);
828 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
829 ARRAY_SIZE(ar5416Bank7), 2);
830 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
831 ARRAY_SIZE(ar5416Addac), 2);
835 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
837 if (AR_SREV_9287_11_OR_LATER(ah))
838 INIT_INI_ARRAY(&ah->iniModesRxGain,
839 ar9287Modes_rx_gain_9287_1_1,
840 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
841 else if (AR_SREV_9287_10(ah))
842 INIT_INI_ARRAY(&ah->iniModesRxGain,
843 ar9287Modes_rx_gain_9287_1_0,
844 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
845 else if (AR_SREV_9280_20(ah))
846 ath9k_hw_init_rxgain_ini(ah);
848 if (AR_SREV_9287_11_OR_LATER(ah)) {
849 INIT_INI_ARRAY(&ah->iniModesTxGain,
850 ar9287Modes_tx_gain_9287_1_1,
851 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
852 } else if (AR_SREV_9287_10(ah)) {
853 INIT_INI_ARRAY(&ah->iniModesTxGain,
854 ar9287Modes_tx_gain_9287_1_0,
855 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
856 } else if (AR_SREV_9280_20(ah)) {
857 ath9k_hw_init_txgain_ini(ah);
858 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
859 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
861 /* txgain table */
862 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
863 INIT_INI_ARRAY(&ah->iniModesTxGain,
864 ar9285Modes_high_power_tx_gain_9285_1_2,
865 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
866 } else {
867 INIT_INI_ARRAY(&ah->iniModesTxGain,
868 ar9285Modes_original_tx_gain_9285_1_2,
869 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
875 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
877 u32 i, j;
879 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
880 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
882 /* EEPROM Fixup */
883 for (i = 0; i < ah->iniModes.ia_rows; i++) {
884 u32 reg = INI_RA(&ah->iniModes, i, 0);
886 for (j = 1; j < ah->iniModes.ia_columns; j++) {
887 u32 val = INI_RA(&ah->iniModes, i, j);
889 INI_RA(&ah->iniModes, i, j) =
890 ath9k_hw_ini_fixup(ah,
891 &ah->eeprom.def,
892 reg, val);
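/*
 * Top-level hardware initialization: verify the device and MAC IDs,
 * reset and wake the chip, load the mode/gain register tables, read
 * the EEPROM and MAC address, and set up calibration and ANI state.
 */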
898 int ath9k_hw_init(struct ath_hw *ah)
900 struct ath_common *common = ath9k_hw_common(ah);
901 int r = 0;
903 if (!ath9k_hw_devid_supported(ah->hw_version.devid)) {
904 ath_print(common, ATH_DBG_FATAL,
905 "Unsupported device ID: 0x%0x\n",
906 ah->hw_version.devid);
907 return -EOPNOTSUPP;
910 ath9k_hw_init_defaults(ah);
911 ath9k_hw_init_config(ah);
913 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
914 ath_print(common, ATH_DBG_FATAL,
915 "Couldn't reset chip\n");
916 return -EIO;
919 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
920 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
921 return -EIO;
924 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
925 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
926 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
927 ah->config.serialize_regmode =
928 SER_REG_MODE_ON;
929 } else {
930 ah->config.serialize_regmode =
931 SER_REG_MODE_OFF;
935 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
936 ah->config.serialize_regmode);
938 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
939 ath_print(common, ATH_DBG_FATAL,
940 "Mac Chip Rev 0x%02x.%x is not supported by "
941 "this driver\n", ah->hw_version.macVersion,
942 ah->hw_version.macRev);
943 return -EOPNOTSUPP;
946 if (AR_SREV_9100(ah)) {
947 ah->iq_caldata.calData = &iq_cal_multi_sample;
948 ah->supp_cals = IQ_MISMATCH_CAL;
949 ah->is_pciexpress = false;
952 if (AR_SREV_9271(ah))
953 ah->is_pciexpress = false;
955 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
957 ath9k_hw_init_cal_settings(ah);
959 ah->ani_function = ATH9K_ANI_ALL;
960 if (AR_SREV_9280_10_OR_LATER(ah))
961 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
963 ath9k_hw_init_mode_regs(ah);
965 if (ah->is_pciexpress)
966 ath9k_hw_configpcipowersave(ah, 0, 0);
967 else
968 ath9k_hw_disablepcie(ah);
970 /* Support for Japan ch.14 (2484) spread */
971 if (AR_SREV_9287_11_OR_LATER(ah)) {
972 INIT_INI_ARRAY(&ah->iniCckfirNormal,
973 ar9287Common_normal_cck_fir_coeff_92871_1,
974 ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
975 INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
976 ar9287Common_japan_2484_cck_fir_coeff_92871_1,
977 ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);
980 r = ath9k_hw_post_init(ah);
981 if (r)
982 return r;
984 ath9k_hw_init_mode_gain_regs(ah);
985 ath9k_hw_fill_cap_info(ah);
986 ath9k_hw_init_11a_eeprom_fix(ah);
988 r = ath9k_hw_init_macaddr(ah);
989 if (r) {
990 ath_print(common, ATH_DBG_FATAL,
991 "Failed to initialize MAC address\n");
992 return r;
995 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
996 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
997 else
998 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
1000 ath9k_init_nfcal_hist_buffer(ah);
1002 return 0;
1005 static void ath9k_hw_init_bb(struct ath_hw *ah,
1006 struct ath9k_channel *chan)
1008 u32 synthDelay;
1010 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1011 if (IS_CHAN_B(chan))
1012 synthDelay = (4 * synthDelay) / 22;
1013 else
1014 synthDelay /= 10;
1016 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1018 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1021 static void ath9k_hw_init_qos(struct ath_hw *ah)
1023 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1024 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1026 REG_WRITE(ah, AR_QOS_NO_ACK,
1027 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1028 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1029 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1031 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1032 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1033 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1034 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1035 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
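/*
 * Program the RTC PLL for the target channel: the reference divider,
 * clock select and divider depend on the chip family, the band and
 * half/quarter-rate channels; afterwards the derived sleep clock is
 * forced.
 */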
1038 static void ath9k_hw_init_pll(struct ath_hw *ah,
1039 struct ath9k_channel *chan)
1041 u32 pll;
1043 if (AR_SREV_9100(ah)) {
1044 if (chan && IS_CHAN_5GHZ(chan))
1045 pll = 0x1450;
1046 else
1047 pll = 0x1458;
1048 } else {
1049 if (AR_SREV_9280_10_OR_LATER(ah)) {
1050 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1052 if (chan && IS_CHAN_HALF_RATE(chan))
1053 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1054 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1055 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1057 if (chan && IS_CHAN_5GHZ(chan)) {
1058 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1061 if (AR_SREV_9280_20(ah)) {
1062 if (((chan->channel % 20) == 0)
1063 || ((chan->channel % 10) == 0))
1064 pll = 0x2850;
1065 else
1066 pll = 0x142c;
1068 } else {
1069 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1072 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1074 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1076 if (chan && IS_CHAN_HALF_RATE(chan))
1077 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1078 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1079 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1081 if (chan && IS_CHAN_5GHZ(chan))
1082 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1083 else
1084 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1085 } else {
1086 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1088 if (chan && IS_CHAN_HALF_RATE(chan))
1089 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1090 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1091 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1093 if (chan && IS_CHAN_5GHZ(chan))
1094 pll |= SM(0xa, AR_RTC_PLL_DIV);
1095 else
1096 pll |= SM(0xb, AR_RTC_PLL_DIV);
1099 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1101 udelay(RTC_PLL_SETTLE_DELAY);
1103 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
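/*
 * Apply the configured rx and tx chainmasks to the PHY, including the
 * alternate-chain swap workaround needed for 0x5 masks; pre-AR9280
 * parts enable all three chains for rx/calibration when a multi-chain
 * rx mask is requested.
 */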
1106 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1108 int rx_chainmask, tx_chainmask;
1110 rx_chainmask = ah->rxchainmask;
1111 tx_chainmask = ah->txchainmask;
1113 switch (rx_chainmask) {
1114 case 0x5:
1115 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1116 AR_PHY_SWAP_ALT_CHAIN);
1117 case 0x3:
1118 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1119 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1120 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1121 break;
1123 case 0x1:
1124 case 0x2:
1125 case 0x7:
1126 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1127 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1128 break;
1129 default:
1130 break;
1133 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1134 if (tx_chainmask == 0x5) {
1135 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1136 AR_PHY_SWAP_ALT_CHAIN);
1138 if (AR_SREV_9100(ah))
1139 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1140 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
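/*
 * Set up the default interrupt mask: TX/RX error and beacon-misc
 * interrupts are always enabled, RX completions are either mitigated
 * or per-packet, and AP mode additionally enables MIB interrupts.
 */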
1143 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1144 enum nl80211_iftype opmode)
1146 ah->mask_reg = AR_IMR_TXERR |
1147 AR_IMR_TXURN |
1148 AR_IMR_RXERR |
1149 AR_IMR_RXORN |
1150 AR_IMR_BCNMISC;
1152 if (ah->config.intr_mitigation)
1153 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1154 else
1155 ah->mask_reg |= AR_IMR_RXOK;
1157 ah->mask_reg |= AR_IMR_TXOK;
1159 if (opmode == NL80211_IFTYPE_AP)
1160 ah->mask_reg |= AR_IMR_MIB;
1162 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1163 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1165 if (!AR_SREV_9100(ah)) {
1166 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1167 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1168 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1172 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1174 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1175 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1176 "bad ack timeout %u\n", us);
1177 ah->acktimeout = (u32) -1;
1178 return false;
1179 } else {
1180 REG_RMW_FIELD(ah, AR_TIME_OUT,
1181 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1182 ah->acktimeout = us;
1183 return true;
1187 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1189 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1190 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1191 "bad cts timeout %u\n", us);
1192 ah->ctstimeout = (u32) -1;
1193 return false;
1194 } else {
1195 REG_RMW_FIELD(ah, AR_TIME_OUT,
1196 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1197 ah->ctstimeout = us;
1198 return true;
1202 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1204 if (tu > 0xFFFF) {
1205 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
1206 "bad global tx timeout %u\n", tu);
1207 ah->globaltxtimeout = (u32) -1;
1208 return false;
1209 } else {
1210 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1211 ah->globaltxtimeout = tu;
1212 return true;
1216 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1218 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1219 ah->misc_mode);
1221 if (ah->misc_mode != 0)
1222 REG_WRITE(ah, AR_PCU_MISC,
1223 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1224 if (ah->slottime != (u32) -1)
1225 ath9k_hw_setslottime(ah, ah->slottime);
1226 if (ah->acktimeout != (u32) -1)
1227 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1228 if (ah->ctstimeout != (u32) -1)
1229 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1230 if (ah->globaltxtimeout != (u32) -1)
1231 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1234 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1236 return vendorid == ATHEROS_VENDOR_ID ?
1237 ath9k_hw_devname(devid) : NULL;
1240 void ath9k_hw_detach(struct ath_hw *ah)
1242 if (!AR_SREV_9100(ah))
1243 ath9k_hw_ani_disable(ah);
1245 ath9k_hw_rf_free(ah);
1246 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1247 kfree(ah);
1248 ah = NULL;
1250 EXPORT_SYMBOL(ath9k_hw_detach);
1252 /*******/
1253 /* INI */
1254 /*******/
1256 static void ath9k_hw_override_ini(struct ath_hw *ah,
1257 struct ath9k_channel *chan)
1259 u32 val;
1261 if (AR_SREV_9271(ah)) {
1263 * Enable spectral scan as a workaround for issues with stuck
1264 * beacons on AR9271 1.0. The stuck beacon issue is not seen on
1265 * AR9271 1.1.
1267 if (AR_SREV_9271_10(ah)) {
1268 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1269 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1271 else if (AR_SREV_9271_11(ah))
1273 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1274 * present on AR9271 1.1
1276 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1277 return;
1281 * Set the RX_ABORT and RX_DIS bits and clear them only after
1282 * RXE is set for the MAC. This prevents frames with corrupted
1283 * descriptor status.
1285 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1287 if (AR_SREV_9280_10_OR_LATER(ah)) {
1288 val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1289 (~AR_PCU_MISC_MODE2_HWWAR1);
1291 if (AR_SREV_9287_10_OR_LATER(ah))
1292 val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1294 REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1297 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1298 AR_SREV_9280_10_OR_LATER(ah))
1299 return;
1301 * Disable BB clock gating
1302 * Necessary to avoid issues on AR5416 2.0
1304 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1307 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1308 struct ar5416_eeprom_def *pEepData,
1309 u32 reg, u32 value)
1311 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1312 struct ath_common *common = ath9k_hw_common(ah);
1314 switch (ah->hw_version.devid) {
1315 case AR9280_DEVID_PCI:
1316 if (reg == 0x7894) {
1317 ath_print(common, ATH_DBG_EEPROM,
1318 "ini VAL: %x EEPROM: %x\n", value,
1319 (pBase->version & 0xff));
1321 if ((pBase->version & 0xff) > 0x0a) {
1322 ath_print(common, ATH_DBG_EEPROM,
1323 "PWDCLKIND: %d\n",
1324 pBase->pwdclkind);
1325 value &= ~AR_AN_TOP2_PWDCLKIND;
1326 value |= AR_AN_TOP2_PWDCLKIND &
1327 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1328 } else {
1329 ath_print(common, ATH_DBG_EEPROM,
1330 "PWDCLKIND Earlier Rev\n");
1333 ath_print(common, ATH_DBG_EEPROM,
1334 "final ini VAL: %x\n", value);
1336 break;
1339 return value;
1342 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1343 struct ar5416_eeprom_def *pEepData,
1344 u32 reg, u32 value)
1346 if (ah->eep_map == EEP_MAP_4KBITS)
1347 return value;
1348 else
1349 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
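/*
 * Initialize open-loop transmit power control: AR9287 and later switch
 * the analog TX power control to temperature-sense mode, while the
 * earlier OLC chips (AR9280) snapshot the TX gain table and reset the
 * PDADC delta.
 */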
1352 static void ath9k_olc_init(struct ath_hw *ah)
1354 u32 i;
1356 if (OLC_FOR_AR9287_10_LATER) {
1357 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1358 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1359 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1360 AR9287_AN_TXPC0_TXPCMODE,
1361 AR9287_AN_TXPC0_TXPCMODE_S,
1362 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1363 udelay(100);
1364 } else {
1365 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1366 ah->originalGain[i] =
1367 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1368 AR_PHY_TX_GAIN);
1369 ah->PDADCdelta = 0;
1373 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1374 struct ath9k_channel *chan)
1376 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1378 if (IS_CHAN_B(chan))
1379 ctl |= CTL_11B;
1380 else if (IS_CHAN_G(chan))
1381 ctl |= CTL_11G;
1382 else
1383 ctl |= CTL_11A;
1385 return ctl;
1388 static int ath9k_hw_process_ini(struct ath_hw *ah,
1389 struct ath9k_channel *chan)
1391 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1392 int i, regWrites = 0;
1393 struct ieee80211_channel *channel = chan->chan;
1394 u32 modesIndex, freqIndex;
1396 switch (chan->chanmode) {
1397 case CHANNEL_A:
1398 case CHANNEL_A_HT20:
1399 modesIndex = 1;
1400 freqIndex = 1;
1401 break;
1402 case CHANNEL_A_HT40PLUS:
1403 case CHANNEL_A_HT40MINUS:
1404 modesIndex = 2;
1405 freqIndex = 1;
1406 break;
1407 case CHANNEL_G:
1408 case CHANNEL_G_HT20:
1409 case CHANNEL_B:
1410 modesIndex = 4;
1411 freqIndex = 2;
1412 break;
1413 case CHANNEL_G_HT40PLUS:
1414 case CHANNEL_G_HT40MINUS:
1415 modesIndex = 3;
1416 freqIndex = 2;
1417 break;
1419 default:
1420 return -EINVAL;
1423 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1424 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1425 ah->eep_ops->set_addac(ah, chan);
1427 if (AR_SREV_5416_22_OR_LATER(ah)) {
1428 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1429 } else {
1430 struct ar5416IniArray temp;
1431 u32 addacSize =
1432 sizeof(u32) * ah->iniAddac.ia_rows *
1433 ah->iniAddac.ia_columns;
1435 memcpy(ah->addac5416_21,
1436 ah->iniAddac.ia_array, addacSize);
1438 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1440 temp.ia_array = ah->addac5416_21;
1441 temp.ia_columns = ah->iniAddac.ia_columns;
1442 temp.ia_rows = ah->iniAddac.ia_rows;
1443 REG_WRITE_ARRAY(&temp, 1, regWrites);
1446 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1448 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1449 u32 reg = INI_RA(&ah->iniModes, i, 0);
1450 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1452 REG_WRITE(ah, reg, val);
1454 if (reg >= 0x7800 && reg < 0x78a0
1455 && ah->config.analog_shiftreg) {
1456 udelay(100);
1459 DO_DELAY(regWrites);
1462 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1463 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1465 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1466 AR_SREV_9287_10_OR_LATER(ah))
1467 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1469 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1470 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1471 u32 val = INI_RA(&ah->iniCommon, i, 1);
1473 REG_WRITE(ah, reg, val);
1475 if (reg >= 0x7800 && reg < 0x78a0
1476 && ah->config.analog_shiftreg) {
1477 udelay(100);
1480 DO_DELAY(regWrites);
1483 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1485 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1486 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1487 regWrites);
1490 ath9k_hw_override_ini(ah, chan);
1491 ath9k_hw_set_regs(ah, chan);
1492 ath9k_hw_init_chain_masks(ah);
1494 if (OLC_FOR_AR9280_20_LATER)
1495 ath9k_olc_init(ah);
1497 ah->eep_ops->set_txpower(ah, chan,
1498 ath9k_regd_get_ctl(regulatory, chan),
1499 channel->max_antenna_gain * 2,
1500 channel->max_power * 2,
1501 min((u32) MAX_RATE_POWER,
1502 (u32) regulatory->power_limit));
1504 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1505 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
1506 "ar5416SetRfRegs failed\n");
1507 return -EIO;
1510 return 0;
1513 /****************************************/
1514 /* Reset and Channel Switching Routines */
1515 /****************************************/
1517 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1519 u32 rfMode = 0;
1521 if (chan == NULL)
1522 return;
1524 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1525 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1527 if (!AR_SREV_9280_10_OR_LATER(ah))
1528 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1529 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1531 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1532 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1534 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1537 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1539 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1542 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1544 u32 regval;
1547 * set AHB_MODE not to do cacheline prefetches
1549 regval = REG_READ(ah, AR_AHB_MODE);
1550 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1553 * let mac dma reads be in 128 byte chunks
1555 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1556 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1559 * Restore TX Trigger Level to its pre-reset value.
1560 * The initial value depends on whether aggregation is enabled, and is
1561 * adjusted whenever underruns are detected.
1563 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1566 * let mac dma writes be in 128 byte chunks
1568 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1569 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1572 * Setup receive FIFO threshold to hold off TX activities
1574 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1577 * reduce the number of usable entries in PCU TXBUF to avoid
1578 * wrap around issues.
1580 if (AR_SREV_9285(ah)) {
1581 /* For AR9285 the number of FIFOs is reduced to half,
1582 * so set the usable tx buf size to half as well to
1583 * avoid data/delimiter underruns
1585 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1586 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1587 } else if (!AR_SREV_9271(ah)) {
1588 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1589 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1593 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1595 u32 val;
1597 val = REG_READ(ah, AR_STA_ID1);
1598 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1599 switch (opmode) {
1600 case NL80211_IFTYPE_AP:
1601 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1602 | AR_STA_ID1_KSRCH_MODE);
1603 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1604 break;
1605 case NL80211_IFTYPE_ADHOC:
1606 case NL80211_IFTYPE_MESH_POINT:
1607 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1608 | AR_STA_ID1_KSRCH_MODE);
1609 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1610 break;
1611 case NL80211_IFTYPE_STATION:
1612 case NL80211_IFTYPE_MONITOR:
1613 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1614 break;
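/*
 * Split the scaled delta-slope coefficient into the mantissa/exponent
 * encoding expected by the PHY timing registers.
 */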
1618 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1619 u32 coef_scaled,
1620 u32 *coef_mantissa,
1621 u32 *coef_exponent)
1623 u32 coef_exp, coef_man;
1625 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1626 if ((coef_scaled >> coef_exp) & 0x1)
1627 break;
1629 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1631 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1633 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1634 *coef_exponent = coef_exp - 16;
1637 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1638 struct ath9k_channel *chan)
1640 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1641 u32 clockMhzScaled = 0x64000000;
1642 struct chan_centers centers;
1644 if (IS_CHAN_HALF_RATE(chan))
1645 clockMhzScaled = clockMhzScaled >> 1;
1646 else if (IS_CHAN_QUARTER_RATE(chan))
1647 clockMhzScaled = clockMhzScaled >> 2;
1649 ath9k_hw_get_channel_centers(ah, chan, &centers);
1650 coef_scaled = clockMhzScaled / centers.synth_center;
1652 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1653 &ds_coef_exp);
1655 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1656 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1657 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1658 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1660 coef_scaled = (9 * coef_scaled) / 10;
1662 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1663 &ds_coef_exp);
1665 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1666 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1667 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1668 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
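/*
 * Perform a warm or cold MAC reset through the RTC reset control
 * register: force the chip awake, assert the reset lines, then wait
 * for AR_RTC_RC to clear.
 */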
1671 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1673 u32 rst_flags;
1674 u32 tmpReg;
1676 if (AR_SREV_9100(ah)) {
1677 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1678 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1679 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1680 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1681 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1684 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1685 AR_RTC_FORCE_WAKE_ON_INT);
1687 if (AR_SREV_9100(ah)) {
1688 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1689 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1690 } else {
1691 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1692 if (tmpReg &
1693 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1694 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1695 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1696 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1697 } else {
1698 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1701 rst_flags = AR_RTC_RC_MAC_WARM;
1702 if (type == ATH9K_RESET_COLD)
1703 rst_flags |= AR_RTC_RC_MAC_COLD;
1706 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1707 udelay(50);
1709 REG_WRITE(ah, AR_RTC_RC, 0);
1710 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1711 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1712 "RTC stuck in MAC reset\n");
1713 return false;
1716 if (!AR_SREV_9100(ah))
1717 REG_WRITE(ah, AR_RC, 0);
1719 if (AR_SREV_9100(ah))
1720 udelay(50);
1722 return true;
1725 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1727 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1728 AR_RTC_FORCE_WAKE_ON_INT);
1730 if (!AR_SREV_9100(ah))
1731 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1733 REG_WRITE(ah, AR_RTC_RESET, 0);
1734 udelay(2);
1736 if (!AR_SREV_9100(ah))
1737 REG_WRITE(ah, AR_RC, 0);
1739 REG_WRITE(ah, AR_RTC_RESET, 1);
1741 if (!ath9k_hw_wait(ah,
1742 AR_RTC_STATUS,
1743 AR_RTC_STATUS_M,
1744 AR_RTC_STATUS_ON,
1745 AH_WAIT_TIMEOUT)) {
1746 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
1747 "RTC not waking up\n");
1748 return false;
1751 ath9k_hw_read_revisions(ah);
1753 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1756 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1758 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1759 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1761 switch (type) {
1762 case ATH9K_RESET_POWER_ON:
1763 return ath9k_hw_set_reset_power_on(ah);
1764 case ATH9K_RESET_WARM:
1765 case ATH9K_RESET_COLD:
1766 return ath9k_hw_set_reset(ah, type);
1767 default:
1768 return false;
1772 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
1774 u32 phymode;
1775 u32 enableDacFifo = 0;
1777 if (AR_SREV_9285_10_OR_LATER(ah))
1778 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1779 AR_PHY_FC_ENABLE_DAC_FIFO);
1781 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1782 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1784 if (IS_CHAN_HT40(chan)) {
1785 phymode |= AR_PHY_FC_DYN2040_EN;
1787 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1788 (chan->chanmode == CHANNEL_G_HT40PLUS))
1789 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1792 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1794 ath9k_hw_set11nmac2040(ah);
1796 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1797 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1800 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1801 struct ath9k_channel *chan)
1803 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1804 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1805 return false;
1806 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1807 return false;
1809 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1810 return false;
1812 ah->chip_fullsleep = false;
1813 ath9k_hw_init_pll(ah, chan);
1814 ath9k_hw_set_rfmode(ah, chan);
1816 return true;
1819 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1820 struct ath9k_channel *chan)
1822 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1823 struct ath_common *common = ath9k_hw_common(ah);
1824 struct ieee80211_channel *channel = chan->chan;
1825 u32 synthDelay, qnum;
1827 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1828 if (ath9k_hw_numtxpending(ah, qnum)) {
1829 ath_print(common, ATH_DBG_QUEUE,
1830 "Transmit frames pending on "
1831 "queue %d\n", qnum);
1832 return false;
1836 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1837 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1838 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1839 ath_print(common, ATH_DBG_FATAL,
1840 "Could not kill baseband RX\n");
1841 return false;
1844 ath9k_hw_set_regs(ah, chan);
1846 if (AR_SREV_9280_10_OR_LATER(ah)) {
1847 ath9k_hw_ar9280_set_channel(ah, chan);
1848 } else {
1849 if (!(ath9k_hw_set_channel(ah, chan))) {
1850 ath_print(common, ATH_DBG_FATAL,
1851 "Failed to set channel\n");
1852 return false;
1856 ah->eep_ops->set_txpower(ah, chan,
1857 ath9k_regd_get_ctl(regulatory, chan),
1858 channel->max_antenna_gain * 2,
1859 channel->max_power * 2,
1860 min((u32) MAX_RATE_POWER,
1861 (u32) regulatory->power_limit));
1863 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1864 if (IS_CHAN_B(chan))
1865 synthDelay = (4 * synthDelay) / 22;
1866 else
1867 synthDelay /= 10;
1869 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1871 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1873 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1874 ath9k_hw_set_delta_slope(ah, chan);
1876 if (AR_SREV_9280_10_OR_LATER(ah))
1877 ath9k_hw_9280_spur_mitigate(ah, chan);
1878 else
1879 ath9k_hw_spur_mitigate(ah, chan);
1881 if (!chan->oneTimeCalsDone)
1882 chan->oneTimeCalsDone = true;
1884 return true;
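/*
 * AR9280 spur mitigation: scan the EEPROM spur channels for one close
 * enough to the synthesizer frequency and, if found, program the spur
 * RSSI/filter controls plus the pilot, channel and Viterbi bin masks
 * around the spur bin.
 */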
1887 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1889 int bb_spur = AR_NO_SPUR;
1890 int freq;
1891 int bin, cur_bin;
1892 int bb_spur_off, spur_subchannel_sd;
1893 int spur_freq_sd;
1894 int spur_delta_phase;
1895 int denominator;
1896 int upper, lower, cur_vit_mask;
1897 int tmp, newVal;
1898 int i;
1899 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1900 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1902 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1903 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1905 int inc[4] = { 0, 100, 0, 0 };
1906 struct chan_centers centers;
1908 int8_t mask_m[123];
1909 int8_t mask_p[123];
1910 int8_t mask_amt;
1911 int tmp_mask;
1912 int cur_bb_spur;
1913 bool is2GHz = IS_CHAN_2GHZ(chan);
1915 memset(&mask_m, 0, sizeof(int8_t) * 123);
1916 memset(&mask_p, 0, sizeof(int8_t) * 123);
1918 ath9k_hw_get_channel_centers(ah, chan, &centers);
1919 freq = centers.synth_center;
1921 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1922 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1923 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1925 if (is2GHz)
1926 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1927 else
1928 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1930 if (AR_NO_SPUR == cur_bb_spur)
1931 break;
1932 cur_bb_spur = cur_bb_spur - freq;
1934 if (IS_CHAN_HT40(chan)) {
1935 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1936 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1937 bb_spur = cur_bb_spur;
1938 break;
1940 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1941 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1942 bb_spur = cur_bb_spur;
1943 break;
1947 if (AR_NO_SPUR == bb_spur) {
1948 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1949 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1950 return;
1951 } else {
1952 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1953 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1956 bin = bb_spur * 320;
1958 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1960 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1961 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1962 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1963 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1964 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1966 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1967 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1968 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1969 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1970 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1971 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1973 if (IS_CHAN_HT40(chan)) {
1974 if (bb_spur < 0) {
1975 spur_subchannel_sd = 1;
1976 bb_spur_off = bb_spur + 10;
1977 } else {
1978 spur_subchannel_sd = 0;
1979 bb_spur_off = bb_spur - 10;
1981 } else {
1982 spur_subchannel_sd = 0;
1983 bb_spur_off = bb_spur;
1986 if (IS_CHAN_HT40(chan))
1987 spur_delta_phase =
1988 ((bb_spur * 262144) /
1989 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1990 else
1991 spur_delta_phase =
1992 ((bb_spur * 524288) /
1993 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1995 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1996 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1998 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1999 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2000 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2001 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
2003 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
2004 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
2006 cur_bin = -6000;
2007 upper = bin + 100;
2008 lower = bin - 100;
2010 for (i = 0; i < 4; i++) {
2011 int pilot_mask = 0;
2012 int chan_mask = 0;
2013 int bp = 0;
2014 for (bp = 0; bp < 30; bp++) {
2015 if ((cur_bin > lower) && (cur_bin < upper)) {
2016 pilot_mask = pilot_mask | 0x1 << bp;
2017 chan_mask = chan_mask | 0x1 << bp;
2019 cur_bin += 100;
2021 cur_bin += inc[i];
2022 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2023 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2026 cur_vit_mask = 6100;
2027 upper = bin + 120;
2028 lower = bin - 120;
2030 for (i = 0; i < 123; i++) {
2031 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2033 /* workaround for gcc bug #37014 */
2034 volatile int tmp_v = abs(cur_vit_mask - bin);
2036 if (tmp_v < 75)
2037 mask_amt = 1;
2038 else
2039 mask_amt = 0;
2040 if (cur_vit_mask < 0)
2041 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2042 else
2043 mask_p[cur_vit_mask / 100] = mask_amt;
2045 cur_vit_mask -= 100;
2048 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2049 | (mask_m[48] << 26) | (mask_m[49] << 24)
2050 | (mask_m[50] << 22) | (mask_m[51] << 20)
2051 | (mask_m[52] << 18) | (mask_m[53] << 16)
2052 | (mask_m[54] << 14) | (mask_m[55] << 12)
2053 | (mask_m[56] << 10) | (mask_m[57] << 8)
2054 | (mask_m[58] << 6) | (mask_m[59] << 4)
2055 | (mask_m[60] << 2) | (mask_m[61] << 0);
2056 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2057 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2059 tmp_mask = (mask_m[31] << 28)
2060 | (mask_m[32] << 26) | (mask_m[33] << 24)
2061 | (mask_m[34] << 22) | (mask_m[35] << 20)
2062 | (mask_m[36] << 18) | (mask_m[37] << 16)
2063 | (mask_m[48] << 14) | (mask_m[39] << 12)
2064 | (mask_m[40] << 10) | (mask_m[41] << 8)
2065 | (mask_m[42] << 6) | (mask_m[43] << 4)
2066 | (mask_m[44] << 2) | (mask_m[45] << 0);
2067 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2068 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2070 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2071 | (mask_m[18] << 26) | (mask_m[18] << 24)
2072 | (mask_m[20] << 22) | (mask_m[20] << 20)
2073 | (mask_m[22] << 18) | (mask_m[22] << 16)
2074 | (mask_m[24] << 14) | (mask_m[24] << 12)
2075 | (mask_m[25] << 10) | (mask_m[26] << 8)
2076 | (mask_m[27] << 6) | (mask_m[28] << 4)
2077 | (mask_m[29] << 2) | (mask_m[30] << 0);
2078 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2079 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2081 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2082 | (mask_m[2] << 26) | (mask_m[3] << 24)
2083 | (mask_m[4] << 22) | (mask_m[5] << 20)
2084 | (mask_m[6] << 18) | (mask_m[7] << 16)
2085 | (mask_m[8] << 14) | (mask_m[9] << 12)
2086 | (mask_m[10] << 10) | (mask_m[11] << 8)
2087 | (mask_m[12] << 6) | (mask_m[13] << 4)
2088 | (mask_m[14] << 2) | (mask_m[15] << 0);
2089 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2090 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2092 tmp_mask = (mask_p[15] << 28)
2093 | (mask_p[14] << 26) | (mask_p[13] << 24)
2094 | (mask_p[12] << 22) | (mask_p[11] << 20)
2095 | (mask_p[10] << 18) | (mask_p[9] << 16)
2096 | (mask_p[8] << 14) | (mask_p[7] << 12)
2097 | (mask_p[6] << 10) | (mask_p[5] << 8)
2098 | (mask_p[4] << 6) | (mask_p[3] << 4)
2099 | (mask_p[2] << 2) | (mask_p[1] << 0);
2100 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2101 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2103 tmp_mask = (mask_p[30] << 28)
2104 | (mask_p[29] << 26) | (mask_p[28] << 24)
2105 | (mask_p[27] << 22) | (mask_p[26] << 20)
2106 | (mask_p[25] << 18) | (mask_p[24] << 16)
2107 | (mask_p[23] << 14) | (mask_p[22] << 12)
2108 | (mask_p[21] << 10) | (mask_p[20] << 8)
2109 | (mask_p[19] << 6) | (mask_p[18] << 4)
2110 | (mask_p[17] << 2) | (mask_p[16] << 0);
2111 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2112 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2114 tmp_mask = (mask_p[45] << 28)
2115 | (mask_p[44] << 26) | (mask_p[43] << 24)
2116 | (mask_p[42] << 22) | (mask_p[41] << 20)
2117 | (mask_p[40] << 18) | (mask_p[39] << 16)
2118 | (mask_p[38] << 14) | (mask_p[37] << 12)
2119 | (mask_p[36] << 10) | (mask_p[35] << 8)
2120 | (mask_p[34] << 6) | (mask_p[33] << 4)
2121 | (mask_p[32] << 2) | (mask_p[31] << 0);
2122 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2123 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2125 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2126 | (mask_p[59] << 26) | (mask_p[58] << 24)
2127 | (mask_p[57] << 22) | (mask_p[56] << 20)
2128 | (mask_p[55] << 18) | (mask_p[54] << 16)
2129 | (mask_p[53] << 14) | (mask_p[52] << 12)
2130 | (mask_p[51] << 10) | (mask_p[50] << 8)
2131 | (mask_p[49] << 6) | (mask_p[48] << 4)
2132 | (mask_p[47] << 2) | (mask_p[46] << 0);
2133 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2134 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
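/*
 * Editor's note (illustrative, not upstream code): a worked example of the
 * AR9280 spur arithmetic above. With a spur reported 2 MHz above the
 * synthesized centre frequency, bb_spur = 2 and bin = 2 * 320 = 640. The
 * pilot/channel mask loop then steps cur_bin upward from -6000 in units of
 * 100 (plus the per-group inc[] offset) and sets a mask bit for every
 * sampled position that falls strictly between bin - 100 and bin + 100,
 * i.e. in a narrow window around the spur.
 */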
2137 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2139 int bb_spur = AR_NO_SPUR;
2140 int bin, cur_bin;
2141 int spur_freq_sd;
2142 int spur_delta_phase;
2143 int denominator;
2144 int upper, lower, cur_vit_mask;
2145 int tmp, new;
2146 int i;
2147 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2148 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2150 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2151 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2153 int inc[4] = { 0, 100, 0, 0 };
2155 int8_t mask_m[123];
2156 int8_t mask_p[123];
2157 int8_t mask_amt;
2158 int tmp_mask;
2159 int cur_bb_spur;
2160 bool is2GHz = IS_CHAN_2GHZ(chan);
2162 memset(&mask_m, 0, sizeof(int8_t) * 123);
2163 memset(&mask_p, 0, sizeof(int8_t) * 123);
2165 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2166 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2167 if (AR_NO_SPUR == cur_bb_spur)
2168 break;
2169 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2170 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2171 bb_spur = cur_bb_spur;
2172 break;
2176 if (AR_NO_SPUR == bb_spur)
2177 return;
2179 bin = bb_spur * 32;
2181 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2182 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2183 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2184 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2185 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2187 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2189 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2190 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2191 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2192 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2193 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2194 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2196 spur_delta_phase = ((bb_spur * 524288) / 100) &
2197 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2199 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2200 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2202 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2203 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2204 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2205 REG_WRITE(ah, AR_PHY_TIMING11, new);
2207 cur_bin = -6000;
2208 upper = bin + 100;
2209 lower = bin - 100;
2211 for (i = 0; i < 4; i++) {
2212 int pilot_mask = 0;
2213 int chan_mask = 0;
2214 int bp = 0;
2215 for (bp = 0; bp < 30; bp++) {
2216 if ((cur_bin > lower) && (cur_bin < upper)) {
2217 pilot_mask = pilot_mask | 0x1 << bp;
2218 chan_mask = chan_mask | 0x1 << bp;
2220 cur_bin += 100;
2222 cur_bin += inc[i];
2223 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2224 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2227 cur_vit_mask = 6100;
2228 upper = bin + 120;
2229 lower = bin - 120;
2231 for (i = 0; i < 123; i++) {
2232 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2234 /* workaround for gcc bug #37014 */
2235 volatile int tmp_v = abs(cur_vit_mask - bin);
2237 if (tmp_v < 75)
2238 mask_amt = 1;
2239 else
2240 mask_amt = 0;
2241 if (cur_vit_mask < 0)
2242 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2243 else
2244 mask_p[cur_vit_mask / 100] = mask_amt;
2246 cur_vit_mask -= 100;
2249 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2250 | (mask_m[48] << 26) | (mask_m[49] << 24)
2251 | (mask_m[50] << 22) | (mask_m[51] << 20)
2252 | (mask_m[52] << 18) | (mask_m[53] << 16)
2253 | (mask_m[54] << 14) | (mask_m[55] << 12)
2254 | (mask_m[56] << 10) | (mask_m[57] << 8)
2255 | (mask_m[58] << 6) | (mask_m[59] << 4)
2256 | (mask_m[60] << 2) | (mask_m[61] << 0);
2257 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2258 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2260 tmp_mask = (mask_m[31] << 28)
2261 | (mask_m[32] << 26) | (mask_m[33] << 24)
2262 | (mask_m[34] << 22) | (mask_m[35] << 20)
2263 | (mask_m[36] << 18) | (mask_m[37] << 16)
2264 | (mask_m[48] << 14) | (mask_m[39] << 12)
2265 | (mask_m[40] << 10) | (mask_m[41] << 8)
2266 | (mask_m[42] << 6) | (mask_m[43] << 4)
2267 | (mask_m[44] << 2) | (mask_m[45] << 0);
2268 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2269 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2271 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2272 | (mask_m[18] << 26) | (mask_m[18] << 24)
2273 | (mask_m[20] << 22) | (mask_m[20] << 20)
2274 | (mask_m[22] << 18) | (mask_m[22] << 16)
2275 | (mask_m[24] << 14) | (mask_m[24] << 12)
2276 | (mask_m[25] << 10) | (mask_m[26] << 8)
2277 | (mask_m[27] << 6) | (mask_m[28] << 4)
2278 | (mask_m[29] << 2) | (mask_m[30] << 0);
2279 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2280 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2282 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2283 | (mask_m[2] << 26) | (mask_m[3] << 24)
2284 | (mask_m[4] << 22) | (mask_m[5] << 20)
2285 | (mask_m[6] << 18) | (mask_m[7] << 16)
2286 | (mask_m[8] << 14) | (mask_m[9] << 12)
2287 | (mask_m[10] << 10) | (mask_m[11] << 8)
2288 | (mask_m[12] << 6) | (mask_m[13] << 4)
2289 | (mask_m[14] << 2) | (mask_m[15] << 0);
2290 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2291 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2293 tmp_mask = (mask_p[15] << 28)
2294 | (mask_p[14] << 26) | (mask_p[13] << 24)
2295 | (mask_p[12] << 22) | (mask_p[11] << 20)
2296 | (mask_p[10] << 18) | (mask_p[9] << 16)
2297 | (mask_p[8] << 14) | (mask_p[7] << 12)
2298 | (mask_p[6] << 10) | (mask_p[5] << 8)
2299 | (mask_p[4] << 6) | (mask_p[3] << 4)
2300 | (mask_p[2] << 2) | (mask_p[1] << 0);
2301 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2302 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2304 tmp_mask = (mask_p[30] << 28)
2305 | (mask_p[29] << 26) | (mask_p[28] << 24)
2306 | (mask_p[27] << 22) | (mask_p[26] << 20)
2307 | (mask_p[25] << 18) | (mask_p[24] << 16)
2308 | (mask_p[23] << 14) | (mask_p[22] << 12)
2309 | (mask_p[21] << 10) | (mask_p[20] << 8)
2310 | (mask_p[19] << 6) | (mask_p[18] << 4)
2311 | (mask_p[17] << 2) | (mask_p[16] << 0);
2312 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2313 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2315 tmp_mask = (mask_p[45] << 28)
2316 | (mask_p[44] << 26) | (mask_p[43] << 24)
2317 | (mask_p[42] << 22) | (mask_p[41] << 20)
2318 | (mask_p[40] << 18) | (mask_p[39] << 16)
2319 | (mask_p[38] << 14) | (mask_p[37] << 12)
2320 | (mask_p[36] << 10) | (mask_p[35] << 8)
2321 | (mask_p[34] << 6) | (mask_p[33] << 4)
2322 | (mask_p[32] << 2) | (mask_p[31] << 0);
2323 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2324 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2326 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2327 | (mask_p[59] << 26) | (mask_p[58] << 24)
2328 | (mask_p[57] << 22) | (mask_p[56] << 20)
2329 | (mask_p[55] << 18) | (mask_p[54] << 16)
2330 | (mask_p[53] << 14) | (mask_p[52] << 12)
2331 | (mask_p[51] << 10) | (mask_p[50] << 8)
2332 | (mask_p[49] << 6) | (mask_p[48] << 4)
2333 | (mask_p[47] << 2) | (mask_p[46] << 0);
2334 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2335 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2338 static void ath9k_enable_rfkill(struct ath_hw *ah)
2340 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2341 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2343 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2344 AR_GPIO_INPUT_MUX2_RFSILENT);
2346 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2347 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2350 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2351 bool bChannelChange)
2353 struct ath_common *common = ath9k_hw_common(ah);
2354 u32 saveLedState;
2355 struct ath9k_channel *curchan = ah->curchan;
2356 u32 saveDefAntenna;
2357 u32 macStaId1;
2358 u64 tsf = 0;
2359 int i, rx_chainmask, r;
2361 ah->txchainmask = common->tx_chainmask;
2362 ah->rxchainmask = common->rx_chainmask;
2364 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2365 return -EIO;
2367 if (curchan && !ah->chip_fullsleep)
2368 ath9k_hw_getnf(ah, curchan);
2370 if (bChannelChange &&
2371 (ah->chip_fullsleep != true) &&
2372 (ah->curchan != NULL) &&
2373 (chan->channel != ah->curchan->channel) &&
2374 ((chan->channelFlags & CHANNEL_ALL) ==
2375 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2376 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2377 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2379 if (ath9k_hw_channel_change(ah, chan)) {
2380 ath9k_hw_loadnf(ah, ah->curchan);
2381 ath9k_hw_start_nfcal(ah);
2382 return 0;
2386 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2387 if (saveDefAntenna == 0)
2388 saveDefAntenna = 1;
2390 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2392 /* For chips on which RTC reset is done, save TSF before it gets cleared */
2393 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2394 tsf = ath9k_hw_gettsf64(ah);
2396 saveLedState = REG_READ(ah, AR_CFG_LED) &
2397 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2398 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2400 ath9k_hw_mark_phy_inactive(ah);
2402 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2403 REG_WRITE(ah,
2404 AR9271_RESET_POWER_DOWN_CONTROL,
2405 AR9271_RADIO_RF_RST);
2406 udelay(50);
2409 if (!ath9k_hw_chip_reset(ah, chan)) {
2410 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
2411 return -EINVAL;
2414 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2415 ah->htc_reset_init = false;
2416 REG_WRITE(ah,
2417 AR9271_RESET_POWER_DOWN_CONTROL,
2418 AR9271_GATE_MAC_CTL);
2419 udelay(50);
2422 /* Restore TSF */
2423 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2424 ath9k_hw_settsf64(ah, tsf);
2426 if (AR_SREV_9280_10_OR_LATER(ah))
2427 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2429 if (AR_SREV_9287_12_OR_LATER(ah)) {
2430 /* Enable ASYNC FIFO */
2431 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2432 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2433 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2434 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2435 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2436 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2437 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2439 r = ath9k_hw_process_ini(ah, chan);
2440 if (r)
2441 return r;
2443 /* Setup MFP options for CCMP */
2444 if (AR_SREV_9280_20_OR_LATER(ah)) {
2445 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2446 * frames when constructing CCMP AAD. */
2447 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2448 0xc7ff);
2449 ah->sw_mgmt_crypto = false;
2450 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2451 /* Disable hardware crypto for management frames */
2452 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2453 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2454 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2455 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2456 ah->sw_mgmt_crypto = true;
2457 } else
2458 ah->sw_mgmt_crypto = true;
2460 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2461 ath9k_hw_set_delta_slope(ah, chan);
2463 if (AR_SREV_9280_10_OR_LATER(ah))
2464 ath9k_hw_9280_spur_mitigate(ah, chan);
2465 else
2466 ath9k_hw_spur_mitigate(ah, chan);
2468 ah->eep_ops->set_board_values(ah, chan);
2470 ath9k_hw_decrease_chain_power(ah, chan);
2472 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
2473 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
2474 | macStaId1
2475 | AR_STA_ID1_RTS_USE_DEF
2476 | (ah->config.
2477 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2478 | ah->sta_id1_defaults);
2479 ath9k_hw_set_operating_mode(ah, ah->opmode);
2481 ath_hw_setbssidmask(common);
2483 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2485 ath9k_hw_write_associd(ah);
2487 REG_WRITE(ah, AR_ISR, ~0);
2489 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2491 if (AR_SREV_9280_10_OR_LATER(ah))
2492 ath9k_hw_ar9280_set_channel(ah, chan);
2493 else
2494 if (!(ath9k_hw_set_channel(ah, chan)))
2495 return -EIO;
2497 for (i = 0; i < AR_NUM_DCU; i++)
2498 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2500 ah->intr_txqs = 0;
2501 for (i = 0; i < ah->caps.total_queues; i++)
2502 ath9k_hw_resettxqueue(ah, i);
2504 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2505 ath9k_hw_init_qos(ah);
2507 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2508 ath9k_enable_rfkill(ah);
2510 ath9k_hw_init_user_settings(ah);
2512 if (AR_SREV_9287_12_OR_LATER(ah)) {
2513 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2514 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2515 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2516 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2517 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2518 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2520 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2521 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2523 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2524 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2525 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2526 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2528 if (AR_SREV_9287_12_OR_LATER(ah)) {
2529 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2530 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2533 REG_WRITE(ah, AR_STA_ID1,
2534 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2536 ath9k_hw_set_dma(ah);
2538 REG_WRITE(ah, AR_OBS, 8);
2540 if (ah->config.intr_mitigation) {
2541 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2542 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2545 ath9k_hw_init_bb(ah, chan);
2547 if (!ath9k_hw_init_cal(ah, chan))
2548 return -EIO;
2550 rx_chainmask = ah->rxchainmask;
2551 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2552 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2553 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2556 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2559 * For big-endian systems, turn on swapping for descriptors
2561 if (AR_SREV_9100(ah)) {
2562 u32 mask;
2563 mask = REG_READ(ah, AR_CFG);
2564 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2565 ath_print(common, ATH_DBG_RESET,
2566 "CFG Byte Swap Set 0x%x\n", mask);
2567 } else {
2568 mask =
2569 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2570 REG_WRITE(ah, AR_CFG, mask);
2571 ath_print(common, ATH_DBG_RESET,
2572 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2574 } else {
2575 /* Configure AR9271 target WLAN */
2576 if (AR_SREV_9271(ah))
2577 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2578 #ifdef __BIG_ENDIAN
2579 else
2580 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2581 #endif
2584 if (ah->btcoex_hw.enabled)
2585 ath9k_hw_btcoex_enable(ah);
2587 return 0;
2589 EXPORT_SYMBOL(ath9k_hw_reset);
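/*
 * Editor's illustrative sketch (not part of the upstream file): a minimal
 * caller of ath9k_hw_reset() for a channel change, with locking and error
 * recovery omitted. ath9k_hw_reset() wakes the chip itself, so the explicit
 * ATH9K_PM_AWAKE transition below is belt-and-braces only.
 */
static int example_channel_change(struct ath_hw *ah, struct ath9k_channel *chan)
{
	int r;

	ath9k_hw_setpower(ah, ATH9K_PM_AWAKE);

	/* bChannelChange = true lets the fast channel-change path be used
	 * when only the channel differs from ah->curchan */
	r = ath9k_hw_reset(ah, chan, true);
	if (r)
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Unable to reset channel, reset status %d\n", r);

	return r;
}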
2591 /************************/
2592 /* Key Cache Management */
2593 /************************/
2595 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2597 u32 keyType;
2599 if (entry >= ah->caps.keycache_size) {
2600 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2601 "keychache entry %u out of range\n", entry);
2602 return false;
2605 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2607 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2608 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2609 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2610 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2611 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2612 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2613 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2614 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2616 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2617 u16 micentry = entry + 64;
2619 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2620 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2621 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2622 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2626 return true;
2628 EXPORT_SYMBOL(ath9k_hw_keyreset);
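/*
 * Editor's illustrative sketch (not part of the upstream file): clear every
 * key cache slot, e.g. at attach time, so stale keys left by firmware or a
 * previous driver instance cannot match incoming frames.
 */
static void example_flush_keycache(struct ath_hw *ah)
{
	int i;

	for (i = 0; i < ah->caps.keycache_size; i++)
		ath9k_hw_keyreset(ah, (u16) i);
}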
2630 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2632 u32 macHi, macLo;
2634 if (entry >= ah->caps.keycache_size) {
2635 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2636 "keychache entry %u out of range\n", entry);
2637 return false;
2640 if (mac != NULL) {
2641 macHi = (mac[5] << 8) | mac[4];
2642 macLo = (mac[3] << 24) |
2643 (mac[2] << 16) |
2644 (mac[1] << 8) |
2645 mac[0];
2646 macLo >>= 1;
2647 macLo |= (macHi & 1) << 31;
2648 macHi >>= 1;
2649 } else {
2650 macLo = macHi = 0;
2652 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2653 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2655 return true;
2657 EXPORT_SYMBOL(ath9k_hw_keysetmac);
2659 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2660 const struct ath9k_keyval *k,
2661 const u8 *mac)
2663 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2664 struct ath_common *common = ath9k_hw_common(ah);
2665 u32 key0, key1, key2, key3, key4;
2666 u32 keyType;
2668 if (entry >= pCap->keycache_size) {
2669 ath_print(common, ATH_DBG_FATAL,
2670 "keycache entry %u out of range\n", entry);
2671 return false;
2674 switch (k->kv_type) {
2675 case ATH9K_CIPHER_AES_OCB:
2676 keyType = AR_KEYTABLE_TYPE_AES;
2677 break;
2678 case ATH9K_CIPHER_AES_CCM:
2679 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2680 ath_print(common, ATH_DBG_ANY,
2681 "AES-CCM not supported by mac rev 0x%x\n",
2682 ah->hw_version.macRev);
2683 return false;
2685 keyType = AR_KEYTABLE_TYPE_CCM;
2686 break;
2687 case ATH9K_CIPHER_TKIP:
2688 keyType = AR_KEYTABLE_TYPE_TKIP;
2689 if (ATH9K_IS_MIC_ENABLED(ah)
2690 && entry + 64 >= pCap->keycache_size) {
2691 ath_print(common, ATH_DBG_ANY,
2692 "entry %u inappropriate for TKIP\n", entry);
2693 return false;
2695 break;
2696 case ATH9K_CIPHER_WEP:
2697 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2698 ath_print(common, ATH_DBG_ANY,
2699 "WEP key length %u too small\n", k->kv_len);
2700 return false;
2702 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2703 keyType = AR_KEYTABLE_TYPE_40;
2704 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2705 keyType = AR_KEYTABLE_TYPE_104;
2706 else
2707 keyType = AR_KEYTABLE_TYPE_128;
2708 break;
2709 case ATH9K_CIPHER_CLR:
2710 keyType = AR_KEYTABLE_TYPE_CLR;
2711 break;
2712 default:
2713 ath_print(common, ATH_DBG_FATAL,
2714 "cipher %u not supported\n", k->kv_type);
2715 return false;
2718 key0 = get_unaligned_le32(k->kv_val + 0);
2719 key1 = get_unaligned_le16(k->kv_val + 4);
2720 key2 = get_unaligned_le32(k->kv_val + 6);
2721 key3 = get_unaligned_le16(k->kv_val + 10);
2722 key4 = get_unaligned_le32(k->kv_val + 12);
2723 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2724 key4 &= 0xff;
2727 * Note: Key cache registers access a special memory area that requires
2728 * two 32-bit writes to actually update the values in the internal
2729 * memory. Consequently, the exact order and pairs used here must be
2730 * maintained.
2733 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2734 u16 micentry = entry + 64;
2737 * Write inverted key[47:0] first to avoid Michael MIC errors
2738 * on frames that could be sent or received at the same time.
2739 * The correct key will be written in the end once everything
2740 * else is ready.
2742 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2743 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2745 /* Write key[95:48] */
2746 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2747 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2749 /* Write key[127:96] and key type */
2750 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2751 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2753 /* Write MAC address for the entry */
2754 (void) ath9k_hw_keysetmac(ah, entry, mac);
2756 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2758 * TKIP uses two key cache entries:
2759 * Michael MIC TX/RX keys in the same key cache entry
2760 * (idx = main index + 64):
2761 * key0 [31:0] = RX key [31:0]
2762 * key1 [15:0] = TX key [31:16]
2763 * key1 [31:16] = reserved
2764 * key2 [31:0] = RX key [63:32]
2765 * key3 [15:0] = TX key [15:0]
2766 * key3 [31:16] = reserved
2767 * key4 [31:0] = TX key [63:32]
2769 u32 mic0, mic1, mic2, mic3, mic4;
2771 mic0 = get_unaligned_le32(k->kv_mic + 0);
2772 mic2 = get_unaligned_le32(k->kv_mic + 4);
2773 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2774 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2775 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2777 /* Write RX[31:0] and TX[31:16] */
2778 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2779 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2781 /* Write RX[63:32] and TX[15:0] */
2782 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2783 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2785 /* Write TX[63:32] and keyType(reserved) */
2786 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2787 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2788 AR_KEYTABLE_TYPE_CLR);
2790 } else {
2792 * TKIP uses four key cache entries (two for group
2793 * keys):
2794 * Michael MIC TX/RX keys are in different key cache
2795 * entries (idx = main index + 64 for TX and
2796 * main index + 32 + 96 for RX):
2797 * key0 [31:0] = TX/RX MIC key [31:0]
2798 * key1 [31:0] = reserved
2799 * key2 [31:0] = TX/RX MIC key [63:32]
2800 * key3 [31:0] = reserved
2801 * key4 [31:0] = reserved
2803 * Upper layer code will call this function separately
2804 * for TX and RX keys when these register offsets are
2805 * used.
2807 u32 mic0, mic2;
2809 mic0 = get_unaligned_le32(k->kv_mic + 0);
2810 mic2 = get_unaligned_le32(k->kv_mic + 4);
2812 /* Write MIC key[31:0] */
2813 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2814 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2816 /* Write MIC key[63:32] */
2817 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2818 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2820 /* Write TX[63:32] and keyType(reserved) */
2821 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2822 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2823 AR_KEYTABLE_TYPE_CLR);
2826 /* MAC address registers are reserved for the MIC entry */
2827 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2828 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2831 * Write the correct (un-inverted) key[47:0] last to enable
2832 * TKIP now that all other registers are set with correct
2833 * values.
2835 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2836 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2837 } else {
2838 /* Write key[47:0] */
2839 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2840 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2842 /* Write key[95:48] */
2843 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2844 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2846 /* Write key[127:96] and key type */
2847 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2848 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2850 /* Write MAC address for the entry */
2851 (void) ath9k_hw_keysetmac(ah, entry, mac);
2854 return true;
2856 EXPORT_SYMBOL(ath9k_hw_set_keycache_entry);
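/*
 * Editor's illustrative sketch (not part of the upstream file): install a
 * 40-bit WEP key into key cache entry 0 with no peer MAC binding. The
 * ath9k_keyval fields used here mirror the accesses above; the entry number
 * and reduced error handling are placeholders.
 */
static bool example_install_wep40(struct ath_hw *ah, const u8 *key)
{
	struct ath9k_keyval k;

	memset(&k, 0, sizeof(k));
	k.kv_type = ATH9K_CIPHER_WEP;
	k.kv_len = WLAN_KEY_LEN_WEP40;			/* 5-byte key */
	memcpy(k.kv_val, key, WLAN_KEY_LEN_WEP40);

	/* NULL MAC: the entry is not tied to a specific peer address */
	return ath9k_hw_set_keycache_entry(ah, 0, &k, NULL);
}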
2858 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2860 if (entry < ah->caps.keycache_size) {
2861 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2862 if (val & AR_KEYTABLE_VALID)
2863 return true;
2865 return false;
2867 EXPORT_SYMBOL(ath9k_hw_keyisvalid);
2869 /******************************/
2870 /* Power Management (Chipset) */
2871 /******************************/
2873 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2875 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2876 if (setChip) {
2877 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2878 AR_RTC_FORCE_WAKE_EN);
2879 if (!AR_SREV_9100(ah))
2880 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2882 if(!AR_SREV_5416(ah))
2883 REG_CLR_BIT(ah, (AR_RTC_RESET),
2884 AR_RTC_RESET_EN);
2888 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2890 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2891 if (setChip) {
2892 struct ath9k_hw_capabilities *pCap = &ah->caps;
2894 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2895 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2896 AR_RTC_FORCE_WAKE_ON_INT);
2897 } else {
2898 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2899 AR_RTC_FORCE_WAKE_EN);
2904 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2906 u32 val;
2907 int i;
2909 if (setChip) {
2910 if ((REG_READ(ah, AR_RTC_STATUS) &
2911 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2912 if (ath9k_hw_set_reset_reg(ah,
2913 ATH9K_RESET_POWER_ON) != true) {
2914 return false;
2916 ath9k_hw_init_pll(ah, NULL);
2918 if (AR_SREV_9100(ah))
2919 REG_SET_BIT(ah, AR_RTC_RESET,
2920 AR_RTC_RESET_EN);
2922 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2923 AR_RTC_FORCE_WAKE_EN);
2924 udelay(50);
2926 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2927 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2928 if (val == AR_RTC_STATUS_ON)
2929 break;
2930 udelay(50);
2931 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2932 AR_RTC_FORCE_WAKE_EN);
2934 if (i == 0) {
2935 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
2936 "Failed to wakeup in %uus\n",
2937 POWER_UP_TIME / 20);
2938 return false;
2942 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2944 return true;
2947 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2949 struct ath_common *common = ath9k_hw_common(ah);
2950 int status = true, setChip = true;
2951 static const char *modes[] = {
2952 "AWAKE",
2953 "FULL-SLEEP",
2954 "NETWORK SLEEP",
2955 "UNDEFINED"
2958 if (ah->power_mode == mode)
2959 return status;
2961 ath_print(common, ATH_DBG_RESET, "%s -> %s\n",
2962 modes[ah->power_mode], modes[mode]);
2964 switch (mode) {
2965 case ATH9K_PM_AWAKE:
2966 status = ath9k_hw_set_power_awake(ah, setChip);
2967 break;
2968 case ATH9K_PM_FULL_SLEEP:
2969 ath9k_set_power_sleep(ah, setChip);
2970 ah->chip_fullsleep = true;
2971 break;
2972 case ATH9K_PM_NETWORK_SLEEP:
2973 ath9k_set_power_network_sleep(ah, setChip);
2974 break;
2975 default:
2976 ath_print(common, ATH_DBG_FATAL,
2977 "Unknown power mode %u\n", mode);
2978 return false;
2980 ah->power_mode = mode;
2982 return status;
2984 EXPORT_SYMBOL(ath9k_hw_setpower);
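/*
 * Editor's illustrative sketch (not part of the upstream file): bracket a
 * register access with power-state transitions. Registers are only reliably
 * accessible while the chip is AWAKE; dropping back to NETWORK SLEEP lets
 * the RTC power the MAC down again between beacons.
 */
static u32 example_read_while_awake(struct ath_hw *ah)
{
	u32 val;

	ath9k_hw_setpower(ah, ATH9K_PM_AWAKE);
	val = REG_READ(ah, AR_STA_ID0);
	ath9k_hw_setpower(ah, ATH9K_PM_NETWORK_SLEEP);

	return val;
}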
2987 * Helper for ASPM support.
2989 * Disable PLL when in L0s as well as receiver clock when in L1.
2990 * This power saving option must be enabled through the SerDes.
2992 * Programming the SerDes must go through the same 288 bit serial shift
2993 * register as the other analog registers. Hence the 9 writes.
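/*
 * Editor's note (illustrative): the arithmetic behind the comment above is
 * simply 288 bits / 32 bits per AR_PCIE_SERDES write = 9 writes, after
 * which the AR_PCIE_SERDES2 write below loads the shifted-in settings.
 */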
2995 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
2997 u8 i;
2998 u32 val;
3000 if (ah->is_pciexpress != true)
3001 return;
3003 /* Do not touch SerDes registers */
3004 if (ah->config.pcie_powersave_enable == 2)
3005 return;
3007 /* Nothing to do on restore for 11N */
3008 if (!restore) {
3009 if (AR_SREV_9280_20_OR_LATER(ah)) {
3011 * AR9280 2.0 or later chips use SerDes values from the
3012 * initvals.h initialized depending on chipset during
3013 * ath9k_hw_init()
3015 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3016 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3017 INI_RA(&ah->iniPcieSerdes, i, 1));
3019 } else if (AR_SREV_9280(ah) &&
3020 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3021 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3022 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3024 /* RX shut off when elecidle is asserted */
3025 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3026 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3027 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3029 /* Shut off CLKREQ active in L1 */
3030 if (ah->config.pcie_clock_req)
3031 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3032 else
3033 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3035 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3036 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3037 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3039 /* Load the new settings */
3040 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3042 } else {
3043 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3044 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3046 /* RX shut off when elecidle is asserted */
3047 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3048 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3049 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3052 * Ignore ah->ah_config.pcie_clock_req setting for
3053 * pre-AR9280 11n
3055 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3057 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3058 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3059 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3061 /* Load the new settings */
3062 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3065 udelay(1000);
3067 /* set bit 19 to allow forcing of pcie core into L1 state */
3068 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3070 /* Several PCIe register adjustments to ensure proper behaviour */
3071 if (ah->config.pcie_waen) {
3072 val = ah->config.pcie_waen;
3073 if (!power_off)
3074 val &= (~AR_WA_D3_L1_DISABLE);
3075 } else {
3076 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3077 AR_SREV_9287(ah)) {
3078 val = AR9285_WA_DEFAULT;
3079 if (!power_off)
3080 val &= (~AR_WA_D3_L1_DISABLE);
3081 } else if (AR_SREV_9280(ah)) {
3083 * On AR9280 chips bit 22 of 0x4004 needs to be
3084 * set, otherwise the card may disappear.
3086 val = AR9280_WA_DEFAULT;
3087 if (!power_off)
3088 val &= (~AR_WA_D3_L1_DISABLE);
3089 } else
3090 val = AR_WA_DEFAULT;
3093 REG_WRITE(ah, AR_WA, val);
3096 if (power_off) {
3098 * Set PCIe workaround bits
3099 * bit 14 in WA register (disable L1) should only
3100 * be set when device enters D3 and be cleared
3101 * when device comes back to D0.
3103 if (ah->config.pcie_waen) {
3104 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3105 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3106 } else {
3107 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3108 AR_SREV_9287(ah)) &&
3109 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3110 (AR_SREV_9280(ah) &&
3111 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3112 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3117 EXPORT_SYMBOL(ath9k_hw_configpcipowersave);
3119 /**********************/
3120 /* Interrupt Handling */
3121 /**********************/
3123 bool ath9k_hw_intrpend(struct ath_hw *ah)
3125 u32 host_isr;
3127 if (AR_SREV_9100(ah))
3128 return true;
3130 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3131 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3132 return true;
3134 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3135 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3136 && (host_isr != AR_INTR_SPURIOUS))
3137 return true;
3139 return false;
3141 EXPORT_SYMBOL(ath9k_hw_intrpend);
3143 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3145 u32 isr = 0;
3146 u32 mask2 = 0;
3147 struct ath9k_hw_capabilities *pCap = &ah->caps;
3148 u32 sync_cause = 0;
3149 bool fatal_int = false;
3150 struct ath_common *common = ath9k_hw_common(ah);
3152 if (!AR_SREV_9100(ah)) {
3153 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3154 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3155 == AR_RTC_STATUS_ON) {
3156 isr = REG_READ(ah, AR_ISR);
3160 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3161 AR_INTR_SYNC_DEFAULT;
3163 *masked = 0;
3165 if (!isr && !sync_cause)
3166 return false;
3167 } else {
3168 *masked = 0;
3169 isr = REG_READ(ah, AR_ISR);
3172 if (isr) {
3173 if (isr & AR_ISR_BCNMISC) {
3174 u32 isr2;
3175 isr2 = REG_READ(ah, AR_ISR_S2);
3176 if (isr2 & AR_ISR_S2_TIM)
3177 mask2 |= ATH9K_INT_TIM;
3178 if (isr2 & AR_ISR_S2_DTIM)
3179 mask2 |= ATH9K_INT_DTIM;
3180 if (isr2 & AR_ISR_S2_DTIMSYNC)
3181 mask2 |= ATH9K_INT_DTIMSYNC;
3182 if (isr2 & (AR_ISR_S2_CABEND))
3183 mask2 |= ATH9K_INT_CABEND;
3184 if (isr2 & AR_ISR_S2_GTT)
3185 mask2 |= ATH9K_INT_GTT;
3186 if (isr2 & AR_ISR_S2_CST)
3187 mask2 |= ATH9K_INT_CST;
3188 if (isr2 & AR_ISR_S2_TSFOOR)
3189 mask2 |= ATH9K_INT_TSFOOR;
3192 isr = REG_READ(ah, AR_ISR_RAC);
3193 if (isr == 0xffffffff) {
3194 *masked = 0;
3195 return false;
3198 *masked = isr & ATH9K_INT_COMMON;
3200 if (ah->config.intr_mitigation) {
3201 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3202 *masked |= ATH9K_INT_RX;
3205 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3206 *masked |= ATH9K_INT_RX;
3207 if (isr &
3208 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3209 AR_ISR_TXEOL)) {
3210 u32 s0_s, s1_s;
3212 *masked |= ATH9K_INT_TX;
3214 s0_s = REG_READ(ah, AR_ISR_S0_S);
3215 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3216 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3218 s1_s = REG_READ(ah, AR_ISR_S1_S);
3219 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3220 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3223 if (isr & AR_ISR_RXORN) {
3224 ath_print(common, ATH_DBG_INTERRUPT,
3225 "receive FIFO overrun interrupt\n");
3228 if (!AR_SREV_9100(ah)) {
3229 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3230 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3231 if (isr5 & AR_ISR_S5_TIM_TIMER)
3232 *masked |= ATH9K_INT_TIM_TIMER;
3236 *masked |= mask2;
3239 if (AR_SREV_9100(ah))
3240 return true;
3242 if (isr & AR_ISR_GENTMR) {
3243 u32 s5_s;
3245 s5_s = REG_READ(ah, AR_ISR_S5_S);
3246 if (isr & AR_ISR_GENTMR) {
3247 ah->intr_gen_timer_trigger =
3248 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3250 ah->intr_gen_timer_thresh =
3251 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3253 if (ah->intr_gen_timer_trigger)
3254 *masked |= ATH9K_INT_GENTIMER;
3259 if (sync_cause) {
3260 fatal_int =
3261 (sync_cause &
3262 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3263 ? true : false;
3265 if (fatal_int) {
3266 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3267 ath_print(common, ATH_DBG_ANY,
3268 "received PCI FATAL interrupt\n");
3270 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3271 ath_print(common, ATH_DBG_ANY,
3272 "received PCI PERR interrupt\n");
3274 *masked |= ATH9K_INT_FATAL;
3276 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3277 ath_print(common, ATH_DBG_INTERRUPT,
3278 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3279 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3280 REG_WRITE(ah, AR_RC, 0);
3281 *masked |= ATH9K_INT_FATAL;
3283 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3284 ath_print(common, ATH_DBG_INTERRUPT,
3285 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3288 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3289 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3292 return true;
3294 EXPORT_SYMBOL(ath9k_hw_getisr);
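/*
 * Editor's illustrative sketch (not part of the upstream file): a minimal
 * interrupt top half built on the two helpers above. The ATH9K_INT_*
 * handling is indicated by comments only.
 */
static bool example_isr(struct ath_hw *ah)
{
	enum ath9k_int status;

	if (!ath9k_hw_intrpend(ah))
		return false;			/* shared line, not ours */

	if (!ath9k_hw_getisr(ah, &status))
		return false;			/* nothing usable latched */

	if (status & ATH9K_INT_FATAL) {
		/* fatal PCI error flagged: schedule a full chip reset */
	}
	if (status & ATH9K_INT_RX) {
		/* drain the RX descriptor ring */
	}
	if (status & ATH9K_INT_TX) {
		/* reap completed TX descriptors */
	}

	return true;
}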
3296 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3298 u32 omask = ah->mask_reg;
3299 u32 mask, mask2;
3300 struct ath9k_hw_capabilities *pCap = &ah->caps;
3301 struct ath_common *common = ath9k_hw_common(ah);
3303 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3305 if (omask & ATH9K_INT_GLOBAL) {
3306 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n");
3307 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3308 (void) REG_READ(ah, AR_IER);
3309 if (!AR_SREV_9100(ah)) {
3310 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3311 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3313 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3314 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3318 mask = ints & ATH9K_INT_COMMON;
3319 mask2 = 0;
3321 if (ints & ATH9K_INT_TX) {
3322 if (ah->txok_interrupt_mask)
3323 mask |= AR_IMR_TXOK;
3324 if (ah->txdesc_interrupt_mask)
3325 mask |= AR_IMR_TXDESC;
3326 if (ah->txerr_interrupt_mask)
3327 mask |= AR_IMR_TXERR;
3328 if (ah->txeol_interrupt_mask)
3329 mask |= AR_IMR_TXEOL;
3331 if (ints & ATH9K_INT_RX) {
3332 mask |= AR_IMR_RXERR;
3333 if (ah->config.intr_mitigation)
3334 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3335 else
3336 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3337 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3338 mask |= AR_IMR_GENTMR;
3341 if (ints & (ATH9K_INT_BMISC)) {
3342 mask |= AR_IMR_BCNMISC;
3343 if (ints & ATH9K_INT_TIM)
3344 mask2 |= AR_IMR_S2_TIM;
3345 if (ints & ATH9K_INT_DTIM)
3346 mask2 |= AR_IMR_S2_DTIM;
3347 if (ints & ATH9K_INT_DTIMSYNC)
3348 mask2 |= AR_IMR_S2_DTIMSYNC;
3349 if (ints & ATH9K_INT_CABEND)
3350 mask2 |= AR_IMR_S2_CABEND;
3351 if (ints & ATH9K_INT_TSFOOR)
3352 mask2 |= AR_IMR_S2_TSFOOR;
3355 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3356 mask |= AR_IMR_BCNMISC;
3357 if (ints & ATH9K_INT_GTT)
3358 mask2 |= AR_IMR_S2_GTT;
3359 if (ints & ATH9K_INT_CST)
3360 mask2 |= AR_IMR_S2_CST;
3363 ath_print(common, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3364 REG_WRITE(ah, AR_IMR, mask);
3365 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3366 AR_IMR_S2_DTIM |
3367 AR_IMR_S2_DTIMSYNC |
3368 AR_IMR_S2_CABEND |
3369 AR_IMR_S2_CABTO |
3370 AR_IMR_S2_TSFOOR |
3371 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3372 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3373 ah->mask_reg = ints;
3375 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3376 if (ints & ATH9K_INT_TIM_TIMER)
3377 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3378 else
3379 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3382 if (ints & ATH9K_INT_GLOBAL) {
3383 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n");
3384 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3385 if (!AR_SREV_9100(ah)) {
3386 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3387 AR_INTR_MAC_IRQ);
3388 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3391 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3392 AR_INTR_SYNC_DEFAULT);
3393 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3394 AR_INTR_SYNC_DEFAULT);
3396 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3397 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3400 return omask;
3402 EXPORT_SYMBOL(ath9k_hw_set_interrupts);
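/*
 * Editor's illustrative sketch (not part of the upstream file): the return
 * value of ath9k_hw_set_interrupts() is the previously programmed mask, so
 * interrupts can be masked around a critical section and restored without
 * the caller tracking the mask itself.
 */
static void example_with_ints_masked(struct ath_hw *ah)
{
	enum ath9k_int saved;

	saved = ath9k_hw_set_interrupts(ah, 0);		/* mask everything */
	/* ... reconfigure queues, timers, etc. ... */
	ath9k_hw_set_interrupts(ah, saved);		/* restore old mask */
}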
3404 /*******************/
3405 /* Beacon Handling */
3406 /*******************/
3408 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3410 int flags = 0;
3412 ah->beacon_interval = beacon_period;
3414 switch (ah->opmode) {
3415 case NL80211_IFTYPE_STATION:
3416 case NL80211_IFTYPE_MONITOR:
3417 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3418 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3419 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3420 flags |= AR_TBTT_TIMER_EN;
3421 break;
3422 case NL80211_IFTYPE_ADHOC:
3423 case NL80211_IFTYPE_MESH_POINT:
3424 REG_SET_BIT(ah, AR_TXCFG,
3425 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3426 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3427 TU_TO_USEC(next_beacon +
3428 (ah->atim_window ? ah->
3429 atim_window : 1)));
3430 flags |= AR_NDP_TIMER_EN;
3431 case NL80211_IFTYPE_AP:
3432 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3433 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3434 TU_TO_USEC(next_beacon -
3435 ah->config.
3436 dma_beacon_response_time));
3437 REG_WRITE(ah, AR_NEXT_SWBA,
3438 TU_TO_USEC(next_beacon -
3439 ah->config.
3440 sw_beacon_response_time));
3441 flags |=
3442 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3443 break;
3444 default:
3445 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON,
3446 "%s: unsupported opmode: %d\n",
3447 __func__, ah->opmode);
3448 return;
3449 break;
3452 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3453 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3454 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3455 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3457 beacon_period &= ~ATH9K_BEACON_ENA;
3458 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3459 ath9k_hw_reset_tsf(ah);
3462 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3464 EXPORT_SYMBOL(ath9k_hw_beaconinit);
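/*
 * Editor's illustrative sketch (not part of the upstream file): program the
 * beacon timers for the current ah->opmode. Both arguments are in TU; the
 * helper converts to microseconds internally via TU_TO_USEC(). Real callers
 * may also OR flag bits such as ATH9K_BEACON_RESET_TSF into the period;
 * that is left out here.
 */
static void example_beacon_config(struct ath_hw *ah, u32 nexttbtt_tu,
				  u32 intval_tu)
{
	ath9k_hw_beaconinit(ah, nexttbtt_tu, intval_tu);
}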
3466 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3467 const struct ath9k_beacon_state *bs)
3469 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3470 struct ath9k_hw_capabilities *pCap = &ah->caps;
3471 struct ath_common *common = ath9k_hw_common(ah);
3473 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3475 REG_WRITE(ah, AR_BEACON_PERIOD,
3476 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3477 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3478 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3480 REG_RMW_FIELD(ah, AR_RSSI_THR,
3481 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3483 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3485 if (bs->bs_sleepduration > beaconintval)
3486 beaconintval = bs->bs_sleepduration;
3488 dtimperiod = bs->bs_dtimperiod;
3489 if (bs->bs_sleepduration > dtimperiod)
3490 dtimperiod = bs->bs_sleepduration;
3492 if (beaconintval == dtimperiod)
3493 nextTbtt = bs->bs_nextdtim;
3494 else
3495 nextTbtt = bs->bs_nexttbtt;
3497 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3498 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3499 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3500 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3502 REG_WRITE(ah, AR_NEXT_DTIM,
3503 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3504 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3506 REG_WRITE(ah, AR_SLEEP1,
3507 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3508 | AR_SLEEP1_ASSUME_DTIM);
3510 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3511 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3512 else
3513 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3515 REG_WRITE(ah, AR_SLEEP2,
3516 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3518 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3519 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3521 REG_SET_BIT(ah, AR_TIMER_MODE,
3522 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3523 AR_DTIM_TIMER_EN);
3525 /* TSF Out of Range Threshold */
3526 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3528 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
3530 /*******************/
3531 /* HW Capabilities */
3532 /*******************/
3534 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3536 struct ath9k_hw_capabilities *pCap = &ah->caps;
3537 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3538 struct ath_common *common = ath9k_hw_common(ah);
3539 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
3541 u16 capField = 0, eeval;
3543 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3544 regulatory->current_rd = eeval;
3546 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3547 if (AR_SREV_9285_10_OR_LATER(ah))
3548 eeval |= AR9285_RDEXT_DEFAULT;
3549 regulatory->current_rd_ext = eeval;
3551 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3553 if (ah->opmode != NL80211_IFTYPE_AP &&
3554 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3555 if (regulatory->current_rd == 0x64 ||
3556 regulatory->current_rd == 0x65)
3557 regulatory->current_rd += 5;
3558 else if (regulatory->current_rd == 0x41)
3559 regulatory->current_rd = 0x43;
3560 ath_print(common, ATH_DBG_REGULATORY,
3561 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3564 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3565 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3567 if (eeval & AR5416_OPFLAGS_11A) {
3568 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3569 if (ah->config.ht_enable) {
3570 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3571 set_bit(ATH9K_MODE_11NA_HT20,
3572 pCap->wireless_modes);
3573 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3574 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3575 pCap->wireless_modes);
3576 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3577 pCap->wireless_modes);
3582 if (eeval & AR5416_OPFLAGS_11G) {
3583 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3584 if (ah->config.ht_enable) {
3585 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3586 set_bit(ATH9K_MODE_11NG_HT20,
3587 pCap->wireless_modes);
3588 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3589 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3590 pCap->wireless_modes);
3591 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3592 pCap->wireless_modes);
3597 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3599 * For AR9271 we will temporarily use the rx chainmask as read from
3600 * the EEPROM.
3602 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3603 !(eeval & AR5416_OPFLAGS_11A) &&
3604 !(AR_SREV_9271(ah)))
3605 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3606 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3607 else
3608 /* Use rx_chainmask from EEPROM. */
3609 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3611 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3612 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3614 pCap->low_2ghz_chan = 2312;
3615 pCap->high_2ghz_chan = 2732;
3617 pCap->low_5ghz_chan = 4920;
3618 pCap->high_5ghz_chan = 6100;
3620 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3621 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3622 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3624 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3625 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3626 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3628 if (ah->config.ht_enable)
3629 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3630 else
3631 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3633 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3634 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3635 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3636 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3638 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3639 pCap->total_queues =
3640 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3641 else
3642 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3644 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3645 pCap->keycache_size =
3646 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3647 else
3648 pCap->keycache_size = AR_KEYTABLE_SIZE;
3650 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3651 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3653 if (AR_SREV_9285_10_OR_LATER(ah))
3654 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3655 else if (AR_SREV_9280_10_OR_LATER(ah))
3656 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3657 else
3658 pCap->num_gpio_pins = AR_NUM_GPIO;
3660 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3661 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3662 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3663 } else {
3664 pCap->rts_aggr_limit = (8 * 1024);
3667 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3669 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3670 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3671 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3672 ah->rfkill_gpio =
3673 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3674 ah->rfkill_polarity =
3675 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3677 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3679 #endif
3681 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3683 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3684 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3685 else
3686 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3688 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3689 pCap->reg_cap =
3690 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3691 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3692 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3693 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3694 } else {
3695 pCap->reg_cap =
3696 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3697 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3700 /* Advertise midband for AR5416 with FCC midband set in eeprom */
3701 if (regulatory->current_rd_ext & (1 << REG_EXT_FCC_MIDBAND) &&
3702 AR_SREV_5416(ah))
3703 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3705 pCap->num_antcfg_5ghz =
3706 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3707 pCap->num_antcfg_2ghz =
3708 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3710 if (AR_SREV_9280_10_OR_LATER(ah) &&
3711 ath9k_hw_btcoex_supported(ah)) {
3712 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO;
3713 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3715 if (AR_SREV_9285(ah)) {
3716 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
3717 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3718 } else {
3719 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
3721 } else {
3722 btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
3726 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3727 u32 capability, u32 *result)
3729 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3730 switch (type) {
3731 case ATH9K_CAP_CIPHER:
3732 switch (capability) {
3733 case ATH9K_CIPHER_AES_CCM:
3734 case ATH9K_CIPHER_AES_OCB:
3735 case ATH9K_CIPHER_TKIP:
3736 case ATH9K_CIPHER_WEP:
3737 case ATH9K_CIPHER_MIC:
3738 case ATH9K_CIPHER_CLR:
3739 return true;
3740 default:
3741 return false;
3743 case ATH9K_CAP_TKIP_MIC:
3744 switch (capability) {
3745 case 0:
3746 return true;
3747 case 1:
3748 return (ah->sta_id1_defaults &
3749 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3750 false;
3752 case ATH9K_CAP_TKIP_SPLIT:
3753 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3754 false : true;
3755 case ATH9K_CAP_DIVERSITY:
3756 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3757 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3758 true : false;
3759 case ATH9K_CAP_MCAST_KEYSRCH:
3760 switch (capability) {
3761 case 0:
3762 return true;
3763 case 1:
3764 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3765 return false;
3766 } else {
3767 return (ah->sta_id1_defaults &
3768 AR_STA_ID1_MCAST_KSRCH) ? true :
3769 false;
3772 return false;
3773 case ATH9K_CAP_TXPOW:
3774 switch (capability) {
3775 case 0:
3776 return 0;
3777 case 1:
3778 *result = regulatory->power_limit;
3779 return 0;
3780 case 2:
3781 *result = regulatory->max_power_level;
3782 return 0;
3783 case 3:
3784 *result = regulatory->tp_scale;
3785 return 0;
3787 return false;
3788 case ATH9K_CAP_DS:
3789 return (AR_SREV_9280_20_OR_LATER(ah) &&
3790 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3791 ? false : true;
3792 default:
3793 return false;
3796 EXPORT_SYMBOL(ath9k_hw_getcapability);
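/*
 * Editor's illustrative sketch (not part of the upstream file): read back
 * the configured TX power limit. Capability index 1 selects the configured
 * limit, 2 the max power level, 3 the tp_scale (see the switch above); the
 * unit (commonly 0.5 dBm steps) is an assumption of this sketch.
 */
static u32 example_get_power_limit(struct ath_hw *ah)
{
	u32 limit = 0;

	ath9k_hw_getcapability(ah, ATH9K_CAP_TXPOW, 1, &limit);
	return limit;
}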
3798 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3799 u32 capability, u32 setting, int *status)
3801 u32 v;
3803 switch (type) {
3804 case ATH9K_CAP_TKIP_MIC:
3805 if (setting)
3806 ah->sta_id1_defaults |=
3807 AR_STA_ID1_CRPT_MIC_ENABLE;
3808 else
3809 ah->sta_id1_defaults &=
3810 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3811 return true;
3812 case ATH9K_CAP_DIVERSITY:
3813 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3814 if (setting)
3815 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3816 else
3817 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3818 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3819 return true;
3820 case ATH9K_CAP_MCAST_KEYSRCH:
3821 if (setting)
3822 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3823 else
3824 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3825 return true;
3826 default:
3827 return false;
3830 EXPORT_SYMBOL(ath9k_hw_setcapability);
3832 /****************************/
3833 /* GPIO / RFKILL / Antennae */
3834 /****************************/
3836 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3837 u32 gpio, u32 type)
3839 int addr;
3840 u32 gpio_shift, tmp;
3842 if (gpio > 11)
3843 addr = AR_GPIO_OUTPUT_MUX3;
3844 else if (gpio > 5)
3845 addr = AR_GPIO_OUTPUT_MUX2;
3846 else
3847 addr = AR_GPIO_OUTPUT_MUX1;
3849 gpio_shift = (gpio % 6) * 5;
3851 if (AR_SREV_9280_20_OR_LATER(ah)
3852 || (addr != AR_GPIO_OUTPUT_MUX1)) {
3853 REG_RMW(ah, addr, (type << gpio_shift),
3854 (0x1f << gpio_shift));
3855 } else {
3856 tmp = REG_READ(ah, addr);
3857 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3858 tmp &= ~(0x1f << gpio_shift);
3859 tmp |= (type << gpio_shift);
3860 REG_WRITE(ah, addr, tmp);
3864 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3866 u32 gpio_shift;
3868 BUG_ON(gpio >= ah->caps.num_gpio_pins);
3870 gpio_shift = gpio << 1;
3872 REG_RMW(ah,
3873 AR_GPIO_OE_OUT,
3874 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3875 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3877 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
3879 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3881 #define MS_REG_READ(x, y) \
3882 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3884 if (gpio >= ah->caps.num_gpio_pins)
3885 return 0xffffffff;
3887 if (AR_SREV_9287_10_OR_LATER(ah))
3888 return MS_REG_READ(AR9287, gpio) != 0;
3889 else if (AR_SREV_9285_10_OR_LATER(ah))
3890 return MS_REG_READ(AR9285, gpio) != 0;
3891 else if (AR_SREV_9280_10_OR_LATER(ah))
3892 return MS_REG_READ(AR928X, gpio) != 0;
3893 else
3894 return MS_REG_READ(AR, gpio) != 0;
3896 EXPORT_SYMBOL(ath9k_hw_gpio_get);
3898 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3899 u32 ah_signal_type)
3901 u32 gpio_shift;
3903 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3905 gpio_shift = 2 * gpio;
3907 REG_RMW(ah,
3908 AR_GPIO_OE_OUT,
3909 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3910 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3912 EXPORT_SYMBOL(ath9k_hw_cfg_output);
3914 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3916 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3917 AR_GPIO_BIT(gpio));
3919 EXPORT_SYMBOL(ath9k_hw_set_gpio);
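/*
 * Editor's illustrative sketch (not part of the upstream file): drive an
 * LED from a GPIO pin using the helpers above. The pin number, the
 * AR_GPIO_OUTPUT_MUX_AS_OUTPUT mux type (expected to come from reg.h) and
 * the active-low wiring are all assumptions of this sketch.
 */
static void example_led(struct ath_hw *ah, u32 gpio_pin, bool on)
{
	ath9k_hw_cfg_output(ah, gpio_pin, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
	ath9k_hw_set_gpio(ah, gpio_pin, on ? 0 : 1);	/* active low */
}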
3921 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3923 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3925 EXPORT_SYMBOL(ath9k_hw_getdefantenna);
3927 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3929 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3931 EXPORT_SYMBOL(ath9k_hw_setantenna);
3933 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3934 enum ath9k_ant_setting settings,
3935 struct ath9k_channel *chan,
3936 u8 *tx_chainmask,
3937 u8 *rx_chainmask,
3938 u8 *antenna_cfgd)
3940 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3942 if (AR_SREV_9280(ah)) {
3943 if (!tx_chainmask_cfg) {
3945 tx_chainmask_cfg = *tx_chainmask;
3946 rx_chainmask_cfg = *rx_chainmask;
3949 switch (settings) {
3950 case ATH9K_ANT_FIXED_A:
3951 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3952 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3953 *antenna_cfgd = true;
3954 break;
3955 case ATH9K_ANT_FIXED_B:
3956 if (ah->caps.tx_chainmask >
3957 ATH9K_ANTENNA1_CHAINMASK) {
3958 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3960 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3961 *antenna_cfgd = true;
3962 break;
3963 case ATH9K_ANT_VARIABLE:
3964 *tx_chainmask = tx_chainmask_cfg;
3965 *rx_chainmask = rx_chainmask_cfg;
3966 *antenna_cfgd = true;
3967 break;
3968 default:
3969 break;
3971 } else {
3972 ah->config.diversity_control = settings;
3975 return true;
3978 /*********************/
3979 /* General Operation */
3980 /*********************/
3982 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3984 u32 bits = REG_READ(ah, AR_RX_FILTER);
3985 u32 phybits = REG_READ(ah, AR_PHY_ERR);
3987 if (phybits & AR_PHY_ERR_RADAR)
3988 bits |= ATH9K_RX_FILTER_PHYRADAR;
3989 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
3990 bits |= ATH9K_RX_FILTER_PHYERR;
3992 return bits;
3994 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
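/*
 * Program the RX filter.  The PHY-error related filter bits are mirrored
 * into AR_PHY_ERR, and zero-length frame DMA (ZLFDMA) is enabled only
 * while PHY errors are being delivered.
 */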
3996 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
3998 u32 phybits;
4000 REG_WRITE(ah, AR_RX_FILTER, bits);
4002 phybits = 0;
4003 if (bits & ATH9K_RX_FILTER_PHYRADAR)
4004 phybits |= AR_PHY_ERR_RADAR;
4005 if (bits & ATH9K_RX_FILTER_PHYERR)
4006 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
4007 REG_WRITE(ah, AR_PHY_ERR, phybits);
4009 if (phybits)
4010 REG_WRITE(ah, AR_RXCFG,
4011 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
4012 else
4013 REG_WRITE(ah, AR_RXCFG,
4014 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
4016 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
4018 bool ath9k_hw_phy_disable(struct ath_hw *ah)
4020 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
4021 return false;
4023 ath9k_hw_init_pll(ah, NULL);
4024 return true;
4026 EXPORT_SYMBOL(ath9k_hw_phy_disable);
4028 bool ath9k_hw_disable(struct ath_hw *ah)
4030 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
4031 return false;
4033 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
4034 return false;
4036 ath9k_hw_init_pll(ah, NULL);
4037 return true;
4039 EXPORT_SYMBOL(ath9k_hw_disable);
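/*
 * Update the user/regulatory TX power limit and reprogram the per-rate
 * power table.  The limit is clamped to MAX_RATE_POWER; the *2 factors
 * apparently convert the mac80211 dBm values into the half-dB units the
 * EEPROM code expects (assumption based on the scaling, not stated here).
 */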
4041 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
4043 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
4044 struct ath9k_channel *chan = ah->curchan;
4045 struct ieee80211_channel *channel = chan->chan;
4047 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
4049 ah->eep_ops->set_txpower(ah, chan,
4050 ath9k_regd_get_ctl(regulatory, chan),
4051 channel->max_antenna_gain * 2,
4052 channel->max_power * 2,
4053 min((u32) MAX_RATE_POWER,
4054 (u32) regulatory->power_limit));
4056 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
4058 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4060 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN);
4062 EXPORT_SYMBOL(ath9k_hw_setmac);
4064 void ath9k_hw_setopmode(struct ath_hw *ah)
4066 ath9k_hw_set_operating_mode(ah, ah->opmode);
4068 EXPORT_SYMBOL(ath9k_hw_setopmode);
4070 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4072 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4073 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4075 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
4077 void ath9k_hw_write_associd(struct ath_hw *ah)
4079 struct ath_common *common = ath9k_hw_common(ah);
4081 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
4082 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
4083 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4085 EXPORT_SYMBOL(ath9k_hw_write_associd);
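/*
 * Read the 64-bit TSF.  The upper and lower halves come from two
 * separate register reads, so a rollover between the two reads is not
 * accounted for here.
 */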
4087 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4089 u64 tsf;
4091 tsf = REG_READ(ah, AR_TSF_U32);
4092 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4094 return tsf;
4096 EXPORT_SYMBOL(ath9k_hw_gettsf64);
4098 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4100 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4101 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4103 EXPORT_SYMBOL(ath9k_hw_settsf64);
4105 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4107 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4108 AH_TSF_WRITE_TIMEOUT))
4109 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4110 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4112 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4114 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
4116 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4118 if (setting)
4119 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4120 else
4121 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4123 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
4125 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4127 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4128 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
4129 "bad slot time %u\n", us);
4130 ah->slottime = (u32) -1;
4131 return false;
4132 } else {
4133 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4134 ah->slottime = us;
4135 return true;
4136 }
4137 }
4138 EXPORT_SYMBOL(ath9k_hw_setslottime);
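/*
 * Program the 20/40 MHz MAC mode: in HT40 the channel is treated as
 * clear only when both 20 MHz halves are clear, unless the driver asked
 * to ignore extension-channel CCA.
 */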
4140 void ath9k_hw_set11nmac2040(struct ath_hw *ah)
4142 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
4143 u32 macmode;
4145 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
4146 macmode = AR_2040_JOINED_RX_CLEAR;
4147 else
4148 macmode = 0;
4150 REG_WRITE(ah, AR_2040_MODE, macmode);
4153 /* HW Generic timers configuration */
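/*
 * Per-slot register layout for the hardware generic timers: the "next"
 * (target TSF), period and mode registers plus the bit to set in the
 * mode register.  Slots 0-7 share the NDP timer registers; slots 8-15
 * use the NDP2 timer banks.
 */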
4155 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4156 {
4157 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4158 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4159 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4160 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4161 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4162 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4163 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4164 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4165 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4166 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4167 AR_NDP2_TIMER_MODE, 0x0002},
4168 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4169 AR_NDP2_TIMER_MODE, 0x0004},
4170 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4171 AR_NDP2_TIMER_MODE, 0x0008},
4172 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4173 AR_NDP2_TIMER_MODE, 0x0010},
4174 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4175 AR_NDP2_TIMER_MODE, 0x0020},
4176 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4177 AR_NDP2_TIMER_MODE, 0x0040},
4178 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4179 AR_NDP2_TIMER_MODE, 0x0080}
4180 };
4182 /* HW generic timer primitives */
4184 /* compute and clear index of rightmost 1 */
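/*
 * b & -b isolates the lowest set bit of the mask; multiplying by the
 * de Bruijn constant and keeping the top five bits turns that single
 * bit into an index (debruijn32 and the gen_timer_index[] lookup table
 * are set up elsewhere in the driver).
 */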
4185 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4187 u32 b;
4189 b = *mask;
4190 b &= (0-b);
4191 *mask &= ~b;
4192 b *= debruijn32;
4193 b >>= 27;
4195 return timer_table->gen_timer_index[b];
4198 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4200 return REG_READ(ah, AR_TSF_L32);
4202 EXPORT_SYMBOL(ath9k_hw_gettsf32);
4204 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4205 void (*trigger)(void *),
4206 void (*overflow)(void *),
4207 void *arg,
4208 u8 timer_index)
4210 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4211 struct ath_gen_timer *timer;
4213 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4215 if (timer == NULL) {
4216 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
4217 "Failed to allocate memory"
4218 "for hw timer[%d]\n", timer_index);
4219 return NULL;
4222 /* allocate a hardware generic timer slot */
4223 timer_table->timers[timer_index] = timer;
4224 timer->index = timer_index;
4225 timer->trigger = trigger;
4226 timer->overflow = overflow;
4227 timer->arg = arg;
4229 return timer;
4231 EXPORT_SYMBOL(ath_gen_timer_alloc);
4233 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
4234 struct ath_gen_timer *timer,
4235 u32 timer_next,
4236 u32 timer_period)
4238 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4239 u32 tsf;
4241 BUG_ON(!timer_period);
4243 set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4245 tsf = ath9k_hw_gettsf32(ah);
4247 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER,
4248 "curent tsf %x period %x"
4249 "timer_next %x\n", tsf, timer_period, timer_next);
4251 /*
4252 * Pull timer_next forward if the current TSF already passed it
4253 * because of software latency
4254 */
4255 if (timer_next < tsf)
4256 timer_next = tsf + timer_period;
4258 /*
4259 * Program generic timer registers
4260 */
4261 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4262 timer_next);
4263 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4264 timer_period);
4265 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4266 gen_tmr_configuration[timer->index].mode_mask);
4268 /* Enable both trigger and thresh interrupt masks */
4269 REG_SET_BIT(ah, AR_IMR_S5,
4270 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4271 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4273 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
4275 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4277 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4279 if ((timer->index < AR_FIRST_NDP_TIMER) ||
4280 (timer->index >= ATH_MAX_GEN_TIMER)) {
4281 return;
4282 }
4284 /* Clear generic timer enable bits. */
4285 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4286 gen_tmr_configuration[timer->index].mode_mask);
4288 /* Disable both trigger and thresh interrupt masks */
4289 REG_CLR_BIT(ah, AR_IMR_S5,
4290 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4291 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4293 clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4295 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
4297 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4299 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4301 /* free the hardware generic timer slot */
4302 timer_table->timers[timer->index] = NULL;
4303 kfree(timer);
4305 EXPORT_SYMBOL(ath_gen_timer_free);
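/*
 * Rough usage sketch (not part of this file): a driver-side user, e.g.
 * the btcoex code, allocates a timer slot, arms it against the 32-bit
 * TSF and tears it down again.  Callback names and timer values below
 * are hypothetical; times are in TSF units (microseconds).
 *
 *	struct ath_gen_timer *gt;
 *
 *	gt = ath_gen_timer_alloc(ah, my_trigger_cb, my_overflow_cb,
 *				 my_arg, AR_FIRST_NDP_TIMER);
 *	if (gt)
 *		ath9k_hw_gen_timer_start(ah, gt,
 *					 ath9k_hw_gettsf32(ah) + 5000,
 *					 10000);
 *	...
 *	ath9k_hw_gen_timer_stop(ah, gt);
 *	ath_gen_timer_free(ah, gt);
 *
 * The driver's interrupt path is then expected to call
 * ath_gen_timer_isr() below when a generic-timer interrupt is reported.
 */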
4307 /*
4308 * Generic timer interrupt handling
4309 */
4310 void ath_gen_timer_isr(struct ath_hw *ah)
4312 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4313 struct ath_gen_timer *timer;
4314 struct ath_common *common = ath9k_hw_common(ah);
4315 u32 trigger_mask, thresh_mask, index;
4317 /* get hardware generic timer interrupt status */
4318 trigger_mask = ah->intr_gen_timer_trigger;
4319 thresh_mask = ah->intr_gen_timer_thresh;
4320 trigger_mask &= timer_table->timer_mask.val;
4321 thresh_mask &= timer_table->timer_mask.val;
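/* A timer flagged in both masks is handled as an overflow only. */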
4323 trigger_mask &= ~thresh_mask;
4325 while (thresh_mask) {
4326 index = rightmost_index(timer_table, &thresh_mask);
4327 timer = timer_table->timers[index];
4328 BUG_ON(!timer);
4329 ath_print(common, ATH_DBG_HWTIMER,
4330 "TSF overflow for Gen timer %d\n", index);
4331 timer->overflow(timer->arg);
4334 while (trigger_mask) {
4335 index = rightmost_index(timer_table, &trigger_mask);
4336 timer = timer_table->timers[index];
4337 BUG_ON(!timer);
4338 ath_print(common, ATH_DBG_HWTIMER,
4339 "Gen timer[%d] trigger\n", index);
4340 timer->trigger(timer->arg);
4343 EXPORT_SYMBOL(ath_gen_timer_isr);