ath9k: Determine btcoex scheme type based on chip version
drivers/net/wireless/ath/ath9k/hw.c
1 /*
2 * Copyright (c) 2008-2009 Atheros Communications Inc.
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 #include <linux/io.h>
18 #include <asm/unaligned.h>
20 #include "ath9k.h"
21 #include "initvals.h"
23 static int btcoex_enable;
24 module_param(btcoex_enable, bool, 0);
25 MODULE_PARM_DESC(btcoex_enable, "Enable Bluetooth coexistence support");
27 #define ATH9K_CLOCK_RATE_CCK 22
28 #define ATH9K_CLOCK_RATE_5GHZ_OFDM 40
29 #define ATH9K_CLOCK_RATE_2GHZ_OFDM 44
31 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
32 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
33 enum ath9k_ht_macmode macmode);
34 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
35 struct ar5416_eeprom_def *pEepData,
36 u32 reg, u32 value);
37 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
38 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
40 /********************/
41 /* Helper Functions */
42 /********************/
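/*
 * Clock conversion helpers: the MAC clock runs at 22 MHz on CCK-only
 * channels, 44 MHz on 2 GHz OFDM channels and 40 MHz on 5 GHz channels,
 * and the effective rate doubles in HT40, so clocks and microseconds are
 * converted using the per-band rates above and scaled by two for HT40.
 */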
44 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
46 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
48 if (!ah->curchan) /* should really check for CCK instead */
49 return clks / ATH9K_CLOCK_RATE_CCK;
50 if (conf->channel->band == IEEE80211_BAND_2GHZ)
51 return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
53 return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
56 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
58 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
60 if (conf_is_ht40(conf))
61 return ath9k_hw_mac_usec(ah, clks) / 2;
62 else
63 return ath9k_hw_mac_usec(ah, clks);
66 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
68 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
70 if (!ah->curchan) /* should really check for CCK instead */
71 return usecs * ATH9K_CLOCK_RATE_CCK;
72 if (conf->channel->band == IEEE80211_BAND_2GHZ)
73 return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
74 return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
77 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
79 struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
81 if (conf_is_ht40(conf))
82 return ath9k_hw_mac_clks(ah, usecs) * 2;
83 else
84 return ath9k_hw_mac_clks(ah, usecs);
88 * Read and write: they both share the same lock. We do this to serialize
89 * reads and writes on Atheros 802.11n PCI devices only. This is required
90 * as the FIFO on these devices can sanely accept only 2 requests. After
91 * that the device goes bananas. Serializing the reads/writes prevents this
92 * from happening.
95 void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
97 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
98 unsigned long flags;
99 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
100 iowrite32(val, ah->ah_sc->mem + reg_offset);
101 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
102 } else
103 iowrite32(val, ah->ah_sc->mem + reg_offset);
106 unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
108 u32 val;
109 if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
110 unsigned long flags;
111 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
112 val = ioread32(ah->ah_sc->mem + reg_offset);
113 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
114 } else
115 val = ioread32(ah->ah_sc->mem + reg_offset);
116 return val;
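/*
 * Poll a register every AH_TIME_QUANTUM microseconds until the masked
 * value matches 'val' or 'timeout' microseconds have elapsed; returns
 * false (and logs the final register value) on timeout.
 */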
119 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
121 int i;
123 BUG_ON(timeout < AH_TIME_QUANTUM);
125 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
126 if ((REG_READ(ah, reg) & mask) == val)
127 return true;
129 udelay(AH_TIME_QUANTUM);
132 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
133 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
134 timeout, reg, REG_READ(ah, reg), mask, val);
136 return false;
139 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
141 u32 retval;
142 int i;
144 for (i = 0, retval = 0; i < n; i++) {
145 retval = (retval << 1) | (val & 1);
146 val >>= 1;
148 return retval;
151 bool ath9k_get_channel_edges(struct ath_hw *ah,
152 u16 flags, u16 *low,
153 u16 *high)
155 struct ath9k_hw_capabilities *pCap = &ah->caps;
157 if (flags & CHANNEL_5GHZ) {
158 *low = pCap->low_5ghz_chan;
159 *high = pCap->high_5ghz_chan;
160 return true;
162 if ((flags & CHANNEL_2GHZ)) {
163 *low = pCap->low_2ghz_chan;
164 *high = pCap->high_2ghz_chan;
165 return true;
167 return false;
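/*
 * Compute the airtime (in microseconds) of a frame of 'frameLen' bytes at
 * rate index 'rateix'. For CCK this is SIFS + PLCP preamble/header (halved
 * for short preamble) + 8 * frameLen * 1000 / rate_kbps. For OFDM the frame
 * is rounded up to whole symbols: bitsPerSymbol = rate_kbps * T_sym / 1000,
 * numSymbols = ceil((PLCP bits + 8 * frameLen) / bitsPerSymbol), and
 * txTime = SIFS + preamble + numSymbols * T_sym, with half/quarter-rate
 * channels using their scaled SIFS, preamble and symbol times.
 */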
170 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
171 const struct ath_rate_table *rates,
172 u32 frameLen, u16 rateix,
173 bool shortPreamble)
175 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
176 u32 kbps;
178 kbps = rates->info[rateix].ratekbps;
180 if (kbps == 0)
181 return 0;
183 switch (rates->info[rateix].phy) {
184 case WLAN_RC_PHY_CCK:
185 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
186 if (shortPreamble && rates->info[rateix].short_preamble)
187 phyTime >>= 1;
188 numBits = frameLen << 3;
189 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
190 break;
191 case WLAN_RC_PHY_OFDM:
192 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
193 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
194 numBits = OFDM_PLCP_BITS + (frameLen << 3);
195 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
196 txTime = OFDM_SIFS_TIME_QUARTER
197 + OFDM_PREAMBLE_TIME_QUARTER
198 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
199 } else if (ah->curchan &&
200 IS_CHAN_HALF_RATE(ah->curchan)) {
201 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
202 numBits = OFDM_PLCP_BITS + (frameLen << 3);
203 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
204 txTime = OFDM_SIFS_TIME_HALF +
205 OFDM_PREAMBLE_TIME_HALF
206 + (numSymbols * OFDM_SYMBOL_TIME_HALF);
207 } else {
208 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
209 numBits = OFDM_PLCP_BITS + (frameLen << 3);
210 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
211 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
212 + (numSymbols * OFDM_SYMBOL_TIME);
214 break;
215 default:
216 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
217 "Unknown phy %u (rate ix %u)\n",
218 rates->info[rateix].phy, rateix);
219 txTime = 0;
220 break;
223 return txTime;
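/*
 * Derive the control, extension and synthesizer channel centers. For
 * non-HT40 channels all three equal chan->channel. For HT40 the synthesizer
 * center is offset by +/-HT40_CHANNEL_CENTER_SHIFT from the primary channel,
 * the control center sits back on the primary channel, and the extension
 * center sits one shift (or 15 MHz for 25 MHz extension protection spacing)
 * beyond the synthesizer center.
 */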
226 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
227 struct ath9k_channel *chan,
228 struct chan_centers *centers)
230 int8_t extoff;
232 if (!IS_CHAN_HT40(chan)) {
233 centers->ctl_center = centers->ext_center =
234 centers->synth_center = chan->channel;
235 return;
238 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
239 (chan->chanmode == CHANNEL_G_HT40PLUS)) {
240 centers->synth_center =
241 chan->channel + HT40_CHANNEL_CENTER_SHIFT;
242 extoff = 1;
243 } else {
244 centers->synth_center =
245 chan->channel - HT40_CHANNEL_CENTER_SHIFT;
246 extoff = -1;
249 centers->ctl_center =
250 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
251 centers->ext_center =
252 centers->synth_center + (extoff *
253 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
254 HT40_CHANNEL_CENTER_SHIFT : 15));
257 /******************/
258 /* Chip Revisions */
259 /******************/
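/*
 * Decode AR_SREV into the MAC version/revision and work out whether the
 * device is PCI-Express. Chips that report 0xFF in the legacy SREV ID
 * field use the extended layout (VERSION2/REVISION2 and the host-mode bit).
 */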
261 static void ath9k_hw_read_revisions(struct ath_hw *ah)
263 u32 val;
265 val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
267 if (val == 0xFF) {
268 val = REG_READ(ah, AR_SREV);
269 ah->hw_version.macVersion =
270 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
271 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
272 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
273 } else {
274 if (!AR_SREV_9100(ah))
275 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
277 ah->hw_version.macRev = val & AR_SREV_REVISION;
279 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
280 ah->is_pciexpress = true;
284 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
286 u32 val;
287 int i;
289 REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
291 for (i = 0; i < 8; i++)
292 REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
293 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
294 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
296 return ath9k_hw_reverse_bits(val, 8);
299 /************************************/
300 /* HW Attach, Detach, Init Routines */
301 /************************************/
303 static void ath9k_hw_disablepcie(struct ath_hw *ah)
305 if (AR_SREV_9100(ah))
306 return;
308 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
309 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
310 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
311 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
312 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
313 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
314 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
315 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
316 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
318 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
321 static bool ath9k_hw_chip_test(struct ath_hw *ah)
323 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
324 u32 regHold[2];
325 u32 patternData[4] = { 0x55555555,
326 0xaaaaaaaa,
327 0x66666666,
328 0x99999999 };
329 int i, j;
331 for (i = 0; i < 2; i++) {
332 u32 addr = regAddr[i];
333 u32 wrData, rdData;
335 regHold[i] = REG_READ(ah, addr);
336 for (j = 0; j < 0x100; j++) {
337 wrData = (j << 16) | j;
338 REG_WRITE(ah, addr, wrData);
339 rdData = REG_READ(ah, addr);
340 if (rdData != wrData) {
341 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
342 "address test failed "
343 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
344 addr, wrData, rdData);
345 return false;
348 for (j = 0; j < 4; j++) {
349 wrData = patternData[j];
350 REG_WRITE(ah, addr, wrData);
351 rdData = REG_READ(ah, addr);
352 if (wrData != rdData) {
353 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
354 "address test failed "
355 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
356 addr, wrData, rdData);
357 return false;
360 REG_WRITE(ah, regAddr[i], regHold[i]);
362 udelay(100);
364 return true;
367 static const char *ath9k_hw_devname(u16 devid)
369 switch (devid) {
370 case AR5416_DEVID_PCI:
371 return "Atheros 5416";
372 case AR5416_DEVID_PCIE:
373 return "Atheros 5418";
374 case AR9160_DEVID_PCI:
375 return "Atheros 9160";
376 case AR5416_AR9100_DEVID:
377 return "Atheros 9100";
378 case AR9280_DEVID_PCI:
379 case AR9280_DEVID_PCIE:
380 return "Atheros 9280";
381 case AR9285_DEVID_PCIE:
382 return "Atheros 9285";
383 case AR5416_DEVID_AR9287_PCI:
384 case AR5416_DEVID_AR9287_PCIE:
385 return "Atheros 9287";
388 return NULL;
391 static void ath9k_hw_init_config(struct ath_hw *ah)
393 int i;
395 ah->config.dma_beacon_response_time = 2;
396 ah->config.sw_beacon_response_time = 10;
397 ah->config.additional_swba_backoff = 0;
398 ah->config.ack_6mb = 0x0;
399 ah->config.cwm_ignore_extcca = 0;
400 ah->config.pcie_powersave_enable = 0;
401 ah->config.pcie_clock_req = 0;
402 ah->config.pcie_waen = 0;
403 ah->config.analog_shiftreg = 1;
404 ah->config.ht_enable = 1;
405 ah->config.ofdm_trig_low = 200;
406 ah->config.ofdm_trig_high = 500;
407 ah->config.cck_trig_high = 200;
408 ah->config.cck_trig_low = 100;
409 ah->config.enable_ani = 1;
410 ah->config.diversity_control = ATH9K_ANT_VARIABLE;
411 ah->config.antenna_switch_swap = 0;
413 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
414 ah->config.spurchans[i][0] = AR_NO_SPUR;
415 ah->config.spurchans[i][1] = AR_NO_SPUR;
418 ah->config.intr_mitigation = true;
421 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
422 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
423 * This means we use it for all AR5416 devices, and the few
424 * minor PCI AR9280 devices out there.
426 * Serialization is required because these devices do not handle
427 * well the case of two concurrent reads/writes due to the latency
428 * involved. During one read/write another read/write can be issued
429 * on another CPU while the previous read/write may still be working
430 * on our hardware; if we hit this case, the hardware poops in a loop.
431 * We prevent this by serializing reads and writes.
433 * This issue is not present on PCI-Express devices or pre-AR5416
434 * devices (legacy, 802.11abg).
436 if (num_possible_cpus() > 1)
437 ah->config.serialize_regmode = SER_REG_MODE_AUTO;
440 static void ath9k_hw_init_defaults(struct ath_hw *ah)
442 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
444 regulatory->country_code = CTRY_DEFAULT;
445 regulatory->power_limit = MAX_RATE_POWER;
446 regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
448 ah->hw_version.magic = AR5416_MAGIC;
449 ah->hw_version.subvendorid = 0;
451 ah->ah_flags = 0;
452 if (ah->hw_version.devid == AR5416_AR9100_DEVID)
453 ah->hw_version.macVersion = AR_SREV_VERSION_9100;
454 if (!AR_SREV_9100(ah))
455 ah->ah_flags = AH_USE_EEPROM;
457 ah->atim_window = 0;
458 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
459 ah->beacon_interval = 100;
460 ah->enable_32kHz_clock = DONT_USE_32KHZ;
461 ah->slottime = (u32) -1;
462 ah->acktimeout = (u32) -1;
463 ah->ctstimeout = (u32) -1;
464 ah->globaltxtimeout = (u32) -1;
466 ah->gbeacon_rate = 0;
468 ah->power_mode = ATH9K_PM_UNDEFINED;
471 static int ath9k_hw_rfattach(struct ath_hw *ah)
473 bool rfStatus = false;
474 int ecode = 0;
476 rfStatus = ath9k_hw_init_rf(ah, &ecode);
477 if (!rfStatus) {
478 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
479 "RF setup failed, status: %u\n", ecode);
480 return ecode;
483 return 0;
486 static int ath9k_hw_rf_claim(struct ath_hw *ah)
488 u32 val;
490 REG_WRITE(ah, AR_PHY(0), 0x00000007);
492 val = ath9k_hw_get_radiorev(ah);
493 switch (val & AR_RADIO_SREV_MAJOR) {
494 case 0:
495 val = AR_RAD5133_SREV_MAJOR;
496 break;
497 case AR_RAD5133_SREV_MAJOR:
498 case AR_RAD5122_SREV_MAJOR:
499 case AR_RAD2133_SREV_MAJOR:
500 case AR_RAD2122_SREV_MAJOR:
501 break;
502 default:
503 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
504 "Radio Chip Rev 0x%02X not supported\n",
505 val & AR_RADIO_SREV_MAJOR);
506 return -EOPNOTSUPP;
509 ah->hw_version.analog5GhzRev = val;
511 return 0;
514 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
516 u32 sum;
517 int i;
518 u16 eeval;
520 sum = 0;
521 for (i = 0; i < 3; i++) {
522 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
523 sum += eeval;
524 ah->macaddr[2 * i] = eeval >> 8;
525 ah->macaddr[2 * i + 1] = eeval & 0xff;
527 if (sum == 0 || sum == 0xffff * 3)
528 return -EADDRNOTAVAIL;
530 return 0;
533 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
535 u32 rxgain_type;
537 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
538 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
540 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
541 INIT_INI_ARRAY(&ah->iniModesRxGain,
542 ar9280Modes_backoff_13db_rxgain_9280_2,
543 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
544 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
545 INIT_INI_ARRAY(&ah->iniModesRxGain,
546 ar9280Modes_backoff_23db_rxgain_9280_2,
547 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
548 else
549 INIT_INI_ARRAY(&ah->iniModesRxGain,
550 ar9280Modes_original_rxgain_9280_2,
551 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
552 } else {
553 INIT_INI_ARRAY(&ah->iniModesRxGain,
554 ar9280Modes_original_rxgain_9280_2,
555 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
559 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
561 u32 txgain_type;
563 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
564 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
566 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
567 INIT_INI_ARRAY(&ah->iniModesTxGain,
568 ar9280Modes_high_power_tx_gain_9280_2,
569 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
570 else
571 INIT_INI_ARRAY(&ah->iniModesTxGain,
572 ar9280Modes_original_tx_gain_9280_2,
573 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
574 } else {
575 INIT_INI_ARRAY(&ah->iniModesTxGain,
576 ar9280Modes_original_tx_gain_9280_2,
577 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
581 static int ath9k_hw_post_init(struct ath_hw *ah)
583 int ecode;
585 if (!ath9k_hw_chip_test(ah))
586 return -ENODEV;
588 ecode = ath9k_hw_rf_claim(ah);
589 if (ecode != 0)
590 return ecode;
592 ecode = ath9k_hw_eeprom_init(ah);
593 if (ecode != 0)
594 return ecode;
596 DPRINTF(ah->ah_sc, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
597 ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
599 ecode = ath9k_hw_rfattach(ah);
600 if (ecode != 0)
601 return ecode;
603 if (!AR_SREV_9100(ah)) {
604 ath9k_hw_ani_setup(ah);
605 ath9k_hw_ani_init(ah);
608 return 0;
611 static bool ath9k_hw_devid_supported(u16 devid)
613 switch (devid) {
614 case AR5416_DEVID_PCI:
615 case AR5416_DEVID_PCIE:
616 case AR5416_AR9100_DEVID:
617 case AR9160_DEVID_PCI:
618 case AR9280_DEVID_PCI:
619 case AR9280_DEVID_PCIE:
620 case AR9285_DEVID_PCIE:
621 case AR5416_DEVID_AR9287_PCI:
622 case AR5416_DEVID_AR9287_PCIE:
623 return true;
624 default:
625 break;
627 return false;
630 static bool ath9k_hw_macversion_supported(u32 macversion)
632 switch (macversion) {
633 case AR_SREV_VERSION_5416_PCI:
634 case AR_SREV_VERSION_5416_PCIE:
635 case AR_SREV_VERSION_9160:
636 case AR_SREV_VERSION_9100:
637 case AR_SREV_VERSION_9280:
638 case AR_SREV_VERSION_9285:
639 case AR_SREV_VERSION_9287:
640 return true;
641 /* Not yet */
642 case AR_SREV_VERSION_9271:
643 default:
644 break;
646 return false;
649 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
651 if (AR_SREV_9160_10_OR_LATER(ah)) {
652 if (AR_SREV_9280_10_OR_LATER(ah)) {
653 ah->iq_caldata.calData = &iq_cal_single_sample;
654 ah->adcgain_caldata.calData =
655 &adc_gain_cal_single_sample;
656 ah->adcdc_caldata.calData =
657 &adc_dc_cal_single_sample;
658 ah->adcdc_calinitdata.calData =
659 &adc_init_dc_cal;
660 } else {
661 ah->iq_caldata.calData = &iq_cal_multi_sample;
662 ah->adcgain_caldata.calData =
663 &adc_gain_cal_multi_sample;
664 ah->adcdc_caldata.calData =
665 &adc_dc_cal_multi_sample;
666 ah->adcdc_calinitdata.calData =
667 &adc_init_dc_cal;
669 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
673 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
675 if (AR_SREV_9271(ah)) {
676 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
677 ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
678 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
679 ARRAY_SIZE(ar9271Common_9271_1_0), 2);
680 return;
683 if (AR_SREV_9287_11_OR_LATER(ah)) {
684 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
685 ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
686 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
687 ARRAY_SIZE(ar9287Common_9287_1_1), 2);
688 if (ah->config.pcie_clock_req)
689 INIT_INI_ARRAY(&ah->iniPcieSerdes,
690 ar9287PciePhy_clkreq_off_L1_9287_1_1,
691 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
692 else
693 INIT_INI_ARRAY(&ah->iniPcieSerdes,
694 ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
695 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
697 } else if (AR_SREV_9287_10_OR_LATER(ah)) {
698 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
699 ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
700 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
701 ARRAY_SIZE(ar9287Common_9287_1_0), 2);
703 if (ah->config.pcie_clock_req)
704 INIT_INI_ARRAY(&ah->iniPcieSerdes,
705 ar9287PciePhy_clkreq_off_L1_9287_1_0,
706 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
707 else
708 INIT_INI_ARRAY(&ah->iniPcieSerdes,
709 ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
710 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
712 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
715 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
716 ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
717 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
718 ARRAY_SIZE(ar9285Common_9285_1_2), 2);
720 if (ah->config.pcie_clock_req) {
721 INIT_INI_ARRAY(&ah->iniPcieSerdes,
722 ar9285PciePhy_clkreq_off_L1_9285_1_2,
723 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
724 } else {
725 INIT_INI_ARRAY(&ah->iniPcieSerdes,
726 ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
727 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
730 } else if (AR_SREV_9285_10_OR_LATER(ah)) {
731 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
732 ARRAY_SIZE(ar9285Modes_9285), 6);
733 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
734 ARRAY_SIZE(ar9285Common_9285), 2);
736 if (ah->config.pcie_clock_req) {
737 INIT_INI_ARRAY(&ah->iniPcieSerdes,
738 ar9285PciePhy_clkreq_off_L1_9285,
739 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
740 } else {
741 INIT_INI_ARRAY(&ah->iniPcieSerdes,
742 ar9285PciePhy_clkreq_always_on_L1_9285,
743 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
745 } else if (AR_SREV_9280_20_OR_LATER(ah)) {
746 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
747 ARRAY_SIZE(ar9280Modes_9280_2), 6);
748 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
749 ARRAY_SIZE(ar9280Common_9280_2), 2);
751 if (ah->config.pcie_clock_req) {
752 INIT_INI_ARRAY(&ah->iniPcieSerdes,
753 ar9280PciePhy_clkreq_off_L1_9280,
754 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
755 } else {
756 INIT_INI_ARRAY(&ah->iniPcieSerdes,
757 ar9280PciePhy_clkreq_always_on_L1_9280,
758 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
760 INIT_INI_ARRAY(&ah->iniModesAdditional,
761 ar9280Modes_fast_clock_9280_2,
762 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
763 } else if (AR_SREV_9280_10_OR_LATER(ah)) {
764 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
765 ARRAY_SIZE(ar9280Modes_9280), 6);
766 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
767 ARRAY_SIZE(ar9280Common_9280), 2);
768 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
769 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
770 ARRAY_SIZE(ar5416Modes_9160), 6);
771 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
772 ARRAY_SIZE(ar5416Common_9160), 2);
773 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
774 ARRAY_SIZE(ar5416Bank0_9160), 2);
775 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
776 ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
777 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
778 ARRAY_SIZE(ar5416Bank1_9160), 2);
779 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
780 ARRAY_SIZE(ar5416Bank2_9160), 2);
781 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
782 ARRAY_SIZE(ar5416Bank3_9160), 3);
783 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
784 ARRAY_SIZE(ar5416Bank6_9160), 3);
785 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
786 ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
787 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
788 ARRAY_SIZE(ar5416Bank7_9160), 2);
789 if (AR_SREV_9160_11(ah)) {
790 INIT_INI_ARRAY(&ah->iniAddac,
791 ar5416Addac_91601_1,
792 ARRAY_SIZE(ar5416Addac_91601_1), 2);
793 } else {
794 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
795 ARRAY_SIZE(ar5416Addac_9160), 2);
797 } else if (AR_SREV_9100_OR_LATER(ah)) {
798 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
799 ARRAY_SIZE(ar5416Modes_9100), 6);
800 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
801 ARRAY_SIZE(ar5416Common_9100), 2);
802 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
803 ARRAY_SIZE(ar5416Bank0_9100), 2);
804 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
805 ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
806 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
807 ARRAY_SIZE(ar5416Bank1_9100), 2);
808 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
809 ARRAY_SIZE(ar5416Bank2_9100), 2);
810 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
811 ARRAY_SIZE(ar5416Bank3_9100), 3);
812 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
813 ARRAY_SIZE(ar5416Bank6_9100), 3);
814 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
815 ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
816 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
817 ARRAY_SIZE(ar5416Bank7_9100), 2);
818 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
819 ARRAY_SIZE(ar5416Addac_9100), 2);
820 } else {
821 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
822 ARRAY_SIZE(ar5416Modes), 6);
823 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
824 ARRAY_SIZE(ar5416Common), 2);
825 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
826 ARRAY_SIZE(ar5416Bank0), 2);
827 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
828 ARRAY_SIZE(ar5416BB_RfGain), 3);
829 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
830 ARRAY_SIZE(ar5416Bank1), 2);
831 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
832 ARRAY_SIZE(ar5416Bank2), 2);
833 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
834 ARRAY_SIZE(ar5416Bank3), 3);
835 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
836 ARRAY_SIZE(ar5416Bank6), 3);
837 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
838 ARRAY_SIZE(ar5416Bank6TPC), 3);
839 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
840 ARRAY_SIZE(ar5416Bank7), 2);
841 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
842 ARRAY_SIZE(ar5416Addac), 2);
846 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
848 if (AR_SREV_9287_11(ah))
849 INIT_INI_ARRAY(&ah->iniModesRxGain,
850 ar9287Modes_rx_gain_9287_1_1,
851 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
852 else if (AR_SREV_9287_10(ah))
853 INIT_INI_ARRAY(&ah->iniModesRxGain,
854 ar9287Modes_rx_gain_9287_1_0,
855 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
856 else if (AR_SREV_9280_20(ah))
857 ath9k_hw_init_rxgain_ini(ah);
859 if (AR_SREV_9287_11(ah)) {
860 INIT_INI_ARRAY(&ah->iniModesTxGain,
861 ar9287Modes_tx_gain_9287_1_1,
862 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
863 } else if (AR_SREV_9287_10(ah)) {
864 INIT_INI_ARRAY(&ah->iniModesTxGain,
865 ar9287Modes_tx_gain_9287_1_0,
866 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
867 } else if (AR_SREV_9280_20(ah)) {
868 ath9k_hw_init_txgain_ini(ah);
869 } else if (AR_SREV_9285_12_OR_LATER(ah)) {
870 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
872 /* txgain table */
873 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
874 INIT_INI_ARRAY(&ah->iniModesTxGain,
875 ar9285Modes_high_power_tx_gain_9285_1_2,
876 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
877 } else {
878 INIT_INI_ARRAY(&ah->iniModesTxGain,
879 ar9285Modes_original_tx_gain_9285_1_2,
880 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
886 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
888 u32 i, j;
890 if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
891 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
893 /* EEPROM Fixup */
894 for (i = 0; i < ah->iniModes.ia_rows; i++) {
895 u32 reg = INI_RA(&ah->iniModes, i, 0);
897 for (j = 1; j < ah->iniModes.ia_columns; j++) {
898 u32 val = INI_RA(&ah->iniModes, i, j);
900 INI_RA(&ah->iniModes, i, j) =
901 ath9k_hw_ini_fixup(ah,
902 &ah->eeprom.def,
903 reg, val);
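/*
 * Main attach-time initialization: validate the device ID and MAC revision,
 * power-on reset and wake the chip, pick the register serialization mode,
 * load the per-chip INI and calibration tables, initialize the EEPROM, RF
 * and MAC address, and set the default TX FIFO trigger level.
 */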
909 int ath9k_hw_init(struct ath_hw *ah)
911 int r = 0;
913 if (!ath9k_hw_devid_supported(ah->hw_version.devid))
914 return -EOPNOTSUPP;
916 ath9k_hw_init_defaults(ah);
917 ath9k_hw_init_config(ah);
919 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
920 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't reset chip\n");
921 return -EIO;
924 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
925 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
926 return -EIO;
929 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
930 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
931 (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
932 ah->config.serialize_regmode =
933 SER_REG_MODE_ON;
934 } else {
935 ah->config.serialize_regmode =
936 SER_REG_MODE_OFF;
940 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "serialize_regmode is %d\n",
941 ah->config.serialize_regmode);
943 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
944 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
945 "Mac Chip Rev 0x%02x.%x is not supported by "
946 "this driver\n", ah->hw_version.macVersion,
947 ah->hw_version.macRev);
948 return -EOPNOTSUPP;
951 if (AR_SREV_9100(ah)) {
952 ah->iq_caldata.calData = &iq_cal_multi_sample;
953 ah->supp_cals = IQ_MISMATCH_CAL;
954 ah->is_pciexpress = false;
957 if (AR_SREV_9271(ah))
958 ah->is_pciexpress = false;
960 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
962 ath9k_hw_init_cal_settings(ah);
964 ah->ani_function = ATH9K_ANI_ALL;
965 if (AR_SREV_9280_10_OR_LATER(ah))
966 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
968 ath9k_hw_init_mode_regs(ah);
970 if (ah->is_pciexpress)
971 ath9k_hw_configpcipowersave(ah, 0);
972 else
973 ath9k_hw_disablepcie(ah);
975 r = ath9k_hw_post_init(ah);
976 if (r)
977 return r;
979 ath9k_hw_init_mode_gain_regs(ah);
980 ath9k_hw_fill_cap_info(ah);
981 ath9k_hw_init_11a_eeprom_fix(ah);
983 r = ath9k_hw_init_macaddr(ah);
984 if (r) {
985 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
986 "Failed to initialize MAC address\n");
987 return r;
990 if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
991 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
992 else
993 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
995 ath9k_init_nfcal_hist_buffer(ah);
997 return 0;
1000 static void ath9k_hw_init_bb(struct ath_hw *ah,
1001 struct ath9k_channel *chan)
1003 u32 synthDelay;
1005 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1006 if (IS_CHAN_B(chan))
1007 synthDelay = (4 * synthDelay) / 22;
1008 else
1009 synthDelay /= 10;
1011 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1013 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1016 static void ath9k_hw_init_qos(struct ath_hw *ah)
1018 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1019 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1021 REG_WRITE(ah, AR_QOS_NO_ACK,
1022 SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1023 SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1024 SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1026 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1027 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1028 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1029 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1030 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
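/*
 * Program AR_RTC_PLL_CONTROL for the target channel: the reference divider,
 * clock select (full/half/quarter rate) and PLL divider depend on the chip
 * family and on whether the channel is 2 GHz or 5 GHz. After the PLL
 * settles, the sleep clock is forced to the derived clock.
 */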
1033 static void ath9k_hw_init_pll(struct ath_hw *ah,
1034 struct ath9k_channel *chan)
1036 u32 pll;
1038 if (AR_SREV_9100(ah)) {
1039 if (chan && IS_CHAN_5GHZ(chan))
1040 pll = 0x1450;
1041 else
1042 pll = 0x1458;
1043 } else {
1044 if (AR_SREV_9280_10_OR_LATER(ah)) {
1045 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1047 if (chan && IS_CHAN_HALF_RATE(chan))
1048 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1049 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1050 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1052 if (chan && IS_CHAN_5GHZ(chan)) {
1053 pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1056 if (AR_SREV_9280_20(ah)) {
1057 if (((chan->channel % 20) == 0)
1058 || ((chan->channel % 10) == 0))
1059 pll = 0x2850;
1060 else
1061 pll = 0x142c;
1063 } else {
1064 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1067 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
1069 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1071 if (chan && IS_CHAN_HALF_RATE(chan))
1072 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1073 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1074 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1076 if (chan && IS_CHAN_5GHZ(chan))
1077 pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1078 else
1079 pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1080 } else {
1081 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1083 if (chan && IS_CHAN_HALF_RATE(chan))
1084 pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1085 else if (chan && IS_CHAN_QUARTER_RATE(chan))
1086 pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1088 if (chan && IS_CHAN_5GHZ(chan))
1089 pll |= SM(0xa, AR_RTC_PLL_DIV);
1090 else
1091 pll |= SM(0xb, AR_RTC_PLL_DIV);
1094 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1096 udelay(RTC_PLL_SETTLE_DELAY);
1098 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
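/*
 * Program the RX/CAL and TX chain masks. A 0x5 mask (chains 0 and 2) needs
 * the analog chain-swap bit; with a 0x3 mask on AR5416/AR9160-class MACs,
 * all three chains (0x7) are enabled for RX and calibration.
 */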
1101 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1103 int rx_chainmask, tx_chainmask;
1105 rx_chainmask = ah->rxchainmask;
1106 tx_chainmask = ah->txchainmask;
1108 switch (rx_chainmask) {
1109 case 0x5:
1110 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1111 AR_PHY_SWAP_ALT_CHAIN);
1112 case 0x3:
1113 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1114 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1115 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1116 break;
1118 case 0x1:
1119 case 0x2:
1120 case 0x7:
1121 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1122 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1123 break;
1124 default:
1125 break;
1128 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1129 if (tx_chainmask == 0x5) {
1130 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1131 AR_PHY_SWAP_ALT_CHAIN);
1133 if (AR_SREV_9100(ah))
1134 REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1135 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
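/*
 * Build the primary interrupt mask: TX/RX error conditions and beacon misc
 * always, RX interrupt mitigation (RXINTM/RXMINTR) or plain RXOK depending
 * on configuration, TXOK, and MIB interrupts in AP mode. GTT is enabled in
 * IMR_S2; on non-AR9100 parts the sync interrupt cause is cleared, the
 * enable set to its default and the mask zeroed.
 */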
1138 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1139 enum nl80211_iftype opmode)
1141 ah->mask_reg = AR_IMR_TXERR |
1142 AR_IMR_TXURN |
1143 AR_IMR_RXERR |
1144 AR_IMR_RXORN |
1145 AR_IMR_BCNMISC;
1147 if (ah->config.intr_mitigation)
1148 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1149 else
1150 ah->mask_reg |= AR_IMR_RXOK;
1152 ah->mask_reg |= AR_IMR_TXOK;
1154 if (opmode == NL80211_IFTYPE_AP)
1155 ah->mask_reg |= AR_IMR_MIB;
1157 REG_WRITE(ah, AR_IMR, ah->mask_reg);
1158 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1160 if (!AR_SREV_9100(ah)) {
1161 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1162 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1163 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1167 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1169 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1170 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1171 ah->acktimeout = (u32) -1;
1172 return false;
1173 } else {
1174 REG_RMW_FIELD(ah, AR_TIME_OUT,
1175 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1176 ah->acktimeout = us;
1177 return true;
1181 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1183 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1184 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1185 ah->ctstimeout = (u32) -1;
1186 return false;
1187 } else {
1188 REG_RMW_FIELD(ah, AR_TIME_OUT,
1189 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1190 ah->ctstimeout = us;
1191 return true;
1195 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1197 if (tu > 0xFFFF) {
1198 DPRINTF(ah->ah_sc, ATH_DBG_XMIT,
1199 "bad global tx timeout %u\n", tu);
1200 ah->globaltxtimeout = (u32) -1;
1201 return false;
1202 } else {
1203 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1204 ah->globaltxtimeout = tu;
1205 return true;
1209 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1211 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1212 ah->misc_mode);
1214 if (ah->misc_mode != 0)
1215 REG_WRITE(ah, AR_PCU_MISC,
1216 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1217 if (ah->slottime != (u32) -1)
1218 ath9k_hw_setslottime(ah, ah->slottime);
1219 if (ah->acktimeout != (u32) -1)
1220 ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1221 if (ah->ctstimeout != (u32) -1)
1222 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1223 if (ah->globaltxtimeout != (u32) -1)
1224 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1227 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1229 return vendorid == ATHEROS_VENDOR_ID ?
1230 ath9k_hw_devname(devid) : NULL;
1233 void ath9k_hw_detach(struct ath_hw *ah)
1235 if (!AR_SREV_9100(ah))
1236 ath9k_hw_ani_disable(ah);
1238 ath9k_hw_rf_free(ah);
1239 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1240 kfree(ah);
1241 ah = NULL;
1244 /*******/
1245 /* INI */
1246 /*******/
1248 static void ath9k_hw_override_ini(struct ath_hw *ah,
1249 struct ath9k_channel *chan)
1251 u32 val;
1253 if (AR_SREV_9271(ah)) {
1255 * Enable spectral scan as a workaround for issues with stuck
1256 * beacons on AR9271 1.0. The stuck beacon issue is not seen on
1257 * AR9271 1.1
1259 if (AR_SREV_9271_10(ah)) {
1260 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1261 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1263 else if (AR_SREV_9271_11(ah))
1265 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1266 * present on AR9271 1.1
1268 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1269 return;
1273 * Set the RX_ABORT and RX_DIS bits and clear them only after
1274 * RXE is set for the MAC. This prevents frames with corrupted
1275 * descriptor status.
1277 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1280 if (!AR_SREV_5416_20_OR_LATER(ah) ||
1281 AR_SREV_9280_10_OR_LATER(ah))
1282 return;
1284 * Disable BB clock gating
1285 * Necessary to avoid issues on AR5416 2.0
1287 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1290 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1291 struct ar5416_eeprom_def *pEepData,
1292 u32 reg, u32 value)
1294 struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1296 switch (ah->hw_version.devid) {
1297 case AR9280_DEVID_PCI:
1298 if (reg == 0x7894) {
1299 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1300 "ini VAL: %x EEPROM: %x\n", value,
1301 (pBase->version & 0xff));
1303 if ((pBase->version & 0xff) > 0x0a) {
1304 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1305 "PWDCLKIND: %d\n",
1306 pBase->pwdclkind);
1307 value &= ~AR_AN_TOP2_PWDCLKIND;
1308 value |= AR_AN_TOP2_PWDCLKIND &
1309 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1310 } else {
1311 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1312 "PWDCLKIND Earlier Rev\n");
1315 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1316 "final ini VAL: %x\n", value);
1318 break;
1321 return value;
1324 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1325 struct ar5416_eeprom_def *pEepData,
1326 u32 reg, u32 value)
1328 if (ah->eep_map == EEP_MAP_4KBITS)
1329 return value;
1330 else
1331 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1334 static void ath9k_olc_init(struct ath_hw *ah)
1336 u32 i;
1338 if (OLC_FOR_AR9287_10_LATER) {
1339 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1340 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1341 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1342 AR9287_AN_TXPC0_TXPCMODE,
1343 AR9287_AN_TXPC0_TXPCMODE_S,
1344 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1345 udelay(100);
1346 } else {
1347 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1348 ah->originalGain[i] =
1349 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1350 AR_PHY_TX_GAIN);
1351 ah->PDADCdelta = 0;
1355 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1356 struct ath9k_channel *chan)
1358 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1360 if (IS_CHAN_B(chan))
1361 ctl |= CTL_11B;
1362 else if (IS_CHAN_G(chan))
1363 ctl |= CTL_11G;
1364 else
1365 ctl |= CTL_11A;
1367 return ctl;
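/*
 * Apply the initialization tables for the target channel: pick the mode and
 * frequency columns from the channel mode, write the ADDAC, mode-dependent
 * and common registers (inserting 100 us delays for the analog shift
 * registers at 0x7800-0x789c), the RX/TX gain tables where applicable, then
 * the INI overrides, chain masks, open-loop power control setup and the
 * regulatory TX power for the channel.
 */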
1370 static int ath9k_hw_process_ini(struct ath_hw *ah,
1371 struct ath9k_channel *chan,
1372 enum ath9k_ht_macmode macmode)
1374 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1375 int i, regWrites = 0;
1376 struct ieee80211_channel *channel = chan->chan;
1377 u32 modesIndex, freqIndex;
1379 switch (chan->chanmode) {
1380 case CHANNEL_A:
1381 case CHANNEL_A_HT20:
1382 modesIndex = 1;
1383 freqIndex = 1;
1384 break;
1385 case CHANNEL_A_HT40PLUS:
1386 case CHANNEL_A_HT40MINUS:
1387 modesIndex = 2;
1388 freqIndex = 1;
1389 break;
1390 case CHANNEL_G:
1391 case CHANNEL_G_HT20:
1392 case CHANNEL_B:
1393 modesIndex = 4;
1394 freqIndex = 2;
1395 break;
1396 case CHANNEL_G_HT40PLUS:
1397 case CHANNEL_G_HT40MINUS:
1398 modesIndex = 3;
1399 freqIndex = 2;
1400 break;
1402 default:
1403 return -EINVAL;
1406 REG_WRITE(ah, AR_PHY(0), 0x00000007);
1407 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1408 ah->eep_ops->set_addac(ah, chan);
1410 if (AR_SREV_5416_22_OR_LATER(ah)) {
1411 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1412 } else {
1413 struct ar5416IniArray temp;
1414 u32 addacSize =
1415 sizeof(u32) * ah->iniAddac.ia_rows *
1416 ah->iniAddac.ia_columns;
1418 memcpy(ah->addac5416_21,
1419 ah->iniAddac.ia_array, addacSize);
1421 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1423 temp.ia_array = ah->addac5416_21;
1424 temp.ia_columns = ah->iniAddac.ia_columns;
1425 temp.ia_rows = ah->iniAddac.ia_rows;
1426 REG_WRITE_ARRAY(&temp, 1, regWrites);
1429 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1431 for (i = 0; i < ah->iniModes.ia_rows; i++) {
1432 u32 reg = INI_RA(&ah->iniModes, i, 0);
1433 u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1435 REG_WRITE(ah, reg, val);
1437 if (reg >= 0x7800 && reg < 0x78a0
1438 && ah->config.analog_shiftreg) {
1439 udelay(100);
1442 DO_DELAY(regWrites);
1445 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1446 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1448 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1449 AR_SREV_9287_10_OR_LATER(ah))
1450 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1452 for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1453 u32 reg = INI_RA(&ah->iniCommon, i, 0);
1454 u32 val = INI_RA(&ah->iniCommon, i, 1);
1456 REG_WRITE(ah, reg, val);
1458 if (reg >= 0x7800 && reg < 0x78a0
1459 && ah->config.analog_shiftreg) {
1460 udelay(100);
1463 DO_DELAY(regWrites);
1466 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1468 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1469 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1470 regWrites);
1473 ath9k_hw_override_ini(ah, chan);
1474 ath9k_hw_set_regs(ah, chan, macmode);
1475 ath9k_hw_init_chain_masks(ah);
1477 if (OLC_FOR_AR9280_20_LATER)
1478 ath9k_olc_init(ah);
1480 ah->eep_ops->set_txpower(ah, chan,
1481 ath9k_regd_get_ctl(regulatory, chan),
1482 channel->max_antenna_gain * 2,
1483 channel->max_power * 2,
1484 min((u32) MAX_RATE_POWER,
1485 (u32) regulatory->power_limit));
1487 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1488 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1489 "ar5416SetRfRegs failed\n");
1490 return -EIO;
1493 return 0;
1496 /****************************************/
1497 /* Reset and Channel Switching Routines */
1498 /****************************************/
1500 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1502 u32 rfMode = 0;
1504 if (chan == NULL)
1505 return;
1507 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1508 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1510 if (!AR_SREV_9280_10_OR_LATER(ah))
1511 rfMode |= (IS_CHAN_5GHZ(chan)) ?
1512 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1514 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1515 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1517 REG_WRITE(ah, AR_PHY_MODE, rfMode);
1520 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1522 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1525 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1527 u32 regval;
1530 * set AHB_MODE not to do cacheline prefetches
1532 regval = REG_READ(ah, AR_AHB_MODE);
1533 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1536 * let mac dma reads be in 128 byte chunks
1538 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1539 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1542 * Restore TX Trigger Level to its pre-reset value.
1543 * The initial value depends on whether aggregation is enabled, and is
1544 * adjusted whenever underruns are detected.
1546 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1549 * let mac dma writes be in 128 byte chunks
1551 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1552 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1555 * Setup receive FIFO threshold to hold off TX activities
1557 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1560 * reduce the number of usable entries in PCU TXBUF to avoid
1561 * wrap around issues.
1563 if (AR_SREV_9285(ah)) {
1564 /* For AR9285 the number of FIFOs is reduced to half.
1565 * So set the usable tx buf size also to half to
1566 * avoid data/delimiter underruns.
1568 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1569 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1570 } else if (!AR_SREV_9271(ah)) {
1571 REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1572 AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1576 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1578 u32 val;
1580 val = REG_READ(ah, AR_STA_ID1);
1581 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1582 switch (opmode) {
1583 case NL80211_IFTYPE_AP:
1584 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1585 | AR_STA_ID1_KSRCH_MODE);
1586 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1587 break;
1588 case NL80211_IFTYPE_ADHOC:
1589 case NL80211_IFTYPE_MESH_POINT:
1590 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1591 | AR_STA_ID1_KSRCH_MODE);
1592 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1593 break;
1594 case NL80211_IFTYPE_STATION:
1595 case NL80211_IFTYPE_MONITOR:
1596 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1597 break;
1601 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1602 u32 coef_scaled,
1603 u32 *coef_mantissa,
1604 u32 *coef_exponent)
1606 u32 coef_exp, coef_man;
1608 for (coef_exp = 31; coef_exp > 0; coef_exp--)
1609 if ((coef_scaled >> coef_exp) & 0x1)
1610 break;
1612 coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1614 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1616 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1617 *coef_exponent = coef_exp - 16;
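/*
 * Program the delta-slope coefficients used by the baseband for OFDM timing.
 * coef_scaled is the channel clock in MHz scaled by 2^24 (0x64000000 is
 * 100 << 24; halved/quartered for half/quarter-rate channels) divided by
 * the synthesizer center frequency. It is split into a mantissa/exponent
 * pair for AR_PHY_TIMING3, and 9/10 of it is programmed as the half-GI
 * coefficient in AR_PHY_HALFGI.
 */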
1620 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1621 struct ath9k_channel *chan)
1623 u32 coef_scaled, ds_coef_exp, ds_coef_man;
1624 u32 clockMhzScaled = 0x64000000;
1625 struct chan_centers centers;
1627 if (IS_CHAN_HALF_RATE(chan))
1628 clockMhzScaled = clockMhzScaled >> 1;
1629 else if (IS_CHAN_QUARTER_RATE(chan))
1630 clockMhzScaled = clockMhzScaled >> 2;
1632 ath9k_hw_get_channel_centers(ah, chan, &centers);
1633 coef_scaled = clockMhzScaled / centers.synth_center;
1635 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1636 &ds_coef_exp);
1638 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1639 AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1640 REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1641 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1643 coef_scaled = (9 * coef_scaled) / 10;
1645 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1646 &ds_coef_exp);
1648 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1649 AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1650 REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1651 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
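/*
 * Warm or cold MAC reset: force the RTC awake, assert the requested reset
 * bits in AR_RTC_RC (plus the AHB/host-interface reset on non-AR9100 when a
 * pending bus timeout is detected), wait 50 us, deassert, then poll for the
 * RTC to leave reset and re-initialize the PLL.
 */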
1654 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1656 u32 rst_flags;
1657 u32 tmpReg;
1659 if (AR_SREV_9100(ah)) {
1660 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1661 val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1662 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1663 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1664 (void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1667 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1668 AR_RTC_FORCE_WAKE_ON_INT);
1670 if (AR_SREV_9100(ah)) {
1671 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1672 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1673 } else {
1674 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1675 if (tmpReg &
1676 (AR_INTR_SYNC_LOCAL_TIMEOUT |
1677 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1678 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1679 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1680 } else {
1681 REG_WRITE(ah, AR_RC, AR_RC_AHB);
1684 rst_flags = AR_RTC_RC_MAC_WARM;
1685 if (type == ATH9K_RESET_COLD)
1686 rst_flags |= AR_RTC_RC_MAC_COLD;
1689 REG_WRITE(ah, AR_RTC_RC, rst_flags);
1690 udelay(50);
1692 REG_WRITE(ah, AR_RTC_RC, 0);
1693 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1694 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
1695 "RTC stuck in MAC reset\n");
1696 return false;
1699 if (!AR_SREV_9100(ah))
1700 REG_WRITE(ah, AR_RC, 0);
1702 ath9k_hw_init_pll(ah, NULL);
1704 if (AR_SREV_9100(ah))
1705 udelay(50);
1707 return true;
1710 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1712 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1713 AR_RTC_FORCE_WAKE_ON_INT);
1715 REG_WRITE(ah, AR_RTC_RESET, 0);
1716 udelay(2);
1717 REG_WRITE(ah, AR_RTC_RESET, 1);
1719 if (!ath9k_hw_wait(ah,
1720 AR_RTC_STATUS,
1721 AR_RTC_STATUS_M,
1722 AR_RTC_STATUS_ON,
1723 AH_WAIT_TIMEOUT)) {
1724 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "RTC not waking up\n");
1725 return false;
1728 ath9k_hw_read_revisions(ah);
1730 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1733 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1735 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1736 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1738 switch (type) {
1739 case ATH9K_RESET_POWER_ON:
1740 return ath9k_hw_set_reset_power_on(ah);
1741 case ATH9K_RESET_WARM:
1742 case ATH9K_RESET_COLD:
1743 return ath9k_hw_set_reset(ah, type);
1744 default:
1745 return false;
1749 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1750 enum ath9k_ht_macmode macmode)
1752 u32 phymode;
1753 u32 enableDacFifo = 0;
1755 if (AR_SREV_9285_10_OR_LATER(ah))
1756 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1757 AR_PHY_FC_ENABLE_DAC_FIFO);
1759 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1760 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1762 if (IS_CHAN_HT40(chan)) {
1763 phymode |= AR_PHY_FC_DYN2040_EN;
1765 if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1766 (chan->chanmode == CHANNEL_G_HT40PLUS))
1767 phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1769 if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1770 phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1772 REG_WRITE(ah, AR_PHY_TURBO, phymode);
1774 ath9k_hw_set11nmac2040(ah, macmode);
1776 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1777 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1780 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1781 struct ath9k_channel *chan)
1783 if (OLC_FOR_AR9280_20_LATER) {
1784 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1785 return false;
1786 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1787 return false;
1789 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1790 return false;
1792 ah->chip_fullsleep = false;
1793 ath9k_hw_init_pll(ah, chan);
1794 ath9k_hw_set_rfmode(ah, chan);
1796 return true;
1799 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1800 struct ath9k_channel *chan,
1801 enum ath9k_ht_macmode macmode)
1803 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1804 struct ieee80211_channel *channel = chan->chan;
1805 u32 synthDelay, qnum;
1807 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1808 if (ath9k_hw_numtxpending(ah, qnum)) {
1809 DPRINTF(ah->ah_sc, ATH_DBG_QUEUE,
1810 "Transmit frames pending on queue %d\n", qnum);
1811 return false;
1815 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1816 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1817 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1818 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1819 "Could not kill baseband RX\n");
1820 return false;
1823 ath9k_hw_set_regs(ah, chan, macmode);
1825 if (AR_SREV_9280_10_OR_LATER(ah)) {
1826 ath9k_hw_ar9280_set_channel(ah, chan);
1827 } else {
1828 if (!(ath9k_hw_set_channel(ah, chan))) {
1829 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1830 "Failed to set channel\n");
1831 return false;
1835 ah->eep_ops->set_txpower(ah, chan,
1836 ath9k_regd_get_ctl(regulatory, chan),
1837 channel->max_antenna_gain * 2,
1838 channel->max_power * 2,
1839 min((u32) MAX_RATE_POWER,
1840 (u32) regulatory->power_limit));
1842 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1843 if (IS_CHAN_B(chan))
1844 synthDelay = (4 * synthDelay) / 22;
1845 else
1846 synthDelay /= 10;
1848 udelay(synthDelay + BASE_ACTIVATE_DELAY);
1850 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1852 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1853 ath9k_hw_set_delta_slope(ah, chan);
1855 if (AR_SREV_9280_10_OR_LATER(ah))
1856 ath9k_hw_9280_spur_mitigate(ah, chan);
1857 else
1858 ath9k_hw_spur_mitigate(ah, chan);
1860 if (!chan->oneTimeCalsDone)
1861 chan->oneTimeCalsDone = true;
1863 return true;
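/*
 * AR9280 spur mitigation: scan the EEPROM spur channel list for a spur that
 * falls within the HT20/HT40 bound around the synthesizer center. If one is
 * found, enable the spur RSSI/filter logic, program AR_PHY_TIMING11 with the
 * spur delta phase and SD frequency, and build the pilot, channel and
 * Viterbi bin masks that notch out the subcarriers around the spur.
 */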
1866 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1868 int bb_spur = AR_NO_SPUR;
1869 int freq;
1870 int bin, cur_bin;
1871 int bb_spur_off, spur_subchannel_sd;
1872 int spur_freq_sd;
1873 int spur_delta_phase;
1874 int denominator;
1875 int upper, lower, cur_vit_mask;
1876 int tmp, newVal;
1877 int i;
1878 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1879 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1881 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1882 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1884 int inc[4] = { 0, 100, 0, 0 };
1885 struct chan_centers centers;
1887 int8_t mask_m[123];
1888 int8_t mask_p[123];
1889 int8_t mask_amt;
1890 int tmp_mask;
1891 int cur_bb_spur;
1892 bool is2GHz = IS_CHAN_2GHZ(chan);
1894 memset(&mask_m, 0, sizeof(int8_t) * 123);
1895 memset(&mask_p, 0, sizeof(int8_t) * 123);
1897 ath9k_hw_get_channel_centers(ah, chan, &centers);
1898 freq = centers.synth_center;
1900 ah->config.spurmode = SPUR_ENABLE_EEPROM;
1901 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1902 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1904 if (is2GHz)
1905 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1906 else
1907 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1909 if (AR_NO_SPUR == cur_bb_spur)
1910 break;
1911 cur_bb_spur = cur_bb_spur - freq;
1913 if (IS_CHAN_HT40(chan)) {
1914 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1915 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1916 bb_spur = cur_bb_spur;
1917 break;
1919 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1920 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1921 bb_spur = cur_bb_spur;
1922 break;
1926 if (AR_NO_SPUR == bb_spur) {
1927 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1928 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1929 return;
1930 } else {
1931 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1932 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1935 bin = bb_spur * 320;
1937 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1939 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1940 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1941 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1942 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1943 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1945 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1946 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1947 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1948 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1949 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1950 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1952 if (IS_CHAN_HT40(chan)) {
1953 if (bb_spur < 0) {
1954 spur_subchannel_sd = 1;
1955 bb_spur_off = bb_spur + 10;
1956 } else {
1957 spur_subchannel_sd = 0;
1958 bb_spur_off = bb_spur - 10;
1960 } else {
1961 spur_subchannel_sd = 0;
1962 bb_spur_off = bb_spur;
1965 if (IS_CHAN_HT40(chan))
1966 spur_delta_phase =
1967 ((bb_spur * 262144) /
1968 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1969 else
1970 spur_delta_phase =
1971 ((bb_spur * 524288) /
1972 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1974 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1975 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1977 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1978 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1979 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1980 REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1982 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1983 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1985 cur_bin = -6000;
1986 upper = bin + 100;
1987 lower = bin - 100;
1989 for (i = 0; i < 4; i++) {
1990 int pilot_mask = 0;
1991 int chan_mask = 0;
1992 int bp = 0;
1993 for (bp = 0; bp < 30; bp++) {
1994 if ((cur_bin > lower) && (cur_bin < upper)) {
1995 pilot_mask = pilot_mask | 0x1 << bp;
1996 chan_mask = chan_mask | 0x1 << bp;
1998 cur_bin += 100;
2000 cur_bin += inc[i];
2001 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2002 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2005 cur_vit_mask = 6100;
2006 upper = bin + 120;
2007 lower = bin - 120;
2009 for (i = 0; i < 123; i++) {
2010 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2012 /* workaround for gcc bug #37014 */
2013 volatile int tmp_v = abs(cur_vit_mask - bin);
2015 if (tmp_v < 75)
2016 mask_amt = 1;
2017 else
2018 mask_amt = 0;
2019 if (cur_vit_mask < 0)
2020 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2021 else
2022 mask_p[cur_vit_mask / 100] = mask_amt;
2024 cur_vit_mask -= 100;
2027 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2028 | (mask_m[48] << 26) | (mask_m[49] << 24)
2029 | (mask_m[50] << 22) | (mask_m[51] << 20)
2030 | (mask_m[52] << 18) | (mask_m[53] << 16)
2031 | (mask_m[54] << 14) | (mask_m[55] << 12)
2032 | (mask_m[56] << 10) | (mask_m[57] << 8)
2033 | (mask_m[58] << 6) | (mask_m[59] << 4)
2034 | (mask_m[60] << 2) | (mask_m[61] << 0);
2035 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2036 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2038 tmp_mask = (mask_m[31] << 28)
2039 | (mask_m[32] << 26) | (mask_m[33] << 24)
2040 | (mask_m[34] << 22) | (mask_m[35] << 20)
2041 | (mask_m[36] << 18) | (mask_m[37] << 16)
2042 | (mask_m[48] << 14) | (mask_m[39] << 12)
2043 | (mask_m[40] << 10) | (mask_m[41] << 8)
2044 | (mask_m[42] << 6) | (mask_m[43] << 4)
2045 | (mask_m[44] << 2) | (mask_m[45] << 0);
2046 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2047 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2049 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2050 | (mask_m[18] << 26) | (mask_m[18] << 24)
2051 | (mask_m[20] << 22) | (mask_m[20] << 20)
2052 | (mask_m[22] << 18) | (mask_m[22] << 16)
2053 | (mask_m[24] << 14) | (mask_m[24] << 12)
2054 | (mask_m[25] << 10) | (mask_m[26] << 8)
2055 | (mask_m[27] << 6) | (mask_m[28] << 4)
2056 | (mask_m[29] << 2) | (mask_m[30] << 0);
2057 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2058 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2060 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2061 | (mask_m[2] << 26) | (mask_m[3] << 24)
2062 | (mask_m[4] << 22) | (mask_m[5] << 20)
2063 | (mask_m[6] << 18) | (mask_m[7] << 16)
2064 | (mask_m[8] << 14) | (mask_m[9] << 12)
2065 | (mask_m[10] << 10) | (mask_m[11] << 8)
2066 | (mask_m[12] << 6) | (mask_m[13] << 4)
2067 | (mask_m[14] << 2) | (mask_m[15] << 0);
2068 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2069 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2071 tmp_mask = (mask_p[15] << 28)
2072 | (mask_p[14] << 26) | (mask_p[13] << 24)
2073 | (mask_p[12] << 22) | (mask_p[11] << 20)
2074 | (mask_p[10] << 18) | (mask_p[9] << 16)
2075 | (mask_p[8] << 14) | (mask_p[7] << 12)
2076 | (mask_p[6] << 10) | (mask_p[5] << 8)
2077 | (mask_p[4] << 6) | (mask_p[3] << 4)
2078 | (mask_p[2] << 2) | (mask_p[1] << 0);
2079 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2080 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2082 tmp_mask = (mask_p[30] << 28)
2083 | (mask_p[29] << 26) | (mask_p[28] << 24)
2084 | (mask_p[27] << 22) | (mask_p[26] << 20)
2085 | (mask_p[25] << 18) | (mask_p[24] << 16)
2086 | (mask_p[23] << 14) | (mask_p[22] << 12)
2087 | (mask_p[21] << 10) | (mask_p[20] << 8)
2088 | (mask_p[19] << 6) | (mask_p[18] << 4)
2089 | (mask_p[17] << 2) | (mask_p[16] << 0);
2090 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2091 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2093 tmp_mask = (mask_p[45] << 28)
2094 | (mask_p[44] << 26) | (mask_p[43] << 24)
2095 | (mask_p[42] << 22) | (mask_p[41] << 20)
2096 | (mask_p[40] << 18) | (mask_p[39] << 16)
2097 | (mask_p[38] << 14) | (mask_p[37] << 12)
2098 | (mask_p[36] << 10) | (mask_p[35] << 8)
2099 | (mask_p[34] << 6) | (mask_p[33] << 4)
2100 | (mask_p[32] << 2) | (mask_p[31] << 0);
2101 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2102 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2104 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2105 | (mask_p[59] << 26) | (mask_p[58] << 24)
2106 | (mask_p[57] << 22) | (mask_p[56] << 20)
2107 | (mask_p[55] << 18) | (mask_p[54] << 16)
2108 | (mask_p[53] << 14) | (mask_p[52] << 12)
2109 | (mask_p[51] << 10) | (mask_p[50] << 8)
2110 | (mask_p[49] << 6) | (mask_p[48] << 4)
2111 | (mask_p[47] << 2) | (mask_p[46] << 0);
2112 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2113 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
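/*
 * Editorial note: the pre-AR9280 variant below follows the same structure,
 * but appears to express the spur offset relative to chan->channel in
 * 0.1 MHz steps (hence bin = bb_spur * 32 and the 440/400 denominators),
 * and it has no HT40 sub-channel handling.
 */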
2116 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2118 int bb_spur = AR_NO_SPUR;
2119 int bin, cur_bin;
2120 int spur_freq_sd;
2121 int spur_delta_phase;
2122 int denominator;
2123 int upper, lower, cur_vit_mask;
2124 int tmp, new;
2125 int i;
2126 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2127 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2129 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2130 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2132 int inc[4] = { 0, 100, 0, 0 };
2134 int8_t mask_m[123];
2135 int8_t mask_p[123];
2136 int8_t mask_amt;
2137 int tmp_mask;
2138 int cur_bb_spur;
2139 bool is2GHz = IS_CHAN_2GHZ(chan);
2141 memset(&mask_m, 0, sizeof(int8_t) * 123);
2142 memset(&mask_p, 0, sizeof(int8_t) * 123);
2144 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2145 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2146 if (AR_NO_SPUR == cur_bb_spur)
2147 break;
2148 cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2149 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2150 bb_spur = cur_bb_spur;
2151 break;
2155 if (AR_NO_SPUR == bb_spur)
2156 return;
2158 bin = bb_spur * 32;
2160 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2161 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2162 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2163 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2164 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2166 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2168 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2169 AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2170 AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2171 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2172 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2173 REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2175 spur_delta_phase = ((bb_spur * 524288) / 100) &
2176 AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2178 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2179 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2181 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2182 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2183 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2184 REG_WRITE(ah, AR_PHY_TIMING11, new);
2186 cur_bin = -6000;
2187 upper = bin + 100;
2188 lower = bin - 100;
2190 for (i = 0; i < 4; i++) {
2191 int pilot_mask = 0;
2192 int chan_mask = 0;
2193 int bp = 0;
2194 for (bp = 0; bp < 30; bp++) {
2195 if ((cur_bin > lower) && (cur_bin < upper)) {
2196 pilot_mask = pilot_mask | 0x1 << bp;
2197 chan_mask = chan_mask | 0x1 << bp;
2199 cur_bin += 100;
2201 cur_bin += inc[i];
2202 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2203 REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2206 cur_vit_mask = 6100;
2207 upper = bin + 120;
2208 lower = bin - 120;
2210 for (i = 0; i < 123; i++) {
2211 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2213 /* workaround for gcc bug #37014 */
2214 volatile int tmp_v = abs(cur_vit_mask - bin);
2216 if (tmp_v < 75)
2217 mask_amt = 1;
2218 else
2219 mask_amt = 0;
2220 if (cur_vit_mask < 0)
2221 mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2222 else
2223 mask_p[cur_vit_mask / 100] = mask_amt;
2225 cur_vit_mask -= 100;
2228 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2229 | (mask_m[48] << 26) | (mask_m[49] << 24)
2230 | (mask_m[50] << 22) | (mask_m[51] << 20)
2231 | (mask_m[52] << 18) | (mask_m[53] << 16)
2232 | (mask_m[54] << 14) | (mask_m[55] << 12)
2233 | (mask_m[56] << 10) | (mask_m[57] << 8)
2234 | (mask_m[58] << 6) | (mask_m[59] << 4)
2235 | (mask_m[60] << 2) | (mask_m[61] << 0);
2236 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2237 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2239 tmp_mask = (mask_m[31] << 28)
2240 | (mask_m[32] << 26) | (mask_m[33] << 24)
2241 | (mask_m[34] << 22) | (mask_m[35] << 20)
2242 | (mask_m[36] << 18) | (mask_m[37] << 16)
2243 | (mask_m[48] << 14) | (mask_m[39] << 12)
2244 | (mask_m[40] << 10) | (mask_m[41] << 8)
2245 | (mask_m[42] << 6) | (mask_m[43] << 4)
2246 | (mask_m[44] << 2) | (mask_m[45] << 0);
2247 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2248 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2250 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2251 | (mask_m[18] << 26) | (mask_m[18] << 24)
2252 | (mask_m[20] << 22) | (mask_m[20] << 20)
2253 | (mask_m[22] << 18) | (mask_m[22] << 16)
2254 | (mask_m[24] << 14) | (mask_m[24] << 12)
2255 | (mask_m[25] << 10) | (mask_m[26] << 8)
2256 | (mask_m[27] << 6) | (mask_m[28] << 4)
2257 | (mask_m[29] << 2) | (mask_m[30] << 0);
2258 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2259 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2261 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2262 | (mask_m[2] << 26) | (mask_m[3] << 24)
2263 | (mask_m[4] << 22) | (mask_m[5] << 20)
2264 | (mask_m[6] << 18) | (mask_m[7] << 16)
2265 | (mask_m[8] << 14) | (mask_m[9] << 12)
2266 | (mask_m[10] << 10) | (mask_m[11] << 8)
2267 | (mask_m[12] << 6) | (mask_m[13] << 4)
2268 | (mask_m[14] << 2) | (mask_m[15] << 0);
2269 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2270 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2272 tmp_mask = (mask_p[15] << 28)
2273 | (mask_p[14] << 26) | (mask_p[13] << 24)
2274 | (mask_p[12] << 22) | (mask_p[11] << 20)
2275 | (mask_p[10] << 18) | (mask_p[9] << 16)
2276 | (mask_p[8] << 14) | (mask_p[7] << 12)
2277 | (mask_p[6] << 10) | (mask_p[5] << 8)
2278 | (mask_p[4] << 6) | (mask_p[3] << 4)
2279 | (mask_p[2] << 2) | (mask_p[1] << 0);
2280 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2281 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2283 tmp_mask = (mask_p[30] << 28)
2284 | (mask_p[29] << 26) | (mask_p[28] << 24)
2285 | (mask_p[27] << 22) | (mask_p[26] << 20)
2286 | (mask_p[25] << 18) | (mask_p[24] << 16)
2287 | (mask_p[23] << 14) | (mask_p[22] << 12)
2288 | (mask_p[21] << 10) | (mask_p[20] << 8)
2289 | (mask_p[19] << 6) | (mask_p[18] << 4)
2290 | (mask_p[17] << 2) | (mask_p[16] << 0);
2291 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2292 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2294 tmp_mask = (mask_p[45] << 28)
2295 | (mask_p[44] << 26) | (mask_p[43] << 24)
2296 | (mask_p[42] << 22) | (mask_p[41] << 20)
2297 | (mask_p[40] << 18) | (mask_p[39] << 16)
2298 | (mask_p[38] << 14) | (mask_p[37] << 12)
2299 | (mask_p[36] << 10) | (mask_p[35] << 8)
2300 | (mask_p[34] << 6) | (mask_p[33] << 4)
2301 | (mask_p[32] << 2) | (mask_p[31] << 0);
2302 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2303 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2305 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2306 | (mask_p[59] << 26) | (mask_p[58] << 24)
2307 | (mask_p[57] << 22) | (mask_p[56] << 20)
2308 | (mask_p[55] << 18) | (mask_p[54] << 16)
2309 | (mask_p[53] << 14) | (mask_p[52] << 12)
2310 | (mask_p[51] << 10) | (mask_p[50] << 8)
2311 | (mask_p[49] << 6) | (mask_p[48] << 4)
2312 | (mask_p[47] << 2) | (mask_p[46] << 0);
2313 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2314 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
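/*
 * Editorial note: rfkill support appears to route the external RF-silent
 * signal into the baseband: the rfsilent input is enabled and muxed, the
 * configured GPIO line is switched to input mode, and RFSILENT_BB in
 * AR_PHY_TEST lets the baseband honour it.
 */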
2317 static void ath9k_enable_rfkill(struct ath_hw *ah)
2319 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2320 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2322 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2323 AR_GPIO_INPUT_MUX2_RFSILENT);
2325 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2326 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2329 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2330 bool bChannelChange)
2332 u32 saveLedState;
2333 struct ath_softc *sc = ah->ah_sc;
2334 struct ath9k_channel *curchan = ah->curchan;
2335 u32 saveDefAntenna;
2336 u32 macStaId1;
2337 int i, rx_chainmask, r;
2339 ah->extprotspacing = sc->ht_extprotspacing;
2340 ah->txchainmask = sc->tx_chainmask;
2341 ah->rxchainmask = sc->rx_chainmask;
2343 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2344 return -EIO;
2346 if (curchan)
2347 ath9k_hw_getnf(ah, curchan);
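/*
 * Editorial note: the block below implements a "fast channel change": if
 * only the channel differs (same operating flags, chip not in full sleep,
 * and not an AR9280 on a 5 MHz-spaced channel), the channel is switched
 * and the noise floor reloaded without going through a full chip reset.
 */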
2349 if (bChannelChange &&
2350 (ah->chip_fullsleep != true) &&
2351 (ah->curchan != NULL) &&
2352 (chan->channel != ah->curchan->channel) &&
2353 ((chan->channelFlags & CHANNEL_ALL) ==
2354 (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2355 (!AR_SREV_9280(ah) || (!IS_CHAN_A_5MHZ_SPACED(chan) &&
2356 !IS_CHAN_A_5MHZ_SPACED(ah->curchan)))) {
2358 if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2359 ath9k_hw_loadnf(ah, ah->curchan);
2360 ath9k_hw_start_nfcal(ah);
2361 return 0;
2365 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2366 if (saveDefAntenna == 0)
2367 saveDefAntenna = 1;
2369 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2371 saveLedState = REG_READ(ah, AR_CFG_LED) &
2372 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2373 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2375 ath9k_hw_mark_phy_inactive(ah);
2377 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2378 REG_WRITE(ah,
2379 AR9271_RESET_POWER_DOWN_CONTROL,
2380 AR9271_RADIO_RF_RST);
2381 udelay(50);
2384 if (!ath9k_hw_chip_reset(ah, chan)) {
2385 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Chip reset failed\n");
2386 return -EINVAL;
2389 if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2390 ah->htc_reset_init = false;
2391 REG_WRITE(ah,
2392 AR9271_RESET_POWER_DOWN_CONTROL,
2393 AR9271_GATE_MAC_CTL);
2394 udelay(50);
2397 if (AR_SREV_9280_10_OR_LATER(ah))
2398 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2400 if (AR_SREV_9287_12_OR_LATER(ah)) {
2401 /* Enable ASYNC FIFO */
2402 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2403 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2404 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2405 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2406 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2407 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2408 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2410 r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2411 if (r)
2412 return r;
2414 /* Setup MFP options for CCMP */
2415 if (AR_SREV_9280_20_OR_LATER(ah)) {
2416 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2417 * frames when constructing CCMP AAD. */
2418 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2419 0xc7ff);
2420 ah->sw_mgmt_crypto = false;
2421 } else if (AR_SREV_9160_10_OR_LATER(ah)) {
2422 /* Disable hardware crypto for management frames */
2423 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2424 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2425 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2426 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2427 ah->sw_mgmt_crypto = true;
2428 } else
2429 ah->sw_mgmt_crypto = true;
2431 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2432 ath9k_hw_set_delta_slope(ah, chan);
2434 if (AR_SREV_9280_10_OR_LATER(ah))
2435 ath9k_hw_9280_spur_mitigate(ah, chan);
2436 else
2437 ath9k_hw_spur_mitigate(ah, chan);
2439 ah->eep_ops->set_board_values(ah, chan);
2441 ath9k_hw_decrease_chain_power(ah, chan);
2443 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2444 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2445 | macStaId1
2446 | AR_STA_ID1_RTS_USE_DEF
2447 | (ah->config.
2448 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2449 | ah->sta_id1_defaults);
2450 ath9k_hw_set_operating_mode(ah, ah->opmode);
2452 REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
2453 REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
2455 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2457 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
2458 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
2459 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2461 REG_WRITE(ah, AR_ISR, ~0);
2463 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2465 if (AR_SREV_9280_10_OR_LATER(ah))
2466 ath9k_hw_ar9280_set_channel(ah, chan);
2467 else
2468 if (!(ath9k_hw_set_channel(ah, chan)))
2469 return -EIO;
2471 for (i = 0; i < AR_NUM_DCU; i++)
2472 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2474 ah->intr_txqs = 0;
2475 for (i = 0; i < ah->caps.total_queues; i++)
2476 ath9k_hw_resettxqueue(ah, i);
2478 ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2479 ath9k_hw_init_qos(ah);
2481 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2482 ath9k_enable_rfkill(ah);
2484 ath9k_hw_init_user_settings(ah);
2486 if (AR_SREV_9287_12_OR_LATER(ah)) {
2487 REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2488 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2489 REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2490 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2491 REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2492 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2494 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2495 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2497 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2498 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2499 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2500 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2502 if (AR_SREV_9287_12_OR_LATER(ah)) {
2503 REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2504 AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2507 REG_WRITE(ah, AR_STA_ID1,
2508 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2510 ath9k_hw_set_dma(ah);
2512 REG_WRITE(ah, AR_OBS, 8);
2514 if (ah->config.intr_mitigation) {
2515 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2516 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2519 ath9k_hw_init_bb(ah, chan);
2521 if (!ath9k_hw_init_cal(ah, chan))
2522 return -EIO;
2524 rx_chainmask = ah->rxchainmask;
2525 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2526 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2527 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2530 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2533 * For big endian systems turn on swapping for descriptors
2535 if (AR_SREV_9100(ah)) {
2536 u32 mask;
2537 mask = REG_READ(ah, AR_CFG);
2538 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2539 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2540 "CFG Byte Swap Set 0x%x\n", mask);
2541 } else {
2542 mask =
2543 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2544 REG_WRITE(ah, AR_CFG, mask);
2545 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2546 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2548 } else {
2549 /* Configure AR9271 target WLAN */
2550 if (AR_SREV_9271(ah))
2551 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2552 #ifdef __BIG_ENDIAN
2553 else
2554 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2555 #endif
2558 if (ah->ah_sc->sc_flags & SC_OP_BTCOEX_ENABLED)
2559 ath9k_hw_btcoex_enable(ah);
2561 return 0;
2564 /************************/
2565 /* Key Cache Management */
2566 /************************/
2568 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2570 u32 keyType;
2572 if (entry >= ah->caps.keycache_size) {
2573 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2574 "keychache entry %u out of range\n", entry);
2575 return false;
2578 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2580 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2581 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2582 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2583 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2584 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2585 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2586 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2587 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2589 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2590 u16 micentry = entry + 64;
2592 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2593 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2594 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2595 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2599 return true;
2602 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2604 u32 macHi, macLo;
2606 if (entry >= ah->caps.keycache_size) {
2607 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2608 "keychache entry %u out of range\n", entry);
2609 return false;
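/*
 * Editorial note: the key cache MAC registers hold the address shifted
 * right by one bit (presumably because the individual/group bit is
 * implied), which is why macLo/macHi are rotated below before being
 * written together with AR_KEYTABLE_VALID.
 */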
2612 if (mac != NULL) {
2613 macHi = (mac[5] << 8) | mac[4];
2614 macLo = (mac[3] << 24) |
2615 (mac[2] << 16) |
2616 (mac[1] << 8) |
2617 mac[0];
2618 macLo >>= 1;
2619 macLo |= (macHi & 1) << 31;
2620 macHi >>= 1;
2621 } else {
2622 macLo = macHi = 0;
2624 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2625 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2627 return true;
2630 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2631 const struct ath9k_keyval *k,
2632 const u8 *mac)
2634 const struct ath9k_hw_capabilities *pCap = &ah->caps;
2635 u32 key0, key1, key2, key3, key4;
2636 u32 keyType;
2638 if (entry >= pCap->keycache_size) {
2639 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2640 "keycache entry %u out of range\n", entry);
2641 return false;
2644 switch (k->kv_type) {
2645 case ATH9K_CIPHER_AES_OCB:
2646 keyType = AR_KEYTABLE_TYPE_AES;
2647 break;
2648 case ATH9K_CIPHER_AES_CCM:
2649 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2650 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2651 "AES-CCM not supported by mac rev 0x%x\n",
2652 ah->hw_version.macRev);
2653 return false;
2655 keyType = AR_KEYTABLE_TYPE_CCM;
2656 break;
2657 case ATH9K_CIPHER_TKIP:
2658 keyType = AR_KEYTABLE_TYPE_TKIP;
2659 if (ATH9K_IS_MIC_ENABLED(ah)
2660 && entry + 64 >= pCap->keycache_size) {
2661 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2662 "entry %u inappropriate for TKIP\n", entry);
2663 return false;
2665 break;
2666 case ATH9K_CIPHER_WEP:
2667 if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2668 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2669 "WEP key length %u too small\n", k->kv_len);
2670 return false;
2672 if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2673 keyType = AR_KEYTABLE_TYPE_40;
2674 else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2675 keyType = AR_KEYTABLE_TYPE_104;
2676 else
2677 keyType = AR_KEYTABLE_TYPE_128;
2678 break;
2679 case ATH9K_CIPHER_CLR:
2680 keyType = AR_KEYTABLE_TYPE_CLR;
2681 break;
2682 default:
2683 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2684 "cipher %u not supported\n", k->kv_type);
2685 return false;
2688 key0 = get_unaligned_le32(k->kv_val + 0);
2689 key1 = get_unaligned_le16(k->kv_val + 4);
2690 key2 = get_unaligned_le32(k->kv_val + 6);
2691 key3 = get_unaligned_le16(k->kv_val + 10);
2692 key4 = get_unaligned_le32(k->kv_val + 12);
2693 if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2694 key4 &= 0xff;
2697 * Note: Key cache registers access special memory area that requires
2698 * two 32-bit writes to actually update the values in the internal
2699 * memory. Consequently, the exact order and pairs used here must be
2700 * maintained.
2703 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2704 u16 micentry = entry + 64;
2707 * Write inverted key[47:0] first to avoid Michael MIC errors
2708 * on frames that could be sent or received at the same time.
2709 * The correct key will be written in the end once everything
2710 * else is ready.
2712 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2713 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2715 /* Write key[95:48] */
2716 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2717 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2719 /* Write key[127:96] and key type */
2720 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2721 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2723 /* Write MAC address for the entry */
2724 (void) ath9k_hw_keysetmac(ah, entry, mac);
2726 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2728 * TKIP uses two key cache entries:
2729 * Michael MIC TX/RX keys in the same key cache entry
2730 * (idx = main index + 64):
2731 * key0 [31:0] = RX key [31:0]
2732 * key1 [15:0] = TX key [31:16]
2733 * key1 [31:16] = reserved
2734 * key2 [31:0] = RX key [63:32]
2735 * key3 [15:0] = TX key [15:0]
2736 * key3 [31:16] = reserved
2737 * key4 [31:0] = TX key [63:32]
2739 u32 mic0, mic1, mic2, mic3, mic4;
2741 mic0 = get_unaligned_le32(k->kv_mic + 0);
2742 mic2 = get_unaligned_le32(k->kv_mic + 4);
2743 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2744 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2745 mic4 = get_unaligned_le32(k->kv_txmic + 4);
2747 /* Write RX[31:0] and TX[31:16] */
2748 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2749 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2751 /* Write RX[63:32] and TX[15:0] */
2752 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2753 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2755 /* Write TX[63:32] and keyType(reserved) */
2756 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2757 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2758 AR_KEYTABLE_TYPE_CLR);
2760 } else {
2762 * TKIP uses four key cache entries (two for group
2763 * keys):
2764 * Michael MIC TX/RX keys are in different key cache
2765 * entries (idx = main index + 64 for TX and
2766 * main index + 32 + 96 for RX):
2767 * key0 [31:0] = TX/RX MIC key [31:0]
2768 * key1 [31:0] = reserved
2769 * key2 [31:0] = TX/RX MIC key [63:32]
2770 * key3 [31:0] = reserved
2771 * key4 [31:0] = reserved
2773 * Upper layer code will call this function separately
2774 * for TX and RX keys when these registers offsets are
2775 * used.
2777 u32 mic0, mic2;
2779 mic0 = get_unaligned_le32(k->kv_mic + 0);
2780 mic2 = get_unaligned_le32(k->kv_mic + 4);
2782 /* Write MIC key[31:0] */
2783 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2784 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2786 /* Write MIC key[63:32] */
2787 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2788 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2790 /* Write TX[63:32] and keyType(reserved) */
2791 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2792 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2793 AR_KEYTABLE_TYPE_CLR);
2796 /* MAC address registers are reserved for the MIC entry */
2797 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2798 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2801 * Write the correct (un-inverted) key[47:0] last to enable
2802 * TKIP now that all other registers are set with correct
2803 * values.
2805 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2806 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2807 } else {
2808 /* Write key[47:0] */
2809 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2810 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2812 /* Write key[95:48] */
2813 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2814 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2816 /* Write key[127:96] and key type */
2817 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2818 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2820 /* Write MAC address for the entry */
2821 (void) ath9k_hw_keysetmac(ah, entry, mac);
2824 return true;
2827 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2829 if (entry < ah->caps.keycache_size) {
2830 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2831 if (val & AR_KEYTABLE_VALID)
2832 return true;
2834 return false;
2837 /******************************/
2838 /* Power Management (Chipset) */
2839 /******************************/
2841 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2843 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2844 if (setChip) {
2845 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2846 AR_RTC_FORCE_WAKE_EN);
2847 if (!AR_SREV_9100(ah))
2848 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2850 REG_CLR_BIT(ah, (AR_RTC_RESET),
2851 AR_RTC_RESET_EN);
2855 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2857 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2858 if (setChip) {
2859 struct ath9k_hw_capabilities *pCap = &ah->caps;
2861 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2862 REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2863 AR_RTC_FORCE_WAKE_ON_INT);
2864 } else {
2865 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2866 AR_RTC_FORCE_WAKE_EN);
2871 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2873 u32 val;
2874 int i;
2876 if (setChip) {
2877 if ((REG_READ(ah, AR_RTC_STATUS) &
2878 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2879 if (ath9k_hw_set_reset_reg(ah,
2880 ATH9K_RESET_POWER_ON) != true) {
2881 return false;
2884 if (AR_SREV_9100(ah))
2885 REG_SET_BIT(ah, AR_RTC_RESET,
2886 AR_RTC_RESET_EN);
2888 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2889 AR_RTC_FORCE_WAKE_EN);
2890 udelay(50);
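/*
 * Editorial note: the loop below polls AR_RTC_STATUS for up to
 * POWER_UP_TIME microseconds in 50 us steps, re-asserting
 * AR_RTC_FORCE_WAKE_EN on every iteration until the RTC reports ON.
 */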
2892 for (i = POWER_UP_TIME / 50; i > 0; i--) {
2893 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2894 if (val == AR_RTC_STATUS_ON)
2895 break;
2896 udelay(50);
2897 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2898 AR_RTC_FORCE_WAKE_EN);
2900 if (i == 0) {
2901 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2902 "Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2903 return false;
2907 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2909 return true;
2912 static bool ath9k_hw_setpower_nolock(struct ath_hw *ah,
2913 enum ath9k_power_mode mode)
2915 int status = true, setChip = true;
2916 static const char *modes[] = {
2917 "AWAKE",
2918 "FULL-SLEEP",
2919 "NETWORK SLEEP",
2920 "UNDEFINED"
2923 if (ah->power_mode == mode)
2924 return status;
2926 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "%s -> %s\n",
2927 modes[ah->power_mode], modes[mode]);
2929 switch (mode) {
2930 case ATH9K_PM_AWAKE:
2931 status = ath9k_hw_set_power_awake(ah, setChip);
2932 break;
2933 case ATH9K_PM_FULL_SLEEP:
2934 ath9k_set_power_sleep(ah, setChip);
2935 ah->chip_fullsleep = true;
2936 break;
2937 case ATH9K_PM_NETWORK_SLEEP:
2938 ath9k_set_power_network_sleep(ah, setChip);
2939 break;
2940 default:
2941 DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2942 "Unknown power mode %u\n", mode);
2943 return false;
2945 ah->power_mode = mode;
2947 return status;
2950 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2952 unsigned long flags;
2953 bool ret;
2955 spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags);
2956 ret = ath9k_hw_setpower_nolock(ah, mode);
2957 spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags);
2959 return ret;
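/*
 * Editorial note: power-save transitions are reference counted below.
 * ath9k_ps_wakeup() forces AWAKE only for the first user; ath9k_ps_restore()
 * drops back to NETWORK SLEEP only when the last user is done, PS is
 * enabled, and no beacon/CAB/PS-Poll/TX-ACK wait flags are pending.
 */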
2962 void ath9k_ps_wakeup(struct ath_softc *sc)
2964 unsigned long flags;
2966 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2967 if (++sc->ps_usecount != 1)
2968 goto unlock;
2970 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE);
2972 unlock:
2973 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2976 void ath9k_ps_restore(struct ath_softc *sc)
2978 unsigned long flags;
2980 spin_lock_irqsave(&sc->sc_pm_lock, flags);
2981 if (--sc->ps_usecount != 0)
2982 goto unlock;
2984 if (sc->ps_enabled &&
2985 !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON |
2986 SC_OP_WAIT_FOR_CAB |
2987 SC_OP_WAIT_FOR_PSPOLL_DATA |
2988 SC_OP_WAIT_FOR_TX_ACK)))
2989 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP);
2991 unlock:
2992 spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2996 * Helper for ASPM support.
2998 * Disable PLL when in L0s as well as receiver clock when in L1.
2999 * This power saving option must be enabled through the SerDes.
3001 * Programming the SerDes must go through the same 288 bit serial shift
3002 * register as the other analog registers. Hence the 9 writes.
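 *
 * Editorial summary of the code below: AR9280 2.0 and later replay the
 * per-chip SerDes table prepared during ath9k_hw_init(), while older parts
 * use the fixed word sequences written here. In every case
 * AR_PCIE_PM_CTRL_ENA is then set so the PCIe core can be forced into L1,
 * and AR_WA is loaded with a chip-specific workaround default unless
 * pcie_waen overrides it.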
3004 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore)
3006 u8 i;
3008 if (ah->is_pciexpress != true)
3009 return;
3011 /* Do not touch SerDes registers */
3012 if (ah->config.pcie_powersave_enable == 2)
3013 return;
3015 /* Nothing to do on restore for 11N */
3016 if (restore)
3017 return;
3019 if (AR_SREV_9280_20_OR_LATER(ah)) {
3021 * AR9280 2.0 or later chips use SerDes values from the
3022 * initvals.h initialized depending on chipset during
3023 * ath9k_hw_init()
3025 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3026 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3027 INI_RA(&ah->iniPcieSerdes, i, 1));
3029 } else if (AR_SREV_9280(ah) &&
3030 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3031 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3032 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3034 /* RX shut off when elecidle is asserted */
3035 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3036 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3037 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3039 /* Shut off CLKREQ active in L1 */
3040 if (ah->config.pcie_clock_req)
3041 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3042 else
3043 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3045 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3046 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3047 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3049 /* Load the new settings */
3050 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3052 } else {
3053 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3054 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3056 /* RX shut off when elecidle is asserted */
3057 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3058 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3059 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3062 * Ignore ah->config.pcie_clock_req setting for
3063 * pre-AR9280 11n
3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3067 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3068 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3069 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3071 /* Load the new settings */
3072 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3075 udelay(1000);
3077 /* set bit 19 to allow forcing of pcie core into L1 state */
3078 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3080 /* Several PCIe workarounds to ensure proper behaviour */
3081 if (ah->config.pcie_waen) {
3082 REG_WRITE(ah, AR_WA, ah->config.pcie_waen);
3083 } else {
3084 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || AR_SREV_9287(ah))
3085 REG_WRITE(ah, AR_WA, AR9285_WA_DEFAULT);
3087 * On AR9280 chips bit 22 of 0x4004 needs to be set,
3088 * otherwise the card may disappear.
3090 else if (AR_SREV_9280(ah))
3091 REG_WRITE(ah, AR_WA, AR9280_WA_DEFAULT);
3092 else
3093 REG_WRITE(ah, AR_WA, AR_WA_DEFAULT);
3097 /**********************/
3098 /* Interrupt Handling */
3099 /**********************/
3101 bool ath9k_hw_intrpend(struct ath_hw *ah)
3103 u32 host_isr;
3105 if (AR_SREV_9100(ah))
3106 return true;
3108 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3109 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3110 return true;
3112 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3113 if ((host_isr & AR_INTR_SYNC_DEFAULT)
3114 && (host_isr != AR_INTR_SPURIOUS))
3115 return true;
3117 return false;
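/*
 * Editorial note: ath9k_hw_getisr() below decodes the primary ISR (and its
 * read-and-clear AR_ISR_RAC mirror), folds in the secondary AR_ISR_S*
 * registers for beacon and per-queue TX detail, and flags fatal PCI errors
 * from the sync-cause bits as ATH9K_INT_FATAL.
 */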
3120 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3122 u32 isr = 0;
3123 u32 mask2 = 0;
3124 struct ath9k_hw_capabilities *pCap = &ah->caps;
3125 u32 sync_cause = 0;
3126 bool fatal_int = false;
3128 if (!AR_SREV_9100(ah)) {
3129 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3130 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3131 == AR_RTC_STATUS_ON) {
3132 isr = REG_READ(ah, AR_ISR);
3136 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3137 AR_INTR_SYNC_DEFAULT;
3139 *masked = 0;
3141 if (!isr && !sync_cause)
3142 return false;
3143 } else {
3144 *masked = 0;
3145 isr = REG_READ(ah, AR_ISR);
3148 if (isr) {
3149 if (isr & AR_ISR_BCNMISC) {
3150 u32 isr2;
3151 isr2 = REG_READ(ah, AR_ISR_S2);
3152 if (isr2 & AR_ISR_S2_TIM)
3153 mask2 |= ATH9K_INT_TIM;
3154 if (isr2 & AR_ISR_S2_DTIM)
3155 mask2 |= ATH9K_INT_DTIM;
3156 if (isr2 & AR_ISR_S2_DTIMSYNC)
3157 mask2 |= ATH9K_INT_DTIMSYNC;
3158 if (isr2 & (AR_ISR_S2_CABEND))
3159 mask2 |= ATH9K_INT_CABEND;
3160 if (isr2 & AR_ISR_S2_GTT)
3161 mask2 |= ATH9K_INT_GTT;
3162 if (isr2 & AR_ISR_S2_CST)
3163 mask2 |= ATH9K_INT_CST;
3164 if (isr2 & AR_ISR_S2_TSFOOR)
3165 mask2 |= ATH9K_INT_TSFOOR;
3168 isr = REG_READ(ah, AR_ISR_RAC);
3169 if (isr == 0xffffffff) {
3170 *masked = 0;
3171 return false;
3174 *masked = isr & ATH9K_INT_COMMON;
3176 if (ah->config.intr_mitigation) {
3177 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3178 *masked |= ATH9K_INT_RX;
3181 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3182 *masked |= ATH9K_INT_RX;
3183 if (isr &
3184 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3185 AR_ISR_TXEOL)) {
3186 u32 s0_s, s1_s;
3188 *masked |= ATH9K_INT_TX;
3190 s0_s = REG_READ(ah, AR_ISR_S0_S);
3191 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3192 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3194 s1_s = REG_READ(ah, AR_ISR_S1_S);
3195 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3196 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3199 if (isr & AR_ISR_RXORN) {
3200 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3201 "receive FIFO overrun interrupt\n");
3204 if (!AR_SREV_9100(ah)) {
3205 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3206 u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3207 if (isr5 & AR_ISR_S5_TIM_TIMER)
3208 *masked |= ATH9K_INT_TIM_TIMER;
3212 *masked |= mask2;
3215 if (AR_SREV_9100(ah))
3216 return true;
3218 if (sync_cause) {
3219 fatal_int =
3220 (sync_cause &
3221 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3222 ? true : false;
3224 if (fatal_int) {
3225 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3226 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3227 "received PCI FATAL interrupt\n");
3229 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3230 DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3231 "received PCI PERR interrupt\n");
3233 *masked |= ATH9K_INT_FATAL;
3235 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3236 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3237 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3238 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3239 REG_WRITE(ah, AR_RC, 0);
3240 *masked |= ATH9K_INT_FATAL;
3242 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3243 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3244 "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3247 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3248 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3251 return true;
3254 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3256 u32 omask = ah->mask_reg;
3257 u32 mask, mask2;
3258 struct ath9k_hw_capabilities *pCap = &ah->caps;
3260 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3262 if (omask & ATH9K_INT_GLOBAL) {
3263 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "disable IER\n");
3264 REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3265 (void) REG_READ(ah, AR_IER);
3266 if (!AR_SREV_9100(ah)) {
3267 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3268 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3270 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3271 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3275 mask = ints & ATH9K_INT_COMMON;
3276 mask2 = 0;
3278 if (ints & ATH9K_INT_TX) {
3279 if (ah->txok_interrupt_mask)
3280 mask |= AR_IMR_TXOK;
3281 if (ah->txdesc_interrupt_mask)
3282 mask |= AR_IMR_TXDESC;
3283 if (ah->txerr_interrupt_mask)
3284 mask |= AR_IMR_TXERR;
3285 if (ah->txeol_interrupt_mask)
3286 mask |= AR_IMR_TXEOL;
3288 if (ints & ATH9K_INT_RX) {
3289 mask |= AR_IMR_RXERR;
3290 if (ah->config.intr_mitigation)
3291 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3292 else
3293 mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3294 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3295 mask |= AR_IMR_GENTMR;
3298 if (ints & (ATH9K_INT_BMISC)) {
3299 mask |= AR_IMR_BCNMISC;
3300 if (ints & ATH9K_INT_TIM)
3301 mask2 |= AR_IMR_S2_TIM;
3302 if (ints & ATH9K_INT_DTIM)
3303 mask2 |= AR_IMR_S2_DTIM;
3304 if (ints & ATH9K_INT_DTIMSYNC)
3305 mask2 |= AR_IMR_S2_DTIMSYNC;
3306 if (ints & ATH9K_INT_CABEND)
3307 mask2 |= AR_IMR_S2_CABEND;
3308 if (ints & ATH9K_INT_TSFOOR)
3309 mask2 |= AR_IMR_S2_TSFOOR;
3312 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3313 mask |= AR_IMR_BCNMISC;
3314 if (ints & ATH9K_INT_GTT)
3315 mask2 |= AR_IMR_S2_GTT;
3316 if (ints & ATH9K_INT_CST)
3317 mask2 |= AR_IMR_S2_CST;
3320 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3321 REG_WRITE(ah, AR_IMR, mask);
3322 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3323 AR_IMR_S2_DTIM |
3324 AR_IMR_S2_DTIMSYNC |
3325 AR_IMR_S2_CABEND |
3326 AR_IMR_S2_CABTO |
3327 AR_IMR_S2_TSFOOR |
3328 AR_IMR_S2_GTT | AR_IMR_S2_CST);
3329 REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3330 ah->mask_reg = ints;
3332 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3333 if (ints & ATH9K_INT_TIM_TIMER)
3334 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3335 else
3336 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3339 if (ints & ATH9K_INT_GLOBAL) {
3340 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "enable IER\n");
3341 REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3342 if (!AR_SREV_9100(ah)) {
3343 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3344 AR_INTR_MAC_IRQ);
3345 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3348 REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3349 AR_INTR_SYNC_DEFAULT);
3350 REG_WRITE(ah, AR_INTR_SYNC_MASK,
3351 AR_INTR_SYNC_DEFAULT);
3353 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3354 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3357 return omask;
3360 /*******************/
3361 /* Beacon Handling */
3362 /*******************/
3364 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3366 int flags = 0;
3368 ah->beacon_interval = beacon_period;
3370 switch (ah->opmode) {
3371 case NL80211_IFTYPE_STATION:
3372 case NL80211_IFTYPE_MONITOR:
3373 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3374 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3375 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3376 flags |= AR_TBTT_TIMER_EN;
3377 break;
3378 case NL80211_IFTYPE_ADHOC:
3379 case NL80211_IFTYPE_MESH_POINT:
3380 REG_SET_BIT(ah, AR_TXCFG,
3381 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3382 REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3383 TU_TO_USEC(next_beacon +
3384 (ah->atim_window ? ah->
3385 atim_window : 1)));
3386 flags |= AR_NDP_TIMER_EN;
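/* fall through: IBSS/mesh also program the TBTT, DBA and SWBA timers */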
3387 case NL80211_IFTYPE_AP:
3388 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3389 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3390 TU_TO_USEC(next_beacon -
3391 ah->config.
3392 dma_beacon_response_time));
3393 REG_WRITE(ah, AR_NEXT_SWBA,
3394 TU_TO_USEC(next_beacon -
3395 ah->config.
3396 sw_beacon_response_time));
3397 flags |=
3398 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3399 break;
3400 default:
3401 DPRINTF(ah->ah_sc, ATH_DBG_BEACON,
3402 "%s: unsupported opmode: %d\n",
3403 __func__, ah->opmode);
3404 return;
3405 break;
3408 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3409 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3410 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3411 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3413 beacon_period &= ~ATH9K_BEACON_ENA;
3414 if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3415 beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3416 ath9k_hw_reset_tsf(ah);
3419 REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3422 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3423 const struct ath9k_beacon_state *bs)
3425 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3426 struct ath9k_hw_capabilities *pCap = &ah->caps;
3428 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3430 REG_WRITE(ah, AR_BEACON_PERIOD,
3431 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3432 REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3433 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3435 REG_RMW_FIELD(ah, AR_RSSI_THR,
3436 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3438 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3440 if (bs->bs_sleepduration > beaconintval)
3441 beaconintval = bs->bs_sleepduration;
3443 dtimperiod = bs->bs_dtimperiod;
3444 if (bs->bs_sleepduration > dtimperiod)
3445 dtimperiod = bs->bs_sleepduration;
3447 if (beaconintval == dtimperiod)
3448 nextTbtt = bs->bs_nextdtim;
3449 else
3450 nextTbtt = bs->bs_nexttbtt;
3452 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3453 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3454 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3455 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3457 REG_WRITE(ah, AR_NEXT_DTIM,
3458 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3459 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3461 REG_WRITE(ah, AR_SLEEP1,
3462 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3463 | AR_SLEEP1_ASSUME_DTIM);
3465 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3466 beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3467 else
3468 beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3470 REG_WRITE(ah, AR_SLEEP2,
3471 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3473 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3474 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3476 REG_SET_BIT(ah, AR_TIMER_MODE,
3477 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3478 AR_DTIM_TIMER_EN);
3480 /* TSF Out of Range Threshold */
3481 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3484 /*******************/
3485 /* HW Capabilities */
3486 /*******************/
3488 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3490 struct ath9k_hw_capabilities *pCap = &ah->caps;
3491 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3492 struct ath_btcoex_info *btcoex_info = &ah->ah_sc->btcoex_info;
3494 u16 capField = 0, eeval;
3496 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3497 regulatory->current_rd = eeval;
3499 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3500 if (AR_SREV_9285_10_OR_LATER(ah))
3501 eeval |= AR9285_RDEXT_DEFAULT;
3502 regulatory->current_rd_ext = eeval;
3504 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3506 if (ah->opmode != NL80211_IFTYPE_AP &&
3507 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3508 if (regulatory->current_rd == 0x64 ||
3509 regulatory->current_rd == 0x65)
3510 regulatory->current_rd += 5;
3511 else if (regulatory->current_rd == 0x41)
3512 regulatory->current_rd = 0x43;
3513 DPRINTF(ah->ah_sc, ATH_DBG_REGULATORY,
3514 "regdomain mapped to 0x%x\n", regulatory->current_rd);
3517 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3518 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3520 if (eeval & AR5416_OPFLAGS_11A) {
3521 set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3522 if (ah->config.ht_enable) {
3523 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3524 set_bit(ATH9K_MODE_11NA_HT20,
3525 pCap->wireless_modes);
3526 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3527 set_bit(ATH9K_MODE_11NA_HT40PLUS,
3528 pCap->wireless_modes);
3529 set_bit(ATH9K_MODE_11NA_HT40MINUS,
3530 pCap->wireless_modes);
3535 if (eeval & AR5416_OPFLAGS_11G) {
3536 set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3537 if (ah->config.ht_enable) {
3538 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3539 set_bit(ATH9K_MODE_11NG_HT20,
3540 pCap->wireless_modes);
3541 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3542 set_bit(ATH9K_MODE_11NG_HT40PLUS,
3543 pCap->wireless_modes);
3544 set_bit(ATH9K_MODE_11NG_HT40MINUS,
3545 pCap->wireless_modes);
3550 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3552 * For AR9271 we will temporarily use the rx chainmask as read from
3553 * the EEPROM.
3555 if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3556 !(eeval & AR5416_OPFLAGS_11A) &&
3557 !(AR_SREV_9271(ah)))
3558 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3559 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3560 else
3561 /* Use rx_chainmask from EEPROM. */
3562 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3564 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3565 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3567 pCap->low_2ghz_chan = 2312;
3568 pCap->high_2ghz_chan = 2732;
3570 pCap->low_5ghz_chan = 4920;
3571 pCap->high_5ghz_chan = 6100;
3573 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3574 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3575 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3577 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3578 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3579 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3581 if (ah->config.ht_enable)
3582 pCap->hw_caps |= ATH9K_HW_CAP_HT;
3583 else
3584 pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3586 pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3587 pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3588 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3589 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3591 if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3592 pCap->total_queues =
3593 MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3594 else
3595 pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3597 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3598 pCap->keycache_size =
3599 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3600 else
3601 pCap->keycache_size = AR_KEYTABLE_SIZE;
3603 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3604 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3606 if (AR_SREV_9285_10_OR_LATER(ah))
3607 pCap->num_gpio_pins = AR9285_NUM_GPIO;
3608 else if (AR_SREV_9280_10_OR_LATER(ah))
3609 pCap->num_gpio_pins = AR928X_NUM_GPIO;
3610 else
3611 pCap->num_gpio_pins = AR_NUM_GPIO;
3613 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3614 pCap->hw_caps |= ATH9K_HW_CAP_CST;
3615 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3616 } else {
3617 pCap->rts_aggr_limit = (8 * 1024);
3620 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3622 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3623 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3624 if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3625 ah->rfkill_gpio =
3626 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3627 ah->rfkill_polarity =
3628 MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3630 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3632 #endif
3634 if ((ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI) ||
3635 (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE) ||
3636 (ah->hw_version.macVersion == AR_SREV_VERSION_9160) ||
3637 (ah->hw_version.macVersion == AR_SREV_VERSION_9100) ||
3638 (ah->hw_version.macVersion == AR_SREV_VERSION_9280) ||
3639 (ah->hw_version.macVersion == AR_SREV_VERSION_9285))
3640 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3641 else
3642 pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
3644 if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3645 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3646 else
3647 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3649 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3650 pCap->reg_cap =
3651 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3652 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3653 AR_EEPROM_EEREGCAP_EN_KK_U2 |
3654 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3655 } else {
3656 pCap->reg_cap =
3657 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3658 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3661 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3663 pCap->num_antcfg_5ghz =
3664 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3665 pCap->num_antcfg_2ghz =
3666 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3668 if (AR_SREV_9280_10_OR_LATER(ah) && btcoex_enable) {
3669 pCap->hw_caps |= ATH9K_HW_CAP_BT_COEX;
3670 btcoex_info->btactive_gpio = ATH_BTACTIVE_GPIO;
3671 btcoex_info->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
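/*
 * Editorial note: this is where the btcoex scheme is chosen from the chip
 * version, per this patch: AR9285 parts use the 3-wire coexistence scheme,
 * other BT-capable (AR9280 or later) parts fall back to the 2-wire scheme,
 * and chips without the capability get ATH_BTCOEX_CFG_NONE.
 */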
3673 if (AR_SREV_9285(ah))
3674 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_3WIRE;
3675 else
3676 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_2WIRE;
3677 } else {
3678 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_NONE;
3682 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3683 u32 capability, u32 *result)
3685 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3686 switch (type) {
3687 case ATH9K_CAP_CIPHER:
3688 switch (capability) {
3689 case ATH9K_CIPHER_AES_CCM:
3690 case ATH9K_CIPHER_AES_OCB:
3691 case ATH9K_CIPHER_TKIP:
3692 case ATH9K_CIPHER_WEP:
3693 case ATH9K_CIPHER_MIC:
3694 case ATH9K_CIPHER_CLR:
3695 return true;
3696 default:
3697 return false;
3699 case ATH9K_CAP_TKIP_MIC:
3700 switch (capability) {
3701 case 0:
3702 return true;
3703 case 1:
3704 return (ah->sta_id1_defaults &
3705 AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3706 false;
3708 case ATH9K_CAP_TKIP_SPLIT:
3709 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3710 false : true;
3711 case ATH9K_CAP_DIVERSITY:
3712 return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3713 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3714 true : false;
3715 case ATH9K_CAP_MCAST_KEYSRCH:
3716 switch (capability) {
3717 case 0:
3718 return true;
3719 case 1:
3720 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3721 return false;
3722 } else {
3723 return (ah->sta_id1_defaults &
3724 AR_STA_ID1_MCAST_KSRCH) ? true :
3725 false;
3728 return false;
3729 case ATH9K_CAP_TXPOW:
3730 switch (capability) {
3731 case 0:
3732 return 0;
3733 case 1:
3734 *result = regulatory->power_limit;
3735 return 0;
3736 case 2:
3737 *result = regulatory->max_power_level;
3738 return 0;
3739 case 3:
3740 *result = regulatory->tp_scale;
3741 return 0;
3743 return false;
3744 case ATH9K_CAP_DS:
3745 return (AR_SREV_9280_20_OR_LATER(ah) &&
3746 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3747 ? false : true;
3748 default:
3749 return false;
3753 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3754 u32 capability, u32 setting, int *status)
3756 u32 v;
3758 switch (type) {
3759 case ATH9K_CAP_TKIP_MIC:
3760 if (setting)
3761 ah->sta_id1_defaults |=
3762 AR_STA_ID1_CRPT_MIC_ENABLE;
3763 else
3764 ah->sta_id1_defaults &=
3765 ~AR_STA_ID1_CRPT_MIC_ENABLE;
3766 return true;
3767 case ATH9K_CAP_DIVERSITY:
3768 v = REG_READ(ah, AR_PHY_CCK_DETECT);
3769 if (setting)
3770 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3771 else
3772 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3773 REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3774 return true;
3775 case ATH9K_CAP_MCAST_KEYSRCH:
3776 if (setting)
3777 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3778 else
3779 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3780 return true;
3781 default:
3782 return false;
3786 /****************************/
3787 /* GPIO / RFKILL / Antennae */
3788 /****************************/
3790 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3791 u32 gpio, u32 type)
3793 int addr;
3794 u32 gpio_shift, tmp;
3796 if (gpio > 11)
3797 addr = AR_GPIO_OUTPUT_MUX3;
3798 else if (gpio > 5)
3799 addr = AR_GPIO_OUTPUT_MUX2;
3800 else
3801 addr = AR_GPIO_OUTPUT_MUX1;
3803 gpio_shift = (gpio % 6) * 5;
3805 if (AR_SREV_9280_20_OR_LATER(ah)
3806 || (addr != AR_GPIO_OUTPUT_MUX1)) {
3807 REG_RMW(ah, addr, (type << gpio_shift),
3808 (0x1f << gpio_shift));
3809 } else {
3810 tmp = REG_READ(ah, addr);
3811 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3812 tmp &= ~(0x1f << gpio_shift);
3813 tmp |= (type << gpio_shift);
3814 REG_WRITE(ah, addr, tmp);
3818 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3820 u32 gpio_shift;
3822 ASSERT(gpio < ah->caps.num_gpio_pins);
3824 gpio_shift = gpio << 1;
3826 REG_RMW(ah,
3827 AR_GPIO_OE_OUT,
3828 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3829 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3832 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3834 #define MS_REG_READ(x, y) \
3835 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3837 if (gpio >= ah->caps.num_gpio_pins)
3838 return 0xffffffff;
3840 if (AR_SREV_9287_10_OR_LATER(ah))
3841 return MS_REG_READ(AR9287, gpio) != 0;
3842 else if (AR_SREV_9285_10_OR_LATER(ah))
3843 return MS_REG_READ(AR9285, gpio) != 0;
3844 else if (AR_SREV_9280_10_OR_LATER(ah))
3845 return MS_REG_READ(AR928X, gpio) != 0;
3846 else
3847 return MS_REG_READ(AR, gpio) != 0;
3850 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3851 u32 ah_signal_type)
3853 u32 gpio_shift;
3855 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3857 gpio_shift = 2 * gpio;
3859 REG_RMW(ah,
3860 AR_GPIO_OE_OUT,
3861 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3862 (AR_GPIO_OE_OUT_DRV << gpio_shift));
3865 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3867 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3868 AR_GPIO_BIT(gpio));
3871 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3873 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3876 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3878 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3881 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3882 enum ath9k_ant_setting settings,
3883 struct ath9k_channel *chan,
3884 u8 *tx_chainmask,
3885 u8 *rx_chainmask,
3886 u8 *antenna_cfgd)
3888 static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3890 if (AR_SREV_9280(ah)) {
3891 if (!tx_chainmask_cfg) {
3893 tx_chainmask_cfg = *tx_chainmask;
3894 rx_chainmask_cfg = *rx_chainmask;
3897 switch (settings) {
3898 case ATH9K_ANT_FIXED_A:
3899 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3900 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3901 *antenna_cfgd = true;
3902 break;
3903 case ATH9K_ANT_FIXED_B:
3904 if (ah->caps.tx_chainmask >
3905 ATH9K_ANTENNA1_CHAINMASK) {
3906 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3908 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3909 *antenna_cfgd = true;
3910 break;
3911 case ATH9K_ANT_VARIABLE:
3912 *tx_chainmask = tx_chainmask_cfg;
3913 *rx_chainmask = rx_chainmask_cfg;
3914 *antenna_cfgd = true;
3915 break;
3916 default:
3917 break;
3919 } else {
3920 ah->config.diversity_control = settings;
3923 return true;
3926 /*********************/
3927 /* General Operation */
3928 /*********************/
3930 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3932 u32 bits = REG_READ(ah, AR_RX_FILTER);
3933 u32 phybits = REG_READ(ah, AR_PHY_ERR);
3935 if (phybits & AR_PHY_ERR_RADAR)
3936 bits |= ATH9K_RX_FILTER_PHYRADAR;
3937 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
3938 bits |= ATH9K_RX_FILTER_PHYERR;
3940 return bits;
3943 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
3945 u32 phybits;
3947 REG_WRITE(ah, AR_RX_FILTER, (bits & 0xffff) | AR_RX_COMPR_BAR);
3948 phybits = 0;
3949 if (bits & ATH9K_RX_FILTER_PHYRADAR)
3950 phybits |= AR_PHY_ERR_RADAR;
3951 if (bits & ATH9K_RX_FILTER_PHYERR)
3952 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
3953 REG_WRITE(ah, AR_PHY_ERR, phybits);
3955 if (phybits)
3956 REG_WRITE(ah, AR_RXCFG,
3957 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
3958 else
3959 REG_WRITE(ah, AR_RXCFG,
3960 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
3963 bool ath9k_hw_phy_disable(struct ath_hw *ah)
3965 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM);
3968 bool ath9k_hw_disable(struct ath_hw *ah)
3970 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
3971 return false;
3973 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD);
3976 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
3978 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3979 struct ath9k_channel *chan = ah->curchan;
3980 struct ieee80211_channel *channel = chan->chan;
3982 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
3984 ah->eep_ops->set_txpower(ah, chan,
3985 ath9k_regd_get_ctl(regulatory, chan),
3986 channel->max_antenna_gain * 2,
3987 channel->max_power * 2,
3988 min((u32) MAX_RATE_POWER,
3989 (u32) regulatory->power_limit));
3992 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
3994 memcpy(ah->macaddr, mac, ETH_ALEN);
3997 void ath9k_hw_setopmode(struct ath_hw *ah)
3999 ath9k_hw_set_operating_mode(ah, ah->opmode);
4002 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4004 REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4005 REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4008 void ath9k_hw_setbssidmask(struct ath_softc *sc)
4010 REG_WRITE(sc->sc_ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
4011 REG_WRITE(sc->sc_ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
4014 void ath9k_hw_write_associd(struct ath_softc *sc)
4016 REG_WRITE(sc->sc_ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
4017 REG_WRITE(sc->sc_ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
4018 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4021 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4023 u64 tsf;
4025 tsf = REG_READ(ah, AR_TSF_U32);
4026 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4028 return tsf;
4031 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4033 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4034 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4037 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4039 ath9k_ps_wakeup(ah->ah_sc);
4040 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4041 AH_TSF_WRITE_TIMEOUT))
4042 DPRINTF(ah->ah_sc, ATH_DBG_RESET,
4043 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4045 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4046 ath9k_ps_restore(ah->ah_sc);
4049 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4051 if (setting)
4052 ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4053 else
4054 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4057 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4059 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4060 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad slot time %u\n", us);
4061 ah->slottime = (u32) -1;
4062 return false;
4063 } else {
4064 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4065 ah->slottime = us;
4066 return true;
4070 void ath9k_hw_set11nmac2040(struct ath_hw *ah, enum ath9k_ht_macmode mode)
4072 u32 macmode;
4074 if (mode == ATH9K_HT_MACMODE_2040 &&
4075 !ah->config.cwm_ignore_extcca)
4076 macmode = AR_2040_JOINED_RX_CLEAR;
4077 else
4078 macmode = 0;
4080 REG_WRITE(ah, AR_2040_MODE, macmode);