xref: /linux/drivers/net/wireless/ath/ath9k/hw.c (revision 9429ec96c2718c0d1e3317cf60a87a0405223814)
1 /*
2  * Copyright (c) 2008-2011 Atheros Communications Inc.
3  *
4  * Permission to use, copy, modify, and/or distribute this software for any
5  * purpose with or without fee is hereby granted, provided that the above
6  * copyright notice and this permission notice appear in all copies.
7  *
8  * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9  * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10  * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11  * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12  * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13  * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15  */
16 
17 #include <linux/io.h>
18 #include <linux/slab.h>
19 #include <linux/module.h>
20 #include <asm/unaligned.h>
21 
22 #include "hw.h"
23 #include "hw-ops.h"
24 #include "rc.h"
25 #include "ar9003_mac.h"
26 #include "ar9003_mci.h"
27 #include "debug.h"
28 #include "ath9k.h"
29 
30 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
31 
32 MODULE_AUTHOR("Atheros Communications");
33 MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
34 MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
35 MODULE_LICENSE("Dual BSD/GPL");
36 
37 static int __init ath9k_init(void)
38 {
39 	return 0;
40 }
41 module_init(ath9k_init);
42 
43 static void __exit ath9k_exit(void)
44 {
45 	return;
46 }
47 module_exit(ath9k_exit);
48 
49 /* Private hardware callbacks */
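/*
 * These thin wrappers dispatch to the family-specific implementations
 * filled in when ath9k_hw_attach_ops() selects the AR9002 or AR9003
 * ops; optional callbacks are checked for NULL before being invoked.
 */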
50 
51 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
52 {
53 	ath9k_hw_private_ops(ah)->init_cal_settings(ah);
54 }
55 
56 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
57 {
58 	ath9k_hw_private_ops(ah)->init_mode_regs(ah);
59 }
60 
61 static u32 ath9k_hw_compute_pll_control(struct ath_hw *ah,
62 					struct ath9k_channel *chan)
63 {
64 	return ath9k_hw_private_ops(ah)->compute_pll_control(ah, chan);
65 }
66 
67 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
68 {
69 	if (!ath9k_hw_private_ops(ah)->init_mode_gain_regs)
70 		return;
71 
72 	ath9k_hw_private_ops(ah)->init_mode_gain_regs(ah);
73 }
74 
75 static void ath9k_hw_ani_cache_ini_regs(struct ath_hw *ah)
76 {
77 	/* You will not have this callback if using the old ANI */
78 	if (!ath9k_hw_private_ops(ah)->ani_cache_ini_regs)
79 		return;
80 
81 	ath9k_hw_private_ops(ah)->ani_cache_ini_regs(ah);
82 }
83 
84 /********************/
85 /* Helper Functions */
86 /********************/
87 
88 #ifdef CONFIG_ATH9K_DEBUGFS
89 
90 void ath9k_debug_sync_cause(struct ath_common *common, u32 sync_cause)
91 {
92 	struct ath_softc *sc = common->priv;
93 	if (sync_cause)
94 		sc->debug.stats.istats.sync_cause_all++;
95 	if (sync_cause & AR_INTR_SYNC_RTC_IRQ)
96 		sc->debug.stats.istats.sync_rtc_irq++;
97 	if (sync_cause & AR_INTR_SYNC_MAC_IRQ)
98 		sc->debug.stats.istats.sync_mac_irq++;
99 	if (sync_cause & AR_INTR_SYNC_EEPROM_ILLEGAL_ACCESS)
100 		sc->debug.stats.istats.eeprom_illegal_access++;
101 	if (sync_cause & AR_INTR_SYNC_APB_TIMEOUT)
102 		sc->debug.stats.istats.apb_timeout++;
103 	if (sync_cause & AR_INTR_SYNC_PCI_MODE_CONFLICT)
104 		sc->debug.stats.istats.pci_mode_conflict++;
105 	if (sync_cause & AR_INTR_SYNC_HOST1_FATAL)
106 		sc->debug.stats.istats.host1_fatal++;
107 	if (sync_cause & AR_INTR_SYNC_HOST1_PERR)
108 		sc->debug.stats.istats.host1_perr++;
109 	if (sync_cause & AR_INTR_SYNC_TRCV_FIFO_PERR)
110 		sc->debug.stats.istats.trcv_fifo_perr++;
111 	if (sync_cause & AR_INTR_SYNC_RADM_CPL_EP)
112 		sc->debug.stats.istats.radm_cpl_ep++;
113 	if (sync_cause & AR_INTR_SYNC_RADM_CPL_DLLP_ABORT)
114 		sc->debug.stats.istats.radm_cpl_dllp_abort++;
115 	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TLP_ABORT)
116 		sc->debug.stats.istats.radm_cpl_tlp_abort++;
117 	if (sync_cause & AR_INTR_SYNC_RADM_CPL_ECRC_ERR)
118 		sc->debug.stats.istats.radm_cpl_ecrc_err++;
119 	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT)
120 		sc->debug.stats.istats.radm_cpl_timeout++;
121 	if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT)
122 		sc->debug.stats.istats.local_timeout++;
123 	if (sync_cause & AR_INTR_SYNC_PM_ACCESS)
124 		sc->debug.stats.istats.pm_access++;
125 	if (sync_cause & AR_INTR_SYNC_MAC_AWAKE)
126 		sc->debug.stats.istats.mac_awake++;
127 	if (sync_cause & AR_INTR_SYNC_MAC_ASLEEP)
128 		sc->debug.stats.istats.mac_asleep++;
129 	if (sync_cause & AR_INTR_SYNC_MAC_SLEEP_ACCESS)
130 		sc->debug.stats.istats.mac_sleep_access++;
131 }
132 #endif
133 
134 
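/*
 * Compute the MAC clock rate (in MHz) for the current band and channel
 * width and cache it in common->clockrate; ath9k_hw_mac_to_clks() uses
 * it to convert microsecond values into hardware clock cycles.
 */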
135 static void ath9k_hw_set_clockrate(struct ath_hw *ah)
136 {
137 	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
138 	struct ath_common *common = ath9k_hw_common(ah);
139 	unsigned int clockrate;
140 
141 	/* AR9287 v1.3+ uses async FIFO and runs the MAC at 117 MHz */
142 	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah))
143 		clockrate = 117;
144 	else if (!ah->curchan) /* should really check for CCK instead */
145 		clockrate = ATH9K_CLOCK_RATE_CCK;
146 	else if (conf->channel->band == IEEE80211_BAND_2GHZ)
147 		clockrate = ATH9K_CLOCK_RATE_2GHZ_OFDM;
148 	else if (ah->caps.hw_caps & ATH9K_HW_CAP_FASTCLOCK)
149 		clockrate = ATH9K_CLOCK_FAST_RATE_5GHZ_OFDM;
150 	else
151 		clockrate = ATH9K_CLOCK_RATE_5GHZ_OFDM;
152 
153 	if (conf_is_ht40(conf))
154 		clockrate *= 2;
155 
156 	if (ah->curchan) {
157 		if (IS_CHAN_HALF_RATE(ah->curchan))
158 			clockrate /= 2;
159 		if (IS_CHAN_QUARTER_RATE(ah->curchan))
160 			clockrate /= 4;
161 	}
162 
163 	common->clockrate = clockrate;
164 }
165 
166 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
167 {
168 	struct ath_common *common = ath9k_hw_common(ah);
169 
170 	return usecs * common->clockrate;
171 }
172 
173 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
174 {
175 	int i;
176 
177 	BUG_ON(timeout < AH_TIME_QUANTUM);
178 
179 	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
180 		if ((REG_READ(ah, reg) & mask) == val)
181 			return true;
182 
183 		udelay(AH_TIME_QUANTUM);
184 	}
185 
186 	ath_dbg(ath9k_hw_common(ah), ANY,
187 		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
188 		timeout, reg, REG_READ(ah, reg), mask, val);
189 
190 	return false;
191 }
192 EXPORT_SYMBOL(ath9k_hw_wait);
193 
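/*
 * Busy-wait long enough for the synthesizer to settle after programming
 * a new channel; the requested delay is scaled for CCK and for
 * half/quarter-rate channels before BASE_ACTIVATE_DELAY is added.
 */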
194 void ath9k_hw_synth_delay(struct ath_hw *ah, struct ath9k_channel *chan,
195 			  int hw_delay)
196 {
197 	if (IS_CHAN_B(chan))
198 		hw_delay = (4 * hw_delay) / 22;
199 	else
200 		hw_delay /= 10;
201 
202 	if (IS_CHAN_HALF_RATE(chan))
203 		hw_delay *= 2;
204 	else if (IS_CHAN_QUARTER_RATE(chan))
205 		hw_delay *= 4;
206 
207 	udelay(hw_delay + BASE_ACTIVATE_DELAY);
208 }
209 
210 void ath9k_hw_write_array(struct ath_hw *ah, struct ar5416IniArray *array,
211 			  int column, unsigned int *writecnt)
212 {
213 	int r;
214 
215 	ENABLE_REGWRITE_BUFFER(ah);
216 	for (r = 0; r < array->ia_rows; r++) {
217 		REG_WRITE(ah, INI_RA(array, r, 0),
218 			  INI_RA(array, r, column));
219 		DO_DELAY(*writecnt);
220 	}
221 	REGWRITE_BUFFER_FLUSH(ah);
222 }
223 
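/* Return the lowest n bits of val in reversed bit order. */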
224 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
225 {
226 	u32 retval;
227 	int i;
228 
229 	for (i = 0, retval = 0; i < n; i++) {
230 		retval = (retval << 1) | (val & 1);
231 		val >>= 1;
232 	}
233 	return retval;
234 }
235 
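/*
 * Compute the transmit time (in microseconds, including SIFS and
 * PLCP/preamble overhead) of a frame of frameLen bytes at the given rate
 * in kbps, for CCK and OFDM PHYs; short preamble and half/quarter-rate
 * channels are taken into account.
 */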
236 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
237 			   u8 phy, int kbps,
238 			   u32 frameLen, u16 rateix,
239 			   bool shortPreamble)
240 {
241 	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
242 
243 	if (kbps == 0)
244 		return 0;
245 
246 	switch (phy) {
247 	case WLAN_RC_PHY_CCK:
248 		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
249 		if (shortPreamble)
250 			phyTime >>= 1;
251 		numBits = frameLen << 3;
252 		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
253 		break;
254 	case WLAN_RC_PHY_OFDM:
255 		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
256 			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
257 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
258 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
259 			txTime = OFDM_SIFS_TIME_QUARTER
260 				+ OFDM_PREAMBLE_TIME_QUARTER
261 				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
262 		} else if (ah->curchan &&
263 			   IS_CHAN_HALF_RATE(ah->curchan)) {
264 			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
265 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
266 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
267 			txTime = OFDM_SIFS_TIME_HALF +
268 				OFDM_PREAMBLE_TIME_HALF
269 				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
270 		} else {
271 			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
272 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
273 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
274 			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
275 				+ (numSymbols * OFDM_SYMBOL_TIME);
276 		}
277 		break;
278 	default:
279 		ath_err(ath9k_hw_common(ah),
280 			"Unknown phy %u (rate ix %u)\n", phy, rateix);
281 		txTime = 0;
282 		break;
283 	}
284 
285 	return txTime;
286 }
287 EXPORT_SYMBOL(ath9k_hw_computetxtime);
288 
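/*
 * Fill in the synthesizer, control and extension channel center
 * frequencies for a channel; for non-HT40 channels all three are simply
 * the primary channel frequency.
 */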
289 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
290 				  struct ath9k_channel *chan,
291 				  struct chan_centers *centers)
292 {
293 	int8_t extoff;
294 
295 	if (!IS_CHAN_HT40(chan)) {
296 		centers->ctl_center = centers->ext_center =
297 			centers->synth_center = chan->channel;
298 		return;
299 	}
300 
301 	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
302 	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
303 		centers->synth_center =
304 			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
305 		extoff = 1;
306 	} else {
307 		centers->synth_center =
308 			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
309 		extoff = -1;
310 	}
311 
312 	centers->ctl_center =
313 		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
314 	/* 25 MHz spacing is supported by the hardware but not by the upper layers */
315 	centers->ext_center =
316 		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
317 }
318 
319 /******************/
320 /* Chip Revisions */
321 /******************/
322 
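/*
 * Determine the MAC version and revision, either from the device ID
 * (SoC parts) or from the AR_SREV register, and detect whether the
 * device is PCI-Express.
 */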
323 static void ath9k_hw_read_revisions(struct ath_hw *ah)
324 {
325 	u32 val;
326 
327 	switch (ah->hw_version.devid) {
328 	case AR5416_AR9100_DEVID:
329 		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
330 		break;
331 	case AR9300_DEVID_AR9330:
332 		ah->hw_version.macVersion = AR_SREV_VERSION_9330;
333 		if (ah->get_mac_revision) {
334 			ah->hw_version.macRev = ah->get_mac_revision();
335 		} else {
336 			val = REG_READ(ah, AR_SREV);
337 			ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
338 		}
339 		return;
340 	case AR9300_DEVID_AR9340:
341 		ah->hw_version.macVersion = AR_SREV_VERSION_9340;
342 		val = REG_READ(ah, AR_SREV);
343 		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
344 		return;
345 	case AR9300_DEVID_QCA955X:
346 		ah->hw_version.macVersion = AR_SREV_VERSION_9550;
347 		return;
348 	}
349 
350 	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
351 
352 	if (val == 0xFF) {
353 		val = REG_READ(ah, AR_SREV);
354 		ah->hw_version.macVersion =
355 			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
356 		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
357 
358 		if (AR_SREV_9462(ah))
359 			ah->is_pciexpress = true;
360 		else
361 			ah->is_pciexpress = (val &
362 					     AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
363 	} else {
364 		if (!AR_SREV_9100(ah))
365 			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
366 
367 		ah->hw_version.macRev = val & AR_SREV_REVISION;
368 
369 		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
370 			ah->is_pciexpress = true;
371 	}
372 }
373 
374 /************************************/
375 /* HW Attach, Detach, Init Routines */
376 /************************************/
377 
378 static void ath9k_hw_disablepcie(struct ath_hw *ah)
379 {
380 	if (!AR_SREV_5416(ah))
381 		return;
382 
383 	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
384 	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
385 	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
386 	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
387 	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
388 	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
389 	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
390 	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
391 	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
392 
393 	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
394 }
395 
396 /* This should work for all families including legacy */
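/*
 * Sanity-check register access by walking test patterns through
 * AR_STA_ID0 (and a PHY register on pre-AR9300 parts) and verifying that
 * every value reads back unchanged; the original register contents are
 * restored afterwards.
 */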
397 static bool ath9k_hw_chip_test(struct ath_hw *ah)
398 {
399 	struct ath_common *common = ath9k_hw_common(ah);
400 	u32 regAddr[2] = { AR_STA_ID0 };
401 	u32 regHold[2];
402 	static const u32 patternData[4] = {
403 		0x55555555, 0xaaaaaaaa, 0x66666666, 0x99999999
404 	};
405 	int i, j, loop_max;
406 
407 	if (!AR_SREV_9300_20_OR_LATER(ah)) {
408 		loop_max = 2;
409 		regAddr[1] = AR_PHY_BASE + (8 << 2);
410 	} else
411 		loop_max = 1;
412 
413 	for (i = 0; i < loop_max; i++) {
414 		u32 addr = regAddr[i];
415 		u32 wrData, rdData;
416 
417 		regHold[i] = REG_READ(ah, addr);
418 		for (j = 0; j < 0x100; j++) {
419 			wrData = (j << 16) | j;
420 			REG_WRITE(ah, addr, wrData);
421 			rdData = REG_READ(ah, addr);
422 			if (rdData != wrData) {
423 				ath_err(common,
424 					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
425 					addr, wrData, rdData);
426 				return false;
427 			}
428 		}
429 		for (j = 0; j < 4; j++) {
430 			wrData = patternData[j];
431 			REG_WRITE(ah, addr, wrData);
432 			rdData = REG_READ(ah, addr);
433 			if (wrData != rdData) {
434 				ath_err(common,
435 					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
436 					addr, wrData, rdData);
437 				return false;
438 			}
439 		}
440 		REG_WRITE(ah, regAddr[i], regHold[i]);
441 	}
442 	udelay(100);
443 
444 	return true;
445 }
446 
447 static void ath9k_hw_init_config(struct ath_hw *ah)
448 {
449 	int i;
450 
451 	ah->config.dma_beacon_response_time = 1;
452 	ah->config.sw_beacon_response_time = 6;
453 	ah->config.additional_swba_backoff = 0;
454 	ah->config.ack_6mb = 0x0;
455 	ah->config.cwm_ignore_extcca = 0;
456 	ah->config.pcie_clock_req = 0;
457 	ah->config.pcie_waen = 0;
458 	ah->config.analog_shiftreg = 1;
459 	ah->config.enable_ani = true;
460 
461 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
462 		ah->config.spurchans[i][0] = AR_NO_SPUR;
463 		ah->config.spurchans[i][1] = AR_NO_SPUR;
464 	}
465 
466 	ah->config.rx_intr_mitigation = true;
467 	ah->config.pcieSerDesWrite = true;
468 
469 	/*
470 	 * Register serialization is needed only for PCI devices (Cardbus,
471 	 * PCI, miniPCI) and only on multiprocessor/HyperThreaded systems.
472 	 * This means we use it for all AR5416 devices, and the few
473 	 * minor PCI AR9280 devices out there.
474 	 *
475 	 * Serialization is required because these devices do not handle
476 	 * two concurrent reads/writes well, due to the latency involved.
477 	 * While one read/write is still being processed by the hardware,
478 	 * another CPU may issue a second one, and the hardware can end up
479 	 * stuck in a loop. We prevent this by serializing all reads and
480 	 * writes.
481 	 *
482 	 * This issue is not present on PCI-Express devices or pre-AR5416
483 	 * devices (legacy, 802.11abg).
484 	 */
485 	if (num_possible_cpus() > 1)
486 		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
487 }
488 
489 static void ath9k_hw_init_defaults(struct ath_hw *ah)
490 {
491 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
492 
493 	regulatory->country_code = CTRY_DEFAULT;
494 	regulatory->power_limit = MAX_RATE_POWER;
495 
496 	ah->hw_version.magic = AR5416_MAGIC;
497 	ah->hw_version.subvendorid = 0;
498 
499 	ah->atim_window = 0;
500 	ah->sta_id1_defaults =
501 		AR_STA_ID1_CRPT_MIC_ENABLE |
502 		AR_STA_ID1_MCAST_KSRCH;
503 	if (AR_SREV_9100(ah))
504 		ah->sta_id1_defaults |= AR_STA_ID1_AR9100_BA_FIX;
505 	ah->slottime = ATH9K_SLOT_TIME_9;
506 	ah->globaltxtimeout = (u32) -1;
507 	ah->power_mode = ATH9K_PM_UNDEFINED;
508 	ah->htc_reset_init = true;
509 }
510 
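/*
 * Assemble the MAC address from the three 16-bit EEPROM MAC words;
 * an all-zero or all-ones address is rejected with -EADDRNOTAVAIL.
 */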
511 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
512 {
513 	struct ath_common *common = ath9k_hw_common(ah);
514 	u32 sum;
515 	int i;
516 	u16 eeval;
517 	static const u32 EEP_MAC[] = { EEP_MAC_LSW, EEP_MAC_MID, EEP_MAC_MSW };
518 
519 	sum = 0;
520 	for (i = 0; i < 3; i++) {
521 		eeval = ah->eep_ops->get_eeprom(ah, EEP_MAC[i]);
522 		sum += eeval;
523 		common->macaddr[2 * i] = eeval >> 8;
524 		common->macaddr[2 * i + 1] = eeval & 0xff;
525 	}
526 	if (sum == 0 || sum == 0xffff * 3)
527 		return -EADDRNOTAVAIL;
528 
529 	return 0;
530 }
531 
532 static int ath9k_hw_post_init(struct ath_hw *ah)
533 {
534 	struct ath_common *common = ath9k_hw_common(ah);
535 	int ecode;
536 
537 	if (common->bus_ops->ath_bus_type != ATH_USB) {
538 		if (!ath9k_hw_chip_test(ah))
539 			return -ENODEV;
540 	}
541 
542 	if (!AR_SREV_9300_20_OR_LATER(ah)) {
543 		ecode = ar9002_hw_rf_claim(ah);
544 		if (ecode != 0)
545 			return ecode;
546 	}
547 
548 	ecode = ath9k_hw_eeprom_init(ah);
549 	if (ecode != 0)
550 		return ecode;
551 
552 	ath_dbg(ath9k_hw_common(ah), CONFIG, "EEPROM VER: %d, REV: %d\n",
553 		ah->eep_ops->get_eeprom_ver(ah),
554 		ah->eep_ops->get_eeprom_rev(ah));
555 
556 	ecode = ath9k_hw_rf_alloc_ext_banks(ah);
557 	if (ecode) {
558 		ath_err(ath9k_hw_common(ah),
559 			"Failed allocating banks for external radio\n");
560 		ath9k_hw_rf_free_ext_banks(ah);
561 		return ecode;
562 	}
563 
564 	if (ah->config.enable_ani) {
565 		ath9k_hw_ani_setup(ah);
566 		ath9k_hw_ani_init(ah);
567 	}
568 
569 	return 0;
570 }
571 
572 static void ath9k_hw_attach_ops(struct ath_hw *ah)
573 {
574 	if (AR_SREV_9300_20_OR_LATER(ah))
575 		ar9003_hw_attach_ops(ah);
576 	else
577 		ar9002_hw_attach_ops(ah);
578 }
579 
580 /* Called for all hardware families */
581 static int __ath9k_hw_init(struct ath_hw *ah)
582 {
583 	struct ath_common *common = ath9k_hw_common(ah);
584 	int r = 0;
585 
586 	ath9k_hw_read_revisions(ah);
587 
588 	/*
589 	 * Read back AR_WA into a permanent copy and set bits 14 and 17.
590 	 * We need to do this to avoid RMW of this register. We cannot
591 	 * read this register while the chip is asleep.
592 	 */
593 	ah->WARegVal = REG_READ(ah, AR_WA);
594 	ah->WARegVal |= (AR_WA_D3_L1_DISABLE |
595 			 AR_WA_ASPM_TIMER_BASED_DISABLE);
596 
597 	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
598 		ath_err(common, "Couldn't reset chip\n");
599 		return -EIO;
600 	}
601 
602 	if (AR_SREV_9462(ah))
603 		ah->WARegVal &= ~AR_WA_D3_L1_DISABLE;
604 
605 	ath9k_hw_init_defaults(ah);
606 	ath9k_hw_init_config(ah);
607 
608 	ath9k_hw_attach_ops(ah);
609 
610 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
611 		ath_err(common, "Couldn't wakeup chip\n");
612 		return -EIO;
613 	}
614 
615 	if (NR_CPUS > 1 && ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
616 		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
617 		    ((AR_SREV_9160(ah) || AR_SREV_9280(ah) || AR_SREV_9287(ah)) &&
618 		     !ah->is_pciexpress)) {
619 			ah->config.serialize_regmode =
620 				SER_REG_MODE_ON;
621 		} else {
622 			ah->config.serialize_regmode =
623 				SER_REG_MODE_OFF;
624 		}
625 	}
626 
627 	ath_dbg(common, RESET, "serialize_regmode is %d\n",
628 		ah->config.serialize_regmode);
629 
630 	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
631 		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
632 	else
633 		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;
634 
635 	switch (ah->hw_version.macVersion) {
636 	case AR_SREV_VERSION_5416_PCI:
637 	case AR_SREV_VERSION_5416_PCIE:
638 	case AR_SREV_VERSION_9160:
639 	case AR_SREV_VERSION_9100:
640 	case AR_SREV_VERSION_9280:
641 	case AR_SREV_VERSION_9285:
642 	case AR_SREV_VERSION_9287:
643 	case AR_SREV_VERSION_9271:
644 	case AR_SREV_VERSION_9300:
645 	case AR_SREV_VERSION_9330:
646 	case AR_SREV_VERSION_9485:
647 	case AR_SREV_VERSION_9340:
648 	case AR_SREV_VERSION_9462:
649 	case AR_SREV_VERSION_9550:
650 		break;
651 	default:
652 		ath_err(common,
653 			"Mac Chip Rev 0x%02x.%x is not supported by this driver\n",
654 			ah->hw_version.macVersion, ah->hw_version.macRev);
655 		return -EOPNOTSUPP;
656 	}
657 
658 	if (AR_SREV_9271(ah) || AR_SREV_9100(ah) || AR_SREV_9340(ah) ||
659 	    AR_SREV_9330(ah) || AR_SREV_9550(ah))
660 		ah->is_pciexpress = false;
661 
662 	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
663 	ath9k_hw_init_cal_settings(ah);
664 
665 	ah->ani_function = ATH9K_ANI_ALL;
666 	if (AR_SREV_9280_20_OR_LATER(ah) && !AR_SREV_9300_20_OR_LATER(ah))
667 		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
668 	if (!AR_SREV_9300_20_OR_LATER(ah))
669 		ah->ani_function &= ~ATH9K_ANI_MRC_CCK;
670 
671 	ath9k_hw_init_mode_regs(ah);
672 
673 	if (!ah->is_pciexpress)
674 		ath9k_hw_disablepcie(ah);
675 
676 	r = ath9k_hw_post_init(ah);
677 	if (r)
678 		return r;
679 
680 	ath9k_hw_init_mode_gain_regs(ah);
681 	r = ath9k_hw_fill_cap_info(ah);
682 	if (r)
683 		return r;
684 
685 	r = ath9k_hw_init_macaddr(ah);
686 	if (r) {
687 		ath_err(common, "Failed to initialize MAC address\n");
688 		return r;
689 	}
690 
691 	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
692 		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
693 	else
694 		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
695 
696 	if (AR_SREV_9330(ah))
697 		ah->bb_watchdog_timeout_ms = 85;
698 	else
699 		ah->bb_watchdog_timeout_ms = 25;
700 
701 	common->state = ATH_HW_INITIALIZED;
702 
703 	return 0;
704 }
705 
706 int ath9k_hw_init(struct ath_hw *ah)
707 {
708 	int ret;
709 	struct ath_common *common = ath9k_hw_common(ah);
710 
711 	/* Device IDs for the supported AR5008/AR9001/AR9002/AR9003 chipset families */
712 	switch (ah->hw_version.devid) {
713 	case AR5416_DEVID_PCI:
714 	case AR5416_DEVID_PCIE:
715 	case AR5416_AR9100_DEVID:
716 	case AR9160_DEVID_PCI:
717 	case AR9280_DEVID_PCI:
718 	case AR9280_DEVID_PCIE:
719 	case AR9285_DEVID_PCIE:
720 	case AR9287_DEVID_PCI:
721 	case AR9287_DEVID_PCIE:
722 	case AR2427_DEVID_PCIE:
723 	case AR9300_DEVID_PCIE:
724 	case AR9300_DEVID_AR9485_PCIE:
725 	case AR9300_DEVID_AR9330:
726 	case AR9300_DEVID_AR9340:
727 	case AR9300_DEVID_QCA955X:
728 	case AR9300_DEVID_AR9580:
729 	case AR9300_DEVID_AR9462:
730 	case AR9485_DEVID_AR1111:
731 		break;
732 	default:
733 		if (common->bus_ops->ath_bus_type == ATH_USB)
734 			break;
735 		ath_err(common, "Hardware device ID 0x%04x not supported\n",
736 			ah->hw_version.devid);
737 		return -EOPNOTSUPP;
738 	}
739 
740 	ret = __ath9k_hw_init(ah);
741 	if (ret) {
742 		ath_err(common,
743 			"Unable to initialize hardware; initialization status: %d\n",
744 			ret);
745 		return ret;
746 	}
747 
748 	return 0;
749 }
750 EXPORT_SYMBOL(ath9k_hw_init);
751 
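/*
 * Program the MIC QoS control/select, QoS no-ACK field layout and TXOP
 * registers with their default values.
 */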
752 static void ath9k_hw_init_qos(struct ath_hw *ah)
753 {
754 	ENABLE_REGWRITE_BUFFER(ah);
755 
756 	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
757 	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
758 
759 	REG_WRITE(ah, AR_QOS_NO_ACK,
760 		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
761 		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
762 		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
763 
764 	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
765 	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
766 	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
767 	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
768 	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
769 
770 	REGWRITE_BUFFER_FLUSH(ah);
771 }
772 
773 u32 ar9003_get_pll_sqsum_dvc(struct ath_hw *ah)
774 {
775 	struct ath_common *common = ath9k_hw_common(ah);
776 	int i = 0;
777 
778 	REG_CLR_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
779 	udelay(100);
780 	REG_SET_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
781 
782 	while ((REG_READ(ah, PLL4) & PLL4_MEAS_DONE) == 0) {
783 
784 		udelay(100);
785 
786 		if (WARN_ON_ONCE(i >= 100)) {
787 			ath_err(common, "PLL4 measurement not done\n");
788 			break;
789 		}
790 
791 		i++;
792 	}
793 
794 	return (REG_READ(ah, PLL3) & SQSUM_DVC_MASK) >> 3;
795 }
796 EXPORT_SYMBOL(ar9003_get_pll_sqsum_dvc);
797 
798 static void ath9k_hw_init_pll(struct ath_hw *ah,
799 			      struct ath9k_channel *chan)
800 {
801 	u32 pll;
802 
803 	if (AR_SREV_9485(ah)) {
804 
805 		/* program BB PLL ki and kd value, ki=0x4, kd=0x40 */
806 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
807 			      AR_CH0_BB_DPLL2_PLL_PWD, 0x1);
808 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
809 			      AR_CH0_DPLL2_KD, 0x40);
810 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
811 			      AR_CH0_DPLL2_KI, 0x4);
812 
813 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
814 			      AR_CH0_BB_DPLL1_REFDIV, 0x5);
815 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
816 			      AR_CH0_BB_DPLL1_NINI, 0x58);
817 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
818 			      AR_CH0_BB_DPLL1_NFRAC, 0x0);
819 
820 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
821 			      AR_CH0_BB_DPLL2_OUTDIV, 0x1);
822 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
823 			      AR_CH0_BB_DPLL2_LOCAL_PLL, 0x1);
824 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
825 			      AR_CH0_BB_DPLL2_EN_NEGTRIG, 0x1);
826 
827 		/* program BB PLL phase_shift to 0x6 */
828 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
829 			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x6);
830 
831 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
832 			      AR_CH0_BB_DPLL2_PLL_PWD, 0x0);
833 		udelay(1000);
834 	} else if (AR_SREV_9330(ah)) {
835 		u32 ddr_dpll2, pll_control2, kd;
836 
837 		if (ah->is_clk_25mhz) {
838 			ddr_dpll2 = 0x18e82f01;
839 			pll_control2 = 0xe04a3d;
840 			kd = 0x1d;
841 		} else {
842 			ddr_dpll2 = 0x19e82f01;
843 			pll_control2 = 0x886666;
844 			kd = 0x3d;
845 		}
846 
847 		/* program DDR PLL ki and kd value */
848 		REG_WRITE(ah, AR_CH0_DDR_DPLL2, ddr_dpll2);
849 
850 		/* program DDR PLL phase_shift */
851 		REG_RMW_FIELD(ah, AR_CH0_DDR_DPLL3,
852 			      AR_CH0_DPLL3_PHASE_SHIFT, 0x1);
853 
854 		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
855 		udelay(1000);
856 
857 		/* program refdiv, nint, frac to RTC register */
858 		REG_WRITE(ah, AR_RTC_PLL_CONTROL2, pll_control2);
859 
860 		/* program BB PLL kd and ki value */
861 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KD, kd);
862 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KI, 0x06);
863 
864 		/* program BB PLL phase_shift */
865 		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
866 			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x1);
867 	} else if (AR_SREV_9340(ah) || AR_SREV_9550(ah)) {
868 		u32 regval, pll2_divint, pll2_divfrac, refdiv;
869 
870 		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
871 		udelay(1000);
872 
873 		REG_SET_BIT(ah, AR_PHY_PLL_MODE, 0x1 << 16);
874 		udelay(100);
875 
876 		if (ah->is_clk_25mhz) {
877 			pll2_divint = 0x54;
878 			pll2_divfrac = 0x1eb85;
879 			refdiv = 3;
880 		} else {
881 			if (AR_SREV_9340(ah)) {
882 				pll2_divint = 88;
883 				pll2_divfrac = 0;
884 				refdiv = 5;
885 			} else {
886 				pll2_divint = 0x11;
887 				pll2_divfrac = 0x26666;
888 				refdiv = 1;
889 			}
890 		}
891 
892 		regval = REG_READ(ah, AR_PHY_PLL_MODE);
893 		regval |= (0x1 << 16);
894 		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
895 		udelay(100);
896 
897 		REG_WRITE(ah, AR_PHY_PLL_CONTROL, (refdiv << 27) |
898 			  (pll2_divint << 18) | pll2_divfrac);
899 		udelay(100);
900 
901 		regval = REG_READ(ah, AR_PHY_PLL_MODE);
902 		if (AR_SREV_9340(ah))
903 			regval = (regval & 0x80071fff) | (0x1 << 30) |
904 				 (0x1 << 13) | (0x4 << 26) | (0x18 << 19);
905 		else
906 			regval = (regval & 0x80071fff) | (0x3 << 30) |
907 				 (0x1 << 13) | (0x4 << 26) | (0x60 << 19);
908 		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
909 		REG_WRITE(ah, AR_PHY_PLL_MODE,
910 			  REG_READ(ah, AR_PHY_PLL_MODE) & 0xfffeffff);
911 		udelay(1000);
912 	}
913 
914 	pll = ath9k_hw_compute_pll_control(ah, chan);
915 
916 	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
917 
918 	if (AR_SREV_9485(ah) || AR_SREV_9340(ah) || AR_SREV_9330(ah) ||
919 	    AR_SREV_9550(ah))
920 		udelay(1000);
921 
922 	/* Switch the core clock for AR9271 to 117 MHz */
923 	if (AR_SREV_9271(ah)) {
924 		udelay(500);
925 		REG_WRITE(ah, 0x50040, 0x304);
926 	}
927 
928 	udelay(RTC_PLL_SETTLE_DELAY);
929 
930 	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
931 
932 	if (AR_SREV_9340(ah) || AR_SREV_9550(ah)) {
933 		if (ah->is_clk_25mhz) {
934 			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x17c << 1);
935 			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f3d7);
936 			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e7ae);
937 		} else {
938 			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x261 << 1);
939 			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f400);
940 			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e800);
941 		}
942 		udelay(100);
943 	}
944 }
945 
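/*
 * Set up the primary and secondary interrupt mask registers (AR_IMR,
 * AR_IMR_S2) and the sync interrupt enables, honouring the configured
 * RX/TX interrupt mitigation settings.
 */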
946 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
947 					  enum nl80211_iftype opmode)
948 {
949 	u32 sync_default = AR_INTR_SYNC_DEFAULT;
950 	u32 imr_reg = AR_IMR_TXERR |
951 		AR_IMR_TXURN |
952 		AR_IMR_RXERR |
953 		AR_IMR_RXORN |
954 		AR_IMR_BCNMISC;
955 
956 	if (AR_SREV_9340(ah) || AR_SREV_9550(ah))
957 		sync_default &= ~AR_INTR_SYNC_HOST1_FATAL;
958 
959 	if (AR_SREV_9300_20_OR_LATER(ah)) {
960 		imr_reg |= AR_IMR_RXOK_HP;
961 		if (ah->config.rx_intr_mitigation)
962 			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
963 		else
964 			imr_reg |= AR_IMR_RXOK_LP;
965 
966 	} else {
967 		if (ah->config.rx_intr_mitigation)
968 			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
969 		else
970 			imr_reg |= AR_IMR_RXOK;
971 	}
972 
973 	if (ah->config.tx_intr_mitigation)
974 		imr_reg |= AR_IMR_TXINTM | AR_IMR_TXMINTR;
975 	else
976 		imr_reg |= AR_IMR_TXOK;
977 
978 	ENABLE_REGWRITE_BUFFER(ah);
979 
980 	REG_WRITE(ah, AR_IMR, imr_reg);
981 	ah->imrs2_reg |= AR_IMR_S2_GTT;
982 	REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
983 
984 	if (!AR_SREV_9100(ah)) {
985 		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
986 		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, sync_default);
987 		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
988 	}
989 
990 	REGWRITE_BUFFER_FLUSH(ah);
991 
992 	if (AR_SREV_9300_20_OR_LATER(ah)) {
993 		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE, 0);
994 		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK, 0);
995 		REG_WRITE(ah, AR_INTR_PRIO_SYNC_ENABLE, 0);
996 		REG_WRITE(ah, AR_INTR_PRIO_SYNC_MASK, 0);
997 	}
998 }
999 
1000 static void ath9k_hw_set_sifs_time(struct ath_hw *ah, u32 us)
1001 {
1002 	u32 val = ath9k_hw_mac_to_clks(ah, us - 2);
1003 	val = min(val, (u32) 0xFFFF);
1004 	REG_WRITE(ah, AR_D_GBL_IFS_SIFS, val);
1005 }
1006 
1007 static void ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
1008 {
1009 	u32 val = ath9k_hw_mac_to_clks(ah, us);
1010 	val = min(val, (u32) 0xFFFF);
1011 	REG_WRITE(ah, AR_D_GBL_IFS_SLOT, val);
1012 }
1013 
1014 static void ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1015 {
1016 	u32 val = ath9k_hw_mac_to_clks(ah, us);
1017 	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_ACK));
1018 	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_ACK, val);
1019 }
1020 
1021 static void ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1022 {
1023 	u32 val = ath9k_hw_mac_to_clks(ah, us);
1024 	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_CTS));
1025 	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_CTS, val);
1026 }
1027 
1028 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1029 {
1030 	if (tu > 0xFFFF) {
1031 		ath_dbg(ath9k_hw_common(ah), XMIT, "bad global tx timeout %u\n",
1032 			tu);
1033 		ah->globaltxtimeout = (u32) -1;
1034 		return false;
1035 	} else {
1036 		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1037 		ah->globaltxtimeout = tu;
1038 		return true;
1039 	}
1040 }
1041 
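/*
 * Program SIFS, slot time, ACK/CTS timeouts, EIFS and the USEC register
 * for the current channel, scaling for half/quarter-rate channels and
 * adding the configured coverage class.
 */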
1042 void ath9k_hw_init_global_settings(struct ath_hw *ah)
1043 {
1044 	struct ath_common *common = ath9k_hw_common(ah);
1045 	struct ieee80211_conf *conf = &common->hw->conf;
1046 	const struct ath9k_channel *chan = ah->curchan;
1047 	int acktimeout, ctstimeout, ack_offset = 0;
1048 	int slottime;
1049 	int sifstime;
1050 	int rx_lat = 0, tx_lat = 0, eifs = 0;
1051 	u32 reg;
1052 
1053 	ath_dbg(ath9k_hw_common(ah), RESET, "ah->misc_mode 0x%x\n",
1054 		ah->misc_mode);
1055 
1056 	if (!chan)
1057 		return;
1058 
1059 	if (ah->misc_mode != 0)
1060 		REG_SET_BIT(ah, AR_PCU_MISC, ah->misc_mode);
1061 
1062 	if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1063 		rx_lat = 41;
1064 	else
1065 		rx_lat = 37;
1066 	tx_lat = 54;
1067 
1068 	if (IS_CHAN_5GHZ(chan))
1069 		sifstime = 16;
1070 	else
1071 		sifstime = 10;
1072 
1073 	if (IS_CHAN_HALF_RATE(chan)) {
1074 		eifs = 175;
1075 		rx_lat *= 2;
1076 		tx_lat *= 2;
1077 		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1078 		    tx_lat += 11;
1079 
1080 		sifstime *= 2;
1081 		ack_offset = 16;
1082 		slottime = 13;
1083 	} else if (IS_CHAN_QUARTER_RATE(chan)) {
1084 		eifs = 340;
1085 		rx_lat = (rx_lat * 4) - 1;
1086 		tx_lat *= 4;
1087 		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1088 		    tx_lat += 22;
1089 
1090 		sifstime *= 4;
1091 		ack_offset = 32;
1092 		slottime = 21;
1093 	} else {
1094 		if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1095 			eifs = AR_D_GBL_IFS_EIFS_ASYNC_FIFO;
1096 			reg = AR_USEC_ASYNC_FIFO;
1097 		} else {
1098 			eifs = REG_READ(ah, AR_D_GBL_IFS_EIFS)/
1099 				common->clockrate;
1100 			reg = REG_READ(ah, AR_USEC);
1101 		}
1102 		rx_lat = MS(reg, AR_USEC_RX_LAT);
1103 		tx_lat = MS(reg, AR_USEC_TX_LAT);
1104 
1105 		slottime = ah->slottime;
1106 	}
1107 
1108 	/* As defined by IEEE 802.11-2007 17.3.8.6 */
1109 	acktimeout = slottime + sifstime + 3 * ah->coverage_class + ack_offset;
1110 	ctstimeout = acktimeout;
1111 
1112 	/*
1113 	 * Workaround for early ACK timeouts, add an offset to match the
1114 	 * initval's 64us ack timeout value. Use 48us for the CTS timeout.
1115 	 * This was initially only meant to work around an issue with delayed
1116 	 * BA frames in some implementations, but it has been found to fix ACK
1117 	 * timeout issues in other cases as well.
1118 	 */
1119 	if (conf->channel && conf->channel->band == IEEE80211_BAND_2GHZ &&
1120 	    !IS_CHAN_HALF_RATE(chan) && !IS_CHAN_QUARTER_RATE(chan)) {
1121 		acktimeout += 64 - sifstime - ah->slottime;
1122 		ctstimeout += 48 - sifstime - ah->slottime;
1123 	}
1124 
1125 
1126 	ath9k_hw_set_sifs_time(ah, sifstime);
1127 	ath9k_hw_setslottime(ah, slottime);
1128 	ath9k_hw_set_ack_timeout(ah, acktimeout);
1129 	ath9k_hw_set_cts_timeout(ah, ctstimeout);
1130 	if (ah->globaltxtimeout != (u32) -1)
1131 		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1132 
1133 	REG_WRITE(ah, AR_D_GBL_IFS_EIFS, ath9k_hw_mac_to_clks(ah, eifs));
1134 	REG_RMW(ah, AR_USEC,
1135 		(common->clockrate - 1) |
1136 		SM(rx_lat, AR_USEC_RX_LAT) |
1137 		SM(tx_lat, AR_USEC_TX_LAT),
1138 		AR_USEC_TX_LAT | AR_USEC_RX_LAT | AR_USEC_USEC);
1139 
1140 }
1141 EXPORT_SYMBOL(ath9k_hw_init_global_settings);
1142 
1143 void ath9k_hw_deinit(struct ath_hw *ah)
1144 {
1145 	struct ath_common *common = ath9k_hw_common(ah);
1146 
1147 	if (common->state < ATH_HW_INITIALIZED)
1148 		goto free_hw;
1149 
1150 	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1151 
1152 free_hw:
1153 	ath9k_hw_rf_free_ext_banks(ah);
1154 }
1155 EXPORT_SYMBOL(ath9k_hw_deinit);
1156 
1157 /*******/
1158 /* INI */
1159 /*******/
1160 
1161 u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, struct ath9k_channel *chan)
1162 {
1163 	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1164 
1165 	if (IS_CHAN_B(chan))
1166 		ctl |= CTL_11B;
1167 	else if (IS_CHAN_G(chan))
1168 		ctl |= CTL_11G;
1169 	else
1170 		ctl |= CTL_11A;
1171 
1172 	return ctl;
1173 }
1174 
1175 /****************************************/
1176 /* Reset and Channel Switching Routines */
1177 /****************************************/
1178 
1179 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1180 {
1181 	struct ath_common *common = ath9k_hw_common(ah);
1182 
1183 	ENABLE_REGWRITE_BUFFER(ah);
1184 
1185 	/*
1186 	 * Set AHB_MODE not to do cacheline prefetches.
1187 	 */
1188 	if (!AR_SREV_9300_20_OR_LATER(ah))
1189 		REG_SET_BIT(ah, AR_AHB_MODE, AR_AHB_PREFETCH_RD_EN);
1190 
1191 	/*
1192 	 * let mac dma reads be in 128 byte chunks
1193 	 */
1194 	REG_RMW(ah, AR_TXCFG, AR_TXCFG_DMASZ_128B, AR_TXCFG_DMASZ_MASK);
1195 
1196 	REGWRITE_BUFFER_FLUSH(ah);
1197 
1198 	/*
1199 	 * Restore TX Trigger Level to its pre-reset value.
1200 	 * The initial value depends on whether aggregation is enabled, and is
1201 	 * adjusted whenever underruns are detected.
1202 	 */
1203 	if (!AR_SREV_9300_20_OR_LATER(ah))
1204 		REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1205 
1206 	ENABLE_REGWRITE_BUFFER(ah);
1207 
1208 	/*
1209 	 * let mac dma writes be in 128 byte chunks
1210 	 */
1211 	REG_RMW(ah, AR_RXCFG, AR_RXCFG_DMASZ_128B, AR_RXCFG_DMASZ_MASK);
1212 
1213 	/*
1214 	 * Setup receive FIFO threshold to hold off TX activities
1215 	 */
1216 	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1217 
1218 	if (AR_SREV_9300_20_OR_LATER(ah)) {
1219 		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_HP, 0x1);
1220 		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_LP, 0x1);
1221 
1222 		ath9k_hw_set_rx_bufsize(ah, common->rx_bufsize -
1223 			ah->caps.rx_status_len);
1224 	}
1225 
1226 	/*
1227 	 * reduce the number of usable entries in PCU TXBUF to avoid
1228 	 * wrap around issues.
1229 	 */
1230 	if (AR_SREV_9285(ah)) {
1231 		/* For AR9285 the number of FIFOs is reduced to half,
1232 		 * so also set the usable TX buffer size to half to
1233 		 * avoid data/delimiter underruns.
1234 		 */
1235 		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1236 			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1237 	} else if (!AR_SREV_9271(ah)) {
1238 		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1239 			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1240 	}
1241 
1242 	REGWRITE_BUFFER_FLUSH(ah);
1243 
1244 	if (AR_SREV_9300_20_OR_LATER(ah))
1245 		ath9k_hw_reset_txstatus_ring(ah);
1246 }
1247 
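/*
 * Program AR_STA_ID1 (and the AP/ad-hoc indication bit in AR_CFG)
 * according to the interface type.
 */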
1248 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1249 {
1250 	u32 mask = AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC;
1251 	u32 set = AR_STA_ID1_KSRCH_MODE;
1252 
1253 	switch (opmode) {
1254 	case NL80211_IFTYPE_ADHOC:
1255 	case NL80211_IFTYPE_MESH_POINT:
1256 		set |= AR_STA_ID1_ADHOC;
1257 		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1258 		break;
1259 	case NL80211_IFTYPE_AP:
1260 		set |= AR_STA_ID1_STA_AP;
1261 		/* fall through */
1262 	case NL80211_IFTYPE_STATION:
1263 		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1264 		break;
1265 	default:
1266 		if (!ah->is_monitoring)
1267 			set = 0;
1268 		break;
1269 	}
1270 	REG_RMW(ah, AR_STA_ID1, set, mask);
1271 }
1272 
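/*
 * Split a scaled coefficient into the mantissa and exponent values used
 * for the PHY delta-slope parameters.
 */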
1273 void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, u32 coef_scaled,
1274 				   u32 *coef_mantissa, u32 *coef_exponent)
1275 {
1276 	u32 coef_exp, coef_man;
1277 
1278 	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1279 		if ((coef_scaled >> coef_exp) & 0x1)
1280 			break;
1281 
1282 	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1283 
1284 	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1285 
1286 	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1287 	*coef_exponent = coef_exp - 16;
1288 }
1289 
1290 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1291 {
1292 	u32 rst_flags;
1293 	u32 tmpReg;
1294 
1295 	if (AR_SREV_9100(ah)) {
1296 		REG_RMW_FIELD(ah, AR_RTC_DERIVED_CLK,
1297 			      AR_RTC_DERIVED_CLK_PERIOD, 1);
1298 		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1299 	}
1300 
1301 	ENABLE_REGWRITE_BUFFER(ah);
1302 
1303 	if (AR_SREV_9300_20_OR_LATER(ah)) {
1304 		REG_WRITE(ah, AR_WA, ah->WARegVal);
1305 		udelay(10);
1306 	}
1307 
1308 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1309 		  AR_RTC_FORCE_WAKE_ON_INT);
1310 
1311 	if (AR_SREV_9100(ah)) {
1312 		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1313 			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1314 	} else {
1315 		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1316 		if (tmpReg &
1317 		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1318 		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1319 			u32 val;
1320 			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1321 
1322 			val = AR_RC_HOSTIF;
1323 			if (!AR_SREV_9300_20_OR_LATER(ah))
1324 				val |= AR_RC_AHB;
1325 			REG_WRITE(ah, AR_RC, val);
1326 
1327 		} else if (!AR_SREV_9300_20_OR_LATER(ah))
1328 			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1329 
1330 		rst_flags = AR_RTC_RC_MAC_WARM;
1331 		if (type == ATH9K_RESET_COLD)
1332 			rst_flags |= AR_RTC_RC_MAC_COLD;
1333 	}
1334 
1335 	if (AR_SREV_9330(ah)) {
1336 		int npend = 0;
1337 		int i;
1338 
1339 		/* AR9330 WAR:
1340 		 * call external reset function to reset WMAC if:
1341 		 * - doing a cold reset
1342 		 * - we have pending frames in the TX queues
1343 		 */
1344 
1345 		for (i = 0; i < AR_NUM_QCU; i++) {
1346 			npend = ath9k_hw_numtxpending(ah, i);
1347 			if (npend)
1348 				break;
1349 		}
1350 
1351 		if (ah->external_reset &&
1352 		    (npend || type == ATH9K_RESET_COLD)) {
1353 			int reset_err = 0;
1354 
1355 			ath_dbg(ath9k_hw_common(ah), RESET,
1356 				"reset MAC via external reset\n");
1357 
1358 			reset_err = ah->external_reset();
1359 			if (reset_err) {
1360 				ath_err(ath9k_hw_common(ah),
1361 					"External reset failed, err=%d\n",
1362 					reset_err);
1363 				return false;
1364 			}
1365 
1366 			REG_WRITE(ah, AR_RTC_RESET, 1);
1367 		}
1368 	}
1369 
1370 	if (ath9k_hw_mci_is_enabled(ah))
1371 		ar9003_mci_check_gpm_offset(ah);
1372 
1373 	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1374 
1375 	REGWRITE_BUFFER_FLUSH(ah);
1376 
1377 	udelay(50);
1378 
1379 	REG_WRITE(ah, AR_RTC_RC, 0);
1380 	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1381 		ath_dbg(ath9k_hw_common(ah), RESET, "RTC stuck in MAC reset\n");
1382 		return false;
1383 	}
1384 
1385 	if (!AR_SREV_9100(ah))
1386 		REG_WRITE(ah, AR_RC, 0);
1387 
1388 	if (AR_SREV_9100(ah))
1389 		udelay(50);
1390 
1391 	return true;
1392 }
1393 
1394 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1395 {
1396 	ENABLE_REGWRITE_BUFFER(ah);
1397 
1398 	if (AR_SREV_9300_20_OR_LATER(ah)) {
1399 		REG_WRITE(ah, AR_WA, ah->WARegVal);
1400 		udelay(10);
1401 	}
1402 
1403 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1404 		  AR_RTC_FORCE_WAKE_ON_INT);
1405 
1406 	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1407 		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1408 
1409 	REG_WRITE(ah, AR_RTC_RESET, 0);
1410 
1411 	REGWRITE_BUFFER_FLUSH(ah);
1412 
1413 	if (!AR_SREV_9300_20_OR_LATER(ah))
1414 		udelay(2);
1415 
1416 	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1417 		REG_WRITE(ah, AR_RC, 0);
1418 
1419 	REG_WRITE(ah, AR_RTC_RESET, 1);
1420 
1421 	if (!ath9k_hw_wait(ah,
1422 			   AR_RTC_STATUS,
1423 			   AR_RTC_STATUS_M,
1424 			   AR_RTC_STATUS_ON,
1425 			   AH_WAIT_TIMEOUT)) {
1426 		ath_dbg(ath9k_hw_common(ah), RESET, "RTC not waking up\n");
1427 		return false;
1428 	}
1429 
1430 	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1431 }
1432 
1433 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1434 {
1435 	bool ret = false;
1436 
1437 	if (AR_SREV_9300_20_OR_LATER(ah)) {
1438 		REG_WRITE(ah, AR_WA, ah->WARegVal);
1439 		udelay(10);
1440 	}
1441 
1442 	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1443 		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1444 
1445 	switch (type) {
1446 	case ATH9K_RESET_POWER_ON:
1447 		ret = ath9k_hw_set_reset_power_on(ah);
1448 		break;
1449 	case ATH9K_RESET_WARM:
1450 	case ATH9K_RESET_COLD:
1451 		ret = ath9k_hw_set_reset(ah, type);
1452 		break;
1453 	default:
1454 		break;
1455 	}
1456 
1457 	return ret;
1458 }
1459 
1460 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1461 				struct ath9k_channel *chan)
1462 {
1463 	int reset_type = ATH9K_RESET_WARM;
1464 
1465 	if (AR_SREV_9280(ah)) {
1466 		if (ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
1467 			reset_type = ATH9K_RESET_POWER_ON;
1468 		else
1469 			reset_type = ATH9K_RESET_COLD;
1470 	}
1471 
1472 	if (!ath9k_hw_set_reset_reg(ah, reset_type))
1473 		return false;
1474 
1475 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1476 		return false;
1477 
1478 	ah->chip_fullsleep = false;
1479 
1480 	if (AR_SREV_9330(ah))
1481 		ar9003_hw_internal_regulator_apply(ah);
1482 	ath9k_hw_init_pll(ah, chan);
1483 	ath9k_hw_set_rfmode(ah, chan);
1484 
1485 	return true;
1486 }
1487 
1488 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1489 				    struct ath9k_channel *chan)
1490 {
1491 	struct ath_common *common = ath9k_hw_common(ah);
1492 	u32 qnum;
1493 	int r;
1494 	bool edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA);
1495 	bool band_switch, mode_diff;
1496 	u8 ini_reloaded;
1497 
1498 	band_switch = (chan->channelFlags & (CHANNEL_2GHZ | CHANNEL_5GHZ)) !=
1499 		      (ah->curchan->channelFlags & (CHANNEL_2GHZ |
1500 						    CHANNEL_5GHZ));
1501 	mode_diff = (chan->chanmode != ah->curchan->chanmode);
1502 
1503 	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1504 		if (ath9k_hw_numtxpending(ah, qnum)) {
1505 			ath_dbg(common, QUEUE,
1506 				"Transmit frames pending on queue %d\n", qnum);
1507 			return false;
1508 		}
1509 	}
1510 
1511 	if (!ath9k_hw_rfbus_req(ah)) {
1512 		ath_err(common, "Could not kill baseband RX\n");
1513 		return false;
1514 	}
1515 
1516 	if (edma && (band_switch || mode_diff)) {
1517 		ath9k_hw_mark_phy_inactive(ah);
1518 		udelay(5);
1519 
1520 		ath9k_hw_init_pll(ah, NULL);
1521 
1522 		if (ath9k_hw_fast_chan_change(ah, chan, &ini_reloaded)) {
1523 			ath_err(common, "Failed to do fast channel change\n");
1524 			return false;
1525 		}
1526 	}
1527 
1528 	ath9k_hw_set_channel_regs(ah, chan);
1529 
1530 	r = ath9k_hw_rf_set_freq(ah, chan);
1531 	if (r) {
1532 		ath_err(common, "Failed to set channel\n");
1533 		return false;
1534 	}
1535 	ath9k_hw_set_clockrate(ah);
1536 	ath9k_hw_apply_txpower(ah, chan, false);
1537 	ath9k_hw_rfbus_done(ah);
1538 
1539 	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1540 		ath9k_hw_set_delta_slope(ah, chan);
1541 
1542 	ath9k_hw_spur_mitigate_freq(ah, chan);
1543 
1544 	if (edma && (band_switch || mode_diff)) {
1545 		ah->ah_flags |= AH_FASTCC;
1546 		if (band_switch || ini_reloaded)
1547 			ah->eep_ops->set_board_values(ah, chan);
1548 
1549 		ath9k_hw_init_bb(ah, chan);
1550 
1551 		if (band_switch || ini_reloaded)
1552 			ath9k_hw_init_cal(ah, chan);
1553 		ah->ah_flags &= ~AH_FASTCC;
1554 	}
1555 
1556 	return true;
1557 }
1558 
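/*
 * Re-apply the driver's GPIO output overrides: for every bit set in
 * ah->gpio_mask, configure the pin as an output and drive it to the
 * corresponding bit of ah->gpio_val.
 */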
1559 static void ath9k_hw_apply_gpio_override(struct ath_hw *ah)
1560 {
1561 	u32 gpio_mask = ah->gpio_mask;
1562 	int i;
1563 
1564 	for (i = 0; gpio_mask; i++, gpio_mask >>= 1) {
1565 		if (!(gpio_mask & 1))
1566 			continue;
1567 
1568 		ath9k_hw_cfg_output(ah, i, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
1569 		ath9k_hw_set_gpio(ah, i, !!(ah->gpio_val & BIT(i)));
1570 	}
1571 }
1572 
1573 static bool ath9k_hw_check_dcs(u32 dma_dbg, u32 num_dcu_states,
1574 			       int *hang_state, int *hang_pos)
1575 {
1576 	static u32 dcu_chain_state[] = {5, 6, 9}; /* DCU chain stuck states */
1577 	u32 chain_state, dcs_pos, i;
1578 
1579 	for (dcs_pos = 0; dcs_pos < num_dcu_states; dcs_pos++) {
1580 		chain_state = (dma_dbg >> (5 * dcs_pos)) & 0x1f;
1581 		for (i = 0; i < 3; i++) {
1582 			if (chain_state == dcu_chain_state[i]) {
1583 				*hang_state = chain_state;
1584 				*hang_pos = dcs_pos;
1585 				return true;
1586 			}
1587 		}
1588 	}
1589 	return false;
1590 }
1591 
1592 #define DCU_COMPLETE_STATE        1
1593 #define DCU_COMPLETE_STATE_MASK 0x3
1594 #define NUM_STATUS_READS         50
1595 static bool ath9k_hw_detect_mac_hang(struct ath_hw *ah)
1596 {
1597 	u32 chain_state, comp_state, dcs_reg = AR_DMADBG_4;
1598 	u32 i, hang_pos, hang_state, num_state = 6;
1599 
1600 	comp_state = REG_READ(ah, AR_DMADBG_6);
1601 
1602 	if ((comp_state & DCU_COMPLETE_STATE_MASK) != DCU_COMPLETE_STATE) {
1603 		ath_dbg(ath9k_hw_common(ah), RESET,
1604 			"MAC Hang signature not found at DCU complete\n");
1605 		return false;
1606 	}
1607 
1608 	chain_state = REG_READ(ah, dcs_reg);
1609 	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1610 		goto hang_check_iter;
1611 
1612 	dcs_reg = AR_DMADBG_5;
1613 	num_state = 4;
1614 	chain_state = REG_READ(ah, dcs_reg);
1615 	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1616 		goto hang_check_iter;
1617 
1618 	ath_dbg(ath9k_hw_common(ah), RESET,
1619 		"MAC Hang signature 1 not found\n");
1620 	return false;
1621 
1622 hang_check_iter:
1623 	ath_dbg(ath9k_hw_common(ah), RESET,
1624 		"DCU registers: chain %08x complete %08x Hang: state %d pos %d\n",
1625 		chain_state, comp_state, hang_state, hang_pos);
1626 
1627 	for (i = 0; i < NUM_STATUS_READS; i++) {
1628 		chain_state = REG_READ(ah, dcs_reg);
1629 		chain_state = (chain_state >> (5 * hang_pos)) & 0x1f;
1630 		comp_state = REG_READ(ah, AR_DMADBG_6);
1631 
1632 		if (((comp_state & DCU_COMPLETE_STATE_MASK) !=
1633 					DCU_COMPLETE_STATE) ||
1634 		    (chain_state != hang_state))
1635 			return false;
1636 	}
1637 
1638 	ath_dbg(ath9k_hw_common(ah), RESET, "MAC Hang signature 1 found\n");
1639 
1640 	return true;
1641 }
1642 
1643 bool ath9k_hw_check_alive(struct ath_hw *ah)
1644 {
1645 	int count = 50;
1646 	u32 reg;
1647 
1648 	if (AR_SREV_9300(ah))
1649 		return !ath9k_hw_detect_mac_hang(ah);
1650 
1651 	if (AR_SREV_9285_12_OR_LATER(ah))
1652 		return true;
1653 
1654 	do {
1655 		reg = REG_READ(ah, AR_OBS_BUS_1);
1656 
1657 		if ((reg & 0x7E7FFFEF) == 0x00702400)
1658 			continue;
1659 
1660 		switch (reg & 0x7E000B00) {
1661 		case 0x1E000000:
1662 		case 0x52000B00:
1663 		case 0x18000B00:
1664 			continue;
1665 		default:
1666 			return true;
1667 		}
1668 	} while (count-- > 0);
1669 
1670 	return false;
1671 }
1672 EXPORT_SYMBOL(ath9k_hw_check_alive);
1673 
1674 /*
1675  * Fast channel change:
1676  * (Change synthesizer based on channel freq without resetting chip)
1677  *
1678  * Don't do FCC when
1679  *   - Flag is not set
1680  *   - Chip is just coming out of full sleep
1681  *   - Channel to be set is same as current channel
1682  *   - Channel flags are different (e.g., moving from a 2 GHz to a 5 GHz channel)
1683  */
1684 static int ath9k_hw_do_fastcc(struct ath_hw *ah, struct ath9k_channel *chan)
1685 {
1686 	struct ath_common *common = ath9k_hw_common(ah);
1687 	int ret;
1688 
1689 	if (AR_SREV_9280(ah) && common->bus_ops->ath_bus_type == ATH_PCI)
1690 		goto fail;
1691 
1692 	if (ah->chip_fullsleep)
1693 		goto fail;
1694 
1695 	if (!ah->curchan)
1696 		goto fail;
1697 
1698 	if (chan->channel == ah->curchan->channel)
1699 		goto fail;
1700 
1701 	if ((ah->curchan->channelFlags | chan->channelFlags) &
1702 	    (CHANNEL_HALF | CHANNEL_QUARTER))
1703 		goto fail;
1704 
1705 	if ((chan->channelFlags & CHANNEL_ALL) !=
1706 	    (ah->curchan->channelFlags & CHANNEL_ALL))
1707 		goto fail;
1708 
1709 	if (!ath9k_hw_check_alive(ah))
1710 		goto fail;
1711 
1712 	/*
1713 	 * For AR9462, make sure that the calibration data to be
1714 	 * re-used is present.
1715 	 */
1716 	if (AR_SREV_9462(ah) && (ah->caldata &&
1717 				 (!ah->caldata->done_txiqcal_once ||
1718 				  !ah->caldata->done_txclcal_once ||
1719 				  !ah->caldata->rtt_done)))
1720 		goto fail;
1721 
1722 	ath_dbg(common, RESET, "FastChannelChange for %d -> %d\n",
1723 		ah->curchan->channel, chan->channel);
1724 
1725 	ret = ath9k_hw_channel_change(ah, chan);
1726 	if (!ret)
1727 		goto fail;
1728 
1729 	ath9k_hw_loadnf(ah, ah->curchan);
1730 	ath9k_hw_start_nfcal(ah, true);
1731 
1732 	if (ath9k_hw_mci_is_enabled(ah))
1733 		ar9003_mci_2g5g_switch(ah, false);
1734 
1735 	if (AR_SREV_9271(ah))
1736 		ar9002_hw_load_ani_reg(ah, chan);
1737 
1738 	return 0;
1739 fail:
1740 	return -EINVAL;
1741 }
1742 
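/*
 * Full chip reset for a (possibly new) channel: wake the chip, attempt a
 * fast channel change when requested, otherwise reset the MAC and
 * baseband, reload the initvals and board values, restore saved state
 * (MAC address, default antenna, LED config, TSF where needed) and kick
 * off calibration.
 */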
1743 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
1744 		   struct ath9k_hw_cal_data *caldata, bool fastcc)
1745 {
1746 	struct ath_common *common = ath9k_hw_common(ah);
1747 	u32 saveLedState;
1748 	u32 saveDefAntenna;
1749 	u32 macStaId1;
1750 	u64 tsf = 0;
1751 	int i, r;
1752 	bool start_mci_reset = false;
1753 	bool save_fullsleep = ah->chip_fullsleep;
1754 
1755 	if (ath9k_hw_mci_is_enabled(ah)) {
1756 		start_mci_reset = ar9003_mci_start_reset(ah, chan);
1757 		if (start_mci_reset)
1758 			return 0;
1759 	}
1760 
1761 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1762 		return -EIO;
1763 
1764 	if (ah->curchan && !ah->chip_fullsleep)
1765 		ath9k_hw_getnf(ah, ah->curchan);
1766 
1767 	ah->caldata = caldata;
1768 	if (caldata &&
1769 	    (chan->channel != caldata->channel ||
1770 	     (chan->channelFlags & ~CHANNEL_CW_INT) !=
1771 	     (caldata->channelFlags & ~CHANNEL_CW_INT))) {
1772 		/* Operating channel changed, reset channel calibration data */
1773 		memset(caldata, 0, sizeof(*caldata));
1774 		ath9k_init_nfcal_hist_buffer(ah, chan);
1775 	} else if (caldata) {
1776 		caldata->paprd_packet_sent = false;
1777 	}
1778 	ah->noise = ath9k_hw_getchan_noise(ah, chan);
1779 
1780 	if (fastcc) {
1781 		r = ath9k_hw_do_fastcc(ah, chan);
1782 		if (!r)
1783 			return r;
1784 	}
1785 
1786 	if (ath9k_hw_mci_is_enabled(ah))
1787 		ar9003_mci_stop_bt(ah, save_fullsleep);
1788 
1789 	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
1790 	if (saveDefAntenna == 0)
1791 		saveDefAntenna = 1;
1792 
1793 	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
1794 
1795 	/* For chips on which RTC reset is done, save TSF before it gets cleared */
1796 	if (AR_SREV_9100(ah) ||
1797 	    (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)))
1798 		tsf = ath9k_hw_gettsf64(ah);
1799 
1800 	saveLedState = REG_READ(ah, AR_CFG_LED) &
1801 		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
1802 		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
1803 
1804 	ath9k_hw_mark_phy_inactive(ah);
1805 
1806 	ah->paprd_table_write_done = false;
1807 
1808 	/* Only required on the first reset */
1809 	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1810 		REG_WRITE(ah,
1811 			  AR9271_RESET_POWER_DOWN_CONTROL,
1812 			  AR9271_RADIO_RF_RST);
1813 		udelay(50);
1814 	}
1815 
1816 	if (!ath9k_hw_chip_reset(ah, chan)) {
1817 		ath_err(common, "Chip reset failed\n");
1818 		return -EINVAL;
1819 	}
1820 
1821 	/* Only required on the first reset */
1822 	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1823 		ah->htc_reset_init = false;
1824 		REG_WRITE(ah,
1825 			  AR9271_RESET_POWER_DOWN_CONTROL,
1826 			  AR9271_GATE_MAC_CTL);
1827 		udelay(50);
1828 	}
1829 
1830 	/* Restore TSF */
1831 	if (tsf)
1832 		ath9k_hw_settsf64(ah, tsf);
1833 
1834 	if (AR_SREV_9280_20_OR_LATER(ah))
1835 		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
1836 
1837 	if (!AR_SREV_9300_20_OR_LATER(ah))
1838 		ar9002_hw_enable_async_fifo(ah);
1839 
1840 	r = ath9k_hw_process_ini(ah, chan);
1841 	if (r)
1842 		return r;
1843 
1844 	if (ath9k_hw_mci_is_enabled(ah))
1845 		ar9003_mci_reset(ah, false, IS_CHAN_2GHZ(chan), save_fullsleep);
1846 
1847 	/*
1848 	 * Some AR91xx SoC devices frequently fail to accept TSF writes
1849 	 * right after the chip reset. When that happens, write a new
1850 	 * value after the initvals have been applied, with an offset
1851 	 * based on the measured time difference.
1852 	 */
1853 	if (AR_SREV_9100(ah) && (ath9k_hw_gettsf64(ah) < tsf)) {
1854 		tsf += 1500;
1855 		ath9k_hw_settsf64(ah, tsf);
1856 	}
1857 
1858 	/* Setup MFP options for CCMP */
1859 	if (AR_SREV_9280_20_OR_LATER(ah)) {
1860 		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
1861 		 * frames when constructing CCMP AAD. */
1862 		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
1863 			      0xc7ff);
1864 		ah->sw_mgmt_crypto = false;
1865 	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1866 		/* Disable hardware crypto for management frames */
1867 		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
1868 			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
1869 		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1870 			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
1871 		ah->sw_mgmt_crypto = true;
1872 	} else
1873 		ah->sw_mgmt_crypto = true;
1874 
1875 	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1876 		ath9k_hw_set_delta_slope(ah, chan);
1877 
1878 	ath9k_hw_spur_mitigate_freq(ah, chan);
1879 	ah->eep_ops->set_board_values(ah, chan);
1880 
1881 	ENABLE_REGWRITE_BUFFER(ah);
1882 
1883 	REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr));
1884 	REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4)
1885 		  | macStaId1
1886 		  | AR_STA_ID1_RTS_USE_DEF
1887 		  | (ah->config.
1888 		     ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
1889 		  | ah->sta_id1_defaults);
1890 	ath_hw_setbssidmask(common);
1891 	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
1892 	ath9k_hw_write_associd(ah);
1893 	REG_WRITE(ah, AR_ISR, ~0);
1894 	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
1895 
1896 	REGWRITE_BUFFER_FLUSH(ah);
1897 
1898 	ath9k_hw_set_operating_mode(ah, ah->opmode);
1899 
1900 	r = ath9k_hw_rf_set_freq(ah, chan);
1901 	if (r)
1902 		return r;
1903 
1904 	ath9k_hw_set_clockrate(ah);
1905 
1906 	ENABLE_REGWRITE_BUFFER(ah);
1907 
1908 	for (i = 0; i < AR_NUM_DCU; i++)
1909 		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
1910 
1911 	REGWRITE_BUFFER_FLUSH(ah);
1912 
1913 	ah->intr_txqs = 0;
1914 	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
1915 		ath9k_hw_resettxqueue(ah, i);
1916 
1917 	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
1918 	ath9k_hw_ani_cache_ini_regs(ah);
1919 	ath9k_hw_init_qos(ah);
1920 
1921 	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
1922 		ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
1923 
1924 	ath9k_hw_init_global_settings(ah);
1925 
1926 	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1927 		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
1928 			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
1929 		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
1930 			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
1931 		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1932 			    AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
1933 	}
1934 
1935 	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PRESERVE_SEQNUM);
1936 
1937 	ath9k_hw_set_dma(ah);
1938 
1939 	if (!ath9k_hw_mci_is_enabled(ah))
1940 		REG_WRITE(ah, AR_OBS, 8);
1941 
1942 	if (ah->config.rx_intr_mitigation) {
1943 		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
1944 		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
1945 	}
1946 
1947 	if (ah->config.tx_intr_mitigation) {
1948 		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_LAST, 300);
1949 		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_FIRST, 750);
1950 	}
1951 
1952 	ath9k_hw_init_bb(ah, chan);
1953 
1954 	if (caldata) {
1955 		caldata->done_txiqcal_once = false;
1956 		caldata->done_txclcal_once = false;
1957 	}
1958 	if (!ath9k_hw_init_cal(ah, chan))
1959 		return -EIO;
1960 
1961 	if (ath9k_hw_mci_is_enabled(ah) && ar9003_mci_end_reset(ah, chan, caldata))
1962 		return -EIO;
1963 
1964 	ENABLE_REGWRITE_BUFFER(ah);
1965 
1966 	ath9k_hw_restore_chainmask(ah);
1967 	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
1968 
1969 	REGWRITE_BUFFER_FLUSH(ah);
1970 
1971 	/*
1972 	 * For big endian systems turn on swapping for descriptors
1973 	 */
1974 	if (AR_SREV_9100(ah)) {
1975 		u32 mask;
1976 		mask = REG_READ(ah, AR_CFG);
1977 		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
1978 			ath_dbg(common, RESET, "CFG Byte Swap Set 0x%x\n",
1979 				mask);
1980 		} else {
1981 			mask =
1982 				INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
1983 			REG_WRITE(ah, AR_CFG, mask);
1984 			ath_dbg(common, RESET, "Setting CFG 0x%x\n",
1985 				REG_READ(ah, AR_CFG));
1986 		}
1987 	} else {
1988 		if (common->bus_ops->ath_bus_type == ATH_USB) {
1989 			/* Configure AR9271 target WLAN */
1990 			if (AR_SREV_9271(ah))
1991 				REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
1992 			else
1993 				REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1994 		}
1995 #ifdef __BIG_ENDIAN
1996 		else if (AR_SREV_9330(ah) || AR_SREV_9340(ah) ||
1997 			 AR_SREV_9550(ah))
1998 			REG_RMW(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB, 0);
1999 		else
2000 			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2001 #endif
2002 	}
2003 
2004 	if (ath9k_hw_btcoex_is_enabled(ah))
2005 		ath9k_hw_btcoex_enable(ah);
2006 
2007 	if (ath9k_hw_mci_is_enabled(ah))
2008 		ar9003_mci_check_bt(ah);
2009 
2010 	ath9k_hw_loadnf(ah, chan);
2011 	ath9k_hw_start_nfcal(ah, true);
2012 
2013 	if (AR_SREV_9300_20_OR_LATER(ah)) {
2014 		ar9003_hw_bb_watchdog_config(ah);
2015 
2016 		ar9003_hw_disable_phy_restart(ah);
2017 	}
2018 
2019 	ath9k_hw_apply_gpio_override(ah);
2020 
2021 	return 0;
2022 }
2023 EXPORT_SYMBOL(ath9k_hw_reset);
2024 
2025 /******************************/
2026 /* Power Management (Chipset) */
2027 /******************************/
2028 
2029 /*
2030  * Notify that power management is disabled in self-generated frames.
2031  * If requested, force the chip to sleep.
2032  */
2033 static void ath9k_set_power_sleep(struct ath_hw *ah)
2034 {
2035 	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2036 
2037 	if (AR_SREV_9462(ah)) {
2038 		REG_CLR_BIT(ah, AR_TIMER_MODE, 0xff);
2039 		REG_CLR_BIT(ah, AR_NDP2_TIMER_MODE, 0xff);
2040 		REG_CLR_BIT(ah, AR_SLP32_INC, 0xfffff);
2041 		/* xxx Required for WLAN only case ? */
2042 		REG_WRITE(ah, AR_MCI_INTERRUPT_RX_MSG_EN, 0);
2043 		udelay(100);
2044 	}
2045 
2046 	/*
2047 	 * Clear the RTC force wake bit to allow the
2048 	 * mac to go to sleep.
2049 	 */
2050 	REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2051 
2052 	if (ath9k_hw_mci_is_enabled(ah))
2053 		udelay(100);
2054 
2055 	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
2056 		REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2057 
2058 	/* Shutdown chip. Active low */
2059 	if (!AR_SREV_5416(ah) && !AR_SREV_9271(ah)) {
2060 		REG_CLR_BIT(ah, AR_RTC_RESET, AR_RTC_RESET_EN);
2061 		udelay(2);
2062 	}
2063 
2064 	/* Clear Bit 14 of AR_WA after putting chip into Full Sleep mode. */
2065 	if (AR_SREV_9300_20_OR_LATER(ah))
2066 		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2067 }
2068 
2069 /*
2070  * Notify that power management is enabled in self-generated
2071  * frames. If requested, set the power mode of the chip to
2072  * auto/normal. Duration is in units of 128us (1/8 TU).
2073  */
2074 static void ath9k_set_power_network_sleep(struct ath_hw *ah)
2075 {
2076 	struct ath9k_hw_capabilities *pCap = &ah->caps;
2077 
2078 	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2079 
2080 	if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2081 		/* Set WakeOnInterrupt bit; clear ForceWake bit */
2082 		REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2083 			  AR_RTC_FORCE_WAKE_ON_INT);
2084 	} else {
2085 
2086 		/* When the chip goes into network sleep, it can be woken
2087 		 * up by an MCI_INT interrupt caused by BT's HW messages
2088 		 * (LNA_xxx, CONT_xxx), which can arrive at a very fast
2089 		 * rate (~100us). This causes the chip to leave and
2090 		 * re-enter network sleep mode frequently, which in turn
2091 		 * makes the WLAN MCI HW generate lots of SYS_WAKING and
2092 		 * SYS_SLEEPING messages and keeps the BT CPU too busy
2093 		 * to process them.
2094 		 */
2095 		if (ath9k_hw_mci_is_enabled(ah))
2096 			REG_CLR_BIT(ah, AR_MCI_INTERRUPT_RX_MSG_EN,
2097 				    AR_MCI_INTERRUPT_RX_HW_MSG_MASK);
2098 		/*
2099 		 * Clear the RTC force wake bit to allow the
2100 		 * mac to go to sleep.
2101 		 */
2102 		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2103 
2104 		if (ath9k_hw_mci_is_enabled(ah))
2105 			udelay(30);
2106 	}
2107 
2108 	/* Clear Bit 14 of AR_WA after putting chip into Net Sleep mode. */
2109 	if (AR_SREV_9300_20_OR_LATER(ah))
2110 		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2111 }
2112 
2113 static bool ath9k_hw_set_power_awake(struct ath_hw *ah)
2114 {
2115 	u32 val;
2116 	int i;
2117 
2118 	/* Set Bits 14 and 17 of AR_WA before powering on the chip. */
2119 	if (AR_SREV_9300_20_OR_LATER(ah)) {
2120 		REG_WRITE(ah, AR_WA, ah->WARegVal);
2121 		udelay(10);
2122 	}
2123 
2124 	if ((REG_READ(ah, AR_RTC_STATUS) &
2125 	     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2126 		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
2127 			return false;
2128 		}
2129 		if (!AR_SREV_9300_20_OR_LATER(ah))
2130 			ath9k_hw_init_pll(ah, NULL);
2131 	}
2132 	if (AR_SREV_9100(ah))
2133 		REG_SET_BIT(ah, AR_RTC_RESET,
2134 			    AR_RTC_RESET_EN);
2135 
2136 	REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2137 		    AR_RTC_FORCE_WAKE_EN);
2138 	udelay(50);
2139 
2140 	if (ath9k_hw_mci_is_enabled(ah))
2141 		ar9003_mci_set_power_awake(ah);
2142 
2143 	for (i = POWER_UP_TIME / 50; i > 0; i--) {
2144 		val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2145 		if (val == AR_RTC_STATUS_ON)
2146 			break;
2147 		udelay(50);
2148 		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2149 			    AR_RTC_FORCE_WAKE_EN);
2150 	}
2151 	if (i == 0) {
2152 		ath_err(ath9k_hw_common(ah),
2153 			"Failed to wake up in %uus\n",
2154 			POWER_UP_TIME);
2155 		return false;
2156 	}
2157 
2158 	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2159 
2160 	return true;
2161 }
2162 
2163 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2164 {
2165 	struct ath_common *common = ath9k_hw_common(ah);
2166 	bool status = true;
2167 	static const char *modes[] = {
2168 		"AWAKE",
2169 		"FULL-SLEEP",
2170 		"NETWORK SLEEP",
2171 		"UNDEFINED"
2172 	};
2173 
2174 	if (ah->power_mode == mode)
2175 		return status;
2176 
2177 	ath_dbg(common, RESET, "%s -> %s\n",
2178 		modes[ah->power_mode], modes[mode]);
2179 
2180 	switch (mode) {
2181 	case ATH9K_PM_AWAKE:
2182 		status = ath9k_hw_set_power_awake(ah);
2183 		break;
2184 	case ATH9K_PM_FULL_SLEEP:
2185 		if (ath9k_hw_mci_is_enabled(ah))
2186 			ar9003_mci_set_full_sleep(ah);
2187 
2188 		ath9k_set_power_sleep(ah);
2189 		ah->chip_fullsleep = true;
2190 		break;
2191 	case ATH9K_PM_NETWORK_SLEEP:
2192 		ath9k_set_power_network_sleep(ah);
2193 		break;
2194 	default:
2195 		ath_err(common, "Unknown power mode %u\n", mode);
2196 		return false;
2197 	}
2198 	ah->power_mode = mode;
2199 
2200 	/*
2201 	 * XXX: If this warning never comes up after a while then
2202 	 * simply keep the ATH_DBG_WARN_ON_ONCE() but make
2203 	 * ath9k_hw_setpower() return type void.
2204 	 */
2205 
2206 	if (!(ah->ah_flags & AH_UNPLUGGED))
2207 		ATH_DBG_WARN_ON_ONCE(!status);
2208 
2209 	return status;
2210 }
2211 EXPORT_SYMBOL(ath9k_hw_setpower);
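/*
 * Illustrative sketch: a typical caller forces the chip AWAKE before
 * touching MAC registers and then restores whatever power mode was active
 * before. The helper name below is made up and only shows the expected
 * calling pattern for ath9k_hw_setpower(); real callers also serialize
 * power-mode changes with a lock.
 */
#if 0
static u64 example_read_tsf_awake(struct ath_hw *ah)
{
	enum ath9k_power_mode old_mode = ah->power_mode;
	u64 tsf;

	/* Wake the chip up before register access. */
	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
		return 0;

	tsf = ath9k_hw_gettsf64(ah);

	/* Drop back to the previous mode (full/network sleep). */
	if (old_mode != ATH9K_PM_AWAKE)
		ath9k_hw_setpower(ah, old_mode);

	return tsf;
}
#endif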
2212 
2213 /*******************/
2214 /* Beacon Handling */
2215 /*******************/
2216 
2217 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
2218 {
2219 	int flags = 0;
2220 
2221 	ENABLE_REGWRITE_BUFFER(ah);
2222 
2223 	switch (ah->opmode) {
2224 	case NL80211_IFTYPE_ADHOC:
2225 	case NL80211_IFTYPE_MESH_POINT:
2226 		REG_SET_BIT(ah, AR_TXCFG,
2227 			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
2228 		REG_WRITE(ah, AR_NEXT_NDP_TIMER, next_beacon +
2229 			  TU_TO_USEC(ah->atim_window ? ah->atim_window : 1));
2230 		flags |= AR_NDP_TIMER_EN;
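		/* fall through */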
2231 	case NL80211_IFTYPE_AP:
2232 		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, next_beacon);
2233 		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, next_beacon -
2234 			  TU_TO_USEC(ah->config.dma_beacon_response_time));
2235 		REG_WRITE(ah, AR_NEXT_SWBA, next_beacon -
2236 			  TU_TO_USEC(ah->config.sw_beacon_response_time));
2237 		flags |=
2238 			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
2239 		break;
2240 	default:
2241 		ath_dbg(ath9k_hw_common(ah), BEACON,
2242 			"%s: unsupported opmode: %d\n", __func__, ah->opmode);
2243 		return;
2244 		break;
2245 	}
2246 
2247 	REG_WRITE(ah, AR_BEACON_PERIOD, beacon_period);
2248 	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, beacon_period);
2249 	REG_WRITE(ah, AR_SWBA_PERIOD, beacon_period);
2250 	REG_WRITE(ah, AR_NDP_PERIOD, beacon_period);
2251 
2252 	REGWRITE_BUFFER_FLUSH(ah);
2253 
2254 	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
2255 }
2256 EXPORT_SYMBOL(ath9k_hw_beaconinit);
2257 
2258 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
2259 				    const struct ath9k_beacon_state *bs)
2260 {
2261 	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
2262 	struct ath9k_hw_capabilities *pCap = &ah->caps;
2263 	struct ath_common *common = ath9k_hw_common(ah);
2264 
2265 	ENABLE_REGWRITE_BUFFER(ah);
2266 
2267 	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
2268 
2269 	REG_WRITE(ah, AR_BEACON_PERIOD,
2270 		  TU_TO_USEC(bs->bs_intval));
2271 	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
2272 		  TU_TO_USEC(bs->bs_intval));
2273 
2274 	REGWRITE_BUFFER_FLUSH(ah);
2275 
2276 	REG_RMW_FIELD(ah, AR_RSSI_THR,
2277 		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
2278 
2279 	beaconintval = bs->bs_intval;
2280 
2281 	if (bs->bs_sleepduration > beaconintval)
2282 		beaconintval = bs->bs_sleepduration;
2283 
2284 	dtimperiod = bs->bs_dtimperiod;
2285 	if (bs->bs_sleepduration > dtimperiod)
2286 		dtimperiod = bs->bs_sleepduration;
2287 
2288 	if (beaconintval == dtimperiod)
2289 		nextTbtt = bs->bs_nextdtim;
2290 	else
2291 		nextTbtt = bs->bs_nexttbtt;
2292 
2293 	ath_dbg(common, BEACON, "next DTIM %d\n", bs->bs_nextdtim);
2294 	ath_dbg(common, BEACON, "next beacon %d\n", nextTbtt);
2295 	ath_dbg(common, BEACON, "beacon period %d\n", beaconintval);
2296 	ath_dbg(common, BEACON, "DTIM period %d\n", dtimperiod);
2297 
2298 	ENABLE_REGWRITE_BUFFER(ah);
2299 
2300 	REG_WRITE(ah, AR_NEXT_DTIM,
2301 		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
2302 	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
2303 
2304 	REG_WRITE(ah, AR_SLEEP1,
2305 		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
2306 		  | AR_SLEEP1_ASSUME_DTIM);
2307 
2308 	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
2309 		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
2310 	else
2311 		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
2312 
2313 	REG_WRITE(ah, AR_SLEEP2,
2314 		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
2315 
2316 	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
2317 	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
2318 
2319 	REGWRITE_BUFFER_FLUSH(ah);
2320 
2321 	REG_SET_BIT(ah, AR_TIMER_MODE,
2322 		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
2323 		    AR_DTIM_TIMER_EN);
2324 
2325 	/* TSF Out of Range Threshold */
2326 	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
2327 }
2328 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
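/*
 * Illustrative note, using made-up numbers: with bs_intval = 100 TU,
 * bs_dtimperiod = 300 TU and bs_sleepduration = 300 TU, both the beacon
 * interval and the DTIM period are raised to 300 TU. Since they now match,
 * the next wakeup (nextTbtt) is taken from bs_nextdtim rather than
 * bs_nexttbtt, so the station only wakes for DTIM beacons.
 */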
2329 
2330 /*******************/
2331 /* HW Capabilities */
2332 /*******************/
2333 
2334 static u8 fixup_chainmask(u8 chip_chainmask, u8 eeprom_chainmask)
2335 {
2336 	eeprom_chainmask &= chip_chainmask;
2337 	if (eeprom_chainmask)
2338 		return eeprom_chainmask;
2339 	else
2340 		return chip_chainmask;
2341 }
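/*
 * Illustrative note: fixup_chainmask() intersects the EEPROM chainmask with
 * what the chip actually supports and falls back to the chip mask when the
 * intersection is empty, e.g.:
 *
 *   fixup_chainmask(0x3, 0x7) == 0x3   (EEPROM claims 3 chains, chip has 2)
 *   fixup_chainmask(0x7, 0x5) == 0x5   (valid subset is kept as-is)
 *   fixup_chainmask(0x3, 0x4) == 0x3   (no overlap, fall back to chip mask)
 */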
2342 
2343 /**
2344  * ath9k_hw_dfs_tested - checks if DFS has been tested with used chipset
2345  * @ah: the atheros hardware data structure
2346  *
2347  * We enable DFS support upstream only on chipsets which have passed a
2348  * series of tests. The desired testing requirements for each chipset
2349  * are documented at:
2350  *
2351  * http://wireless.kernel.org/en/users/Drivers/ath9k/dfs
2352  *
2353  * Once a new chipset gets properly tested an individual commit can be used
2354  * to document the testing for DFS for that chipset.
2355  */
2356 static bool ath9k_hw_dfs_tested(struct ath_hw *ah)
2357 {
2358 
2359 	switch (ah->hw_version.macVersion) {
2360 	/* AR9580 will likely be our first target to get testing on */
2361 	case AR_SREV_VERSION_9580:
2362 	default:
2363 		return false;
2364 	}
2365 }
2366 
2367 int ath9k_hw_fill_cap_info(struct ath_hw *ah)
2368 {
2369 	struct ath9k_hw_capabilities *pCap = &ah->caps;
2370 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2371 	struct ath_common *common = ath9k_hw_common(ah);
2372 	unsigned int chip_chainmask;
2373 
2374 	u16 eeval;
2375 	u8 ant_div_ctl1, tx_chainmask, rx_chainmask;
2376 
2377 	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
2378 	regulatory->current_rd = eeval;
2379 
2380 	if (ah->opmode != NL80211_IFTYPE_AP &&
2381 	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
2382 		if (regulatory->current_rd == 0x64 ||
2383 		    regulatory->current_rd == 0x65)
2384 			regulatory->current_rd += 5;
2385 		else if (regulatory->current_rd == 0x41)
2386 			regulatory->current_rd = 0x43;
2387 		ath_dbg(common, REGULATORY, "regdomain mapped to 0x%x\n",
2388 			regulatory->current_rd);
2389 	}
2390 
2391 	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
2392 	if ((eeval & (AR5416_OPFLAGS_11G | AR5416_OPFLAGS_11A)) == 0) {
2393 		ath_err(common,
2394 			"no band has been marked as supported in EEPROM\n");
2395 		return -EINVAL;
2396 	}
2397 
2398 	if (eeval & AR5416_OPFLAGS_11A)
2399 		pCap->hw_caps |= ATH9K_HW_CAP_5GHZ;
2400 
2401 	if (eeval & AR5416_OPFLAGS_11G)
2402 		pCap->hw_caps |= ATH9K_HW_CAP_2GHZ;
2403 
2404 	if (AR_SREV_9485(ah) || AR_SREV_9285(ah) || AR_SREV_9330(ah))
2405 		chip_chainmask = 1;
2406 	else if (AR_SREV_9462(ah))
2407 		chip_chainmask = 3;
2408 	else if (!AR_SREV_9280_20_OR_LATER(ah))
2409 		chip_chainmask = 7;
2410 	else if (!AR_SREV_9300_20_OR_LATER(ah) || AR_SREV_9340(ah))
2411 		chip_chainmask = 3;
2412 	else
2413 		chip_chainmask = 7;
2414 
2415 	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
2416 	/*
2417 	 * For AR9271 we will temporarily use the rx chainmask as read from
2418 	 * the EEPROM.
2419 	 */
2420 	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
2421 	    !(eeval & AR5416_OPFLAGS_11A) &&
2422 	    !(AR_SREV_9271(ah)))
2423 		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
2424 		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
2425 	else if (AR_SREV_9100(ah))
2426 		pCap->rx_chainmask = 0x7;
2427 	else
2428 		/* Use rx_chainmask from EEPROM. */
2429 		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
2430 
2431 	pCap->tx_chainmask = fixup_chainmask(chip_chainmask, pCap->tx_chainmask);
2432 	pCap->rx_chainmask = fixup_chainmask(chip_chainmask, pCap->rx_chainmask);
2433 	ah->txchainmask = pCap->tx_chainmask;
2434 	ah->rxchainmask = pCap->rx_chainmask;
2435 
2436 	ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
2437 
2438 	/* enable key search for every frame in an aggregate */
2439 	if (AR_SREV_9300_20_OR_LATER(ah))
2440 		ah->misc_mode |= AR_PCU_ALWAYS_PERFORM_KEYSEARCH;
2441 
2442 	common->crypt_caps |= ATH_CRYPT_CAP_CIPHER_AESCCM;
2443 
2444 	if (ah->hw_version.devid != AR2427_DEVID_PCIE)
2445 		pCap->hw_caps |= ATH9K_HW_CAP_HT;
2446 	else
2447 		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
2448 
2449 	if (AR_SREV_9271(ah))
2450 		pCap->num_gpio_pins = AR9271_NUM_GPIO;
2451 	else if (AR_DEVID_7010(ah))
2452 		pCap->num_gpio_pins = AR7010_NUM_GPIO;
2453 	else if (AR_SREV_9300_20_OR_LATER(ah))
2454 		pCap->num_gpio_pins = AR9300_NUM_GPIO;
2455 	else if (AR_SREV_9287_11_OR_LATER(ah))
2456 		pCap->num_gpio_pins = AR9287_NUM_GPIO;
2457 	else if (AR_SREV_9285_12_OR_LATER(ah))
2458 		pCap->num_gpio_pins = AR9285_NUM_GPIO;
2459 	else if (AR_SREV_9280_20_OR_LATER(ah))
2460 		pCap->num_gpio_pins = AR928X_NUM_GPIO;
2461 	else
2462 		pCap->num_gpio_pins = AR_NUM_GPIO;
2463 
2464 	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah))
2465 		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
2466 	else
2467 		pCap->rts_aggr_limit = (8 * 1024);
2468 
2469 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
2470 	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
2471 	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
2472 		ah->rfkill_gpio =
2473 			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
2474 		ah->rfkill_polarity =
2475 			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
2476 
2477 		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
2478 	}
2479 #endif
2480 	if (AR_SREV_9271(ah) || AR_SREV_9300_20_OR_LATER(ah))
2481 		pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
2482 	else
2483 		pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
2484 
2485 	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
2486 		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
2487 	else
2488 		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
2489 
2490 	if (AR_SREV_9300_20_OR_LATER(ah)) {
2491 		pCap->hw_caps |= ATH9K_HW_CAP_EDMA | ATH9K_HW_CAP_FASTCLOCK;
2492 		if (!AR_SREV_9330(ah) && !AR_SREV_9485(ah))
2493 			pCap->hw_caps |= ATH9K_HW_CAP_LDPC;
2494 
2495 		pCap->rx_hp_qdepth = ATH9K_HW_RX_HP_QDEPTH;
2496 		pCap->rx_lp_qdepth = ATH9K_HW_RX_LP_QDEPTH;
2497 		pCap->rx_status_len = sizeof(struct ar9003_rxs);
2498 		pCap->tx_desc_len = sizeof(struct ar9003_txc);
2499 		pCap->txs_len = sizeof(struct ar9003_txs);
2500 	} else {
2501 		pCap->tx_desc_len = sizeof(struct ath_desc);
2502 		if (AR_SREV_9280_20(ah))
2503 			pCap->hw_caps |= ATH9K_HW_CAP_FASTCLOCK;
2504 	}
2505 
2506 	if (AR_SREV_9300_20_OR_LATER(ah))
2507 		pCap->hw_caps |= ATH9K_HW_CAP_RAC_SUPPORTED;
2508 
2509 	if (AR_SREV_9300_20_OR_LATER(ah))
2510 		ah->ent_mode = REG_READ(ah, AR_ENT_OTP);
2511 
2512 	if (AR_SREV_9287_11_OR_LATER(ah) || AR_SREV_9271(ah))
2513 		pCap->hw_caps |= ATH9K_HW_CAP_SGI_20;
2514 
2515 	if (AR_SREV_9285(ah))
2516 		if (ah->eep_ops->get_eeprom(ah, EEP_MODAL_VER) >= 3) {
2517 			ant_div_ctl1 =
2518 				ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2519 			if ((ant_div_ctl1 & 0x1) && ((ant_div_ctl1 >> 3) & 0x1))
2520 				pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2521 		}
2522 	if (AR_SREV_9300_20_OR_LATER(ah)) {
2523 		if (ah->eep_ops->get_eeprom(ah, EEP_CHAIN_MASK_REDUCE))
2524 			pCap->hw_caps |= ATH9K_HW_CAP_APM;
2525 	}
2526 
2528 	if (AR_SREV_9330(ah) || AR_SREV_9485(ah)) {
2529 		ant_div_ctl1 = ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2530 		/*
2531 		 * enable the diversity-combining algorithm only when
2532 		 * both enable_lna_div and enable_fast_div are set
2533 		 *		Table for Diversity
2534 		 * ant_div_alt_lnaconf		bit 0-1
2535 		 * ant_div_main_lnaconf		bit 2-3
2536 		 * ant_div_alt_gaintb		bit 4
2537 		 * ant_div_main_gaintb		bit 5
2538 		 * enable_ant_div_lnadiv	bit 6
2539 		 * enable_ant_fast_div		bit 7
2540 		 */
2541 		if ((ant_div_ctl1 >> 0x6) == 0x3)
2542 			pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2543 	}
2544 
2545 	if (AR_SREV_9485_10(ah)) {
2546 		pCap->pcie_lcr_extsync_en = true;
2547 		pCap->pcie_lcr_offset = 0x80;
2548 	}
2549 
2550 	if (ath9k_hw_dfs_tested(ah))
2551 		pCap->hw_caps |= ATH9K_HW_CAP_DFS;
2552 
2553 	tx_chainmask = pCap->tx_chainmask;
2554 	rx_chainmask = pCap->rx_chainmask;
2555 	while (tx_chainmask || rx_chainmask) {
2556 		if (tx_chainmask & BIT(0))
2557 			pCap->max_txchains++;
2558 		if (rx_chainmask & BIT(0))
2559 			pCap->max_rxchains++;
2560 
2561 		tx_chainmask >>= 1;
2562 		rx_chainmask >>= 1;
2563 	}
2564 
2565 	if (AR_SREV_9300_20_OR_LATER(ah)) {
2566 		ah->enabled_cals |= TX_IQ_CAL;
2567 		if (AR_SREV_9485_OR_LATER(ah))
2568 			ah->enabled_cals |= TX_IQ_ON_AGC_CAL;
2569 	}
2570 
2571 	if (AR_SREV_9462(ah)) {
2572 
2573 		if (!(ah->ent_mode & AR_ENT_OTP_49GHZ_DISABLE))
2574 			pCap->hw_caps |= ATH9K_HW_CAP_MCI;
2575 
2576 		if (AR_SREV_9462_20(ah))
2577 			pCap->hw_caps |= ATH9K_HW_CAP_RTT;
2578 
2579 	}
2580 
2582 	if (AR_SREV_9280_20_OR_LATER(ah)) {
2583 		pCap->hw_caps |= ATH9K_HW_WOW_DEVICE_CAPABLE |
2584 				 ATH9K_HW_WOW_PATTERN_MATCH_EXACT;
2585 
2586 		if (AR_SREV_9280(ah))
2587 			pCap->hw_caps |= ATH9K_HW_WOW_PATTERN_MATCH_DWORD;
2588 	}
2589 
2590 	return 0;
2591 }
2592 
2593 /****************************/
2594 /* GPIO / RFKILL / Antennae */
2595 /****************************/
2596 
2597 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
2598 					 u32 gpio, u32 type)
2599 {
2600 	int addr;
2601 	u32 gpio_shift, tmp;
2602 
2603 	if (gpio > 11)
2604 		addr = AR_GPIO_OUTPUT_MUX3;
2605 	else if (gpio > 5)
2606 		addr = AR_GPIO_OUTPUT_MUX2;
2607 	else
2608 		addr = AR_GPIO_OUTPUT_MUX1;
2609 
2610 	gpio_shift = (gpio % 6) * 5;
2611 
2612 	if (AR_SREV_9280_20_OR_LATER(ah) ||
2613 	    (addr != AR_GPIO_OUTPUT_MUX1)) {
2614 		REG_RMW(ah, addr, (type << gpio_shift),
2615 			(0x1f << gpio_shift));
2616 	} else {
2617 		tmp = REG_READ(ah, addr);
2618 		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
2619 		tmp &= ~(0x1f << gpio_shift);
2620 		tmp |= (type << gpio_shift);
2621 		REG_WRITE(ah, addr, tmp);
2622 	}
2623 }
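/*
 * Illustrative note: the output mux packs one 5-bit signal select per pin,
 * six pins per 32-bit register. So pins 0-5 live in AR_GPIO_OUTPUT_MUX1,
 * 6-11 in MUX2 and 12+ in MUX3; e.g. for gpio 8 the select sits in bits
 * 14:10 of MUX2 ((8 % 6) * 5 == 10).
 */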
2624 
2625 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
2626 {
2627 	u32 gpio_shift;
2628 
2629 	BUG_ON(gpio >= ah->caps.num_gpio_pins);
2630 
2631 	if (AR_DEVID_7010(ah)) {
2632 		gpio_shift = gpio;
2633 		REG_RMW(ah, AR7010_GPIO_OE,
2634 			(AR7010_GPIO_OE_AS_INPUT << gpio_shift),
2635 			(AR7010_GPIO_OE_MASK << gpio_shift));
2636 		return;
2637 	}
2638 
2639 	gpio_shift = gpio << 1;
2640 	REG_RMW(ah,
2641 		AR_GPIO_OE_OUT,
2642 		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
2643 		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2644 }
2645 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
2646 
2647 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
2648 {
2649 #define MS_REG_READ(x, y) \
2650 	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
2651 
2652 	if (gpio >= ah->caps.num_gpio_pins)
2653 		return 0xffffffff;
2654 
2655 	if (AR_DEVID_7010(ah)) {
2656 		u32 val;
2657 		val = REG_READ(ah, AR7010_GPIO_IN);
2658 		return (MS(val, AR7010_GPIO_IN_VAL) & AR_GPIO_BIT(gpio)) == 0;
2659 	} else if (AR_SREV_9300_20_OR_LATER(ah))
2660 		return (MS(REG_READ(ah, AR_GPIO_IN), AR9300_GPIO_IN_VAL) &
2661 			AR_GPIO_BIT(gpio)) != 0;
2662 	else if (AR_SREV_9271(ah))
2663 		return MS_REG_READ(AR9271, gpio) != 0;
2664 	else if (AR_SREV_9287_11_OR_LATER(ah))
2665 		return MS_REG_READ(AR9287, gpio) != 0;
2666 	else if (AR_SREV_9285_12_OR_LATER(ah))
2667 		return MS_REG_READ(AR9285, gpio) != 0;
2668 	else if (AR_SREV_9280_20_OR_LATER(ah))
2669 		return MS_REG_READ(AR928X, gpio) != 0;
2670 	else
2671 		return MS_REG_READ(AR, gpio) != 0;
2672 }
2673 EXPORT_SYMBOL(ath9k_hw_gpio_get);
2674 
2675 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
2676 			 u32 ah_signal_type)
2677 {
2678 	u32 gpio_shift;
2679 
2680 	if (AR_DEVID_7010(ah)) {
2681 		gpio_shift = gpio;
2682 		REG_RMW(ah, AR7010_GPIO_OE,
2683 			(AR7010_GPIO_OE_AS_OUTPUT << gpio_shift),
2684 			(AR7010_GPIO_OE_MASK << gpio_shift));
2685 		return;
2686 	}
2687 
2688 	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
2689 	gpio_shift = 2 * gpio;
2690 	REG_RMW(ah,
2691 		AR_GPIO_OE_OUT,
2692 		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
2693 		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2694 }
2695 EXPORT_SYMBOL(ath9k_hw_cfg_output);
2696 
2697 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
2698 {
2699 	if (AR_DEVID_7010(ah)) {
2700 		val = val ? 0 : 1;
2701 		REG_RMW(ah, AR7010_GPIO_OUT, ((val & 1) << gpio),
2702 			AR_GPIO_BIT(gpio));
2703 		return;
2704 	}
2705 
2706 	if (AR_SREV_9271(ah))
2707 		val = ~val;
2708 
2709 	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
2710 		AR_GPIO_BIT(gpio));
2711 }
2712 EXPORT_SYMBOL(ath9k_hw_set_gpio);
2713 
2714 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
2715 {
2716 	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
2717 }
2718 EXPORT_SYMBOL(ath9k_hw_setantenna);
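/*
 * Illustrative sketch: a typical GPIO consumer such as an LED configures
 * the pin as a plain output and then drives it with ath9k_hw_set_gpio().
 * The helper name and the active-low assumption below are made up for the
 * example.
 */
#if 0
static void example_drive_led(struct ath_hw *ah, u32 led_pin, bool on)
{
	/* Route the pin straight through to the GPIO output register. */
	ath9k_hw_cfg_output(ah, led_pin, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);

	/* Active-low LED: write 0 to light it. */
	ath9k_hw_set_gpio(ah, led_pin, on ? 0 : 1);
}
#endif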
2719 
2720 /*********************/
2721 /* General Operation */
2722 /*********************/
2723 
2724 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
2725 {
2726 	u32 bits = REG_READ(ah, AR_RX_FILTER);
2727 	u32 phybits = REG_READ(ah, AR_PHY_ERR);
2728 
2729 	if (phybits & AR_PHY_ERR_RADAR)
2730 		bits |= ATH9K_RX_FILTER_PHYRADAR;
2731 	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
2732 		bits |= ATH9K_RX_FILTER_PHYERR;
2733 
2734 	return bits;
2735 }
2736 EXPORT_SYMBOL(ath9k_hw_getrxfilter);
2737 
2738 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
2739 {
2740 	u32 phybits;
2741 
2742 	ENABLE_REGWRITE_BUFFER(ah);
2743 
2744 	if (AR_SREV_9462(ah))
2745 		bits |= ATH9K_RX_FILTER_CONTROL_WRAPPER;
2746 
2747 	REG_WRITE(ah, AR_RX_FILTER, bits);
2748 
2749 	phybits = 0;
2750 	if (bits & ATH9K_RX_FILTER_PHYRADAR)
2751 		phybits |= AR_PHY_ERR_RADAR;
2752 	if (bits & ATH9K_RX_FILTER_PHYERR)
2753 		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
2754 	REG_WRITE(ah, AR_PHY_ERR, phybits);
2755 
2756 	if (phybits)
2757 		REG_SET_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2758 	else
2759 		REG_CLR_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2760 
2761 	REGWRITE_BUFFER_FLUSH(ah);
2762 }
2763 EXPORT_SYMBOL(ath9k_hw_setrxfilter);
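/*
 * Illustrative sketch: enabling radar PHY error reporting on top of the
 * currently programmed filter. The helper name is made up;
 * ath9k_hw_setrxfilter() translates the PHYRADAR/PHYERR bits into the
 * matching AR_PHY_ERR settings as shown above.
 */
#if 0
static void example_enable_radar_filter(struct ath_hw *ah)
{
	u32 rfilt = ath9k_hw_getrxfilter(ah);

	ath9k_hw_setrxfilter(ah, rfilt | ATH9K_RX_FILTER_PHYRADAR);
}
#endif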
2764 
2765 bool ath9k_hw_phy_disable(struct ath_hw *ah)
2766 {
2767 	if (ath9k_hw_mci_is_enabled(ah))
2768 		ar9003_mci_bt_gain_ctrl(ah);
2769 
2770 	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
2771 		return false;
2772 
2773 	ath9k_hw_init_pll(ah, NULL);
2774 	ah->htc_reset_init = true;
2775 	return true;
2776 }
2777 EXPORT_SYMBOL(ath9k_hw_phy_disable);
2778 
2779 bool ath9k_hw_disable(struct ath_hw *ah)
2780 {
2781 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2782 		return false;
2783 
2784 	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
2785 		return false;
2786 
2787 	ath9k_hw_init_pll(ah, NULL);
2788 	return true;
2789 }
2790 EXPORT_SYMBOL(ath9k_hw_disable);
2791 
2792 static int get_antenna_gain(struct ath_hw *ah, struct ath9k_channel *chan)
2793 {
2794 	enum eeprom_param gain_param;
2795 
2796 	if (IS_CHAN_2GHZ(chan))
2797 		gain_param = EEP_ANTENNA_GAIN_2G;
2798 	else
2799 		gain_param = EEP_ANTENNA_GAIN_5G;
2800 
2801 	return ah->eep_ops->get_eeprom(ah, gain_param);
2802 }
2803 
2804 void ath9k_hw_apply_txpower(struct ath_hw *ah, struct ath9k_channel *chan,
2805 			    bool test)
2806 {
2807 	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2808 	struct ieee80211_channel *channel;
2809 	int chan_pwr, new_pwr, max_gain;
2810 	int ant_gain, ant_reduction = 0;
2811 
2812 	if (!chan)
2813 		return;
2814 
2815 	channel = chan->chan;
2816 	chan_pwr = min_t(int, channel->max_power * 2, MAX_RATE_POWER);
2817 	new_pwr = min_t(int, chan_pwr, reg->power_limit);
2818 	max_gain = chan_pwr - new_pwr + channel->max_antenna_gain * 2;
2819 
2820 	ant_gain = get_antenna_gain(ah, chan);
2821 	if (ant_gain > max_gain)
2822 		ant_reduction = ant_gain - max_gain;
2823 
2824 	ah->eep_ops->set_txpower(ah, chan,
2825 				 ath9k_regd_get_ctl(reg, chan),
2826 				 ant_reduction, new_pwr, test);
2827 }
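/*
 * Illustrative note, all values in half-dB steps: with
 * channel->max_power = 20 dBm, reg->power_limit = 30 (15 dBm) and
 * max_antenna_gain = 3 dBi, the code above computes chan_pwr = 40,
 * new_pwr = 30 and max_gain = 40 - 30 + 6 = 16. Any EEPROM antenna gain
 * in excess of 16 (8 dBi) becomes ant_reduction and is passed to
 * set_txpower() together with new_pwr.
 */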
2828 
2829 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit, bool test)
2830 {
2831 	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2832 	struct ath9k_channel *chan = ah->curchan;
2833 	struct ieee80211_channel *channel = chan->chan;
2834 
2835 	reg->power_limit = min_t(u32, limit, MAX_RATE_POWER);
2836 	if (test)
2837 		channel->max_power = MAX_RATE_POWER / 2;
2838 
2839 	ath9k_hw_apply_txpower(ah, chan, test);
2840 
2841 	if (test)
2842 		channel->max_power = DIV_ROUND_UP(reg->max_power_level, 2);
2843 }
2844 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
2845 
2846 void ath9k_hw_setopmode(struct ath_hw *ah)
2847 {
2848 	ath9k_hw_set_operating_mode(ah, ah->opmode);
2849 }
2850 EXPORT_SYMBOL(ath9k_hw_setopmode);
2851 
2852 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
2853 {
2854 	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
2855 	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
2856 }
2857 EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
2858 
2859 void ath9k_hw_write_associd(struct ath_hw *ah)
2860 {
2861 	struct ath_common *common = ath9k_hw_common(ah);
2862 
2863 	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
2864 	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
2865 		  ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2866 }
2867 EXPORT_SYMBOL(ath9k_hw_write_associd);
2868 
2869 #define ATH9K_MAX_TSF_READ 10
2870 
2871 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
2872 {
2873 	u32 tsf_lower, tsf_upper1, tsf_upper2;
2874 	int i;
2875 
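	/*
	 * The 64-bit TSF is split across two 32-bit registers; re-read the
	 * upper word until it is stable so that a low-word rollover between
	 * the two reads cannot return a value that is off by 2^32.
	 */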
2876 	tsf_upper1 = REG_READ(ah, AR_TSF_U32);
2877 	for (i = 0; i < ATH9K_MAX_TSF_READ; i++) {
2878 		tsf_lower = REG_READ(ah, AR_TSF_L32);
2879 		tsf_upper2 = REG_READ(ah, AR_TSF_U32);
2880 		if (tsf_upper2 == tsf_upper1)
2881 			break;
2882 		tsf_upper1 = tsf_upper2;
2883 	}
2884 
2885 	WARN_ON(i == ATH9K_MAX_TSF_READ);
2886 
2887 	return (((u64)tsf_upper1 << 32) | tsf_lower);
2888 }
2889 EXPORT_SYMBOL(ath9k_hw_gettsf64);
2890 
2891 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
2892 {
2893 	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
2894 	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
2895 }
2896 EXPORT_SYMBOL(ath9k_hw_settsf64);
2897 
2898 void ath9k_hw_reset_tsf(struct ath_hw *ah)
2899 {
2900 	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
2901 			   AH_TSF_WRITE_TIMEOUT))
2902 		ath_dbg(ath9k_hw_common(ah), RESET,
2903 			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
2904 
2905 	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
2906 }
2907 EXPORT_SYMBOL(ath9k_hw_reset_tsf);
2908 
2909 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, bool set)
2910 {
2911 	if (set)
2912 		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
2913 	else
2914 		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
2915 }
2916 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
2917 
2918 void ath9k_hw_set11nmac2040(struct ath_hw *ah)
2919 {
2920 	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;
2921 	u32 macmode;
2922 
2923 	if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca)
2924 		macmode = AR_2040_JOINED_RX_CLEAR;
2925 	else
2926 		macmode = 0;
2927 
2928 	REG_WRITE(ah, AR_2040_MODE, macmode);
2929 }
2930 
2931 /* HW Generic timers configuration */
2932 
2933 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
2934 {
2935 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2936 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2937 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2938 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2939 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2940 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2941 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2942 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2943 	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
2944 	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
2945 				AR_NDP2_TIMER_MODE, 0x0002},
2946 	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
2947 				AR_NDP2_TIMER_MODE, 0x0004},
2948 	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
2949 				AR_NDP2_TIMER_MODE, 0x0008},
2950 	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
2951 				AR_NDP2_TIMER_MODE, 0x0010},
2952 	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
2953 				AR_NDP2_TIMER_MODE, 0x0020},
2954 	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
2955 				AR_NDP2_TIMER_MODE, 0x0040},
2956 	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
2957 				AR_NDP2_TIMER_MODE, 0x0080}
2958 };
2959 
2960 /* HW generic timer primitives */
2961 
2962 /* compute and clear index of rightmost 1 */
2963 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
2964 {
2965 	u32 b;
2966 
2967 	b = *mask;
2968 	b &= (0-b);
2969 	*mask &= ~b;
2970 	b *= debruijn32;
2971 	b >>= 27;
2972 
2973 	return timer_table->gen_timer_index[b];
2974 }
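/*
 * Illustrative sketch: the sequence "b &= (0-b); b *= debruijn32; b >>= 27"
 * above is the classic de Bruijn bit scan. Isolating the lowest set bit and
 * multiplying by a 32-bit de Bruijn constant leaves a unique 5-bit pattern
 * in the top bits, which a 32-entry table (gen_timer_index here) maps back
 * to the bit position. A generic, self-contained version, using the classic
 * constant and table for illustration:
 */
#if 0
static int example_lowest_set_bit(u32 v)
{
	static const int pos[32] = {
		0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
		31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
	};

	if (!v)
		return -1;

	return pos[((v & -v) * 0x077CB531U) >> 27];
}
#endif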
2975 
2976 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
2977 {
2978 	return REG_READ(ah, AR_TSF_L32);
2979 }
2980 EXPORT_SYMBOL(ath9k_hw_gettsf32);
2981 
2982 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
2983 					  void (*trigger)(void *),
2984 					  void (*overflow)(void *),
2985 					  void *arg,
2986 					  u8 timer_index)
2987 {
2988 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2989 	struct ath_gen_timer *timer;
2990 
2991 	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
2992 
2993 	if (timer == NULL) {
2994 		ath_err(ath9k_hw_common(ah),
2995 			"Failed to allocate memory for hw timer[%d]\n",
2996 			timer_index);
2997 		return NULL;
2998 	}
2999 
3000 	/* allocate a hardware generic timer slot */
3001 	timer_table->timers[timer_index] = timer;
3002 	timer->index = timer_index;
3003 	timer->trigger = trigger;
3004 	timer->overflow = overflow;
3005 	timer->arg = arg;
3006 
3007 	return timer;
3008 }
3009 EXPORT_SYMBOL(ath_gen_timer_alloc);
3010 
3011 void ath9k_hw_gen_timer_start(struct ath_hw *ah,
3012 			      struct ath_gen_timer *timer,
3013 			      u32 trig_timeout,
3014 			      u32 timer_period)
3015 {
3016 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3017 	u32 tsf, timer_next;
3018 
3019 	BUG_ON(!timer_period);
3020 
3021 	set_bit(timer->index, &timer_table->timer_mask.timer_bits);
3022 
3023 	tsf = ath9k_hw_gettsf32(ah);
3024 
3025 	timer_next = tsf + trig_timeout;
3026 
3027 	ath_dbg(ath9k_hw_common(ah), HWTIMER,
3028 		"current tsf %x period %x timer_next %x\n",
3029 		tsf, timer_period, timer_next);
3030 
3031 	/*
3032 	 * Program generic timer registers
3033 	 */
3034 	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
3035 		 timer_next);
3036 	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
3037 		  timer_period);
3038 	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3039 		    gen_tmr_configuration[timer->index].mode_mask);
3040 
3041 	if (AR_SREV_9462(ah)) {
3042 		/*
3043 		 * Starting from AR9462, each generic timer can select which
3044 		 * TSF to use. We still follow the old convention: timers
3045 		 * 0 - 7 use TSF and timers 8 - 15 use TSF2.
3046 		 */
3047 		if (timer->index < AR_GEN_TIMER_BANK_1_LEN)
3048 			REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3049 				       (1 << timer->index));
3050 		else
3051 			REG_SET_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3052 				       (1 << timer->index));
3053 	}
3054 
3055 	/* Enable both trigger and thresh interrupt masks */
3056 	REG_SET_BIT(ah, AR_IMR_S5,
3057 		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3058 		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3059 }
3060 EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
3061 
3062 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
3063 {
3064 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3065 
3066 	if ((timer->index < AR_FIRST_NDP_TIMER) ||
3067 		(timer->index >= ATH_MAX_GEN_TIMER)) {
3068 		return;
3069 	}
3070 
3071 	/* Clear generic timer enable bits. */
3072 	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3073 			gen_tmr_configuration[timer->index].mode_mask);
3074 
3075 	/* Disable both trigger and thresh interrupt masks */
3076 	REG_CLR_BIT(ah, AR_IMR_S5,
3077 		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3078 		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3079 
3080 	clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
3081 }
3082 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
3083 
3084 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
3085 {
3086 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3087 
3088 	/* free the hardware generic timer slot */
3089 	timer_table->timers[timer->index] = NULL;
3090 	kfree(timer);
3091 }
3092 EXPORT_SYMBOL(ath_gen_timer_free);
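/*
 * Illustrative sketch: the typical generic timer lifecycle around the four
 * helpers above. The callback bodies, the timer slot and the timeout values
 * are made up; the callbacks are invoked from ath_gen_timer_isr() below.
 */
#if 0
static void example_tmr_trigger(void *arg)
{
	/* periodic work goes here */
}

static void example_tmr_overflow(void *arg)
{
	/* the TSF wrapped past the programmed threshold */
}

static struct ath_gen_timer *example_start_periodic(struct ath_hw *ah)
{
	struct ath_gen_timer *timer;

	timer = ath_gen_timer_alloc(ah, example_tmr_trigger,
				    example_tmr_overflow, ah,
				    AR_FIRST_NDP_TIMER);
	if (!timer)
		return NULL;

	/* First trigger 10ms from now, then every 10ms (both in usec). */
	ath9k_hw_gen_timer_start(ah, timer, 10000, 10000);

	return timer;
}

static void example_stop_periodic(struct ath_hw *ah,
				  struct ath_gen_timer *timer)
{
	ath9k_hw_gen_timer_stop(ah, timer);
	ath_gen_timer_free(ah, timer);
}
#endif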
3093 
3094 /*
3095  * Generic Timer Interrupts handling
3096  */
3097 void ath_gen_timer_isr(struct ath_hw *ah)
3098 {
3099 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3100 	struct ath_gen_timer *timer;
3101 	struct ath_common *common = ath9k_hw_common(ah);
3102 	u32 trigger_mask, thresh_mask, index;
3103 
3104 	/* get hardware generic timer interrupt status */
3105 	trigger_mask = ah->intr_gen_timer_trigger;
3106 	thresh_mask = ah->intr_gen_timer_thresh;
3107 	trigger_mask &= timer_table->timer_mask.val;
3108 	thresh_mask &= timer_table->timer_mask.val;
3109 
3110 	trigger_mask &= ~thresh_mask;
3111 
3112 	while (thresh_mask) {
3113 		index = rightmost_index(timer_table, &thresh_mask);
3114 		timer = timer_table->timers[index];
3115 		BUG_ON(!timer);
3116 		ath_dbg(common, HWTIMER, "TSF overflow for Gen timer %d\n",
3117 			index);
3118 		timer->overflow(timer->arg);
3119 	}
3120 
3121 	while (trigger_mask) {
3122 		index = rightmost_index(timer_table, &trigger_mask);
3123 		timer = timer_table->timers[index];
3124 		BUG_ON(!timer);
3125 		ath_dbg(common, HWTIMER,
3126 			"Gen timer[%d] trigger\n", index);
3127 		timer->trigger(timer->arg);
3128 	}
3129 }
3130 EXPORT_SYMBOL(ath_gen_timer_isr);
3131 
3132 /********/
3133 /* HTC  */
3134 /********/
3135 
3136 static struct {
3137 	u32 version;
3138 	const char *name;
3139 } ath_mac_bb_names[] = {
3140 	/* Devices with external radios */
3141 	{ AR_SREV_VERSION_5416_PCI,	"5416" },
3142 	{ AR_SREV_VERSION_5416_PCIE,	"5418" },
3143 	{ AR_SREV_VERSION_9100,		"9100" },
3144 	{ AR_SREV_VERSION_9160,		"9160" },
3145 	/* Single-chip solutions */
3146 	{ AR_SREV_VERSION_9280,		"9280" },
3147 	{ AR_SREV_VERSION_9285,		"9285" },
3148 	{ AR_SREV_VERSION_9287,         "9287" },
3149 	{ AR_SREV_VERSION_9271,         "9271" },
3150 	{ AR_SREV_VERSION_9300,         "9300" },
3151 	{ AR_SREV_VERSION_9330,         "9330" },
3152 	{ AR_SREV_VERSION_9340,		"9340" },
3153 	{ AR_SREV_VERSION_9485,         "9485" },
3154 	{ AR_SREV_VERSION_9462,         "9462" },
3155 	{ AR_SREV_VERSION_9550,         "9550" },
3156 };
3157 
3158 /* For devices with external radios */
3159 static struct {
3160 	u16 version;
3161 	const char *name;
3162 } ath_rf_names[] = {
3163 	{ 0,				"5133" },
3164 	{ AR_RAD5133_SREV_MAJOR,	"5133" },
3165 	{ AR_RAD5122_SREV_MAJOR,	"5122" },
3166 	{ AR_RAD2133_SREV_MAJOR,	"2133" },
3167 	{ AR_RAD2122_SREV_MAJOR,	"2122" }
3168 };
3169 
3170 /*
3171  * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
3172  */
3173 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
3174 {
3175 	int i;
3176 
3177 	for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
3178 		if (ath_mac_bb_names[i].version == mac_bb_version) {
3179 			return ath_mac_bb_names[i].name;
3180 		}
3181 	}
3182 
3183 	return "????";
3184 }
3185 
3186 /*
3187  * Return the RF name. "????" is returned if the RF is unknown.
3188  * Used for devices with external radios.
3189  */
3190 static const char *ath9k_hw_rf_name(u16 rf_version)
3191 {
3192 	int i;
3193 
3194 	for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
3195 		if (ath_rf_names[i].version == rf_version) {
3196 			return ath_rf_names[i].name;
3197 		}
3198 	}
3199 
3200 	return "????";
3201 }
3202 
3203 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
3204 {
3205 	int used;
3206 
3207 	/* chipsets >= AR9280 are single-chip */
3208 	if (AR_SREV_9280_20_OR_LATER(ah)) {
3209 		used = scnprintf(hw_name, len,
3210 			       "Atheros AR%s Rev:%x",
3211 			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3212 			       ah->hw_version.macRev);
3213 	} else {
3215 		used = scnprintf(hw_name, len,
3216 			       "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
3217 			       ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3218 			       ah->hw_version.macRev,
3219 			       ath9k_hw_rf_name((ah->hw_version.analog5GhzRev &
3220 						AR_RADIO_SREV_MAJOR)),
3221 			       ah->hw_version.phyRev);
3222 	}
3223 
3224 	hw_name[used] = '\0';
3225 }
3226 EXPORT_SYMBOL(ath9k_hw_name);
3227
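/*
 * Illustrative sketch: ath9k_hw_name() fills a caller-provided buffer,
 * e.g. for a probe-time log line. The helper name and buffer size are
 * made up.
 */
#if 0
static void example_print_hw_name(struct ath_hw *ah)
{
	char hw_name[64];

	ath9k_hw_name(ah, hw_name, sizeof(hw_name));
	ath_info(ath9k_hw_common(ah), "%s\n", hw_name);
}
#endif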