/*-
 * SPDX-License-Identifier: ISC
 *
 * Copyright (c) 2002-2008 Sam Leffler, Errno Consulting
 * Copyright (c) 2002-2008 Atheros Communications, Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
#include "opt_ah.h"

#include "ah.h"
#include "ah_internal.h"

#include "ar5416/ar5416.h"
#include "ar5416/ar5416reg.h"

/*
 * Checks to see if an interrupt is pending on our NIC.
 *
 * Returns: TRUE if an interrupt is pending
 *          FALSE if not
 */
HAL_BOOL
ar5416IsInterruptPending(struct ath_hal *ah)
{
	uint32_t isr;

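	/*
	 * Howl (AR9130) can't use the host interface cause registers
	 * checked below, so just report an interrupt as pending.
	 */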
	if (AR_SREV_HOWL(ah))
		return AH_TRUE;

	/*
	 * Some platforms trigger our ISR before applying power to
	 * the card, so make sure the INTPEND is really 1, not 0xffffffff.
	 */
	isr = OS_REG_READ(ah, AR_INTR_ASYNC_CAUSE);
	if (isr != AR_INTR_SPURIOUS && (isr & AR_INTR_MAC_IRQ) != 0)
		return AH_TRUE;

	isr = OS_REG_READ(ah, AR_INTR_SYNC_CAUSE);
	if (isr != AR_INTR_SPURIOUS && (isr & AR_INTR_SYNC_DEFAULT))
		return AH_TRUE;

	return AH_FALSE;
}

/*
 * Reads the Interrupt Status Register value from the NIC, thus deasserting
 * the interrupt line, and returns both the masked and unmasked mapped ISR
 * values. The value returned is mapped to abstract the hw-specific bit
 * locations in the Interrupt Status Register.
 *
 * (*masked) is cleared on initial call.
 *
 * Returns: A hardware-abstracted bitmap of all non-masked-out
 *          interrupts pending, as well as an unmasked value
 */
HAL_BOOL
ar5416GetPendingInterrupts(struct ath_hal *ah, HAL_INT *masked)
{
	uint32_t isr, isr0, isr1, sync_cause = 0, o_sync_cause = 0;
	HAL_CAPABILITIES *pCap = &AH_PRIVATE(ah)->ah_caps;

#ifdef AH_INTERRUPT_DEBUGGING
	/*
	 * Blank the interrupt debugging area regardless.
	 */
	bzero(&ah->ah_intrstate, sizeof(ah->ah_intrstate));
	ah->ah_syncstate = 0;
#endif

	/*
	 * Verify there's a mac interrupt and the RTC is on.
	 */
	if (AR_SREV_HOWL(ah)) {
		*masked = 0;
		isr = OS_REG_READ(ah, AR_ISR);
	} else {
		if ((OS_REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
		    (OS_REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
			isr = OS_REG_READ(ah, AR_ISR);
		else
			isr = 0;
#ifdef AH_INTERRUPT_DEBUGGING
		ah->ah_syncstate =
#endif
		o_sync_cause = sync_cause = OS_REG_READ(ah, AR_INTR_SYNC_CAUSE);
		sync_cause &= AR_INTR_SYNC_DEFAULT;
		*masked = 0;

		if (isr == 0 && sync_cause == 0)
			return AH_FALSE;
	}

#ifdef AH_INTERRUPT_DEBUGGING
	ah->ah_intrstate[0] = isr;
	ah->ah_intrstate[1] = OS_REG_READ(ah, AR_ISR_S0);
	ah->ah_intrstate[2] = OS_REG_READ(ah, AR_ISR_S1);
	ah->ah_intrstate[3] = OS_REG_READ(ah, AR_ISR_S2);
	ah->ah_intrstate[4] = OS_REG_READ(ah, AR_ISR_S3);
	ah->ah_intrstate[5] = OS_REG_READ(ah, AR_ISR_S4);
	ah->ah_intrstate[6] = OS_REG_READ(ah, AR_ISR_S5);
#endif

	if (isr != 0) {
		struct ath_hal_5212 *ahp = AH5212(ah);
		uint32_t mask2;

		mask2 = 0;
		if (isr & AR_ISR_BCNMISC) {
			uint32_t isr2 = OS_REG_READ(ah, AR_ISR_S2);
			if (isr2 & AR_ISR_S2_TIM)
				mask2 |= HAL_INT_TIM;
			if (isr2 & AR_ISR_S2_DTIM)
				mask2 |= HAL_INT_DTIM;
			if (isr2 & AR_ISR_S2_DTIMSYNC)
				mask2 |= HAL_INT_DTIMSYNC;
			if (isr2 & AR_ISR_S2_CABEND)
				mask2 |= HAL_INT_CABEND;
			if (isr2 & AR_ISR_S2_GTT)
				mask2 |= HAL_INT_GTT;
			if (isr2 & AR_ISR_S2_CST)
				mask2 |= HAL_INT_CST;
			if (isr2 & AR_ISR_S2_TSFOOR)
				mask2 |= HAL_INT_TSFOOR;

			/*
			 * Don't mask out AR_BCNMISC; instead mask
			 * out what causes it.
			 */
			OS_REG_WRITE(ah, AR_ISR_S2, isr2);
			isr &= ~AR_ISR_BCNMISC;
		}

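		/*
		 * An all-ones read means the card has likely been
		 * removed or powered off; report nothing pending.
		 */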
		if (isr == 0xffffffff) {
			*masked = 0;
			return AH_FALSE;
		}

		*masked = isr & HAL_INT_COMMON;

		if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
			*masked |= HAL_INT_RX;
		if (isr & (AR_ISR_TXMINTR | AR_ISR_TXINTM))
			*masked |= HAL_INT_TX;

		/*
		 * When doing RX interrupt mitigation, the RXOK bit is set
		 * in AR_ISR even if the relevant bit in AR_IMR is clear.
		 * Since this interrupt may be due to another source, don't
		 * just automatically set HAL_INT_RX if it's set, otherwise
		 * we could prematurely service the RX queue.
		 *
		 * In some cases, the driver can even handle all the RX
		 * frames just before the mitigation interrupt fires.
		 * The subsequent RX processing trip will then end up
		 * processing 0 frames.
		 */
#ifdef AH_AR5416_INTERRUPT_MITIGATION
		if (isr & AR_ISR_RXERR)
			*masked |= HAL_INT_RX;
#else
		if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
			*masked |= HAL_INT_RX;
#endif

		if (isr & (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
		    AR_ISR_TXEOL)) {
			*masked |= HAL_INT_TX;

			isr0 = OS_REG_READ(ah, AR_ISR_S0);
			OS_REG_WRITE(ah, AR_ISR_S0, isr0);
			isr1 = OS_REG_READ(ah, AR_ISR_S1);
			OS_REG_WRITE(ah, AR_ISR_S1, isr1);

			/*
			 * Don't clear the primary ISR TX bits, clear
			 * what causes them (S0/S1).
			 */
			isr &= ~(AR_ISR_TXOK | AR_ISR_TXDESC |
			    AR_ISR_TXERR | AR_ISR_TXEOL);

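			/*
			 * Accumulate the per-QCU TXOK/TXDESC/TXERR/TXEOL
			 * bits so the driver knows which TX queues need
			 * servicing.
			 */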
			ahp->ah_intrTxqs |= MS(isr0, AR_ISR_S0_QCU_TXOK);
			ahp->ah_intrTxqs |= MS(isr0, AR_ISR_S0_QCU_TXDESC);
			ahp->ah_intrTxqs |= MS(isr1, AR_ISR_S1_QCU_TXERR);
			ahp->ah_intrTxqs |= MS(isr1, AR_ISR_S1_QCU_TXEOL);
		}

		if ((isr & AR_ISR_GENTMR) || (! pCap->halAutoSleepSupport)) {
			uint32_t isr5;

			isr5 = OS_REG_READ(ah, AR_ISR_S5);
			OS_REG_WRITE(ah, AR_ISR_S5, isr5);
			isr &= ~AR_ISR_GENTMR;

			if (! pCap->halAutoSleepSupport)
				if (isr5 & AR_ISR_S5_TIM_TIMER)
					*masked |= HAL_INT_TIM_TIMER;
		}
		*masked |= mask2;
	}

	/*
	 * Since we're not using AR_ISR_RAC, clear the status bits
	 * for handled interrupts here. For bits whose interrupt
	 * source is a secondary register, those bits should've been
	 * masked out - instead of those bits being written back,
	 * their source (ie, the secondary status registers) should
	 * be cleared. That way there are no race conditions with
	 * new triggers coming in whilst they've been read/cleared.
	 */
	OS_REG_WRITE(ah, AR_ISR, isr);
	/* Flush previous write */
	OS_REG_READ(ah, AR_ISR);

	if (AR_SREV_HOWL(ah))
		return AH_TRUE;

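	/*
	 * Handle the host interface (sync) causes; these generally
	 * indicate host/bus error conditions rather than MAC traffic.
	 */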
	if (sync_cause != 0) {
		HALDEBUG(ah, HAL_DEBUG_INTERRUPT, "%s: sync_cause=0x%x\n",
		    __func__,
		    o_sync_cause);
		if (sync_cause & (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
			*masked |= HAL_INT_FATAL;
		}
		if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
			HALDEBUG(ah, HAL_DEBUG_ANY, "%s: RADM CPL timeout\n",
			    __func__);
			OS_REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
			OS_REG_WRITE(ah, AR_RC, 0);
			*masked |= HAL_INT_FATAL;
		}
		/*
		 * On fatal errors collect ISR state for debugging.
		 */
		if (*masked & HAL_INT_FATAL) {
			AH_PRIVATE(ah)->ah_fatalState[0] = isr;
			AH_PRIVATE(ah)->ah_fatalState[1] = sync_cause;
			HALDEBUG(ah, HAL_DEBUG_ANY,
			    "%s: fatal error, ISR_RAC 0x%x SYNC_CAUSE 0x%x\n",
			    __func__, isr, sync_cause);
		}

		OS_REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
		/* NB: flush write */
		(void) OS_REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
	}
	return AH_TRUE;
}

/*
 * Atomically enables NIC interrupts. Interrupts are passed in
 * via the enumerated bitmask in ints.
 */
HAL_INT
ar5416SetInterrupts(struct ath_hal *ah, HAL_INT ints)
{
	struct ath_hal_5212 *ahp = AH5212(ah);
	uint32_t omask = ahp->ah_maskReg;
	uint32_t mask, mask2;

	HALDEBUG(ah, HAL_DEBUG_INTERRUPT, "%s: 0x%x => 0x%x\n",
	    __func__, omask, ints);

	if (omask & HAL_INT_GLOBAL) {
		HALDEBUG(ah, HAL_DEBUG_INTERRUPT, "%s: disable IER\n", __func__);
		OS_REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
		(void) OS_REG_READ(ah, AR_IER);

		if (! AR_SREV_HOWL(ah)) {
			OS_REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
			(void) OS_REG_READ(ah, AR_INTR_ASYNC_ENABLE);

			OS_REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
			(void) OS_REG_READ(ah, AR_INTR_SYNC_ENABLE);
		}
	}

	mask = ints & HAL_INT_COMMON;
	mask2 = 0;

#ifdef AH_AR5416_INTERRUPT_MITIGATION
	/*
	 * Overwrite default mask if interrupt mitigation
	 * is specified for AR5416.
	 */
	if (ints & HAL_INT_RX)
		mask |= AR_IMR_RXERR | AR_IMR_RXMINTR | AR_IMR_RXINTM;
#else
	if (ints & HAL_INT_RX)
		mask |= AR_IMR_RXOK | AR_IMR_RXERR | AR_IMR_RXDESC;
#endif
	if (ints & HAL_INT_TX) {
		if (ahp->ah_txOkInterruptMask)
			mask |= AR_IMR_TXOK;
		if (ahp->ah_txErrInterruptMask)
			mask |= AR_IMR_TXERR;
		if (ahp->ah_txDescInterruptMask)
			mask |= AR_IMR_TXDESC;
		if (ahp->ah_txEolInterruptMask)
			mask |= AR_IMR_TXEOL;
		if (ahp->ah_txUrnInterruptMask)
			mask |= AR_IMR_TXURN;
	}
	if (ints & HAL_INT_BMISC) {
		mask |= AR_IMR_BCNMISC;
		if (ints & HAL_INT_TIM)
			mask2 |= AR_IMR_S2_TIM;
		if (ints & HAL_INT_DTIM)
			mask2 |= AR_IMR_S2_DTIM;
		if (ints & HAL_INT_DTIMSYNC)
			mask2 |= AR_IMR_S2_DTIMSYNC;
		if (ints & HAL_INT_CABEND)
			mask2 |= AR_IMR_S2_CABEND;
		if (ints & HAL_INT_CST)
			mask2 |= AR_IMR_S2_CST;
		if (ints & HAL_INT_TSFOOR)
			mask2 |= AR_IMR_S2_TSFOOR;
	}

	if (ints & (HAL_INT_GTT | HAL_INT_CST)) {
		mask |= AR_IMR_BCNMISC;
		if (ints & HAL_INT_GTT)
			mask2 |= AR_IMR_S2_GTT;
		if (ints & HAL_INT_CST)
			mask2 |= AR_IMR_S2_CST;
	}

	/* Write the new IMR and store off our SW copy. */
	HALDEBUG(ah, HAL_DEBUG_INTERRUPT, "%s: new IMR 0x%x\n", __func__, mask);
	OS_REG_WRITE(ah, AR_IMR, mask);
	/* Flush write */
	(void) OS_REG_READ(ah, AR_IMR);

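	/*
	 * Preserve the unrelated bits of AR_IMR_S2 and install the
	 * beacon/timer bits computed above.
	 */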
	mask = OS_REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
	    AR_IMR_S2_DTIM |
	    AR_IMR_S2_DTIMSYNC |
	    AR_IMR_S2_CABEND |
	    AR_IMR_S2_CABTO |
	    AR_IMR_S2_TSFOOR |
	    AR_IMR_S2_GTT |
	    AR_IMR_S2_CST);
	OS_REG_WRITE(ah, AR_IMR_S2, mask | mask2);

	ahp->ah_maskReg = ints;

	/* Re-enable interrupts if they were enabled before. */
	if (ints & HAL_INT_GLOBAL) {
		HALDEBUG(ah, HAL_DEBUG_INTERRUPT, "%s: enable IER\n", __func__);
		OS_REG_WRITE(ah, AR_IER, AR_IER_ENABLE);

		if (! AR_SREV_HOWL(ah)) {
			mask = AR_INTR_MAC_IRQ;
			if (ints & HAL_INT_GPIO)
				mask |= SM(AH5416(ah)->ah_gpioMask,
				    AR_INTR_ASYNC_MASK_GPIO);
			OS_REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, mask);
			OS_REG_WRITE(ah, AR_INTR_ASYNC_MASK, mask);

			mask = AR_INTR_SYNC_DEFAULT;
			if (ints & HAL_INT_GPIO)
				mask |= SM(AH5416(ah)->ah_gpioMask,
				    AR_INTR_SYNC_MASK_GPIO);
			OS_REG_WRITE(ah, AR_INTR_SYNC_ENABLE, mask);
			OS_REG_WRITE(ah, AR_INTR_SYNC_MASK, mask);
		}
	}

	return omask;
}