Lines Matching defs:sc

183 static void	ath_txq_init(struct ath_softc *sc, struct ath_txq *, int);
188 static int ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq,
261 ath_legacy_attach_comp_func(struct ath_softc *sc)
269 switch (sc->sc_txqsetup &~ (1<<sc->sc_cabq->axq_qnum)) {
271 TASK_INIT(&sc->sc_txtask, 0, ath_tx_proc_q0, sc);
274 TASK_INIT(&sc->sc_txtask, 0, ath_tx_proc_q0123, sc);
277 TASK_INIT(&sc->sc_txtask, 0, ath_tx_proc, sc);
299 _ath_power_setpower(struct ath_softc *sc, int power_state, int selfgen,
302 ATH_LOCK_ASSERT(sc);
304 DPRINTF(sc, ATH_DEBUG_PWRSAVE, "%s: (%s:%d) state=%d, refcnt=%d, target=%d, cur=%d\n",
309 sc->sc_powersave_refcnt,
310 sc->sc_target_powerstate,
311 sc->sc_cur_powerstate);
313 sc->sc_target_powerstate = power_state;
322 if ((sc->sc_powersave_refcnt == 0 || power_state == HAL_PM_AWAKE) &&
323 power_state != sc->sc_cur_powerstate) {
324 sc->sc_cur_powerstate = power_state;
325 ath_hal_setpower(sc->sc_ah, power_state);
336 sc->sc_cur_powerstate == HAL_PM_AWAKE &&
337 sc->sc_target_selfgen_state != HAL_PM_AWAKE) {
338 ath_hal_setselfgenpower(sc->sc_ah,
339 sc->sc_target_selfgen_state);
353 _ath_power_set_selfgen(struct ath_softc *sc, int power_state, const char *file, int line)
356 ATH_LOCK_ASSERT(sc);
358 DPRINTF(sc, ATH_DEBUG_PWRSAVE, "%s: (%s:%d) state=%d, refcnt=%d\n",
363 sc->sc_target_selfgen_state);
365 sc->sc_target_selfgen_state = power_state;
374 if (sc->sc_cur_powerstate == HAL_PM_AWAKE) {
375 ath_hal_setselfgenpower(sc->sc_ah, power_state);
389 _ath_power_set_power_state(struct ath_softc *sc, int power_state, const char *file, int line)
391 ATH_LOCK_ASSERT(sc);
393 DPRINTF(sc, ATH_DEBUG_PWRSAVE, "%s: (%s:%d) state=%d, refcnt=%d\n",
398 sc->sc_powersave_refcnt);
400 sc->sc_powersave_refcnt++;
406 if (power_state != sc->sc_cur_powerstate) {
407 ath_hal_setpower(sc->sc_ah, power_state);
408 sc->sc_cur_powerstate = power_state;
412 if (sc->sc_cur_powerstate == HAL_PM_AWAKE &&
413 sc->sc_target_selfgen_state != HAL_PM_AWAKE) {
414 ath_hal_setselfgenpower(sc->sc_ah,
415 sc->sc_target_selfgen_state);
427 _ath_power_restore_power_state(struct ath_softc *sc, const char *file, int line)
430 ATH_LOCK_ASSERT(sc);
432 DPRINTF(sc, ATH_DEBUG_PWRSAVE, "%s: (%s:%d) refcnt=%d, target state=%d\n",
436 sc->sc_powersave_refcnt,
437 sc->sc_target_powerstate);
439 if (sc->sc_powersave_refcnt == 0)
440 device_printf(sc->sc_dev, "%s: refcnt=0?\n", __func__);
442 sc->sc_powersave_refcnt--;
444 if (sc->sc_powersave_refcnt == 0 &&
445 sc->sc_target_powerstate != sc->sc_cur_powerstate) {
446 sc->sc_cur_powerstate = sc->sc_target_powerstate;
447 ath_hal_setpower(sc->sc_ah, sc->sc_target_powerstate);
453 if (sc->sc_cur_powerstate == HAL_PM_AWAKE &&
454 sc->sc_target_selfgen_state != HAL_PM_AWAKE) {
455 ath_hal_setselfgenpower(sc->sc_ah,
456 sc->sc_target_selfgen_state);
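The _ath_power_* matches above show the driver's awake/restore refcounting: ath_power_set_power_state() bumps sc_powersave_refcnt and forces the chip awake, and ath_power_restore_power_state() only lets the hardware fall back to sc_target_powerstate once the count returns to zero. A minimal sketch of that pattern, with hypothetical names (my_softc, hw_set_power) standing in for the real softc fields and the HAL call:

struct my_softc {
    int powersave_refcnt;   /* callers currently needing the chip awake */
    int cur_powerstate;     /* state the hardware is in right now */
    int target_powerstate;  /* state to fall back to once refcnt == 0 */
};

/* hypothetical stand-in for ath_hal_setpower() */
static void
hw_set_power(struct my_softc *sc, int state)
{
    (void)sc;
    (void)state;
}

static void
my_power_set_power_state(struct my_softc *sc, int state)
{
    sc->powersave_refcnt++;
    if (state != sc->cur_powerstate) {
        hw_set_power(sc, state);
        sc->cur_powerstate = state;
    }
}

static void
my_power_restore_power_state(struct my_softc *sc)
{
    if (sc->powersave_refcnt == 0)
        return;                 /* underflow guard; the driver warns here */
    sc->powersave_refcnt--;
    /* only the last caller out actually changes the hardware state */
    if (sc->powersave_refcnt == 0 &&
        sc->cur_powerstate != sc->target_powerstate) {
        sc->cur_powerstate = sc->target_powerstate;
        hw_set_power(sc, sc->target_powerstate);
    }
}

The real routines additionally run under ATH_LOCK and mirror the self-generated-frame power state; the sketch omits both.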
473 ath_setup_hal_config(struct ath_softc *sc, HAL_OPS_CONFIG *ah_config)
477 if (sc->sc_pci_devinfo & (ATH_PCI_CUS198 | ATH_PCI_CUS230)) {
484 device_printf(sc->sc_dev, "configuring for %s\n",
485 (sc->sc_pci_devinfo & ATH_PCI_CUS198) ?
489 if (sc->sc_pci_devinfo & ATH_PCI_CUS217)
490 device_printf(sc->sc_dev, "CUS217 card detected\n");
492 if (sc->sc_pci_devinfo & ATH_PCI_CUS252)
493 device_printf(sc->sc_dev, "CUS252 card detected\n");
495 if (sc->sc_pci_devinfo & ATH_PCI_AR9565_1ANT)
496 device_printf(sc->sc_dev, "WB335 1-ANT card detected\n");
498 if (sc->sc_pci_devinfo & ATH_PCI_AR9565_2ANT)
499 device_printf(sc->sc_dev, "WB335 2-ANT card detected\n");
501 if (sc->sc_pci_devinfo & ATH_PCI_BT_ANT_DIV)
502 device_printf(sc->sc_dev,
505 if (sc->sc_pci_devinfo & ATH_PCI_KILLER)
506 device_printf(sc->sc_dev, "Killer Wireless card detected\n");
515 if (sc->sc_pci_devinfo & (ATH9K_PCI_AR9565_1ANT | ATH9K_PCI_AR9565_2ANT)) {
516 if (!(sc->sc_pci_devinfo & ATH9K_PCI_BT_ANT_DIV))
520 if (sc->sc_pci_devinfo & ATH9K_PCI_BT_ANT_DIV) {
522 device_printf(sc->sc_dev, "Set BT/WLAN RX diversity capability\n");
526 if (sc->sc_pci_devinfo & ATH_PCI_D3_L1_WAR) {
528 device_printf(sc->sc_dev, "Enable WAR for ASPM D3/L1\n");
532 if (sc->sc_pci_devinfo & ATH9K_PCI_NO_PLL_PWRSAVE) {
534 device_printf(sc->sc_dev, "Disable PLL PowerSave\n");
546 ath_fetch_mac_kenv(struct ath_softc *sc, uint8_t *macaddr)
560 device_get_name(sc->sc_dev),
561 device_get_unit(sc->sc_dev));
569 device_printf(sc->sc_dev,
599 ath_attach(u_int16_t devid, struct ath_softc *sc)
601 struct ieee80211com *ic = &sc->sc_ic;
609 DPRINTF(sc, ATH_DEBUG_ANY, "%s: devid 0x%x\n", __func__, devid);
611 ic->ic_softc = sc;
612 ic->ic_name = device_get_nameunit(sc->sc_dev);
621 ath_setup_hal_config(sc, &ah_config);
623 ah = ath_hal_attach(devid, sc, sc->sc_st, sc->sc_sh,
624 sc->sc_eepromdata, &ah_config, &status);
626 device_printf(sc->sc_dev,
631 sc->sc_ah = ah;
632 sc->sc_invalid = 0; /* ready to go, enable interrupt handling */
634 sc->sc_debug = ath_debug;
644 ATH_LOCK(sc);
645 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
646 ATH_UNLOCK(sc);
654 if (ath_hal_hasedma(sc->sc_ah)) {
655 sc->sc_isedma = 1;
656 ath_recv_setup_edma(sc);
657 ath_xmit_setup_edma(sc);
659 ath_recv_setup_legacy(sc);
660 ath_xmit_setup_legacy(sc);
663 if (ath_hal_hasmybeacon(sc->sc_ah)) {
664 sc->sc_do_mybeacon = 1;
674 sc->sc_mrretry = ath_hal_setupxtxdesc(ah, NULL, 0,0, 0,0, 0,0);
682 sc->sc_needmib = 1;
687 sc->sc_keymax = ath_hal_keycachesize(ah);
688 if (sc->sc_keymax > ATH_KEYMAX) {
689 device_printf(sc->sc_dev,
691 ATH_KEYMAX, sc->sc_keymax);
692 sc->sc_keymax = ATH_KEYMAX;
698 for (i = 0; i < sc->sc_keymax; i++)
704 error = ath_getchannels(sc);
711 ath_rate_setup(sc, IEEE80211_MODE_11A);
712 ath_rate_setup(sc, IEEE80211_MODE_11B);
713 ath_rate_setup(sc, IEEE80211_MODE_11G);
714 ath_rate_setup(sc, IEEE80211_MODE_TURBO_A);
715 ath_rate_setup(sc, IEEE80211_MODE_TURBO_G);
716 ath_rate_setup(sc, IEEE80211_MODE_STURBO_A);
717 ath_rate_setup(sc, IEEE80211_MODE_11NA);
718 ath_rate_setup(sc, IEEE80211_MODE_11NG);
719 ath_rate_setup(sc, IEEE80211_MODE_HALF);
720 ath_rate_setup(sc, IEEE80211_MODE_QUARTER);
723 ath_setcurmode(sc, IEEE80211_MODE_11A);
728 error = ath_desc_alloc(sc);
730 device_printf(sc->sc_dev,
734 error = ath_txdma_setup(sc);
736 device_printf(sc->sc_dev,
744 error = ath_rxdma_setup(sc);
746 device_printf(sc->sc_dev,
751 callout_init_mtx(&sc->sc_cal_ch, &sc->sc_mtx, 0);
752 callout_init_mtx(&sc->sc_wd_ch, &sc->sc_mtx, 0);
754 ATH_TXBUF_LOCK_INIT(sc);
756 sc->sc_tq = taskqueue_create("ath_taskq", M_NOWAIT,
757 taskqueue_thread_enqueue, &sc->sc_tq);
758 taskqueue_start_threads(&sc->sc_tq, 1, PI_NET, "%s taskq",
759 device_get_nameunit(sc->sc_dev));
761 TASK_INIT(&sc->sc_rxtask, 0, sc->sc_rx.recv_tasklet, sc);
762 TASK_INIT(&sc->sc_bmisstask, 0, ath_bmiss_proc, sc);
763 TASK_INIT(&sc->sc_tsfoortask, 0, ath_tsfoor_proc, sc);
764 TASK_INIT(&sc->sc_bstucktask,0, ath_bstuck_proc, sc);
765 TASK_INIT(&sc->sc_resettask,0, ath_reset_proc, sc);
766 TASK_INIT(&sc->sc_txqtask, 0, ath_txq_sched_tasklet, sc);
767 TASK_INIT(&sc->sc_fataltask, 0, ath_fatal_proc, sc);
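The taskqueue_create()/TASK_INIT() matches above set up the driver's private deferred-work queue at attach time. A minimal taskqueue(9) sketch of the same pattern, with hypothetical names (my_sc, my_rx_task); only the queue/thread/task setup mirrors the listed lines:

#include <sys/param.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <sys/priority.h>
#include <sys/taskqueue.h>

struct my_sc {
    struct taskqueue *tq;
    struct task rxtask;
};

/* task handlers take (context, pending-count) */
static void
my_rx_task(void *arg, int npending)
{
    struct my_sc *sc = arg;

    (void)sc;
    (void)npending;
    /* deferred RX processing would run here, outside interrupt context */
}

static void
my_taskq_attach(struct my_sc *sc, const char *nameunit)
{
    /* one private queue, drained by one kernel thread at network priority */
    sc->tq = taskqueue_create("my_taskq", M_NOWAIT,
        taskqueue_thread_enqueue, &sc->tq);
    taskqueue_start_threads(&sc->tq, 1, PI_NET, "%s taskq", nameunit);

    /* bind the handler; interrupt paths later call taskqueue_enqueue() */
    TASK_INIT(&sc->rxtask, 0, my_rx_task, sc);
}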
777 sc->sc_bhalq = ath_beaconq_setup(sc);
778 if (sc->sc_bhalq == (u_int) -1) {
779 device_printf(sc->sc_dev,
784 sc->sc_cabq = ath_txq_setup(sc, HAL_TX_QUEUE_CAB, 0);
785 if (sc->sc_cabq == NULL) {
786 device_printf(sc->sc_dev, "unable to setup CAB xmit queue!\n");
791 if (!ath_tx_setup(sc, WME_AC_BK, HAL_WME_AC_BK)) {
792 device_printf(sc->sc_dev,
798 if (!ath_tx_setup(sc, WME_AC_BE, HAL_WME_AC_BE) ||
799 !ath_tx_setup(sc, WME_AC_VI, HAL_WME_AC_VI) ||
800 !ath_tx_setup(sc, WME_AC_VO, HAL_WME_AC_VO)) {
808 if (sc->sc_ac2q[WME_AC_VI] != NULL)
809 ath_tx_cleanupq(sc, sc->sc_ac2q[WME_AC_VI]);
810 if (sc->sc_ac2q[WME_AC_BE] != NULL)
811 ath_tx_cleanupq(sc, sc->sc_ac2q[WME_AC_BE]);
812 sc->sc_ac2q[WME_AC_BE] = sc->sc_ac2q[WME_AC_BK];
813 sc->sc_ac2q[WME_AC_VI] = sc->sc_ac2q[WME_AC_BK];
814 sc->sc_ac2q[WME_AC_VO] = sc->sc_ac2q[WME_AC_BK];
823 sc->sc_tx.xmit_attach_comp_func(sc);
831 sc->sc_setdefantenna = ath_setdefantenna;
832 sc->sc_rc = ath_rate_attach(sc);
833 if (sc->sc_rc == NULL) {
839 if (! ath_dfs_attach(sc)) {
840 device_printf(sc->sc_dev,
847 if (ath_spectral_attach(sc) < 0) {
848 device_printf(sc->sc_dev,
855 if (ath_btcoex_attach(sc) < 0) {
856 device_printf(sc->sc_dev,
863 if (ath_lna_div_attach(sc) < 0) {
864 device_printf(sc->sc_dev,
871 TASK_INIT(&sc->sc_dfstask, 0, ath_dfs_tasklet, sc);
874 sc->sc_blinking = 0;
875 sc->sc_ledstate = 1;
876 sc->sc_ledon = 0; /* low true */
877 sc->sc_ledidle = (2700*hz)/1000; /* 2.7sec */
878 callout_init(&sc->sc_ledtimer, 1);
891 sc->sc_hardled = (1 == 0);
892 sc->sc_led_net_pin = -1;
893 sc->sc_led_pwr_pin = -1;
899 sc->sc_softled = (devid == AR5212_DEVID_IBM || devid == AR5211_DEVID);
900 ath_led_config(sc);
953 sc->sc_splitmic = 1;
960 sc->sc_wmetkipmic = 1;
962 sc->sc_hasclrkey = ath_hal_ciphersupported(ah, HAL_CIPHER_CLR);
966 if (ath_hal_hasmcastkeysearch(sc->sc_ah) &&
967 !ath_hal_getmcastkeysearch(sc->sc_ah)) {
968 ath_hal_setmcastkeysearch(sc->sc_ah, 1);
970 sc->sc_mcastkey = ath_hal_getmcastkeysearch(ah);
977 setbit(sc->sc_keymap, i);
978 setbit(sc->sc_keymap, i+64);
979 if (sc->sc_splitmic) {
980 setbit(sc->sc_keymap, i+32);
981 setbit(sc->sc_keymap, i+32+64);
997 if (sc->sc_ac2q[WME_AC_BE] != sc->sc_ac2q[WME_AC_BK])
1004 sc->sc_hasbmask = ath_hal_hasbssidmask(ah);
1005 sc->sc_hasbmatch = ath_hal_hasbssidmatch(ah);
1006 sc->sc_hastsfadd = ath_hal_hastsfadjust(ah);
1007 sc->sc_rxslink = ath_hal_self_linked_final_rxdesc(ah);
1012 sc->sc_rxtsf32 = 1;
1015 device_printf(sc->sc_dev, "RX timestamp: %d bits\n", i);
1019 device_printf(sc->sc_dev, "TX timestamp: %d bits\n", i);
1022 sc->sc_hasenforcetxop = ath_hal_hasenforcetxop(ah);
1023 sc->sc_rx_lnamixer = ath_hal_hasrxlnamixer(ah);
1024 sc->sc_hasdivcomb = ath_hal_hasdivantcomb(ah);
1041 if (sc->sc_pci_devinfo & (ATH9K_PCI_AR9565_1ANT | ATH9K_PCI_AR9565_2ANT)) {
1042 device_printf(sc->sc_dev, "%s: WB335: disabling LNA mixer diversity\n",
1044 sc->sc_dolnadiv = 0;
1065 sc->sc_txq_data_minfree = 10;
1075 sc->sc_txq_mcastq_maxdepth = MIN(64, ath_txbuf / 4);
1080 sc->sc_txq_node_psq_maxdepth = 16;
1093 sc->sc_txq_node_maxdepth = MIN(128, ath_txbuf / 4);
1096 sc->sc_cabq_enable = 1;
1106 if (resource_int_value(device_get_name(sc->sc_dev),
1107 device_get_unit(sc->sc_dev), "rx_chainmask",
1109 device_printf(sc->sc_dev, "Setting RX chainmask to 0x%x\n",
1111 (void) ath_hal_setrxchainmask(sc->sc_ah, rx_chainmask);
1113 if (resource_int_value(device_get_name(sc->sc_dev),
1114 device_get_unit(sc->sc_dev), "tx_chainmask",
1116 device_printf(sc->sc_dev, "Setting TX chainmask to 0x%x\n",
1118 (void) ath_hal_settxchainmask(sc->sc_ah, tx_chainmask);
1126 ath_hal_getrxchainmask(ah, &sc->sc_rxchainmask);
1127 ath_hal_gettxchainmask(ah, &sc->sc_txchainmask);
1133 sc->sc_mrrprot = 0; /* XXX should be a capability */
1139 &sc->sc_ent_cfg) == HAL_OK)
1140 sc->sc_use_ent = 1;
1151 device_printf(sc->sc_dev, "[HT] enabling HT modes\n");
1153 sc->sc_mrrprot = 1; /* XXX should be a capability */
1170 device_printf(sc->sc_dev,
1198 sc->sc_rx_stbc = 1;
1199 device_printf(sc->sc_dev,
1205 sc->sc_tx_stbc = 1;
1206 device_printf(sc->sc_dev,
1212 &sc->sc_rts_aggr_limit);
1213 if (sc->sc_rts_aggr_limit != (64 * 1024))
1214 device_printf(sc->sc_dev,
1216 sc->sc_rts_aggr_limit / 1024);
1223 sc->sc_has_ldpc = 1;
1224 device_printf(sc->sc_dev,
1230 device_printf(sc->sc_dev,
1238 sc->sc_hwq_limit_aggr = ATH_AGGR_MIN_QDEPTH;
1239 sc->sc_hwq_limit_nonaggr = ATH_NONAGGR_MIN_QDEPTH;
1240 sc->sc_tid_hwq_lo = ATH_AGGR_SCHED_LOW;
1241 sc->sc_tid_hwq_hi = ATH_AGGR_SCHED_HIGH;
1242 sc->sc_aggr_limit = ATH_AGGR_MAXSIZE;
1243 sc->sc_delim_min_pad = 0;
1252 sc->sc_ah->ah_config.ah_serialise_reg_war = 1;
1253 device_printf(sc->sc_dev,
1260 TAILQ_INIT(&sc->sc_rx_rxlist[HAL_RX_QUEUE_HP]);
1261 TAILQ_INIT(&sc->sc_rx_rxlist[HAL_RX_QUEUE_LP]);
1272 sc->sc_defant = ath_hal_getdefantenna(ah);
1278 sc->sc_hasveol = ath_hal_hasveol(ah);
1281 if (ath_fetch_mac_kenv(sc, ic->ic_macaddr) == 0) {
1288 if (sc->sc_hasbmask)
1289 ath_hal_getbssidmask(ah, sc->sc_hwbssidmask);
1292 ic->ic_max_keyix = sc->sc_keymax;
1297 sc->sc_opmode = HAL_M_STA;
1312 sc->sc_node_free = ic->ic_node_free;
1314 sc->sc_node_cleanup = ic->ic_node_cleanup;
1322 sc->sc_addba_request = ic->ic_addba_request;
1323 sc->sc_addba_response = ic->ic_addba_response;
1324 sc->sc_addba_stop = ic->ic_addba_stop;
1325 sc->sc_bar_response = ic->ic_bar_response;
1326 sc->sc_addba_response_timeout = ic->ic_addba_response_timeout;
1344 &sc->sc_tx_th.wt_ihdr, sizeof(sc->sc_tx_th), 0,
1346 &sc->sc_rx_th.wr_ihdr, sizeof(sc->sc_rx_th), 1,
1353 &sc->sc_tx_th.wt_ihdr, sizeof(sc->sc_tx_th),
1355 &sc->sc_rx_th.wr_ihdr, sizeof(sc->sc_rx_th),
1363 if_ath_alq_init(&sc->sc_alq, device_get_nameunit(sc->sc_dev));
1364 if_ath_alq_setcfg(&sc->sc_alq,
1365 sc->sc_ah->ah_macVersion,
1366 sc->sc_ah->ah_macRev,
1367 sc->sc_ah->ah_phyRev,
1368 sc->sc_ah->ah_magic);
1375 ath_sysctlattach(sc);
1376 ath_sysctl_stats_attach(sc);
1377 ath_sysctl_hal_attach(sc);
1381 ath_announce(sc);
1386 ATH_LOCK(sc);
1387 ath_power_setpower(sc, HAL_PM_FULL_SLEEP, 1);
1388 ATH_UNLOCK(sc);
1392 ath_tx_cleanup(sc);
1393 ath_desc_free(sc);
1394 ath_txdma_teardown(sc);
1395 ath_rxdma_teardown(sc);
1400 sc->sc_invalid = 1;
1405 ath_detach(struct ath_softc *sc)
1428 ATH_LOCK(sc);
1429 ath_power_set_power_state(sc, HAL_PM_AWAKE);
1430 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
1435 ath_stop(sc);
1436 ATH_UNLOCK(sc);
1438 ieee80211_ifdetach(&sc->sc_ic);
1439 taskqueue_free(sc->sc_tq);
1441 if (sc->sc_tx99 != NULL)
1442 sc->sc_tx99->detach(sc->sc_tx99);
1444 ath_rate_detach(sc->sc_rc);
1446 if_ath_alq_tidyup(&sc->sc_alq);
1448 ath_lna_div_detach(sc);
1449 ath_btcoex_detach(sc);
1450 ath_spectral_detach(sc);
1451 ath_dfs_detach(sc);
1452 ath_desc_free(sc);
1453 ath_txdma_teardown(sc);
1454 ath_rxdma_teardown(sc);
1455 ath_tx_cleanup(sc);
1456 ath_hal_detach(sc->sc_ah); /* NB: sets chip in full sleep */
1468 assign_address(struct ath_softc *sc, uint8_t mac[IEEE80211_ADDR_LEN], int clone)
1472 if (clone && sc->sc_hasbmask) {
1475 if ((sc->sc_bssidmask & (1<<i)) == 0)
1481 sc->sc_bssidmask |= 1<<i;
1482 sc->sc_hwbssidmask[0] &= ~mac[0];
1484 sc->sc_nbssid0++;
1488 reclaim_address(struct ath_softc *sc, const uint8_t mac[IEEE80211_ADDR_LEN])
1493 if (i != 0 || --sc->sc_nbssid0 == 0) {
1494 sc->sc_bssidmask &= ~(1<<i);
1498 if (sc->sc_bssidmask & (1<<i))
1500 sc->sc_hwbssidmask[0] |= mask;
1511 assign_bslot(struct ath_softc *sc)
1517 if (sc->sc_bslot[slot] == NULL) {
1518 if (sc->sc_bslot[(slot+1)%ATH_BCBUF] == NULL &&
1519 sc->sc_bslot[(slot-1)%ATH_BCBUF] == NULL)
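assign_address()/reclaim_address() above keep a small bitmap of cloned-BSSID slots (sc_bssidmask) and widen the hardware BSSID match mask (sc_hwbssidmask[0] &= ~mac[0]) so every clone still matches. A rough, hypothetical sketch of that bookkeeping, assuming an 8-slot limit and tracking only byte 0 of the mask:

#include <sys/types.h>

struct my_bssid_state {
    uint32_t slotmask;  /* bit i set => cloned BSSID slot i in use */
    uint8_t  hwmask0;   /* hardware BSSID match mask, byte 0 (starts 0xff) */
};

static int
my_assign_address(struct my_bssid_state *st, uint8_t mac0)
{
    int i;

    for (i = 0; i < 8; i++)
        if ((st->slotmask & (1u << i)) == 0)
            break;              /* first free slot */
    if (i == 8)
        return (-1);            /* all slots taken */
    st->slotmask |= 1u << i;
    st->hwmask0 &= ~mac0;       /* mirrors sc_hwbssidmask[0] &= ~mac[0] */
    return (i);
}

static void
my_reclaim_address(struct my_bssid_state *st, int i, uint8_t mac0)
{
    st->slotmask &= ~(1u << i);
    /* the driver then rebuilds the mask from the slots still in use;
     * the full rebuild is omitted here */
    (void)mac0;
}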
1533 struct ath_softc *sc = ic->ic_softc;
1544 ATH_LOCK(sc);
1548 if (sc->sc_nstavaps != 0) { /* XXX only 1 for now */
1549 device_printf(sc->sc_dev, "only 1 sta vap supported\n");
1552 if (sc->sc_nvaps) {
1567 if (sc->sc_nvaps != 0) { /* XXX only 1 for now */
1568 device_printf(sc->sc_dev,
1577 if (sc->sc_nvaps != 0) {
1578 device_printf(sc->sc_dev,
1588 if (sc->sc_nvaps != 0 && ic->ic_opmode != opmode) {
1603 if (sc->sc_nvaps != 0 && ic->ic_opmode == IEEE80211_M_STA) {
1604 device_printf(sc->sc_dev,
1614 if (sc->sc_nvaps == 0)
1620 device_printf(sc->sc_dev, "unknown opmode %d\n", opmode);
1626 if (needbeacon & TAILQ_EMPTY(&sc->sc_bbuf)) {
1627 device_printf(sc->sc_dev, "no beacon buffer available\n");
1633 assign_address(sc, mac, flags & IEEE80211_CLONE_BSSID);
1634 ath_hal_setbssidmask(sc->sc_ah, sc->sc_hwbssidmask);
1639 ATH_UNLOCK(sc);
1641 ATH_LOCK(sc);
1643 device_printf(sc->sc_dev, "%s: error %d creating vap\n",
1697 avp->av_bcbuf = TAILQ_FIRST(&sc->sc_bbuf);
1698 TAILQ_REMOVE(&sc->sc_bbuf, avp->av_bcbuf, bf_list);
1699 if (opmode != IEEE80211_M_IBSS || !sc->sc_hasveol) {
1704 avp->av_bslot = assign_bslot(sc);
1705 KASSERT(sc->sc_bslot[avp->av_bslot] == NULL,
1707 sc->sc_bslot[avp->av_bslot] = vap;
1708 sc->sc_nbcnvaps++;
1710 if (sc->sc_hastsfadd && sc->sc_nbcnvaps > 0) {
1716 sc->sc_stagbeacons = 1;
1718 ath_txq_init(sc, &avp->av_mcastq, ATH_TXQ_SWQ);
1723 sc->sc_nvaps++;
1725 sc->sc_nstavaps++;
1727 sc->sc_nmeshvaps++;
1731 sc->sc_opmode = HAL_M_IBSS;
1734 sc->sc_opmode = HAL_M_STA;
1739 sc->sc_tdma = 1;
1741 sc->sc_stagbeacons = 0;
1751 sc->sc_opmode = HAL_M_HOSTAP;
1754 sc->sc_opmode = HAL_M_MONITOR;
1760 if (sc->sc_hastsfadd) {
1764 ath_hal_settsfadjust(sc->sc_ah, sc->sc_stagbeacons);
1770 sc->sc_swbmiss = 1;
1772 ATH_UNLOCK(sc);
1779 reclaim_address(sc, mac);
1780 ath_hal_setbssidmask(sc->sc_ah, sc->sc_hwbssidmask);
1783 ATH_UNLOCK(sc);
1791 struct ath_softc *sc = ic->ic_softc;
1792 struct ath_hal *ah = sc->sc_ah;
1795 ATH_LOCK(sc);
1796 ath_power_set_power_state(sc, HAL_PM_AWAKE);
1797 ATH_UNLOCK(sc);
1799 DPRINTF(sc, ATH_DEBUG_RESET, "%s: called\n", __func__);
1800 if (sc->sc_running) {
1808 ath_stoprecv(sc, 1); /* stop recv side */
1809 ath_rx_flush(sc);
1810 ath_draintxq(sc, ATH_RESET_DEFAULT); /* stop hw xmit side */
1834 ath_draintxq(sc, ATH_RESET_DEFAULT);
1836 ATH_LOCK(sc);
1844 sc->sc_bslot[avp->av_bslot] = NULL;
1845 sc->sc_nbcnvaps--;
1847 ath_beacon_return(sc, avp->av_bcbuf);
1849 if (sc->sc_nbcnvaps == 0) {
1850 sc->sc_stagbeacons = 0;
1851 if (sc->sc_hastsfadd)
1852 ath_hal_settsfadjust(sc->sc_ah, 0);
1857 ath_tx_draintxq(sc, &avp->av_mcastq);
1863 sc->sc_nstavaps--;
1864 if (sc->sc_nstavaps == 0 && sc->sc_swbmiss)
1865 sc->sc_swbmiss = 0;
1869 reclaim_address(sc, vap->iv_myaddr);
1870 ath_hal_setbssidmask(ah, sc->sc_hwbssidmask);
1872 sc->sc_nmeshvaps--;
1875 sc->sc_nvaps--;
1878 if (sc->sc_tdma && sc->sc_nvaps == 0) {
1879 sc->sc_tdma = 0;
1880 sc->sc_swbmiss = 0;
1885 if (sc->sc_running) {
1890 if (ath_startrecv(sc) != 0)
1891 device_printf(sc->sc_dev,
1893 if (sc->sc_beacons) { /* restart beacons */
1895 if (sc->sc_tdma)
1896 ath_tdma_config(sc, NULL);
1899 ath_beacon_config(sc, NULL);
1901 ath_hal_intrset(ah, sc->sc_imask);
1905 ath_power_restore_power_state(sc);
1906 ATH_UNLOCK(sc);
1910 ath_suspend(struct ath_softc *sc)
1912 struct ieee80211com *ic = &sc->sc_ic;
1914 sc->sc_resume_up = ic->ic_nrunning != 0;
1931 ath_hal_intrset(sc->sc_ah, 0);
1932 taskqueue_block(sc->sc_tq);
1934 ATH_LOCK(sc);
1935 callout_stop(&sc->sc_cal_ch);
1936 ATH_UNLOCK(sc);
1943 ath_hal_enablepcie(sc->sc_ah, 1, 1);
1953 ath_reset_keycache(struct ath_softc *sc)
1955 struct ieee80211com *ic = &sc->sc_ic;
1956 struct ath_hal *ah = sc->sc_ah;
1959 ATH_LOCK(sc);
1960 ath_power_set_power_state(sc, HAL_PM_AWAKE);
1961 for (i = 0; i < sc->sc_keymax; i++)
1963 ath_power_restore_power_state(sc);
1964 ATH_UNLOCK(sc);
1973 ath_update_chainmasks(struct ath_softc *sc, struct ieee80211_channel *chan)
1980 sc->sc_cur_rxchainmask = sc->sc_rxchainmask;
1982 sc->sc_cur_txchainmask = sc->sc_txchainmask;
1984 sc->sc_cur_txchainmask = 1;
1987 DPRINTF(sc, ATH_DEBUG_RESET,
1990 sc->sc_cur_txchainmask,
1991 sc->sc_cur_rxchainmask);
1995 ath_resume(struct ath_softc *sc)
1997 struct ieee80211com *ic = &sc->sc_ic;
1998 struct ath_hal *ah = sc->sc_ah;
2007 ath_update_chainmasks(sc,
2008 sc->sc_curchan != NULL ? sc->sc_curchan : ic->ic_curchan);
2009 ath_hal_setchainmasks(sc->sc_ah, sc->sc_cur_txchainmask,
2010 sc->sc_cur_rxchainmask);
2013 ATH_LOCK(sc);
2014 ath_power_setselfgen(sc, HAL_PM_AWAKE);
2015 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2016 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
2017 ATH_UNLOCK(sc);
2019 ath_hal_reset(ah, sc->sc_opmode,
2020 sc->sc_curchan != NULL ? sc->sc_curchan : ic->ic_curchan,
2022 ath_reset_keycache(sc);
2024 ATH_RX_LOCK(sc);
2025 sc->sc_rx_stopped = 1;
2026 sc->sc_rx_resetted = 1;
2027 ATH_RX_UNLOCK(sc);
2030 ath_dfs_radar_enable(sc, ic->ic_curchan);
2033 ath_spectral_enable(sc, ic->ic_curchan);
2038 ath_btcoex_enable(sc, ic->ic_curchan);
2044 if (sc->sc_hasenforcetxop && sc->sc_tdma)
2045 ath_hal_setenforcetxop(sc->sc_ah, 1);
2047 ath_hal_setenforcetxop(sc->sc_ah, 0);
2050 ath_led_config(sc);
2053 if (sc->sc_resume_up)
2056 ATH_LOCK(sc);
2057 ath_power_restore_power_state(sc);
2058 ATH_UNLOCK(sc);
2064 ath_shutdown(struct ath_softc *sc)
2067 ATH_LOCK(sc);
2068 ath_stop(sc);
2069 ATH_UNLOCK(sc);
2079 struct ath_softc *sc = arg;
2080 struct ath_hal *ah = sc->sc_ah;
2088 ATH_PCU_LOCK(sc);
2089 if (sc->sc_inreset_cnt) {
2093 DPRINTF(sc, ATH_DEBUG_ANY,
2096 ATH_PCU_UNLOCK(sc);
2100 if (sc->sc_invalid) {
2105 DPRINTF(sc, ATH_DEBUG_ANY, "%s: invalid; ignored\n", __func__);
2106 ATH_PCU_UNLOCK(sc);
2110 ATH_PCU_UNLOCK(sc);
2114 ATH_LOCK(sc);
2115 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2116 ATH_UNLOCK(sc);
2118 if (sc->sc_ic.ic_nrunning == 0 && sc->sc_running == 0) {
2121 DPRINTF(sc, ATH_DEBUG_ANY, "%s: ic_nrunning %d sc_running %d\n",
2122 __func__, sc->sc_ic.ic_nrunning, sc->sc_running);
2125 ATH_PCU_UNLOCK(sc);
2127 ATH_LOCK(sc);
2128 ath_power_restore_power_state(sc);
2129 ATH_UNLOCK(sc);
2140 DPRINTF(sc, ATH_DEBUG_INTR, "%s: status 0x%x\n", __func__, status);
2141 ATH_KTR(sc, ATH_KTR_INTERRUPTS, 1, "ath_intr: mask=0x%.8x", status);
2143 if_ath_alq_post_intr(&sc->sc_alq, status, ah->ah_intrstate,
2147 ATH_KTR(sc, ATH_KTR_INTERRUPTS, 5,
2161 sc->sc_intr_stats.sync_intr[i]++;
2164 status &= sc->sc_imask; /* discard unasked for bits */
2168 ATH_PCU_UNLOCK(sc);
2170 ATH_LOCK(sc);
2171 ath_power_restore_power_state(sc);
2172 ATH_UNLOCK(sc);
2181 sc->sc_intr_cnt++;
2182 ATH_PCU_UNLOCK(sc);
2190 sc->sc_stats.ast_hardware++;
2192 taskqueue_enqueue(sc->sc_tq, &sc->sc_fataltask);
2202 if (sc->sc_tdma) {
2203 if (sc->sc_tdmaswba == 0) {
2204 struct ieee80211com *ic = &sc->sc_ic;
2207 ath_tdma_beacon_send(sc, vap);
2208 sc->sc_tdmaswba =
2211 sc->sc_tdmaswba--;
2215 ath_beacon_proc(sc, 0);
2222 sc->sc_rx.recv_sched(sc, 1);
2228 ATH_KTR(sc, ATH_KTR_ERROR, 0, "ath_intr: RXEOL");
2229 if (! sc->sc_isedma) {
2230 ATH_PCU_LOCK(sc);
2236 sc->sc_stats.ast_rxeol++;
2246 imask = sc->sc_imask;
2260 if (! sc->sc_kickpcu)
2261 sc->sc_rxlink = NULL;
2262 sc->sc_kickpcu = 1;
2263 ATH_PCU_UNLOCK(sc);
2270 sc->sc_rx.recv_sched(sc, 1);
2273 sc->sc_stats.ast_txurn++;
2282 sc->sc_stats.ast_rx_intr++;
2283 sc->sc_rx.recv_sched(sc, 1);
2286 sc->sc_stats.ast_tx_intr++;
2292 if (! sc->sc_isedma) {
2293 ATH_PCU_LOCK(sc);
2295 ath_hal_gettxintrtxqs(sc->sc_ah, &txqs);
2296 ATH_KTR(sc, ATH_KTR_INTERRUPTS, 3,
2299 sc->sc_txq_active,
2300 sc->sc_txq_active | txqs);
2301 sc->sc_txq_active |= txqs;
2302 ATH_PCU_UNLOCK(sc);
2304 taskqueue_enqueue(sc->sc_tq, &sc->sc_txtask);
2307 sc->sc_stats.ast_bmiss++;
2308 taskqueue_enqueue(sc->sc_tq, &sc->sc_bmisstask);
2311 sc->sc_stats.ast_tx_timeout++;
2313 sc->sc_stats.ast_tx_cst++;
2315 sc->sc_stats.ast_mib++;
2316 ATH_PCU_LOCK(sc);
2326 ath_hal_mibevent(ah, &sc->sc_halstats);
2333 if (sc->sc_kickpcu == 0)
2334 ath_hal_intrset(ah, sc->sc_imask);
2335 ATH_PCU_UNLOCK(sc);
2339 ATH_KTR(sc, ATH_KTR_ERROR, 0, "ath_intr: RXORN");
2340 sc->sc_stats.ast_rxorn++;
2349 sc->sc_stats.ast_tsfoor++;
2350 ATH_LOCK(sc);
2351 ath_power_setpower(sc, HAL_PM_AWAKE, 0);
2352 ATH_UNLOCK(sc);
2353 taskqueue_enqueue(sc->sc_tq, &sc->sc_tsfoortask);
2354 device_printf(sc->sc_dev, "%s: TSFOOR\n", __func__);
2357 ath_btcoex_mci_intr(sc);
2360 ATH_PCU_LOCK(sc);
2361 sc->sc_intr_cnt--;
2362 ATH_PCU_UNLOCK(sc);
2364 ATH_LOCK(sc);
2365 ath_power_restore_power_state(sc);
2366 ATH_UNLOCK(sc);
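ath_intr() above follows a common deferral shape: bail out early if the device is resetting or detached, bump sc_intr_cnt under the PCU lock so the reset path can wait for the handler to drain, mask the status against sc_imask, and push the real TX/RX work onto the taskqueue. A compressed sketch with hypothetical names and purely illustrative status bits:

#include <sys/param.h>
#include <sys/lock.h>
#include <sys/mutex.h>
#include <sys/taskqueue.h>

struct my_sc {
    struct mtx pcu_mtx;
    struct taskqueue *tq;
    struct task rxtask, txtask;
    uint32_t imask;     /* interrupt sources we actually asked for */
    int intr_cnt;       /* handlers currently in flight */
    int invalid;        /* set once the device is detached */
};

/* hypothetical stand-in for the HAL interrupt-status read */
static uint32_t
my_hw_getisr(struct my_sc *sc)
{
    (void)sc;
    return (0);
}

static void
my_intr(void *arg)
{
    struct my_sc *sc = arg;
    uint32_t status;

    mtx_lock(&sc->pcu_mtx);
    if (sc->invalid) {
        mtx_unlock(&sc->pcu_mtx);
        return;
    }
    sc->intr_cnt++;             /* reset path waits for this to drop to 0 */
    mtx_unlock(&sc->pcu_mtx);

    status = my_hw_getisr(sc) & sc->imask;  /* discard unasked-for bits */
    if (status & 0x01)          /* "RX done" bit, value illustrative only */
        taskqueue_enqueue(sc->tq, &sc->rxtask);
    if (status & 0x02)          /* "TX done" bit, value illustrative only */
        taskqueue_enqueue(sc->tq, &sc->txtask);

    mtx_lock(&sc->pcu_mtx);
    sc->intr_cnt--;
    mtx_unlock(&sc->pcu_mtx);
}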
2372 struct ath_softc *sc = arg;
2377 if (sc->sc_invalid)
2380 device_printf(sc->sc_dev, "hardware error; resetting\n");
2386 if (ath_hal_getfatalstate(sc->sc_ah, &sp, &len)) {
2389 device_printf(sc->sc_dev,
2393 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_FORCE_COLD);
2399 struct ath_softc *sc = vap->iv_ic->ic_softc;
2415 ATH_LOCK(sc);
2416 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2417 ATH_UNLOCK(sc);
2420 u_int64_t lastrx = sc->sc_lastrx;
2421 u_int64_t tsf = ath_hal_gettsf64(sc->sc_ah);
2426 DPRINTF(sc, ATH_DEBUG_BEACON,
2433 sc->sc_stats.ast_bmiss_phantom++;
2435 ATH_LOCK(sc);
2436 ath_power_restore_power_state(sc);
2437 ATH_UNLOCK(sc);
2454 ATH_LOCK(sc);
2455 ath_power_setpower(sc, HAL_PM_AWAKE, 0);
2456 ath_power_restore_power_state(sc);
2457 ATH_UNLOCK(sc);
2459 DPRINTF(sc, ATH_DEBUG_BEACON,
2465 sc->sc_syncbeacon = 1;
2488 struct ath_softc *sc = arg;
2491 DPRINTF(sc, ATH_DEBUG_ANY, "%s: pending %u\n", __func__, pending);
2493 ATH_LOCK(sc);
2494 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2495 ATH_UNLOCK(sc);
2497 ath_beacon_miss(sc);
2505 if (ath_hal_gethangstate(sc->sc_ah, 0xff, &hangs) && hangs != 0) {
2506 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_BBPANIC);
2507 device_printf(sc->sc_dev,
2510 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_FORCE_COLD);
2511 ieee80211_beacon_miss(&sc->sc_ic);
2515 sc->sc_syncbeacon = 1;
2517 ATH_LOCK(sc);
2518 ath_power_restore_power_state(sc);
2519 ATH_UNLOCK(sc);
2532 struct ath_softc *sc = arg;
2534 DPRINTF(sc, ATH_DEBUG_ANY, "%s: pending %u\n", __func__, pending);
2536 ATH_LOCK(sc);
2537 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2538 ATH_UNLOCK(sc);
2545 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_FORCE_COLD);
2548 sc->sc_syncbeacon = 1;
2550 ATH_LOCK(sc);
2551 ath_power_restore_power_state(sc);
2552 ATH_UNLOCK(sc);
2562 ath_settkipmic(struct ath_softc *sc)
2564 struct ieee80211com *ic = &sc->sc_ic;
2566 if ((ic->ic_cryptocaps & IEEE80211_CRYPTO_TKIP) && !sc->sc_wmetkipmic) {
2568 ath_hal_settkipmic(sc->sc_ah, AH_FALSE);
2571 ath_hal_settkipmic(sc->sc_ah, AH_TRUE);
2578 ath_vap_clear_quiet_ie(struct ath_softc *sc)
2580 struct ieee80211com *ic = &sc->sc_ic;
2592 ath_init(struct ath_softc *sc)
2594 struct ieee80211com *ic = &sc->sc_ic;
2595 struct ath_hal *ah = sc->sc_ah;
2598 ATH_LOCK_ASSERT(sc);
2603 ath_power_setselfgen(sc, HAL_PM_AWAKE);
2604 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2605 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
2611 ath_stop(sc);
2620 ath_settkipmic(sc);
2621 ath_update_chainmasks(sc, ic->ic_curchan);
2622 ath_hal_setchainmasks(sc->sc_ah, sc->sc_cur_txchainmask,
2623 sc->sc_cur_rxchainmask);
2625 if (!ath_hal_reset(ah, sc->sc_opmode, ic->ic_curchan, AH_FALSE,
2627 device_printf(sc->sc_dev,
2632 ATH_RX_LOCK(sc);
2633 sc->sc_rx_stopped = 1;
2634 sc->sc_rx_resetted = 1;
2635 ATH_RX_UNLOCK(sc);
2638 ath_vap_clear_quiet_ie(sc);
2640 ath_chan_change(sc, ic->ic_curchan);
2643 ath_dfs_radar_enable(sc, ic->ic_curchan);
2646 ath_spectral_enable(sc, ic->ic_curchan);
2651 ath_btcoex_enable(sc, ic->ic_curchan);
2657 if (sc->sc_hasenforcetxop && sc->sc_tdma)
2658 ath_hal_setenforcetxop(sc->sc_ah, 1);
2660 ath_hal_setenforcetxop(sc->sc_ah, 0);
2666 sc->sc_diversity = ath_hal_getdiversity(ah);
2667 sc->sc_lastlongcal = ticks;
2668 sc->sc_resetcal = 1;
2669 sc->sc_lastcalreset = 0;
2670 sc->sc_lastani = ticks;
2671 sc->sc_lastshortcal = ticks;
2672 sc->sc_doresetcal = AH_FALSE;
2678 sc->sc_beacons = 0;
2687 if (ath_startrecv(sc) != 0) {
2688 device_printf(sc->sc_dev, "unable to start recv logic\n");
2689 ath_power_restore_power_state(sc);
2696 sc->sc_imask = HAL_INT_RX | HAL_INT_TX
2704 if (sc->sc_isedma)
2705 sc->sc_imask |= (HAL_INT_RXHP | HAL_INT_RXLP);
2712 if (! sc->sc_isedma)
2713 sc->sc_imask |= HAL_INT_RXEOL;
2718 if (sc->sc_btcoex_mci)
2719 sc->sc_imask |= HAL_INT_MCI;
2725 if (sc->sc_needmib && ic->ic_opmode == IEEE80211_M_STA)
2726 sc->sc_imask |= HAL_INT_MIB;
2735 sc->sc_imask |= HAL_INT_TSFOOR;
2739 sc->sc_imask |= HAL_INT_GTT;
2741 DPRINTF(sc, ATH_DEBUG_RESET, "%s: imask=0x%x\n",
2742 __func__, sc->sc_imask);
2744 sc->sc_running = 1;
2745 callout_reset(&sc->sc_wd_ch, hz, ath_watchdog, sc);
2746 ath_hal_intrset(ah, sc->sc_imask);
2748 ath_power_restore_power_state(sc);
2754 ath_stop(struct ath_softc *sc)
2756 struct ath_hal *ah = sc->sc_ah;
2758 ATH_LOCK_ASSERT(sc);
2763 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2765 if (sc->sc_running) {
2782 if (sc->sc_tx99 != NULL)
2783 sc->sc_tx99->stop(sc->sc_tx99);
2785 callout_stop(&sc->sc_wd_ch);
2786 sc->sc_wd_timer = 0;
2787 sc->sc_running = 0;
2788 if (!sc->sc_invalid) {
2789 if (sc->sc_softled) {
2790 callout_stop(&sc->sc_ledtimer);
2791 ath_hal_gpioset(ah, sc->sc_ledpin,
2792 !sc->sc_ledon);
2793 sc->sc_blinking = 0;
2798 if (!sc->sc_invalid) {
2799 ath_stoprecv(sc, 1);
2802 sc->sc_rxlink = NULL;
2803 ath_draintxq(sc, ATH_RESET_DEFAULT);
2804 ath_beacon_free(sc); /* XXX not needed */
2808 ath_power_restore_power_state(sc);
2821 ath_txrx_stop_locked(struct ath_softc *sc)
2825 ATH_UNLOCK_ASSERT(sc);
2826 ATH_PCU_LOCK_ASSERT(sc);
2834 while (sc->sc_rxproc_cnt || sc->sc_txproc_cnt ||
2835 sc->sc_txstart_cnt || sc->sc_intr_cnt) {
2838 msleep(sc, &sc->sc_pcu_mtx, 0, "ath_txrx_stop",
2844 device_printf(sc->sc_dev,
2852 ath_txrx_stop(struct ath_softc *sc)
2854 ATH_UNLOCK_ASSERT(sc);
2855 ATH_PCU_UNLOCK_ASSERT(sc);
2857 ATH_PCU_LOCK(sc);
2858 ath_txrx_stop_locked(sc);
2859 ATH_PCU_UNLOCK(sc);
2864 ath_txrx_start(struct ath_softc *sc)
2867 taskqueue_unblock(sc->sc_tq);
2888 ath_reset_grablock(struct ath_softc *sc, int dowait)
2893 ATH_PCU_LOCK_ASSERT(sc);
2895 if (sc->sc_inreset_cnt == 0) {
2903 ATH_PCU_UNLOCK(sc);
2910 ATH_PCU_LOCK(sc);
2918 sc->sc_inreset_cnt++;
2921 device_printf(sc->sc_dev,
2926 device_printf(sc->sc_dev,
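ath_reset_grablock() above serializes resets against each other: with the PCU lock held it waits (with a bounded retry count) for sc_inreset_cnt to reach zero, then claims the reset path by incrementing it. A minimal sketch of that gate, with my_reset_grablock() and pause() standing in for the real routine and its wait:

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/lock.h>
#include <sys/mutex.h>

struct my_sc {
    struct mtx pcu_mtx;
    int inreset_cnt;    /* > 0 while a reset owns the PCU */
};

/* Call with pcu_mtx held; returns 1 if we now own the reset path. */
static int
my_reset_grablock(struct my_sc *sc, int dowait)
{
    int tries = 0, ok = 0;

    mtx_assert(&sc->pcu_mtx, MA_OWNED);
    while (tries++ < 20) {              /* bounded wait, as the driver warns */
        if (sc->inreset_cnt == 0) {
            ok = 1;
            break;
        }
        if (!dowait)
            break;
        mtx_unlock(&sc->pcu_mtx);
        pause("myreset", hz / 100);     /* back off ~10 ms, then recheck */
        mtx_lock(&sc->pcu_mtx);
    }
    if (ok)
        sc->inreset_cnt++;              /* claim the reset path */
    return (ok);
}

The owner drops the count again on the way out of the reset, as the sc_inreset_cnt-- match in ath_reset() below shows.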
2942 ath_reset(struct ath_softc *sc, ATH_RESET_TYPE reset_type,
2945 struct ieee80211com *ic = &sc->sc_ic;
2946 struct ath_hal *ah = sc->sc_ah;
2950 DPRINTF(sc, ATH_DEBUG_RESET, "%s: called\n", __func__);
2953 ATH_PCU_UNLOCK_ASSERT(sc);
2954 ATH_UNLOCK_ASSERT(sc);
2957 taskqueue_block(sc->sc_tq);
2962 ATH_LOCK(sc);
2963 ath_power_set_power_state(sc, HAL_PM_AWAKE);
2964 ATH_UNLOCK(sc);
2966 ATH_PCU_LOCK(sc);
2974 if (ath_reset_grablock(sc, 1) == 0) {
2975 device_printf(sc->sc_dev, "%s: concurrent reset! Danger!\n",
2986 ath_txrx_stop_locked(sc);
2988 ATH_PCU_UNLOCK(sc);
2995 ath_stoprecv(sc, (reset_type != ATH_RESET_NOLOSS));
2996 ath_rx_flush(sc);
3003 ath_draintxq(sc, reset_type); /* stop xmit side */
3005 ath_settkipmic(sc); /* configure TKIP MIC handling */
3007 ath_update_chainmasks(sc, ic->ic_curchan);
3008 ath_hal_setchainmasks(sc->sc_ah, sc->sc_cur_txchainmask,
3009 sc->sc_cur_rxchainmask);
3010 if (!ath_hal_reset(ah, sc->sc_opmode, ic->ic_curchan, AH_TRUE,
3012 device_printf(sc->sc_dev,
3015 sc->sc_diversity = ath_hal_getdiversity(ah);
3017 ATH_RX_LOCK(sc);
3018 sc->sc_rx_stopped = 1;
3019 sc->sc_rx_resetted = 1;
3020 ATH_RX_UNLOCK(sc);
3023 ath_vap_clear_quiet_ie(sc);
3026 ath_dfs_radar_enable(sc, ic->ic_curchan);
3029 ath_spectral_enable(sc, ic->ic_curchan);
3034 ath_btcoex_enable(sc, ic->ic_curchan);
3040 if (sc->sc_hasenforcetxop && sc->sc_tdma)
3041 ath_hal_setenforcetxop(sc->sc_ah, 1);
3043 ath_hal_setenforcetxop(sc->sc_ah, 0);
3045 if (ath_startrecv(sc) != 0) /* restart recv */
3046 device_printf(sc->sc_dev,
3053 ath_chan_change(sc, ic->ic_curchan);
3054 if (sc->sc_beacons) { /* restart beacons */
3056 if (sc->sc_tdma)
3057 ath_tdma_config(sc, NULL);
3060 ath_beacon_config(sc, NULL);
3075 ATH_PCU_LOCK(sc);
3076 sc->sc_inreset_cnt--;
3077 sc->sc_txstart_cnt++;
3079 ath_hal_intrset(ah, sc->sc_imask);
3080 ATH_PCU_UNLOCK(sc);
3092 ath_txrx_start(sc);
3099 if (ATH_TXQ_SETUP(sc, i)) {
3100 ATH_TXQ_LOCK(&sc->sc_txq[i]);
3101 ath_txq_restart_dma(sc, &sc->sc_txq[i]);
3102 ATH_TXQ_UNLOCK(&sc->sc_txq[i]);
3104 ATH_TX_LOCK(sc);
3105 ath_txq_sched(sc, &sc->sc_txq[i]);
3106 ATH_TX_UNLOCK(sc);
3111 ATH_LOCK(sc);
3112 ath_power_restore_power_state(sc);
3113 ATH_UNLOCK(sc);
3115 ATH_PCU_LOCK(sc);
3116 sc->sc_txstart_cnt--;
3117 ATH_PCU_UNLOCK(sc);
3124 ath_tx_kick(sc); /* restart xmit */
3132 struct ath_softc *sc = ic->ic_softc;
3133 struct ath_hal *ah = sc->sc_ah;
3147 return ath_reset(sc, ATH_RESET_FULL, HAL_RESET_NORMAL);
3151 _ath_getbuf_locked(struct ath_softc *sc, ath_buf_type_t btype)
3155 ATH_TXBUF_LOCK_ASSERT(sc);
3158 bf = TAILQ_FIRST(&sc->sc_txbuf_mgmt);
3160 bf = TAILQ_FIRST(&sc->sc_txbuf);
3163 sc->sc_stats.ast_tx_getnobuf++;
3166 sc->sc_stats.ast_tx_getbusybuf++;
3173 TAILQ_REMOVE(&sc->sc_txbuf_mgmt, bf, bf_list);
3175 TAILQ_REMOVE(&sc->sc_txbuf, bf, bf_list);
3176 sc->sc_txbuf_cnt--;
3183 if (sc->sc_txbuf_cnt < 0) {
3184 device_printf(sc->sc_dev,
3187 sc->sc_txbuf_cnt = 0;
3195 DPRINTF(sc, ATH_DEBUG_XMIT, "%s: %s\n", __func__,
3196 TAILQ_FIRST(&sc->sc_txbuf) == NULL ?
3218 if (sc->sc_isedma) {
3219 bf->bf_descid = sc->sc_txbuf_descid;
3220 sc->sc_txbuf_descid++;
3241 ath_buf_clone(struct ath_softc *sc, struct ath_buf *bf)
3245 tbf = ath_getbuf(sc,
3278 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap,
3280 bus_dmamap_unload(sc->sc_dmat, bf->bf_dmamap);
3293 ath_getbuf(struct ath_softc *sc, ath_buf_type_t btype)
3297 ATH_TXBUF_LOCK(sc);
3298 bf = _ath_getbuf_locked(sc, btype);
3304 bf = _ath_getbuf_locked(sc, ATH_BUFTYPE_NORMAL);
3305 ATH_TXBUF_UNLOCK(sc);
3307 DPRINTF(sc, ATH_DEBUG_XMIT, "%s: stop queue\n", __func__);
3308 sc->sc_stats.ast_tx_qstop++;
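_ath_getbuf_locked()/ath_getbuf() above draw from two TAILQ pools: a small reserved management-frame pool and the main data pool, falling back to the normal pool when no management buffer is left and counting ast_tx_qstop when both are empty. A small sketch of that fallback, with hypothetical names:

#include <sys/queue.h>
#include <stddef.h>

struct my_buf {
    TAILQ_ENTRY(my_buf) list;
};
TAILQ_HEAD(my_bufhead, my_buf);

struct my_txpools {
    struct my_bufhead mgmt;     /* reserved pool for management frames */
    struct my_bufhead normal;   /* main data pool */
    int normal_cnt;             /* in the spirit of sc_txbuf_cnt */
};

static struct my_buf *
my_getbuf(struct my_txpools *p, int want_mgmt)
{
    struct my_buf *bf = NULL;

    if (want_mgmt)
        bf = TAILQ_FIRST(&p->mgmt);
    if (bf != NULL) {
        TAILQ_REMOVE(&p->mgmt, bf, list);
        return (bf);
    }
    /* fall back to (or start from) the normal pool */
    bf = TAILQ_FIRST(&p->normal);
    if (bf == NULL)
        return (NULL);          /* caller stops the queue and counts it */
    TAILQ_REMOVE(&p->normal, bf, list);
    p->normal_cnt--;
    return (bf);
}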
3322 struct ath_softc *sc = ic->ic_softc;
3332 ATH_PCU_LOCK(sc);
3333 if (sc->sc_inreset_cnt > 0) {
3334 DPRINTF(sc, ATH_DEBUG_XMIT,
3336 ATH_PCU_UNLOCK(sc);
3337 sc->sc_stats.ast_tx_qstop++;
3338 ATH_KTR(sc, ATH_KTR_TX, 0, "ath_start_task: OACTIVE, finish");
3341 sc->sc_txstart_cnt++;
3342 ATH_PCU_UNLOCK(sc);
3345 ATH_LOCK(sc);
3346 ath_power_set_power_state(sc, HAL_PM_AWAKE);
3347 ATH_UNLOCK(sc);
3349 ATH_KTR(sc, ATH_KTR_TX, 0, "ath_transmit: start");
3354 ATH_TX_LOCK(sc);
3375 (ATH_NODE(ni)->an_swq_depth > sc->sc_txq_node_maxdepth)) {
3376 sc->sc_stats.ast_tx_nodeq_overflow++;
3398 (sc->sc_txbuf_cnt <= sc->sc_txq_data_minfree)) {
3399 sc->sc_stats.ast_tx_nobuf++;
3415 bf = ath_getbuf(sc, ATH_BUFTYPE_MGMT);
3417 bf = ath_getbuf(sc, ATH_BUFTYPE_NORMAL);
3426 sc->sc_stats.ast_tx_nobuf++;
3444 !ath_txfrag_setup(sc, &frags, m, ni)) {
3445 DPRINTF(sc, ATH_DEBUG_XMIT,
3447 sc->sc_stats.ast_tx_nofrag++;
3516 if (ath_tx_start(sc, ni, bf, m)) {
3522 ATH_TXBUF_LOCK(sc);
3523 ath_returnbuf_head(sc, bf);
3528 ath_txfrag_cleanup(sc, &frags, ni);
3529 ATH_TXBUF_UNLOCK(sc);
3544 ath_tx_update_tim(sc, ni, 1);
3552 DPRINTF(sc, ATH_DEBUG_XMIT,
3570 sc->sc_wd_timer = 5;
3573 ATH_TX_UNLOCK(sc);
3578 ATH_PCU_LOCK(sc);
3579 sc->sc_txstart_cnt--;
3580 ATH_PCU_UNLOCK(sc);
3583 ATH_LOCK(sc);
3584 ath_power_restore_power_state(sc);
3585 ATH_UNLOCK(sc);
3587 ATH_KTR(sc, ATH_KTR_TX, 0, "ath_transmit: finished");
3601 struct ath_softc *sc = vap->iv_ic->ic_softc;
3603 DPRINTF(sc, ATH_DEBUG_KEYCACHE, "%s:\n", __func__);
3604 taskqueue_block(sc->sc_tq);
3610 struct ath_softc *sc = vap->iv_ic->ic_softc;
3612 DPRINTF(sc, ATH_DEBUG_KEYCACHE, "%s:\n", __func__);
3613 taskqueue_unblock(sc->sc_tq);
3619 struct ath_softc *sc = ic->ic_softc;
3623 ATH_LOCK(sc);
3624 ath_power_set_power_state(sc, HAL_PM_AWAKE);
3625 rfilt = ath_calcrxfilter(sc);
3626 ath_hal_setrxfilter(sc->sc_ah, rfilt);
3627 ath_power_restore_power_state(sc);
3628 ATH_UNLOCK(sc);
3630 DPRINTF(sc, ATH_DEBUG_MODE, "%s: RX filter 0x%x\n", __func__, rfilt);
3658 ath_update_mcast_hw(struct ath_softc *sc)
3660 struct ieee80211com *ic = &sc->sc_ic;
3676 ath_hal_setmcastfilter(sc->sc_ah, mfilt[0], mfilt[1]);
3678 DPRINTF(sc, ATH_DEBUG_MODE, "%s: MC filter %08x:%08x\n",
3689 struct ath_softc *sc = ic->ic_softc;
3691 ATH_LOCK(sc);
3692 ath_power_set_power_state(sc, HAL_PM_AWAKE);
3693 ATH_UNLOCK(sc);
3695 ath_update_mcast_hw(sc);
3697 ATH_LOCK(sc);
3698 ath_power_restore_power_state(sc);
3699 ATH_UNLOCK(sc);
3703 ath_mode_init(struct ath_softc *sc)
3705 struct ieee80211com *ic = &sc->sc_ic;
3706 struct ath_hal *ah = sc->sc_ah;
3712 rfilt = ath_calcrxfilter(sc);
3722 ath_update_mcast_hw(sc);
3729 ath_setslottime(struct ath_softc *sc)
3731 struct ieee80211com *ic = &sc->sc_ic;
3732 struct ath_hal *ah = sc->sc_ah;
3749 DPRINTF(sc, ATH_DEBUG_RESET,
3755 ATH_LOCK(sc);
3756 ath_power_set_power_state(sc, HAL_PM_AWAKE);
3758 ath_power_restore_power_state(sc);
3759 sc->sc_updateslot = OK;
3760 ATH_UNLOCK(sc);
3770 struct ath_softc *sc = ic->ic_softc;
3781 sc->sc_updateslot = UPDATE;
3783 ath_setslottime(sc);
3814 struct ath_softc *sc = arg;
3817 device_printf(sc->sc_dev, "%s: resetting\n", __func__);
3819 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_FORCE_COLD);
3828 struct ath_softc *sc = arg;
3831 if (ath_hal_gethangstate(sc->sc_ah, 0xff, &hangs) && hangs != 0)
3832 device_printf(sc->sc_dev, "bb hang detected (0x%x)\n", hangs);
3835 if (if_ath_alq_checkdebug(&sc->sc_alq, ATH_ALQ_STUCK_BEACON))
3836 if_ath_alq_post(&sc->sc_alq, ATH_ALQ_STUCK_BEACON, 0, NULL);
3839 device_printf(sc->sc_dev, "stuck beacon; resetting (bmiss count %u)\n",
3840 sc->sc_bmisscount);
3841 sc->sc_stats.ast_bstuck++;
3846 ath_reset(sc, ATH_RESET_NOLOSS, HAL_RESET_FORCE_COLD);
3850 ath_desc_alloc(struct ath_softc *sc)
3854 error = ath_descdma_setup(sc, &sc->sc_txdma, &sc->sc_txbuf,
3855 "tx", sc->sc_tx_desclen, ath_txbuf, ATH_MAX_SCATTER);
3859 sc->sc_txbuf_cnt = ath_txbuf;
3861 error = ath_descdma_setup(sc, &sc->sc_txdma_mgmt, &sc->sc_txbuf_mgmt,
3862 "tx_mgmt", sc->sc_tx_desclen, ath_txbuf_mgmt,
3865 ath_descdma_cleanup(sc, &sc->sc_txdma, &sc->sc_txbuf);
3874 error = ath_descdma_setup(sc, &sc->sc_bdma, &sc->sc_bbuf,
3875 "beacon", sc->sc_tx_desclen, ATH_BCBUF, 1);
3877 ath_descdma_cleanup(sc, &sc->sc_txdma, &sc->sc_txbuf);
3878 ath_descdma_cleanup(sc, &sc->sc_txdma_mgmt,
3879 &sc->sc_txbuf_mgmt);
3886 ath_desc_free(struct ath_softc *sc)
3889 if (sc->sc_bdma.dd_desc_len != 0)
3890 ath_descdma_cleanup(sc, &sc->sc_bdma, &sc->sc_bbuf);
3891 if (sc->sc_txdma.dd_desc_len != 0)
3892 ath_descdma_cleanup(sc, &sc->sc_txdma, &sc->sc_txbuf);
3893 if (sc->sc_txdma_mgmt.dd_desc_len != 0)
3894 ath_descdma_cleanup(sc, &sc->sc_txdma_mgmt,
3895 &sc->sc_txbuf_mgmt);
3902 struct ath_softc *sc = ic->ic_softc;
3903 const size_t space = sizeof(struct ath_node) + sc->sc_rc->arc_space;
3911 ath_rate_node_init(sc, an);
3915 device_get_nameunit(sc->sc_dev), an);
3919 ath_tx_tid_init(sc, an);
3925 DPRINTF(sc, ATH_DEBUG_NODE, "%s: %6D: an %p\n", __func__, mac, ":", an);
3933 struct ath_softc *sc = ic->ic_softc;
3935 DPRINTF(sc, ATH_DEBUG_NODE, "%s: %6D: an %p\n", __func__,
3939 ath_tx_node_flush(sc, ATH_NODE(ni));
3940 ath_rate_node_cleanup(sc, ATH_NODE(ni));
3941 sc->sc_node_cleanup(ni);
3948 struct ath_softc *sc = ic->ic_softc;
3950 DPRINTF(sc, ATH_DEBUG_NODE, "%s: %6D: an %p\n", __func__,
3953 sc->sc_node_free(ni);
3960 struct ath_softc *sc = ic->ic_softc;
3961 struct ath_hal *ah = sc->sc_ah;
3974 ath_setdefantenna(struct ath_softc *sc, u_int antenna)
3976 struct ath_hal *ah = sc->sc_ah;
3980 if (sc->sc_defant != antenna)
3981 sc->sc_stats.ast_ant_defswitch++;
3982 sc->sc_defant = antenna;
3983 sc->sc_rxotherant = 0;
3987 ath_txq_init(struct ath_softc *sc, struct ath_txq *txq, int qnum)
3995 txq->axq_softc = sc;
3999 ATH_TXQ_LOCK_INIT(sc, txq);
4006 ath_txq_setup(struct ath_softc *sc, int qtype, int subtype)
4008 struct ath_hal *ah = sc->sc_ah;
4029 if (sc->sc_isedma)
4044 if (qnum >= nitems(sc->sc_txq)) {
4045 device_printf(sc->sc_dev,
4047 qnum, nitems(sc->sc_txq));
4051 if (!ATH_TXQ_SETUP(sc, qnum)) {
4052 ath_txq_init(sc, &sc->sc_txq[qnum], qnum);
4053 sc->sc_txqsetup |= 1<<qnum;
4055 return &sc->sc_txq[qnum];
4068 ath_tx_setup(struct ath_softc *sc, int ac, int haltype)
4072 if (ac >= nitems(sc->sc_ac2q)) {
4073 device_printf(sc->sc_dev, "AC %u out of range, max %zu!\n",
4074 ac, nitems(sc->sc_ac2q));
4077 txq = ath_txq_setup(sc, HAL_TX_QUEUE_DATA, haltype);
4080 sc->sc_ac2q[ac] = txq;
4090 ath_txq_update(struct ath_softc *sc, int ac)
4093 struct ieee80211com *ic = &sc->sc_ic;
4094 struct ath_txq *txq = sc->sc_ac2q[ac];
4097 struct ath_hal *ah = sc->sc_ah;
4105 if (sc->sc_tdma) {
4124 qi.tqi_readyTime = sc->sc_tdmaslotlen;
4147 DPRINTF(sc, ATH_DEBUG_RESET,
4153 device_printf(sc->sc_dev, "unable to update hardware queue "
4169 struct ath_softc *sc = ic->ic_softc;
4171 return !ath_txq_update(sc, WME_AC_BE) ||
4172 !ath_txq_update(sc, WME_AC_BK) ||
4173 !ath_txq_update(sc, WME_AC_VI) ||
4174 !ath_txq_update(sc, WME_AC_VO) ? EIO : 0;
4181 ath_tx_cleanupq(struct ath_softc *sc, struct ath_txq *txq)
4184 ath_hal_releasetxqueue(sc->sc_ah, txq->axq_qnum);
4185 sc->sc_txqsetup &= ~(1<<txq->axq_qnum);
4193 ath_tx_cleanup(struct ath_softc *sc)
4197 ATH_TXBUF_LOCK_DESTROY(sc);
4199 if (ATH_TXQ_SETUP(sc, i))
4200 ath_tx_cleanupq(sc, &sc->sc_txq[i]);
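ath_txq_setup()/ath_tx_setup() above record which hardware TX queues were created in the sc_txqsetup bitmap (checked later through ATH_TXQ_SETUP()) and map each WME access category onto one of them via sc_ac2q[]. A small sketch of that bookkeeping, with hypothetical limits:

#include <sys/types.h>

#define MY_NUM_TXQ  10      /* assumption: number of hardware queues */
#define MY_NUM_AC   4       /* BK, BE, VI, VO */

struct my_txstate {
    uint32_t txqsetup;          /* bit i set => h/w queue i initialized */
    int ac2q[MY_NUM_AC];        /* access category -> h/w queue number */
};

#define MY_TXQ_SETUP(st, i) (((st)->txqsetup & (1u << (i))) != 0)

static int
my_tx_setup(struct my_txstate *st, int ac, int qnum)
{
    if (ac >= MY_NUM_AC || qnum >= MY_NUM_TXQ)
        return (0);             /* out of range, as the driver warns */
    st->txqsetup |= 1u << qnum; /* mirrors sc_txqsetup |= 1<<qnum */
    st->ac2q[ac] = qnum;
    return (1);
}

static void
my_tx_cleanup(struct my_txstate *st)
{
    int i;

    for (i = 0; i < MY_NUM_TXQ; i++)
        if (MY_TXQ_SETUP(st, i))
            st->txqsetup &= ~(1u << i); /* release queue i */
}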
4208 ath_tx_findrix(const struct ath_softc *sc, uint8_t rate)
4210 int rix = sc->sc_rixmap[rate];
4216 ath_tx_update_stats(struct ath_softc *sc, struct ath_tx_status *ts,
4220 struct ieee80211com *ic = &sc->sc_ic;
4230 sc->sc_stats.ast_ant_tx[txant]++;
4231 sc->sc_ant_tx[txant]++;
4233 sc->sc_stats.ast_tx_altrate++;
4244 sc->sc_stats.ast_tx_xretries++;
4246 sc->sc_stats.ast_tx_fifoerr++;
4248 sc->sc_stats.ast_tx_filtered++;
4250 sc->sc_stats.ast_tx_xtxop++;
4252 sc->sc_stats.ast_tx_timerexpired++;
4255 sc->sc_stats.ast_ff_txerr++;
4259 sc->sc_stats.ast_tx_desccfgerr++;
4269 sc->sc_stats.ast_tx_data_underrun++;
4271 sc->sc_stats.ast_tx_delim_underrun++;
4275 sc->sc_stats.ast_tx_shortretry += sr;
4276 sc->sc_stats.ast_tx_longretry += lr;
4286 ath_tx_default_comp(struct ath_softc *sc, struct ath_buf *bf, int fail)
4299 device_printf(sc->sc_dev,
4306 device_printf(sc->sc_dev,
4328 ATH_TX_LOCK(sc);
4329 ath_tx_update_tim(sc, bf->bf_node, 0);
4330 ATH_TX_UNLOCK(sc);
4339 ath_tx_freebuf(sc, bf, st);
4346 ath_tx_update_ratectrl(struct ath_softc *sc, struct ieee80211_node *ni,
4367 ath_rate_tx_complete(sc, an, rc, ts, frmlen, rc_framelen,
4381 ath_tx_process_buf_completion(struct ath_softc *sc, struct ath_txq *txq,
4386 ATH_TX_UNLOCK_ASSERT(sc);
4392 ath_tx_update_stats(sc, ts, bf);
4413 ath_tx_update_ratectrl(sc, ni,
4420 ath_tx_default_comp(sc, bf, 0);
4422 bf->bf_comp(sc, bf, 0);
4431 ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq, int dosched)
4433 struct ath_hal *ah = sc->sc_ah;
4439 struct ieee80211com *ic = &sc->sc_ic;
4444 DPRINTF(sc, ATH_DEBUG_TX_PROC, "%s: tx queue %u head %p link %p\n",
4446 (caddr_t)(uintptr_t) ath_hal_gettxbuf(sc->sc_ah, txq->axq_qnum),
4449 ATH_KTR(sc, ATH_KTR_TXCOMP, 4,
4452 (caddr_t)(uintptr_t) ath_hal_gettxbuf(sc->sc_ah, txq->axq_qnum),
4470 if (sc->sc_debug & ATH_DEBUG_XMIT_DESC)
4471 ath_printtxbuf(sc, bf, txq->axq_qnum, 0,
4473 else if ((sc->sc_debug & ATH_DEBUG_RESET) && (dosched == 0))
4474 ath_printtxbuf(sc, bf, txq->axq_qnum, 0,
4478 if (if_ath_alq_checkdebug(&sc->sc_alq,
4480 if_ath_alq_post(&sc->sc_alq, ATH_ALQ_EDMA_TXSTATUS,
4481 sc->sc_tx_statuslen,
4487 ATH_KTR(sc, ATH_KTR_TXCOMP, 3,
4499 device_printf(sc->sc_dev,
4507 device_printf(sc->sc_dev,
4537 ATH_KTR(sc, ATH_KTR_TXCOMP, 5,
4548 sc->sc_stats.ast_tx_rssi = ts->ts_rssi;
4549 ATH_RSSI_LPF(sc->sc_halstats.ns_avgtxrssi,
4559 ath_tx_process_buf_completion(sc, txq, ts, bf);
4573 ATH_TX_LOCK(sc);
4574 ath_txq_sched(sc, txq);
4575 ATH_TX_UNLOCK(sc);
4578 ATH_KTR(sc, ATH_KTR_TXCOMP, 1,
4594 struct ath_softc *sc = arg;
4597 ATH_PCU_LOCK(sc);
4598 sc->sc_txproc_cnt++;
4599 txqs = sc->sc_txq_active;
4600 sc->sc_txq_active &= ~txqs;
4601 ATH_PCU_UNLOCK(sc);
4603 ATH_LOCK(sc);
4604 ath_power_set_power_state(sc, HAL_PM_AWAKE);
4605 ATH_UNLOCK(sc);
4607 ATH_KTR(sc, ATH_KTR_TXCOMP, 1,
4610 if (TXQACTIVE(txqs, 0) && ath_tx_processq(sc, &sc->sc_txq[0], 1))
4612 sc->sc_lastrx = ath_hal_gettsf64(sc->sc_ah);
4613 if (TXQACTIVE(txqs, sc->sc_cabq->axq_qnum))
4614 ath_tx_processq(sc, sc->sc_cabq, 1);
4615 sc->sc_wd_timer = 0;
4617 if (sc->sc_softled)
4618 ath_led_event(sc, sc->sc_txrix);
4620 ATH_PCU_LOCK(sc);
4621 sc->sc_txproc_cnt--;
4622 ATH_PCU_UNLOCK(sc);
4624 ATH_LOCK(sc);
4625 ath_power_restore_power_state(sc);
4626 ATH_UNLOCK(sc);
4628 ath_tx_kick(sc);
4638 struct ath_softc *sc = arg;
4642 ATH_PCU_LOCK(sc);
4643 sc->sc_txproc_cnt++;
4644 txqs = sc->sc_txq_active;
4645 sc->sc_txq_active &= ~txqs;
4646 ATH_PCU_UNLOCK(sc);
4648 ATH_LOCK(sc);
4649 ath_power_set_power_state(sc, HAL_PM_AWAKE);
4650 ATH_UNLOCK(sc);
4652 ATH_KTR(sc, ATH_KTR_TXCOMP, 1,
4660 nacked += ath_tx_processq(sc, &sc->sc_txq[0], 1);
4662 nacked += ath_tx_processq(sc, &sc->sc_txq[1], 1);
4664 nacked += ath_tx_processq(sc, &sc->sc_txq[2], 1);
4666 nacked += ath_tx_processq(sc, &sc->sc_txq[3], 1);
4667 if (TXQACTIVE(txqs, sc->sc_cabq->axq_qnum))
4668 ath_tx_processq(sc, sc->sc_cabq, 1);
4670 sc->sc_lastrx = ath_hal_gettsf64(sc->sc_ah);
4672 sc->sc_wd_timer = 0;
4674 if (sc->sc_softled)
4675 ath_led_event(sc, sc->sc_txrix);
4677 ATH_PCU_LOCK(sc);
4678 sc->sc_txproc_cnt--;
4679 ATH_PCU_UNLOCK(sc);
4681 ATH_LOCK(sc);
4682 ath_power_restore_power_state(sc);
4683 ATH_UNLOCK(sc);
4685 ath_tx_kick(sc);
4694 struct ath_softc *sc = arg;
4698 ATH_PCU_LOCK(sc);
4699 sc->sc_txproc_cnt++;
4700 txqs = sc->sc_txq_active;
4701 sc->sc_txq_active &= ~txqs;
4702 ATH_PCU_UNLOCK(sc);
4704 ATH_LOCK(sc);
4705 ath_power_set_power_state(sc, HAL_PM_AWAKE);
4706 ATH_UNLOCK(sc);
4708 ATH_KTR(sc, ATH_KTR_TXCOMP, 1, "ath_tx_proc: txqs=0x%08x", txqs);
4715 if (ATH_TXQ_SETUP(sc, i) && TXQACTIVE(txqs, i))
4716 nacked += ath_tx_processq(sc, &sc->sc_txq[i], 1);
4718 sc->sc_lastrx = ath_hal_gettsf64(sc->sc_ah);
4720 sc->sc_wd_timer = 0;
4722 if (sc->sc_softled)
4723 ath_led_event(sc, sc->sc_txrix);
4725 ATH_PCU_LOCK(sc);
4726 sc->sc_txproc_cnt--;
4727 ATH_PCU_UNLOCK(sc);
4729 ATH_LOCK(sc);
4730 ath_power_restore_power_state(sc);
4731 ATH_UNLOCK(sc);
4733 ath_tx_kick(sc);
4743 struct ath_softc *sc = arg;
4747 ATH_PCU_LOCK(sc);
4749 if (sc->sc_inreset_cnt > 0) {
4750 device_printf(sc->sc_dev,
4752 ATH_PCU_UNLOCK(sc);
4756 sc->sc_txproc_cnt++;
4757 ATH_PCU_UNLOCK(sc);
4759 ATH_LOCK(sc);
4760 ath_power_set_power_state(sc, HAL_PM_AWAKE);
4761 ATH_UNLOCK(sc);
4763 ATH_TX_LOCK(sc);
4765 if (ATH_TXQ_SETUP(sc, i)) {
4766 ath_txq_sched(sc, &sc->sc_txq[i]);
4769 ATH_TX_UNLOCK(sc);
4771 ATH_LOCK(sc);
4772 ath_power_restore_power_state(sc);
4773 ATH_UNLOCK(sc);
4775 ATH_PCU_LOCK(sc);
4776 sc->sc_txproc_cnt--;
4777 ATH_PCU_UNLOCK(sc);
4781 ath_returnbuf_tail(struct ath_softc *sc, struct ath_buf *bf)
4784 ATH_TXBUF_LOCK_ASSERT(sc);
4787 TAILQ_INSERT_TAIL(&sc->sc_txbuf_mgmt, bf, bf_list);
4789 TAILQ_INSERT_TAIL(&sc->sc_txbuf, bf, bf_list);
4790 sc->sc_txbuf_cnt++;
4791 if (sc->sc_txbuf_cnt > ath_txbuf) {
4792 device_printf(sc->sc_dev,
4796 sc->sc_txbuf_cnt = ath_txbuf;
4802 ath_returnbuf_head(struct ath_softc *sc, struct ath_buf *bf)
4805 ATH_TXBUF_LOCK_ASSERT(sc);
4808 TAILQ_INSERT_HEAD(&sc->sc_txbuf_mgmt, bf, bf_list);
4810 TAILQ_INSERT_HEAD(&sc->sc_txbuf, bf, bf_list);
4811 sc->sc_txbuf_cnt++;
4812 if (sc->sc_txbuf_cnt > ATH_TXBUF) {
4813 device_printf(sc->sc_dev,
4817 sc->sc_txbuf_cnt = ATH_TXBUF;
4826 ath_txq_freeholdingbuf(struct ath_softc *sc, struct ath_txq *txq)
4828 ATH_TXBUF_UNLOCK_ASSERT(sc);
4836 ATH_TXBUF_LOCK(sc);
4837 ath_returnbuf_tail(sc, txq->axq_holdingbf);
4838 ATH_TXBUF_UNLOCK(sc);
4848 ath_txq_addholdingbuf(struct ath_softc *sc, struct ath_buf *bf)
4852 txq = &sc->sc_txq[bf->bf_state.bfs_tx_queue];
4854 ATH_TXBUF_UNLOCK_ASSERT(sc);
4861 device_printf(sc->sc_dev, "%s: bf=%p: invalid tx queue (%d)\n",
4866 ath_returnbuf_tail(sc, bf);
4869 ath_txq_freeholdingbuf(sc, txq);
4891 ath_freebuf(struct ath_softc *sc, struct ath_buf *bf)
4895 txq = &sc->sc_txq[bf->bf_state.bfs_tx_queue];
4905 ath_txq_addholdingbuf(sc, bf);
4913 ATH_TXBUF_LOCK(sc);
4914 ath_returnbuf_tail(sc, bf);
4915 ATH_TXBUF_UNLOCK(sc);
4925 ath_tx_freebuf(struct ath_softc *sc, struct ath_buf *bf, int status)
4936 bus_dmamap_sync(sc->sc_dmat, bf->bf_dmamap,
4938 bus_dmamap_unload(sc->sc_dmat, bf->bf_dmamap);
4945 ath_freebuf(sc, bf);
4952 ath_tx_draintxq_get_one(struct ath_softc *sc, struct ath_txq *txq)
4970 device_printf(sc->sc_dev,
4986 device_printf(sc->sc_dev,
5007 ath_tx_draintxq(struct ath_softc *sc, struct ath_txq *txq)
5010 struct ath_hal *ah = sc->sc_ah;
5021 bf = ath_tx_draintxq_get_one(sc, txq);
5029 if (sc->sc_debug & ATH_DEBUG_RESET) {
5030 struct ieee80211com *ic = &sc->sc_ic;
5039 if (! sc->sc_isedma) {
5044 ath_printtxbuf(sc, bf, txq->axq_qnum, ix, status);
5061 bf->bf_comp(sc, bf, 1);
5063 ath_tx_default_comp(sc, bf, 1);
5070 ath_txq_freeholdingbuf(sc, txq);
5077 ath_tx_txq_drain(sc, txq);
5081 ath_tx_stopdma(struct ath_softc *sc, struct ath_txq *txq)
5083 struct ath_hal *ah = sc->sc_ah;
5087 DPRINTF(sc, ATH_DEBUG_RESET,
5104 if ((sc->sc_debug & ATH_DEBUG_RESET)
5106 ath_printtxbuf(sc, txq->axq_holdingbf, txq->axq_qnum, 0, 0);
5112 ath_stoptxdma(struct ath_softc *sc)
5114 struct ath_hal *ah = sc->sc_ah;
5118 if (sc->sc_invalid)
5121 if (!sc->sc_invalid) {
5123 DPRINTF(sc, ATH_DEBUG_RESET, "%s: tx queue [%u] %p, link %p\n",
5124 __func__, sc->sc_bhalq,
5125 (caddr_t)(uintptr_t) ath_hal_gettxbuf(ah, sc->sc_bhalq),
5129 (void) ath_hal_stoptxdma(ah, sc->sc_bhalq);
5133 if (ATH_TXQ_SETUP(sc, i)) {
5134 ATH_TXQ_LOCK(&sc->sc_txq[i]);
5135 ath_tx_stopdma(sc, &sc->sc_txq[i]);
5136 ATH_TXQ_UNLOCK(&sc->sc_txq[i]);
5146 ath_tx_dump(struct ath_softc *sc, struct ath_txq *txq)
5148 struct ath_hal *ah = sc->sc_ah;
5152 if (! (sc->sc_debug & ATH_DEBUG_RESET))
5155 device_printf(sc->sc_dev, "%s: Q%d: begin\n",
5158 ath_printtxbuf(sc, bf, txq->axq_qnum, i,
5163 device_printf(sc->sc_dev, "%s: Q%d: end\n",
5172 ath_legacy_tx_drain(struct ath_softc *sc, ATH_RESET_TYPE reset_type)
5174 struct ath_hal *ah = sc->sc_ah;
5178 (void) ath_stoptxdma(sc);
5188 if (ATH_TXQ_SETUP(sc, i)) {
5190 if (sc->sc_debug & ATH_DEBUG_RESET)
5191 ath_tx_dump(sc, &sc->sc_txq[i]);
5194 ath_tx_processq(sc, &sc->sc_txq[i], 0);
5195 ATH_TXQ_LOCK(&sc->sc_txq[i]);
5200 ath_txq_freeholdingbuf(sc, &sc->sc_txq[i]);
5207 bf_last = ATH_TXQ_LAST(&sc->sc_txq[i],
5212 &sc->sc_txq[i].axq_link);
5214 sc->sc_txq[i].axq_link = NULL;
5216 ATH_TXQ_UNLOCK(&sc->sc_txq[i]);
5218 ath_tx_draintxq(sc, &sc->sc_txq[i]);
5222 if (sc->sc_debug & ATH_DEBUG_RESET) {
5223 struct ath_buf *bf = TAILQ_FIRST(&sc->sc_bbuf);
5225 ath_printtxbuf(sc, bf, sc->sc_bhalq, 0,
5228 ieee80211_dump_pkt(&sc->sc_ic,
5234 sc->sc_wd_timer = 0;
5241 ath_chan_change(struct ath_softc *sc, struct ieee80211_channel *chan)
5250 if (mode != sc->sc_curmode)
5251 ath_setcurmode(sc, mode);
5252 sc->sc_curchan = chan;
5262 ath_chan_set(struct ath_softc *sc, struct ieee80211_channel *chan)
5264 struct ieee80211com *ic = &sc->sc_ic;
5265 struct ath_hal *ah = sc->sc_ah;
5269 ATH_PCU_UNLOCK_ASSERT(sc);
5270 ATH_UNLOCK_ASSERT(sc);
5273 taskqueue_block(sc->sc_tq);
5275 ATH_PCU_LOCK(sc);
5281 if (ath_reset_grablock(sc, 1) == 0) {
5282 device_printf(sc->sc_dev, "%s: concurrent reset! Danger!\n",
5287 ath_txrx_stop_locked(sc);
5289 ATH_PCU_UNLOCK(sc);
5291 DPRINTF(sc, ATH_DEBUG_RESET, "%s: %u (%u MHz, flags 0x%x)\n",
5294 if (chan != sc->sc_curchan) {
5305 ath_stoprecv(sc, 1); /* turn off frame recv */
5309 ath_rx_flush(sc);
5310 ath_draintxq(sc, ATH_RESET_NOLOSS);
5314 ath_draintxq(sc, ATH_RESET_FULL); /* clear pending tx frames */
5316 ath_update_chainmasks(sc, chan);
5317 ath_hal_setchainmasks(sc->sc_ah, sc->sc_cur_txchainmask,
5318 sc->sc_cur_rxchainmask);
5319 if (!ath_hal_reset(ah, sc->sc_opmode, chan, AH_TRUE,
5321 device_printf(sc->sc_dev, "%s: unable to reset "
5328 sc->sc_diversity = ath_hal_getdiversity(ah);
5330 ATH_RX_LOCK(sc);
5331 sc->sc_rx_stopped = 1;
5332 sc->sc_rx_resetted = 1;
5333 ATH_RX_UNLOCK(sc);
5336 ath_vap_clear_quiet_ie(sc);
5339 ath_dfs_radar_enable(sc, chan);
5342 ath_spectral_enable(sc, chan);
5348 ath_btcoex_enable(sc, ic->ic_curchan);
5354 if (sc->sc_hasenforcetxop && sc->sc_tdma)
5355 ath_hal_setenforcetxop(sc->sc_ah, 1);
5357 ath_hal_setenforcetxop(sc->sc_ah, 0);
5362 if (ath_startrecv(sc) != 0) {
5363 device_printf(sc->sc_dev,
5373 ath_chan_change(sc, chan);
5379 if (sc->sc_beacons) { /* restart beacons */
5381 if (sc->sc_tdma)
5382 ath_tdma_config(sc, NULL);
5385 ath_beacon_config(sc, NULL);
5392 ath_hal_intrset(ah, sc->sc_imask);
5397 ATH_PCU_LOCK(sc);
5398 sc->sc_inreset_cnt--;
5400 ath_hal_intrset(ah, sc->sc_imask);
5401 ATH_PCU_UNLOCK(sc);
5403 ath_txrx_start(sc);
5416 struct ath_softc *sc = arg;
5417 struct ath_hal *ah = sc->sc_ah;
5418 struct ieee80211com *ic = &sc->sc_ic;
5423 ATH_LOCK_ASSERT(sc);
5428 ath_power_set_power_state(sc, HAL_PM_AWAKE);
5431 if (sc->sc_inreset_cnt)
5436 longCal = (ticks - sc->sc_lastlongcal >= ath_longcalinterval*hz);
5437 aniCal = (ticks - sc->sc_lastani >= ath_anicalinterval*hz/1000);
5438 if (sc->sc_doresetcal)
5439 shortCal = (ticks - sc->sc_lastshortcal >= ath_shortcalinterval*hz/1000);
5441 DPRINTF(sc, ATH_DEBUG_CALIBRATE, "%s: shortCal=%d; longCal=%d; aniCal=%d\n", __func__, shortCal, longCal, aniCal);
5443 sc->sc_stats.ast_ani_cal++;
5444 sc->sc_lastani = ticks;
5445 ath_hal_ani_poll(ah, sc->sc_curchan);
5449 sc->sc_stats.ast_per_cal++;
5450 sc->sc_lastlongcal = ticks;
5456 DPRINTF(sc, ATH_DEBUG_CALIBRATE,
5458 sc->sc_stats.ast_per_rfgain++;
5459 sc->sc_resetcal = 0;
5460 sc->sc_doresetcal = AH_TRUE;
5461 taskqueue_enqueue(sc->sc_tq, &sc->sc_resettask);
5462 callout_reset(&sc->sc_cal_ch, 1, ath_calibrate, sc);
5463 ath_power_restore_power_state(sc);
5470 if (sc->sc_resetcal) {
5471 (void) ath_hal_calreset(ah, sc->sc_curchan);
5472 sc->sc_lastcalreset = ticks;
5473 sc->sc_lastshortcal = ticks;
5474 sc->sc_resetcal = 0;
5475 sc->sc_doresetcal = AH_TRUE;
5482 if (ath_hal_calibrateN(ah, sc->sc_curchan, longCal, &isCalDone)) {
5490 DPRINTF(sc, ATH_DEBUG_ANY,
5492 __func__, sc->sc_curchan->ic_freq);
5493 sc->sc_stats.ast_per_calfail++;
5510 sc->sc_lastshortcal = ticks;
5522 sc->sc_lastshortcal = ticks;
5524 if (sc->sc_opmode != HAL_M_HOSTAP)
5526 sc->sc_doresetcal = AH_TRUE;
5530 if (sc->sc_lastcalreset == 0)
5531 sc->sc_lastcalreset = sc->sc_lastlongcal;
5532 else if (ticks - sc->sc_lastcalreset >= ath_resetcalinterval*hz)
5533 sc->sc_resetcal = 1; /* setup reset next trip */
5534 sc->sc_doresetcal = AH_FALSE;
5541 DPRINTF(sc, ATH_DEBUG_CALIBRATE, "%s: next +%u (%sisCalDone)\n",
5543 callout_reset(&sc->sc_cal_ch, nextcal, ath_calibrate, sc);
5545 DPRINTF(sc, ATH_DEBUG_CALIBRATE, "%s: calibration disabled\n",
5552 ath_power_restore_power_state(sc);
5558 struct ath_softc *sc = ic->ic_softc;
5559 struct ath_hal *ah = sc->sc_ah;
5565 ATH_LOCK(sc);
5566 sc->sc_scanning = 1;
5567 sc->sc_syncbeacon = 0;
5568 rfilt = ath_calcrxfilter(sc);
5569 ATH_UNLOCK(sc);
5571 ATH_PCU_LOCK(sc);
5574 ATH_PCU_UNLOCK(sc);
5576 DPRINTF(sc, ATH_DEBUG_STATE, "%s: RX filter 0x%x bssid %s aid 0\n",
5583 struct ath_softc *sc = ic->ic_softc;
5584 struct ath_hal *ah = sc->sc_ah;
5587 ATH_LOCK(sc);
5588 sc->sc_scanning = 0;
5589 rfilt = ath_calcrxfilter(sc);
5590 ATH_UNLOCK(sc);
5592 ATH_PCU_LOCK(sc);
5594 ath_hal_setassocid(ah, sc->sc_curbssid, sc->sc_curaid);
5597 ATH_PCU_UNLOCK(sc);
5599 DPRINTF(sc, ATH_DEBUG_STATE, "%s: RX filter 0x%x bssid %s aid 0x%x\n",
5600 __func__, rfilt, ether_sprintf(sc->sc_curbssid),
5601 sc->sc_curaid);
5623 struct ath_softc *sc = ic->ic_softc;
5625 //DPRINTF(sc, ATH_DEBUG_STATE, "%s: called\n", __func__);
5626 device_printf(sc->sc_dev, "%s: called\n", __func__);
5676 struct ath_softc *sc = ic->ic_softc;
5683 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5686 ath_hal_set_quiet(sc->sc_ah, 0, 0, 0, HAL_QUIET_DISABLE);
5709 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5722 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5732 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5739 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5744 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5754 DPRINTF(sc, ATH_DEBUG_QUIETIE,
5756 ath_hal_set_quiet(sc->sc_ah,
5776 struct ath_softc *sc = ic->ic_softc;
5778 ATH_LOCK(sc);
5779 ath_power_set_power_state(sc, HAL_PM_AWAKE);
5780 ATH_UNLOCK(sc);
5782 (void) ath_chan_set(sc, ic->ic_curchan);
5789 ATH_LOCK(sc);
5790 if (!sc->sc_scanning && ic->ic_curchan == ic->ic_bsschan)
5791 sc->sc_syncbeacon = 1;
5792 ath_power_restore_power_state(sc);
5793 ATH_UNLOCK(sc);
5818 struct ath_softc *sc = ic->ic_softc;
5820 struct ath_hal *ah = sc->sc_ah;
5838 DPRINTF(sc, ATH_DEBUG_STATE, "%s: %s -> %s\n", __func__,
5852 ATH_LOCK(sc);
5862 ath_power_setselfgen(sc, HAL_PM_AWAKE);
5867 ath_power_set_power_state(sc, HAL_PM_AWAKE);
5873 callout_stop(&sc->sc_cal_ch);
5874 ATH_UNLOCK(sc);
5890 ATH_LOCK(sc);
5891 ath_power_setselfgen(sc, HAL_PM_AWAKE);
5892 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
5893 ATH_UNLOCK(sc);
5896 sc->sc_imask &~ (HAL_INT_SWBA | HAL_INT_BMISS));
5897 sc->sc_imask &= ~(HAL_INT_SWBA | HAL_INT_BMISS);
5898 sc->sc_beacons = 0;
5899 taskqueue_unblock(sc->sc_tq);
5903 rfilt = ath_calcrxfilter(sc);
5913 sc->sc_curaid = ni->ni_associd;
5914 IEEE80211_ADDR_COPY(sc->sc_curbssid, ni->ni_bssid);
5915 ath_hal_setassocid(ah, sc->sc_curbssid, sc->sc_curaid);
5917 DPRINTF(sc, ATH_DEBUG_STATE, "%s: RX filter 0x%x bssid %s aid 0x%x\n",
5918 __func__, rfilt, ether_sprintf(sc->sc_curbssid), sc->sc_curaid);
5997 DPRINTF(sc, ATH_DEBUG_STATE,
6030 ath_hal_stoptxdma(ah, sc->sc_bhalq);
6032 error = ath_beacon_alloc(sc, ni);
6045 sc->sc_syncbeacon = 1;
6046 } else if (!sc->sc_beacons) {
6049 ath_tdma_config(sc, vap);
6052 ath_beacon_config(sc, vap);
6053 sc->sc_beacons = 1;
6080 DPRINTF(sc, ATH_DEBUG_BEACON,
6082 sc->sc_syncbeacon = 1;
6084 ath_beacon_config(sc, vap);
6103 sc->sc_beacons = 1;
6113 ath_hal_intrset(ah, sc->sc_imask);
6128 sc->sc_halstats.ns_avgbrssi = ATH_RSSI_DUMMY_MARKER;
6129 sc->sc_halstats.ns_avgrssi = ATH_RSSI_DUMMY_MARKER;
6130 sc->sc_halstats.ns_avgtxrssi = ATH_RSSI_DUMMY_MARKER;
6135 ATH_LOCK(sc);
6136 ath_power_setselfgen(sc, HAL_PM_AWAKE);
6137 ath_power_setpower(sc, HAL_PM_AWAKE, 1);
6145 callout_reset(&sc->sc_cal_ch, 1, ath_calibrate, sc);
6147 DPRINTF(sc, ATH_DEBUG_CALIBRATE,
6150 ATH_UNLOCK(sc);
6152 taskqueue_unblock(sc->sc_tq);
6165 sc->sc_imask &= ~(HAL_INT_SWBA | HAL_INT_BMISS);
6167 ath_hal_intrset(ah, sc->sc_imask &~ HAL_INT_GLOBAL);
6168 taskqueue_block(sc->sc_tq);
6169 sc->sc_beacons = 0;
6188 if (sc->sc_nvaps == 1 &&
6190 DPRINTF(sc, ATH_DEBUG_BEACON, "%s: syncbeacon=%d\n", __func__, sc->sc_syncbeacon);
6191 ATH_LOCK(sc);
6196 ath_power_setselfgen(sc, HAL_PM_NETWORK_SLEEP);
6206 if (sc->sc_syncbeacon == 0) {
6207 ath_power_setpower(sc, HAL_PM_NETWORK_SLEEP, 1);
6209 ATH_UNLOCK(sc);
6235 ATH_LOCK(sc);
6238 callout_reset(&sc->sc_cal_ch, 1, ath_calibrate, sc);
6240 DPRINTF(sc, ATH_DEBUG_CALIBRATE,
6243 ATH_UNLOCK(sc);
6252 ATH_LOCK(sc);
6253 ath_power_restore_power_state(sc);
6254 ATH_UNLOCK(sc);
6270 struct ath_softc *sc = vap->iv_ic->ic_softc;
6289 ath_keyset(sc, vap, &ni->ni_ucastkey, vap->iv_bss);
6303 struct ath_softc *sc = vap->iv_ic->ic_softc;
6306 an->an_mcastrix = ath_tx_findrix(sc, tp->mcastrate);
6307 an->an_mgmtrix = ath_tx_findrix(sc, tp->mgmtrate);
6309 DPRINTF(sc, ATH_DEBUG_NODE, "%s: %6D: reassoc; isnew=%d, is_powersave=%d\n",
6317 ath_rate_newassoc(sc, an, isnew);
6321 (vap->iv_flags & IEEE80211_F_PRIVACY) == 0 && sc->sc_hasclrkey &&
6335 DPRINTF(sc, ATH_DEBUG_NODE,
6343 ath_tx_node_reassoc(sc, an);
6347 ath_tx_node_wakeup(sc, an);
6355 struct ath_softc *sc = ic->ic_softc;
6356 struct ath_hal *ah = sc->sc_ah;
6359 DPRINTF(sc, ATH_DEBUG_REGDOMAIN,
6367 DPRINTF(sc, ATH_DEBUG_REGDOMAIN, "%s: failed, status %u\n",
6379 struct ath_softc *sc = ic->ic_softc;
6380 struct ath_hal *ah = sc->sc_ah;
6382 DPRINTF(sc, ATH_DEBUG_REGDOMAIN, "%s: use rd %u cc %d\n",
6392 ath_getchannels(struct ath_softc *sc)
6394 struct ieee80211com *ic = &sc->sc_ic;
6395 struct ath_hal *ah = sc->sc_ah;
6404 device_printf(sc->sc_dev,
6409 (void) ath_hal_getregdomain(ah, &sc->sc_eerd);
6410 ath_hal_getcountrycode(ah, &sc->sc_eecc); /* NB: cannot fail */
6413 ic->ic_regdomain.regdomain = (uint16_t) sc->sc_eerd;
6414 ic->ic_regdomain.country = (uint16_t) sc->sc_eecc;
6421 DPRINTF(sc, ATH_DEBUG_REGDOMAIN,
6423 __func__, sc->sc_eerd, sc->sc_eecc,
6430 ath_rate_setup(struct ath_softc *sc, u_int mode)
6432 struct ath_hal *ah = sc->sc_ah;
6467 DPRINTF(sc, ATH_DEBUG_ANY, "%s: invalid mode %u\n",
6471 sc->sc_rates[mode] = rt;
6476 ath_setcurmode(struct ath_softc *sc, enum ieee80211_phymode mode)
6503 memset(sc->sc_rixmap, 0xff, sizeof(sc->sc_rixmap));
6504 rt = sc->sc_rates[mode];
6509 sc->sc_rixmap[ieeerate] = i;
6511 sc->sc_rixmap[ieeerate | IEEE80211_RATE_MCS] = i;
6513 memset(sc->sc_hwmap, 0, sizeof(sc->sc_hwmap));
6514 for (i = 0; i < nitems(sc->sc_hwmap); i++) {
6516 sc->sc_hwmap[i].ledon = (500 * hz) / 1000;
6517 sc->sc_hwmap[i].ledoff = (130 * hz) / 1000;
6520 sc->sc_hwmap[i].ieeerate =
6523 sc->sc_hwmap[i].ieeerate |= IEEE80211_RATE_MCS;
6524 sc->sc_hwmap[i].txflags = IEEE80211_RADIOTAP_F_DATAPAD;
6527 sc->sc_hwmap[i].txflags |= IEEE80211_RADIOTAP_F_SHORTPRE;
6528 sc->sc_hwmap[i].rxflags = sc->sc_hwmap[i].txflags;
6530 if (blinkrates[j].rate == sc->sc_hwmap[i].ieeerate)
6534 sc->sc_hwmap[i].ledon = (blinkrates[j].timeOn * hz) / 1000;
6535 sc->sc_hwmap[i].ledoff = (blinkrates[j].timeOff * hz) / 1000;
6537 sc->sc_currates = rt;
6538 sc->sc_curmode = mode;
6544 sc->sc_protrix = ath_tx_findrix(sc, 2*2);
6546 sc->sc_protrix = ath_tx_findrix(sc, 2*1);
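ath_setcurmode()/ath_tx_findrix() above build a reverse map from IEEE rate codes (including MCS-flagged ones) to indexes in the current rate table: the map is first filled with 0xff ("invalid") and then each rate code points back at its table slot. A minimal sketch, with hypothetical names:

#include <stdint.h>
#include <string.h>

struct my_ratemap {
    uint8_t rixmap[256];    /* IEEE rate code -> rate-table index, 0xff = none */
};

static void
my_setcurmode(struct my_ratemap *rm, const uint8_t *ieeerates, int nrates)
{
    int i;

    memset(rm->rixmap, 0xff, sizeof(rm->rixmap));
    for (i = 0; i < nrates && i < 255; i++)
        rm->rixmap[ieeerates[i]] = (uint8_t)i;
}

/* Lookup in the spirit of ath_tx_findrix(): -1 means "not valid in this mode". */
static int
my_findrix(const struct my_ratemap *rm, uint8_t rate)
{
    return (rm->rixmap[rate] == 0xff ? -1 : rm->rixmap[rate]);
}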
6553 struct ath_softc *sc = arg;
6554 struct ieee80211com *ic = &sc->sc_ic;
6557 ATH_LOCK_ASSERT(sc);
6559 if (sc->sc_wd_timer != 0 && --sc->sc_wd_timer == 0) {
6562 ath_power_set_power_state(sc, HAL_PM_AWAKE);
6564 if (ath_hal_gethangstate(sc->sc_ah, 0xffff, &hangs) &&
6566 device_printf(sc->sc_dev, "%s hang detected (0x%x)\n",
6569 device_printf(sc->sc_dev, "device timeout\n");
6572 sc->sc_stats.ast_watchdog++;
6574 ath_power_restore_power_state(sc);
6584 taskqueue_enqueue(sc->sc_tq, &sc->sc_resettask);
6587 callout_schedule(&sc->sc_wd_ch, hz);
6593 struct ath_softc *sc = ic->ic_softc;
6596 ATH_LOCK(sc);
6603 if (sc->sc_running) {
6604 ath_power_set_power_state(sc, HAL_PM_AWAKE);
6605 ath_mode_init(sc);
6606 ath_power_restore_power_state(sc);
6607 } else if (!sc->sc_invalid) {
6617 error = ath_init(sc);
6620 ath_stop(sc);
6621 if (!sc->sc_invalid)
6622 ath_power_setpower(sc, HAL_PM_FULL_SLEEP, 1);
6624 ATH_UNLOCK(sc);
6628 if (sc->sc_tx99 != NULL)
6629 sc->sc_tx99->start(sc->sc_tx99);
6640 ath_announce(struct ath_softc *sc)
6642 struct ath_hal *ah = sc->sc_ah;
6644 device_printf(sc->sc_dev, "%s mac %d.%d RF%s phy %d.%d\n",
6647 device_printf(sc->sc_dev, "2GHz radio: 0x%.4x; 5GHz radio: 0x%.4x\n",
6652 struct ath_txq *txq = sc->sc_ac2q[i];
6653 device_printf(sc->sc_dev,
6657 device_printf(sc->sc_dev, "Use hw queue %u for CAB traffic\n",
6658 sc->sc_cabq->axq_qnum);
6659 device_printf(sc->sc_dev, "Use hw queue %u for beacons\n",
6660 sc->sc_bhalq);
6663 device_printf(sc->sc_dev, "using %u rx buffers\n", ath_rxbuf);
6665 device_printf(sc->sc_dev, "using %u tx buffers\n", ath_txbuf);
6666 if (sc->sc_mcastkey && bootverbose)
6667 device_printf(sc->sc_dev, "using multicast key search\n");
6673 struct ath_softc *sc = (struct ath_softc *) p;
6674 struct ieee80211com *ic = &sc->sc_ic;
6681 if (ath_dfs_process_radar_event(sc, sc->sc_curchan)) {
6698 ieee80211_dfs_notify_radar(ic, sc->sc_curchan);
6715 struct ath_softc *sc = ic->ic_softc;
6720 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE, "%s: %6D: enable=%d\n",
6728 ath_tx_node_sleep(sc, an);
6730 ath_tx_node_wakeup(sc, an);
6782 struct ath_softc *sc = ic->ic_softc;
6787 ATH_TX_LOCK(sc);
6798 ATH_TX_UNLOCK(sc);
6817 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6823 ATH_TX_UNLOCK(sc);
6825 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6832 ATH_TX_UNLOCK(sc);
6836 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6843 ATH_TX_UNLOCK(sc);
6849 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6856 ATH_TX_UNLOCK(sc);
6864 ATH_TX_UNLOCK(sc);
6865 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6911 ath_tx_update_tim(struct ath_softc *sc, struct ieee80211_node *ni,
6932 ATH_TX_LOCK_ASSERT(sc);
6938 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6957 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
6987 struct ath_softc *sc = ic->ic_softc;
7032 ATH_TX_LOCK(sc);
7039 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
7044 ATH_TX_UNLOCK(sc);
7064 ATH_TX_UNLOCK(sc);
7065 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
7085 ath_tx_tid_sched(sc, atid);
7091 ATH_TX_UNLOCK(sc);
7092 taskqueue_enqueue(sc->sc_tq, &sc->sc_txqtask);
7093 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,
7102 ATH_TX_UNLOCK(sc);
7107 DPRINTF(sc, ATH_DEBUG_NODE_PWRSAVE,