1 /* $OpenBSD: ar5008.c,v 1.35 2016/01/05 18:41:15 stsp Exp $ */ 2 3 /*- 4 * Copyright (c) 2009 Damien Bergamini <damien.bergamini@free.fr> 5 * Copyright (c) 2008-2009 Atheros Communications Inc. 6 * 7 * Permission to use, copy, modify, and/or distribute this software for any 8 * purpose with or without fee is hereby granted, provided that the above 9 * copyright notice and this permission notice appear in all copies. 10 * 11 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 12 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 13 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 14 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 15 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 16 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 17 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 18 */ 19 20 /* 21 * Driver for Atheros 802.11a/g/n chipsets. 22 * Routines common to AR5008, AR9001 and AR9002 families. 23 */ 24 25 #include "bpfilter.h" 26 27 #include <sys/param.h> 28 #include <sys/sockio.h> 29 #include <sys/mbuf.h> 30 #include <sys/kernel.h> 31 #include <sys/socket.h> 32 #include <sys/systm.h> 33 #include <sys/malloc.h> 34 #include <sys/queue.h> 35 #include <sys/timeout.h> 36 #include <sys/conf.h> 37 #include <sys/device.h> 38 #include <sys/stdint.h> /* uintptr_t */ 39 #include <sys/endian.h> 40 41 #include <machine/bus.h> 42 43 #if NBPFILTER > 0 44 #include <net/bpf.h> 45 #endif 46 #include <net/if.h> 47 #include <net/if_media.h> 48 49 #include <netinet/in.h> 50 #include <netinet/if_ether.h> 51 52 #include <net80211/ieee80211_var.h> 53 #include <net80211/ieee80211_amrr.h> 54 #include <net80211/ieee80211_radiotap.h> 55 56 #include <dev/ic/athnreg.h> 57 #include <dev/ic/athnvar.h> 58 59 #include <dev/ic/ar5008reg.h> 60 61 int ar5008_attach(struct athn_softc *); 62 int ar5008_read_eep_word(struct athn_softc *, uint32_t, uint16_t *); 63 int ar5008_read_rom(struct athn_softc *); 64 void ar5008_swap_rom(struct athn_softc *); 65 int ar5008_gpio_read(struct athn_softc *, int); 66 void ar5008_gpio_write(struct athn_softc *, int, int); 67 void ar5008_gpio_config_input(struct athn_softc *, int); 68 void ar5008_gpio_config_output(struct athn_softc *, int, int); 69 void ar5008_rfsilent_init(struct athn_softc *); 70 int ar5008_dma_alloc(struct athn_softc *); 71 void ar5008_dma_free(struct athn_softc *); 72 int ar5008_tx_alloc(struct athn_softc *); 73 void ar5008_tx_free(struct athn_softc *); 74 int ar5008_rx_alloc(struct athn_softc *); 75 void ar5008_rx_free(struct athn_softc *); 76 void ar5008_rx_enable(struct athn_softc *); 77 void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *, 78 struct ar_rx_desc *); 79 void ar5008_rx_intr(struct athn_softc *); 80 int ar5008_tx_process(struct athn_softc *, int); 81 void ar5008_tx_intr(struct athn_softc *); 82 int ar5008_swba_intr(struct athn_softc *); 83 int ar5008_intr(struct athn_softc *); 84 int ar5008_tx(struct athn_softc *, struct mbuf *, struct ieee80211_node *, 85 int); 86 void ar5008_set_rf_mode(struct athn_softc *, struct ieee80211_channel *); 87 int ar5008_rf_bus_request(struct athn_softc *); 88 void ar5008_rf_bus_release(struct athn_softc *); 89 void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *, 90 struct ieee80211_channel *); 91 void ar5008_set_delta_slope(struct athn_softc *, struct ieee80211_channel *, 92 struct ieee80211_channel *); 93 void 
ar5008_enable_antenna_diversity(struct athn_softc *); 94 void ar5008_init_baseband(struct athn_softc *); 95 void ar5008_disable_phy(struct athn_softc *); 96 void ar5008_init_chains(struct athn_softc *); 97 void ar5008_set_rxchains(struct athn_softc *); 98 void ar5008_read_noisefloor(struct athn_softc *, int16_t *, int16_t *); 99 void ar5008_write_noisefloor(struct athn_softc *, int16_t *, int16_t *); 100 void ar5008_get_noisefloor(struct athn_softc *, struct ieee80211_channel *); 101 void ar5008_bb_load_noisefloor(struct athn_softc *); 102 void ar5008_noisefloor_calib(struct athn_softc *); 103 void ar5008_do_noisefloor_calib(struct athn_softc *); 104 void ar5008_do_calib(struct athn_softc *); 105 void ar5008_next_calib(struct athn_softc *); 106 void ar5008_calib_iq(struct athn_softc *); 107 void ar5008_calib_adc_gain(struct athn_softc *); 108 void ar5008_calib_adc_dc_off(struct athn_softc *); 109 void ar5008_write_txpower(struct athn_softc *, int16_t power[]); 110 void ar5008_set_viterbi_mask(struct athn_softc *, int); 111 void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *, 112 struct ieee80211_channel *); 113 uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int); 114 void ar5008_get_pdadcs(struct athn_softc *, uint8_t, struct athn_pier *, 115 struct athn_pier *, int, int, uint8_t, uint8_t *, uint8_t *); 116 void ar5008_get_lg_tpow(struct athn_softc *, struct ieee80211_channel *, 117 uint8_t, const struct ar_cal_target_power_leg *, int, uint8_t[]); 118 void ar5008_get_ht_tpow(struct athn_softc *, struct ieee80211_channel *, 119 uint8_t, const struct ar_cal_target_power_ht *, int, uint8_t[]); 120 void ar5008_set_noise_immunity_level(struct athn_softc *, int); 121 void ar5008_enable_ofdm_weak_signal(struct athn_softc *); 122 void ar5008_disable_ofdm_weak_signal(struct athn_softc *); 123 void ar5008_set_cck_weak_signal(struct athn_softc *, int); 124 void ar5008_set_firstep_level(struct athn_softc *, int); 125 void ar5008_set_spur_immunity_level(struct athn_softc *, int); 126 127 /* Extern functions. */ 128 void athn_stop(struct ifnet *, int); 129 int athn_interpolate(int, int, int, int, int); 130 int athn_txtime(struct athn_softc *, int, int, u_int); 131 void athn_inc_tx_trigger_level(struct athn_softc *); 132 int athn_tx_pending(struct athn_softc *, int); 133 void athn_stop_tx_dma(struct athn_softc *, int); 134 void athn_get_delta_slope(uint32_t, uint32_t *, uint32_t *); 135 void athn_config_pcie(struct athn_softc *); 136 void athn_config_nonpcie(struct athn_softc *); 137 uint8_t athn_chan2fbin(struct ieee80211_channel *); 138 uint8_t ar5416_get_rf_rev(struct athn_softc *); 139 void ar5416_reset_addac(struct athn_softc *, struct ieee80211_channel *); 140 void ar5416_rf_reset(struct athn_softc *, struct ieee80211_channel *); 141 void ar5416_reset_bb_gain(struct athn_softc *, struct ieee80211_channel *); 142 void ar9280_reset_rx_gain(struct athn_softc *, struct ieee80211_channel *); 143 void ar9280_reset_tx_gain(struct athn_softc *, struct ieee80211_channel *); 144 145 146 int 147 ar5008_attach(struct athn_softc *sc) 148 { 149 struct athn_ops *ops = &sc->ops; 150 struct ieee80211com *ic = &sc->sc_ic; 151 struct ar_base_eep_header *base; 152 uint8_t eep_ver, kc_entries_log; 153 int error; 154 155 /* Set callbacks for AR5008, AR9001 and AR9002 families. 
*/ 156 ops->gpio_read = ar5008_gpio_read; 157 ops->gpio_write = ar5008_gpio_write; 158 ops->gpio_config_input = ar5008_gpio_config_input; 159 ops->gpio_config_output = ar5008_gpio_config_output; 160 ops->rfsilent_init = ar5008_rfsilent_init; 161 162 ops->dma_alloc = ar5008_dma_alloc; 163 ops->dma_free = ar5008_dma_free; 164 ops->rx_enable = ar5008_rx_enable; 165 ops->intr = ar5008_intr; 166 ops->tx = ar5008_tx; 167 168 ops->set_rf_mode = ar5008_set_rf_mode; 169 ops->rf_bus_request = ar5008_rf_bus_request; 170 ops->rf_bus_release = ar5008_rf_bus_release; 171 ops->set_phy = ar5008_set_phy; 172 ops->set_delta_slope = ar5008_set_delta_slope; 173 ops->enable_antenna_diversity = ar5008_enable_antenna_diversity; 174 ops->init_baseband = ar5008_init_baseband; 175 ops->disable_phy = ar5008_disable_phy; 176 ops->set_rxchains = ar5008_set_rxchains; 177 ops->noisefloor_calib = ar5008_do_noisefloor_calib; 178 ops->do_calib = ar5008_do_calib; 179 ops->next_calib = ar5008_next_calib; 180 ops->hw_init = ar5008_hw_init; 181 182 ops->set_noise_immunity_level = ar5008_set_noise_immunity_level; 183 ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal; 184 ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal; 185 ops->set_cck_weak_signal = ar5008_set_cck_weak_signal; 186 ops->set_firstep_level = ar5008_set_firstep_level; 187 ops->set_spur_immunity_level = ar5008_set_spur_immunity_level; 188 189 /* Set MAC registers offsets. */ 190 sc->obs_off = AR_OBS; 191 sc->gpio_input_en_off = AR_GPIO_INPUT_EN_VAL; 192 193 if (!(sc->flags & ATHN_FLAG_PCIE)) 194 athn_config_nonpcie(sc); 195 else 196 athn_config_pcie(sc); 197 198 /* Read entire ROM content in memory. */ 199 if ((error = ar5008_read_rom(sc)) != 0) { 200 printf("%s: could not read ROM\n", sc->sc_dev.dv_xname); 201 return (error); 202 } 203 204 /* Get RF revision. */ 205 sc->rf_rev = ar5416_get_rf_rev(sc); 206 207 base = sc->eep; 208 eep_ver = (base->version >> 12) & 0xf; 209 sc->eep_rev = (base->version & 0xfff); 210 if (eep_ver != AR_EEP_VER || sc->eep_rev == 0) { 211 printf("%s: unsupported ROM version %d.%d\n", 212 sc->sc_dev.dv_xname, eep_ver, sc->eep_rev); 213 return (EINVAL); 214 } 215 216 if (base->opCapFlags & AR_OPFLAGS_11A) 217 sc->flags |= ATHN_FLAG_11A; 218 if (base->opCapFlags & AR_OPFLAGS_11G) 219 sc->flags |= ATHN_FLAG_11G; 220 if (base->opCapFlags & AR_OPFLAGS_11N) 221 sc->flags |= ATHN_FLAG_11N; 222 223 IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr); 224 225 /* Check if we have a hardware radio switch. */ 226 if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) { 227 sc->flags |= ATHN_FLAG_RFSILENT; 228 /* Get GPIO pin used by hardware radio switch. */ 229 sc->rfsilent_pin = MS(base->rfSilent, 230 AR_EEP_RFSILENT_GPIO_SEL); 231 /* Get polarity of hardware radio switch. */ 232 if (base->rfSilent & AR_EEP_RFSILENT_POLARITY) 233 sc->flags |= ATHN_FLAG_RFSILENT_REVERSED; 234 } 235 236 /* Get the number of HW key cache entries. */ 237 kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES); 238 sc->kc_entries = (kc_entries_log != 0) ? 239 1 << kc_entries_log : AR_KEYTABLE_SIZE; 240 241 sc->txchainmask = base->txMask; 242 if (sc->mac_ver == AR_SREV_VERSION_5416_PCI && 243 !(base->opCapFlags & AR_OPFLAGS_11A)) { 244 /* For single-band AR5416 PCI, use GPIO pin 0. */ 245 sc->rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7; 246 } else 247 sc->rxchainmask = base->rxMask; 248 249 ops->setup(sc); 250 return (0); 251 } 252 253 /* 254 * Read 16-bit word from ROM. 
255 */ 256 int 257 ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val) 258 { 259 uint32_t reg; 260 int ntries; 261 262 reg = AR_READ(sc, AR_EEPROM_OFFSET(addr)); 263 for (ntries = 0; ntries < 1000; ntries++) { 264 reg = AR_READ(sc, AR_EEPROM_STATUS_DATA); 265 if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY | 266 AR_EEPROM_STATUS_DATA_PROT_ACCESS))) { 267 *val = MS(reg, AR_EEPROM_STATUS_DATA_VAL); 268 return (0); 269 } 270 DELAY(10); 271 } 272 *val = 0xffff; 273 return (ETIMEDOUT); 274 } 275 276 int 277 ar5008_read_rom(struct athn_softc *sc) 278 { 279 uint32_t addr, end; 280 uint16_t magic, sum, *eep; 281 int need_swap = 0; 282 int error; 283 284 /* Determine ROM endianness. */ 285 error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic); 286 if (error != 0) 287 return (error); 288 if (magic != AR_EEPROM_MAGIC) { 289 if (magic != swap16(AR_EEPROM_MAGIC)) { 290 DPRINTF(("invalid ROM magic 0x%x != 0x%x\n", 291 magic, AR_EEPROM_MAGIC)); 292 return (EIO); 293 } 294 DPRINTF(("non-native ROM endianness\n")); 295 need_swap = 1; 296 } 297 298 /* Allocate space to store ROM in host memory. */ 299 sc->eep = malloc(sc->eep_size, M_DEVBUF, M_NOWAIT); 300 if (sc->eep == NULL) 301 return (ENOMEM); 302 303 /* Read entire ROM and compute checksum. */ 304 sum = 0; 305 eep = sc->eep; 306 end = sc->eep_base + sc->eep_size / sizeof(uint16_t); 307 for (addr = sc->eep_base; addr < end; addr++, eep++) { 308 if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) { 309 DPRINTF(("could not read ROM at 0x%x\n", addr)); 310 return (error); 311 } 312 if (need_swap) 313 *eep = swap16(*eep); 314 sum ^= *eep; 315 } 316 if (sum != 0xffff) { 317 printf("%s: bad ROM checksum 0x%04x\n", 318 sc->sc_dev.dv_xname, sum); 319 return (EIO); 320 } 321 if (need_swap) 322 ar5008_swap_rom(sc); 323 324 return (0); 325 } 326 327 void 328 ar5008_swap_rom(struct athn_softc *sc) 329 { 330 struct ar_base_eep_header *base = sc->eep; 331 332 /* Swap common fields first. */ 333 base->length = swap16(base->length); 334 base->version = swap16(base->version); 335 base->regDmn[0] = swap16(base->regDmn[0]); 336 base->regDmn[1] = swap16(base->regDmn[1]); 337 base->rfSilent = swap16(base->rfSilent); 338 base->blueToothOptions = swap16(base->blueToothOptions); 339 base->deviceCap = swap16(base->deviceCap); 340 341 /* Swap device-dependent fields. */ 342 sc->ops.swap_rom(sc); 343 } 344 345 /* 346 * Access to General Purpose Input/Output ports. 347 */ 348 int 349 ar5008_gpio_read(struct athn_softc *sc, int pin) 350 { 351 KASSERT(pin < sc->ngpiopins); 352 if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) 353 return (!((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1)); 354 return ((AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->ngpiopins + pin)) & 1); 355 } 356 357 void 358 ar5008_gpio_write(struct athn_softc *sc, int pin, int set) 359 { 360 uint32_t reg; 361 362 KASSERT(pin < sc->ngpiopins); 363 364 if (sc->flags & ATHN_FLAG_USB) 365 set = !set; /* AR9271/AR7010 is reversed. */ 366 367 if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) { 368 /* Special case for AR7010. 
*/ 369 reg = AR_READ(sc, AR7010_GPIO_OUT); 370 if (set) 371 reg |= 1 << pin; 372 else 373 reg &= ~(1 << pin); 374 AR_WRITE(sc, AR7010_GPIO_OUT, reg); 375 } else { 376 reg = AR_READ(sc, AR_GPIO_IN_OUT); 377 if (set) 378 reg |= 1 << pin; 379 else 380 reg &= ~(1 << pin); 381 AR_WRITE(sc, AR_GPIO_IN_OUT, reg); 382 } 383 AR_WRITE_BARRIER(sc); 384 } 385 386 void 387 ar5008_gpio_config_input(struct athn_softc *sc, int pin) 388 { 389 uint32_t reg; 390 391 if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) { 392 /* Special case for AR7010. */ 393 AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin); 394 } else { 395 reg = AR_READ(sc, AR_GPIO_OE_OUT); 396 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2)); 397 reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2); 398 AR_WRITE(sc, AR_GPIO_OE_OUT, reg); 399 } 400 AR_WRITE_BARRIER(sc); 401 } 402 403 void 404 ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type) 405 { 406 uint32_t reg; 407 int mux, off; 408 409 if ((sc->flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) { 410 /* Special case for AR7010. */ 411 AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin); 412 AR_WRITE_BARRIER(sc); 413 return; 414 } 415 mux = pin / 6; 416 off = pin % 6; 417 418 reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux)); 419 if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0) 420 reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1; 421 reg &= ~(0x1f << (off * 5)); 422 reg |= (type & 0x1f) << (off * 5); 423 AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg); 424 425 reg = AR_READ(sc, AR_GPIO_OE_OUT); 426 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2)); 427 reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2); 428 AR_WRITE(sc, AR_GPIO_OE_OUT, reg); 429 AR_WRITE_BARRIER(sc); 430 } 431 432 void 433 ar5008_rfsilent_init(struct athn_softc *sc) 434 { 435 uint32_t reg; 436 437 /* Configure hardware radio switch. */ 438 AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB); 439 reg = AR_READ(sc, AR_GPIO_INPUT_MUX2); 440 reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0); 441 AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg); 442 ar5008_gpio_config_input(sc, sc->rfsilent_pin); 443 AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB); 444 if (!(sc->flags & ATHN_FLAG_RFSILENT_REVERSED)) { 445 AR_SETBITS(sc, AR_GPIO_INTR_POL, 446 AR_GPIO_INTR_POL_PIN(sc->rfsilent_pin)); 447 } 448 AR_WRITE_BARRIER(sc); 449 } 450 451 int 452 ar5008_dma_alloc(struct athn_softc *sc) 453 { 454 int error; 455 456 error = ar5008_tx_alloc(sc); 457 if (error != 0) 458 return (error); 459 460 error = ar5008_rx_alloc(sc); 461 if (error != 0) 462 return (error); 463 464 return (0); 465 } 466 467 void 468 ar5008_dma_free(struct athn_softc *sc) 469 { 470 ar5008_tx_free(sc); 471 ar5008_rx_free(sc); 472 } 473 474 int 475 ar5008_tx_alloc(struct athn_softc *sc) 476 { 477 struct athn_tx_buf *bf; 478 bus_size_t size; 479 int error, nsegs, i; 480 481 /* 482 * Allocate a pool of Tx descriptors shared between all Tx queues. 
483 */ 484 size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc); 485 486 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0, 487 BUS_DMA_NOWAIT, &sc->map); 488 if (error != 0) 489 goto fail; 490 491 error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->seg, 1, 492 &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO); 493 if (error != 0) 494 goto fail; 495 496 error = bus_dmamem_map(sc->sc_dmat, &sc->seg, 1, size, 497 (caddr_t *)&sc->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT); 498 if (error != 0) 499 goto fail; 500 501 error = bus_dmamap_load_raw(sc->sc_dmat, sc->map, &sc->seg, 1, size, 502 BUS_DMA_NOWAIT); 503 if (error != 0) 504 goto fail; 505 506 SIMPLEQ_INIT(&sc->txbufs); 507 for (i = 0; i < ATHN_NTXBUFS; i++) { 508 bf = &sc->txpool[i]; 509 510 error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ, 511 AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT, 512 &bf->bf_map); 513 if (error != 0) { 514 printf("%s: could not create Tx buf DMA map\n", 515 sc->sc_dev.dv_xname); 516 goto fail; 517 } 518 519 bf->bf_descs = 520 &((struct ar_tx_desc *)sc->descs)[i * AR5008_MAX_SCATTER]; 521 bf->bf_daddr = sc->map->dm_segs[0].ds_addr + 522 i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc); 523 524 SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list); 525 } 526 return (0); 527 fail: 528 ar5008_tx_free(sc); 529 return (error); 530 } 531 532 void 533 ar5008_tx_free(struct athn_softc *sc) 534 { 535 struct athn_tx_buf *bf; 536 int i; 537 538 for (i = 0; i < ATHN_NTXBUFS; i++) { 539 bf = &sc->txpool[i]; 540 541 if (bf->bf_map != NULL) 542 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map); 543 } 544 /* Free Tx descriptors. */ 545 if (sc->map != NULL) { 546 if (sc->descs != NULL) { 547 bus_dmamap_unload(sc->sc_dmat, sc->map); 548 bus_dmamem_unmap(sc->sc_dmat, (caddr_t)sc->descs, 549 ATHN_NTXBUFS * AR5008_MAX_SCATTER * 550 sizeof(struct ar_tx_desc)); 551 bus_dmamem_free(sc->sc_dmat, &sc->seg, 1); 552 } 553 bus_dmamap_destroy(sc->sc_dmat, sc->map); 554 } 555 } 556 557 int 558 ar5008_rx_alloc(struct athn_softc *sc) 559 { 560 struct athn_rxq *rxq = &sc->rxq[0]; 561 struct athn_rx_buf *bf; 562 struct ar_rx_desc *ds; 563 bus_size_t size; 564 int error, nsegs, i; 565 566 rxq->bf = mallocarray(ATHN_NRXBUFS, sizeof(*bf), M_DEVBUF, 567 M_NOWAIT | M_ZERO); 568 if (rxq->bf == NULL) 569 return (ENOMEM); 570 571 size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc); 572 573 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0, 574 BUS_DMA_NOWAIT, &rxq->map); 575 if (error != 0) 576 goto fail; 577 578 error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1, 579 &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO); 580 if (error != 0) 581 goto fail; 582 583 error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size, 584 (caddr_t *)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT); 585 if (error != 0) 586 goto fail; 587 588 error = bus_dmamap_load_raw(sc->sc_dmat, rxq->map, &rxq->seg, 1, 589 size, BUS_DMA_NOWAIT); 590 if (error != 0) 591 goto fail; 592 593 for (i = 0; i < ATHN_NRXBUFS; i++) { 594 bf = &rxq->bf[i]; 595 ds = &((struct ar_rx_desc *)rxq->descs)[i]; 596 597 error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1, 598 ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW, 599 &bf->bf_map); 600 if (error != 0) { 601 printf("%s: could not create Rx buf DMA map\n", 602 sc->sc_dev.dv_xname); 603 goto fail; 604 } 605 /* 606 * Assumes MCLGETI returns cache-line-size aligned buffers. 
607 */ 608 bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ); 609 if (bf->bf_m == NULL) { 610 printf("%s: could not allocate Rx mbuf\n", 611 sc->sc_dev.dv_xname); 612 error = ENOBUFS; 613 goto fail; 614 } 615 616 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, 617 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL, 618 BUS_DMA_NOWAIT | BUS_DMA_READ); 619 if (error != 0) { 620 printf("%s: could not DMA map Rx buffer\n", 621 sc->sc_dev.dv_xname); 622 goto fail; 623 } 624 625 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ, 626 BUS_DMASYNC_PREREAD); 627 628 bf->bf_desc = ds; 629 bf->bf_daddr = rxq->map->dm_segs[0].ds_addr + 630 i * sizeof(struct ar_rx_desc); 631 } 632 return (0); 633 fail: 634 ar5008_rx_free(sc); 635 return (error); 636 } 637 638 void 639 ar5008_rx_free(struct athn_softc *sc) 640 { 641 struct athn_rxq *rxq = &sc->rxq[0]; 642 struct athn_rx_buf *bf; 643 int i; 644 645 if (rxq->bf == NULL) 646 return; 647 for (i = 0; i < ATHN_NRXBUFS; i++) { 648 bf = &rxq->bf[i]; 649 650 if (bf->bf_map != NULL) 651 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map); 652 if (bf->bf_m != NULL) 653 m_freem(bf->bf_m); 654 } 655 free(rxq->bf, M_DEVBUF, 0); 656 657 /* Free Rx descriptors. */ 658 if (rxq->map != NULL) { 659 if (rxq->descs != NULL) { 660 bus_dmamap_unload(sc->sc_dmat, rxq->map); 661 bus_dmamem_unmap(sc->sc_dmat, (caddr_t)rxq->descs, 662 ATHN_NRXBUFS * sizeof(struct ar_rx_desc)); 663 bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1); 664 } 665 bus_dmamap_destroy(sc->sc_dmat, rxq->map); 666 } 667 } 668 669 void 670 ar5008_rx_enable(struct athn_softc *sc) 671 { 672 struct athn_rxq *rxq = &sc->rxq[0]; 673 struct athn_rx_buf *bf; 674 struct ar_rx_desc *ds; 675 int i; 676 677 /* Setup and link Rx descriptors. */ 678 SIMPLEQ_INIT(&rxq->head); 679 rxq->lastds = NULL; 680 for (i = 0; i < ATHN_NRXBUFS; i++) { 681 bf = &rxq->bf[i]; 682 ds = bf->bf_desc; 683 684 memset(ds, 0, sizeof(*ds)); 685 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr; 686 ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ); 687 688 if (rxq->lastds != NULL) { 689 ((struct ar_rx_desc *)rxq->lastds)->ds_link = 690 bf->bf_daddr; 691 } 692 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list); 693 rxq->lastds = ds; 694 } 695 bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize, 696 BUS_DMASYNC_PREREAD); 697 698 /* Enable Rx. */ 699 AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr); 700 AR_WRITE(sc, AR_CR, AR_CR_RXE); 701 AR_WRITE_BARRIER(sc); 702 } 703 704 #if NBPFILTER > 0 705 void 706 ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m, 707 struct ar_rx_desc *ds) 708 { 709 #define IEEE80211_RADIOTAP_F_SHORTGI 0x80 /* XXX from FBSD */ 710 711 struct athn_rx_radiotap_header *tap = &sc->sc_rxtap; 712 struct ieee80211com *ic = &sc->sc_ic; 713 struct mbuf mb; 714 uint64_t tsf; 715 uint32_t tstamp; 716 uint8_t rate; 717 718 /* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */ 719 tstamp = ds->ds_status2; 720 tsf = AR_READ(sc, AR_TSF_U32); 721 tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32); 722 if ((tsf & 0x7fff) < tstamp) 723 tsf -= 0x8000; 724 tsf = (tsf & ~0x7fff) | tstamp; 725 726 tap->wr_flags = IEEE80211_RADIOTAP_F_FCS; 727 tap->wr_tsft = htole64(tsf); 728 tap->wr_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq); 729 tap->wr_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags); 730 tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED); 731 /* XXX noise. */ 732 tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA); 733 tap->wr_rate = 0; /* In case it can't be found below. 
*/ 734 if (AR_SREV_5416_20_OR_LATER(sc)) 735 rate = MS(ds->ds_status0, AR_RXS0_RATE); 736 else 737 rate = MS(ds->ds_status3, AR_RXS3_RATE); 738 if (rate & 0x80) { /* HT. */ 739 /* Bit 7 set means HT MCS instead of rate. */ 740 tap->wr_rate = rate; 741 if (!(ds->ds_status3 & AR_RXS3_GI)) 742 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI; 743 744 } else if (rate & 0x10) { /* CCK. */ 745 if (rate & 0x04) 746 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE; 747 switch (rate & ~0x14) { 748 case 0xb: tap->wr_rate = 2; break; 749 case 0xa: tap->wr_rate = 4; break; 750 case 0x9: tap->wr_rate = 11; break; 751 case 0x8: tap->wr_rate = 22; break; 752 } 753 } else { /* OFDM. */ 754 switch (rate) { 755 case 0xb: tap->wr_rate = 12; break; 756 case 0xf: tap->wr_rate = 18; break; 757 case 0xa: tap->wr_rate = 24; break; 758 case 0xe: tap->wr_rate = 36; break; 759 case 0x9: tap->wr_rate = 48; break; 760 case 0xd: tap->wr_rate = 72; break; 761 case 0x8: tap->wr_rate = 96; break; 762 case 0xc: tap->wr_rate = 108; break; 763 } 764 } 765 mb.m_data = (caddr_t)tap; 766 mb.m_len = sc->sc_rxtap_len; 767 mb.m_next = m; 768 mb.m_nextpkt = NULL; 769 mb.m_type = 0; 770 mb.m_flags = 0; 771 bpf_mtap(sc->sc_drvbpf, &mb, BPF_DIRECTION_IN); 772 } 773 #endif 774 775 static __inline int 776 ar5008_rx_process(struct athn_softc *sc) 777 { 778 struct ieee80211com *ic = &sc->sc_ic; 779 struct ifnet *ifp = &ic->ic_if; 780 struct athn_rxq *rxq = &sc->rxq[0]; 781 struct athn_rx_buf *bf, *nbf; 782 struct ar_rx_desc *ds; 783 struct ieee80211_frame *wh; 784 struct ieee80211_rxinfo rxi; 785 struct ieee80211_node *ni; 786 struct mbuf *m, *m1; 787 int error, len; 788 789 bf = SIMPLEQ_FIRST(&rxq->head); 790 if (__predict_false(bf == NULL)) { /* Should not happen. */ 791 printf("%s: Rx queue is empty!\n", sc->sc_dev.dv_xname); 792 return (ENOENT); 793 } 794 ds = bf->bf_desc; 795 796 if (!(ds->ds_status8 & AR_RXS8_DONE)) { 797 /* 798 * On some parts, the status words can get corrupted 799 * (including the "done" bit), so we check the next 800 * descriptor "done" bit. If it is set, it is a good 801 * indication that the status words are corrupted, so 802 * we skip this descriptor and drop the frame. 803 */ 804 nbf = SIMPLEQ_NEXT(bf, bf_list); 805 if (nbf != NULL && 806 (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 & 807 AR_RXS8_DONE)) { 808 DPRINTF(("corrupted descriptor status=0x%x\n", 809 ds->ds_status8)); 810 /* HW will not "move" RXDP in this case, so do it. */ 811 AR_WRITE(sc, AR_RXDP, nbf->bf_daddr); 812 AR_WRITE_BARRIER(sc); 813 ifp->if_ierrors++; 814 goto skip; 815 } 816 return (EBUSY); 817 } 818 819 if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) { 820 /* Drop frames that span multiple Rx descriptors. */ 821 DPRINTF(("dropping split frame\n")); 822 ifp->if_ierrors++; 823 goto skip; 824 } 825 if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) { 826 if (ds->ds_status8 & AR_RXS8_CRC_ERR) 827 DPRINTFN(6, ("CRC error\n")); 828 else if (ds->ds_status8 & AR_RXS8_PHY_ERR) 829 DPRINTFN(6, ("PHY error=0x%x\n", 830 MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE))); 831 else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR) 832 DPRINTFN(6, ("Decryption CRC error\n")); 833 else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) { 834 DPRINTFN(2, ("Michael MIC failure\n")); 835 /* Report Michael MIC failures to net80211. */ 836 ic->ic_stats.is_rx_locmicfail++; 837 ieee80211_michael_mic_failure(ic, 0); 838 /* 839 * XXX Check that it is not a control frame 840 * (invalid MIC failures on valid ctl frames). 
841 */ 842 } 843 ifp->if_ierrors++; 844 goto skip; 845 } 846 847 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN); 848 if (__predict_false(len < IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) { 849 DPRINTF(("corrupted descriptor length=%d\n", len)); 850 ifp->if_ierrors++; 851 goto skip; 852 } 853 854 /* Allocate a new Rx buffer. */ 855 m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ); 856 if (__predict_false(m1 == NULL)) { 857 ic->ic_stats.is_rx_nombuf++; 858 ifp->if_ierrors++; 859 goto skip; 860 } 861 862 /* Sync and unmap the old Rx buffer. */ 863 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ, 864 BUS_DMASYNC_POSTREAD); 865 bus_dmamap_unload(sc->sc_dmat, bf->bf_map); 866 867 /* Map the new Rx buffer. */ 868 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *), 869 ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ); 870 if (__predict_false(error != 0)) { 871 m_freem(m1); 872 873 /* Remap the old Rx buffer or panic. */ 874 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, 875 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL, 876 BUS_DMA_NOWAIT | BUS_DMA_READ); 877 KASSERT(error != 0); 878 ifp->if_ierrors++; 879 goto skip; 880 } 881 882 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ, 883 BUS_DMASYNC_PREREAD); 884 885 /* Write physical address of new Rx buffer. */ 886 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr; 887 888 m = bf->bf_m; 889 bf->bf_m = m1; 890 891 /* Finalize mbuf. */ 892 m->m_pkthdr.len = m->m_len = len; 893 894 /* Grab a reference to the source node. */ 895 wh = mtod(m, struct ieee80211_frame *); 896 ni = ieee80211_find_rxnode(ic, wh); 897 898 /* Remove any HW padding after the 802.11 header. */ 899 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) { 900 u_int hdrlen = ieee80211_get_hdrlen(wh); 901 if (hdrlen & 3) { 902 memmove((caddr_t)wh + 2, wh, hdrlen); 903 m_adj(m, 2); 904 } 905 } 906 #if NBPFILTER > 0 907 if (__predict_false(sc->sc_drvbpf != NULL)) 908 ar5008_rx_radiotap(sc, m, ds); 909 #endif 910 /* Trim 802.11 FCS after radiotap. */ 911 m_adj(m, -IEEE80211_CRC_LEN); 912 913 /* Send the frame to the 802.11 layer. */ 914 rxi.rxi_flags = 0; /* XXX */ 915 rxi.rxi_rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED); 916 rxi.rxi_tstamp = ds->ds_status2; 917 ieee80211_input(ifp, m, ni, &rxi); 918 919 /* Node is no longer needed. */ 920 ieee80211_release_node(ic, ni); 921 922 skip: 923 /* Unlink this descriptor from head. */ 924 SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list); 925 memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */ 926 ds->ds_status8 &= ~AR_RXS8_DONE; 927 ds->ds_link = 0; 928 929 /* Re-use this descriptor and link it to tail. */ 930 if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head))) 931 ((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr; 932 else 933 AR_WRITE(sc, AR_RXDP, bf->bf_daddr); 934 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list); 935 rxq->lastds = ds; 936 937 /* Re-enable Rx. */ 938 AR_WRITE(sc, AR_CR, AR_CR_RXE); 939 AR_WRITE_BARRIER(sc); 940 return (0); 941 } 942 943 void 944 ar5008_rx_intr(struct athn_softc *sc) 945 { 946 while (ar5008_rx_process(sc) == 0); 947 } 948 949 int 950 ar5008_tx_process(struct athn_softc *sc, int qid) 951 { 952 struct ieee80211com *ic = &sc->sc_ic; 953 struct ifnet *ifp = &ic->ic_if; 954 struct athn_txq *txq = &sc->txq[qid]; 955 struct athn_node *an; 956 struct athn_tx_buf *bf; 957 struct ar_tx_desc *ds; 958 uint8_t failcnt; 959 960 bf = SIMPLEQ_FIRST(&txq->head); 961 if (bf == NULL) 962 return (ENOENT); 963 /* Get descriptor of last DMA segment. 
*/ 964 ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1]; 965 966 if (!(ds->ds_status9 & AR_TXS9_DONE)) 967 return (EBUSY); 968 969 SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list); 970 ifp->if_opackets++; 971 972 sc->sc_tx_timer = 0; 973 974 if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES) 975 ifp->if_oerrors++; 976 977 if (ds->ds_status1 & AR_TXS1_UNDERRUN) 978 athn_inc_tx_trigger_level(sc); 979 980 an = (struct athn_node *)bf->bf_ni; 981 /* 982 * NB: the data fail count contains the number of un-acked tries 983 * for the final series used. We must add the number of tries for 984 * each series that was fully processed. 985 */ 986 failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT); 987 /* NB: Assume two tries per series. */ 988 failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2; 989 990 /* Update rate control statistics. */ 991 an->amn.amn_txcnt++; 992 if (failcnt > 0) 993 an->amn.amn_retrycnt++; 994 995 DPRINTFN(5, ("Tx done qid=%d status1=%d fail count=%d\n", 996 qid, ds->ds_status1, failcnt)); 997 998 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize, 999 BUS_DMASYNC_POSTWRITE); 1000 bus_dmamap_unload(sc->sc_dmat, bf->bf_map); 1001 1002 m_freem(bf->bf_m); 1003 bf->bf_m = NULL; 1004 ieee80211_release_node(ic, bf->bf_ni); 1005 bf->bf_ni = NULL; 1006 1007 /* Link Tx buffer back to global free list. */ 1008 SIMPLEQ_INSERT_TAIL(&sc->txbufs, bf, bf_list); 1009 return (0); 1010 } 1011 1012 void 1013 ar5008_tx_intr(struct athn_softc *sc) 1014 { 1015 struct ieee80211com *ic = &sc->sc_ic; 1016 struct ifnet *ifp = &ic->ic_if; 1017 uint16_t mask = 0; 1018 uint32_t reg; 1019 int qid; 1020 1021 reg = AR_READ(sc, AR_ISR_S0_S); 1022 mask |= MS(reg, AR_ISR_S0_QCU_TXOK); 1023 mask |= MS(reg, AR_ISR_S0_QCU_TXDESC); 1024 1025 reg = AR_READ(sc, AR_ISR_S1_S); 1026 mask |= MS(reg, AR_ISR_S1_QCU_TXERR); 1027 mask |= MS(reg, AR_ISR_S1_QCU_TXEOL); 1028 1029 DPRINTFN(4, ("Tx interrupt mask=0x%x\n", mask)); 1030 for (qid = 0; mask != 0; mask >>= 1, qid++) { 1031 if (mask & 1) 1032 while (ar5008_tx_process(sc, qid) == 0); 1033 } 1034 if (!SIMPLEQ_EMPTY(&sc->txbufs)) { 1035 ifq_clr_oactive(&ifp->if_snd); 1036 ifp->if_start(ifp); 1037 } 1038 } 1039 1040 #ifndef IEEE80211_STA_ONLY 1041 /* 1042 * Process Software Beacon Alert interrupts. 1043 */ 1044 int 1045 ar5008_swba_intr(struct athn_softc *sc) 1046 { 1047 struct ieee80211com *ic = &sc->sc_ic; 1048 struct ifnet *ifp = &ic->ic_if; 1049 struct ieee80211_node *ni = ic->ic_bss; 1050 struct athn_tx_buf *bf = sc->bcnbuf; 1051 struct ieee80211_frame *wh; 1052 struct ar_tx_desc *ds; 1053 struct mbuf *m; 1054 uint8_t ridx, hwrate; 1055 int error, totlen; 1056 1057 if (ic->ic_tim_mcast_pending && 1058 mq_empty(&ni->ni_savedq) && 1059 SIMPLEQ_EMPTY(&sc->txq[ATHN_QID_CAB].head)) 1060 ic->ic_tim_mcast_pending = 0; 1061 1062 if (ic->ic_dtim_count == 0) 1063 ic->ic_dtim_count = ic->ic_dtim_period - 1; 1064 else 1065 ic->ic_dtim_count--; 1066 1067 /* Make sure previous beacon has been sent. */ 1068 if (athn_tx_pending(sc, ATHN_QID_BEACON)) { 1069 DPRINTF(("beacon stuck\n")); 1070 return (EBUSY); 1071 } 1072 /* Get new beacon. */ 1073 m = ieee80211_beacon_alloc(ic, ic->ic_bss); 1074 if (__predict_false(m == NULL)) 1075 return (ENOBUFS); 1076 /* Assign sequence number. */ 1077 wh = mtod(m, struct ieee80211_frame *); 1078 *(uint16_t *)&wh->i_seq[0] = 1079 htole16(ic->ic_bss->ni_txseq << IEEE80211_SEQ_SEQ_SHIFT); 1080 ic->ic_bss->ni_txseq++; 1081 1082 /* Unmap and free old beacon if any. 
*/ 1083 if (__predict_true(bf->bf_m != NULL)) { 1084 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, 1085 bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE); 1086 bus_dmamap_unload(sc->sc_dmat, bf->bf_map); 1087 m_freem(bf->bf_m); 1088 bf->bf_m = NULL; 1089 } 1090 /* DMA map new beacon. */ 1091 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m, 1092 BUS_DMA_NOWAIT | BUS_DMA_WRITE); 1093 if (__predict_false(error != 0)) { 1094 m_freem(m); 1095 return (error); 1096 } 1097 bf->bf_m = m; 1098 1099 /* Setup Tx descriptor (simplified ar5008_tx()). */ 1100 ds = bf->bf_descs; 1101 memset(ds, 0, sizeof(*ds)); 1102 1103 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN; 1104 ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen); 1105 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER); 1106 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON); 1107 ds->ds_ctl1 |= AR_TXC1_NO_ACK; 1108 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR); 1109 1110 /* Write number of tries. */ 1111 ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1); 1112 1113 /* Write Tx rate. */ 1114 ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ? 1115 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1; 1116 hwrate = athn_rates[ridx].hwrate; 1117 ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate); 1118 1119 /* Write Tx chains. */ 1120 ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask); 1121 1122 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr; 1123 /* Segment length must be a multiple of 4. */ 1124 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN, 1125 (bf->bf_map->dm_segs[0].ds_len + 3) & ~3); 1126 1127 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize, 1128 BUS_DMASYNC_PREWRITE); 1129 1130 /* Stop Tx DMA before putting the new beacon on the queue. */ 1131 athn_stop_tx_dma(sc, ATHN_QID_BEACON); 1132 1133 AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr); 1134 1135 for(;;) { 1136 if (SIMPLEQ_EMPTY(&sc->txbufs)) 1137 break; 1138 1139 m = mq_dequeue(&ni->ni_savedq); 1140 if (m == NULL) 1141 break; 1142 if (!mq_empty(&ni->ni_savedq)) { 1143 /* more queued frames, set the more data bit */ 1144 wh = mtod(m, struct ieee80211_frame *); 1145 wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA; 1146 } 1147 1148 if (sc->ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) { 1149 ieee80211_release_node(ic, ni); 1150 ifp->if_oerrors++; 1151 break; 1152 } 1153 } 1154 1155 /* Kick Tx. */ 1156 AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON); 1157 AR_WRITE_BARRIER(sc); 1158 return (0); 1159 } 1160 #endif 1161 1162 int 1163 ar5008_intr(struct athn_softc *sc) 1164 { 1165 uint32_t intr, intr2, intr5, sync; 1166 1167 /* Get pending interrupts. */ 1168 intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE); 1169 if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) { 1170 intr = AR_READ(sc, AR_INTR_SYNC_CAUSE); 1171 if (intr == AR_INTR_SPURIOUS || (intr & sc->isync) == 0) 1172 return (0); /* Not for us. */ 1173 } 1174 1175 if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) && 1176 (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON) 1177 intr = AR_READ(sc, AR_ISR); 1178 else 1179 intr = 0; 1180 sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->isync; 1181 if (intr == 0 && sync == 0) 1182 return (0); /* Not for us. 
*/ 1183 1184 if (intr != 0) { 1185 if (intr & AR_ISR_BCNMISC) { 1186 intr2 = AR_READ(sc, AR_ISR_S2); 1187 if (intr2 & AR_ISR_S2_TIM) 1188 /* TBD */; 1189 if (intr2 & AR_ISR_S2_TSFOOR) 1190 /* TBD */; 1191 } 1192 intr = AR_READ(sc, AR_ISR_RAC); 1193 if (intr == AR_INTR_SPURIOUS) 1194 return (1); 1195 1196 #ifndef IEEE80211_STA_ONLY 1197 if (intr & AR_ISR_SWBA) 1198 ar5008_swba_intr(sc); 1199 #endif 1200 if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM)) 1201 ar5008_rx_intr(sc); 1202 if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN)) 1203 ar5008_rx_intr(sc); 1204 1205 if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC | 1206 AR_ISR_TXERR | AR_ISR_TXEOL)) 1207 ar5008_tx_intr(sc); 1208 1209 intr5 = AR_READ(sc, AR_ISR_S5_S); 1210 if (intr & AR_ISR_GENTMR) { 1211 if (intr5 & AR_ISR_GENTMR) { 1212 DPRINTF(("GENTMR trigger=%d thresh=%d\n", 1213 MS(intr5, AR_ISR_S5_GENTIMER_TRIG), 1214 MS(intr5, AR_ISR_S5_GENTIMER_THRESH))); 1215 } 1216 } 1217 1218 if (intr5 & AR_ISR_S5_TIM_TIMER) 1219 /* TBD */; 1220 } 1221 if (sync != 0) { 1222 if (sync & (AR_INTR_SYNC_HOST1_FATAL | 1223 AR_INTR_SYNC_HOST1_PERR)) 1224 /* TBD */; 1225 1226 if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) { 1227 AR_WRITE(sc, AR_RC, AR_RC_HOSTIF); 1228 AR_WRITE(sc, AR_RC, 0); 1229 } 1230 1231 if ((sc->flags & ATHN_FLAG_RFSILENT) && 1232 (sync & AR_INTR_SYNC_GPIO_PIN(sc->rfsilent_pin))) { 1233 struct ifnet *ifp = &sc->sc_ic.ic_if; 1234 1235 printf("%s: radio switch turned off\n", 1236 sc->sc_dev.dv_xname); 1237 /* Turn the interface down. */ 1238 ifp->if_flags &= ~IFF_UP; 1239 athn_stop(ifp, 1); 1240 return (1); 1241 } 1242 1243 AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync); 1244 (void)AR_READ(sc, AR_INTR_SYNC_CAUSE); 1245 } 1246 return (1); 1247 } 1248 1249 int 1250 ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni, 1251 int txflags) 1252 { 1253 struct ieee80211com *ic = &sc->sc_ic; 1254 struct ieee80211_key *k = NULL; 1255 struct ieee80211_frame *wh; 1256 struct athn_series series[4]; 1257 struct ar_tx_desc *ds, *lastds; 1258 struct athn_txq *txq; 1259 struct athn_tx_buf *bf; 1260 struct athn_node *an = (void *)ni; 1261 struct mbuf *m1; 1262 uintptr_t entry; 1263 uint16_t qos; 1264 uint8_t txpower, type, encrtype, tid, ridx[4]; 1265 int i, error, totlen, hasqos, qid; 1266 1267 /* Grab a Tx buffer from our global free list. */ 1268 bf = SIMPLEQ_FIRST(&sc->txbufs); 1269 KASSERT(bf != NULL); 1270 1271 /* Map 802.11 frame type to hardware frame type. */ 1272 wh = mtod(m, struct ieee80211_frame *); 1273 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) == 1274 IEEE80211_FC0_TYPE_MGT) { 1275 /* NB: Beacons do not use ar5008_tx(). */ 1276 if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == 1277 IEEE80211_FC0_SUBTYPE_PROBE_RESP) 1278 type = AR_FRAME_TYPE_PROBE_RESP; 1279 else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) == 1280 IEEE80211_FC0_SUBTYPE_ATIM) 1281 type = AR_FRAME_TYPE_ATIM; 1282 else 1283 type = AR_FRAME_TYPE_NORMAL; 1284 } else if ((wh->i_fc[0] & 1285 (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) == 1286 (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) { 1287 type = AR_FRAME_TYPE_PSPOLL; 1288 } else 1289 type = AR_FRAME_TYPE_NORMAL; 1290 1291 if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) { 1292 k = ieee80211_get_txkey(ic, wh, ni); 1293 if ((m = ieee80211_encrypt(ic, m, k)) == NULL) 1294 return (ENOBUFS); 1295 wh = mtod(m, struct ieee80211_frame *); 1296 } 1297 1298 /* XXX 2-byte padding for QoS and 4-addr headers. */ 1299 1300 /* Select the HW Tx queue to use for this frame. 
*/ 1301 if ((hasqos = ieee80211_has_qos(wh))) { 1302 qos = ieee80211_get_qos(wh); 1303 tid = qos & IEEE80211_QOS_TID; 1304 qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)]; 1305 } else if (type == AR_FRAME_TYPE_PSPOLL) { 1306 qid = ATHN_QID_PSPOLL; 1307 } else if (txflags & ATHN_TXFLAG_CAB) { 1308 qid = ATHN_QID_CAB; 1309 } else 1310 qid = ATHN_QID_AC_BE; 1311 txq = &sc->txq[qid]; 1312 1313 /* Select the transmit rates to use for this frame. */ 1314 if (IEEE80211_IS_MULTICAST(wh->i_addr1) || 1315 (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) != 1316 IEEE80211_FC0_TYPE_DATA) { 1317 /* Use lowest rate for all tries. */ 1318 ridx[0] = ridx[1] = ridx[2] = ridx[3] = 1319 (ic->ic_curmode == IEEE80211_MODE_11A) ? 1320 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1; 1321 } else if (ic->ic_fixed_rate != -1) { 1322 /* Use same fixed rate for all tries. */ 1323 ridx[0] = ridx[1] = ridx[2] = ridx[3] = 1324 sc->fixed_ridx; 1325 } else { 1326 int txrate = ni->ni_txrate; 1327 /* Use fallback table of the node. */ 1328 for (i = 0; i < 4; i++) { 1329 ridx[i] = an->ridx[txrate]; 1330 txrate = an->fallback[txrate]; 1331 } 1332 } 1333 1334 #if NBPFILTER > 0 1335 if (__predict_false(sc->sc_drvbpf != NULL)) { 1336 struct athn_tx_radiotap_header *tap = &sc->sc_txtap; 1337 struct mbuf mb; 1338 1339 tap->wt_flags = 0; 1340 /* Use initial transmit rate. */ 1341 tap->wt_rate = athn_rates[ridx[0]].rate; 1342 tap->wt_chan_freq = htole16(ic->ic_bss->ni_chan->ic_freq); 1343 tap->wt_chan_flags = htole16(ic->ic_bss->ni_chan->ic_flags); 1344 tap->wt_hwqueue = qid; 1345 if (ridx[0] != ATHN_RIDX_CCK1 && 1346 (ic->ic_flags & IEEE80211_F_SHPREAMBLE)) 1347 tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE; 1348 mb.m_data = (caddr_t)tap; 1349 mb.m_len = sc->sc_txtap_len; 1350 mb.m_next = m; 1351 mb.m_nextpkt = NULL; 1352 mb.m_type = 0; 1353 mb.m_flags = 0; 1354 bpf_mtap(sc->sc_drvbpf, &mb, BPF_DIRECTION_OUT); 1355 } 1356 #endif 1357 1358 /* DMA map mbuf. */ 1359 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m, 1360 BUS_DMA_NOWAIT | BUS_DMA_WRITE); 1361 if (__predict_false(error != 0)) { 1362 if (error != EFBIG) { 1363 printf("%s: can't map mbuf (error %d)\n", 1364 sc->sc_dev.dv_xname, error); 1365 m_freem(m); 1366 return (error); 1367 } 1368 /* 1369 * DMA mapping requires too many DMA segments; linearize 1370 * mbuf in kernel virtual address space and retry. 1371 */ 1372 MGETHDR(m1, M_DONTWAIT, MT_DATA); 1373 if (m1 == NULL) { 1374 m_freem(m); 1375 return (ENOBUFS); 1376 } 1377 if (m->m_pkthdr.len > MHLEN) { 1378 MCLGET(m1, M_DONTWAIT); 1379 if (!(m1->m_flags & M_EXT)) { 1380 m_freem(m); 1381 m_freem(m1); 1382 return (ENOBUFS); 1383 } 1384 } 1385 m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, caddr_t)); 1386 m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len; 1387 m_freem(m); 1388 m = m1; 1389 1390 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m, 1391 BUS_DMA_NOWAIT | BUS_DMA_WRITE); 1392 if (error != 0) { 1393 printf("%s: can't map mbuf (error %d)\n", 1394 sc->sc_dev.dv_xname, error); 1395 m_freem(m); 1396 return (error); 1397 } 1398 } 1399 bf->bf_m = m; 1400 bf->bf_ni = ni; 1401 bf->bf_txflags = txflags; 1402 1403 wh = mtod(m, struct ieee80211_frame *); 1404 1405 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN; 1406 1407 /* Clear all Tx descriptors that we will use. */ 1408 memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds)); 1409 1410 /* Setup first Tx descriptor. */ 1411 ds = bf->bf_descs; 1412 1413 ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK; 1414 txpower = AR_MAX_RATE_POWER; /* Get from per-rate registers. 
*/ 1415 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower); 1416 1417 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type); 1418 1419 if (IEEE80211_IS_MULTICAST(wh->i_addr1) || 1420 (hasqos && (qos & IEEE80211_QOS_ACK_POLICY_MASK) == 1421 IEEE80211_QOS_ACK_POLICY_NOACK)) 1422 ds->ds_ctl1 |= AR_TXC1_NO_ACK; 1423 1424 if (0 && k != NULL) { 1425 /* 1426 * Map 802.11 cipher to hardware encryption type and 1427 * compute MIC+ICV overhead. 1428 */ 1429 switch (k->k_cipher) { 1430 case IEEE80211_CIPHER_WEP40: 1431 case IEEE80211_CIPHER_WEP104: 1432 encrtype = AR_ENCR_TYPE_WEP; 1433 totlen += 4; 1434 break; 1435 case IEEE80211_CIPHER_TKIP: 1436 encrtype = AR_ENCR_TYPE_TKIP; 1437 totlen += 12; 1438 break; 1439 case IEEE80211_CIPHER_CCMP: 1440 encrtype = AR_ENCR_TYPE_AES; 1441 totlen += 8; 1442 break; 1443 default: 1444 panic("unsupported cipher"); 1445 } 1446 /* 1447 * NB: The key cache entry index is stored in the key 1448 * private field when the key is installed. 1449 */ 1450 entry = (uintptr_t)k->k_priv; 1451 ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry); 1452 ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID; 1453 } else 1454 encrtype = AR_ENCR_TYPE_CLEAR; 1455 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype); 1456 1457 /* Check if frame must be protected using RTS/CTS or CTS-to-self. */ 1458 if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) { 1459 /* NB: Group frames are sent using CCK in 802.11b/g. */ 1460 if (totlen > ic->ic_rtsthreshold) { 1461 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE; 1462 } else if ((ic->ic_flags & IEEE80211_F_USEPROT) && 1463 athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) { 1464 if (ic->ic_protmode == IEEE80211_PROT_RTSCTS) 1465 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE; 1466 else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY) 1467 ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE; 1468 } 1469 } 1470 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) { 1471 /* Disable multi-rate retries when protection is used. */ 1472 ridx[1] = ridx[2] = ridx[3] = ridx[0]; 1473 } 1474 /* Setup multi-rate retries. */ 1475 for (i = 0; i < 4; i++) { 1476 series[i].hwrate = athn_rates[ridx[i]].hwrate; 1477 if (athn_rates[ridx[i]].phy == IEEE80211_T_DS && 1478 ridx[i] != ATHN_RIDX_CCK1 && 1479 (ic->ic_flags & IEEE80211_F_SHPREAMBLE)) 1480 series[i].hwrate |= 0x04; 1481 series[i].dur = 0; 1482 } 1483 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) { 1484 /* Compute duration for each series. */ 1485 for (i = 0; i < 4; i++) { 1486 series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN, 1487 athn_rates[ridx[i]].rspridx, ic->ic_flags); 1488 } 1489 } 1490 1491 /* Write number of tries for each series. */ 1492 ds->ds_ctl2 = 1493 SM(AR_TXC2_XMIT_DATA_TRIES0, 2) | 1494 SM(AR_TXC2_XMIT_DATA_TRIES1, 2) | 1495 SM(AR_TXC2_XMIT_DATA_TRIES2, 2) | 1496 SM(AR_TXC2_XMIT_DATA_TRIES3, 4); 1497 1498 /* Tell HW to update duration field in 802.11 header. */ 1499 if (type != AR_FRAME_TYPE_PSPOLL) 1500 ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA; 1501 1502 /* Write Tx rate for each series. */ 1503 ds->ds_ctl3 = 1504 SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) | 1505 SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) | 1506 SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) | 1507 SM(AR_TXC3_XMIT_RATE3, series[3].hwrate); 1508 1509 /* Write duration for each series. */ 1510 ds->ds_ctl4 = 1511 SM(AR_TXC4_PACKET_DUR0, series[0].dur) | 1512 SM(AR_TXC4_PACKET_DUR1, series[1].dur); 1513 ds->ds_ctl5 = 1514 SM(AR_TXC5_PACKET_DUR2, series[2].dur) | 1515 SM(AR_TXC5_PACKET_DUR3, series[3].dur); 1516 1517 /* Use the same Tx chains for all tries. 
*/ 1518 ds->ds_ctl7 = 1519 SM(AR_TXC7_CHAIN_SEL0, sc->txchainmask) | 1520 SM(AR_TXC7_CHAIN_SEL1, sc->txchainmask) | 1521 SM(AR_TXC7_CHAIN_SEL2, sc->txchainmask) | 1522 SM(AR_TXC7_CHAIN_SEL3, sc->txchainmask); 1523 #ifdef notyet 1524 /* Use the same short GI setting for all tries. */ 1525 if (ic->ic_flags & IEEE80211_F_SHGI) 1526 ds->ds_ctl7 |= AR_TXC7_GI0123; 1527 /* Use the same channel width for all tries. */ 1528 if (ic->ic_flags & IEEE80211_F_CBW40) 1529 ds->ds_ctl7 |= AR_TXC7_2040_0123; 1530 #endif 1531 1532 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) { 1533 uint8_t protridx, hwrate; 1534 uint16_t dur = 0; 1535 1536 /* Use the same protection mode for all tries. */ 1537 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) { 1538 ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01; 1539 ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23; 1540 } 1541 /* Select protection rate (suboptimal but ok). */ 1542 protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ? 1543 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2; 1544 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) { 1545 /* Account for CTS duration. */ 1546 dur += athn_txtime(sc, IEEE80211_ACK_LEN, 1547 athn_rates[protridx].rspridx, ic->ic_flags); 1548 } 1549 dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags); 1550 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) { 1551 /* Account for ACK duration. */ 1552 dur += athn_txtime(sc, IEEE80211_ACK_LEN, 1553 athn_rates[ridx[0]].rspridx, ic->ic_flags); 1554 } 1555 /* Write protection frame duration and rate. */ 1556 ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur); 1557 hwrate = athn_rates[protridx].hwrate; 1558 if (protridx == ATHN_RIDX_CCK2 && 1559 (ic->ic_flags & IEEE80211_F_SHPREAMBLE)) 1560 hwrate |= 0x04; 1561 ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate); 1562 } 1563 1564 /* Finalize first Tx descriptor and fill others (if any). */ 1565 ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen); 1566 1567 for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) { 1568 ds->ds_data = bf->bf_map->dm_segs[i].ds_addr; 1569 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN, 1570 bf->bf_map->dm_segs[i].ds_len); 1571 1572 if (i != bf->bf_map->dm_nsegs - 1) 1573 ds->ds_ctl1 |= AR_TXC1_MORE; 1574 ds->ds_link = 0; 1575 1576 /* Chain Tx descriptor. */ 1577 if (i != 0) 1578 lastds->ds_link = bf->bf_daddr + i * sizeof(*ds); 1579 lastds = ds; 1580 } 1581 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize, 1582 BUS_DMASYNC_PREWRITE); 1583 1584 if (!SIMPLEQ_EMPTY(&txq->head)) 1585 ((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr; 1586 else 1587 AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr); 1588 txq->lastds = lastds; 1589 SIMPLEQ_REMOVE_HEAD(&sc->txbufs, bf_list); 1590 SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list); 1591 1592 ds = bf->bf_descs; 1593 DPRINTFN(6, ("Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n", 1594 qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3)); 1595 1596 /* Kick Tx. */ 1597 AR_WRITE(sc, AR_Q_TXE, 1 << qid); 1598 AR_WRITE_BARRIER(sc); 1599 return (0); 1600 } 1601 1602 void 1603 ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c) 1604 { 1605 uint32_t reg; 1606 1607 reg = IEEE80211_IS_CHAN_2GHZ(c) ? 1608 AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM; 1609 if (!AR_SREV_9280_10_OR_LATER(sc)) { 1610 reg |= IEEE80211_IS_CHAN_2GHZ(c) ? 
1611 AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ; 1612 } else if (IEEE80211_IS_CHAN_5GHZ(c) && 1613 (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK)) { 1614 reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE; 1615 } 1616 AR_WRITE(sc, AR_PHY_MODE, reg); 1617 AR_WRITE_BARRIER(sc); 1618 } 1619 1620 static __inline uint32_t 1621 ar5008_synth_delay(struct athn_softc *sc) 1622 { 1623 uint32_t delay; 1624 1625 delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY); 1626 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B) 1627 delay = (delay * 4) / 22; 1628 else 1629 delay = delay / 10; /* in 100ns steps */ 1630 return (delay); 1631 } 1632 1633 int 1634 ar5008_rf_bus_request(struct athn_softc *sc) 1635 { 1636 int ntries; 1637 1638 /* Request RF Bus grant. */ 1639 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN); 1640 for (ntries = 0; ntries < 10000; ntries++) { 1641 if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN) 1642 return (0); 1643 DELAY(10); 1644 } 1645 DPRINTF(("could not kill baseband Rx")); 1646 return (ETIMEDOUT); 1647 } 1648 1649 void 1650 ar5008_rf_bus_release(struct athn_softc *sc) 1651 { 1652 /* Wait for the synthesizer to settle. */ 1653 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc)); 1654 1655 /* Release the RF Bus grant. */ 1656 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0); 1657 AR_WRITE_BARRIER(sc); 1658 } 1659 1660 void 1661 ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c, 1662 struct ieee80211_channel *extc) 1663 { 1664 uint32_t phy; 1665 1666 if (AR_SREV_9285_10_OR_LATER(sc)) 1667 phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO; 1668 else 1669 phy = 0; 1670 phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 | 1671 AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH; 1672 if (extc != NULL) { 1673 phy |= AR_PHY_FC_DYN2040_EN; 1674 if (extc > c) /* XXX */ 1675 phy |= AR_PHY_FC_DYN2040_PRI_CH; 1676 } 1677 AR_WRITE(sc, AR_PHY_TURBO, phy); 1678 1679 AR_WRITE(sc, AR_2040_MODE, 1680 (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0); 1681 1682 /* Set global transmit timeout. */ 1683 AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25)); 1684 /* Set carrier sense timeout. */ 1685 AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15)); 1686 AR_WRITE_BARRIER(sc); 1687 } 1688 1689 void 1690 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c, 1691 struct ieee80211_channel *extc) 1692 { 1693 uint32_t coeff, exp, man, reg; 1694 1695 /* Set Delta Slope (exponent and mantissa). */ 1696 coeff = (100 << 24) / c->ic_freq; 1697 athn_get_delta_slope(coeff, &exp, &man); 1698 DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man)); 1699 1700 reg = AR_READ(sc, AR_PHY_TIMING3); 1701 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp); 1702 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man); 1703 AR_WRITE(sc, AR_PHY_TIMING3, reg); 1704 1705 /* For Short GI, coeff is 9/10 that of normal coeff. 
*/ 1706 coeff = (9 * coeff) / 10; 1707 athn_get_delta_slope(coeff, &exp, &man); 1708 DPRINTFN(5, ("delta slope coeff exp=%u man=%u\n", exp, man)); 1709 1710 reg = AR_READ(sc, AR_PHY_HALFGI); 1711 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp); 1712 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man); 1713 AR_WRITE(sc, AR_PHY_HALFGI, reg); 1714 AR_WRITE_BARRIER(sc); 1715 } 1716 1717 void 1718 ar5008_enable_antenna_diversity(struct athn_softc *sc) 1719 { 1720 AR_SETBITS(sc, AR_PHY_CCK_DETECT, 1721 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV); 1722 AR_WRITE_BARRIER(sc); 1723 } 1724 1725 void 1726 ar5008_init_baseband(struct athn_softc *sc) 1727 { 1728 uint32_t synth_delay; 1729 1730 synth_delay = ar5008_synth_delay(sc); 1731 /* Activate the PHY (includes baseband activate and synthesizer on). */ 1732 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN); 1733 AR_WRITE_BARRIER(sc); 1734 DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay); 1735 } 1736 1737 void 1738 ar5008_disable_phy(struct athn_softc *sc) 1739 { 1740 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS); 1741 AR_WRITE_BARRIER(sc); 1742 } 1743 1744 void 1745 ar5008_init_chains(struct athn_softc *sc) 1746 { 1747 if (sc->rxchainmask == 0x5 || sc->txchainmask == 0x5) 1748 AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN); 1749 1750 /* Setup chain masks. */ 1751 if (sc->mac_ver <= AR_SREV_VERSION_9160 && 1752 (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5)) { 1753 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7); 1754 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7); 1755 } else { 1756 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->rxchainmask); 1757 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask); 1758 } 1759 AR_WRITE(sc, AR_SELFGEN_MASK, sc->txchainmask); 1760 AR_WRITE_BARRIER(sc); 1761 } 1762 1763 void 1764 ar5008_set_rxchains(struct athn_softc *sc) 1765 { 1766 if (sc->rxchainmask == 0x3 || sc->rxchainmask == 0x5) { 1767 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->rxchainmask); 1768 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->rxchainmask); 1769 AR_WRITE_BARRIER(sc); 1770 } 1771 } 1772 1773 void 1774 ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext) 1775 { 1776 /* Sign-extends 9-bit value (assumes upper bits are zeroes). 
*/ 1777 #define SIGN_EXT(v) (((v) ^ 0x100) - 0x100) 1778 uint32_t reg; 1779 int i; 1780 1781 for (i = 0; i < sc->nrxchains; i++) { 1782 reg = AR_READ(sc, AR_PHY_CCA(i)); 1783 if (AR_SREV_9280_10_OR_LATER(sc)) 1784 nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR); 1785 else 1786 nf[i] = MS(reg, AR_PHY_MINCCA_PWR); 1787 nf[i] = SIGN_EXT(nf[i]); 1788 1789 reg = AR_READ(sc, AR_PHY_EXT_CCA(i)); 1790 if (AR_SREV_9280_10_OR_LATER(sc)) 1791 nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR); 1792 else 1793 nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR); 1794 nf_ext[i] = SIGN_EXT(nf_ext[i]); 1795 } 1796 #undef SIGN_EXT 1797 } 1798 1799 void 1800 ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext) 1801 { 1802 uint32_t reg; 1803 int i; 1804 1805 for (i = 0; i < sc->nrxchains; i++) { 1806 reg = AR_READ(sc, AR_PHY_CCA(i)); 1807 reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]); 1808 AR_WRITE(sc, AR_PHY_CCA(i), reg); 1809 1810 reg = AR_READ(sc, AR_PHY_EXT_CCA(i)); 1811 reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]); 1812 AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg); 1813 } 1814 AR_WRITE_BARRIER(sc); 1815 } 1816 1817 void 1818 ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c) 1819 { 1820 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS]; 1821 int i; 1822 1823 if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) { 1824 /* Noisefloor calibration not finished. */ 1825 return; 1826 } 1827 /* Noisefloor calibration is finished. */ 1828 ar5008_read_noisefloor(sc, nf, nf_ext); 1829 1830 /* Update noisefloor history. */ 1831 for (i = 0; i < sc->nrxchains; i++) { 1832 sc->nf_hist[sc->nf_hist_cur].nf[i] = nf[i]; 1833 sc->nf_hist[sc->nf_hist_cur].nf_ext[i] = nf_ext[i]; 1834 } 1835 if (++sc->nf_hist_cur >= ATHN_NF_CAL_HIST_MAX) 1836 sc->nf_hist_cur = 0; 1837 } 1838 1839 void 1840 ar5008_bb_load_noisefloor(struct athn_softc *sc) 1841 { 1842 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS]; 1843 int i, ntries; 1844 1845 /* Write filtered noisefloor values. */ 1846 for (i = 0; i < sc->nrxchains; i++) { 1847 nf[i] = sc->nf_priv[i] * 2; 1848 nf_ext[i] = sc->nf_ext_priv[i] * 2; 1849 } 1850 ar5008_write_noisefloor(sc, nf, nf_ext); 1851 1852 /* Load filtered noisefloor values into baseband. */ 1853 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF); 1854 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF); 1855 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF); 1856 /* Wait for load to complete. */ 1857 for (ntries = 0; ntries < 1000; ntries++) { 1858 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF)) 1859 break; 1860 DELAY(50); 1861 } 1862 if (ntries == 1000) { 1863 DPRINTF(("failed to load noisefloor values\n")); 1864 return; 1865 } 1866 1867 /* Restore noisefloor values to initial (max) values. 
*/ 1868 for (i = 0; i < AR_MAX_CHAINS; i++) 1869 nf[i] = nf_ext[i] = -50 * 2; 1870 ar5008_write_noisefloor(sc, nf, nf_ext); 1871 } 1872 1873 void 1874 ar5008_noisefloor_calib(struct athn_softc *sc) 1875 { 1876 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF); 1877 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF); 1878 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF); 1879 AR_WRITE_BARRIER(sc); 1880 } 1881 1882 void 1883 ar5008_do_noisefloor_calib(struct athn_softc *sc) 1884 { 1885 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF); 1886 AR_WRITE_BARRIER(sc); 1887 } 1888 1889 void 1890 ar5008_do_calib(struct athn_softc *sc) 1891 { 1892 uint32_t mode, reg; 1893 int log; 1894 1895 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0); 1896 log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2; 1897 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log); 1898 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg); 1899 1900 if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN) 1901 mode = AR_PHY_CALMODE_ADC_GAIN; 1902 else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC) 1903 mode = AR_PHY_CALMODE_ADC_DC_PER; 1904 else /* ATHN_CAL_IQ */ 1905 mode = AR_PHY_CALMODE_IQ; 1906 AR_WRITE(sc, AR_PHY_CALMODE, mode); 1907 1908 DPRINTF(("starting calibration mode=0x%x\n", mode)); 1909 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL); 1910 AR_WRITE_BARRIER(sc); 1911 } 1912 1913 void 1914 ar5008_next_calib(struct athn_softc *sc) 1915 { 1916 /* Check if we have any calibration in progress. */ 1917 if (sc->cur_calib_mask != 0) { 1918 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) & 1919 AR_PHY_TIMING_CTRL4_DO_CAL)) { 1920 /* Calibration completed for current sample. */ 1921 if (sc->cur_calib_mask & ATHN_CAL_ADC_GAIN) 1922 ar5008_calib_adc_gain(sc); 1923 else if (sc->cur_calib_mask & ATHN_CAL_ADC_DC) 1924 ar5008_calib_adc_dc_off(sc); 1925 else /* ATHN_CAL_IQ */ 1926 ar5008_calib_iq(sc); 1927 } 1928 } 1929 } 1930 1931 void 1932 ar5008_calib_iq(struct athn_softc *sc) 1933 { 1934 struct athn_iq_cal *cal; 1935 uint32_t reg, i_coff_denom, q_coff_denom; 1936 int32_t i_coff, q_coff; 1937 int i, iq_corr_neg; 1938 1939 for (i = 0; i < AR_MAX_CHAINS; i++) { 1940 cal = &sc->calib.iq[i]; 1941 1942 /* Accumulate IQ calibration measures (clear on read). */ 1943 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i)); 1944 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i)); 1945 cal->iq_corr_meas += 1946 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i)); 1947 } 1948 if (!AR_SREV_9280_10_OR_LATER(sc) && 1949 ++sc->calib.nsamples < AR_CAL_SAMPLES) { 1950 /* Not enough samples accumulated, continue. */ 1951 ar5008_do_calib(sc); 1952 return; 1953 } 1954 1955 for (i = 0; i < sc->nrxchains; i++) { 1956 cal = &sc->calib.iq[i]; 1957 1958 if (cal->pwr_meas_q == 0) 1959 continue; 1960 1961 if ((iq_corr_neg = cal->iq_corr_meas < 0)) 1962 cal->iq_corr_meas = -cal->iq_corr_meas; 1963 1964 i_coff_denom = 1965 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128; 1966 q_coff_denom = cal->pwr_meas_q / 64; 1967 1968 if (i_coff_denom == 0 || q_coff_denom == 0) 1969 continue; /* Prevents division by zero. */ 1970 1971 i_coff = cal->iq_corr_meas / i_coff_denom; 1972 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64; 1973 1974 /* Negate i_coff if iq_corr_meas is positive. */ 1975 if (!iq_corr_neg) 1976 i_coff = 0x40 - (i_coff & 0x3f); 1977 if (q_coff > 15) 1978 q_coff = 15; 1979 else if (q_coff <= -16) 1980 q_coff = -16; /* XXX Linux has a bug here? 
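* The clamp keeps q_coff within a signed 5-bit range, presumably the width of
* the IQCORR_Q_Q_COFF field written below, just as the 0x3f mask above
* truncates i_coff to 6 bits.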
*/ 1981 1982 DPRINTFN(2, ("IQ calibration for chain %d\n", i)); 1983 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i)); 1984 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff); 1985 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff); 1986 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg); 1987 } 1988 1989 /* Apply new settings. */ 1990 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, 1991 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE); 1992 AR_WRITE_BARRIER(sc); 1993 1994 /* IQ calibration done. */ 1995 sc->cur_calib_mask &= ~ATHN_CAL_IQ; 1996 memset(&sc->calib, 0, sizeof(sc->calib)); 1997 } 1998 1999 void 2000 ar5008_calib_adc_gain(struct athn_softc *sc) 2001 { 2002 struct athn_adc_cal *cal; 2003 uint32_t reg, gain_mismatch_i, gain_mismatch_q; 2004 int i; 2005 2006 for (i = 0; i < AR_MAX_CHAINS; i++) { 2007 cal = &sc->calib.adc_gain[i]; 2008 2009 /* Accumulate ADC gain measures (clear on read). */ 2010 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i)); 2011 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i)); 2012 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i)); 2013 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i)); 2014 } 2015 if (!AR_SREV_9280_10_OR_LATER(sc) && 2016 ++sc->calib.nsamples < AR_CAL_SAMPLES) { 2017 /* Not enough samples accumulated, continue. */ 2018 ar5008_do_calib(sc); 2019 return; 2020 } 2021 2022 for (i = 0; i < sc->nrxchains; i++) { 2023 cal = &sc->calib.adc_gain[i]; 2024 2025 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0) 2026 continue; /* Prevents division by zero. */ 2027 2028 gain_mismatch_i = 2029 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i; 2030 gain_mismatch_q = 2031 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q; 2032 2033 DPRINTFN(2, ("ADC gain calibration for chain %d\n", i)); 2034 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i)); 2035 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i); 2036 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q); 2037 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg); 2038 } 2039 2040 /* Apply new settings. */ 2041 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0), 2042 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE); 2043 AR_WRITE_BARRIER(sc); 2044 2045 /* ADC gain calibration done. */ 2046 sc->cur_calib_mask &= ~ATHN_CAL_ADC_GAIN; 2047 memset(&sc->calib, 0, sizeof(sc->calib)); 2048 } 2049 2050 void 2051 ar5008_calib_adc_dc_off(struct athn_softc *sc) 2052 { 2053 struct athn_adc_cal *cal; 2054 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q; 2055 uint32_t reg; 2056 int count, i; 2057 2058 for (i = 0; i < AR_MAX_CHAINS; i++) { 2059 cal = &sc->calib.adc_dc_offset[i]; 2060 2061 /* Accumulate ADC DC offset measures (clear on read). */ 2062 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i)); 2063 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i)); 2064 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i)); 2065 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i)); 2066 } 2067 if (!AR_SREV_9280_10_OR_LATER(sc) && 2068 ++sc->calib.nsamples < AR_CAL_SAMPLES) { 2069 /* Not enough samples accumulated, continue. 
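* Pre-AR9280 parts use an IQCAL_LOG_COUNT_MAX of 2 (see ar5008_do_calib), so
* each pass covers fewer samples and AR_CAL_SAMPLES passes are accumulated
* before the correction is computed; the 'count' divisor below is scaled to
* match.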
*/ 2070 ar5008_do_calib(sc); 2071 return; 2072 } 2073 2074 if (AR_SREV_9280_10_OR_LATER(sc)) 2075 count = (1 << (10 + 5)); 2076 else 2077 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES; 2078 for (i = 0; i < sc->nrxchains; i++) { 2079 cal = &sc->calib.adc_dc_offset[i]; 2080 2081 dc_offset_mismatch_i = 2082 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count; 2083 dc_offset_mismatch_q = 2084 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count; 2085 2086 DPRINTFN(2, ("ADC DC offset calibration for chain %d\n", i)); 2087 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i)); 2088 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC, 2089 dc_offset_mismatch_q); 2090 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC, 2091 dc_offset_mismatch_i); 2092 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg); 2093 } 2094 2095 /* Apply new settings. */ 2096 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0), 2097 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE); 2098 AR_WRITE_BARRIER(sc); 2099 2100 /* ADC DC offset calibration done. */ 2101 sc->cur_calib_mask &= ~ATHN_CAL_ADC_DC; 2102 memset(&sc->calib, 0, sizeof(sc->calib)); 2103 } 2104 2105 void 2106 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT]) 2107 { 2108 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1, 2109 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 | 2110 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 | 2111 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 | 2112 (power[ATHN_POWER_OFDM6 ] & 0x3f)); 2113 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2, 2114 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 | 2115 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 | 2116 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 | 2117 (power[ATHN_POWER_OFDM24 ] & 0x3f)); 2118 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3, 2119 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 | 2120 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 | 2121 (power[ATHN_POWER_XR ] & 0x3f) << 8 | 2122 (power[ATHN_POWER_CCK1_LP ] & 0x3f)); 2123 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4, 2124 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 | 2125 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 | 2126 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 | 2127 (power[ATHN_POWER_CCK55_LP] & 0x3f)); 2128 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5, 2129 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 | 2130 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 | 2131 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 | 2132 (power[ATHN_POWER_HT20(0) ] & 0x3f)); 2133 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6, 2134 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 | 2135 (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 | 2136 (power[ATHN_POWER_HT20(5) ] & 0x3f) << 8 | 2137 (power[ATHN_POWER_HT20(4) ] & 0x3f)); 2138 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7, 2139 (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 | 2140 (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 | 2141 (power[ATHN_POWER_HT40(1) ] & 0x3f) << 8 | 2142 (power[ATHN_POWER_HT40(0) ] & 0x3f)); 2143 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8, 2144 (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 | 2145 (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 | 2146 (power[ATHN_POWER_HT40(5) ] & 0x3f) << 8 | 2147 (power[ATHN_POWER_HT40(4) ] & 0x3f)); 2148 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9, 2149 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 | 2150 (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 | 2151 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 | 2152 (power[ATHN_POWER_CCK_DUP ] & 0x3f)); 2153 AR_WRITE_BARRIER(sc); 2154 } 2155 2156 void 2157 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin) 2158 { 2159 uint32_t mask[4], reg; 2160 uint8_t m[62], p[62]; /* XXX use bit arrays? */ 2161 int i, bit, cur; 2162 2163 /* Compute pilot mask. 
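* Each of the four words carries 30 mask bits, one per 100-unit step from
* -6000 to +6000 on the same scale as 'bin' (the 0 entry is skipped); a bit
* is set when that step falls within 100 units of the spur offset.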
*/ 2164 cur = -6000; 2165 for (i = 0; i < 4; i++) { 2166 mask[i] = 0; 2167 for (bit = 0; bit < 30; bit++) { 2168 if (abs(cur - bin) < 100) 2169 mask[i] |= 1 << bit; 2170 cur += 100; 2171 } 2172 if (cur == 0) /* Skip entry "0". */ 2173 cur = 100; 2174 } 2175 /* Write entries from -6000 to -3100. */ 2176 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]); 2177 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]); 2178 /* Write entries from -3000 to -100. */ 2179 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]); 2180 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]); 2181 /* Write entries from 100 to 3000. */ 2182 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]); 2183 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]); 2184 /* Write entries from 3100 to 6000. */ 2185 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]); 2186 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]); 2187 2188 /* Compute viterbi mask. */ 2189 for (cur = 6100; cur >= 0; cur -= 100) 2190 p[+cur / 100] = abs(cur - bin) < 75; 2191 for (cur = -100; cur >= -6100; cur -= 100) 2192 m[-cur / 100] = abs(cur - bin) < 75; 2193 2194 /* Write viterbi mask (XXX needs to be reworked). */ 2195 reg = 2196 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 | 2197 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 | 2198 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 | 2199 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0; 2200 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg); 2201 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg); 2202 2203 /* XXX m[48] should be m[38] ? */ 2204 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 | 2205 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 | 2206 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 | 2207 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0; 2208 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg); 2209 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg); 2210 2211 /* XXX This one is weird too. 
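* (m[16], m[18], m[20], m[22] and m[24] are each packed twice while m[17],
* m[19], m[21] and m[23] are never used, so the register below does not get
* a straight 16..30 mapping like its neighbours.)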
*/ 2212 reg = 2213 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 | 2214 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 | 2215 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 | 2216 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0; 2217 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg); 2218 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg); 2219 2220 reg = 2221 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 | 2222 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 | 2223 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 | 2224 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0; 2225 AR_WRITE(sc, AR_PHY_MASK_CTL, reg); 2226 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg); 2227 2228 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 | 2229 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 | 2230 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 | 2231 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0; 2232 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg); 2233 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg); 2234 2235 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 | 2236 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 | 2237 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 | 2238 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0; 2239 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg); 2240 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg); 2241 2242 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 | 2243 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 | 2244 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 | 2245 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0; 2246 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg); 2247 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg); 2248 2249 reg = 2250 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 | 2251 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 | 2252 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 | 2253 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0; 2254 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg); 2255 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg); 2256 AR_WRITE_BARRIER(sc); 2257 } 2258 2259 void 2260 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c, 2261 struct ieee80211_channel *extc) 2262 { 2263 struct athn_ops *ops = &sc->ops; 2264 const struct athn_ini *ini = sc->ini; 2265 const uint32_t *pvals; 2266 uint32_t reg; 2267 int i; 2268 2269 AR_WRITE(sc, AR_PHY(0), 0x00000007); 2270 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO); 2271 2272 if (!AR_SINGLE_CHIP(sc)) 2273 ar5416_reset_addac(sc, c); 2274 2275 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC); 2276 2277 /* First initialization step (depends on channel band/bandwidth). */ 2278 if (extc != NULL) { 2279 if (IEEE80211_IS_CHAN_2GHZ(c)) 2280 pvals = ini->vals_2g40; 2281 else 2282 pvals = ini->vals_5g40; 2283 } else { 2284 if (IEEE80211_IS_CHAN_2GHZ(c)) 2285 pvals = ini->vals_2g20; 2286 else 2287 pvals = ini->vals_5g20; 2288 } 2289 DPRINTFN(4, ("writing modal init vals\n")); 2290 for (i = 0; i < ini->nregs; i++) { 2291 uint32_t val = pvals[i]; 2292 2293 /* Fix AR_AN_TOP2 initialization value if required. 
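* Boards flagged with ATHN_FLAG_AN_TOP2_FIXUP must not have the PWDCLKIND
* bit set, so it is cleared from the value taken from the initialization
* table before the register write.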
*/ 2294 if (ini->regs[i] == AR_AN_TOP2 && 2295 (sc->flags & ATHN_FLAG_AN_TOP2_FIXUP)) 2296 val &= ~AR_AN_TOP2_PWDCLKIND; 2297 AR_WRITE(sc, ini->regs[i], val); 2298 if (AR_IS_ANALOG_REG(ini->regs[i])) { 2299 AR_WRITE_BARRIER(sc); 2300 DELAY(100); 2301 } 2302 if ((i & 0x1f) == 0) 2303 DELAY(1); 2304 } 2305 AR_WRITE_BARRIER(sc); 2306 2307 if (sc->rx_gain != NULL) 2308 ar9280_reset_rx_gain(sc, c); 2309 if (sc->tx_gain != NULL) 2310 ar9280_reset_tx_gain(sc, c); 2311 2312 if (AR_SREV_9271_10(sc)) { 2313 AR_WRITE(sc, AR_PHY(68), 0x30002311); 2314 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001); 2315 } 2316 AR_WRITE_BARRIER(sc); 2317 2318 /* Second initialization step (common to all channels). */ 2319 DPRINTFN(4, ("writing common init vals\n")); 2320 for (i = 0; i < ini->ncmregs; i++) { 2321 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]); 2322 if (AR_IS_ANALOG_REG(ini->cmregs[i])) { 2323 AR_WRITE_BARRIER(sc); 2324 DELAY(100); 2325 } 2326 if ((i & 0x1f) == 0) 2327 DELAY(1); 2328 } 2329 AR_WRITE_BARRIER(sc); 2330 2331 if (!AR_SINGLE_CHIP(sc)) 2332 ar5416_reset_bb_gain(sc, c); 2333 2334 if (IEEE80211_IS_CHAN_5GHZ(c) && 2335 (sc->flags & ATHN_FLAG_FAST_PLL_CLOCK)) { 2336 /* Update modal values for fast PLL clock. */ 2337 if (extc != NULL) 2338 pvals = ini->fastvals_5g40; 2339 else 2340 pvals = ini->fastvals_5g20; 2341 DPRINTFN(4, ("writing fast pll clock init vals\n")); 2342 for (i = 0; i < ini->nfastregs; i++) { 2343 AR_WRITE(sc, ini->fastregs[i], pvals[i]); 2344 if (AR_IS_ANALOG_REG(ini->fastregs[i])) { 2345 AR_WRITE_BARRIER(sc); 2346 DELAY(100); 2347 } 2348 if ((i & 0x1f) == 0) 2349 DELAY(1); 2350 } 2351 } 2352 2353 /* 2354 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted 2355 * descriptor status. 2356 */ 2357 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT); 2358 2359 /* Hardware workarounds for occasional Rx data corruption. */ 2360 if (AR_SREV_9280_10_OR_LATER(sc)) { 2361 reg = AR_READ(sc, AR_PCU_MISC_MODE2); 2362 if (!AR_SREV_9271(sc)) 2363 reg &= ~AR_PCU_MISC_MODE2_HWWAR1; 2364 if (AR_SREV_9287_10_OR_LATER(sc)) 2365 reg &= ~AR_PCU_MISC_MODE2_HWWAR2; 2366 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg); 2367 2368 } else if (AR_SREV_5416_20_OR_LATER(sc)) { 2369 /* Disable baseband clock gating. */ 2370 AR_WRITE(sc, AR_PHY(651), 0x11); 2371 2372 if (AR_SREV_9160(sc)) { 2373 /* Disable RIFS search to fix baseband hang. */ 2374 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS, 2375 AR_PHY_RIFS_INIT_DELAY_M); 2376 } 2377 } 2378 AR_WRITE_BARRIER(sc); 2379 2380 ar5008_set_phy(sc, c, extc); 2381 ar5008_init_chains(sc); 2382 2383 if (sc->flags & ATHN_FLAG_OLPC) { 2384 extern int ticks; 2385 sc->olpc_ticks = ticks; 2386 ops->olpc_init(sc); 2387 } 2388 2389 ops->set_txpower(sc, c, extc); 2390 2391 if (!AR_SINGLE_CHIP(sc)) 2392 ar5416_rf_reset(sc, c); 2393 } 2394 2395 uint8_t 2396 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg, 2397 int nicepts) 2398 { 2399 uint8_t vpd; 2400 int i, lo, hi; 2401 2402 for (i = 0; i < nicepts; i++) 2403 if (pwrPdg[i] > pwr) 2404 break; 2405 hi = i; 2406 lo = hi - 1; 2407 if (lo == -1) 2408 lo = hi; 2409 else if (hi == nicepts) 2410 hi = lo; 2411 2412 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo], 2413 pwrPdg[hi], vpdPdg[hi]); 2414 return (vpd); 2415 } 2416 2417 void 2418 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin, 2419 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains, 2420 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs) 2421 { 2422 #define DB(x) ((x) / 2) /* Convert half dB to dB. 
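* ROM calibration powers are stored in half-dB steps, so e.g. a stored value
* of 46 corresponds to 23 dB after this conversion.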
*/ 2423 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK]; 2424 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr; 2425 uint8_t lovpd, hivpd, boundary; 2426 int16_t ss, delta, vpdstep, val; 2427 int i, j, npdadcs, nvpds, maxidx, tgtidx; 2428 2429 /* Compute min and max power in half dB for each pdGain. */ 2430 for (i = 0; i < nxpdgains; i++) { 2431 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]); 2432 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1], 2433 hipier->pwr[i][nicepts - 1]); 2434 } 2435 2436 /* Fill phase domain analog-to-digital converter (PDADC) table. */ 2437 npdadcs = 0; 2438 for (i = 0; i < nxpdgains; i++) { 2439 if (i != nxpdgains - 1) 2440 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2; 2441 else 2442 boundaries[i] = DB(maxpwr[i]); 2443 if (boundaries[i] > AR_MAX_RATE_POWER) 2444 boundaries[i] = AR_MAX_RATE_POWER; 2445 2446 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) { 2447 /* Fix the gain delta (AR5416 1.0 only). */ 2448 delta = boundaries[0] - 23; 2449 boundaries[0] = 23; 2450 } else 2451 delta = 0; 2452 2453 /* Find starting index for this pdGain. */ 2454 if (i != 0) { 2455 ss = boundaries[i - 1] - DB(minpwr[i]) - 2456 overlap + 1 + delta; 2457 } else if (AR_SREV_9280_10_OR_LATER(sc)) { 2458 ss = -DB(minpwr[i]); 2459 } else 2460 ss = 0; 2461 2462 /* Compute Vpd table for this pdGain. */ 2463 nvpds = DB(maxpwr[i] - minpwr[i]) + 1; 2464 memset(vpd, 0, sizeof(vpd)); 2465 pwr = minpwr[i]; 2466 for (j = 0; j < nvpds; j++) { 2467 /* Get lower and higher Vpd. */ 2468 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i], 2469 lopier->vpd[i], nicepts); 2470 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i], 2471 hipier->vpd[i], nicepts); 2472 2473 /* Interpolate the final Vpd. */ 2474 vpd[j] = athn_interpolate(fbin, 2475 lopier->fbin, lovpd, hipier->fbin, hivpd); 2476 2477 pwr += 2; /* In half dB. */ 2478 } 2479 2480 /* Extrapolate data for ss < 0. */ 2481 if (vpd[1] > vpd[0]) 2482 vpdstep = vpd[1] - vpd[0]; 2483 else 2484 vpdstep = 1; 2485 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) { 2486 val = vpd[0] + ss * vpdstep; 2487 pdadcs[npdadcs++] = MAX(val, 0); 2488 ss++; 2489 } 2490 2491 tgtidx = boundaries[i] + overlap - DB(minpwr[i]); 2492 maxidx = MIN(tgtidx, nvpds); 2493 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1) 2494 pdadcs[npdadcs++] = vpd[ss++]; 2495 2496 if (tgtidx < maxidx) 2497 continue; 2498 2499 /* Extrapolate data for maxidx <= ss <= tgtidx. */ 2500 if (vpd[nvpds - 1] > vpd[nvpds - 2]) 2501 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2]; 2502 else 2503 vpdstep = 1; 2504 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) { 2505 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep; 2506 pdadcs[npdadcs++] = MIN(val, 255); 2507 ss++; 2508 } 2509 } 2510 2511 /* Fill remaining PDADC and boundaries entries. */ 2512 if (AR_SREV_9285(sc)) 2513 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT; 2514 else /* Fill with latest. */ 2515 boundary = boundaries[nxpdgains - 1]; 2516 2517 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++) 2518 boundaries[nxpdgains] = boundary; 2519 2520 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++) 2521 pdadcs[npdadcs] = pdadcs[npdadcs - 1]; 2522 #undef DB 2523 } 2524 2525 void 2526 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c, 2527 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans, 2528 uint8_t tpow[4]) 2529 { 2530 uint8_t fbin; 2531 int i, lo, hi; 2532 2533 /* Find interval (lower and upper indices). 
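* The target-power entries are scanned for the pair of calibrated channels
* bracketing the operating channel's frequency bin; when the bin falls
* before the first or past the last valid entry, lo and hi collapse onto
* the same entry and the interpolation below degenerates to that entry's
* values.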
*/ 2534 fbin = athn_chan2fbin(c); 2535 for (i = 0; i < nchans; i++) { 2536 if (tgt[i].bChannel == AR_BCHAN_UNUSED || 2537 tgt[i].bChannel > fbin) 2538 break; 2539 } 2540 hi = i; 2541 lo = hi - 1; 2542 if (lo == -1) 2543 lo = hi; 2544 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED) 2545 hi = lo; 2546 2547 /* Interpolate values. */ 2548 for (i = 0; i < 4; i++) { 2549 tpow[i] = athn_interpolate(fbin, 2550 tgt[lo].bChannel, tgt[lo].tPow2x[i], 2551 tgt[hi].bChannel, tgt[hi].tPow2x[i]); 2552 } 2553 /* XXX Apply conformance testing limit. */ 2554 } 2555 2556 void 2557 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c, 2558 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans, 2559 uint8_t tpow[8]) 2560 { 2561 uint8_t fbin; 2562 int i, lo, hi; 2563 2564 /* Find interval (lower and upper indices). */ 2565 fbin = athn_chan2fbin(c); 2566 for (i = 0; i < nchans; i++) { 2567 if (tgt[i].bChannel == AR_BCHAN_UNUSED || 2568 tgt[i].bChannel > fbin) 2569 break; 2570 } 2571 hi = i; 2572 lo = hi - 1; 2573 if (lo == -1) 2574 lo = hi; 2575 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED) 2576 hi = lo; 2577 2578 /* Interpolate values. */ 2579 for (i = 0; i < 8; i++) { 2580 tpow[i] = athn_interpolate(fbin, 2581 tgt[lo].bChannel, tgt[lo].tPow2x[i], 2582 tgt[hi].bChannel, tgt[hi].tPow2x[i]); 2583 } 2584 /* XXX Apply conformance testing limit. */ 2585 } 2586 2587 /* 2588 * Adaptive noise immunity. 2589 */ 2590 void 2591 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level) 2592 { 2593 int high = level == 4; 2594 uint32_t reg; 2595 2596 reg = AR_READ(sc, AR_PHY_DESIRED_SZ); 2597 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55); 2598 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg); 2599 2600 reg = AR_READ(sc, AR_PHY_AGC_CTL1); 2601 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64); 2602 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14); 2603 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg); 2604 2605 reg = AR_READ(sc, AR_PHY_FIND_SIG); 2606 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? 
-80 : -78); 2607 AR_WRITE(sc, AR_PHY_FIND_SIG, reg); 2608 2609 AR_WRITE_BARRIER(sc); 2610 } 2611 2612 void 2613 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc) 2614 { 2615 uint32_t reg; 2616 2617 reg = AR_READ(sc, AR_PHY_SFCORR_LOW); 2618 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50); 2619 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40); 2620 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48); 2621 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg); 2622 2623 reg = AR_READ(sc, AR_PHY_SFCORR); 2624 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77); 2625 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64); 2626 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16); 2627 AR_WRITE(sc, AR_PHY_SFCORR, reg); 2628 2629 reg = AR_READ(sc, AR_PHY_SFCORR_EXT); 2630 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50); 2631 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40); 2632 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77); 2633 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64); 2634 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg); 2635 2636 AR_SETBITS(sc, AR_PHY_SFCORR_LOW, 2637 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW); 2638 AR_WRITE_BARRIER(sc); 2639 } 2640 2641 void 2642 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc) 2643 { 2644 uint32_t reg; 2645 2646 reg = AR_READ(sc, AR_PHY_SFCORR_LOW); 2647 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127); 2648 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127); 2649 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63); 2650 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg); 2651 2652 reg = AR_READ(sc, AR_PHY_SFCORR); 2653 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127); 2654 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127); 2655 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31); 2656 AR_WRITE(sc, AR_PHY_SFCORR, reg); 2657 2658 reg = AR_READ(sc, AR_PHY_SFCORR_EXT); 2659 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127); 2660 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127); 2661 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127); 2662 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127); 2663 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg); 2664 2665 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW, 2666 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW); 2667 AR_WRITE_BARRIER(sc); 2668 } 2669 2670 void 2671 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high) 2672 { 2673 uint32_t reg; 2674 2675 reg = AR_READ(sc, AR_PHY_CCK_DETECT); 2676 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8); 2677 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg); 2678 AR_WRITE_BARRIER(sc); 2679 } 2680 2681 void 2682 ar5008_set_firstep_level(struct athn_softc *sc, int level) 2683 { 2684 uint32_t reg; 2685 2686 reg = AR_READ(sc, AR_PHY_FIND_SIG); 2687 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4); 2688 AR_WRITE(sc, AR_PHY_FIND_SIG, reg); 2689 AR_WRITE_BARRIER(sc); 2690 } 2691 2692 void 2693 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level) 2694 { 2695 uint32_t reg; 2696 2697 reg = AR_READ(sc, AR_PHY_TIMING5); 2698 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2); 2699 AR_WRITE(sc, AR_PHY_TIMING5, reg); 2700 AR_WRITE_BARRIER(sc); 2701 } 2702
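/*
 * Illustrative summary of the adaptive noise immunity scaling above (values
 * taken from the routines in this file, not an additional interface): a
 * firstep level N is programmed as AR_PHY_FIND_SIG_FIRSTEP = N * 4, a spur
 * immunity level N as AR_PHY_TIMING5_CYCPWR_THR1 = (N + 1) * 2, and noise
 * immunity level 4 selects the "high" thresholds (-62/-70/-12/-80) while
 * lower levels use -55/-64/-14/-78.
 */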