1 /* $NetBSD: arn5008.c,v 1.20 2024/07/05 04:31:50 rin Exp $ */
2 /* $OpenBSD: ar5008.c,v 1.21 2012/08/25 12:14:31 kettenis Exp $ */
3
4 /*-
5 * Copyright (c) 2009 Damien Bergamini <damien.bergamini@free.fr>
6 * Copyright (c) 2008-2009 Atheros Communications Inc.
7 *
8 * Permission to use, copy, modify, and/or distribute this software for any
9 * purpose with or without fee is hereby granted, provided that the above
10 * copyright notice and this permission notice appear in all copies.
11 *
12 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
13 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
14 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
15 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
16 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
17 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
18 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19 */
20
21 /*
22 * Driver for Atheros 802.11a/g/n chipsets.
23 * Routines common to AR5008, AR9001 and AR9002 families.
24 */
25
26 #include <sys/cdefs.h>
27 __KERNEL_RCSID(0, "$NetBSD: arn5008.c,v 1.20 2024/07/05 04:31:50 rin Exp $");
28
29 #include <sys/param.h>
30 #include <sys/sockio.h>
31 #include <sys/mbuf.h>
32 #include <sys/kernel.h>
33 #include <sys/socket.h>
34 #include <sys/systm.h>
35 #include <sys/malloc.h>
36 #include <sys/queue.h>
37 #include <sys/conf.h>
38 #include <sys/device.h>
39
40 #include <sys/bus.h>
41 #include <sys/endian.h>
42 #include <sys/intr.h>
43
44 #include <net/bpf.h>
45 #include <net/if.h>
46 #include <net/if_arp.h>
47 #include <net/if_dl.h>
48 #include <net/if_ether.h>
49 #include <net/if_media.h>
50 #include <net/if_types.h>
51
52 #include <netinet/in.h>
53 #include <netinet/in_systm.h>
54 #include <netinet/in_var.h>
55 #include <netinet/ip.h>
56
57 #include <net80211/ieee80211_var.h>
58 #include <net80211/ieee80211_amrr.h>
59 #include <net80211/ieee80211_radiotap.h>
60
61 #include <dev/ic/athnreg.h>
62 #include <dev/ic/athnvar.h>
63
64 #include <dev/ic/arn5008reg.h>
65 #include <dev/ic/arn5008.h>
66 #include <dev/ic/arn5416.h>
67 #include <dev/ic/arn9280.h>
68
69 #define Static static
70
71 Static void ar5008_calib_adc_dc_off(struct athn_softc *);
72 Static void ar5008_calib_adc_gain(struct athn_softc *);
73 Static void ar5008_calib_iq(struct athn_softc *);
74 Static void ar5008_disable_ofdm_weak_signal(struct athn_softc *);
75 Static void ar5008_disable_phy(struct athn_softc *);
76 Static int ar5008_dma_alloc(struct athn_softc *);
77 Static void ar5008_dma_free(struct athn_softc *);
78 Static void ar5008_do_calib(struct athn_softc *);
79 Static void ar5008_do_noisefloor_calib(struct athn_softc *);
80 Static void ar5008_enable_antenna_diversity(struct athn_softc *);
81 Static void ar5008_enable_ofdm_weak_signal(struct athn_softc *);
82 Static uint8_t ar5008_get_vpd(uint8_t, const uint8_t *, const uint8_t *, int);
83 Static void ar5008_gpio_config_input(struct athn_softc *, int);
84 Static void ar5008_gpio_config_output(struct athn_softc *, int, int);
85 Static int ar5008_gpio_read(struct athn_softc *, int);
86 Static void ar5008_gpio_write(struct athn_softc *, int, int);
87 Static void ar5008_hw_init(struct athn_softc *, struct ieee80211_channel *,
88 struct ieee80211_channel *);
89 Static void ar5008_init_baseband(struct athn_softc *);
90 Static void ar5008_init_chains(struct athn_softc *);
91 Static int ar5008_intr_status(struct athn_softc *);
92 Static int ar5008_intr(struct athn_softc *);
93 Static void ar5008_next_calib(struct athn_softc *);
94 Static int ar5008_read_eep_word(struct athn_softc *, uint32_t,
95 uint16_t *);
96 Static int ar5008_read_rom(struct athn_softc *);
97 Static void ar5008_rf_bus_release(struct athn_softc *);
98 Static int ar5008_rf_bus_request(struct athn_softc *);
99 Static void ar5008_rfsilent_init(struct athn_softc *);
100 Static int ar5008_rx_alloc(struct athn_softc *);
101 Static void ar5008_rx_enable(struct athn_softc *);
102 Static void ar5008_rx_free(struct athn_softc *);
103 Static void ar5008_rx_intr(struct athn_softc *);
104 Static void ar5008_rx_radiotap(struct athn_softc *, struct mbuf *,
105 struct ar_rx_desc *);
106 Static void ar5008_set_cck_weak_signal(struct athn_softc *, int);
107 Static void ar5008_set_delta_slope(struct athn_softc *,
108 struct ieee80211_channel *, struct ieee80211_channel *);
109 Static void ar5008_set_firstep_level(struct athn_softc *, int);
110 Static void ar5008_set_noise_immunity_level(struct athn_softc *, int);
111 Static void ar5008_set_phy(struct athn_softc *, struct ieee80211_channel *,
112 struct ieee80211_channel *);
113 Static void ar5008_set_rf_mode(struct athn_softc *,
114 struct ieee80211_channel *);
115 Static void ar5008_set_rxchains(struct athn_softc *);
116 Static void ar5008_set_spur_immunity_level(struct athn_softc *, int);
117 Static void ar5008_swap_rom(struct athn_softc *);
118 Static int ar5008_swba_intr(struct athn_softc *);
119 Static int ar5008_tx(struct athn_softc *, struct mbuf *,
120 struct ieee80211_node *, int);
121 Static int ar5008_tx_alloc(struct athn_softc *);
122 Static void ar5008_tx_free(struct athn_softc *);
123 Static void ar5008_tx_intr(struct athn_softc *);
124 Static int ar5008_tx_process(struct athn_softc *, int);
125
126 #ifdef notused
127 Static void ar5008_bb_load_noisefloor(struct athn_softc *);
128 Static void ar5008_get_noisefloor(struct athn_softc *,
129 struct ieee80211_channel *);
130 Static void ar5008_noisefloor_calib(struct athn_softc *);
131 Static void ar5008_read_noisefloor(struct athn_softc *, int16_t *,
132 int16_t *);
133 Static void ar5008_write_noisefloor(struct athn_softc *, int16_t *,
134 int16_t *);
135 #endif /* notused */
136
137 // bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
138
139 /*
140 * XXX: see if_iwn.c:MCLGETIalt() for a better solution.
141 */
142 static struct mbuf *
143 MCLGETI(struct athn_softc *sc __unused, int how,
144 struct ifnet *ifp __unused, u_int size)
145 {
146 struct mbuf *m;
147
148 MGETHDR(m, how, MT_DATA);
149 if (m == NULL)
150 return NULL;
151
152 MEXTMALLOC(m, size, how);
153 if ((m->m_flags & M_EXT) == 0) {
154 m_freem(m);
155 return NULL;
156 }
157 return m;
158 }
159
160 PUBLIC int
161 ar5008_attach(struct athn_softc *sc)
162 {
163 struct athn_ops *ops = &sc->sc_ops;
164 struct ieee80211com *ic = &sc->sc_ic;
165 struct ar_base_eep_header *base;
166 uint8_t eep_ver, kc_entries_log;
167 int error;
168
169 /* Set callbacks for AR5008, AR9001 and AR9002 families. */
170 ops->gpio_read = ar5008_gpio_read;
171 ops->gpio_write = ar5008_gpio_write;
172 ops->gpio_config_input = ar5008_gpio_config_input;
173 ops->gpio_config_output = ar5008_gpio_config_output;
174 ops->rfsilent_init = ar5008_rfsilent_init;
175
176 ops->dma_alloc = ar5008_dma_alloc;
177 ops->dma_free = ar5008_dma_free;
178 ops->rx_enable = ar5008_rx_enable;
179 ops->intr_status = ar5008_intr_status;
180 ops->intr = ar5008_intr;
181 ops->tx = ar5008_tx;
182
183 ops->set_rf_mode = ar5008_set_rf_mode;
184 ops->rf_bus_request = ar5008_rf_bus_request;
185 ops->rf_bus_release = ar5008_rf_bus_release;
186 ops->set_phy = ar5008_set_phy;
187 ops->set_delta_slope = ar5008_set_delta_slope;
188 ops->enable_antenna_diversity = ar5008_enable_antenna_diversity;
189 ops->init_baseband = ar5008_init_baseband;
190 ops->disable_phy = ar5008_disable_phy;
191 ops->set_rxchains = ar5008_set_rxchains;
192 ops->noisefloor_calib = ar5008_do_noisefloor_calib;
193 ops->do_calib = ar5008_do_calib;
194 ops->next_calib = ar5008_next_calib;
195 ops->hw_init = ar5008_hw_init;
196
197 ops->set_noise_immunity_level = ar5008_set_noise_immunity_level;
198 ops->enable_ofdm_weak_signal = ar5008_enable_ofdm_weak_signal;
199 ops->disable_ofdm_weak_signal = ar5008_disable_ofdm_weak_signal;
200 ops->set_cck_weak_signal = ar5008_set_cck_weak_signal;
201 ops->set_firstep_level = ar5008_set_firstep_level;
202 ops->set_spur_immunity_level = ar5008_set_spur_immunity_level;
203
204 /* Set MAC registers offsets. */
205 sc->sc_obs_off = AR_OBS;
206 sc->sc_gpio_input_en_off = AR_GPIO_INPUT_EN_VAL;
207
208 if (!(sc->sc_flags & ATHN_FLAG_PCIE))
209 athn_config_nonpcie(sc);
210 else
211 athn_config_pcie(sc);
212
213 /* Read entire ROM content in memory. */
214 if ((error = ar5008_read_rom(sc)) != 0) {
215 aprint_error_dev(sc->sc_dev, "could not read ROM\n");
216 return error;
217 }
218
219 /* Get RF revision. */
220 sc->sc_rf_rev = ar5416_get_rf_rev(sc);
221
222 base = sc->sc_eep;
223 eep_ver = (base->version >> 12) & 0xf;
224 sc->sc_eep_rev = (base->version & 0xfff);
225 if (eep_ver != AR_EEP_VER || sc->sc_eep_rev == 0) {
226 aprint_error_dev(sc->sc_dev, "unsupported ROM version %d.%d\n",
227 eep_ver, sc->sc_eep_rev);
228 return EINVAL;
229 }
230
231 if (base->opCapFlags & AR_OPFLAGS_11A)
232 sc->sc_flags |= ATHN_FLAG_11A;
233 if (base->opCapFlags & AR_OPFLAGS_11G)
234 sc->sc_flags |= ATHN_FLAG_11G;
235 if (base->opCapFlags & AR_OPFLAGS_11N)
236 sc->sc_flags |= ATHN_FLAG_11N;
237
238 IEEE80211_ADDR_COPY(ic->ic_myaddr, base->macAddr);
239
240 /* Check if we have a hardware radio switch. */
241 if (base->rfSilent & AR_EEP_RFSILENT_ENABLED) {
242 sc->sc_flags |= ATHN_FLAG_RFSILENT;
243 /* Get GPIO pin used by hardware radio switch. */
244 sc->sc_rfsilent_pin = MS(base->rfSilent,
245 AR_EEP_RFSILENT_GPIO_SEL);
246 /* Get polarity of hardware radio switch. */
247 if (base->rfSilent & AR_EEP_RFSILENT_POLARITY)
248 sc->sc_flags |= ATHN_FLAG_RFSILENT_REVERSED;
249 }
250
251 /* Get the number of HW key cache entries. */
252 kc_entries_log = MS(base->deviceCap, AR_EEP_DEVCAP_KC_ENTRIES);
253 sc->sc_kc_entries = kc_entries_log != 0 ?
254 1 << kc_entries_log : AR_KEYTABLE_SIZE;
255
256 sc->sc_txchainmask = base->txMask;
257 if (sc->sc_mac_ver == AR_SREV_VERSION_5416_PCI &&
258 !(base->opCapFlags & AR_OPFLAGS_11A)) {
259 /* For single-band AR5416 PCI, use GPIO pin 0. */
260 sc->sc_rxchainmask = ar5008_gpio_read(sc, 0) ? 0x5 : 0x7;
261 }
262 else
263 sc->sc_rxchainmask = base->rxMask;
264
265 ops->setup(sc);
266 return 0;
267 }
268
269 /*
270 * Read 16-bit word from ROM.
271 */
272 Static int
273 ar5008_read_eep_word(struct athn_softc *sc, uint32_t addr, uint16_t *val)
274 {
275 uint32_t reg;
276 int ntries;
277
278 reg = AR_READ(sc, AR_EEPROM_OFFSET(addr));
279 for (ntries = 0; ntries < 1000; ntries++) {
280 reg = AR_READ(sc, AR_EEPROM_STATUS_DATA);
281 if (!(reg & (AR_EEPROM_STATUS_DATA_BUSY |
282 AR_EEPROM_STATUS_DATA_PROT_ACCESS))) {
283 *val = MS(reg, AR_EEPROM_STATUS_DATA_VAL);
284 return 0;
285 }
286 DELAY(10);
287 }
288 *val = 0xffff;
289 return ETIMEDOUT;
290 }
291
292 Static int
293 ar5008_read_rom(struct athn_softc *sc)
294 {
295 uint32_t addr, end;
296 uint16_t magic, sum, *eep;
297 int need_swap = 0;
298 int error;
299
300 /* Determine ROM endianness. */
301 error = ar5008_read_eep_word(sc, AR_EEPROM_MAGIC_OFFSET, &magic);
302 if (error != 0)
303 return error;
304 if (magic != AR_EEPROM_MAGIC) {
305 if (magic != bswap16(AR_EEPROM_MAGIC)) {
306 DPRINTFN(DBG_INIT, sc,
307 "invalid ROM magic 0x%x != 0x%x\n",
308 magic, AR_EEPROM_MAGIC);
309 return EIO;
310 }
311 DPRINTFN(DBG_INIT, sc, "non-native ROM endianness\n");
312 need_swap = 1;
313 }
314
315 /* Allocate space to store ROM in host memory. */
316 sc->sc_eep = malloc(sc->sc_eep_size, M_DEVBUF, M_WAITOK);
317
318 /* Read entire ROM and compute checksum. */
319 sum = 0;
320 eep = sc->sc_eep;
321 end = sc->sc_eep_base + sc->sc_eep_size / sizeof(uint16_t);
322 for (addr = sc->sc_eep_base; addr < end; addr++, eep++) {
323 if ((error = ar5008_read_eep_word(sc, addr, eep)) != 0) {
324 DPRINTFN(DBG_INIT, sc,
325 "could not read ROM at 0x%x\n", addr);
326 return error;
327 }
328 if (need_swap)
329 *eep = bswap16(*eep);
330 sum ^= *eep;
331 }
332 if (sum != 0xffff) {
333 aprint_error_dev(sc->sc_dev, "bad ROM checksum 0x%04x\n", sum);
334 return EIO;
335 }
336 if (need_swap)
337 ar5008_swap_rom(sc);
338
339 return 0;
340 }
341
342 Static void
343 ar5008_swap_rom(struct athn_softc *sc)
344 {
345 struct ar_base_eep_header *base = sc->sc_eep;
346
347 /* Swap common fields first. */
348 base->length = bswap16(base->length);
349 base->version = bswap16(base->version);
350 base->regDmn[0] = bswap16(base->regDmn[0]);
351 base->regDmn[1] = bswap16(base->regDmn[1]);
352 base->rfSilent = bswap16(base->rfSilent);
353 base->blueToothOptions = bswap16(base->blueToothOptions);
354 base->deviceCap = bswap16(base->deviceCap);
355
356 /* Swap device-dependent fields. */
357 sc->sc_ops.swap_rom(sc);
358 }
359
360 /*
361 * Access to General Purpose Input/Output ports.
362 */
363 Static int
364 ar5008_gpio_read(struct athn_softc *sc, int pin)
365 {
366
367 KASSERT(pin < sc->sc_ngpiopins);
368 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc))
369 return !((AR_READ(sc, AR7010_GPIO_IN) >> pin) & 1);
370 return (AR_READ(sc, AR_GPIO_IN_OUT) >> (sc->sc_ngpiopins + pin)) & 1;
371 }
372
373 Static void
374 ar5008_gpio_write(struct athn_softc *sc, int pin, int set)
375 {
376 uint32_t reg;
377
378 KASSERT(pin < sc->sc_ngpiopins);
379
380 if (sc->sc_flags & ATHN_FLAG_USB)
381 set = !set; /* AR9271/AR7010 is reversed. */
382
383 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
384 /* Special case for AR7010. */
385 reg = AR_READ(sc, AR7010_GPIO_OUT);
386 if (set)
387 reg |= 1 << pin;
388 else
389 reg &= ~(1 << pin);
390 AR_WRITE(sc, AR7010_GPIO_OUT, reg);
391 }
392 else {
393 reg = AR_READ(sc, AR_GPIO_IN_OUT);
394 if (set)
395 reg |= 1 << pin;
396 else
397 reg &= ~(1 << pin);
398 AR_WRITE(sc, AR_GPIO_IN_OUT, reg);
399 }
400 AR_WRITE_BARRIER(sc);
401 }
402
403 Static void
404 ar5008_gpio_config_input(struct athn_softc *sc, int pin)
405 {
406 uint32_t reg;
407
408 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
409 /* Special case for AR7010. */
410 AR_SETBITS(sc, AR7010_GPIO_OE, 1 << pin);
411 }
412 else {
413 reg = AR_READ(sc, AR_GPIO_OE_OUT);
414 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
415 reg |= AR_GPIO_OE_OUT_DRV_NO << (pin * 2);
416 AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
417 }
418 AR_WRITE_BARRIER(sc);
419 }
420
421 Static void
422 ar5008_gpio_config_output(struct athn_softc *sc, int pin, int type)
423 {
424 uint32_t reg;
425 int mux, off;
426
427 if ((sc->sc_flags & ATHN_FLAG_USB) && !AR_SREV_9271(sc)) {
428 /* Special case for AR7010. */
429 AR_CLRBITS(sc, AR7010_GPIO_OE, 1 << pin);
430 AR_WRITE_BARRIER(sc);
431 return;
432 }
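/* Each AR_GPIO_OUTPUT_MUX register packs six 5-bit function-select fields; pick the register and the field offset for this pin. */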
433 mux = pin / 6;
434 off = pin % 6;
435
436 reg = AR_READ(sc, AR_GPIO_OUTPUT_MUX(mux));
437 if (!AR_SREV_9280_20_OR_LATER(sc) && mux == 0)
438 reg = (reg & ~0x1f0) | (reg & 0x1f0) << 1;
439 reg &= ~(0x1f << (off * 5));
440 reg |= (type & 0x1f) << (off * 5);
441 AR_WRITE(sc, AR_GPIO_OUTPUT_MUX(mux), reg);
442
443 reg = AR_READ(sc, AR_GPIO_OE_OUT);
444 reg &= ~(AR_GPIO_OE_OUT_DRV_M << (pin * 2));
445 reg |= AR_GPIO_OE_OUT_DRV_ALL << (pin * 2);
446 AR_WRITE(sc, AR_GPIO_OE_OUT, reg);
447 AR_WRITE_BARRIER(sc);
448 }
449
450 Static void
451 ar5008_rfsilent_init(struct athn_softc *sc)
452 {
453 uint32_t reg;
454
455 /* Configure hardware radio switch. */
456 AR_SETBITS(sc, AR_GPIO_INPUT_EN_VAL, AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
457 reg = AR_READ(sc, AR_GPIO_INPUT_MUX2);
458 reg = RW(reg, AR_GPIO_INPUT_MUX2_RFSILENT, 0);
459 AR_WRITE(sc, AR_GPIO_INPUT_MUX2, reg);
460 ar5008_gpio_config_input(sc, sc->sc_rfsilent_pin);
461 AR_SETBITS(sc, AR_PHY_TEST, AR_PHY_TEST_RFSILENT_BB);
462 if (!(sc->sc_flags & ATHN_FLAG_RFSILENT_REVERSED)) {
463 AR_SETBITS(sc, AR_GPIO_INTR_POL,
464 AR_GPIO_INTR_POL_PIN(sc->sc_rfsilent_pin));
465 }
466 AR_WRITE_BARRIER(sc);
467 }
468
469 Static int
470 ar5008_dma_alloc(struct athn_softc *sc)
471 {
472 int error;
473
474 error = ar5008_tx_alloc(sc);
475 if (error != 0)
476 return error;
477
478 error = ar5008_rx_alloc(sc);
479 if (error != 0)
480 return error;
481
482 return 0;
483 }
484
485 Static void
486 ar5008_dma_free(struct athn_softc *sc)
487 {
488
489 ar5008_tx_free(sc);
490 ar5008_rx_free(sc);
491 }
492
493 Static int
494 ar5008_tx_alloc(struct athn_softc *sc)
495 {
496 struct athn_tx_buf *bf;
497 bus_size_t size;
498 int error, nsegs, i;
499
500 /*
501 * Allocate a pool of Tx descriptors shared between all Tx queues.
502 */
503 size = ATHN_NTXBUFS * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
504
505 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
506 BUS_DMA_NOWAIT, &sc->sc_map);
507 if (error != 0)
508 goto fail;
509
510 error = bus_dmamem_alloc(sc->sc_dmat, size, 4, 0, &sc->sc_seg, 1,
511 // XXX &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
512 &nsegs, BUS_DMA_NOWAIT);
513 if (error != 0)
514 goto fail;
515
516 error = bus_dmamem_map(sc->sc_dmat, &sc->sc_seg, 1, size,
517 (void **)&sc->sc_descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
518 if (error != 0)
519 goto fail;
520
521 error = bus_dmamap_load(sc->sc_dmat, sc->sc_map, sc->sc_descs,
522 size, NULL, BUS_DMA_NOWAIT);
523 if (error != 0)
524 goto fail;
525
526 SIMPLEQ_INIT(&sc->sc_txbufs);
527 for (i = 0; i < ATHN_NTXBUFS; i++) {
528 bf = &sc->sc_txpool[i];
529
530 error = bus_dmamap_create(sc->sc_dmat, ATHN_TXBUFSZ,
531 AR5008_MAX_SCATTER, ATHN_TXBUFSZ, 0, BUS_DMA_NOWAIT,
532 &bf->bf_map);
533 if (error != 0) {
534 aprint_error_dev(sc->sc_dev,
535 "could not create Tx buf DMA map\n");
536 goto fail;
537 }
538
539 bf->bf_descs =
540 &((struct ar_tx_desc *)sc->sc_descs)[i * AR5008_MAX_SCATTER];
541 bf->bf_daddr = sc->sc_map->dm_segs[0].ds_addr +
542 i * AR5008_MAX_SCATTER * sizeof(struct ar_tx_desc);
543
544 SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
545 }
546 return 0;
547 fail:
548 ar5008_tx_free(sc);
549 return error;
550 }
551
552 Static void
553 ar5008_tx_free(struct athn_softc *sc)
554 {
555 struct athn_tx_buf *bf;
556 int i;
557
558 for (i = 0; i < ATHN_NTXBUFS; i++) {
559 bf = &sc->sc_txpool[i];
560
561 if (bf->bf_map != NULL)
562 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
563 }
564 /* Free Tx descriptors. */
565 if (sc->sc_map != NULL) {
566 if (sc->sc_descs != NULL) {
567 bus_dmamap_unload(sc->sc_dmat, sc->sc_map);
568 bus_dmamem_unmap(sc->sc_dmat, (void *)sc->sc_descs,
569 ATHN_NTXBUFS * AR5008_MAX_SCATTER *
570 sizeof(struct ar_tx_desc));
571 bus_dmamem_free(sc->sc_dmat, &sc->sc_seg, 1);
572 }
573 bus_dmamap_destroy(sc->sc_dmat, sc->sc_map);
574 }
575 }
576
577 Static int
578 ar5008_rx_alloc(struct athn_softc *sc)
579 {
580 struct athn_rxq *rxq = &sc->sc_rxq[0];
581 struct athn_rx_buf *bf;
582 struct ar_rx_desc *ds;
583 bus_size_t size;
584 int error, nsegs, i;
585
586 rxq->bf = malloc(ATHN_NRXBUFS * sizeof(*bf), M_DEVBUF,
587 M_WAITOK | M_ZERO);
588
589 size = ATHN_NRXBUFS * sizeof(struct ar_rx_desc);
590
591 error = bus_dmamap_create(sc->sc_dmat, size, 1, size, 0,
592 BUS_DMA_NOWAIT, &rxq->map);
593 if (error != 0)
594 goto fail;
595
596 error = bus_dmamem_alloc(sc->sc_dmat, size, 0, 0, &rxq->seg, 1,
597 // &nsegs, BUS_DMA_NOWAIT | BUS_DMA_ZERO);
598 &nsegs, BUS_DMA_NOWAIT);
599 if (error != 0)
600 goto fail;
601
602 error = bus_dmamem_map(sc->sc_dmat, &rxq->seg, 1, size,
603 (void **)&rxq->descs, BUS_DMA_NOWAIT | BUS_DMA_COHERENT);
604 if (error != 0)
605 goto fail;
606
607 error = bus_dmamap_load(sc->sc_dmat, rxq->map, rxq->descs,
608 size, NULL, BUS_DMA_NOWAIT);
609 if (error != 0)
610 goto fail;
611
612 for (i = 0; i < ATHN_NRXBUFS; i++) {
613 bf = &rxq->bf[i];
614 ds = &((struct ar_rx_desc *)rxq->descs)[i];
615
616 error = bus_dmamap_create(sc->sc_dmat, ATHN_RXBUFSZ, 1,
617 ATHN_RXBUFSZ, 0, BUS_DMA_NOWAIT | BUS_DMA_ALLOCNOW,
618 &bf->bf_map);
619 if (error != 0) {
620 aprint_error_dev(sc->sc_dev,
621 " could not create Rx buf DMA map\n");
622 goto fail;
623 }
624 /*
625 * Assumes MCLGETI returns cache-line-size aligned buffers.
626 * XXX: does ours?
627 */
628 bf->bf_m = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
629 if (bf->bf_m == NULL) {
630 aprint_error_dev(sc->sc_dev,
631 "could not allocate Rx mbuf\n");
632 error = ENOBUFS;
633 goto fail;
634 }
635
636 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
637 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
638 BUS_DMA_NOWAIT | BUS_DMA_READ);
639 if (error != 0) {
640 aprint_error_dev(sc->sc_dev,
641 "could not DMA map Rx buffer\n");
642 goto fail;
643 }
644
645 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
646 BUS_DMASYNC_PREREAD);
647
648 bf->bf_desc = ds;
649 bf->bf_daddr = rxq->map->dm_segs[0].ds_addr +
650 i * sizeof(struct ar_rx_desc);
651 }
652 return 0;
653 fail:
654 ar5008_rx_free(sc);
655 return error;
656 }
657
658 Static void
659 ar5008_rx_free(struct athn_softc *sc)
660 {
661 struct athn_rxq *rxq = &sc->sc_rxq[0];
662 struct athn_rx_buf *bf;
663 int i;
664
665 if (rxq->bf == NULL)
666 return;
667 for (i = 0; i < ATHN_NRXBUFS; i++) {
668 bf = &rxq->bf[i];
669
670 if (bf->bf_map != NULL)
671 bus_dmamap_destroy(sc->sc_dmat, bf->bf_map);
672 m_freem(bf->bf_m);
673 }
674 free(rxq->bf, M_DEVBUF);
675
676 /* Free Rx descriptors. */
677 if (rxq->map != NULL) {
678 if (rxq->descs != NULL) {
679 bus_dmamap_unload(sc->sc_dmat, rxq->map);
680 bus_dmamem_unmap(sc->sc_dmat, (void *)rxq->descs,
681 ATHN_NRXBUFS * sizeof(struct ar_rx_desc));
682 bus_dmamem_free(sc->sc_dmat, &rxq->seg, 1);
683 }
684 bus_dmamap_destroy(sc->sc_dmat, rxq->map);
685 }
686 }
687
688 Static void
689 ar5008_rx_enable(struct athn_softc *sc)
690 {
691 struct athn_rxq *rxq = &sc->sc_rxq[0];
692 struct athn_rx_buf *bf;
693 struct ar_rx_desc *ds;
694 int i;
695
696 /* Setup and link Rx descriptors. */
697 SIMPLEQ_INIT(&rxq->head);
698 rxq->lastds = NULL;
699 for (i = 0; i < ATHN_NRXBUFS; i++) {
700 bf = &rxq->bf[i];
701 ds = bf->bf_desc;
702
703 memset(ds, 0, sizeof(*ds));
704 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
705 ds->ds_ctl1 = SM(AR_RXC1_BUF_LEN, ATHN_RXBUFSZ);
706
707 if (rxq->lastds != NULL) {
708 ((struct ar_rx_desc *)rxq->lastds)->ds_link =
709 bf->bf_daddr;
710 }
711 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
712 rxq->lastds = ds;
713 }
714 bus_dmamap_sync(sc->sc_dmat, rxq->map, 0, rxq->map->dm_mapsize,
715 BUS_DMASYNC_PREREAD);
716
717 /* Enable Rx. */
718 AR_WRITE(sc, AR_RXDP, SIMPLEQ_FIRST(&rxq->head)->bf_daddr);
719 AR_WRITE(sc, AR_CR, AR_CR_RXE);
720 AR_WRITE_BARRIER(sc);
721 }
722
723 Static void
724 ar5008_rx_radiotap(struct athn_softc *sc, struct mbuf *m,
725 struct ar_rx_desc *ds)
726 {
727 struct athn_rx_radiotap_header *tap = &sc->sc_rxtap;
728 struct ieee80211com *ic = &sc->sc_ic;
729 uint64_t tsf;
730 uint32_t tstamp;
731 uint8_t rate;
732
733 /* Extend the 15-bit timestamp from Rx descriptor to 64-bit TSF. */
734 tstamp = ds->ds_status2;
735 tsf = AR_READ(sc, AR_TSF_U32);
736 tsf = tsf << 32 | AR_READ(sc, AR_TSF_L32);
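/*
 * The descriptor carries only the low 15 bits of the TSF; if those
 * bits have already wrapped past the Rx timestamp, step back one
 * 0x8000 period before splicing the timestamp in.
 */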
737 if ((tsf & 0x7fff) < tstamp)
738 tsf -= 0x8000;
739 tsf = (tsf & ~0x7fff) | tstamp;
740
741 tap->wr_flags = IEEE80211_RADIOTAP_F_FCS;
742 tap->wr_tsft = htole64(tsf);
743 tap->wr_chan_freq = htole16(ic->ic_curchan->ic_freq);
744 tap->wr_chan_flags = htole16(ic->ic_curchan->ic_flags);
745 tap->wr_dbm_antsignal = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
746 /* XXX noise. */
747 tap->wr_antenna = MS(ds->ds_status3, AR_RXS3_ANTENNA);
748 tap->wr_rate = 0; /* In case it can't be found below. */
749 if (AR_SREV_5416_20_OR_LATER(sc))
750 rate = MS(ds->ds_status0, AR_RXS0_RATE);
751 else
752 rate = MS(ds->ds_status3, AR_RXS3_RATE);
753 if (rate & 0x80) { /* HT. */
754 /* Bit 7 set means HT MCS instead of rate. */
755 tap->wr_rate = rate;
756 if (!(ds->ds_status3 & AR_RXS3_GI))
757 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTGI;
758
759 }
760 else if (rate & 0x10) { /* CCK. */
761 if (rate & 0x04)
762 tap->wr_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
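/* Map the hardware CCK rate code to a radiotap rate (500 kb/s units). */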
763 switch (rate & ~0x14) {
764 case 0xb: tap->wr_rate = 2; break;
765 case 0xa: tap->wr_rate = 4; break;
766 case 0x9: tap->wr_rate = 11; break;
767 case 0x8: tap->wr_rate = 22; break;
768 }
769 }
770 else { /* OFDM. */
771 switch (rate) {
772 case 0xb: tap->wr_rate = 12; break;
773 case 0xf: tap->wr_rate = 18; break;
774 case 0xa: tap->wr_rate = 24; break;
775 case 0xe: tap->wr_rate = 36; break;
776 case 0x9: tap->wr_rate = 48; break;
777 case 0xd: tap->wr_rate = 72; break;
778 case 0x8: tap->wr_rate = 96; break;
779 case 0xc: tap->wr_rate = 108; break;
780 }
781 }
782 bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_rxtap_len, m, BPF_D_IN);
783 }
784
785 static __inline int
786 ar5008_rx_process(struct athn_softc *sc)
787 {
788 struct ieee80211com *ic = &sc->sc_ic;
789 struct ifnet *ifp = &sc->sc_if;
790 struct athn_rxq *rxq = &sc->sc_rxq[0];
791 struct athn_rx_buf *bf, *nbf;
792 struct ar_rx_desc *ds;
793 struct ieee80211_frame *wh;
794 struct ieee80211_node *ni;
795 struct mbuf *m, *m1;
796 u_int32_t rstamp;
797 int error, len, rssi, s;
798
799 bf = SIMPLEQ_FIRST(&rxq->head);
800 if (__predict_false(bf == NULL)) { /* Should not happen. */
801 aprint_error_dev(sc->sc_dev, "Rx queue is empty!\n");
802 return ENOENT;
803 }
804 ds = bf->bf_desc;
805
806 if (!(ds->ds_status8 & AR_RXS8_DONE)) {
807 /*
808 * On some parts, the status words can get corrupted
809 * (including the "done" bit), so we check the next
810 * descriptor "done" bit. If it is set, it is a good
811 * indication that the status words are corrupted, so
812 * we skip this descriptor and drop the frame.
813 */
814 nbf = SIMPLEQ_NEXT(bf, bf_list);
815 if (nbf != NULL &&
816 (((struct ar_rx_desc *)nbf->bf_desc)->ds_status8 &
817 AR_RXS8_DONE)) {
818 DPRINTFN(DBG_RX, sc,
819 "corrupted descriptor status=0x%x\n",
820 ds->ds_status8);
821 /* HW will not "move" RXDP in this case, so do it. */
822 AR_WRITE(sc, AR_RXDP, nbf->bf_daddr);
823 AR_WRITE_BARRIER(sc);
824 if_statinc(ifp, if_ierrors);
825 goto skip;
826 }
827 return EBUSY;
828 }
829
830 if (__predict_false(ds->ds_status1 & AR_RXS1_MORE)) {
831 /* Drop frames that span multiple Rx descriptors. */
832 DPRINTFN(DBG_RX, sc, "dropping split frame\n");
833 if_statinc(ifp, if_ierrors);
834 goto skip;
835 }
836 if (!(ds->ds_status8 & AR_RXS8_FRAME_OK)) {
837 if (ds->ds_status8 & AR_RXS8_CRC_ERR)
838 DPRINTFN(DBG_RX, sc, "CRC error\n");
839 else if (ds->ds_status8 & AR_RXS8_PHY_ERR)
840 DPRINTFN(DBG_RX, sc, "PHY error=0x%x\n",
841 MS(ds->ds_status8, AR_RXS8_PHY_ERR_CODE));
842 else if (ds->ds_status8 & AR_RXS8_DECRYPT_CRC_ERR)
843 DPRINTFN(DBG_RX, sc, "Decryption CRC error\n");
844 else if (ds->ds_status8 & AR_RXS8_MICHAEL_ERR) {
845 DPRINTFN(DBG_RX, sc, "Michael MIC failure\n");
846
847 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
848 m = bf->bf_m;
849 m_set_rcvif(m, ifp);
850 m->m_pkthdr.len = m->m_len = len;
851 wh = mtod(m, struct ieee80211_frame *);
852
853 /* Report Michael MIC failures to net80211. */
854 ieee80211_notify_michael_failure(ic, wh, 0 /* XXX: keyix */);
855 }
856 if_statinc(ifp, if_ierrors);
857 goto skip;
858 }
859
860 len = MS(ds->ds_status1, AR_RXS1_DATA_LEN);
861 if (__predict_false(len < (int)IEEE80211_MIN_LEN || len > ATHN_RXBUFSZ)) {
862 DPRINTFN(DBG_RX, sc, "corrupted descriptor length=%d\n", len);
863 if_statinc(ifp, if_ierrors);
864 goto skip;
865 }
866
867 /* Allocate a new Rx buffer. */
868 m1 = MCLGETI(NULL, M_DONTWAIT, NULL, ATHN_RXBUFSZ);
869 if (__predict_false(m1 == NULL)) {
870 ic->ic_stats.is_rx_nobuf++;
871 if_statinc(ifp, if_ierrors);
872 goto skip;
873 }
874
875 /* Sync and unmap the old Rx buffer. */
876 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
877 BUS_DMASYNC_POSTREAD);
878 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
879
880 /* Map the new Rx buffer. */
881 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map, mtod(m1, void *),
882 ATHN_RXBUFSZ, NULL, BUS_DMA_NOWAIT | BUS_DMA_READ);
883 if (__predict_false(error != 0)) {
884 m_freem(m1);
885
886 /* Remap the old Rx buffer or panic. */
887 error = bus_dmamap_load(sc->sc_dmat, bf->bf_map,
888 mtod(bf->bf_m, void *), ATHN_RXBUFSZ, NULL,
889 BUS_DMA_NOWAIT | BUS_DMA_READ);
890 KASSERT(error == 0);
891 if_statinc(ifp, if_ierrors);
892 goto skip;
893 }
894
895 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, ATHN_RXBUFSZ,
896 BUS_DMASYNC_PREREAD);
897
898 /* Write physical address of new Rx buffer. */
899 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
900
901 m = bf->bf_m;
902 bf->bf_m = m1;
903
904 /* Finalize mbuf. */
905 m_set_rcvif(m, ifp);
906 m->m_pkthdr.len = m->m_len = len;
907
908 s = splnet();
909
910 /* Grab a reference to the source node. */
911 wh = mtod(m, struct ieee80211_frame *);
912 ni = ieee80211_find_rxnode(ic, (struct ieee80211_frame_min *)wh);
913
914 /* Remove any HW padding after the 802.11 header. */
915 if (!(wh->i_fc[0] & IEEE80211_FC0_TYPE_CTL)) {
916 u_int hdrlen = ieee80211_anyhdrsize(wh);
917 if (hdrlen & 3) {
918 memmove((uint8_t *)wh + 2, wh, hdrlen);
919 m_adj(m, 2);
920 }
921 }
922 if (__predict_false(sc->sc_drvbpf != NULL))
923 ar5008_rx_radiotap(sc, m, ds);
924
925 /* Trim 802.11 FCS after radiotap. */
926 m_adj(m, -IEEE80211_CRC_LEN);
927
928 /* Send the frame to the 802.11 layer. */
929 rssi = MS(ds->ds_status4, AR_RXS4_RSSI_COMBINED);
930 rstamp = ds->ds_status2;
931 ieee80211_input(ic, m, ni, rssi, rstamp);
932
933 /* Node is no longer needed. */
934 ieee80211_free_node(ni);
935
936 splx(s);
937
938 skip:
939 /* Unlink this descriptor from head. */
940 SIMPLEQ_REMOVE_HEAD(&rxq->head, bf_list);
941 memset(&ds->ds_status0, 0, 36); /* XXX Really needed? */
942 ds->ds_status8 &= ~AR_RXS8_DONE;
943 ds->ds_link = 0;
944
945 /* Re-use this descriptor and link it to tail. */
946 if (__predict_true(!SIMPLEQ_EMPTY(&rxq->head)))
947 ((struct ar_rx_desc *)rxq->lastds)->ds_link = bf->bf_daddr;
948 else
949 AR_WRITE(sc, AR_RXDP, bf->bf_daddr);
950 SIMPLEQ_INSERT_TAIL(&rxq->head, bf, bf_list);
951 rxq->lastds = ds;
952
953 /* Re-enable Rx. */
954 AR_WRITE(sc, AR_CR, AR_CR_RXE);
955 AR_WRITE_BARRIER(sc);
956 return 0;
957 }
958
959 Static void
960 ar5008_rx_intr(struct athn_softc *sc)
961 {
962
963 while (ar5008_rx_process(sc) == 0)
964 continue;
965 }
966
967 Static int
968 ar5008_tx_process(struct athn_softc *sc, int qid)
969 {
970 struct ifnet *ifp = &sc->sc_if;
971 struct athn_txq *txq = &sc->sc_txq[qid];
972 struct athn_node *an;
973 struct athn_tx_buf *bf;
974 struct ar_tx_desc *ds;
975 uint8_t failcnt;
976
977 bf = SIMPLEQ_FIRST(&txq->head);
978 if (bf == NULL)
979 return ENOENT;
980 /* Get descriptor of last DMA segment. */
981 ds = &((struct ar_tx_desc *)bf->bf_descs)[bf->bf_map->dm_nsegs - 1];
982
983 if (!(ds->ds_status9 & AR_TXS9_DONE))
984 return EBUSY;
985
986 SIMPLEQ_REMOVE_HEAD(&txq->head, bf_list);
987 if_statinc(ifp, if_opackets);
988
989 sc->sc_tx_timer = 0;
990
991 if (ds->ds_status1 & AR_TXS1_EXCESSIVE_RETRIES)
992 if_statinc(ifp, if_oerrors);
993
994 if (ds->ds_status1 & AR_TXS1_UNDERRUN)
995 athn_inc_tx_trigger_level(sc);
996
997 an = (struct athn_node *)bf->bf_ni;
998 /*
999 * NB: the data fail count contains the number of un-acked tries
1000 * for the final series used. We must add the number of tries for
1001 * each series that was fully processed.
1002 */
1003 failcnt = MS(ds->ds_status1, AR_TXS1_DATA_FAIL_CNT);
1004 /* NB: Assume two tries per series. */
1005 failcnt += MS(ds->ds_status9, AR_TXS9_FINAL_IDX) * 2;
1006
1007 /* Update rate control statistics. */
1008 an->amn.amn_txcnt++;
1009 if (failcnt > 0)
1010 an->amn.amn_retrycnt++;
1011
1012 DPRINTFN(DBG_TX, sc, "Tx done qid=%d status1=%d fail count=%d\n",
1013 qid, ds->ds_status1, failcnt);
1014
1015 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1016 BUS_DMASYNC_POSTWRITE);
1017 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1018
1019 m_freem(bf->bf_m);
1020 bf->bf_m = NULL;
1021 ieee80211_free_node(bf->bf_ni);
1022 bf->bf_ni = NULL;
1023
1024 /* Link Tx buffer back to global free list. */
1025 SIMPLEQ_INSERT_TAIL(&sc->sc_txbufs, bf, bf_list);
1026 return 0;
1027 }
1028
1029 Static void
1030 ar5008_tx_intr(struct athn_softc *sc)
1031 {
1032 struct ifnet *ifp = &sc->sc_if;
1033 uint16_t mask = 0;
1034 uint32_t reg;
1035 int qid, s;
1036
1037 s = splnet();
1038
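/* Build a bitmap of queues with completed frames from the shadow ISR registers. */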
1039 reg = AR_READ(sc, AR_ISR_S0_S);
1040 mask |= MS(reg, AR_ISR_S0_QCU_TXOK);
1041 mask |= MS(reg, AR_ISR_S0_QCU_TXDESC);
1042
1043 reg = AR_READ(sc, AR_ISR_S1_S);
1044 mask |= MS(reg, AR_ISR_S1_QCU_TXERR);
1045 mask |= MS(reg, AR_ISR_S1_QCU_TXEOL);
1046
1047 DPRINTFN(DBG_TX, sc, "Tx interrupt mask=0x%x\n", mask);
1048 for (qid = 0; mask != 0; mask >>= 1, qid++) {
1049 if (mask & 1)
1050 while (ar5008_tx_process(sc, qid) == 0);
1051 }
1052 if (!SIMPLEQ_EMPTY(&sc->sc_txbufs)) {
1053 ifp->if_flags &= ~IFF_OACTIVE;
1054 ifp->if_start(ifp); /* in softint */
1055 }
1056
1057 splx(s);
1058 }
1059
1060 #ifndef IEEE80211_STA_ONLY
1061 /*
1062 * Process Software Beacon Alert interrupts.
1063 */
1064 Static int
1065 ar5008_swba_intr(struct athn_softc *sc)
1066 {
1067 struct ieee80211com *ic = &sc->sc_ic;
1068 struct ifnet *ifp = &sc->sc_if;
1069 struct ieee80211_node *ni = ic->ic_bss;
1070 struct athn_tx_buf *bf = sc->sc_bcnbuf;
1071 struct ieee80211_frame *wh;
1072 struct ieee80211_beacon_offsets bo;
1073 struct ar_tx_desc *ds;
1074 struct mbuf *m;
1075 uint8_t ridx, hwrate;
1076 int error, totlen;
1077
1078 #if notyet
1079 if (ic->ic_tim_mcast_pending &&
1080 IF_IS_EMPTY(&ni->ni_savedq) &&
1081 SIMPLEQ_EMPTY(&sc->sc_txq[ATHN_QID_CAB].head))
1082 ic->ic_tim_mcast_pending = 0;
1083 #endif
1084 if (ic->ic_dtim_count == 0)
1085 ic->ic_dtim_count = ic->ic_dtim_period - 1;
1086 else
1087 ic->ic_dtim_count--;
1088
1089 /* Make sure previous beacon has been sent. */
1090 if (athn_tx_pending(sc, ATHN_QID_BEACON)) {
1091 DPRINTFN(DBG_INTR, sc, "beacon stuck\n");
1092 return EBUSY;
1093 }
1094 /* Get new beacon. */
1095 m = ieee80211_beacon_alloc(ic, ic->ic_bss, &bo);
1096 if (__predict_false(m == NULL))
1097 return ENOBUFS;
1098 /* Assign sequence number. */
1099 /* XXX: use non-QoS tid? */
1100 wh = mtod(m, struct ieee80211_frame *);
1101 *(uint16_t *)&wh->i_seq[0] =
1102 htole16(ic->ic_bss->ni_txseqs[0] << IEEE80211_SEQ_SEQ_SHIFT);
1103 ic->ic_bss->ni_txseqs[0]++;
1104
1105 /* Unmap and free old beacon if any. */
1106 if (__predict_true(bf->bf_m != NULL)) {
1107 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0,
1108 bf->bf_map->dm_mapsize, BUS_DMASYNC_POSTWRITE);
1109 bus_dmamap_unload(sc->sc_dmat, bf->bf_map);
1110 m_freem(bf->bf_m);
1111 bf->bf_m = NULL;
1112 }
1113 /* DMA map new beacon. */
1114 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1115 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1116 if (__predict_false(error != 0)) {
1117 m_freem(m);
1118 return error;
1119 }
1120 bf->bf_m = m;
1121
1122 /* Setup Tx descriptor (simplified ar5008_tx()). */
1123 ds = bf->bf_descs;
1124 memset(ds, 0, sizeof(*ds));
1125
1126 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1127 ds->ds_ctl0 = SM(AR_TXC0_FRAME_LEN, totlen);
1128 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, AR_MAX_RATE_POWER);
1129 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, AR_FRAME_TYPE_BEACON);
1130 ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1131 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, AR_ENCR_TYPE_CLEAR);
1132
1133 /* Write number of tries. */
1134 ds->ds_ctl2 = SM(AR_TXC2_XMIT_DATA_TRIES0, 1);
1135
1136 /* Write Tx rate. */
1137 ridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
1138 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
1139 hwrate = athn_rates[ridx].hwrate;
1140 ds->ds_ctl3 = SM(AR_TXC3_XMIT_RATE0, hwrate);
1141
1142 /* Write Tx chains. */
1143 ds->ds_ctl7 = SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask);
1144
1145 ds->ds_data = bf->bf_map->dm_segs[0].ds_addr;
1146 /* Segment length must be a multiple of 4. */
1147 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1148 (bf->bf_map->dm_segs[0].ds_len + 3) & ~3);
1149
1150 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1151 BUS_DMASYNC_PREWRITE);
1152
1153 /* Stop Tx DMA before putting the new beacon on the queue. */
1154 athn_stop_tx_dma(sc, ATHN_QID_BEACON);
1155
1156 AR_WRITE(sc, AR_QTXDP(ATHN_QID_BEACON), bf->bf_daddr);
1157
1158 for(;;) {
1159 if (SIMPLEQ_EMPTY(&sc->sc_txbufs))
1160 break;
1161
1162 IF_DEQUEUE(&ni->ni_savedq, m);
1163 if (m == NULL)
1164 break;
1165 if (!IF_IS_EMPTY(&ni->ni_savedq)) {
1166 /* more queued frames, set the more data bit */
1167 wh = mtod(m, struct ieee80211_frame *);
1168 wh->i_fc[1] |= IEEE80211_FC1_MORE_DATA;
1169 }
1170
1171 if (sc->sc_ops.tx(sc, m, ni, ATHN_TXFLAG_CAB) != 0) {
1172 ieee80211_free_node(ni);
1173 if_statinc(ifp, if_oerrors);
1174 break;
1175 }
1176 }
1177
1178 /* Kick Tx. */
1179 AR_WRITE(sc, AR_Q_TXE, 1 << ATHN_QID_BEACON);
1180 AR_WRITE_BARRIER(sc);
1181 return 0;
1182 }
1183 #endif
1184
1185 static int
1186 ar5008_get_intr_status(struct athn_softc *sc, uint32_t *intrp, uint32_t *syncp)
1187 {
1188 uint32_t intr, sync;
1189
1190 /* Get pending interrupts. */
1191 intr = AR_READ(sc, AR_INTR_ASYNC_CAUSE);
1192 if (!(intr & AR_INTR_MAC_IRQ) || intr == AR_INTR_SPURIOUS) {
1193 intr = AR_READ(sc, AR_INTR_SYNC_CAUSE);
1194 if (intr == AR_INTR_SPURIOUS || (intr & sc->sc_isync) == 0)
1195 return 0; /* Not for us. */
1196 }
1197
1198 if ((AR_READ(sc, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) &&
1199 (AR_READ(sc, AR_RTC_STATUS) & AR_RTC_STATUS_M) == AR_RTC_STATUS_ON)
1200 intr = AR_READ(sc, AR_ISR);
1201 else
1202 intr = 0;
1203 sync = AR_READ(sc, AR_INTR_SYNC_CAUSE) & sc->sc_isync;
1204 if (intr == 0 && sync == 0)
1205 return 0; /* Not for us. */
1206
1207 *intrp = intr;
1208 *syncp = sync;
1209 return 1;
1210 }
1211
1212
1213 Static int
1214 ar5008_intr_status(struct athn_softc *sc)
1215 {
1216 uint32_t intr, sync;
1217
1218 return ar5008_get_intr_status(sc, &intr, &sync);
1219 }
1220
1221 Static int
1222 ar5008_intr(struct athn_softc *sc)
1223 {
1224 uint32_t intr, intr5, sync;
1225 #ifndef IEEE80211_STA_ONLY
1226 int s;
1227 #endif
1228
1229 if (!ar5008_get_intr_status(sc, &intr, &sync))
1230 return 0;
1231
1232 if (intr != 0) {
1233 if (intr & AR_ISR_BCNMISC) {
1234 uint32_t intr2 = AR_READ(sc, AR_ISR_S2);
1235 #if notyet
1236 if (intr2 & AR_ISR_S2_TIM)
1237 /* TBD */;
1238 if (intr2 & AR_ISR_S2_TSFOOR)
1239 /* TBD */;
1240 #else
1241 __USE(intr2);
1242 #endif
1243 }
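/* Reading AR_ISR_RAC returns and clears the pending interrupt bits. */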
1244 intr = AR_READ(sc, AR_ISR_RAC);
1245 if (intr == AR_INTR_SPURIOUS)
1246 return 1;
1247
1248 #ifndef IEEE80211_STA_ONLY
1249 if (intr & AR_ISR_SWBA) {
1250 s = splnet();
1251 ar5008_swba_intr(sc);
1252 splx(s);
1253 }
1254 #endif
1255 if (intr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
1256 ar5008_rx_intr(sc);
1257 if (intr & (AR_ISR_RXOK | AR_ISR_RXERR | AR_ISR_RXORN))
1258 ar5008_rx_intr(sc);
1259
1260 if (intr & (AR_ISR_TXOK | AR_ISR_TXDESC |
1261 AR_ISR_TXERR | AR_ISR_TXEOL))
1262 ar5008_tx_intr(sc);
1263
1264 intr5 = AR_READ(sc, AR_ISR_S5_S);
1265 if (intr & AR_ISR_GENTMR) {
1266 if (intr5 & AR_ISR_GENTMR) {
1267 DPRINTFN(DBG_INTR, sc,
1268 "GENTMR trigger=%d thresh=%d\n",
1269 MS(intr5, AR_ISR_S5_GENTIMER_TRIG),
1270 MS(intr5, AR_ISR_S5_GENTIMER_THRESH));
1271 }
1272 }
1273 #if notyet
1274 if (intr5 & AR_ISR_S5_TIM_TIMER) {
1275 /* TBD */;
1276 }
1277 #endif
1278 }
1279 if (sync != 0) {
1280 #if notyet
1281 if (sync &
1282 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) {
1283 /* TBD */;
1284 }
1285 #endif
1286 if (sync & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
1287 AR_WRITE(sc, AR_RC, AR_RC_HOSTIF);
1288 AR_WRITE(sc, AR_RC, 0);
1289 }
1290
1291 if ((sc->sc_flags & ATHN_FLAG_RFSILENT) &&
1292 (sync & AR_INTR_SYNC_GPIO_PIN(sc->sc_rfsilent_pin))) {
1293 AR_WRITE(sc, AR_INTR_SYNC_ENABLE, 0);
1294 (void)AR_READ(sc, AR_INTR_SYNC_ENABLE);
1295 pmf_event_inject(sc->sc_dev, PMFE_RADIO_OFF);
1296 }
1297
1298 AR_WRITE(sc, AR_INTR_SYNC_CAUSE, sync);
1299 (void)AR_READ(sc, AR_INTR_SYNC_CAUSE);
1300 }
1301 return 1;
1302 }
1303
1304 Static int
1305 ar5008_tx(struct athn_softc *sc, struct mbuf *m, struct ieee80211_node *ni,
1306 int txflags)
1307 {
1308 struct ieee80211com *ic = &sc->sc_ic;
1309 struct ieee80211_key *k = NULL;
1310 struct ieee80211_frame *wh;
1311 struct athn_series series[4];
1312 struct ar_tx_desc *ds, *lastds;
1313 struct athn_txq *txq;
1314 struct athn_tx_buf *bf;
1315 struct athn_node *an = (void *)ni;
1316 struct mbuf *m1;
1317 uint16_t qos;
1318 uint8_t txpower, type, encrtype, ridx[4];
1319 int i, error, totlen, hasqos, qid;
1320
1321 /* Grab a Tx buffer from our global free list. */
1322 bf = SIMPLEQ_FIRST(&sc->sc_txbufs);
1323 KASSERT(bf != NULL);
1324
1325 /* Map 802.11 frame type to hardware frame type. */
1326 wh = mtod(m, struct ieee80211_frame *);
1327 if ((wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) ==
1328 IEEE80211_FC0_TYPE_MGT) {
1329 /* NB: Beacons do not use ar5008_tx(). */
1330 if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1331 IEEE80211_FC0_SUBTYPE_PROBE_RESP)
1332 type = AR_FRAME_TYPE_PROBE_RESP;
1333 else if ((wh->i_fc[0] & IEEE80211_FC0_SUBTYPE_MASK) ==
1334 IEEE80211_FC0_SUBTYPE_ATIM)
1335 type = AR_FRAME_TYPE_ATIM;
1336 else
1337 type = AR_FRAME_TYPE_NORMAL;
1338 }
1339 else if ((wh->i_fc[0] &
1340 (IEEE80211_FC0_TYPE_MASK | IEEE80211_FC0_SUBTYPE_MASK)) ==
1341 (IEEE80211_FC0_TYPE_CTL | IEEE80211_FC0_SUBTYPE_PS_POLL)) {
1342 type = AR_FRAME_TYPE_PSPOLL;
1343 }
1344 else
1345 type = AR_FRAME_TYPE_NORMAL;
1346
1347 if (wh->i_fc[1] & IEEE80211_FC1_PROTECTED) {
1348 k = ieee80211_crypto_encap(ic, ni, m);
1349 if (k == NULL)
1350 return ENOBUFS;
1351
1352 /* packet header may have moved, reset our local pointer */
1353 wh = mtod(m, struct ieee80211_frame *);
1354 }
1355
1356 /* XXX 2-byte padding for QoS and 4-addr headers. */
1357
1358 /* Select the HW Tx queue to use for this frame. */
1359 if ((hasqos = ieee80211_has_qos(wh))) {
1360 #ifdef notyet_edca
1361 uint8_t tid;
1362
1363 qos = ieee80211_get_qos(wh);
1364 tid = qos & IEEE80211_QOS_TID;
1365 qid = athn_ac2qid[ieee80211_up_to_ac(ic, tid)];
1366 #else
1367 qos = ieee80211_get_qos(wh);
1368 qid = ATHN_QID_AC_BE;
1369 #endif /* notyet_edca */
1370 }
1371 else if (type == AR_FRAME_TYPE_PSPOLL) {
1372 qos = 0;
1373 qid = ATHN_QID_PSPOLL;
1374 }
1375 else if (txflags & ATHN_TXFLAG_CAB) {
1376 qos = 0;
1377 qid = ATHN_QID_CAB;
1378 }
1379 else {
1380 qos = 0;
1381 qid = ATHN_QID_AC_BE;
1382 }
1383 txq = &sc->sc_txq[qid];
1384
1385 /* Select the transmit rates to use for this frame. */
1386 if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1387 (wh->i_fc[0] & IEEE80211_FC0_TYPE_MASK) !=
1388 IEEE80211_FC0_TYPE_DATA) {
1389 /* Use lowest rate for all tries. */
1390 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1391 (ic->ic_curmode == IEEE80211_MODE_11A) ?
1392 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK1;
1393 }
1394 else if (ic->ic_fixed_rate != -1) {
1395 /* Use same fixed rate for all tries. */
1396 ridx[0] = ridx[1] = ridx[2] = ridx[3] =
1397 sc->sc_fixed_ridx;
1398 }
1399 else {
1400 int txrate = ni->ni_txrate;
1401 /* Use fallback table of the node. */
1402 for (i = 0; i < 4; i++) {
1403 ridx[i] = an->ridx[txrate];
1404 txrate = an->fallback[txrate];
1405 }
1406 }
1407
1408 if (__predict_false(sc->sc_drvbpf != NULL)) {
1409 struct athn_tx_radiotap_header *tap = &sc->sc_txtap;
1410
1411 tap->wt_flags = 0;
1412 /* Use initial transmit rate. */
1413 tap->wt_rate = athn_rates[ridx[0]].rate;
1414 tap->wt_chan_freq = htole16(ic->ic_curchan->ic_freq);
1415 tap->wt_chan_flags = htole16(ic->ic_curchan->ic_flags);
1416 // XXX tap->wt_hwqueue = qid;
1417 if (ridx[0] != ATHN_RIDX_CCK1 &&
1418 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1419 tap->wt_flags |= IEEE80211_RADIOTAP_F_SHORTPRE;
1420
1421 bpf_mtap2(sc->sc_drvbpf, tap, sc->sc_txtap_len, m, BPF_D_OUT);
1422 }
1423
1424 /* DMA map mbuf. */
1425 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1426 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1427 if (__predict_false(error != 0)) {
1428 if (error != EFBIG) {
1429 aprint_error_dev(sc->sc_dev,
1430 "can't map mbuf (error %d)\n", error);
1431 m_freem(m);
1432 return error;
1433 }
1434 /*
1435 * DMA mapping requires too many DMA segments; linearize
1436 * mbuf in kernel virtual address space and retry.
1437 */
1438 MGETHDR(m1, M_DONTWAIT, MT_DATA);
1439 if (m1 == NULL) {
1440 m_freem(m);
1441 return ENOBUFS;
1442 }
1443 if (m->m_pkthdr.len > (int)MHLEN) {
1444 MCLGET(m1, M_DONTWAIT);
1445 if (!(m1->m_flags & M_EXT)) {
1446 m_freem(m);
1447 m_freem(m1);
1448 return ENOBUFS;
1449 }
1450 }
1451 m_copydata(m, 0, m->m_pkthdr.len, mtod(m1, void *));
1452 m1->m_pkthdr.len = m1->m_len = m->m_pkthdr.len;
1453 m_freem(m);
1454 m = m1;
1455
1456 error = bus_dmamap_load_mbuf(sc->sc_dmat, bf->bf_map, m,
1457 BUS_DMA_NOWAIT | BUS_DMA_WRITE);
1458 if (error != 0) {
1459 aprint_error_dev(sc->sc_dev,
1460 "can't map mbuf (error %d)\n", error);
1461 m_freem(m);
1462 return error;
1463 }
1464 }
1465 bf->bf_m = m;
1466 bf->bf_ni = ni;
1467 bf->bf_txflags = txflags;
1468
1469 wh = mtod(m, struct ieee80211_frame *);
1470
1471 totlen = m->m_pkthdr.len + IEEE80211_CRC_LEN;
1472
1473 /* Clear all Tx descriptors that we will use. */
1474 memset(bf->bf_descs, 0, bf->bf_map->dm_nsegs * sizeof(*ds));
1475
1476 /* Setup first Tx descriptor. */
1477 ds = bf->bf_descs;
1478
1479 ds->ds_ctl0 = AR_TXC0_INTR_REQ | AR_TXC0_CLR_DEST_MASK;
1480 txpower = AR_MAX_RATE_POWER; /* Get from per-rate registers. */
1481 ds->ds_ctl0 |= SM(AR_TXC0_XMIT_POWER, txpower);
1482
1483 ds->ds_ctl1 = SM(AR_TXC1_FRAME_TYPE, type);
1484
1485 if (IEEE80211_IS_MULTICAST(wh->i_addr1) ||
1486 (hasqos && (qos & IEEE80211_QOS_ACKPOLICY_MASK) ==
1487 IEEE80211_QOS_ACKPOLICY_NOACK))
1488 ds->ds_ctl1 |= AR_TXC1_NO_ACK;
1489 #if notyet
1490 if (0 && k != NULL) {
1491 uintptr_t entry;
1492
1493 /*
1494 * Map 802.11 cipher to hardware encryption type and
1495 * compute MIC+ICV overhead.
1496 */
1497 totlen += k->wk_keylen;
1498 switch (k->wk_cipher->ic_cipher) {
1499 case IEEE80211_CIPHER_WEP:
1500 encrtype = AR_ENCR_TYPE_WEP;
1501 break;
1502 case IEEE80211_CIPHER_TKIP:
1503 encrtype = AR_ENCR_TYPE_TKIP;
1504 break;
1505 case IEEE80211_CIPHER_AES_OCB:
1506 case IEEE80211_CIPHER_AES_CCM:
1507 encrtype = AR_ENCR_TYPE_AES;
1508 break;
1509 default:
1510 panic("unsupported cipher");
1511 }
1512 /*
1513 * NB: The key cache entry index is stored in the key
1514 * private field when the key is installed.
1515 */
1516 entry = (uintptr_t)k->k_priv;
1517 ds->ds_ctl1 |= SM(AR_TXC1_DEST_IDX, entry);
1518 ds->ds_ctl0 |= AR_TXC0_DEST_IDX_VALID;
1519 }
1520 else
1521 #endif
1522 encrtype = AR_ENCR_TYPE_CLEAR;
1523 ds->ds_ctl6 = SM(AR_TXC6_ENCR_TYPE, encrtype);
1524
1525 /* Check if frame must be protected using RTS/CTS or CTS-to-self. */
1526 if (!IEEE80211_IS_MULTICAST(wh->i_addr1)) {
1527 /* NB: Group frames are sent using CCK in 802.11b/g. */
1528 if (totlen > ic->ic_rtsthreshold) {
1529 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1530 }
1531 else if ((ic->ic_flags & IEEE80211_F_USEPROT) &&
1532 athn_rates[ridx[0]].phy == IEEE80211_T_OFDM) {
1533 if (ic->ic_protmode == IEEE80211_PROT_RTSCTS)
1534 ds->ds_ctl0 |= AR_TXC0_RTS_ENABLE;
1535 else if (ic->ic_protmode == IEEE80211_PROT_CTSONLY)
1536 ds->ds_ctl0 |= AR_TXC0_CTS_ENABLE;
1537 }
1538 }
1539 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1540 /* Disable multi-rate retries when protection is used. */
1541 ridx[1] = ridx[2] = ridx[3] = ridx[0];
1542 }
1543 /* Setup multi-rate retries. */
1544 for (i = 0; i < 4; i++) {
1545 series[i].hwrate = athn_rates[ridx[i]].hwrate;
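/* For CCK rates other than 1 Mbps, bit 2 of the hardware rate code selects the short preamble variant. */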
1546 if (athn_rates[ridx[i]].phy == IEEE80211_T_DS &&
1547 ridx[i] != ATHN_RIDX_CCK1 &&
1548 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1549 series[i].hwrate |= 0x04;
1550 series[i].dur = 0;
1551 }
1552 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1553 /* Compute duration for each series. */
1554 for (i = 0; i < 4; i++) {
1555 series[i].dur = athn_txtime(sc, IEEE80211_ACK_LEN,
1556 athn_rates[ridx[i]].rspridx, ic->ic_flags);
1557 }
1558 }
1559
1560 /* Write number of tries for each series. */
1561 ds->ds_ctl2 =
1562 SM(AR_TXC2_XMIT_DATA_TRIES0, 2) |
1563 SM(AR_TXC2_XMIT_DATA_TRIES1, 2) |
1564 SM(AR_TXC2_XMIT_DATA_TRIES2, 2) |
1565 SM(AR_TXC2_XMIT_DATA_TRIES3, 4);
1566
1567 /* Tell HW to update duration field in 802.11 header. */
1568 if (type != AR_FRAME_TYPE_PSPOLL)
1569 ds->ds_ctl2 |= AR_TXC2_DUR_UPDATE_ENA;
1570
1571 /* Write Tx rate for each series. */
1572 ds->ds_ctl3 =
1573 SM(AR_TXC3_XMIT_RATE0, series[0].hwrate) |
1574 SM(AR_TXC3_XMIT_RATE1, series[1].hwrate) |
1575 SM(AR_TXC3_XMIT_RATE2, series[2].hwrate) |
1576 SM(AR_TXC3_XMIT_RATE3, series[3].hwrate);
1577
1578 /* Write duration for each series. */
1579 ds->ds_ctl4 =
1580 SM(AR_TXC4_PACKET_DUR0, series[0].dur) |
1581 SM(AR_TXC4_PACKET_DUR1, series[1].dur);
1582 ds->ds_ctl5 =
1583 SM(AR_TXC5_PACKET_DUR2, series[2].dur) |
1584 SM(AR_TXC5_PACKET_DUR3, series[3].dur);
1585
1586 /* Use the same Tx chains for all tries. */
1587 ds->ds_ctl7 =
1588 SM(AR_TXC7_CHAIN_SEL0, sc->sc_txchainmask) |
1589 SM(AR_TXC7_CHAIN_SEL1, sc->sc_txchainmask) |
1590 SM(AR_TXC7_CHAIN_SEL2, sc->sc_txchainmask) |
1591 SM(AR_TXC7_CHAIN_SEL3, sc->sc_txchainmask);
1592 #ifdef notyet
1593 #ifndef IEEE80211_NO_HT
1594 /* Use the same short GI setting for all tries. */
1595 if (ic->ic_flags & IEEE80211_F_SHGI)
1596 ds->ds_ctl7 |= AR_TXC7_GI0123;
1597 /* Use the same channel width for all tries. */
1598 if (ic->ic_flags & IEEE80211_F_CBW40)
1599 ds->ds_ctl7 |= AR_TXC7_2040_0123;
1600 #endif
1601 #endif
1602
1603 if (ds->ds_ctl0 & (AR_TXC0_RTS_ENABLE | AR_TXC0_CTS_ENABLE)) {
1604 uint8_t protridx, hwrate;
1605 uint16_t dur = 0;
1606
1607 /* Use the same protection mode for all tries. */
1608 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1609 ds->ds_ctl4 |= AR_TXC4_RTSCTS_QUAL01;
1610 ds->ds_ctl5 |= AR_TXC5_RTSCTS_QUAL23;
1611 }
1612 /* Select protection rate (suboptimal but ok). */
1613 protridx = (ic->ic_curmode == IEEE80211_MODE_11A) ?
1614 ATHN_RIDX_OFDM6 : ATHN_RIDX_CCK2;
1615 if (ds->ds_ctl0 & AR_TXC0_RTS_ENABLE) {
1616 /* Account for CTS duration. */
1617 dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1618 athn_rates[protridx].rspridx, ic->ic_flags);
1619 }
1620 dur += athn_txtime(sc, totlen, ridx[0], ic->ic_flags);
1621 if (!(ds->ds_ctl1 & AR_TXC1_NO_ACK)) {
1622 /* Account for ACK duration. */
1623 dur += athn_txtime(sc, IEEE80211_ACK_LEN,
1624 athn_rates[ridx[0]].rspridx, ic->ic_flags);
1625 }
1626 /* Write protection frame duration and rate. */
1627 ds->ds_ctl2 |= SM(AR_TXC2_BURST_DUR, dur);
1628 hwrate = athn_rates[protridx].hwrate;
1629 if (protridx == ATHN_RIDX_CCK2 &&
1630 (ic->ic_flags & IEEE80211_F_SHPREAMBLE))
1631 hwrate |= 0x04;
1632 ds->ds_ctl7 |= SM(AR_TXC7_RTSCTS_RATE, hwrate);
1633 }
1634
1635 /* Finalize first Tx descriptor and fill others (if any). */
1636 ds->ds_ctl0 |= SM(AR_TXC0_FRAME_LEN, totlen);
1637
1638 lastds = NULL; /* XXX: gcc */
1639 for (i = 0; i < bf->bf_map->dm_nsegs; i++, ds++) {
1640 ds->ds_data = bf->bf_map->dm_segs[i].ds_addr;
1641 ds->ds_ctl1 |= SM(AR_TXC1_BUF_LEN,
1642 bf->bf_map->dm_segs[i].ds_len);
1643
1644 if (i != bf->bf_map->dm_nsegs - 1)
1645 ds->ds_ctl1 |= AR_TXC1_MORE;
1646 ds->ds_link = 0;
1647
1648 /* Chain Tx descriptor. */
1649 if (i != 0)
1650 lastds->ds_link = bf->bf_daddr + i * sizeof(*ds);
1651 lastds = ds;
1652 }
1653 bus_dmamap_sync(sc->sc_dmat, bf->bf_map, 0, bf->bf_map->dm_mapsize,
1654 BUS_DMASYNC_PREWRITE);
1655
1656 if (!SIMPLEQ_EMPTY(&txq->head))
1657 ((struct ar_tx_desc *)txq->lastds)->ds_link = bf->bf_daddr;
1658 else
1659 AR_WRITE(sc, AR_QTXDP(qid), bf->bf_daddr);
1660 txq->lastds = lastds;
1661 SIMPLEQ_REMOVE_HEAD(&sc->sc_txbufs, bf_list);
1662 SIMPLEQ_INSERT_TAIL(&txq->head, bf, bf_list);
1663
1664 ds = bf->bf_descs;
1665 DPRINTFN(DBG_TX, sc,
1666 "Tx qid=%d nsegs=%d ctl0=0x%x ctl1=0x%x ctl3=0x%x\n",
1667 qid, bf->bf_map->dm_nsegs, ds->ds_ctl0, ds->ds_ctl1, ds->ds_ctl3);
1668
1669 /* Kick Tx. */
1670 AR_WRITE(sc, AR_Q_TXE, 1 << qid);
1671 AR_WRITE_BARRIER(sc);
1672 return 0;
1673 }
1674
1675 Static void
1676 ar5008_set_rf_mode(struct athn_softc *sc, struct ieee80211_channel *c)
1677 {
1678 uint32_t reg;
1679
1680 reg = IEEE80211_IS_CHAN_2GHZ(c) ?
1681 AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1682 if (!AR_SREV_9280_10_OR_LATER(sc)) {
1683 reg |= IEEE80211_IS_CHAN_2GHZ(c) ?
1684 AR_PHY_MODE_RF2GHZ : AR_PHY_MODE_RF5GHZ;
1685 }
1686 else if (IEEE80211_IS_CHAN_5GHZ(c) &&
1687 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
1688 reg |= AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE;
1689 }
1690 AR_WRITE(sc, AR_PHY_MODE, reg);
1691 AR_WRITE_BARRIER(sc);
1692 }
1693
1694 static __inline uint32_t
1695 ar5008_synth_delay(struct athn_softc *sc)
1696 {
1697 uint32_t synth_delay;
1698
1699 synth_delay = MS(AR_READ(sc, AR_PHY_RX_DELAY), AR_PHY_RX_DELAY_DELAY);
1700 if (sc->sc_ic.ic_curmode == IEEE80211_MODE_11B)
1701 synth_delay = (synth_delay * 4) / 22;
1702 else
1703 synth_delay = synth_delay / 10; /* in 100ns steps */
1704 return synth_delay;
1705 }
1706
1707 Static int
1708 ar5008_rf_bus_request(struct athn_softc *sc)
1709 {
1710 int ntries;
1711
1712 /* Request RF Bus grant. */
1713 AR_WRITE(sc, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1714 for (ntries = 0; ntries < 10000; ntries++) {
1715 if (AR_READ(sc, AR_PHY_RFBUS_GRANT) & AR_PHY_RFBUS_GRANT_EN)
1716 return 0;
1717 DELAY(10);
1718 }
1719 DPRINTFN(DBG_RF, sc, "could not kill baseband Rx");
1720 return ETIMEDOUT;
1721 }
1722
1723 Static void
1724 ar5008_rf_bus_release(struct athn_softc *sc)
1725 {
1726
1727 /* Wait for the synthesizer to settle. */
1728 DELAY(AR_BASE_PHY_ACTIVE_DELAY + ar5008_synth_delay(sc));
1729
1730 /* Release the RF Bus grant. */
1731 AR_WRITE(sc, AR_PHY_RFBUS_REQ, 0);
1732 AR_WRITE_BARRIER(sc);
1733 }
1734
1735 Static void
1736 ar5008_set_phy(struct athn_softc *sc, struct ieee80211_channel *c,
1737 struct ieee80211_channel *extc)
1738 {
1739 uint32_t phy;
1740
1741 if (AR_SREV_9285_10_OR_LATER(sc))
1742 phy = AR_READ(sc, AR_PHY_TURBO) & AR_PHY_FC_ENABLE_DAC_FIFO;
1743 else
1744 phy = 0;
1745 phy |= AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 |
1746 AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH;
1747 #ifndef IEEE80211_NO_HT
1748 if (extc != NULL) {
1749 phy |= AR_PHY_FC_DYN2040_EN;
1750 if (extc > c) /* XXX */
1751 phy |= AR_PHY_FC_DYN2040_PRI_CH;
1752 }
1753 #endif
1754 AR_WRITE(sc, AR_PHY_TURBO, phy);
1755
1756 AR_WRITE(sc, AR_2040_MODE,
1757 (extc != NULL) ? AR_2040_JOINED_RX_CLEAR : 0);
1758
1759 /* Set global transmit timeout. */
1760 AR_WRITE(sc, AR_GTXTO, SM(AR_GTXTO_TIMEOUT_LIMIT, 25));
1761 /* Set carrier sense timeout. */
1762 AR_WRITE(sc, AR_CST, SM(AR_CST_TIMEOUT_LIMIT, 15));
1763 AR_WRITE_BARRIER(sc);
1764 }
1765
1766 Static void
1767 ar5008_set_delta_slope(struct athn_softc *sc, struct ieee80211_channel *c,
1768 struct ieee80211_channel *extc)
1769 {
1770 uint32_t coeff, exp, man, reg;
1771
1772 /* Set Delta Slope (exponent and mantissa). */
1773 coeff = (100 << 24) / c->ic_freq;
1774 athn_get_delta_slope(coeff, &exp, &man);
1775 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1776
1777 reg = AR_READ(sc, AR_PHY_TIMING3);
1778 reg = RW(reg, AR_PHY_TIMING3_DSC_EXP, exp);
1779 reg = RW(reg, AR_PHY_TIMING3_DSC_MAN, man);
1780 AR_WRITE(sc, AR_PHY_TIMING3, reg);
1781
1782 /* For Short GI, coeff is 9/10 that of normal coeff. */
1783 coeff = (9 * coeff) / 10;
1784 athn_get_delta_slope(coeff, &exp, &man);
1785 DPRINTFN(DBG_RX, sc, "delta slope coeff exp=%u man=%u\n", exp, man);
1786
1787 reg = AR_READ(sc, AR_PHY_HALFGI);
1788 reg = RW(reg, AR_PHY_HALFGI_DSC_EXP, exp);
1789 reg = RW(reg, AR_PHY_HALFGI_DSC_MAN, man);
1790 AR_WRITE(sc, AR_PHY_HALFGI, reg);
1791 AR_WRITE_BARRIER(sc);
1792 }
1793
1794 Static void
1795 ar5008_enable_antenna_diversity(struct athn_softc *sc)
1796 {
1797
1798 AR_SETBITS(sc, AR_PHY_CCK_DETECT,
1799 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV);
1800 AR_WRITE_BARRIER(sc);
1801 }
1802
1803 Static void
1804 ar5008_init_baseband(struct athn_softc *sc)
1805 {
1806 uint32_t synth_delay;
1807
1808 synth_delay = ar5008_synth_delay(sc);
1809 /* Activate the PHY (includes baseband activate and synthesizer on). */
1810 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1811 AR_WRITE_BARRIER(sc);
1812 DELAY(AR_BASE_PHY_ACTIVE_DELAY + synth_delay);
1813 }
1814
1815 Static void
1816 ar5008_disable_phy(struct athn_softc *sc)
1817 {
1818
1819 AR_WRITE(sc, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1820 AR_WRITE_BARRIER(sc);
1821 }
1822
1823 Static void
1824 ar5008_init_chains(struct athn_softc *sc)
1825 {
1826
1827 if (sc->sc_rxchainmask == 0x5 || sc->sc_txchainmask == 0x5)
1828 AR_SETBITS(sc, AR_PHY_ANALOG_SWAP, AR_PHY_SWAP_ALT_CHAIN);
1829
1830 /* Setup chain masks. */
1831 if (sc->sc_mac_ver <= AR_SREV_VERSION_9160 &&
1832 (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5)) {
1833 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, 0x7);
1834 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, 0x7);
1835 }
1836 else {
1837 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1838 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1839 }
1840 AR_WRITE(sc, AR_SELFGEN_MASK, sc->sc_txchainmask);
1841 AR_WRITE_BARRIER(sc);
1842 }
1843
1844 Static void
1845 ar5008_set_rxchains(struct athn_softc *sc)
1846 {
1847
1848 if (sc->sc_rxchainmask == 0x3 || sc->sc_rxchainmask == 0x5) {
1849 AR_WRITE(sc, AR_PHY_RX_CHAINMASK, sc->sc_rxchainmask);
1850 AR_WRITE(sc, AR_PHY_CAL_CHAINMASK, sc->sc_rxchainmask);
1851 AR_WRITE_BARRIER(sc);
1852 }
1853 }
1854
1855 #ifdef notused
1856 Static void
1857 ar5008_read_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1858 {
1859 /* Sign-extends 9-bit value (assumes upper bits are zeroes). */
1860 #define SIGN_EXT(v) (((v) ^ 0x100) - 0x100)
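/*
 * Flipping bit 8 and then subtracting 0x100 leaves 0x000-0x0ff
 * unchanged and maps 0x100-0x1ff to -0x100..-0x001, i.e. it
 * sign-extends the 9-bit two's complement value.
 */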
1861 uint32_t reg;
1862 int i;
1863
1864 for (i = 0; i < sc->sc_nrxchains; i++) {
1865 reg = AR_READ(sc, AR_PHY_CCA(i));
1866 if (AR_SREV_9280_10_OR_LATER(sc))
1867 nf[i] = MS(reg, AR9280_PHY_MINCCA_PWR);
1868 else
1869 nf[i] = MS(reg, AR_PHY_MINCCA_PWR);
1870 nf[i] = SIGN_EXT(nf[i]);
1871
1872 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1873 if (AR_SREV_9280_10_OR_LATER(sc))
1874 nf_ext[i] = MS(reg, AR9280_PHY_EXT_MINCCA_PWR);
1875 else
1876 nf_ext[i] = MS(reg, AR_PHY_EXT_MINCCA_PWR);
1877 nf_ext[i] = SIGN_EXT(nf_ext[i]);
1878 }
1879 #undef SIGN_EXT
1880 }
1881 #endif /* notused */
1882
1883 #ifdef notused
1884 Static void
1885 ar5008_write_noisefloor(struct athn_softc *sc, int16_t *nf, int16_t *nf_ext)
1886 {
1887 uint32_t reg;
1888 int i;
1889
1890 for (i = 0; i < sc->sc_nrxchains; i++) {
1891 reg = AR_READ(sc, AR_PHY_CCA(i));
1892 reg = RW(reg, AR_PHY_MAXCCA_PWR, nf[i]);
1893 AR_WRITE(sc, AR_PHY_CCA(i), reg);
1894
1895 reg = AR_READ(sc, AR_PHY_EXT_CCA(i));
1896 reg = RW(reg, AR_PHY_EXT_MAXCCA_PWR, nf_ext[i]);
1897 AR_WRITE(sc, AR_PHY_EXT_CCA(i), reg);
1898 }
1899 AR_WRITE_BARRIER(sc);
1900 }
1901 #endif /* notused */
1902
1903 #ifdef notused
1904 Static void
1905 ar5008_get_noisefloor(struct athn_softc *sc, struct ieee80211_channel *c)
1906 {
1907 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1908 int i;
1909
1910 if (AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF) {
1911 /* Noisefloor calibration not finished. */
1912 return;
1913 }
1914 /* Noisefloor calibration is finished. */
1915 ar5008_read_noisefloor(sc, nf, nf_ext);
1916
1917 /* Update noisefloor history. */
1918 for (i = 0; i < sc->sc_nrxchains; i++) {
1919 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf[i] = nf[i];
1920 sc->sc_nf_hist[sc->sc_nf_hist_cur].nf_ext[i] = nf_ext[i];
1921 }
1922 if (++sc->sc_nf_hist_cur >= ATHN_NF_CAL_HIST_MAX)
1923 sc->sc_nf_hist_cur = 0;
1924 }
1925 #endif /* notused */
1926
1927 #ifdef notused
1928 Static void
1929 ar5008_bb_load_noisefloor(struct athn_softc *sc)
1930 {
1931 int16_t nf[AR_MAX_CHAINS], nf_ext[AR_MAX_CHAINS];
1932 int i, ntries;
1933
1934 /* Write filtered noisefloor values. */
1935 for (i = 0; i < sc->sc_nrxchains; i++) {
1936 nf[i] = sc->sc_nf_priv[i] * 2;
1937 nf_ext[i] = sc->sc_nf_ext_priv[i] * 2;
1938 }
1939 ar5008_write_noisefloor(sc, nf, nf_ext);
1940
1941 /* Load filtered noisefloor values into baseband. */
1942 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1943 AR_CLRBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1944 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1945 /* Wait for load to complete. */
1946 for (ntries = 0; ntries < 1000; ntries++) {
1947 if (!(AR_READ(sc, AR_PHY_AGC_CONTROL) & AR_PHY_AGC_CONTROL_NF))
1948 break;
1949 DELAY(50);
1950 }
1951 if (ntries == 1000) {
1952 DPRINTFN(DBG_RF, sc, "failed to load noisefloor values\n");
1953 return;
1954 }
1955
1956 /* Restore noisefloor values to initial (max) values. */
1957 for (i = 0; i < AR_MAX_CHAINS; i++)
1958 nf[i] = nf_ext[i] = -50 * 2;
1959 ar5008_write_noisefloor(sc, nf, nf_ext);
1960 }
1961 #endif /* notused */
1962
1963 #ifdef notused
1964 Static void
1965 ar5008_noisefloor_calib(struct athn_softc *sc)
1966 {
1967
1968 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_ENABLE_NF);
1969 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NO_UPDATE_NF);
1970 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1971 AR_WRITE_BARRIER(sc);
1972 }
1973 #endif /* notused */
1974
1975 Static void
1976 ar5008_do_noisefloor_calib(struct athn_softc *sc)
1977 {
1978
1979 AR_SETBITS(sc, AR_PHY_AGC_CONTROL, AR_PHY_AGC_CONTROL_NF);
1980 AR_WRITE_BARRIER(sc);
1981 }
1982
1983 Static void
1984 ar5008_do_calib(struct athn_softc *sc)
1985 {
1986 uint32_t mode, reg;
1987 int log;
1988
1989 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4_0);
1990 log = AR_SREV_9280_10_OR_LATER(sc) ? 10 : 2;
1991 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCAL_LOG_COUNT_MAX, log);
1992 AR_WRITE(sc, AR_PHY_TIMING_CTRL4_0, reg);
1993
1994 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
1995 mode = AR_PHY_CALMODE_ADC_GAIN;
1996 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
1997 mode = AR_PHY_CALMODE_ADC_DC_PER;
1998 else /* ATHN_CAL_IQ */
1999 mode = AR_PHY_CALMODE_IQ;
2000 AR_WRITE(sc, AR_PHY_CALMODE, mode);
2001
2002 DPRINTFN(DBG_RF, sc, "starting calibration mode=0x%x\n", mode);
2003 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0, AR_PHY_TIMING_CTRL4_DO_CAL);
2004 AR_WRITE_BARRIER(sc);
2005 }
2006
2007 Static void
2008 ar5008_next_calib(struct athn_softc *sc)
2009 {
2010
2011 /* Check if we have any calibration in progress. */
2012 if (sc->sc_cur_calib_mask != 0) {
2013 if (!(AR_READ(sc, AR_PHY_TIMING_CTRL4_0) &
2014 AR_PHY_TIMING_CTRL4_DO_CAL)) {
2015 /* Calibration completed for current sample. */
2016 if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_GAIN)
2017 ar5008_calib_adc_gain(sc);
2018 else if (sc->sc_cur_calib_mask & ATHN_CAL_ADC_DC)
2019 ar5008_calib_adc_dc_off(sc);
2020 else /* ATHN_CAL_IQ */
2021 ar5008_calib_iq(sc);
2022 }
2023 }
2024 }
2025
2026 Static void
2027 ar5008_calib_iq(struct athn_softc *sc)
2028 {
2029 struct athn_iq_cal *cal;
2030 uint32_t reg, i_coff_denom, q_coff_denom;
2031 int32_t i_coff, q_coff;
2032 int i, iq_corr_neg;
2033
2034 for (i = 0; i < AR_MAX_CHAINS; i++) {
2035 cal = &sc->sc_calib.iq[i];
2036
2037 /* Accumulate IQ calibration measures (clear on read). */
2038 cal->pwr_meas_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2039 cal->pwr_meas_q += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2040 cal->iq_corr_meas +=
2041 (int32_t)AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2042 }
2043 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2044 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2045 /* Not enough samples accumulated, continue. */
2046 ar5008_do_calib(sc);
2047 return;
2048 }
2049
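/*
 * Derive the correction coefficients from the accumulated measures:
 * i_coff is roughly the I/Q cross-correlation normalized by the
 * average I/Q power (a phase-error estimate), while q_coff is roughly
 * 64 * (pwr_meas_i / pwr_meas_q - 1), i.e. the amplitude imbalance in
 * 1/64 units.
 */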
2050 for (i = 0; i < sc->sc_nrxchains; i++) {
2051 cal = &sc->sc_calib.iq[i];
2052
2053 if (cal->pwr_meas_q == 0)
2054 continue;
2055
2056 if ((iq_corr_neg = cal->iq_corr_meas) < 0)
2057 cal->iq_corr_meas = -cal->iq_corr_meas;
2058
2059 i_coff_denom =
2060 (cal->pwr_meas_i / 2 + cal->pwr_meas_q / 2) / 128;
2061 q_coff_denom = cal->pwr_meas_q / 64;
2062
2063 if (i_coff_denom == 0 || q_coff_denom == 0)
2064 continue; /* Prevents division by zero. */
2065
2066 i_coff = cal->iq_corr_meas / i_coff_denom;
2067 q_coff = (cal->pwr_meas_i / q_coff_denom) - 64;
2068
2069 /* Negate i_coff if iq_corr_meas is positive. */
2070 if (!iq_corr_neg)
2071 i_coff = 0x40 - (i_coff & 0x3f);
2072 if (q_coff > 15)
2073 q_coff = 15;
2074 else if (q_coff <= -16)
2075 q_coff = -16; /* XXX Linux has a bug here? */
2076
2077 DPRINTFN(DBG_RF, sc, "IQ calibration for chain %d\n", i);
2078 reg = AR_READ(sc, AR_PHY_TIMING_CTRL4(i));
2079 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_I_COFF, i_coff);
2080 reg = RW(reg, AR_PHY_TIMING_CTRL4_IQCORR_Q_Q_COFF, q_coff);
2081 AR_WRITE(sc, AR_PHY_TIMING_CTRL4(i), reg);
2082 }
2083
2084 /* Apply new settings. */
2085 AR_SETBITS(sc, AR_PHY_TIMING_CTRL4_0,
2086 AR_PHY_TIMING_CTRL4_IQCORR_ENABLE);
2087 AR_WRITE_BARRIER(sc);
2088
2089 /* IQ calibration done. */
2090 sc->sc_cur_calib_mask &= ~ATHN_CAL_IQ;
2091 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2092 }
2093
2094 Static void
2095 ar5008_calib_adc_gain(struct athn_softc *sc)
2096 {
2097 struct athn_adc_cal *cal;
2098 uint32_t reg, gain_mismatch_i, gain_mismatch_q;
2099 int i;
2100
2101 for (i = 0; i < AR_MAX_CHAINS; i++) {
2102 cal = &sc->sc_calib.adc_gain[i];
2103
2104 /* Accumulate ADC gain measures (clear on read). */
2105 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2106 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2107 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2108 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2109 }
2110 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2111 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2112 /* Not enough samples accumulated, continue. */
2113 ar5008_do_calib(sc);
2114 return;
2115 }
2116
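/*
 * The gain mismatch is estimated as the even/odd (I) and odd/even (Q)
 * power ratios, scaled by 32 to fit the IGAIN/QGAIN correction fields
 * (presumably expressed in 1/32 units).
 */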
2117 for (i = 0; i < sc->sc_nrxchains; i++) {
2118 cal = &sc->sc_calib.adc_gain[i];
2119
2120 if (cal->pwr_meas_odd_i == 0 || cal->pwr_meas_even_q == 0)
2121 continue; /* Prevents division by zero. */
2122
2123 gain_mismatch_i =
2124 (cal->pwr_meas_even_i * 32) / cal->pwr_meas_odd_i;
2125 gain_mismatch_q =
2126 (cal->pwr_meas_odd_q * 32) / cal->pwr_meas_even_q;
2127
2128 DPRINTFN(DBG_RF, sc, "ADC gain calibration for chain %d\n", i);
2129 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2130 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IGAIN, gain_mismatch_i);
2131 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QGAIN, gain_mismatch_q);
2132 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2133 }
2134
2135 /* Apply new settings. */
2136 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2137 AR_PHY_NEW_ADC_GAIN_CORR_ENABLE);
2138 AR_WRITE_BARRIER(sc);
2139
2140 /* ADC gain calibration done. */
2141 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_GAIN;
2142 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2143 }
2144
2145 Static void
2146 ar5008_calib_adc_dc_off(struct athn_softc *sc)
2147 {
2148 struct athn_adc_cal *cal;
2149 int32_t dc_offset_mismatch_i, dc_offset_mismatch_q;
2150 uint32_t reg;
2151 int count, i;
2152
2153 for (i = 0; i < AR_MAX_CHAINS; i++) {
2154 cal = &sc->sc_calib.adc_dc_offset[i];
2155
2156 /* Accumulate ADC DC offset measures (clear on read). */
2157 cal->pwr_meas_odd_i += AR_READ(sc, AR_PHY_CAL_MEAS_0(i));
2158 cal->pwr_meas_even_i += AR_READ(sc, AR_PHY_CAL_MEAS_1(i));
2159 cal->pwr_meas_odd_q += AR_READ(sc, AR_PHY_CAL_MEAS_2(i));
2160 cal->pwr_meas_even_q += AR_READ(sc, AR_PHY_CAL_MEAS_3(i));
2161 }
2162 if (!AR_SREV_9280_10_OR_LATER(sc) &&
2163 ++sc->sc_calib.nsamples < AR_CAL_SAMPLES) {
2164 /* Not enough samples accumulated, continue. */
2165 ar5008_do_calib(sc);
2166 return;
2167 }
2168
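/*
 * Each calibration run accumulates 2^(IQCAL_LOG_COUNT_MAX + 5)
 * samples (LOG_COUNT_MAX is 10 on AR9280 and later, 2 otherwise, cf.
 * ar5008_do_calib()); on pre-AR9280 parts the sums span
 * AR_CAL_SAMPLES runs, hence the extra factor.  "count" is then used
 * to turn the accumulated sums into average DC offset mismatches.
 */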
2169 if (AR_SREV_9280_10_OR_LATER(sc))
2170 count = (1 << (10 + 5));
2171 else
2172 count = (1 << ( 2 + 5)) * AR_CAL_SAMPLES;
2173 for (i = 0; i < sc->sc_nrxchains; i++) {
2174 cal = &sc->sc_calib.adc_dc_offset[i];
2175
2176 dc_offset_mismatch_i =
2177 (cal->pwr_meas_even_i - cal->pwr_meas_odd_i * 2) / count;
2178 dc_offset_mismatch_q =
2179 (cal->pwr_meas_odd_q - cal->pwr_meas_even_q * 2) / count;
2180
2181 DPRINTFN(DBG_RF, sc, "ADC DC offset calibration for chain %d\n", i);
2182 reg = AR_READ(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i));
2183 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_QDC,
2184 dc_offset_mismatch_q);
2185 reg = RW(reg, AR_PHY_NEW_ADC_DC_GAIN_IDC,
2186 dc_offset_mismatch_i);
2187 AR_WRITE(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(i), reg);
2188 }
2189
2190 /* Apply new settings. */
2191 AR_SETBITS(sc, AR_PHY_NEW_ADC_DC_GAIN_CORR(0),
2192 AR_PHY_NEW_ADC_DC_OFFSET_CORR_ENABLE);
2193 AR_WRITE_BARRIER(sc);
2194
2195 /* ADC DC offset calibration done. */
2196 sc->sc_cur_calib_mask &= ~ATHN_CAL_ADC_DC;
2197 memset(&sc->sc_calib, 0, sizeof(sc->sc_calib));
2198 }
2199
2200 PUBLIC void
2201 ar5008_write_txpower(struct athn_softc *sc, int16_t power[ATHN_POWER_COUNT])
2202 {
2203
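/*
 * Each AR_PHY_POWER_TX_RATEn register packs four 6-bit per-rate
 * transmit power values (presumably in 0.5 dB steps), lowest rate or
 * MCS in the least significant byte.
 */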
2204 AR_WRITE(sc, AR_PHY_POWER_TX_RATE1,
2205 (power[ATHN_POWER_OFDM18 ] & 0x3f) << 24 |
2206 (power[ATHN_POWER_OFDM12 ] & 0x3f) << 16 |
2207 (power[ATHN_POWER_OFDM9 ] & 0x3f) << 8 |
2208 (power[ATHN_POWER_OFDM6 ] & 0x3f));
2209 AR_WRITE(sc, AR_PHY_POWER_TX_RATE2,
2210 (power[ATHN_POWER_OFDM54 ] & 0x3f) << 24 |
2211 (power[ATHN_POWER_OFDM48 ] & 0x3f) << 16 |
2212 (power[ATHN_POWER_OFDM36 ] & 0x3f) << 8 |
2213 (power[ATHN_POWER_OFDM24 ] & 0x3f));
2214 AR_WRITE(sc, AR_PHY_POWER_TX_RATE3,
2215 (power[ATHN_POWER_CCK2_SP ] & 0x3f) << 24 |
2216 (power[ATHN_POWER_CCK2_LP ] & 0x3f) << 16 |
2217 (power[ATHN_POWER_XR ] & 0x3f) << 8 |
2218 (power[ATHN_POWER_CCK1_LP ] & 0x3f));
2219 AR_WRITE(sc, AR_PHY_POWER_TX_RATE4,
2220 (power[ATHN_POWER_CCK11_SP] & 0x3f) << 24 |
2221 (power[ATHN_POWER_CCK11_LP] & 0x3f) << 16 |
2222 (power[ATHN_POWER_CCK55_SP] & 0x3f) << 8 |
2223 (power[ATHN_POWER_CCK55_LP] & 0x3f));
2224 #ifndef IEEE80211_NO_HT
2225 AR_WRITE(sc, AR_PHY_POWER_TX_RATE5,
2226 (power[ATHN_POWER_HT20(3) ] & 0x3f) << 24 |
2227 (power[ATHN_POWER_HT20(2) ] & 0x3f) << 16 |
2228 (power[ATHN_POWER_HT20(1) ] & 0x3f) << 8 |
2229 (power[ATHN_POWER_HT20(0) ] & 0x3f));
2230 AR_WRITE(sc, AR_PHY_POWER_TX_RATE6,
2231 (power[ATHN_POWER_HT20(7) ] & 0x3f) << 24 |
2232 (power[ATHN_POWER_HT20(6) ] & 0x3f) << 16 |
2233 (power[ATHN_POWER_HT20(5) ] & 0x3f) << 8 |
2234 (power[ATHN_POWER_HT20(4) ] & 0x3f));
2235 AR_WRITE(sc, AR_PHY_POWER_TX_RATE7,
2236 (power[ATHN_POWER_HT40(3) ] & 0x3f) << 24 |
2237 (power[ATHN_POWER_HT40(2) ] & 0x3f) << 16 |
2238 (power[ATHN_POWER_HT40(1) ] & 0x3f) << 8 |
2239 (power[ATHN_POWER_HT40(0) ] & 0x3f));
2240 AR_WRITE(sc, AR_PHY_POWER_TX_RATE8,
2241 (power[ATHN_POWER_HT40(7) ] & 0x3f) << 24 |
2242 (power[ATHN_POWER_HT40(6) ] & 0x3f) << 16 |
2243 (power[ATHN_POWER_HT40(5) ] & 0x3f) << 8 |
2244 (power[ATHN_POWER_HT40(4) ] & 0x3f));
2245 AR_WRITE(sc, AR_PHY_POWER_TX_RATE9,
2246 (power[ATHN_POWER_OFDM_EXT] & 0x3f) << 24 |
2247 (power[ATHN_POWER_CCK_EXT ] & 0x3f) << 16 |
2248 (power[ATHN_POWER_OFDM_DUP] & 0x3f) << 8 |
2249 (power[ATHN_POWER_CCK_DUP ] & 0x3f));
2250 #endif
2251 AR_WRITE_BARRIER(sc);
2252 }
2253
2254 PUBLIC void
2255 ar5008_set_viterbi_mask(struct athn_softc *sc, int bin)
2256 {
2257 uint32_t mask[4], reg;
2258 uint8_t m[62], p[62]; /* XXX use bit arrays? */
2259 int i, bit, cur;
2260
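/*
 * "bin" is presumably the spur offset in the same units as the mask
 * entries.  The pilot mask covers offsets -6000..+6000 (excluding 0)
 * in steps of 100 and flags entries within 100 of the spur; the
 * viterbi mask covers -6100..+6100 with a tighter window of 75.
 */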
2261 /* Compute pilot mask. */
2262 cur = -6000;
2263 for (i = 0; i < 4; i++) {
2264 mask[i] = 0;
2265 for (bit = 0; bit < 30; bit++) {
2266 if (abs(cur - bin) < 100)
2267 mask[i] |= 1 << bit;
2268 cur += 100;
2269 }
2270 if (cur == 0) /* Skip entry "0". */
2271 cur = 100;
2272 }
2273 /* Write entries from -6000 to -3100. */
2274 AR_WRITE(sc, AR_PHY_TIMING7, mask[0]);
2275 AR_WRITE(sc, AR_PHY_TIMING9, mask[0]);
2276 /* Write entries from -3000 to -100. */
2277 AR_WRITE(sc, AR_PHY_TIMING8, mask[1]);
2278 AR_WRITE(sc, AR_PHY_TIMING10, mask[1]);
2279 /* Write entries from 100 to 3000. */
2280 AR_WRITE(sc, AR_PHY_PILOT_MASK_01_30, mask[2]);
2281 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_01_30, mask[2]);
2282 /* Write entries from 3100 to 6000. */
2283 AR_WRITE(sc, AR_PHY_PILOT_MASK_31_60, mask[3]);
2284 AR_WRITE(sc, AR_PHY_CHANNEL_MASK_31_60, mask[3]);
2285
2286 /* Compute viterbi mask. */
2287 for (cur = 6100; cur >= 0; cur -= 100)
2288 p[+cur / 100] = abs(cur - bin) < 75;
2289 for (cur = 0; cur >= -6100; cur -= 100)
2290 m[-cur / 100] = abs(cur - bin) < 75;
2291
2292 /* Write viterbi mask (XXX needs to be reworked). */
2293 reg =
2294 m[46] << 30 | m[47] << 28 | m[48] << 26 | m[49] << 24 |
2295 m[50] << 22 | m[51] << 20 | m[52] << 18 | m[53] << 16 |
2296 m[54] << 14 | m[55] << 12 | m[56] << 10 | m[57] << 8 |
2297 m[58] << 6 | m[59] << 4 | m[60] << 2 | m[61] << 0;
2298 AR_WRITE(sc, AR_PHY_BIN_MASK_1, reg);
2299 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_46_61, reg);
2300
2301 /* XXX m[48] should be m[38] ? */
2302 reg = m[31] << 28 | m[32] << 26 | m[33] << 24 |
2303 m[34] << 22 | m[35] << 20 | m[36] << 18 | m[37] << 16 |
2304 m[48] << 14 | m[39] << 12 | m[40] << 10 | m[41] << 8 |
2305 m[42] << 6 | m[43] << 4 | m[44] << 2 | m[45] << 0;
2306 AR_WRITE(sc, AR_PHY_BIN_MASK_2, reg);
2307 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_31_45, reg);
2308
2309 /* XXX This one is weird too. */
2310 reg =
2311 m[16] << 30 | m[16] << 28 | m[18] << 26 | m[18] << 24 |
2312 m[20] << 22 | m[20] << 20 | m[22] << 18 | m[22] << 16 |
2313 m[24] << 14 | m[24] << 12 | m[25] << 10 | m[26] << 8 |
2314 m[27] << 6 | m[28] << 4 | m[29] << 2 | m[30] << 0;
2315 AR_WRITE(sc, AR_PHY_BIN_MASK_3, reg);
2316 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_16_30, reg);
2317
2318 reg =
2319 m[ 0] << 30 | m[ 1] << 28 | m[ 2] << 26 | m[ 3] << 24 |
2320 m[ 4] << 22 | m[ 5] << 20 | m[ 6] << 18 | m[ 7] << 16 |
2321 m[ 8] << 14 | m[ 9] << 12 | m[10] << 10 | m[11] << 8 |
2322 m[12] << 6 | m[13] << 4 | m[14] << 2 | m[15] << 0;
2323 AR_WRITE(sc, AR_PHY_MASK_CTL, reg);
2324 AR_WRITE(sc, AR_PHY_VIT_MASK2_M_00_15, reg);
2325
2326 reg = p[15] << 28 | p[14] << 26 | p[13] << 24 |
2327 p[12] << 22 | p[11] << 20 | p[10] << 18 | p[ 9] << 16 |
2328 p[ 8] << 14 | p[ 7] << 12 | p[ 6] << 10 | p[ 5] << 8 |
2329 p[ 4] << 6 | p[ 3] << 4 | p[ 2] << 2 | p[ 1] << 0;
2330 AR_WRITE(sc, AR_PHY_BIN_MASK2_1, reg);
2331 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_15_01, reg);
2332
2333 reg = p[30] << 28 | p[29] << 26 | p[28] << 24 |
2334 p[27] << 22 | p[26] << 20 | p[25] << 18 | p[24] << 16 |
2335 p[23] << 14 | p[22] << 12 | p[21] << 10 | p[20] << 8 |
2336 p[19] << 6 | p[18] << 4 | p[17] << 2 | p[16] << 0;
2337 AR_WRITE(sc, AR_PHY_BIN_MASK2_2, reg);
2338 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_30_16, reg);
2339
2340 reg = p[45] << 28 | p[44] << 26 | p[43] << 24 |
2341 p[42] << 22 | p[41] << 20 | p[40] << 18 | p[39] << 16 |
2342 p[38] << 14 | p[37] << 12 | p[36] << 10 | p[35] << 8 |
2343 p[34] << 6 | p[33] << 4 | p[32] << 2 | p[31] << 0;
2344 AR_WRITE(sc, AR_PHY_BIN_MASK2_3, reg);
2345 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_45_31, reg);
2346
2347 reg =
2348 p[61] << 30 | p[60] << 28 | p[59] << 26 | p[58] << 24 |
2349 p[57] << 22 | p[56] << 20 | p[55] << 18 | p[54] << 16 |
2350 p[53] << 14 | p[52] << 12 | p[51] << 10 | p[50] << 8 |
2351 p[49] << 6 | p[48] << 4 | p[47] << 2 | p[46] << 0;
2352 AR_WRITE(sc, AR_PHY_BIN_MASK2_4, reg);
2353 AR_WRITE(sc, AR_PHY_VIT_MASK2_P_61_46, reg);
2354 AR_WRITE_BARRIER(sc);
2355 }
2356
2357 Static void
2358 ar5008_hw_init(struct athn_softc *sc, struct ieee80211_channel *c,
2359 struct ieee80211_channel *extc)
2360 {
2361 struct athn_ops *ops = &sc->sc_ops;
2362 const struct athn_ini *ini = sc->sc_ini;
2363 const uint32_t *pvals;
2364 uint32_t reg;
2365 int i;
2366
2367 AR_WRITE(sc, AR_PHY(0), 0x00000007);
2368 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
2369
2370 if (!AR_SINGLE_CHIP(sc))
2371 ar5416_reset_addac(sc, c);
2372
2373 AR_WRITE(sc, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
2374
2375 /* First initialization step (depends on channel band/bandwidth). */
2376 #ifndef IEEE80211_NO_HT
2377 if (extc != NULL) {
2378 if (IEEE80211_IS_CHAN_2GHZ(c))
2379 pvals = ini->vals_2g40;
2380 else
2381 pvals = ini->vals_5g40;
2382 }
2383 else
2384 #endif
2385 {
2386 if (IEEE80211_IS_CHAN_2GHZ(c))
2387 pvals = ini->vals_2g20;
2388 else
2389 pvals = ini->vals_5g20;
2390 }
2391 DPRINTFN(DBG_INIT, sc, "writing modal init vals\n");
2392 for (i = 0; i < ini->nregs; i++) {
2393 uint32_t val = pvals[i];
2394
2395 /* Fix AR_AN_TOP2 initialization value if required. */
2396 if (ini->regs[i] == AR_AN_TOP2 &&
2397 (sc->sc_flags & ATHN_FLAG_AN_TOP2_FIXUP))
2398 val &= ~AR_AN_TOP2_PWDCLKIND;
2399 AR_WRITE(sc, ini->regs[i], val);
2400 if (AR_IS_ANALOG_REG(ini->regs[i])) {
2401 AR_WRITE_BARRIER(sc);
2402 DELAY(100);
2403 }
2404 if ((i & 0x1f) == 0)
2405 DELAY(1);
2406 }
2407 AR_WRITE_BARRIER(sc);
2408
2409 if (sc->sc_rx_gain != NULL)
2410 ar9280_reset_rx_gain(sc, c);
2411 if (sc->sc_tx_gain != NULL)
2412 ar9280_reset_tx_gain(sc, c);
2413
2414 if (AR_SREV_9271_10(sc)) {
2415 AR_WRITE(sc, AR_PHY(68), 0x30002311);
2416 AR_WRITE(sc, AR_PHY_RF_CTL3, 0x0a020001);
2417 }
2418 AR_WRITE_BARRIER(sc);
2419
2420 /* Second initialization step (common to all channels). */
2421 DPRINTFN(DBG_INIT, sc, "writing common init vals\n");
2422 for (i = 0; i < ini->ncmregs; i++) {
2423 AR_WRITE(sc, ini->cmregs[i], ini->cmvals[i]);
2424 if (AR_IS_ANALOG_REG(ini->cmregs[i])) {
2425 AR_WRITE_BARRIER(sc);
2426 DELAY(100);
2427 }
2428 if ((i & 0x1f) == 0)
2429 DELAY(1);
2430 }
2431 AR_WRITE_BARRIER(sc);
2432
2433 if (!AR_SINGLE_CHIP(sc))
2434 ar5416_reset_bb_gain(sc, c);
2435
2436 if (IEEE80211_IS_CHAN_5GHZ(c) &&
2437 (sc->sc_flags & ATHN_FLAG_FAST_PLL_CLOCK)) {
2438 /* Update modal values for fast PLL clock. */
2439 #ifndef IEEE80211_NO_HT
2440 if (extc != NULL)
2441 pvals = ini->fastvals_5g40;
2442 else
2443 #endif
2444 pvals = ini->fastvals_5g20;
2445 DPRINTFN(DBG_INIT, sc, "writing fast pll clock init vals\n");
2446 for (i = 0; i < ini->nfastregs; i++) {
2447 AR_WRITE(sc, ini->fastregs[i], pvals[i]);
2448 if (AR_IS_ANALOG_REG(ini->fastregs[i])) {
2449 AR_WRITE_BARRIER(sc);
2450 DELAY(100);
2451 }
2452 if ((i & 0x1f) == 0)
2453 DELAY(1);
2454 }
2455 }
2456
2457 /*
2458 * Set the RX_ABORT and RX_DIS bits to prevent frames with corrupted
2459 * descriptor status.
2460 */
2461 AR_SETBITS(sc, AR_DIAG_SW, AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT);
2462
2463 /* Hardware workarounds for occasional Rx data corruption. */
2464 if (AR_SREV_9280_10_OR_LATER(sc)) {
2465 reg = AR_READ(sc, AR_PCU_MISC_MODE2);
2466 if (!AR_SREV_9271(sc))
2467 reg &= ~AR_PCU_MISC_MODE2_HWWAR1;
2468 if (AR_SREV_9287_10_OR_LATER(sc))
2469 reg &= ~AR_PCU_MISC_MODE2_HWWAR2;
2470 AR_WRITE(sc, AR_PCU_MISC_MODE2, reg);
2471
2472 }
2473 else if (AR_SREV_5416_20_OR_LATER(sc)) {
2474 /* Disable baseband clock gating. */
2475 AR_WRITE(sc, AR_PHY(651), 0x11);
2476
2477 if (AR_SREV_9160(sc)) {
2478 /* Disable RIFS search to fix baseband hang. */
2479 AR_CLRBITS(sc, AR_PHY_HEAVY_CLIP_FACTOR_RIFS,
2480 AR_PHY_RIFS_INIT_DELAY_M);
2481 }
2482 }
2483 AR_WRITE_BARRIER(sc);
2484
2485 ar5008_set_phy(sc, c, extc);
2486 ar5008_init_chains(sc);
2487
2488 if (sc->sc_flags & ATHN_FLAG_OLPC) {
2489 sc->sc_olpc_ticks = ticks;
2490 ops->olpc_init(sc);
2491 }
2492
2493 ops->set_txpower(sc, c, extc);
2494
2495 if (!AR_SINGLE_CHIP(sc))
2496 ar5416_rf_reset(sc, c);
2497 }
2498
2499 Static uint8_t
2500 ar5008_get_vpd(uint8_t pwr, const uint8_t *pwrPdg, const uint8_t *vpdPdg,
2501 int nicepts)
2502 {
2503 uint8_t vpd;
2504 int i, lo, hi;
2505
2506 for (i = 0; i < nicepts; i++)
2507 if (pwrPdg[i] > pwr)
2508 break;
2509 hi = i;
2510 lo = hi - 1;
2511 if (lo == -1)
2512 lo = hi;
2513 else if (hi == nicepts)
2514 hi = lo;
2515
2516 vpd = athn_interpolate(pwr, pwrPdg[lo], vpdPdg[lo],
2517 pwrPdg[hi], vpdPdg[hi]);
2518 return vpd;
2519 }
2520
2521 PUBLIC void
2522 ar5008_get_pdadcs(struct athn_softc *sc, uint8_t fbin,
2523 struct athn_pier *lopier, struct athn_pier *hipier, int nxpdgains,
2524 int nicepts, uint8_t overlap, uint8_t *boundaries, uint8_t *pdadcs)
2525 {
2526 #define DB(x) ((x) / 2) /* Convert half dB to dB. */
2527 uint8_t minpwr[AR_PD_GAINS_IN_MASK], maxpwr[AR_PD_GAINS_IN_MASK];
2528 uint8_t vpd[AR_MAX_PWR_RANGE_IN_HALF_DB], pwr;
2529 uint8_t lovpd, hivpd, boundary;
2530 int16_t ss, delta, vpdstep, val;
2531 int i, j, npdadcs, nvpds, maxidx, tgtidx;
2532
2533 /* Compute min and max power in half dB for each pdGain. */
2534 for (i = 0; i < nxpdgains; i++) {
2535 minpwr[i] = MAX(lopier->pwr[i][0], hipier->pwr[i][0]);
2536 maxpwr[i] = MIN(lopier->pwr[i][nicepts - 1],
2537 hipier->pwr[i][nicepts - 1]);
2538 }
2539
2540 /* Fill power detector analog-to-digital converter (PDADC) table. */
2541 npdadcs = 0;
2542 for (i = 0; i < nxpdgains; i++) {
2543 if (i != nxpdgains - 1)
2544 boundaries[i] = DB(maxpwr[i] + minpwr[i + 1]) / 2;
2545 else
2546 boundaries[i] = DB(maxpwr[i]);
2547 if (boundaries[i] > AR_MAX_RATE_POWER)
2548 boundaries[i] = AR_MAX_RATE_POWER;
2549
2550 if (i == 0 && !AR_SREV_5416_20_OR_LATER(sc)) {
2551 /* Fix the gain delta (AR5416 1.0 only). */
2552 delta = boundaries[0] - 23;
2553 boundaries[0] = 23;
2554 }
2555 else
2556 delta = 0;
2557
2558 /* Find starting index for this pdGain. */
2559 if (i != 0) {
2560 ss = boundaries[i - 1] - DB(minpwr[i]) -
2561 overlap + 1 + delta;
2562 }
2563 else if (AR_SREV_9280_10_OR_LATER(sc))
2564 ss = -DB(minpwr[i]);
2565 else
2566 ss = 0;
2567
2568 /* Compute Vpd table for this pdGain. */
2569 nvpds = DB(maxpwr[i] - minpwr[i]) + 1;
2570 memset(vpd, 0, sizeof(vpd));
2571 pwr = minpwr[i];
2572 for (j = 0; j < nvpds; j++) {
2573 /* Get lower and higher Vpd. */
2574 lovpd = ar5008_get_vpd(pwr, lopier->pwr[i],
2575 lopier->vpd[i], nicepts);
2576 hivpd = ar5008_get_vpd(pwr, hipier->pwr[i],
2577 hipier->vpd[i], nicepts);
2578
2579 /* Interpolate the final Vpd. */
2580 vpd[j] = athn_interpolate(fbin,
2581 lopier->fbin, lovpd, hipier->fbin, hivpd);
2582
2583 pwr += 2; /* In half dB. */
2584 }
2585
2586 /* Extrapolate data for ss < 0. */
2587 if (vpd[1] > vpd[0])
2588 vpdstep = vpd[1] - vpd[0];
2589 else
2590 vpdstep = 1;
2591 while (ss < 0 && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2592 val = vpd[0] + ss * vpdstep;
2593 pdadcs[npdadcs++] = MAX(val, 0);
2594 ss++;
2595 }
2596
2597 tgtidx = boundaries[i] + overlap - DB(minpwr[i]);
2598 maxidx = MIN(tgtidx, nvpds);
2599 while (ss < maxidx && npdadcs < AR_NUM_PDADC_VALUES - 1)
2600 pdadcs[npdadcs++] = vpd[ss++];
2601
2602 if (tgtidx < maxidx)
2603 continue;
2604
2605 /* Extrapolate data for maxidx <= ss <= tgtidx. */
2606 if (vpd[nvpds - 1] > vpd[nvpds - 2])
2607 vpdstep = vpd[nvpds - 1] - vpd[nvpds - 2];
2608 else
2609 vpdstep = 1;
2610 while (ss <= tgtidx && npdadcs < AR_NUM_PDADC_VALUES - 1) {
2611 val = vpd[nvpds - 1] + (ss - maxidx + 1) * vpdstep;
2612 pdadcs[npdadcs++] = MIN(val, 255);
2613 ss++;
2614 }
2615 }
2616
2617 /* Fill remaining PDADC and boundaries entries. */
2618 if (AR_SREV_9285(sc))
2619 boundary = AR9285_PD_GAIN_BOUNDARY_DEFAULT;
2620 else /* Fill with latest. */
2621 boundary = boundaries[nxpdgains - 1];
2622
2623 for (; nxpdgains < AR_PD_GAINS_IN_MASK; nxpdgains++)
2624 boundaries[nxpdgains] = boundary;
2625
2626 for (; npdadcs < AR_NUM_PDADC_VALUES; npdadcs++)
2627 pdadcs[npdadcs] = pdadcs[npdadcs - 1];
2628 #undef DB
2629 }
2630
2631 PUBLIC void
2632 ar5008_get_lg_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2633 uint8_t ctl, const struct ar_cal_target_power_leg *tgt, int nchans,
2634 uint8_t tpow[4])
2635 {
2636 uint8_t fbin;
2637 int i, lo, hi;
2638
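/*
 * If the channel falls outside the calibrated range, the code below
 * uses the nearest calibrated entry for both interpolation endpoints
 * (lo == hi) instead of extrapolating.
 */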
2639 /* Find interval (lower and upper indices). */
2640 fbin = athn_chan2fbin(c);
2641 for (i = 0; i < nchans; i++) {
2642 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2643 tgt[i].bChannel > fbin)
2644 break;
2645 }
2646 hi = i;
2647 lo = hi - 1;
2648 if (lo == -1)
2649 lo = hi;
2650 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2651 hi = lo;
2652
2653 /* Interpolate values. */
2654 for (i = 0; i < 4; i++) {
2655 tpow[i] = athn_interpolate(fbin,
2656 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2657 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2658 }
2659 /* XXX Apply conformance testing limit. */
2660 }
2661
2662 #ifndef IEEE80211_NO_HT
2663 PUBLIC void
2664 ar5008_get_ht_tpow(struct athn_softc *sc, struct ieee80211_channel *c,
2665 uint8_t ctl, const struct ar_cal_target_power_ht *tgt, int nchans,
2666 uint8_t tpow[8])
2667 {
2668 uint8_t fbin;
2669 int i, lo, hi;
2670
2671 /* Find interval (lower and upper indices). */
2672 fbin = athn_chan2fbin(c);
2673 for (i = 0; i < nchans; i++) {
2674 if (tgt[i].bChannel == AR_BCHAN_UNUSED ||
2675 tgt[i].bChannel > fbin)
2676 break;
2677 }
2678 hi = i;
2679 lo = hi - 1;
2680 if (lo == -1)
2681 lo = hi;
2682 else if (hi == nchans || tgt[hi].bChannel == AR_BCHAN_UNUSED)
2683 hi = lo;
2684
2685 /* Interpolate values. */
2686 for (i = 0; i < 8; i++) {
2687 tpow[i] = athn_interpolate(fbin,
2688 tgt[lo].bChannel, tgt[lo].tPow2x[i],
2689 tgt[hi].bChannel, tgt[hi].tPow2x[i]);
2690 }
2691 /* XXX Apply conformance testing limit. */
2692 }
2693 #endif
2694
2695 /*
2696 * Adaptive noise immunity.
2697 */
2698 Static void
2699 ar5008_set_noise_immunity_level(struct athn_softc *sc, int level)
2700 {
2701 int high = level == 4;
2702 uint32_t reg;
2703
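/*
 * Only the highest noise immunity level (4) switches to the alternate
 * threshold set; every other level uses the defaults.
 */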
2704 reg = AR_READ(sc, AR_PHY_DESIRED_SZ);
2705 reg = RW(reg, AR_PHY_DESIRED_SZ_TOT_DES, high ? -62 : -55);
2706 AR_WRITE(sc, AR_PHY_DESIRED_SZ, reg);
2707
2708 reg = AR_READ(sc, AR_PHY_AGC_CTL1);
2709 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_LOW, high ? -70 : -64);
2710 reg = RW(reg, AR_PHY_AGC_CTL1_COARSE_HIGH, high ? -12 : -14);
2711 AR_WRITE(sc, AR_PHY_AGC_CTL1, reg);
2712
2713 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2714 reg = RW(reg, AR_PHY_FIND_SIG_FIRPWR, high ? -80 : -78);
2715 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2716
2717 AR_WRITE_BARRIER(sc);
2718 }
2719
2720 Static void
2721 ar5008_enable_ofdm_weak_signal(struct athn_softc *sc)
2722 {
2723 uint32_t reg;
2724
2725 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2726 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 50);
2727 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 40);
2728 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 48);
2729 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2730
2731 reg = AR_READ(sc, AR_PHY_SFCORR);
2732 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 77);
2733 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 64);
2734 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 16);
2735 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2736
2737 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2738 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 50);
2739 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 40);
2740 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 77);
2741 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 64);
2742 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2743
2744 AR_SETBITS(sc, AR_PHY_SFCORR_LOW,
2745 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2746 AR_WRITE_BARRIER(sc);
2747 }
2748
2749 Static void
2750 ar5008_disable_ofdm_weak_signal(struct athn_softc *sc)
2751 {
2752 uint32_t reg;
2753
2754 reg = AR_READ(sc, AR_PHY_SFCORR_LOW);
2755 reg = RW(reg, AR_PHY_SFCORR_LOW_M1_THRESH_LOW, 127);
2756 reg = RW(reg, AR_PHY_SFCORR_LOW_M2_THRESH_LOW, 127);
2757 reg = RW(reg, AR_PHY_SFCORR_LOW_M2COUNT_THR_LOW, 63);
2758 AR_WRITE(sc, AR_PHY_SFCORR_LOW, reg);
2759
2760 reg = AR_READ(sc, AR_PHY_SFCORR);
2761 reg = RW(reg, AR_PHY_SFCORR_M1_THRESH, 127);
2762 reg = RW(reg, AR_PHY_SFCORR_M2_THRESH, 127);
2763 reg = RW(reg, AR_PHY_SFCORR_M2COUNT_THR, 31);
2764 AR_WRITE(sc, AR_PHY_SFCORR, reg);
2765
2766 reg = AR_READ(sc, AR_PHY_SFCORR_EXT);
2767 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH_LOW, 127);
2768 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH_LOW, 127);
2769 reg = RW(reg, AR_PHY_SFCORR_EXT_M1_THRESH, 127);
2770 reg = RW(reg, AR_PHY_SFCORR_EXT_M2_THRESH, 127);
2771 AR_WRITE(sc, AR_PHY_SFCORR_EXT, reg);
2772
2773 AR_CLRBITS(sc, AR_PHY_SFCORR_LOW,
2774 AR_PHY_SFCORR_LOW_USE_SELF_CORR_LOW);
2775 AR_WRITE_BARRIER(sc);
2776 }
2777
2778 Static void
2779 ar5008_set_cck_weak_signal(struct athn_softc *sc, int high)
2780 {
2781 uint32_t reg;
2782
2783 reg = AR_READ(sc, AR_PHY_CCK_DETECT);
2784 reg = RW(reg, AR_PHY_CCK_DETECT_WEAK_SIG_THR_CCK, high ? 6 : 8);
2785 AR_WRITE(sc, AR_PHY_CCK_DETECT, reg);
2786 AR_WRITE_BARRIER(sc);
2787 }
2788
2789 Static void
2790 ar5008_set_firstep_level(struct athn_softc *sc, int level)
2791 {
2792 uint32_t reg;
2793
2794 reg = AR_READ(sc, AR_PHY_FIND_SIG);
2795 reg = RW(reg, AR_PHY_FIND_SIG_FIRSTEP, level * 4);
2796 AR_WRITE(sc, AR_PHY_FIND_SIG, reg);
2797 AR_WRITE_BARRIER(sc);
2798 }
2799
2800 Static void
2801 ar5008_set_spur_immunity_level(struct athn_softc *sc, int level)
2802 {
2803 uint32_t reg;
2804
2805 reg = AR_READ(sc, AR_PHY_TIMING5);
2806 reg = RW(reg, AR_PHY_TIMING5_CYCPWR_THR1, (level + 1) * 2);
2807 AR_WRITE(sc, AR_PHY_TIMING5, reg);
2808 AR_WRITE_BARRIER(sc);
2809 }
2810