Lines matching defs:dr — uses of struct bwn_dma_ring *dr in the FreeBSD bwn(4) DMA code. Each hit below is prefixed with its line number in the source file.
1028 struct bwn_dma_ring *dr;
1036 dr = bwn_dma_select(mac, M_WME_GETAC(m));
1037 if (dr->dr_stop == 1 ||
1038 bwn_dma_freeslot(dr) < BWN_TX_SLOTS_PER_FRAME) {
1039 dr->dr_stop = 1;
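
The fragment above (around line 1036) is the TX admission check: bwn_dma_select() maps the frame's WME access category to a ring, and the ring is flow-controlled once fewer than BWN_TX_SLOTS_PER_FRAME free slots remain. A minimal userland sketch of that pattern, with hypothetical names (struct ring, ring_can_tx) standing in for the driver's types:

#include <stdbool.h>
#include <stdio.h>

#define SLOTS_PER_FRAME 2          /* bwn uses two descriptors per frame */

struct ring {
	int  numslots;             /* total descriptors in the ring */
	int  usedslot;             /* descriptors currently in flight */
	bool stop;                 /* set when the ring is flow-controlled */
};

static int
ring_freeslots(const struct ring *r)
{
	return (r->numslots - r->usedslot);
}

/* Mirror of the admission test: refuse the frame and latch `stop`
 * when a full frame no longer fits; txeof clears `stop` later. */
static bool
ring_can_tx(struct ring *r)
{
	if (r->stop || ring_freeslots(r) < SLOTS_PER_FRAME) {
		r->stop = true;
		return (false);
	}
	return (true);
}

int
main(void)
{
	struct ring r = { .numslots = 256, .usedslot = 255 };
	printf("admitted: %d\n", ring_can_tx(&r));  /* 0: one slot left */
	return (0);
}
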
1178 struct bwn_dma_ring *dr = bwn_dma_select(mac, M_WME_GETAC(*mp));
1183 uint8_t *txhdr_cache = (uint8_t *)dr->dr_txhdr_cache;
1184 int error, slot, backup[2] = { dr->dr_curslot, dr->dr_usedslot };
1187 KASSERT(!dr->dr_stop, ("%s:%d: fail", __func__, __LINE__));
1192 slot = bwn_dma_getslot(dr);
1193 dr->getdesc(dr, slot, &desc, &mt);
1197 error = bwn_set_txhdr(dr->dr_mac, ni, m,
1199 BWN_DMA_COOKIE(dr, slot));
1202 error = bus_dmamap_load(dr->dr_txring_dtag, mt->mt_dmap,
1210 bus_dmamap_sync(dr->dr_txring_dtag, mt->mt_dmap,
1212 dr->setdesc(dr, desc, mt->mt_paddr, BWN_HDRSIZE(mac), 1, 0, 0);
1213 bus_dmamap_sync(dr->dr_ring_dtag, dr->dr_ring_dmap,
1216 slot = bwn_dma_getslot(dr);
1217 dr->getdesc(dr, slot, &desc, &mt);
1254 dr->setdesc(dr, desc, mt->mt_paddr, m->m_pkthdr.len, 0, 1, 1);
1255 bus_dmamap_sync(dr->dr_ring_dtag, dr->dr_ring_dmap,
1260 dr->start_transfer(dr, bwn_dma_nextslot(dr, slot));
1263 dr->dr_curslot = backup[0];
1264 dr->dr_usedslot = backup[1];
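
Lines 1178-1264 show why BWN_TX_SLOTS_PER_FRAME is two: the first slot carries the TX header, the second the frame body, and backup[] snapshots dr_curslot/dr_usedslot so a failed bus_dmamap_load() can roll the ring back as if nothing happened. A sketch of that allocate-two-or-rollback shape (names hypothetical, mapping failure simulated by a flag):

#include <stdio.h>

struct ring {
	int numslots;
	int curslot;               /* last slot handed out, -1 when empty */
	int usedslot;
};

static int
ring_nextslot(const struct ring *r, int slot)
{
	return ((slot == r->numslots - 1) ? 0 : slot + 1);  /* wrap */
}

static int
ring_getslot(struct ring *r)
{
	r->curslot = ring_nextslot(r, r->curslot);
	r->usedslot++;
	return (r->curslot);
}

/* Take the header slot and the body slot; on a (simulated) mapping
 * failure restore the snapshot so the ring state is untouched. */
static int
ring_tx_frame(struct ring *r, int fail)
{
	int backup[2] = { r->curslot, r->usedslot };
	int hdr = ring_getslot(r);
	int body = ring_getslot(r);

	if (fail) {
		r->curslot = backup[0];
		r->usedslot = backup[1];
		return (-1);
	}
	printf("hdr slot %d, body slot %d\n", hdr, body);
	return (0);
}

int
main(void)
{
	struct ring r = { .numslots = 256, .curslot = -1, .usedslot = 0 };
	ring_tx_frame(&r, 0);
	return (0);
}
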
2819 struct bwn_dma_ring *dr;
2825 dr = malloc(sizeof(*dr), M_DEVBUF, M_NOWAIT | M_ZERO);
2826 if (dr == NULL)
2828 dr->dr_numslots = BWN_RXRING_SLOTS;
2830 dr->dr_numslots = BWN_TXRING_SLOTS;
2832 dr->dr_meta = malloc(dr->dr_numslots * sizeof(struct bwn_dmadesc_meta),
2834 if (dr->dr_meta == NULL)
2837 dr->dr_type = mac->mac_dmatype;
2838 dr->dr_mac = mac;
2839 dr->dr_base = bwn_dma_base(dr->dr_type, controller_index);
2840 dr->dr_index = controller_index;
2841 if (dr->dr_type == BHND_DMA_ADDR_64BIT) {
2842 dr->getdesc = bwn_dma_64_getdesc;
2843 dr->setdesc = bwn_dma_64_setdesc;
2844 dr->start_transfer = bwn_dma_64_start_transfer;
2845 dr->suspend = bwn_dma_64_suspend;
2846 dr->resume = bwn_dma_64_resume;
2847 dr->get_curslot = bwn_dma_64_get_curslot;
2848 dr->set_curslot = bwn_dma_64_set_curslot;
2850 dr->getdesc = bwn_dma_32_getdesc;
2851 dr->setdesc = bwn_dma_32_setdesc;
2852 dr->start_transfer = bwn_dma_32_start_transfer;
2853 dr->suspend = bwn_dma_32_suspend;
2854 dr->resume = bwn_dma_32_resume;
2855 dr->get_curslot = bwn_dma_32_get_curslot;
2856 dr->set_curslot = bwn_dma_32_set_curslot;
2859 dr->dr_tx = 1;
2860 dr->dr_curslot = -1;
2862 if (dr->dr_index == 0) {
2866 dr->dr_rx_bufsize =
2868 dr->dr_frameoffset =
2872 dr->dr_rx_bufsize =
2874 dr->dr_frameoffset =
2882 error = bwn_dma_allocringmemory(dr);
2894 dr->dr_txhdr_cache = contigmalloc(
2895 (dr->dr_numslots / BWN_TX_SLOTS_PER_FRAME) *
2898 if (dr->dr_txhdr_cache == NULL) {
2917 &dr->dr_txring_dtag);
2924 for (i = 0; i < dr->dr_numslots; i += 2) {
2925 dr->getdesc(dr, i, &desc, &mt);
2931 error = bus_dmamap_create(dr->dr_txring_dtag, 0,
2939 dr->getdesc(dr, i + 1, &desc, &mt);
2955 &dr->dr_spare_dmap);
2962 for (i = 0; i < dr->dr_numslots; i++) {
2963 dr->getdesc(dr, i, &desc, &mt);
2972 error = bwn_dma_newbuf(dr, desc, mt, 1);
2980 bus_dmamap_sync(dr->dr_ring_dtag, dr->dr_ring_dmap,
2983 dr->dr_usedslot = dr->dr_numslots;
2987 return (dr);
2990 free(dr->dr_txhdr_cache, M_DEVBUF);
2992 free(dr->dr_meta, M_DEVBUF);
2994 free(dr, M_DEVBUF);
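
bwn_dma_ringsetup() (lines 2819-2994) binds the ring to either the 32-bit or 64-bit descriptor engine by filling function pointers once at setup time, so the hot paths never test dr_type again. A compilable sketch of that vtable pattern, with made-up op implementations:

#include <stdio.h>

enum addr_type { ADDR_32BIT, ADDR_64BIT };

struct ring;
struct ring_ops {
	void (*start_transfer)(struct ring *, int);
	int  (*get_curslot)(struct ring *);
};

struct ring {
	enum addr_type type;
	struct ring_ops ops;       /* bwn stores the pointers directly in dr */
};

static void start32(struct ring *r, int s) { (void)r; printf("32-bit kick, slot %d\n", s); }
static void start64(struct ring *r, int s) { (void)r; printf("64-bit kick, slot %d\n", s); }
static int  cur32(struct ring *r) { (void)r; return (0); }
static int  cur64(struct ring *r) { (void)r; return (0); }

static void
ring_bind_ops(struct ring *r)
{
	if (r->type == ADDR_64BIT) {
		r->ops.start_transfer = start64;
		r->ops.get_curslot = cur64;
	} else {
		r->ops.start_transfer = start32;
		r->ops.get_curslot = cur32;
	}
}

int
main(void)
{
	struct ring r = { .type = ADDR_64BIT };
	ring_bind_ops(&r);
	r.ops.start_transfer(&r, 3);
	return (0);
}

Binding the pointers once at setup keeps the descriptor fast paths free of per-call dr_type branches.
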
2999 bwn_dma_ringfree(struct bwn_dma_ring **dr)
3002 if (dr == NULL)
3005 bwn_dma_free_descbufs(*dr);
3006 bwn_dma_free_ringmemory(*dr);
3008 free((*dr)->dr_txhdr_cache, M_DEVBUF);
3009 free((*dr)->dr_meta, M_DEVBUF);
3010 free(*dr, M_DEVBUF);
3012 *dr = NULL;
3016 bwn_dma_32_getdesc(struct bwn_dma_ring *dr, int slot,
3021 *meta = &(dr->dr_meta[slot]);
3022 desc = dr->dr_ring_descbase;
3029 bwn_dma_32_setdesc(struct bwn_dma_ring *dr,
3039 descbase = dr->dr_ring_descbase;
3040 dma = &dr->dr_mac->mac_method.dma;
3044 KASSERT(slot >= 0 && slot < dr->dr_numslots,
3050 if (slot == dr->dr_numslots - 1)
3066 bwn_dma_32_start_transfer(struct bwn_dma_ring *dr, int slot)
3069 BWN_DMA_WRITE(dr, BWN_DMA32_TXINDEX,
3074 bwn_dma_32_suspend(struct bwn_dma_ring *dr)
3077 BWN_DMA_WRITE(dr, BWN_DMA32_TXCTL,
3078 BWN_DMA_READ(dr, BWN_DMA32_TXCTL) | BWN_DMA32_TXSUSPEND);
3082 bwn_dma_32_resume(struct bwn_dma_ring *dr)
3085 BWN_DMA_WRITE(dr, BWN_DMA32_TXCTL,
3086 BWN_DMA_READ(dr, BWN_DMA32_TXCTL) & ~BWN_DMA32_TXSUSPEND);
3090 bwn_dma_32_get_curslot(struct bwn_dma_ring *dr)
3094 val = BWN_DMA_READ(dr, BWN_DMA32_RXSTATUS);
3101 bwn_dma_32_set_curslot(struct bwn_dma_ring *dr, int slot)
3104 BWN_DMA_WRITE(dr, BWN_DMA32_RXINDEX,
3109 bwn_dma_64_getdesc(struct bwn_dma_ring *dr, int slot,
3114 *meta = &(dr->dr_meta[slot]);
3115 desc = dr->dr_ring_descbase;
3122 bwn_dma_64_setdesc(struct bwn_dma_ring *dr,
3135 descbase = dr->dr_ring_descbase;
3136 dma = &dr->dr_mac->mac_method.dma;
3140 KASSERT(slot >= 0 && slot < dr->dr_numslots,
3149 if (slot == dr->dr_numslots - 1)
3170 bwn_dma_64_start_transfer(struct bwn_dma_ring *dr, int slot)
3173 BWN_DMA_WRITE(dr, BWN_DMA64_TXINDEX,
3178 bwn_dma_64_suspend(struct bwn_dma_ring *dr)
3181 BWN_DMA_WRITE(dr, BWN_DMA64_TXCTL,
3182 BWN_DMA_READ(dr, BWN_DMA64_TXCTL) | BWN_DMA64_TXSUSPEND);
3186 bwn_dma_64_resume(struct bwn_dma_ring *dr)
3189 BWN_DMA_WRITE(dr, BWN_DMA64_TXCTL,
3190 BWN_DMA_READ(dr, BWN_DMA64_TXCTL) & ~BWN_DMA64_TXSUSPEND);
3194 bwn_dma_64_get_curslot(struct bwn_dma_ring *dr)
3198 val = BWN_DMA_READ(dr, BWN_DMA64_RXSTATUS);
3205 bwn_dma_64_set_curslot(struct bwn_dma_ring *dr, int slot)
3208 BWN_DMA_WRITE(dr, BWN_DMA64_RXINDEX,
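
The 32- and 64-bit op pairs above differ essentially only in register offsets; suspend/resume are read-modify-write toggles of a TXSUSPEND bit in the TX control register. A userland model of that toggle against a fake register (the BWN_DMA_READ/BWN_DMA_WRITE behavior is inferred from the call sites, and the bit position here is illustrative):

#include <stdint.h>
#include <stdio.h>

#define TXSUSPEND (1u << 1)        /* illustrative bit position */

static uint32_t fake_txctl;        /* stands in for the device register */

static uint32_t reg_read(void)        { return (fake_txctl); }
static void     reg_write(uint32_t v) { fake_txctl = v; }

static void
dma_suspend(void)
{
	reg_write(reg_read() | TXSUSPEND);   /* set: engine pauses */
}

static void
dma_resume(void)
{
	reg_write(reg_read() & ~TXSUSPEND);  /* clear: engine runs */
}

int
main(void)
{
	dma_suspend();
	printf("txctl=%#x\n", (unsigned)fake_txctl);
	dma_resume();
	printf("txctl=%#x\n", (unsigned)fake_txctl);
	return (0);
}
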
3213 bwn_dma_allocringmemory(struct bwn_dma_ring *dr)
3215 struct bwn_mac *mac = dr->dr_mac;
3230 &dr->dr_ring_dtag);
3237 error = bus_dmamem_alloc(dr->dr_ring_dtag,
3238 &dr->dr_ring_descbase, BUS_DMA_WAITOK | BUS_DMA_ZERO,
3239 &dr->dr_ring_dmap);
3245 error = bus_dmamap_load(dr->dr_ring_dtag, dr->dr_ring_dmap,
3246 dr->dr_ring_descbase, BWN_DMA_RINGMEMSIZE,
3247 bwn_dma_ring_addr, &dr->dr_ring_dmabase, BUS_DMA_NOWAIT);
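
bwn_dma_allocringmemory() (lines 3213-3247) follows the usual busdma triple: create a tag, allocate coherent descriptor memory, then bus_dmamap_load() it with a callback (bwn_dma_ring_addr) that captures the single segment's bus address into dr_ring_dmabase. A sketch of that callback-capture idiom with stand-in types (struct dma_seg and fake_dmamap_load are not real busdma API):

#include <stdio.h>

typedef unsigned long bus_addr_t;
struct dma_seg { bus_addr_t ds_addr; };  /* stand-in for bus_dma_segment_t */

/* Shape of a busdma load callback: store the one segment's address
 * into the caller-provided pointer, as bwn_dma_ring_addr does. */
static void
ring_addr_cb(void *arg, struct dma_seg *segs, int nseg, int error)
{
	if (error == 0 && nseg == 1)
		*(bus_addr_t *)arg = segs[0].ds_addr;
}

/* Fake "load" that produces one segment and invokes the callback. */
static int
fake_dmamap_load(void (*cb)(void *, struct dma_seg *, int, int), void *arg)
{
	struct dma_seg seg = { .ds_addr = 0x12340000UL };
	cb(arg, &seg, 1, 0);
	return (0);
}

int
main(void)
{
	bus_addr_t ring_dmabase = 0;
	fake_dmamap_load(ring_addr_cb, &ring_dmabase);
	printf("ring at bus address %#lx\n", ring_dmabase);
	return (0);
}
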
3258 bwn_dma_setup(struct bwn_dma_ring *dr)
3266 mac = dr->dr_mac;
3270 paddr = dr->dr_ring_dmabase;
3276 if (dr->dr_tx) {
3277 dr->dr_curslot = -1;
3279 if (dr->dr_type == BHND_DMA_ADDR_64BIT) {
3284 BWN_DMA_WRITE(dr, BWN_DMA64_TXCTL, value);
3285 BWN_DMA_WRITE(dr, BWN_DMA64_TXRINGLO, addrlo);
3286 BWN_DMA_WRITE(dr, BWN_DMA64_TXRINGHI, addrhi);
3292 BWN_DMA_WRITE(dr, BWN_DMA32_TXCTL, value);
3293 BWN_DMA_WRITE(dr, BWN_DMA32_TXRING, addrlo);
3301 dr->dr_usedslot = dr->dr_numslots;
3303 if (dr->dr_type == BHND_DMA_ADDR_64BIT) {
3304 value = (dr->dr_frameoffset << BWN_DMA64_RXFROFF_SHIFT);
3309 BWN_DMA_WRITE(dr, BWN_DMA64_RXCTL, value);
3310 BWN_DMA_WRITE(dr, BWN_DMA64_RXRINGLO, addrlo);
3311 BWN_DMA_WRITE(dr, BWN_DMA64_RXRINGHI, addrhi);
3312 BWN_DMA_WRITE(dr, BWN_DMA64_RXINDEX, dr->dr_numslots *
3315 value = (dr->dr_frameoffset << BWN_DMA32_RXFROFF_SHIFT);
3320 BWN_DMA_WRITE(dr, BWN_DMA32_RXCTL, value);
3321 BWN_DMA_WRITE(dr, BWN_DMA32_RXRING, addrlo);
3322 BWN_DMA_WRITE(dr, BWN_DMA32_RXINDEX, dr->dr_numslots *
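
bwn_dma_setup() (lines 3258-3322) programs the engine with the ring's bus address; on 64-bit engines the address is split into RINGLO/RINGHI halves, and the RX index register is preloaded to numslots times the descriptor size so the hardware sees every buffer as available. A sketch of the lo/hi split with register writes modeled as prints (the driver additionally folds address-extension bits into the control and high-address words, which this sketch elides):

#include <stdint.h>
#include <stdio.h>

/* Split a 64-bit bus address into the two 32-bit ring-base registers,
 * as the BWN_DMA64_{TX,RX}RING{LO,HI} writes above do. */
static void
program_ring_base(uint64_t paddr)
{
	uint32_t addrlo = (uint32_t)(paddr & 0xffffffffu);
	uint32_t addrhi = (uint32_t)(paddr >> 32);

	printf("RINGLO <- %#010x\n", (unsigned)addrlo);  /* stands in for BWN_DMA_WRITE */
	printf("RINGHI <- %#010x\n", (unsigned)addrhi);
}

int
main(void)
{
	program_ring_base(0x1fedc0000ULL);
	return (0);
}
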
3328 bwn_dma_free_ringmemory(struct bwn_dma_ring *dr)
3331 bus_dmamap_unload(dr->dr_ring_dtag, dr->dr_ring_dmap);
3332 bus_dmamem_free(dr->dr_ring_dtag, dr->dr_ring_descbase,
3333 dr->dr_ring_dmap);
3337 bwn_dma_cleanup(struct bwn_dma_ring *dr)
3340 if (dr->dr_tx) {
3341 bwn_dma_tx_reset(dr->dr_mac, dr->dr_base, dr->dr_type);
3342 if (dr->dr_type == BHND_DMA_ADDR_64BIT) {
3343 BWN_DMA_WRITE(dr, BWN_DMA64_TXRINGLO, 0);
3344 BWN_DMA_WRITE(dr, BWN_DMA64_TXRINGHI, 0);
3346 BWN_DMA_WRITE(dr, BWN_DMA32_TXRING, 0);
3348 bwn_dma_rx_reset(dr->dr_mac, dr->dr_base, dr->dr_type);
3349 if (dr->dr_type == BHND_DMA_ADDR_64BIT) {
3350 BWN_DMA_WRITE(dr, BWN_DMA64_RXRINGLO, 0);
3351 BWN_DMA_WRITE(dr, BWN_DMA64_RXRINGHI, 0);
3353 BWN_DMA_WRITE(dr, BWN_DMA32_RXRING, 0);
3358 bwn_dma_free_descbufs(struct bwn_dma_ring *dr)
3362 struct bwn_mac *mac = dr->dr_mac;
3367 if (!dr->dr_usedslot)
3369 for (i = 0; i < dr->dr_numslots; i++) {
3370 dr->getdesc(dr, i, &desc, &meta);
3373 if (!dr->dr_tx)
3378 if (dr->dr_tx) {
3380 bus_dmamap_unload(dr->dr_txring_dtag,
3387 bwn_dma_free_descbuf(dr, meta);
3490 bwn_dma_free_descbuf(struct bwn_dma_ring *dr,
3505 bwn_dma_set_redzone(struct bwn_dma_ring *dr, struct mbuf *m)
3513 KASSERT(dr->dr_rx_bufsize >= dr->dr_frameoffset +
3516 frame = mtod(m, char *) + dr->dr_frameoffset;
3521 bwn_dma_check_redzone(struct bwn_dma_ring *dr, struct mbuf *m)
3523 unsigned char *f = mtod(m, char *) + dr->dr_frameoffset;
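
bwn_dma_set_redzone()/bwn_dma_check_redzone() (lines 3505-3523) poison the frame area of a freshly queued RX buffer with a known byte pattern; if the pattern is still intact when the descriptor completes, the device never wrote a frame and the buffer is dropped and re-poisoned. A userland model of poison-then-verify (the 0xff pattern byte, offset, and region size here are assumptions for illustration):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define FRAMEOFFSET 30             /* illustrative dr_frameoffset */
#define REDZONE_LEN 8              /* illustrative poison length */

static void
set_redzone(unsigned char *buf)
{
	memset(buf + FRAMEOFFSET, 0xff, REDZONE_LEN);
}

/* True when the poison survived, i.e. the device wrote nothing. */
static bool
check_redzone(const unsigned char *buf)
{
	for (int i = 0; i < REDZONE_LEN; i++)
		if (buf[FRAMEOFFSET + i] != 0xff)
			return (false);  /* device overwrote the poison */
	return (true);                   /* intact: no frame arrived */
}

int
main(void)
{
	unsigned char buf[64] = { 0 };

	set_redzone(buf);
	printf("untouched: %d\n", check_redzone(buf));  /* 1: drop it */
	buf[FRAMEOFFSET] = 0x42;                         /* device wrote */
	printf("untouched: %d\n", check_redzone(buf));  /* 0: real frame */
	return (0);
}
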
5377 bwn_dma_rx(struct bwn_dma_ring *dr)
5381 KASSERT(!dr->dr_tx, ("%s:%d: fail", __func__, __LINE__));
5382 curslot = dr->get_curslot(dr);
5383 KASSERT(curslot >= 0 && curslot < dr->dr_numslots,
5386 slot = dr->dr_curslot;
5387 for (; slot != curslot; slot = bwn_dma_nextslot(dr, slot))
5388 bwn_dma_rxeof(dr, &slot);
5390 bus_dmamap_sync(dr->dr_ring_dtag, dr->dr_ring_dmap,
5393 dr->set_curslot(dr, slot);
5394 dr->dr_curslot = slot;
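
bwn_dma_rx() (lines 5377-5394) reads the hardware's current RX slot, processes every completed slot from dr_curslot up to it, then writes the new position back so the device may reuse those descriptors. A sketch of that walk, with the per-slot rxeof work reduced to a print:

#include <stdio.h>

struct ring {
	int numslots;
	int curslot;               /* software's read position */
};

static int
nextslot(const struct ring *r, int slot)
{
	return ((slot == r->numslots - 1) ? 0 : slot + 1);
}

/* Drain completed RX slots up to the hardware position `hwslot`
 * (in the driver, hwslot comes from dr->get_curslot(dr)). */
static void
ring_rx(struct ring *r, int hwslot)
{
	int slot;

	for (slot = r->curslot; slot != hwslot; slot = nextslot(r, slot))
		printf("rxeof on slot %d\n", slot);
	r->curslot = slot;         /* then written back via set_curslot */
}

int
main(void)
{
	struct ring r = { .numslots = 8, .curslot = 6 };
	ring_rx(&r, 2);            /* wraps: processes 6, 7, 0, 1 */
	return (0);
}
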
5531 bwn_dma_freeslot(struct bwn_dma_ring *dr)
5533 BWN_ASSERT_LOCKED(dr->dr_mac->mac_sc);
5535 return (dr->dr_numslots - dr->dr_usedslot);
5539 bwn_dma_nextslot(struct bwn_dma_ring *dr, int slot)
5541 BWN_ASSERT_LOCKED(dr->dr_mac->mac_sc);
5543 KASSERT(slot >= -1 && slot <= dr->dr_numslots - 1,
5545 if (slot == dr->dr_numslots - 1)
5551 bwn_dma_rxeof(struct bwn_dma_ring *dr, int *slot)
5553 struct bwn_mac *mac = dr->dr_mac;
5565 dr->getdesc(dr, *slot, &desc, &meta);
5570 if (bwn_dma_newbuf(dr, desc, meta, 0)) {
5581 if (bwn_dma_check_redzone(dr, m)) {
5583 bwn_dma_set_redzone(dr, m);
5588 if (len > dr->dr_rx_bufsize) {
5591 dr->getdesc(dr, *slot, &desc, &meta);
5592 bwn_dma_set_redzone(dr, meta->mt_m);
5595 *slot = bwn_dma_nextslot(dr, *slot);
5597 tmp -= dr->dr_rx_bufsize;
5603 len, dr->dr_rx_bufsize, cnt);
5624 m->m_len = m->m_pkthdr.len = len + dr->dr_frameoffset;
5625 m_adj(m, dr->dr_frameoffset);
5627 bwn_rxeof(dr->dr_mac, m, rxhdr);
5787 bwn_dma_newbuf(struct bwn_dma_ring *dr, struct bwn_dmadesc_generic *desc,
5790 struct bwn_mac *mac = dr->dr_mac;
5814 bwn_dma_set_redzone(dr, m);
5819 error = bus_dmamap_load_mbuf(dma->rxbuf_dtag, dr->dr_spare_dmap, m,
5842 meta->mt_dmap = dr->dr_spare_dmap;
5843 dr->dr_spare_dmap = map;
5857 dr->setdesc(dr, desc, meta->mt_paddr, meta->mt_m->m_len -
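
bwn_dma_newbuf() (lines 5787-5857) loads the replacement mbuf into a pre-allocated spare DMA map first; only after the load succeeds does it swap the spare with the slot's map, so a failed allocation or load leaves the old, still-mapped buffer in place and requeueable. A pointer-swap model of that idiom (struct dmamap and newbuf are stand-ins, failure simulated by a flag):

#include <stdio.h>

struct dmamap { int id; };         /* stand-in for bus_dmamap_t */

struct slot_meta {
	struct dmamap *map;        /* map currently bound to this slot */
};

/* Load into the spare; on success swap so the slot owns the freshly
 * loaded map and the old map becomes the new spare. On failure the
 * slot's existing mapping is untouched. */
static int
newbuf(struct slot_meta *meta, struct dmamap **spare, int load_fails)
{
	if (load_fails)
		return (-1);       /* keep the old buffer, requeue it */

	struct dmamap *old = meta->map;
	meta->map = *spare;
	*spare = old;
	return (0);
}

int
main(void)
{
	struct dmamap a = { .id = 1 }, b = { .id = 2 };
	struct slot_meta meta = { .map = &a };
	struct dmamap *spare = &b;

	newbuf(&meta, &spare, 0);
	printf("slot map %d, spare map %d\n", meta.map->id, spare->id);
	return (0);
}
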
6132 struct bwn_dma_ring *dr;
6140 dr = bwn_dma_parse_cookie(mac, status, status->cookie, &slot);
6141 if (dr == NULL) {
6145 KASSERT(dr->dr_tx, ("%s:%d: fail", __func__, __LINE__));
6148 KASSERT(slot >= 0 && slot < dr->dr_numslots,
6150 dr->getdesc(dr, slot, &desc, &meta);
6153 bus_dmamap_unload(dr->dr_txring_dtag, meta->mt_dmap);
6169 dr->dr_usedslot--;
6172 slot = bwn_dma_nextslot(dr, slot);
6175 if (dr->dr_stop) {
6176 KASSERT(bwn_dma_freeslot(dr) >= BWN_TX_SLOTS_PER_FRAME,
6178 dr->dr_stop = 0;
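
The TX-status handler (lines 6132-6178) decodes the status cookie into a ring and starting slot, retires each descriptor of the frame while decrementing dr_usedslot, and clears dr_stop once a whole frame's worth of slots is free again, restarting the flow-controlled queue. A sketch pairing this consumer with the admission check shown earlier (names hypothetical):

#include <stdbool.h>
#include <stdio.h>

#define SLOTS_PER_FRAME 2

struct ring {
	int  numslots;
	int  usedslot;
	bool stop;
};

static int
freeslots(const struct ring *r)
{
	return (r->numslots - r->usedslot);
}

/* Retire one completed frame: both of its slots come back, and the
 * ring is unthrottled once a full frame fits again. */
static void
txeof(struct ring *r)
{
	r->usedslot -= SLOTS_PER_FRAME;
	if (r->stop && freeslots(r) >= SLOTS_PER_FRAME)
		r->stop = false;
}

int
main(void)
{
	struct ring r = { .numslots = 4, .usedslot = 4, .stop = true };

	txeof(&r);
	printf("stopped: %d, free: %d\n", r.stop, freeslots(&r));
	return (0);
}
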
6872 bwn_dma_getslot(struct bwn_dma_ring *dr)
6876 BWN_ASSERT_LOCKED(dr->dr_mac->mac_sc);
6878 KASSERT(dr->dr_tx, ("%s:%d: fail", __func__, __LINE__));
6879 KASSERT(!(dr->dr_stop), ("%s:%d: fail", __func__, __LINE__));
6880 KASSERT(bwn_dma_freeslot(dr) != 0, ("%s:%d: fail", __func__, __LINE__));
6882 slot = bwn_dma_nextslot(dr, dr->dr_curslot);
6884 dr->dr_curslot = slot;
6885 dr->dr_usedslot++;
7297 struct bwn_dma_ring *dr;
7304 dr = dma->wme[WME_AC_BK];
7307 dr = dma->wme[WME_AC_BE];
7310 dr = dma->wme[WME_AC_VI];
7313 dr = dma->wme[WME_AC_VO];
7316 dr = dma->mcast;
7319 dr = NULL;
7324 if (*slot < 0 || *slot >= dr->dr_numslots) {
7334 dr->dr_numslots);
7338 return (dr);
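
bwn_dma_parse_cookie() (lines 7297-7338) recovers the ring and slot from the 16-bit cookie that was stashed in the frame via BWN_DMA_COOKIE() back at line 1199, then validates the slot against dr_numslots. A sketch of such an encode/decode, assuming a ring-id-in-high-bits, slot-in-low-bits packing; the field widths here are illustrative, not the driver's actual layout:

#include <stdint.h>
#include <stdio.h>

#define COOKIE_RING_SHIFT 12       /* illustrative split: 4-bit ring id */
#define COOKIE_SLOT_MASK  0x0fffu  /* 12-bit slot index */

static uint16_t
dma_cookie(unsigned ring, unsigned slot)
{
	return ((uint16_t)(ring << COOKIE_RING_SHIFT) |
	    (uint16_t)(slot & COOKIE_SLOT_MASK));
}

static void
dma_parse_cookie(uint16_t cookie, unsigned *ring, unsigned *slot)
{
	*ring = cookie >> COOKIE_RING_SHIFT;
	*slot = cookie & COOKIE_SLOT_MASK;
}

int
main(void)
{
	unsigned ring, slot;

	dma_parse_cookie(dma_cookie(3, 57), &ring, &slot);
	printf("ring %u, slot %u\n", ring, slot);  /* ring 3, slot 57 */
	return (0);
}
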
7359 bwn_dma_ringstop(struct bwn_dma_ring **dr)
7362 if (dr == NULL)
7365 bwn_dma_cleanup(*dr);