Lines Matching defs:rxr
638 struct ix_rxring *rxr;
649 rxr = &sc->rx_rings[i];
652 ifr[n].ifr_info = rxr->rx_ring;
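
The identifiers in this listing (ix_rxring, if_rxring, ifiq, kstat) match OpenBSD's ix(4) driver, so the sketches interspersed below reconstruct the likely context of each group of matches in the driver's own style; anything not present in the listed lines is an assumption, not verbatim source. Lines 638-652 look like the SIOCGIFRXR handler that exports each ring's if_rxring counters to userland. A sketch; the allocation and the if_rxr_info_ioctl() hand-off are assumed:

    int
    ixgbe_rxrinfo(struct ix_softc *sc, struct if_rxrinfo *ifri) /* name assumed */
    {
        struct if_rxring_info *ifr;
        struct ix_rxring *rxr;
        int i, error;
        u_int n = 0;

        ifr = mallocarray(sc->num_queues, sizeof(*ifr), M_TEMP,
            M_WAITOK | M_ZERO);                 /* allocation assumed */

        for (i = 0; i < sc->num_queues; i++) {
            rxr = &sc->rx_rings[i];             /* line 649 */
            ifr[n].ifr_size = MCLBYTES;         /* buffer size assumed */
            snprintf(ifr[n].ifr_name, sizeof(ifr[n].ifr_name), "%d", i);
            ifr[n].ifr_info = rxr->rx_ring;     /* line 652: copy counters */
            n++;
        }

        error = if_rxr_info_ioctl(ifri, n, ifr); /* hand-off assumed */
        free(ifr, M_TEMP, sc->num_queues * sizeof(*ifr));

        return (error);
    }
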
734 struct ix_rxring *rxr = sc->rx_rings;
828 IXGBE_WRITE_REG(&sc->hw, rxr[i].tail, rxr[i].last_desc_filled);
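
Line 734 leaves rxr pointing at the base of the ring array, so line 828 is most plausibly inside a per-queue loop in the init path. Note that both operands must be indexed by i: with rxr still at the array base, a bare rxr->last_desc_filled would push ring 0's fill level to every queue's tail register. A sketch of the assumed loop:

    struct ix_rxring *rxr = sc->rx_rings;       /* line 734 */
    int i;

    /* advertise each ring's initial fill level to the chip */
    for (i = 0; i < sc->num_queues; i++)
        IXGBE_WRITE_REG(&sc->hw, rxr[i].tail, rxr[i].last_desc_filled);
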
1075 struct ix_rxring *rxr = que->rxr;
1079 ixgbe_rxeof(rxr);
1081 ixgbe_rxrefill(rxr);
1100 struct ix_rxring *rxr = sc->rx_rings;
1110 ixgbe_rxeof(rxr);
1112 ixgbe_rxrefill(rxr);
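
Lines 1075-1081 and 1100-1112 are the two interrupt paths: the MSI-X handler finds its ring through the per-queue structure (que->rxr), while the legacy handler starts from sc->rx_rings. Both first harvest completed descriptors with ixgbe_rxeof() and then repost buffers with ixgbe_rxrefill(). A sketch of the per-queue handler; the function name, the tx half, and the return convention are assumptions:

    int
    ixgbe_queue_intr(void *vque)                /* handler name assumed */
    {
        struct ix_queue *que = vque;
        struct ix_rxring *rxr = que->rxr;       /* line 1075 */

        ixgbe_rxeof(rxr);                       /* harvest completions */
        ixgbe_rxrefill(rxr);                    /* repost fresh mbufs */

        /* tx completion and interrupt unmasking elided */
        return (1);
    }
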
1953 struct ix_rxring *rxr = &sc->rx_rings[i];
1958 ifiq->ifiq_softc = rxr;
1959 rxr->ifiq = ifiq;
1963 ix_rxq_kstats(sc, rxr);
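
Lines 1953-1963 run at attach time and cross-link each hardware ring with its ifnet input queue, then register per-ring kstats. A sketch, assuming the input queues were created with if_attach_iqueues() and are reachable through ifp->if_iqs:

    struct ifnet *ifp = &sc->arpcom.ac_if;      /* path to ifp assumed */
    int i;

    for (i = 0; i < sc->num_queues; i++) {
        struct ix_rxring *rxr = &sc->rx_rings[i];
        struct ifiqueue *ifiq = ifp->if_iqs[i]; /* source of ifiq assumed */

        /* cross-link ring and input queue, then hook up statistics */
        ifiq->ifiq_softc = rxr;
        rxr->ifiq = ifiq;

        ix_rxq_kstats(sc, rxr);
    }
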
2142 struct ix_rxring *rxr;
2198 rxr = &sc->rx_rings[i];
2200 rxr->sc = sc;
2201 rxr->me = i;
2202 timeout_set(&rxr->rx_refill, ixgbe_rxrefill, rxr);
2205 &rxr->rxdma, BUS_DMA_NOWAIT)) {
2210 rxr->rx_base = (union ixgbe_adv_rx_desc *)rxr->rxdma.dma_vaddr;
2211 bzero((void *)rxr->rx_base, rsize);
2221 que->rxr = &sc->rx_rings[i];
2229 for (rxr = sc->rx_rings; rxconf > 0; rxr++, rxconf--)
2230 ixgbe_dma_free(sc, &rxr->rxdma);
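
Lines 2142-2230 allocate the rings. Each ix_rxring gets its back pointer, index, and refill timeout before the descriptor memory is obtained through the driver's DMA helper; rxconf counts successful iterations, so the unwind loop at 2229-2230 frees exactly the rings that were set up. A sketch; ixgbe_dma_malloc() is inferred from the ixgbe_dma_free() in the unwind path, and rsize's rounding is an assumption:

    struct ix_rxring *rxr;
    int i, rsize, rxconf = 0;

    rsize = roundup2(sc->num_rx_desc * sizeof(union ixgbe_adv_rx_desc),
        PAGE_SIZE);                             /* rounding assumed */

    for (i = 0; i < sc->num_queues; i++, rxconf++) {
        rxr = &sc->rx_rings[i];
        rxr->sc = sc;
        rxr->me = i;
        timeout_set(&rxr->rx_refill, ixgbe_rxrefill, rxr);

        /* helper name inferred from ixgbe_dma_free() below */
        if (ixgbe_dma_malloc(sc, rsize, &rxr->rxdma, BUS_DMA_NOWAIT))
            goto err_rx_desc;
        rxr->rx_base = (union ixgbe_adv_rx_desc *)rxr->rxdma.dma_vaddr;
        bzero((void *)rxr->rx_base, rsize);
    }

    return (0);

err_rx_desc:
    /* free only the rings that were successfully allocated */
    for (rxr = sc->rx_rings; rxconf > 0; rxr++, rxconf--)
        ixgbe_dma_free(sc, &rxr->rxdma);
    return (ENOMEM);
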
2686 ixgbe_get_buf(struct ix_rxring *rxr, int i)
2688 struct ix_softc *sc = rxr->sc;
2694 rxbuf = &rxr->rx_buffers[i];
2695 rxdesc = &rxr->rx_base[i];
2710 error = bus_dmamap_load_mbuf(rxr->rxdma.dma_tag, rxbuf->map,
2717 bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map,
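
ixgbe_get_buf() (lines 2686-2717) attaches one fresh cluster to descriptor slot i: allocate it, DMA-load it into the slot's map, sync, and point the advanced rx descriptor at its physical address. A reconstruction; the cluster allocation, the buffer-struct field names, and the descriptor write follow the usual pattern but are assumptions:

    int
    ixgbe_get_buf(struct ix_rxring *rxr, int i)
    {
        struct ix_softc *sc = rxr->sc;
        struct ixgbe_rx_buf *rxbuf;             /* type name assumed */
        union ixgbe_adv_rx_desc *rxdesc;
        struct mbuf *mp;
        int error;

        rxbuf = &rxr->rx_buffers[i];
        rxdesc = &rxr->rx_base[i];

        /* assumed: grab a cluster sized for the configured mtu */
        mp = MCLGETL(NULL, M_DONTWAIT, sc->rx_mbuf_sz);
        if (mp == NULL)
            return (ENOBUFS);
        mp->m_len = mp->m_pkthdr.len = sc->rx_mbuf_sz;

        error = bus_dmamap_load_mbuf(rxr->rxdma.dma_tag, rxbuf->map,
            mp, BUS_DMA_NOWAIT);
        if (error) {
            m_freem(mp);
            return (error);
        }

        bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map,
            0, rxbuf->map->dm_mapsize, BUS_DMASYNC_PREREAD);
        rxbuf->buf = mp;                        /* field name assumed */

        /* point the hardware descriptor at the new cluster */
        rxdesc->read.pkt_addr = htole64(rxbuf->map->dm_segs[0].ds_addr);

        return (0);
    }
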
2735 ixgbe_allocate_receive_buffers(struct ix_rxring *rxr)
2737 struct ix_softc *sc = rxr->sc;
2742 if (!(rxr->rx_buffers = mallocarray(sc->num_rx_desc,
2750 rxbuf = rxr->rx_buffers;
2752 error = bus_dmamap_create(rxr->rxdma.dma_tag, 16 * 1024, 1,
2760 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
2761 rxr->rxdma.dma_map->dm_mapsize,
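
ixgbe_allocate_receive_buffers() (lines 2735-2761) sizes the rx_buffers array with mallocarray(), so the num_rx_desc multiplication is overflow-checked, then creates one DMA map per slot; the arguments at 2752 bound each mapping to a single contiguous 16 KB segment. A sketch with an assumed buffer-struct name:

    int
    ixgbe_allocate_receive_buffers(struct ix_rxring *rxr)
    {
        struct ix_softc *sc = rxr->sc;
        struct ixgbe_rx_buf *rxbuf;             /* type name assumed */
        int i, error;

        if (!(rxr->rx_buffers = mallocarray(sc->num_rx_desc,
            sizeof(struct ixgbe_rx_buf), M_DEVBUF, M_NOWAIT | M_ZERO)))
            return (ENOMEM);

        rxbuf = rxr->rx_buffers;
        for (i = 0; i < sc->num_rx_desc; i++, rxbuf++) {
            /* one map per descriptor slot: 1 segment, 16 KB max */
            error = bus_dmamap_create(rxr->rxdma.dma_tag, 16 * 1024, 1,
                16 * 1024, 0, BUS_DMA_NOWAIT, &rxbuf->map);
            if (error)
                return (error);     /* caller tears the ring down */
        }

        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map, 0,
            rxr->rxdma.dma_map->dm_mapsize,
            BUS_DMASYNC_PREREAD | BUS_DMASYNC_PREWRITE);

        return (0);
    }
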
2776 ixgbe_setup_receive_ring(struct ix_rxring *rxr)
2778 struct ix_softc *sc = rxr->sc;
2785 bzero((void *)rxr->rx_base, rsize);
2787 if ((error = ixgbe_allocate_receive_buffers(rxr)) != 0)
2791 rxr->next_to_check = 0;
2792 rxr->last_desc_filled = sc->num_rx_desc - 1;
2794 if_rxr_init(&rxr->rx_ring, 2 * ((ifp->if_hardmtu / MCLBYTES) + 1),
2797 ixgbe_rxfill(rxr);
2798 if (if_rxr_inuse(&rxr->rx_ring) == 0) {
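
ixgbe_setup_receive_ring() (lines 2776-2798) resets a ring: zero the descriptors, allocate buffers, rewind the indices, then size the if_rxring watermarks from the hardware MTU; 2 * ((if_hardmtu / MCLBYTES) + 1) is enough clusters for two maximum-size frames. Finally it primes the ring with ixgbe_rxfill() and fails if not a single buffer could be posted. A sketch; the high watermark and the ifp path are assumptions:

    int
    ixgbe_setup_receive_ring(struct ix_rxring *rxr)
    {
        struct ix_softc *sc = rxr->sc;
        struct ifnet *ifp = &sc->arpcom.ac_if;  /* path to ifp assumed */
        int rsize, error;

        rsize = roundup2(sc->num_rx_desc * sizeof(union ixgbe_adv_rx_desc),
            PAGE_SIZE);                         /* rounding assumed */
        bzero((void *)rxr->rx_base, rsize);

        if ((error = ixgbe_allocate_receive_buffers(rxr)) != 0)
            return (error);

        /* producer starts one full lap behind the consumer */
        rxr->next_to_check = 0;
        rxr->last_desc_filled = sc->num_rx_desc - 1;

        if_rxr_init(&rxr->rx_ring, 2 * ((ifp->if_hardmtu / MCLBYTES) + 1),
            sc->num_rx_desc - 1);               /* high watermark assumed */

        ixgbe_rxfill(rxr);
        if (if_rxr_inuse(&rxr->rx_ring) == 0) {
            printf("%s: unable to fill any rx descriptors\n",
                sc->dev.dv_xname);
            return (ENOBUFS);                   /* error value assumed */
        }

        return (0);
    }
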
2808 ixgbe_rxfill(struct ix_rxring *rxr)
2810 struct ix_softc *sc = rxr->sc;
2815 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
2816 0, rxr->rxdma.dma_map->dm_mapsize,
2819 i = rxr->last_desc_filled;
2820 for (slots = if_rxr_get(&rxr->rx_ring, sc->num_rx_desc);
2825 if (ixgbe_get_buf(rxr, i) != 0)
2828 rxr->last_desc_filled = i;
2832 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
2833 0, rxr->rxdma.dma_map->dm_mapsize,
2836 if_rxr_put(&rxr->rx_ring, slots);
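
ixgbe_rxfill() (lines 2808-2836) is the producer: it asks the if_rxring accounting layer for up to num_rx_desc slots with if_rxr_get(), fills descriptors one ixgbe_get_buf() at a time starting after last_desc_filled, and hands unused slots back with if_rxr_put(). The two map syncs bracket the descriptor writes. A sketch; the sync operations and the return convention (nonzero if anything was posted, tested at line 2847) are assumptions:

    int
    ixgbe_rxfill(struct ix_rxring *rxr)
    {
        struct ix_softc *sc = rxr->sc;
        int post = 0;
        u_int slots;
        int i;

        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
            0, rxr->rxdma.dma_map->dm_mapsize,
            BUS_DMASYNC_POSTWRITE);             /* sync op assumed */

        i = rxr->last_desc_filled;
        for (slots = if_rxr_get(&rxr->rx_ring, sc->num_rx_desc);
            slots > 0; slots--) {
            /* advance the producer index, wrapping at the ring end */
            if (++i == sc->num_rx_desc)
                i = 0;

            if (ixgbe_get_buf(rxr, i) != 0)
                break;

            rxr->last_desc_filled = i;
            post = 1;
        }

        bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
            0, rxr->rxdma.dma_map->dm_mapsize,
            BUS_DMASYNC_PREWRITE);              /* sync op assumed */

        /* hand back the slots we could not fill */
        if_rxr_put(&rxr->rx_ring, slots);

        return (post);
    }
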
2844 struct ix_rxring *rxr = xrxr;
2845 struct ix_softc *sc = rxr->sc;
2847 if (ixgbe_rxfill(rxr)) {
2849 IXGBE_WRITE_REG(&sc->hw, rxr->tail, rxr->last_desc_filled);
2850 } else if (if_rxr_inuse(&rxr->rx_ring) == 0)
2851 timeout_add(&rxr->rx_refill, 1);
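
ixgbe_rxrefill() (lines 2844-2851) is both the timeout callback wired up at line 2202 and the refill step of the interrupt paths (lines 1081 and 1112): if rxfill posted buffers, push the new producer index to this ring's tail register; if the ring ran completely dry (mbuf allocation failed), rearm the one-tick timeout so the ring can recover once memory becomes available. Nearly all of this is visible in the listing; only the function shape is filled in:

    void
    ixgbe_rxrefill(void *xrxr)
    {
        struct ix_rxring *rxr = xrxr;
        struct ix_softc *sc = rxr->sc;

        if (ixgbe_rxfill(rxr)) {
            /* tell the chip where the last valid descriptor now is */
            IXGBE_WRITE_REG(&sc->hw, rxr->tail, rxr->last_desc_filled);
        } else if (if_rxr_inuse(&rxr->rx_ring) == 0) {
            /* ring is completely empty; retry on the next tick */
            timeout_add(&rxr->rx_refill, 1);
        }
    }
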
2863 struct ix_rxring *rxr = sc->rx_rings;
2866 for (i = 0; i < sc->num_queues; i++, rxr++)
2867 if (ixgbe_setup_receive_ring(rxr))
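
Lines 2863-2867: bring-up sets up every ring in turn, and a single failure likely aborts the whole operation, since a ring without buffers cannot be handed to the hardware. Sketch (error handling assumed):

    struct ix_rxring *rxr = sc->rx_rings;
    int i;

    for (i = 0; i < sc->num_queues; i++, rxr++) {
        if (ixgbe_setup_receive_ring(rxr))
            return (ENOBUFS);                   /* error handling assumed */
    }
    return (0);
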
2887 struct ix_rxring *rxr = sc->rx_rings;
2930 for (i = 0; i < sc->num_queues; i++, rxr++) {
2931 uint64_t rdba = rxr->rxdma.dma_map->dm_segs[0].ds_addr;
2945 rxr->tail = IXGBE_RDT(i);
2959 IXGBE_WRITE_REG(hw, rxr->tail, 0);
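
Lines 2887-2959 program the hardware receive units: per queue, the descriptor area's physical address (rdba, taken from the DMA segment) goes into the base registers, the ring's tail register offset is cached as rxr->tail = IXGBE_RDT(i) so later writes (lines 828, 2849) need not recompute it, and the tail is zeroed until rxfill posts buffers. A sketch; every register write other than the rxr->tail one is an assumption:

    struct ixgbe_hw *hw = &sc->hw;
    struct ix_rxring *rxr = sc->rx_rings;
    int i;

    for (i = 0; i < sc->num_queues; i++, rxr++) {
        uint64_t rdba = rxr->rxdma.dma_map->dm_segs[0].ds_addr;

        /* assumed: program base/length of this queue's descriptor ring */
        IXGBE_WRITE_REG(hw, IXGBE_RDBAL(i), rdba & 0x00000000ffffffffULL);
        IXGBE_WRITE_REG(hw, IXGBE_RDBAH(i), rdba >> 32);
        IXGBE_WRITE_REG(hw, IXGBE_RDLEN(i),
            sc->num_rx_desc * sizeof(union ixgbe_adv_rx_desc));

        /* cache the tail register offset; rxrefill and init use it */
        rxr->tail = IXGBE_RDT(i);

        /* ring starts empty until rxfill posts buffers */
        IXGBE_WRITE_REG(hw, rxr->tail, 0);
    }
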
3065 struct ix_rxring *rxr;
3068 for (i = 0, rxr = sc->rx_rings; i < sc->num_queues; i++, rxr++)
3069 if_rxr_init(&rxr->rx_ring, 0, 0);
3071 for (i = 0, rxr = sc->rx_rings; i < sc->num_queues; i++, rxr++)
3072 ixgbe_free_receive_buffers(rxr);
3081 ixgbe_free_receive_buffers(struct ix_rxring *rxr)
3087 sc = rxr->sc;
3088 if (rxr->rx_buffers != NULL) {
3090 rxbuf = &rxr->rx_buffers[i];
3092 bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map,
3095 bus_dmamap_unload(rxr->rxdma.dma_tag,
3101 bus_dmamap_destroy(rxr->rxdma.dma_tag,
3106 free(rxr->rx_buffers, M_DEVBUF,
3108 rxr->rx_buffers = NULL;
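
Teardown is two-phased: lines 3065-3072 first zero every ring's if_rxring accounting (if_rxr_init() with 0 watermarks) so no further buffers can be posted, then each ring's buffers are freed. Lines 3081-3108 show the per-slot work: sync and unload the map, free any mbuf still attached, destroy the map, and finally free the rx_buffers array itself. A sketch with assumed buffer-struct field names:

    void
    ixgbe_free_receive_buffers(struct ix_rxring *rxr)
    {
        struct ix_softc *sc = rxr->sc;
        struct ixgbe_rx_buf *rxbuf;             /* type name assumed */
        int i;

        if (rxr->rx_buffers != NULL) {
            for (i = 0; i < sc->num_rx_desc; i++) {
                rxbuf = &rxr->rx_buffers[i];
                if (rxbuf->buf != NULL) {       /* field name assumed */
                    bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map,
                        0, rxbuf->map->dm_mapsize, BUS_DMASYNC_POSTREAD);
                    bus_dmamap_unload(rxr->rxdma.dma_tag, rxbuf->map);
                    m_freem(rxbuf->buf);
                    rxbuf->buf = NULL;
                }
                if (rxbuf->map != NULL) {
                    bus_dmamap_destroy(rxr->rxdma.dma_tag, rxbuf->map);
                    rxbuf->map = NULL;
                }
            }
            free(rxr->rx_buffers, M_DEVBUF,
                sc->num_rx_desc * sizeof(struct ixgbe_rx_buf));
            rxr->rx_buffers = NULL;
        }
    }
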
3120 ixgbe_rxeof(struct ix_rxring *rxr)
3122 struct ix_softc *sc = rxr->sc;
3137 i = rxr->next_to_check;
3138 while (if_rxr_inuse(&rxr->rx_ring) > 0) {
3142 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
3145 rxdesc = &rxr->rx_base[i];
3148 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
3156 rxbuf = &rxr->rx_buffers[i];
3159 bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map, 0,
3161 bus_dmamap_unload(rxr->rxdma.dma_tag, rxbuf->map);
3189 i, if_rxr_inuse(&rxr->rx_ring),
3190 rxr->last_desc_filled);
3205 nxbuf = &rxr->rx_buffers[nextp];
3257 if_rxr_put(&rxr->rx_ring, 1);
3258 bus_dmamap_sync(rxr->rxdma.dma_tag, rxr->rxdma.dma_map,
3266 rxr->next_to_check = i;
3268 if (ifiq_input(rxr->ifiq, &ml))
3269 if_rxr_livelocked(&rxr->rx_ring);
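
ixgbe_rxeof() (lines 3120-3269) is the consumer: starting at next_to_check it walks descriptors the hardware has written back, syncs and unloads each slot's map, takes the mbuf (multi-descriptor packets chain through the next slot, nxbuf at line 3205), returns each consumed slot with if_rxr_put(&rxr->rx_ring, 1), and finally hands the assembled mbuf_list to the stack via ifiq_input(). A nonzero return there means the stack is livelocked, and if_rxr_livelocked() lowers the ring's watermark so fewer buffers get posted. A compressed sketch of single-descriptor completion; the descriptor-ring syncs, EOP/checksum/VLAN handling, and fragment chaining are elided, and the field names of the buffer struct are assumptions:

    int
    ixgbe_rxeof(struct ix_rxring *rxr)
    {
        struct ix_softc *sc = rxr->sc;
        struct mbuf_list ml = MBUF_LIST_INITIALIZER();
        struct mbuf *mp;
        uint32_t staterr;
        int i;

        i = rxr->next_to_check;
        while (if_rxr_inuse(&rxr->rx_ring) > 0) {
            union ixgbe_adv_rx_desc *rxdesc = &rxr->rx_base[i];
            struct ixgbe_rx_buf *rxbuf;         /* type name assumed */

            staterr = letoh32(rxdesc->wb.upper.status_error);
            if (!ISSET(staterr, IXGBE_RXD_STAT_DD))
                break;          /* hardware hasn't written this one back */

            rxbuf = &rxr->rx_buffers[i];
            bus_dmamap_sync(rxr->rxdma.dma_tag, rxbuf->map, 0,
                rxbuf->map->dm_mapsize, BUS_DMASYNC_POSTREAD);
            bus_dmamap_unload(rxr->rxdma.dma_tag, rxbuf->map);

            mp = rxbuf->buf;                    /* field name assumed */
            rxbuf->buf = NULL;
            mp->m_len = letoh16(rxdesc->wb.upper.length);
            mp->m_pkthdr.len = mp->m_len;

            /* EOP test, checksum, vlan, fragment chaining elided */
            ml_enqueue(&ml, mp);

            if_rxr_put(&rxr->rx_ring, 1);
            if (++i == sc->num_rx_desc)
                i = 0;
        }
        rxr->next_to_check = i;

        if (ifiq_input(rxr->ifiq, &ml))
            if_rxr_livelocked(&rxr->rx_ring);

        return (1);                             /* return convention assumed */
    }
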
3854 ix_rxq_kstats(struct ix_softc *sc, struct ix_rxring *rxr)
3859 ks = kstat_create(sc->dev.dv_xname, 0, "ix-rxq", rxr->me,
3868 ks->ks_softc = rxr;
3873 rxr->kstat = ks;
3984 struct ix_rxring *rxr = ks->ks_softc;
3985 struct ix_softc *sc = rxr->sc;
3987 uint32_t i = rxr->me;
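
Lines 3854-3873 create a per-ring kstat named "ix-rxq" with the ring index rxr->me as its unit and stash the ring in ks_softc; lines 3984-3987 are the matching read callback recovering the ring, the softc, and the queue index from it. A sketch of the create side; the kstat type argument, the read hookup, and the kstat_install() call are assumptions:

    void
    ix_rxq_kstats(struct ix_softc *sc, struct ix_rxring *rxr)
    {
        struct kstat *ks;

        ks = kstat_create(sc->dev.dv_xname, 0, "ix-rxq", rxr->me,
            KSTAT_T_KV, 0);                     /* type assumed */
        if (ks == NULL)
            return;

        ks->ks_softc = rxr;     /* lets the read callback find the ring */
        /* assumed: ks->ks_read = ix_rxq_kstat_read; kstat_install(ks); */

        rxr->kstat = ks;
    }
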