xref: /onnv-gate/usr/src/uts/common/io/hxge/hxge_txdma.c (revision 11387:0072514d53c7)
16349Sqs148142 /*
26349Sqs148142  * CDDL HEADER START
36349Sqs148142  *
46349Sqs148142  * The contents of this file are subject to the terms of the
56349Sqs148142  * Common Development and Distribution License (the "License").
66349Sqs148142  * You may not use this file except in compliance with the License.
76349Sqs148142  *
86349Sqs148142  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
96349Sqs148142  * or http://www.opensolaris.org/os/licensing.
106349Sqs148142  * See the License for the specific language governing permissions
116349Sqs148142  * and limitations under the License.
126349Sqs148142  *
136349Sqs148142  * When distributing Covered Code, include this CDDL HEADER in each
146349Sqs148142  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
156349Sqs148142  * If applicable, add the following below this CDDL HEADER, with the
166349Sqs148142  * fields enclosed by brackets "[]" replaced with your own identifying
176349Sqs148142  * information: Portions Copyright [yyyy] [name of copyright owner]
186349Sqs148142  *
196349Sqs148142  * CDDL HEADER END
206349Sqs148142  */
216349Sqs148142 /*
228476SMichael.Speer@Sun.COM  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
236349Sqs148142  * Use is subject to license terms.
246349Sqs148142  */
256349Sqs148142 
266349Sqs148142 #include <hxge_impl.h>
276349Sqs148142 #include <hxge_txdma.h>
286349Sqs148142 #include <sys/llc1.h>
296349Sqs148142 
306349Sqs148142 uint32_t hxge_reclaim_pending = TXDMA_RECLAIM_PENDING_DEFAULT;
318476SMichael.Speer@Sun.COM uint32_t hxge_tx_minfree = 64;
326349Sqs148142 uint32_t hxge_tx_intr_thres = 0;
336349Sqs148142 uint32_t hxge_tx_max_gathers = TX_MAX_GATHER_POINTERS;
346349Sqs148142 uint32_t hxge_tx_tiny_pack = 1;
356349Sqs148142 uint32_t hxge_tx_use_bcopy = 1;
366349Sqs148142 
376349Sqs148142 extern uint32_t hxge_tx_ring_size;
386349Sqs148142 extern uint32_t hxge_bcopy_thresh;
396349Sqs148142 extern uint32_t hxge_dvma_thresh;
406349Sqs148142 extern uint32_t hxge_dma_stream_thresh;
416349Sqs148142 extern dma_method_t hxge_force_dma;
426349Sqs148142 
436349Sqs148142 /* Device register access attributes for PIO.  */
446349Sqs148142 extern ddi_device_acc_attr_t hxge_dev_reg_acc_attr;
456349Sqs148142 
466349Sqs148142 /* Device descriptor access attributes for DMA.  */
476349Sqs148142 extern ddi_device_acc_attr_t hxge_dev_desc_dma_acc_attr;
486349Sqs148142 
496349Sqs148142 /* Device buffer access attributes for DMA.  */
506349Sqs148142 extern ddi_device_acc_attr_t hxge_dev_buf_dma_acc_attr;
516349Sqs148142 extern ddi_dma_attr_t hxge_desc_dma_attr;
526349Sqs148142 extern ddi_dma_attr_t hxge_tx_dma_attr;
536349Sqs148142 
546349Sqs148142 static hxge_status_t hxge_map_txdma(p_hxge_t hxgep);
556349Sqs148142 static void hxge_unmap_txdma(p_hxge_t hxgep);
566349Sqs148142 static hxge_status_t hxge_txdma_hw_start(p_hxge_t hxgep);
576349Sqs148142 static void hxge_txdma_hw_stop(p_hxge_t hxgep);
586349Sqs148142 
596349Sqs148142 static hxge_status_t hxge_map_txdma_channel(p_hxge_t hxgep, uint16_t channel,
606349Sqs148142     p_hxge_dma_common_t *dma_buf_p, p_tx_ring_t *tx_desc_p,
616349Sqs148142     uint32_t num_chunks, p_hxge_dma_common_t *dma_cntl_p,
626349Sqs148142     p_tx_mbox_t *tx_mbox_p);
636349Sqs148142 static void hxge_unmap_txdma_channel(p_hxge_t hxgep, uint16_t channel,
646349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p);
656349Sqs148142 static hxge_status_t hxge_map_txdma_channel_buf_ring(p_hxge_t hxgep, uint16_t,
666349Sqs148142     p_hxge_dma_common_t *, p_tx_ring_t *, uint32_t);
676349Sqs148142 static void hxge_unmap_txdma_channel_buf_ring(p_hxge_t hxgep,
686349Sqs148142     p_tx_ring_t tx_ring_p);
696349Sqs148142 static void hxge_map_txdma_channel_cfg_ring(p_hxge_t, uint16_t,
706349Sqs148142     p_hxge_dma_common_t *, p_tx_ring_t, p_tx_mbox_t *);
716349Sqs148142 static void hxge_unmap_txdma_channel_cfg_ring(p_hxge_t hxgep,
726349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p);
736349Sqs148142 static hxge_status_t hxge_txdma_start_channel(p_hxge_t hxgep, uint16_t channel,
746349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p);
756349Sqs148142 static hxge_status_t hxge_txdma_stop_channel(p_hxge_t hxgep, uint16_t channel,
766349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p);
776349Sqs148142 static p_tx_ring_t hxge_txdma_get_ring(p_hxge_t hxgep, uint16_t channel);
786349Sqs148142 static hxge_status_t hxge_tx_err_evnts(p_hxge_t hxgep, uint_t index,
796349Sqs148142     p_hxge_ldv_t ldvp, tdc_stat_t cs);
806349Sqs148142 static p_tx_mbox_t hxge_txdma_get_mbox(p_hxge_t hxgep, uint16_t channel);
816349Sqs148142 static hxge_status_t hxge_txdma_fatal_err_recover(p_hxge_t hxgep,
826349Sqs148142     uint16_t channel, p_tx_ring_t tx_ring_p);
836349Sqs148142 static hxge_status_t hxge_tx_port_fatal_err_recover(p_hxge_t hxgep);
846349Sqs148142 
856349Sqs148142 hxge_status_t
866349Sqs148142 hxge_init_txdma_channels(p_hxge_t hxgep)
876349Sqs148142 {
886349Sqs148142 	hxge_status_t	status = HXGE_OK;
896349Sqs148142 	block_reset_t	reset_reg;
906349Sqs148142 
916349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_init_txdma_channels"));
926349Sqs148142 
936349Sqs148142 	/*
946349Sqs148142 	 * Reset the TDC block from the PEU to clean up any unknown
956349Sqs148142 	 * configuration that may be left over from a previous reboot.
966349Sqs148142 	 */
976349Sqs148142 	reset_reg.value = 0;
986349Sqs148142 	reset_reg.bits.tdc_rst = 1;
996349Sqs148142 	HXGE_REG_WR32(hxgep->hpi_handle, BLOCK_RESET, reset_reg.value);
1006349Sqs148142 
1016349Sqs148142 	HXGE_DELAY(1000);
1026349Sqs148142 
1036349Sqs148142 	status = hxge_map_txdma(hxgep);
1046349Sqs148142 	if (status != HXGE_OK) {
1056349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
1066349Sqs148142 		    "<== hxge_init_txdma_channels: status 0x%x", status));
1076349Sqs148142 		return (status);
1086349Sqs148142 	}
1096349Sqs148142 
1106349Sqs148142 	status = hxge_txdma_hw_start(hxgep);
1116349Sqs148142 	if (status != HXGE_OK) {
1126349Sqs148142 		hxge_unmap_txdma(hxgep);
1136349Sqs148142 		return (status);
1146349Sqs148142 	}
1156349Sqs148142 
1166349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
1176349Sqs148142 	    "<== hxge_init_txdma_channels: status 0x%x", status));
1186349Sqs148142 
1196349Sqs148142 	return (HXGE_OK);
1206349Sqs148142 }
1216349Sqs148142 
1226349Sqs148142 void
1236349Sqs148142 hxge_uninit_txdma_channels(p_hxge_t hxgep)
1246349Sqs148142 {
1256349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_uninit_txdma_channels"));
1266349Sqs148142 
1276349Sqs148142 	hxge_txdma_hw_stop(hxgep);
1286349Sqs148142 	hxge_unmap_txdma(hxgep);
1296349Sqs148142 
1306349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_uninit_txdma_channels"));
1316349Sqs148142 }
1326349Sqs148142 
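/*
 * hxge_setup_dma_common
 *	Carve "entries * size" bytes out of the source DMA area to describe
 *	the destination area (length, block count, block size), then advance
 *	the source's kernel address, length and DMA cookie past that region.
 */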
1336349Sqs148142 void
1346349Sqs148142 hxge_setup_dma_common(p_hxge_dma_common_t dest_p, p_hxge_dma_common_t src_p,
1356349Sqs148142     uint32_t entries, uint32_t size)
1366349Sqs148142 {
1376349Sqs148142 	size_t tsize;
1386349Sqs148142 	*dest_p = *src_p;
1396349Sqs148142 	tsize = size * entries;
1406349Sqs148142 	dest_p->alength = tsize;
1416349Sqs148142 	dest_p->nblocks = entries;
1426349Sqs148142 	dest_p->block_size = size;
1436349Sqs148142 	dest_p->offset += tsize;
1446349Sqs148142 
1456349Sqs148142 	src_p->kaddrp = (caddr_t)dest_p->kaddrp + tsize;
1466349Sqs148142 	src_p->alength -= tsize;
1476349Sqs148142 	src_p->dma_cookie.dmac_laddress += tsize;
1486349Sqs148142 	src_p->dma_cookie.dmac_size -= tsize;
1496349Sqs148142 }
1506349Sqs148142 
1516349Sqs148142 hxge_status_t
1526349Sqs148142 hxge_reset_txdma_channel(p_hxge_t hxgep, uint16_t channel, uint64_t reg_data)
1536349Sqs148142 {
1546349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
1556349Sqs148142 	hxge_status_t	status = HXGE_OK;
1566349Sqs148142 	hpi_handle_t	handle;
1576349Sqs148142 
1586349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, " ==> hxge_reset_txdma_channel"));
1596349Sqs148142 
1606349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
1616349Sqs148142 	if ((reg_data & TDC_TDR_RST_MASK) == TDC_TDR_RST_MASK) {
1626349Sqs148142 		rs = hpi_txdma_channel_reset(handle, channel);
1636349Sqs148142 	} else {
1646349Sqs148142 		rs = hpi_txdma_channel_control(handle, TXDMA_RESET, channel);
1656349Sqs148142 	}
1666349Sqs148142 
1676349Sqs148142 	if (rs != HPI_SUCCESS) {
1686349Sqs148142 		status = HXGE_ERROR | rs;
1696349Sqs148142 	}
1706349Sqs148142 
1716349Sqs148142 	/*
1726349Sqs148142 	 * Reset the tail (kick) register to 0. Hardware will not reset it, and a
1736349Sqs148142 	 * Tx overflow fatal error occurs if the tail is not set to 0 after reset.
1746349Sqs148142 	 */
1756349Sqs148142 	TXDMA_REG_WRITE64(handle, TDC_TDR_KICK, channel, 0);
1766349Sqs148142 
1776349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, " <== hxge_reset_txdma_channel"));
1786349Sqs148142 
1796349Sqs148142 	return (status);
1806349Sqs148142 }
1816349Sqs148142 
1826349Sqs148142 hxge_status_t
1836349Sqs148142 hxge_init_txdma_channel_event_mask(p_hxge_t hxgep, uint16_t channel,
1846349Sqs148142     tdc_int_mask_t *mask_p)
1856349Sqs148142 {
1866349Sqs148142 	hpi_handle_t	handle;
1876349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
1886349Sqs148142 	hxge_status_t	status = HXGE_OK;
1896349Sqs148142 
1906349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
1916349Sqs148142 	    "<== hxge_init_txdma_channel_event_mask"));
1926349Sqs148142 
1936349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
1946349Sqs148142 
1956349Sqs148142 	/*
1966349Sqs148142 	 * Mask off tx_rng_oflow since it is a false alarm. The driver
1976349Sqs148142 	 * ensures that it does not overflow the hardware and checks the
1986349Sqs148142 	 * hardware status itself.
1996349Sqs148142 	 */
2006349Sqs148142 	mask_p->bits.tx_rng_oflow = 1;
2016349Sqs148142 	rs = hpi_txdma_event_mask(handle, OP_SET, channel, mask_p);
2026349Sqs148142 	if (rs != HPI_SUCCESS) {
2036349Sqs148142 		status = HXGE_ERROR | rs;
2046349Sqs148142 	}
2056349Sqs148142 
2066349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
2076349Sqs148142 	    "==> hxge_init_txdma_channel_event_mask"));
2086349Sqs148142 	return (status);
2096349Sqs148142 }
2106349Sqs148142 
2116349Sqs148142 hxge_status_t
2126349Sqs148142 hxge_enable_txdma_channel(p_hxge_t hxgep,
2136349Sqs148142     uint16_t channel, p_tx_ring_t tx_desc_p, p_tx_mbox_t mbox_p)
2146349Sqs148142 {
2156349Sqs148142 	hpi_handle_t	handle;
2166349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
2176349Sqs148142 	hxge_status_t	status = HXGE_OK;
2186349Sqs148142 
2196349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_enable_txdma_channel"));
2206349Sqs148142 
2216349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
2226349Sqs148142 	/*
2236349Sqs148142 	 * Use configuration data composed at init time. Write to hardware the
2246349Sqs148142 	 * transmit ring configurations.
2256349Sqs148142 	 */
2266349Sqs148142 	rs = hpi_txdma_ring_config(handle, OP_SET, channel,
2276349Sqs148142 	    (uint64_t *)&(tx_desc_p->tx_ring_cfig.value));
2286349Sqs148142 
2296349Sqs148142 	if (rs != HPI_SUCCESS) {
2306349Sqs148142 		return (HXGE_ERROR | rs);
2316349Sqs148142 	}
2326349Sqs148142 
2336349Sqs148142 	/* Write to hardware the mailbox */
2346349Sqs148142 	rs = hpi_txdma_mbox_config(handle, OP_SET, channel,
2356349Sqs148142 	    (uint64_t *)&mbox_p->tx_mbox.dma_cookie.dmac_laddress);
2366349Sqs148142 
2376349Sqs148142 	if (rs != HPI_SUCCESS) {
2386349Sqs148142 		return (HXGE_ERROR | rs);
2396349Sqs148142 	}
2406349Sqs148142 
2416349Sqs148142 	/* Start the DMA engine. */
2426349Sqs148142 	rs = hpi_txdma_channel_init_enable(handle, channel);
2436349Sqs148142 	if (rs != HPI_SUCCESS) {
2446349Sqs148142 		return (HXGE_ERROR | rs);
2456349Sqs148142 	}
2466349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_enable_txdma_channel"));
2476349Sqs148142 	return (status);
2486349Sqs148142 }
2496349Sqs148142 
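/*
 * hxge_fill_tx_hdr
 *	Fill in the internal transmit packet header. When fill_len is set,
 *	only the total transfer length is recorded. Otherwise the pad count
 *	is recorded and the Ethernet/LLC/VLAN and IP headers are parsed from
 *	the message block chain to set the L3 start offset, IP header length,
 *	IP version and L4 checksum (TCP/UDP) fields.
 */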
2506349Sqs148142 void
2516349Sqs148142 hxge_fill_tx_hdr(p_mblk_t mp, boolean_t fill_len, boolean_t l4_cksum,
2526349Sqs148142     int pkt_len, uint8_t npads, p_tx_pkt_hdr_all_t pkthdrp)
2536349Sqs148142 {
2546349Sqs148142 	p_tx_pkt_header_t	hdrp;
2556349Sqs148142 	p_mblk_t		nmp;
2566349Sqs148142 	uint64_t		tmp;
2576349Sqs148142 	size_t			mblk_len;
2586349Sqs148142 	size_t			iph_len;
2596349Sqs148142 	size_t			hdrs_size;
2606349Sqs148142 	uint8_t			*ip_buf;
2616349Sqs148142 	uint16_t		eth_type;
2626349Sqs148142 	uint8_t			ipproto;
2636349Sqs148142 	boolean_t		is_vlan = B_FALSE;
2646349Sqs148142 	size_t			eth_hdr_size;
2656349Sqs148142 	uint8_t hdrs_buf[sizeof (struct ether_header) + 64 + sizeof (uint32_t)];
2666349Sqs148142 
2676349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_fill_tx_hdr: mp $%p", mp));
2686349Sqs148142 
2696349Sqs148142 	/*
2706349Sqs148142 	 * Caller should zero out the headers first.
2716349Sqs148142 	 */
2726349Sqs148142 	hdrp = (p_tx_pkt_header_t)&pkthdrp->pkthdr;
2736349Sqs148142 
2746349Sqs148142 	if (fill_len) {
2756349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
2766349Sqs148142 		    "==> hxge_fill_tx_hdr: pkt_len %d npads %d",
2776349Sqs148142 		    pkt_len, npads));
2786349Sqs148142 		tmp = (uint64_t)pkt_len;
2796349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_TOT_XFER_LEN_SHIFT);
2806349Sqs148142 
2816349Sqs148142 		goto fill_tx_header_done;
2826349Sqs148142 	}
2836349Sqs148142 	tmp = (uint64_t)npads;
2846349Sqs148142 	hdrp->value |= (tmp << TX_PKT_HEADER_PAD_SHIFT);
2856349Sqs148142 
2866349Sqs148142 	/*
2876349Sqs148142 	 * mp is the original data packet (does not include the internal
2886349Sqs148142 	 * transmit header).
2896349Sqs148142 	 */
2906349Sqs148142 	nmp = mp;
2916349Sqs148142 	mblk_len = (size_t)nmp->b_wptr - (size_t)nmp->b_rptr;
2926349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
2936349Sqs148142 	    "==> hxge_fill_tx_hdr: mp $%p b_rptr $%p len %d",
2946349Sqs148142 	    mp, nmp->b_rptr, mblk_len));
2956349Sqs148142 	ip_buf = NULL;
2966349Sqs148142 	bcopy(nmp->b_rptr, &hdrs_buf[0], sizeof (struct ether_vlan_header));
2976349Sqs148142 	eth_type = ntohs(((p_ether_header_t)hdrs_buf)->ether_type);
2986349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
2996349Sqs148142 	    "==> hxge_fill_tx_hdr: ether type 0x%x (value 0x%llx)",
3006349Sqs148142 	    eth_type, hdrp->value));
3016349Sqs148142 
3026349Sqs148142 	if (eth_type < ETHERMTU) {
3036349Sqs148142 		tmp = 1ull;
3046349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_LLC_SHIFT);
3056349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
3066349Sqs148142 		    "==> hxge_tx_pkt_hdr_init: LLC value 0x%llx", hdrp->value));
3076349Sqs148142 		if (*(hdrs_buf + sizeof (struct ether_header)) ==
3086349Sqs148142 		    LLC_SNAP_SAP) {
3096349Sqs148142 			eth_type = ntohs(*((uint16_t *)(hdrs_buf +
3106349Sqs148142 			    sizeof (struct ether_header) + 6)));
3116349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
3126349Sqs148142 			    "==> hxge_tx_pkt_hdr_init: LLC ether type 0x%x",
3136349Sqs148142 			    eth_type));
3146349Sqs148142 		} else {
3156349Sqs148142 			goto fill_tx_header_done;
3166349Sqs148142 		}
3176349Sqs148142 	} else if (eth_type == VLAN_ETHERTYPE) {
3186349Sqs148142 		tmp = 1ull;
3196349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_VLAN__SHIFT);
3206349Sqs148142 
3216349Sqs148142 		eth_type = ntohs(((struct ether_vlan_header *)
3226349Sqs148142 		    hdrs_buf)->ether_type);
3236349Sqs148142 		is_vlan = B_TRUE;
3246349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
3256349Sqs148142 		    "==> hxge_tx_pkt_hdr_init: VLAN value 0x%llx",
3266349Sqs148142 		    hdrp->value));
3276349Sqs148142 	}
3286349Sqs148142 	if (!is_vlan) {
3296349Sqs148142 		eth_hdr_size = sizeof (struct ether_header);
3306349Sqs148142 	} else {
3316349Sqs148142 		eth_hdr_size = sizeof (struct ether_vlan_header);
3326349Sqs148142 	}
3336349Sqs148142 
3346349Sqs148142 	switch (eth_type) {
3356349Sqs148142 	case ETHERTYPE_IP:
3366349Sqs148142 		if (mblk_len > eth_hdr_size + sizeof (uint8_t)) {
3376349Sqs148142 			ip_buf = nmp->b_rptr + eth_hdr_size;
3386349Sqs148142 			mblk_len -= eth_hdr_size;
3396349Sqs148142 			iph_len = ((*ip_buf) & 0x0f);
3406349Sqs148142 			if (mblk_len > (iph_len + sizeof (uint32_t))) {
3416349Sqs148142 				ip_buf = nmp->b_rptr;
3426349Sqs148142 				ip_buf += eth_hdr_size;
3436349Sqs148142 			} else {
3446349Sqs148142 				ip_buf = NULL;
3456349Sqs148142 			}
3466349Sqs148142 		}
3476349Sqs148142 		if (ip_buf == NULL) {
3486349Sqs148142 			hdrs_size = 0;
3496349Sqs148142 			((p_ether_header_t)hdrs_buf)->ether_type = 0;
3506349Sqs148142 			while ((nmp) && (hdrs_size < sizeof (hdrs_buf))) {
3516349Sqs148142 				mblk_len = (size_t)nmp->b_wptr -
3526349Sqs148142 				    (size_t)nmp->b_rptr;
3536349Sqs148142 				if (mblk_len >=
3546349Sqs148142 				    (sizeof (hdrs_buf) - hdrs_size))
3556349Sqs148142 					mblk_len = sizeof (hdrs_buf) -
3566349Sqs148142 					    hdrs_size;
3576349Sqs148142 				bcopy(nmp->b_rptr,
3586349Sqs148142 				    &hdrs_buf[hdrs_size], mblk_len);
3596349Sqs148142 				hdrs_size += mblk_len;
3606349Sqs148142 				nmp = nmp->b_cont;
3616349Sqs148142 			}
3626349Sqs148142 			ip_buf = hdrs_buf;
3636349Sqs148142 			ip_buf += eth_hdr_size;
3646349Sqs148142 			iph_len = ((*ip_buf) & 0x0f);
3656349Sqs148142 		}
3666349Sqs148142 		ipproto = ip_buf[9];
3676349Sqs148142 
3686349Sqs148142 		tmp = (uint64_t)iph_len;
3696349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_IHL_SHIFT);
3706349Sqs148142 		tmp = (uint64_t)(eth_hdr_size >> 1);
3716349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_L3START_SHIFT);
3726349Sqs148142 
3736349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_fill_tx_hdr: IPv4 "
3746349Sqs148142 		    " iph_len %d l3start %d eth_hdr_size %d proto 0x%x"
3756349Sqs148142 		    "tmp 0x%x", iph_len, hdrp->bits.l3start, eth_hdr_size,
3766349Sqs148142 		    ipproto, tmp));
3776349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
3786349Sqs148142 		    "==> hxge_tx_pkt_hdr_init: IP value 0x%llx", hdrp->value));
3796349Sqs148142 		break;
3806349Sqs148142 
3816349Sqs148142 	case ETHERTYPE_IPV6:
3826349Sqs148142 		hdrs_size = 0;
3836349Sqs148142 		((p_ether_header_t)hdrs_buf)->ether_type = 0;
3846349Sqs148142 		while ((nmp) && (hdrs_size < sizeof (hdrs_buf))) {
3856349Sqs148142 			mblk_len = (size_t)nmp->b_wptr - (size_t)nmp->b_rptr;
3866349Sqs148142 			if (mblk_len >= (sizeof (hdrs_buf) - hdrs_size))
3876349Sqs148142 				mblk_len = sizeof (hdrs_buf) - hdrs_size;
3886349Sqs148142 			bcopy(nmp->b_rptr, &hdrs_buf[hdrs_size], mblk_len);
3896349Sqs148142 			hdrs_size += mblk_len;
3906349Sqs148142 			nmp = nmp->b_cont;
3916349Sqs148142 		}
3926349Sqs148142 		ip_buf = hdrs_buf;
3936349Sqs148142 		ip_buf += eth_hdr_size;
3946349Sqs148142 
3956349Sqs148142 		tmp = 1ull;
3966349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_IP_VER_SHIFT);
3976349Sqs148142 
3986349Sqs148142 		tmp = (eth_hdr_size >> 1);
3996349Sqs148142 		hdrp->value |= (tmp << TX_PKT_HEADER_L3START_SHIFT);
4006349Sqs148142 
4016349Sqs148142 		/* byte 6 is the next header protocol */
4026349Sqs148142 		ipproto = ip_buf[6];
4036349Sqs148142 
4046349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_fill_tx_hdr: IPv6 "
4056349Sqs148142 		    " iph_len %d l3start %d eth_hdr_size %d proto 0x%x",
4066349Sqs148142 		    iph_len, hdrp->bits.l3start, eth_hdr_size, ipproto));
4076349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_tx_pkt_hdr_init: IPv6 "
4086349Sqs148142 		    "value 0x%llx", hdrp->value));
4096349Sqs148142 		break;
4106349Sqs148142 
4116349Sqs148142 	default:
4126349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_fill_tx_hdr: non-IP"));
4136349Sqs148142 		goto fill_tx_header_done;
4146349Sqs148142 	}
4156349Sqs148142 
4166349Sqs148142 	switch (ipproto) {
4176349Sqs148142 	case IPPROTO_TCP:
4186349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
4196349Sqs148142 		    "==> hxge_fill_tx_hdr: TCP (cksum flag %d)", l4_cksum));
4206349Sqs148142 		if (l4_cksum) {
4216349Sqs148142 			tmp = 1ull;
4226349Sqs148142 			hdrp->value |= (tmp << TX_PKT_HEADER_PKT_TYPE_SHIFT);
4236349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
4246349Sqs148142 			    "==> hxge_tx_pkt_hdr_init: TCP CKSUM"
4256349Sqs148142 			    "value 0x%llx", hdrp->value));
4266349Sqs148142 		}
4276349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
4286349Sqs148142 		    "==> hxge_tx_pkt_hdr_init: TCP value 0x%llx", hdrp->value));
4296349Sqs148142 		break;
4306349Sqs148142 
4316349Sqs148142 	case IPPROTO_UDP:
4326349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_fill_tx_hdr: UDP"));
4336349Sqs148142 		if (l4_cksum) {
4346349Sqs148142 			tmp = 0x2ull;
4356349Sqs148142 			hdrp->value |= (tmp << TX_PKT_HEADER_PKT_TYPE_SHIFT);
4366349Sqs148142 		}
4376349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
4386349Sqs148142 		    "==> hxge_tx_pkt_hdr_init: UDP value 0x%llx",
4396349Sqs148142 		    hdrp->value));
4406349Sqs148142 		break;
4416349Sqs148142 
4426349Sqs148142 	default:
4436349Sqs148142 		goto fill_tx_header_done;
4446349Sqs148142 	}
4456349Sqs148142 
4466349Sqs148142 fill_tx_header_done:
4476349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
4486349Sqs148142 	    "==> hxge_fill_tx_hdr: pkt_len %d npads %d value 0x%llx",
4496349Sqs148142 	    pkt_len, npads, hdrp->value));
4506349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL, "<== hxge_fill_tx_hdr"));
4516349Sqs148142 }
4526349Sqs148142 
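/*
 * hxge_tx_pkt_header_reserve
 *	Allocate a new message block, reserve TX_PKT_HEADER_SIZE bytes at the
 *	end of it for the transmit packet header, and link the original packet
 *	behind it. Returns NULL if allocb() fails.
 */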
4536349Sqs148142 /*ARGSUSED*/
4546349Sqs148142 p_mblk_t
4556349Sqs148142 hxge_tx_pkt_header_reserve(p_mblk_t mp, uint8_t *npads)
4566349Sqs148142 {
4576349Sqs148142 	p_mblk_t newmp = NULL;
4586349Sqs148142 
4596349Sqs148142 	if ((newmp = allocb(TX_PKT_HEADER_SIZE, BPRI_MED)) == NULL) {
4606349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL,
4616349Sqs148142 		    "<== hxge_tx_pkt_header_reserve: allocb failed"));
4626349Sqs148142 		return (NULL);
4636349Sqs148142 	}
4646349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
4656349Sqs148142 	    "==> hxge_tx_pkt_header_reserve: get new mp"));
4666349Sqs148142 	DB_TYPE(newmp) = M_DATA;
4676349Sqs148142 	newmp->b_rptr = newmp->b_wptr = DB_LIM(newmp);
4686349Sqs148142 	linkb(newmp, mp);
4696349Sqs148142 	newmp->b_rptr -= TX_PKT_HEADER_SIZE;
4706349Sqs148142 
4716349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
4726349Sqs148142 	    "==>hxge_tx_pkt_header_reserve: b_rptr $%p b_wptr $%p",
4736349Sqs148142 	    newmp->b_rptr, newmp->b_wptr));
4746349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
4756349Sqs148142 	    "<== hxge_tx_pkt_header_reserve: use new mp"));
4766349Sqs148142 	return (newmp);
4776349Sqs148142 }
4786349Sqs148142 
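/*
 * hxge_tx_pkt_nmblocks
 *	Walk the message block chain and return the number of gather blocks
 *	needed to transmit it; the total transfer length is returned through
 *	tot_xfer_len_p. Fragments below hxge_bcopy_thresh are counted together
 *	as one block, fragments larger than the 4K transfer limit are split
 *	with dupb(), and the chain is pulled up with msgpullup() when the
 *	gather pointer limit would otherwise be exceeded. Returns 0 on
 *	allocation failure.
 */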
4796349Sqs148142 int
4806349Sqs148142 hxge_tx_pkt_nmblocks(p_mblk_t mp, int *tot_xfer_len_p)
4816349Sqs148142 {
4826349Sqs148142 	uint_t		nmblks;
4836349Sqs148142 	ssize_t		len;
4846349Sqs148142 	uint_t		pkt_len;
4856349Sqs148142 	p_mblk_t	nmp, bmp, tmp;
4866349Sqs148142 	uint8_t		*b_wptr;
4876349Sqs148142 
4886349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
4896349Sqs148142 	    "==> hxge_tx_pkt_nmblocks: mp $%p rptr $%p wptr $%p len %d",
4906349Sqs148142 	    mp, mp->b_rptr, mp->b_wptr, MBLKL(mp)));
4916349Sqs148142 
4926349Sqs148142 	nmp = mp;
4936349Sqs148142 	bmp = mp;
4946349Sqs148142 	nmblks = 0;
4956349Sqs148142 	pkt_len = 0;
4966349Sqs148142 	*tot_xfer_len_p = 0;
4976349Sqs148142 
4986349Sqs148142 	while (nmp) {
4996349Sqs148142 		len = MBLKL(nmp);
5006349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_tx_pkt_nmblocks: "
5016349Sqs148142 		    "len %d pkt_len %d nmblks %d tot_xfer_len %d",
5026349Sqs148142 		    len, pkt_len, nmblks, *tot_xfer_len_p));
5036349Sqs148142 
5046349Sqs148142 		if (len <= 0) {
5056349Sqs148142 			bmp = nmp;
5066349Sqs148142 			nmp = nmp->b_cont;
5076349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
5086349Sqs148142 			    "==> hxge_tx_pkt_nmblocks:"
5096349Sqs148142 			    " len (0) pkt_len %d nmblks %d", pkt_len, nmblks));
5106349Sqs148142 			continue;
5116349Sqs148142 		}
5126349Sqs148142 		*tot_xfer_len_p += len;
5136349Sqs148142 		HXGE_DEBUG_MSG((NULL, TX_CTL, "==> hxge_tx_pkt_nmblocks: "
5146349Sqs148142 		    "len %d pkt_len %d nmblks %d tot_xfer_len %d",
5156349Sqs148142 		    len, pkt_len, nmblks, *tot_xfer_len_p));
5166349Sqs148142 
5176349Sqs148142 		if (len < hxge_bcopy_thresh) {
5186349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
5196349Sqs148142 			    "==> hxge_tx_pkt_nmblocks: "
5206349Sqs148142 			    "len %d (< thresh) pkt_len %d nmblks %d",
5216349Sqs148142 			    len, pkt_len, nmblks));
5226349Sqs148142 			if (pkt_len == 0)
5236349Sqs148142 				nmblks++;
5246349Sqs148142 			pkt_len += len;
5256349Sqs148142 			if (pkt_len >= hxge_bcopy_thresh) {
5266349Sqs148142 				pkt_len = 0;
5276349Sqs148142 				len = 0;
5286349Sqs148142 				nmp = bmp;
5296349Sqs148142 			}
5306349Sqs148142 		} else {
5316349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
5326349Sqs148142 			    "==> hxge_tx_pkt_nmblocks: "
5336349Sqs148142 			    "len %d (> thresh) pkt_len %d nmblks %d",
5346349Sqs148142 			    len, pkt_len, nmblks));
5356349Sqs148142 			pkt_len = 0;
5366349Sqs148142 			nmblks++;
5376349Sqs148142 			/*
5386349Sqs148142 			 * Hardware limits the transfer length to 4K. If len is
5396349Sqs148142 			 * more than 4K, we need to break it up into at most 2
5406349Sqs148142 			 * more blocks.
5416349Sqs148142 			 */
5426349Sqs148142 			if (len > TX_MAX_TRANSFER_LENGTH) {
5436349Sqs148142 				uint32_t nsegs = 1;
5446349Sqs148142 
5456349Sqs148142 				HXGE_DEBUG_MSG((NULL, TX_CTL,
5466349Sqs148142 				    "==> hxge_tx_pkt_nmblocks: "
5476349Sqs148142 				    "len %d pkt_len %d nmblks %d nsegs %d",
5486349Sqs148142 				    len, pkt_len, nmblks, nsegs));
5496349Sqs148142 				nsegs = 1;
5506349Sqs148142 				if (len % (TX_MAX_TRANSFER_LENGTH * 2)) {
5516349Sqs148142 					++nsegs;
5526349Sqs148142 				}
5536349Sqs148142 				do {
5546349Sqs148142 					b_wptr = nmp->b_rptr +
5556349Sqs148142 					    TX_MAX_TRANSFER_LENGTH;
5566349Sqs148142 					nmp->b_wptr = b_wptr;
5576349Sqs148142 					if ((tmp = dupb(nmp)) == NULL) {
5586349Sqs148142 						return (0);
5596349Sqs148142 					}
5606349Sqs148142 					tmp->b_rptr = b_wptr;
5616349Sqs148142 					tmp->b_wptr = nmp->b_wptr;
5626349Sqs148142 					tmp->b_cont = nmp->b_cont;
5636349Sqs148142 					nmp->b_cont = tmp;
5646349Sqs148142 					nmblks++;
5656349Sqs148142 					if (--nsegs) {
5666349Sqs148142 						nmp = tmp;
5676349Sqs148142 					}
5686349Sqs148142 				} while (nsegs);
5696349Sqs148142 				nmp = tmp;
5706349Sqs148142 			}
5716349Sqs148142 		}
5726349Sqs148142 
5736349Sqs148142 		/*
5746349Sqs148142 		 * Hardware limits the transmit gather pointers to 15.
5756349Sqs148142 		 */
5766349Sqs148142 		if (nmp->b_cont && (nmblks + TX_GATHER_POINTERS_THRESHOLD) >
5776349Sqs148142 		    TX_MAX_GATHER_POINTERS) {
5786349Sqs148142 			HXGE_DEBUG_MSG((NULL, TX_CTL,
5796349Sqs148142 			    "==> hxge_tx_pkt_nmblocks: pull msg - "
5806349Sqs148142 			    "len %d pkt_len %d nmblks %d",
5816349Sqs148142 			    len, pkt_len, nmblks));
5826349Sqs148142 			/* Pull all message blocks from b_cont */
5836349Sqs148142 			if ((tmp = msgpullup(nmp->b_cont, -1)) == NULL) {
5846349Sqs148142 				return (0);
5856349Sqs148142 			}
5866349Sqs148142 			freemsg(nmp->b_cont);
5876349Sqs148142 			nmp->b_cont = tmp;
5886349Sqs148142 			pkt_len = 0;
5896349Sqs148142 		}
5906349Sqs148142 		bmp = nmp;
5916349Sqs148142 		nmp = nmp->b_cont;
5926349Sqs148142 	}
5936349Sqs148142 
5946349Sqs148142 	HXGE_DEBUG_MSG((NULL, TX_CTL,
5956349Sqs148142 	    "<== hxge_tx_pkt_nmblocks: rptr $%p wptr $%p "
5966349Sqs148142 	    "nmblks %d len %d tot_xfer_len %d",
5976349Sqs148142 	    mp->b_rptr, mp->b_wptr, nmblks, MBLKL(mp), *tot_xfer_len_p));
5986349Sqs148142 	return (nmblks);
5996349Sqs148142 }
6006349Sqs148142 
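/*
 * hxge_txdma_reclaim
 *	Reclaim transmit descriptors that the hardware has completed: read
 *	the hardware-maintained head pointer, release the DVMA/DMA bindings
 *	and message blocks for each completed descriptor, and update the
 *	statistics. Returns B_TRUE when enough descriptors are free to hold
 *	nmblks more, B_FALSE when the ring is still too full.
 */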
6016349Sqs148142 boolean_t
6026349Sqs148142 hxge_txdma_reclaim(p_hxge_t hxgep, p_tx_ring_t tx_ring_p, int nmblks)
6036349Sqs148142 {
6046349Sqs148142 	boolean_t		status = B_TRUE;
6056349Sqs148142 	p_hxge_dma_common_t	tx_desc_dma_p;
6066349Sqs148142 	hxge_dma_common_t	desc_area;
6076349Sqs148142 	p_tx_desc_t		tx_desc_ring_vp;
6086349Sqs148142 	p_tx_desc_t		tx_desc_p;
6096349Sqs148142 	p_tx_desc_t		tx_desc_pp;
6106349Sqs148142 	tx_desc_t		r_tx_desc;
6116349Sqs148142 	p_tx_msg_t		tx_msg_ring;
6126349Sqs148142 	p_tx_msg_t		tx_msg_p;
6136349Sqs148142 	hpi_handle_t		handle;
6146349Sqs148142 	tdc_tdr_head_t		tx_head;
6156349Sqs148142 	uint32_t		pkt_len;
6166349Sqs148142 	uint_t			tx_rd_index;
6176349Sqs148142 	uint16_t		head_index, tail_index;
6186349Sqs148142 	uint8_t			tdc;
6196349Sqs148142 	boolean_t		head_wrap, tail_wrap;
6206349Sqs148142 	p_hxge_tx_ring_stats_t	tdc_stats;
6216349Sqs148142 	tdc_byte_cnt_t		byte_cnt;
6226349Sqs148142 	tdc_tdr_qlen_t		qlen;
6236349Sqs148142 	int			rc;
6246349Sqs148142 
6256349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_reclaim"));
6266349Sqs148142 
6276349Sqs148142 	status = ((tx_ring_p->descs_pending < hxge_reclaim_pending) &&
6286349Sqs148142 	    (nmblks != 0));
6296349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
6306349Sqs148142 	    "==> hxge_txdma_reclaim: pending %d  reclaim %d nmblks %d",
6316349Sqs148142 	    tx_ring_p->descs_pending, hxge_reclaim_pending, nmblks));
6326349Sqs148142 
6336349Sqs148142 	if (!status) {
6346349Sqs148142 		tx_desc_dma_p = &tx_ring_p->tdc_desc;
6356349Sqs148142 		desc_area = tx_ring_p->tdc_desc;
6366349Sqs148142 		tx_desc_ring_vp = tx_desc_dma_p->kaddrp;
6376349Sqs148142 		tx_desc_ring_vp = (p_tx_desc_t)DMA_COMMON_VPTR(desc_area);
6386349Sqs148142 		tx_rd_index = tx_ring_p->rd_index;
6396349Sqs148142 		tx_desc_p = &tx_desc_ring_vp[tx_rd_index];
6406349Sqs148142 		tx_msg_ring = tx_ring_p->tx_msg_ring;
6416349Sqs148142 		tx_msg_p = &tx_msg_ring[tx_rd_index];
6426349Sqs148142 		tdc = tx_ring_p->tdc;
6436349Sqs148142 		tdc_stats = tx_ring_p->tdc_stats;
6446349Sqs148142 		if (tx_ring_p->descs_pending > tdc_stats->tx_max_pend) {
6456349Sqs148142 			tdc_stats->tx_max_pend = tx_ring_p->descs_pending;
6466349Sqs148142 		}
6476349Sqs148142 		tail_index = tx_ring_p->wr_index;
6486349Sqs148142 		tail_wrap = tx_ring_p->wr_index_wrap;
6496349Sqs148142 
6506349Sqs148142 		/*
6516349Sqs148142 		 * The tdc_byte_cnt register can be used to get the number of
6526349Sqs148142 		 * bytes transmitted. It also includes padding for runt packets.
6536349Sqs148142 		 */
6546349Sqs148142 		handle = HXGE_DEV_HPI_HANDLE(hxgep);
6556349Sqs148142 		TXDMA_REG_READ64(handle, TDC_BYTE_CNT, tdc, &byte_cnt.value);
6566349Sqs148142 		tdc_stats->obytes_with_pad += byte_cnt.bits.byte_count;
6576349Sqs148142 
6586349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
6596349Sqs148142 		    "==> hxge_txdma_reclaim: tdc %d tx_rd_index %d "
6606349Sqs148142 		    "tail_index %d tail_wrap %d tx_desc_p $%p ($%p) ",
6616349Sqs148142 		    tdc, tx_rd_index, tail_index, tail_wrap,
6626349Sqs148142 		    tx_desc_p, (*(uint64_t *)tx_desc_p)));
6636349Sqs148142 
6646349Sqs148142 		/*
6656349Sqs148142 		 * Read the hardware maintained transmit head and wrap around
6666349Sqs148142 		 * bit.
6676349Sqs148142 		 */
6686349Sqs148142 		TXDMA_REG_READ64(handle, TDC_TDR_HEAD, tdc, &tx_head.value);
6696349Sqs148142 		head_index = tx_head.bits.head;
6706349Sqs148142 		head_wrap = tx_head.bits.wrap;
6716349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
6726349Sqs148142 		    "==> hxge_txdma_reclaim: "
6736349Sqs148142 		    "tx_rd_index %d tail %d tail_wrap %d head %d wrap %d",
6746349Sqs148142 		    tx_rd_index, tail_index, tail_wrap, head_index, head_wrap));
6756349Sqs148142 
6766349Sqs148142 		/*
6776349Sqs148142 		 * For debug only. This can be used to verify the qlen and make
6786349Sqs148142 		 * sure the hardware is wrapping the Tdr correctly.
6796349Sqs148142 		 */
6806349Sqs148142 		TXDMA_REG_READ64(handle, TDC_TDR_QLEN, tdc, &qlen.value);
6816349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
6826349Sqs148142 		    "==> hxge_txdma_reclaim: tdr_qlen %d tdr_pref_qlen %d",
6836349Sqs148142 		    qlen.bits.tdr_qlen, qlen.bits.tdr_pref_qlen));
6846349Sqs148142 
6856349Sqs148142 		if (head_index == tail_index) {
6866349Sqs148142 			if (TXDMA_RING_EMPTY(head_index, head_wrap, tail_index,
6876349Sqs148142 			    tail_wrap) && (head_index == tx_rd_index)) {
6886349Sqs148142 				HXGE_DEBUG_MSG((hxgep, TX_CTL,
6896349Sqs148142 				    "==> hxge_txdma_reclaim: EMPTY"));
6906349Sqs148142 				return (B_TRUE);
6916349Sqs148142 			}
6926349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
6936349Sqs148142 			    "==> hxge_txdma_reclaim: Checking if ring full"));
6946349Sqs148142 			if (TXDMA_RING_FULL(head_index, head_wrap, tail_index,
6956349Sqs148142 			    tail_wrap)) {
6966349Sqs148142 				HXGE_DEBUG_MSG((hxgep, TX_CTL,
6976349Sqs148142 				    "==> hxge_txdma_reclaim: full"));
6986349Sqs148142 				return (B_FALSE);
6996349Sqs148142 			}
7006349Sqs148142 		}
7016349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
7026349Sqs148142 		    "==> hxge_txdma_reclaim: tx_rd_index and head_index"));
7036349Sqs148142 
7046349Sqs148142 		/* XXXX: limit the # of reclaims */
7056349Sqs148142 		tx_desc_pp = &r_tx_desc;
7066349Sqs148142 		while ((tx_rd_index != head_index) &&
7076349Sqs148142 		    (tx_ring_p->descs_pending != 0)) {
7086349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7096349Sqs148142 			    "==> hxge_txdma_reclaim: Checking if pending"));
7106349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7116349Sqs148142 			    "==> hxge_txdma_reclaim: descs_pending %d ",
7126349Sqs148142 			    tx_ring_p->descs_pending));
7136349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7146349Sqs148142 			    "==> hxge_txdma_reclaim: "
7156349Sqs148142 			    "(tx_rd_index %d head_index %d (tx_desc_p $%p)",
7166349Sqs148142 			    tx_rd_index, head_index, tx_desc_p));
7176349Sqs148142 
7186349Sqs148142 			tx_desc_pp->value = tx_desc_p->value;
7196349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7206349Sqs148142 			    "==> hxge_txdma_reclaim: "
7216349Sqs148142 			    "(tx_rd_index %d head_index %d "
7226349Sqs148142 			    "tx_desc_p $%p (desc value 0x%llx) ",
7236349Sqs148142 			    tx_rd_index, head_index,
7246349Sqs148142 			    tx_desc_pp, (*(uint64_t *)tx_desc_pp)));
7256349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7266349Sqs148142 			    "==> hxge_txdma_reclaim: dump desc:"));
7276349Sqs148142 
7286349Sqs148142 			/*
7296349Sqs148142 			 * tdc_byte_cnt reg can be used to get bytes
7306349Sqs148142 			 * transmitted
7316349Sqs148142 			 */
7326349Sqs148142 			pkt_len = tx_desc_pp->bits.tr_len;
7336349Sqs148142 			tdc_stats->obytes += pkt_len;
7346349Sqs148142 			tdc_stats->opackets += tx_desc_pp->bits.sop;
7356349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7366349Sqs148142 			    "==> hxge_txdma_reclaim: pkt_len %d "
7376349Sqs148142 			    "tdc channel %d opackets %d",
7386349Sqs148142 			    pkt_len, tdc, tdc_stats->opackets));
7396349Sqs148142 
7406349Sqs148142 			if (tx_msg_p->flags.dma_type == USE_DVMA) {
7416349Sqs148142 				HXGE_DEBUG_MSG((hxgep, TX_CTL,
7426349Sqs148142 				    "tx_desc_p = $%p tx_desc_pp = $%p "
7436349Sqs148142 				    "index = %d",
7446349Sqs148142 				    tx_desc_p, tx_desc_pp,
7456349Sqs148142 				    tx_ring_p->rd_index));
7466349Sqs148142 				(void) dvma_unload(tx_msg_p->dvma_handle,
7476349Sqs148142 				    0, -1);
7486349Sqs148142 				tx_msg_p->dvma_handle = NULL;
7496349Sqs148142 				if (tx_ring_p->dvma_wr_index ==
7506349Sqs148142 				    tx_ring_p->dvma_wrap_mask) {
7516349Sqs148142 					tx_ring_p->dvma_wr_index = 0;
7526349Sqs148142 				} else {
7536349Sqs148142 					tx_ring_p->dvma_wr_index++;
7546349Sqs148142 				}
7556349Sqs148142 				tx_ring_p->dvma_pending--;
7566349Sqs148142 			} else if (tx_msg_p->flags.dma_type == USE_DMA) {
7576349Sqs148142 				HXGE_DEBUG_MSG((hxgep, TX_CTL,
7586349Sqs148142 				    "==> hxge_txdma_reclaim: USE DMA"));
7596349Sqs148142 				if (rc = ddi_dma_unbind_handle
7606349Sqs148142 				    (tx_msg_p->dma_handle)) {
7616349Sqs148142 					cmn_err(CE_WARN, "hxge_reclaim: "
7626349Sqs148142 					    "ddi_dma_unbind_handle "
7636349Sqs148142 					    "failed. status %d", rc);
7646349Sqs148142 				}
7656349Sqs148142 			}
7666349Sqs148142 
7676349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
7686349Sqs148142 			    "==> hxge_txdma_reclaim: count packets"));
7696349Sqs148142 
7706349Sqs148142 			/*
7716349Sqs148142 			 * count a chained packet only once.
7726349Sqs148142 			 */
7736349Sqs148142 			if (tx_msg_p->tx_message != NULL) {
7746349Sqs148142 				freemsg(tx_msg_p->tx_message);
7756349Sqs148142 				tx_msg_p->tx_message = NULL;
7766349Sqs148142 			}
7776349Sqs148142 			tx_msg_p->flags.dma_type = USE_NONE;
7786349Sqs148142 			tx_rd_index = tx_ring_p->rd_index;
7796349Sqs148142 			tx_rd_index = (tx_rd_index + 1) &
7806349Sqs148142 			    tx_ring_p->tx_wrap_mask;
7816349Sqs148142 			tx_ring_p->rd_index = tx_rd_index;
7826349Sqs148142 			tx_ring_p->descs_pending--;
7836349Sqs148142 			tx_desc_p = &tx_desc_ring_vp[tx_rd_index];
7846349Sqs148142 			tx_msg_p = &tx_msg_ring[tx_rd_index];
7856349Sqs148142 		}
7866349Sqs148142 
7878718SMichael.Speer@Sun.COM 		status = (nmblks <= ((int)tx_ring_p->tx_ring_size -
7888718SMichael.Speer@Sun.COM 		    (int)tx_ring_p->descs_pending - TX_FULL_MARK));
7896349Sqs148142 		if (status) {
790*11387SSurya.Prakki@Sun.COM 			(void) cas32((uint32_t *)&tx_ring_p->queueing, 1, 0);
7916349Sqs148142 		}
7926349Sqs148142 	} else {
7938718SMichael.Speer@Sun.COM 		status = (nmblks <= ((int)tx_ring_p->tx_ring_size -
7948718SMichael.Speer@Sun.COM 		    (int)tx_ring_p->descs_pending - TX_FULL_MARK));
7956349Sqs148142 	}
7966349Sqs148142 
7976349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
7986349Sqs148142 	    "<== hxge_txdma_reclaim status = 0x%08x", status));
7996349Sqs148142 	return (status);
8006349Sqs148142 }
8016349Sqs148142 
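/*
 * hxge_tx_intr
 *	Interrupt handler for one transmit DMA channel. Reads the channel's
 *	control/status register, reclaims completed descriptors when the
 *	"marked" bit is set, processes error events, clears the status bits
 *	and rearms the logical device group when this is its only device.
 */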
8026349Sqs148142 uint_t
8036349Sqs148142 hxge_tx_intr(caddr_t arg1, caddr_t arg2)
8046349Sqs148142 {
8056349Sqs148142 	p_hxge_ldv_t	ldvp = (p_hxge_ldv_t)arg1;
8066349Sqs148142 	p_hxge_t	hxgep = (p_hxge_t)arg2;
8076349Sqs148142 	p_hxge_ldg_t	ldgp;
8086349Sqs148142 	uint8_t		channel;
8096349Sqs148142 	uint32_t	vindex;
8106349Sqs148142 	hpi_handle_t	handle;
8116349Sqs148142 	tdc_stat_t	cs;
8126349Sqs148142 	p_tx_ring_t	*tx_rings;
8136349Sqs148142 	p_tx_ring_t	tx_ring_p;
8146349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
8156349Sqs148142 	uint_t		serviced = DDI_INTR_UNCLAIMED;
8166349Sqs148142 	hxge_status_t	status = HXGE_OK;
8176349Sqs148142 
8186349Sqs148142 	if (ldvp == NULL) {
8196349Sqs148142 		HXGE_DEBUG_MSG((NULL, INT_CTL,
8206349Sqs148142 		    "<== hxge_tx_intr: hxgep $%p ldvp $%p", hxgep, ldvp));
8216349Sqs148142 		return (DDI_INTR_UNCLAIMED);
8226349Sqs148142 	}
8236349Sqs148142 
8246349Sqs148142 	if (arg2 == NULL || (void *) ldvp->hxgep != arg2) {
8256349Sqs148142 		hxgep = ldvp->hxgep;
8266349Sqs148142 	}
8276349Sqs148142 
8287465SMichael.Speer@Sun.COM 	/*
8297465SMichael.Speer@Sun.COM 	 * If the interface is not started, just swallow the interrupt
8307465SMichael.Speer@Sun.COM 	 * and don't rearm the logical device.
8317465SMichael.Speer@Sun.COM 	 */
8327465SMichael.Speer@Sun.COM 	if (hxgep->hxge_mac_state != HXGE_MAC_STARTED)
8337465SMichael.Speer@Sun.COM 		return (DDI_INTR_CLAIMED);
8347465SMichael.Speer@Sun.COM 
8356349Sqs148142 	HXGE_DEBUG_MSG((hxgep, INT_CTL,
8366349Sqs148142 	    "==> hxge_tx_intr: hxgep(arg2) $%p ldvp(arg1) $%p", hxgep, ldvp));
8376349Sqs148142 
8386349Sqs148142 	/*
8396349Sqs148142 	 * This interrupt handler is for a specific transmit dma channel.
8406349Sqs148142 	 */
8416349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
8426349Sqs148142 
8436349Sqs148142 	/* Get the control and status for this channel. */
8446349Sqs148142 	channel = ldvp->channel;
8456349Sqs148142 	ldgp = ldvp->ldgp;
8466349Sqs148142 	HXGE_DEBUG_MSG((hxgep, INT_CTL,
8476349Sqs148142 	    "==> hxge_tx_intr: hxgep $%p ldvp (ldvp) $%p channel %d",
8486349Sqs148142 	    hxgep, ldvp, channel));
8496349Sqs148142 
8506349Sqs148142 	rs = hpi_txdma_control_status(handle, OP_GET, channel, &cs);
8516349Sqs148142 	vindex = ldvp->vdma_index;
8526349Sqs148142 	HXGE_DEBUG_MSG((hxgep, INT_CTL,
8536349Sqs148142 	    "==> hxge_tx_intr:channel %d ring index %d status 0x%08x",
8546349Sqs148142 	    channel, vindex, rs));
8556349Sqs148142 
8566349Sqs148142 	if (!rs && cs.bits.marked) {
8576349Sqs148142 		HXGE_DEBUG_MSG((hxgep, INT_CTL,
8586349Sqs148142 		    "==> hxge_tx_intr:channel %d ring index %d "
8596349Sqs148142 		    "status 0x%08x (marked bit set)", channel, vindex, rs));
8606349Sqs148142 		tx_rings = hxgep->tx_rings->rings;
8616349Sqs148142 		tx_ring_p = tx_rings[vindex];
8626349Sqs148142 		HXGE_DEBUG_MSG((hxgep, INT_CTL,
8636349Sqs148142 		    "==> hxge_tx_intr:channel %d ring index %d "
8646349Sqs148142 		    "status 0x%08x (marked bit set, calling reclaim)",
8656349Sqs148142 		    channel, vindex, rs));
8666349Sqs148142 
8676349Sqs148142 		MUTEX_ENTER(&tx_ring_p->lock);
8686349Sqs148142 		(void) hxge_txdma_reclaim(hxgep, tx_rings[vindex], 0);
8696349Sqs148142 		MUTEX_EXIT(&tx_ring_p->lock);
8706349Sqs148142 		mac_tx_update(hxgep->mach);
8716349Sqs148142 	}
8726349Sqs148142 
8736349Sqs148142 	/*
8746349Sqs148142 	 * Process other transmit control and status. Check the ldv state.
8756349Sqs148142 	 */
8766349Sqs148142 	status = hxge_tx_err_evnts(hxgep, ldvp->vdma_index, ldvp, cs);
8776349Sqs148142 
8786349Sqs148142 	/* Clear the error bits */
8796349Sqs148142 	RXDMA_REG_WRITE64(handle, TDC_STAT, channel, cs.value);
8806349Sqs148142 
8816349Sqs148142 	/*
8826349Sqs148142 	 * Rearm this logical group if this is a single device group.
8836349Sqs148142 	 */
8846349Sqs148142 	if (ldgp->nldvs == 1) {
8856349Sqs148142 		HXGE_DEBUG_MSG((hxgep, INT_CTL, "==> hxge_tx_intr: rearm"));
8866349Sqs148142 		if (status == HXGE_OK) {
8876349Sqs148142 			(void) hpi_intr_ldg_mgmt_set(handle, ldgp->ldg,
8886349Sqs148142 			    B_TRUE, ldgp->ldg_timer);
8896349Sqs148142 		}
8906349Sqs148142 	}
8916349Sqs148142 	HXGE_DEBUG_MSG((hxgep, INT_CTL, "<== hxge_tx_intr"));
8926349Sqs148142 	serviced = DDI_INTR_CLAIMED;
8936349Sqs148142 	return (serviced);
8946349Sqs148142 }
8956349Sqs148142 
8966349Sqs148142 void
8976349Sqs148142 hxge_txdma_stop(p_hxge_t hxgep)
8986349Sqs148142 {
8996349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_stop"));
9006349Sqs148142 
9016349Sqs148142 	(void) hxge_tx_vmac_disable(hxgep);
9026349Sqs148142 	(void) hxge_txdma_hw_mode(hxgep, HXGE_DMA_STOP);
9036349Sqs148142 
9046349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_stop"));
9056349Sqs148142 }
9066349Sqs148142 
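/*
 * hxge_txdma_hw_mode
 *	Enable or disable every configured transmit DMA channel according to
 *	the enable flag. Returns HXGE_ERROR when the hardware has not been
 *	initialized or no channels have been allocated.
 */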
9076349Sqs148142 hxge_status_t
9086349Sqs148142 hxge_txdma_hw_mode(p_hxge_t hxgep, boolean_t enable)
9096349Sqs148142 {
9106349Sqs148142 	int		i, ndmas;
9116349Sqs148142 	uint16_t	channel;
9126349Sqs148142 	p_tx_rings_t	tx_rings;
9136349Sqs148142 	p_tx_ring_t	*tx_desc_rings;
9146349Sqs148142 	hpi_handle_t	handle;
9156349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
9166349Sqs148142 	hxge_status_t	status = HXGE_OK;
9176349Sqs148142 
9186349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
9196349Sqs148142 	    "==> hxge_txdma_hw_mode: enable mode %d", enable));
9206349Sqs148142 
9216349Sqs148142 	if (!(hxgep->drv_state & STATE_HW_INITIALIZED)) {
9226349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
9236349Sqs148142 		    "<== hxge_txdma_mode: not initialized"));
9246349Sqs148142 		return (HXGE_ERROR);
9256349Sqs148142 	}
9266349Sqs148142 	tx_rings = hxgep->tx_rings;
9276349Sqs148142 	if (tx_rings == NULL) {
9286349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
9296349Sqs148142 		    "<== hxge_txdma_hw_mode: NULL global ring pointer"));
9306349Sqs148142 		return (HXGE_ERROR);
9316349Sqs148142 	}
9326349Sqs148142 	tx_desc_rings = tx_rings->rings;
9336349Sqs148142 	if (tx_desc_rings == NULL) {
9346349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
9356349Sqs148142 		    "<== hxge_txdma_hw_mode: NULL rings pointer"));
9366349Sqs148142 		return (HXGE_ERROR);
9376349Sqs148142 	}
9386349Sqs148142 	ndmas = tx_rings->ndmas;
9396349Sqs148142 	if (!ndmas) {
9406349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
9416349Sqs148142 		    "<== hxge_txdma_hw_mode: no dma channel allocated"));
9426349Sqs148142 		return (HXGE_ERROR);
9436349Sqs148142 	}
9446349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_mode: "
9456349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
9466349Sqs148142 	    tx_rings, tx_desc_rings, ndmas));
9476349Sqs148142 
9486349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
9496349Sqs148142 	for (i = 0; i < ndmas; i++) {
9506349Sqs148142 		if (tx_desc_rings[i] == NULL) {
9516349Sqs148142 			continue;
9526349Sqs148142 		}
9536349Sqs148142 		channel = tx_desc_rings[i]->tdc;
9546349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
9556349Sqs148142 		    "==> hxge_txdma_hw_mode: channel %d", channel));
9566349Sqs148142 		if (enable) {
9576349Sqs148142 			rs = hpi_txdma_channel_enable(handle, channel);
9586349Sqs148142 			HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
9596349Sqs148142 			    "==> hxge_txdma_hw_mode: channel %d (enable) "
9606349Sqs148142 			    "rs 0x%x", channel, rs));
9616349Sqs148142 		} else {
9626349Sqs148142 			/*
9636349Sqs148142 			 * Stop the dma channel and wait for the stop done. If
9646349Sqs148142 			 * the stop done bit is not set, then force an error so
9656349Sqs148142 			 * TXC will stop. All channels bound to this port need
9666349Sqs148142 			 * to be stopped and reset after injecting an interrupt
9676349Sqs148142 			 * error.
9686349Sqs148142 			 */
9696349Sqs148142 			rs = hpi_txdma_channel_disable(handle, channel);
9706349Sqs148142 			HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
9716349Sqs148142 			    "==> hxge_txdma_hw_mode: channel %d (disable) "
9726349Sqs148142 			    "rs 0x%x", channel, rs));
9736349Sqs148142 		}
9746349Sqs148142 	}
9756349Sqs148142 
9766349Sqs148142 	status = ((rs == HPI_SUCCESS) ? HXGE_OK : HXGE_ERROR | rs);
9776349Sqs148142 
9786349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
9796349Sqs148142 	    "<== hxge_txdma_hw_mode: status 0x%x", status));
9806349Sqs148142 
9816349Sqs148142 	return (status);
9826349Sqs148142 }
9836349Sqs148142 
9846349Sqs148142 void
9856349Sqs148142 hxge_txdma_enable_channel(p_hxge_t hxgep, uint16_t channel)
9866349Sqs148142 {
9876349Sqs148142 	hpi_handle_t handle;
9886349Sqs148142 
9896349Sqs148142 	HXGE_DEBUG_MSG((hxgep, DMA_CTL,
9906349Sqs148142 	    "==> hxge_txdma_enable_channel: channel %d", channel));
9916349Sqs148142 
9926349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
9936349Sqs148142 	/* enable the transmit dma channels */
9946349Sqs148142 	(void) hpi_txdma_channel_enable(handle, channel);
9956349Sqs148142 
9966349Sqs148142 	HXGE_DEBUG_MSG((hxgep, DMA_CTL, "<== hxge_txdma_enable_channel"));
9976349Sqs148142 }
9986349Sqs148142 
9996349Sqs148142 void
10006349Sqs148142 hxge_txdma_disable_channel(p_hxge_t hxgep, uint16_t channel)
10016349Sqs148142 {
10026349Sqs148142 	hpi_handle_t handle;
10036349Sqs148142 
10046349Sqs148142 	HXGE_DEBUG_MSG((hxgep, DMA_CTL,
10056349Sqs148142 	    "==> hxge_txdma_disable_channel: channel %d", channel));
10066349Sqs148142 
10076349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
10086349Sqs148142 	/* stop the transmit dma channels */
10096349Sqs148142 	(void) hpi_txdma_channel_disable(handle, channel);
10106349Sqs148142 
10116349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_disable_channel"));
10126349Sqs148142 }
10136349Sqs148142 
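/*
 * hxge_txdma_stop_inj_err
 *	Disable one transmit DMA channel and report whether the stop
 *	completed; on failure, log that an error was injected but the channel
 *	still did not stop.
 */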
10146349Sqs148142 int
10156349Sqs148142 hxge_txdma_stop_inj_err(p_hxge_t hxgep, int channel)
10166349Sqs148142 {
10176349Sqs148142 	hpi_handle_t	handle;
10186349Sqs148142 	int		status;
10196349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
10206349Sqs148142 
10216349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_stop_inj_err"));
10226349Sqs148142 
10236349Sqs148142 	/*
10246349Sqs148142 	 * Stop the dma channel and wait for the stop done. If the stop done
10256349Sqs148142 	 * bit is not set, then inject an error.
10266349Sqs148142 	 */
10276349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
10286349Sqs148142 	rs = hpi_txdma_channel_disable(handle, channel);
10296349Sqs148142 	status = ((rs == HPI_SUCCESS) ? HXGE_OK : HXGE_ERROR | rs);
10306349Sqs148142 	if (status == HXGE_OK) {
10316349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
10326349Sqs148142 		    "<== hxge_txdma_stop_inj_err (channel %d): "
10336349Sqs148142 		    "stopped OK", channel));
10346349Sqs148142 		return (status);
10356349Sqs148142 	}
10366349Sqs148142 
10376349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
10386349Sqs148142 	    "==> hxge_txdma_stop_inj_err (channel %d): stop failed (0x%x) "
10396349Sqs148142 	    " (injected error but still not stopped)", channel, rs));
10406349Sqs148142 
10416349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_stop_inj_err"));
10426349Sqs148142 
10436349Sqs148142 	return (status);
10446349Sqs148142 }
10456349Sqs148142 
10466349Sqs148142 /*ARGSUSED*/
10476349Sqs148142 void
10486349Sqs148142 hxge_fixup_txdma_rings(p_hxge_t hxgep)
10496349Sqs148142 {
10506349Sqs148142 	int		index, ndmas;
10516349Sqs148142 	uint16_t	channel;
10526349Sqs148142 	p_tx_rings_t	tx_rings;
10536349Sqs148142 
10546349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_fixup_txdma_rings"));
10556349Sqs148142 
10566349Sqs148142 	/*
10576349Sqs148142 	 * For each transmit channel, reclaim each descriptor and free buffers.
10586349Sqs148142 	 */
10596349Sqs148142 	tx_rings = hxgep->tx_rings;
10606349Sqs148142 	if (tx_rings == NULL) {
10616349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
10626349Sqs148142 		    "<== hxge_fixup_txdma_rings: NULL ring pointer"));
10636349Sqs148142 		return;
10646349Sqs148142 	}
10656349Sqs148142 
10666349Sqs148142 	ndmas = tx_rings->ndmas;
10676349Sqs148142 	if (!ndmas) {
10686349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
10696349Sqs148142 		    "<== hxge_fixup_txdma_rings: no channel allocated"));
10706349Sqs148142 		return;
10716349Sqs148142 	}
10726349Sqs148142 
10736349Sqs148142 	if (tx_rings->rings == NULL) {
10746349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
10756349Sqs148142 		    "<== hxge_fixup_txdma_rings: NULL rings pointer"));
10766349Sqs148142 		return;
10776349Sqs148142 	}
10786349Sqs148142 
10796349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_fixup_txdma_rings: "
10806349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
10816349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
10826349Sqs148142 
10836349Sqs148142 	for (index = 0; index < ndmas; index++) {
10846349Sqs148142 		channel = tx_rings->rings[index]->tdc;
10856349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
10866349Sqs148142 		    "==> hxge_fixup_txdma_rings: channel %d", channel));
10876349Sqs148142 		hxge_txdma_fixup_channel(hxgep, tx_rings->rings[index],
10886349Sqs148142 		    channel);
10896349Sqs148142 	}
10906349Sqs148142 
10916349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_fixup_txdma_rings"));
10926349Sqs148142 }
10936349Sqs148142 
10946349Sqs148142 /*ARGSUSED*/
10956349Sqs148142 void
10966349Sqs148142 hxge_txdma_fix_channel(p_hxge_t hxgep, uint16_t channel)
10976349Sqs148142 {
10986349Sqs148142 	p_tx_ring_t ring_p;
10996349Sqs148142 
11006349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_fix_channel"));
11016349Sqs148142 
11026349Sqs148142 	ring_p = hxge_txdma_get_ring(hxgep, channel);
11036349Sqs148142 	if (ring_p == NULL) {
11046349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_fix_channel"));
11056349Sqs148142 		return;
11066349Sqs148142 	}
11076349Sqs148142 
11086349Sqs148142 	if (ring_p->tdc != channel) {
11096349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11106349Sqs148142 		    "<== hxge_txdma_fix_channel: channel not matched "
11116349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
11126349Sqs148142 		return;
11136349Sqs148142 	}
11146349Sqs148142 
11156349Sqs148142 	hxge_txdma_fixup_channel(hxgep, ring_p, channel);
11166349Sqs148142 
11176349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_fix_channel"));
11186349Sqs148142 }
11196349Sqs148142 
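/*
 * hxge_txdma_fixup_channel
 *	Reclaim any outstanding descriptors on the given ring and reset its
 *	software state: read/write indexes, head and kick shadows, and the
 *	pending descriptor count.
 */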
11206349Sqs148142 /*ARGSUSED*/
11216349Sqs148142 void
11226349Sqs148142 hxge_txdma_fixup_channel(p_hxge_t hxgep, p_tx_ring_t ring_p, uint16_t channel)
11236349Sqs148142 {
11246349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_fixup_channel"));
11256349Sqs148142 
11266349Sqs148142 	if (ring_p == NULL) {
11276349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11286349Sqs148142 		    "<== hxge_txdma_fixup_channel: NULL ring pointer"));
11296349Sqs148142 		return;
11306349Sqs148142 	}
11316349Sqs148142 	if (ring_p->tdc != channel) {
11326349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11336349Sqs148142 		    "<== hxge_txdma_fixup_channel: channel not matched "
11346349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
11356349Sqs148142 		return;
11366349Sqs148142 	}
11376349Sqs148142 	MUTEX_ENTER(&ring_p->lock);
11386349Sqs148142 	(void) hxge_txdma_reclaim(hxgep, ring_p, 0);
11396349Sqs148142 
11406349Sqs148142 	ring_p->rd_index = 0;
11416349Sqs148142 	ring_p->wr_index = 0;
11426349Sqs148142 	ring_p->ring_head.value = 0;
11436349Sqs148142 	ring_p->ring_kick_tail.value = 0;
11446349Sqs148142 	ring_p->descs_pending = 0;
11456349Sqs148142 	MUTEX_EXIT(&ring_p->lock);
11466349Sqs148142 
11476349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_fixup_channel"));
11486349Sqs148142 }
11496349Sqs148142 
11506349Sqs148142 /*ARGSUSED*/
11516349Sqs148142 void
11526349Sqs148142 hxge_txdma_hw_kick(p_hxge_t hxgep)
11536349Sqs148142 {
11546349Sqs148142 	int		index, ndmas;
11556349Sqs148142 	uint16_t	channel;
11566349Sqs148142 	p_tx_rings_t	tx_rings;
11576349Sqs148142 
11586349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_hw_kick"));
11596349Sqs148142 
11606349Sqs148142 	tx_rings = hxgep->tx_rings;
11616349Sqs148142 	if (tx_rings == NULL) {
11626349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11636349Sqs148142 		    "<== hxge_txdma_hw_kick: NULL ring pointer"));
11646349Sqs148142 		return;
11656349Sqs148142 	}
11666349Sqs148142 	ndmas = tx_rings->ndmas;
11676349Sqs148142 	if (!ndmas) {
11686349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11696349Sqs148142 		    "<== hxge_txdma_hw_kick: no channel allocated"));
11706349Sqs148142 		return;
11716349Sqs148142 	}
11726349Sqs148142 	if (tx_rings->rings == NULL) {
11736349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
11746349Sqs148142 		    "<== hxge_txdma_hw_kick: NULL rings pointer"));
11756349Sqs148142 		return;
11766349Sqs148142 	}
11776349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_kick: "
11786349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
11796349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
11806349Sqs148142 
11816349Sqs148142 	for (index = 0; index < ndmas; index++) {
11826349Sqs148142 		channel = tx_rings->rings[index]->tdc;
11836349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
11846349Sqs148142 		    "==> hxge_txdma_hw_kick: channel %d", channel));
11856349Sqs148142 		hxge_txdma_hw_kick_channel(hxgep, tx_rings->rings[index],
11866349Sqs148142 		    channel);
11876349Sqs148142 	}
11886349Sqs148142 
11896349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_hw_kick"));
11906349Sqs148142 }
11916349Sqs148142 
11926349Sqs148142 /*ARGSUSED*/
11936349Sqs148142 void
11946349Sqs148142 hxge_txdma_kick_channel(p_hxge_t hxgep, uint16_t channel)
11956349Sqs148142 {
11966349Sqs148142 	p_tx_ring_t ring_p;
11976349Sqs148142 
11986349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_kick_channel"));
11996349Sqs148142 
12006349Sqs148142 	ring_p = hxge_txdma_get_ring(hxgep, channel);
12016349Sqs148142 	if (ring_p == NULL) {
12026349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_kick_channel"));
12036349Sqs148142 		return;
12046349Sqs148142 	}
12056349Sqs148142 
12066349Sqs148142 	if (ring_p->tdc != channel) {
12076349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12086349Sqs148142 		    "<== hxge_txdma_kick_channel: channel not matched "
12096349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
12106349Sqs148142 		return;
12116349Sqs148142 	}
12126349Sqs148142 
12136349Sqs148142 	hxge_txdma_hw_kick_channel(hxgep, ring_p, channel);
12146349Sqs148142 
12156349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_kick_channel"));
12166349Sqs148142 }
12176349Sqs148142 
12186349Sqs148142 /*ARGSUSED*/
12196349Sqs148142 void
12206349Sqs148142 hxge_txdma_hw_kick_channel(p_hxge_t hxgep, p_tx_ring_t ring_p, uint16_t channel)
12216349Sqs148142 {
12226349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_hw_kick_channel"));
12236349Sqs148142 
12246349Sqs148142 	if (ring_p == NULL) {
12256349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12266349Sqs148142 		    "<== hxge_txdma_hw_kick_channel: NULL ring pointer"));
12276349Sqs148142 		return;
12286349Sqs148142 	}
12296349Sqs148142 
12306349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_hw_kick_channel"));
12316349Sqs148142 }
12326349Sqs148142 
12336349Sqs148142 /*ARGSUSED*/
12346349Sqs148142 void
12356349Sqs148142 hxge_check_tx_hang(p_hxge_t hxgep)
12366349Sqs148142 {
12376349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_check_tx_hang"));
12386349Sqs148142 
12396349Sqs148142 	/*
12406349Sqs148142 	 * Needs inputs from hardware registers: the head index has not moved
12416349Sqs148142 	 * since the last timeout, or packets were not transmitted (stuffed regs).
12426349Sqs148142 	 */
12436349Sqs148142 	if (hxge_txdma_hung(hxgep)) {
12446349Sqs148142 		hxge_fixup_hung_txdma_rings(hxgep);
12456349Sqs148142 	}
12466349Sqs148142 
12476349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_check_tx_hang"));
12486349Sqs148142 }
12496349Sqs148142 
12506349Sqs148142 int
12516349Sqs148142 hxge_txdma_hung(p_hxge_t hxgep)
12526349Sqs148142 {
12536349Sqs148142 	int		index, ndmas;
12546349Sqs148142 	uint16_t	channel;
12556349Sqs148142 	p_tx_rings_t	tx_rings;
12566349Sqs148142 	p_tx_ring_t	tx_ring_p;
12576349Sqs148142 
12586349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_hung"));
12596349Sqs148142 
12606349Sqs148142 	tx_rings = hxgep->tx_rings;
12616349Sqs148142 	if (tx_rings == NULL) {
12626349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12636349Sqs148142 		    "<== hxge_txdma_hung: NULL ring pointer"));
12646349Sqs148142 		return (B_FALSE);
12656349Sqs148142 	}
12666349Sqs148142 
12676349Sqs148142 	ndmas = tx_rings->ndmas;
12686349Sqs148142 	if (!ndmas) {
12696349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12706349Sqs148142 		    "<== hxge_txdma_hung: no channel allocated"));
12716349Sqs148142 		return (B_FALSE);
12726349Sqs148142 	}
12736349Sqs148142 
12746349Sqs148142 	if (tx_rings->rings == NULL) {
12756349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12766349Sqs148142 		    "<== hxge_txdma_hung: NULL rings pointer"));
12776349Sqs148142 		return (B_FALSE);
12786349Sqs148142 	}
12796349Sqs148142 
12806349Sqs148142 	for (index = 0; index < ndmas; index++) {
12816349Sqs148142 		channel = tx_rings->rings[index]->tdc;
12826349Sqs148142 		tx_ring_p = tx_rings->rings[index];
12836349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
12846349Sqs148142 		    "==> hxge_txdma_hung: channel %d", channel));
12856349Sqs148142 		if (hxge_txdma_channel_hung(hxgep, tx_ring_p, channel)) {
12866349Sqs148142 			return (B_TRUE);
12876349Sqs148142 		}
12886349Sqs148142 	}
12896349Sqs148142 
12906349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_hung"));
12916349Sqs148142 
12926349Sqs148142 	return (B_FALSE);
12936349Sqs148142 }
12946349Sqs148142 
12956349Sqs148142 int
12966349Sqs148142 hxge_txdma_channel_hung(p_hxge_t hxgep, p_tx_ring_t tx_ring_p, uint16_t channel)
12976349Sqs148142 {
12986349Sqs148142 	uint16_t	head_index, tail_index;
12996349Sqs148142 	boolean_t	head_wrap, tail_wrap;
13006349Sqs148142 	hpi_handle_t	handle;
13016349Sqs148142 	tdc_tdr_head_t	tx_head;
13026349Sqs148142 	uint_t		tx_rd_index;
13036349Sqs148142 
13046349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_channel_hung"));
13056349Sqs148142 
13066349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
13076349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
13086349Sqs148142 	    "==> hxge_txdma_channel_hung: channel %d", channel));
13096349Sqs148142 	MUTEX_ENTER(&tx_ring_p->lock);
13106349Sqs148142 	(void) hxge_txdma_reclaim(hxgep, tx_ring_p, 0);
13116349Sqs148142 
13126349Sqs148142 	tail_index = tx_ring_p->wr_index;
13136349Sqs148142 	tail_wrap = tx_ring_p->wr_index_wrap;
13146349Sqs148142 	tx_rd_index = tx_ring_p->rd_index;
13156349Sqs148142 	MUTEX_EXIT(&tx_ring_p->lock);
13166349Sqs148142 
13176349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
13186349Sqs148142 	    "==> hxge_txdma_channel_hung: tdc %d tx_rd_index %d "
13196349Sqs148142 	    "tail_index %d tail_wrap %d ",
13206349Sqs148142 	    channel, tx_rd_index, tail_index, tail_wrap));
13216349Sqs148142 	/*
13226349Sqs148142 	 * Read the hardware maintained transmit head and wrap around bit.
13236349Sqs148142 	 */
13246349Sqs148142 	(void) hpi_txdma_ring_head_get(handle, channel, &tx_head);
13256349Sqs148142 	head_index = tx_head.bits.head;
13266349Sqs148142 	head_wrap = tx_head.bits.wrap;
13276349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_channel_hung: "
13286349Sqs148142 	    "tx_rd_index %d tail %d tail_wrap %d head %d wrap %d",
13296349Sqs148142 	    tx_rd_index, tail_index, tail_wrap, head_index, head_wrap));
13306349Sqs148142 
13316349Sqs148142 	if (TXDMA_RING_EMPTY(head_index, head_wrap, tail_index, tail_wrap) &&
13326349Sqs148142 	    (head_index == tx_rd_index)) {
13336349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13346349Sqs148142 		    "==> hxge_txdma_channel_hung: EMPTY"));
13356349Sqs148142 		return (B_FALSE);
13366349Sqs148142 	}
13376349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
13386349Sqs148142 	    "==> hxge_txdma_channel_hung: Checking if ring full"));
13396349Sqs148142 	if (TXDMA_RING_FULL(head_index, head_wrap, tail_index, tail_wrap)) {
13406349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13416349Sqs148142 		    "==> hxge_txdma_channel_hung: full"));
13426349Sqs148142 		return (B_TRUE);
13436349Sqs148142 	}
13446349Sqs148142 
13456349Sqs148142 	/* If not full, check with hardware to see if it is hung */
13466349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_channel_hung"));
13476349Sqs148142 
13486349Sqs148142 	return (B_FALSE);
13496349Sqs148142 }
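
/*
 * Note on the empty/full tests above (a sketch, not taken from the hxge
 * headers): TXDMA_RING_EMPTY and TXDMA_RING_FULL conventionally compare
 * the hardware head against the software tail together with their wrap
 * bits, roughly:
 *
 *	empty: head == tail && head_wrap == tail_wrap
 *	full:  head == tail && head_wrap != tail_wrap
 *
 * Equal indexes with equal wrap bits mean no descriptors are outstanding;
 * equal indexes with opposite wrap bits mean the producer has lapped the
 * consumer and the ring is full.
 */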
13506349Sqs148142 
13516349Sqs148142 /*ARGSUSED*/
13526349Sqs148142 void
13536349Sqs148142 hxge_fixup_hung_txdma_rings(p_hxge_t hxgep)
13546349Sqs148142 {
13556349Sqs148142 	int		index, ndmas;
13566349Sqs148142 	uint16_t	channel;
13576349Sqs148142 	p_tx_rings_t	tx_rings;
13586349Sqs148142 
13596349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_fixup_hung_txdma_rings"));
13606349Sqs148142 	tx_rings = hxgep->tx_rings;
13616349Sqs148142 	if (tx_rings == NULL) {
13626349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13636349Sqs148142 		    "<== hxge_fixup_hung_txdma_rings: NULL ring pointer"));
13646349Sqs148142 		return;
13656349Sqs148142 	}
13666349Sqs148142 	ndmas = tx_rings->ndmas;
13676349Sqs148142 	if (!ndmas) {
13686349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13696349Sqs148142 		    "<== hxge_fixup_hung_txdma_rings: no channel allocated"));
13706349Sqs148142 		return;
13716349Sqs148142 	}
13726349Sqs148142 	if (tx_rings->rings == NULL) {
13736349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13746349Sqs148142 		    "<== hxge_fixup_hung_txdma_rings: NULL rings pointer"));
13756349Sqs148142 		return;
13766349Sqs148142 	}
13776349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_fixup_hung_txdma_rings: "
13786349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
13796349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
13806349Sqs148142 
13816349Sqs148142 	for (index = 0; index < ndmas; index++) {
13826349Sqs148142 		channel = tx_rings->rings[index]->tdc;
13836349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
13846349Sqs148142 		    "==> hxge_fixup_hung_txdma_rings: channel %d", channel));
13856349Sqs148142 		hxge_txdma_fixup_hung_channel(hxgep, tx_rings->rings[index],
13866349Sqs148142 		    channel);
13876349Sqs148142 	}
13886349Sqs148142 
13896349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_fixup_hung_txdma_rings"));
13906349Sqs148142 }
13916349Sqs148142 
13926349Sqs148142 /*ARGSUSED*/
13936349Sqs148142 void
13946349Sqs148142 hxge_txdma_fix_hung_channel(p_hxge_t hxgep, uint16_t channel)
13956349Sqs148142 {
13966349Sqs148142 	p_tx_ring_t ring_p;
13976349Sqs148142 
13986349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_fix_hung_channel"));
13996349Sqs148142 	ring_p = hxge_txdma_get_ring(hxgep, channel);
14006349Sqs148142 	if (ring_p == NULL) {
14016349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14026349Sqs148142 		    "<== hxge_txdma_fix_hung_channel"));
14036349Sqs148142 		return;
14046349Sqs148142 	}
14056349Sqs148142 	if (ring_p->tdc != channel) {
14066349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14076349Sqs148142 		    "<== hxge_txdma_fix_hung_channel: channel not matched "
14086349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
14096349Sqs148142 		return;
14106349Sqs148142 	}
14116349Sqs148142 	hxge_txdma_fixup_channel(hxgep, ring_p, channel);
14126349Sqs148142 
14136349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_fix_hung_channel"));
14146349Sqs148142 }
14156349Sqs148142 
14166349Sqs148142 /*ARGSUSED*/
14176349Sqs148142 void
14186349Sqs148142 hxge_txdma_fixup_hung_channel(p_hxge_t hxgep, p_tx_ring_t ring_p,
14196349Sqs148142     uint16_t channel)
14206349Sqs148142 {
14216349Sqs148142 	hpi_handle_t	handle;
14226349Sqs148142 	int		status = HXGE_OK;
14236349Sqs148142 
14246349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_fixup_hung_channel"));
14256349Sqs148142 
14266349Sqs148142 	if (ring_p == NULL) {
14276349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14286349Sqs148142 		    "<== hxge_txdma_fixup_hung_channel: NULL ring pointer"));
14296349Sqs148142 		return;
14306349Sqs148142 	}
14316349Sqs148142 	if (ring_p->tdc != channel) {
14326349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14336349Sqs148142 		    "<== hxge_txdma_fixup_hung_channel: channel "
14346349Sqs148142 		    "not matched ring tdc %d passed channel %d",
14356349Sqs148142 		    ring_p->tdc, channel));
14366349Sqs148142 		return;
14376349Sqs148142 	}
14386349Sqs148142 	/* Reclaim descriptors */
14396349Sqs148142 	MUTEX_ENTER(&ring_p->lock);
14406349Sqs148142 	(void) hxge_txdma_reclaim(hxgep, ring_p, 0);
14416349Sqs148142 	MUTEX_EXIT(&ring_p->lock);
14426349Sqs148142 
14436349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
14446349Sqs148142 	/*
14456349Sqs148142 	 * Stop the DMA channel and wait for the stop-done indication. If the
14466349Sqs148142 	 * stop done bit is not set, then force an error.
14476349Sqs148142 	 */
14486349Sqs148142 	status = hpi_txdma_channel_disable(handle, channel);
14496349Sqs148142 	if (!(status & HPI_TXDMA_STOP_FAILED)) {
14506349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14516349Sqs148142 		    "<== hxge_txdma_fixup_hung_channel: stopped OK "
14526349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
14536349Sqs148142 		return;
14546349Sqs148142 	}
14556349Sqs148142 	/* Stop done bit will be set as a result of error injection */
14566349Sqs148142 	status = hpi_txdma_channel_disable(handle, channel);
14576349Sqs148142 	if (!(status & HPI_TXDMA_STOP_FAILED)) {
14586349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14596349Sqs148142 		    "<== hxge_txdma_fixup_hung_channel: stopped again "
14606349Sqs148142 		    "ring tdc %d passed channel %d", ring_p->tdc, channel));
14616349Sqs148142 		return;
14626349Sqs148142 	}
14636349Sqs148142 
14646349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL,
14656349Sqs148142 	    "<== hxge_txdma_fixup_hung_channel: stop done still not set!! "
14666349Sqs148142 	    "ring tdc %d passed channel %d", ring_p->tdc, channel));
14676349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_fixup_hung_channel"));
14686349Sqs148142 }
14696349Sqs148142 
14706349Sqs148142 /*ARGSUSED*/
14716349Sqs148142 void
14726349Sqs148142 hxge_reclaim_rings(p_hxge_t hxgep)
14736349Sqs148142 {
14746349Sqs148142 	int		index, ndmas;
14756349Sqs148142 	uint16_t	channel;
14766349Sqs148142 	p_tx_rings_t	tx_rings;
14776349Sqs148142 	p_tx_ring_t	tx_ring_p;
14786349Sqs148142 
14796349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_reclaim_rings"));
14806349Sqs148142 	tx_rings = hxgep->tx_rings;
14816349Sqs148142 	if (tx_rings == NULL) {
14826349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14836349Sqs148142 		    "<== hxge_reclaim_rings: NULL ring pointer"));
14846349Sqs148142 		return;
14856349Sqs148142 	}
14866349Sqs148142 	ndmas = tx_rings->ndmas;
14876349Sqs148142 	if (!ndmas) {
14886349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14896349Sqs148142 		    "<== hxge_reclaim_rings: no channel allocated"));
14906349Sqs148142 		return;
14916349Sqs148142 	}
14926349Sqs148142 	if (tx_rings->rings == NULL) {
14936349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
14946349Sqs148142 		    "<== hxge_reclaim_rings: NULL rings pointer"));
14956349Sqs148142 		return;
14966349Sqs148142 	}
14976349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_reclaim_rings: "
14986349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
14996349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
15006349Sqs148142 
15016349Sqs148142 	for (index = 0; index < ndmas; index++) {
15026349Sqs148142 		channel = tx_rings->rings[index]->tdc;
15036349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_reclaim_rings: channel %d",
15046349Sqs148142 		    channel));
15056349Sqs148142 		tx_ring_p = tx_rings->rings[index];
15066349Sqs148142 		MUTEX_ENTER(&tx_ring_p->lock);
15076349Sqs148142 		(void) hxge_txdma_reclaim(hxgep, tx_ring_p, channel);
15086349Sqs148142 		MUTEX_EXIT(&tx_ring_p->lock);
15096349Sqs148142 	}
15106349Sqs148142 
15116349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_reclaim_rings"));
15126349Sqs148142 }
15136349Sqs148142 
15146349Sqs148142 /*
15156349Sqs148142  * Static functions start here.
15166349Sqs148142  */
15176349Sqs148142 static hxge_status_t
15186349Sqs148142 hxge_map_txdma(p_hxge_t hxgep)
15196349Sqs148142 {
15206349Sqs148142 	int			i, ndmas;
15216349Sqs148142 	uint16_t		channel;
15226349Sqs148142 	p_tx_rings_t		tx_rings;
15236349Sqs148142 	p_tx_ring_t		*tx_desc_rings;
15246349Sqs148142 	p_tx_mbox_areas_t	tx_mbox_areas_p;
15256349Sqs148142 	p_tx_mbox_t		*tx_mbox_p;
15266349Sqs148142 	p_hxge_dma_pool_t	dma_buf_poolp;
15276349Sqs148142 	p_hxge_dma_pool_t	dma_cntl_poolp;
15286349Sqs148142 	p_hxge_dma_common_t	*dma_buf_p;
15296349Sqs148142 	p_hxge_dma_common_t	*dma_cntl_p;
15306349Sqs148142 	hxge_status_t		status = HXGE_OK;
15316349Sqs148142 
15326349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_map_txdma"));
15336349Sqs148142 
15346349Sqs148142 	dma_buf_poolp = hxgep->tx_buf_pool_p;
15356349Sqs148142 	dma_cntl_poolp = hxgep->tx_cntl_pool_p;
15366349Sqs148142 
15376349Sqs148142 	if (!dma_buf_poolp->buf_allocated || !dma_cntl_poolp->buf_allocated) {
15386349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
15396349Sqs148142 		    "==> hxge_map_txdma: buf not allocated"));
15406349Sqs148142 		return (HXGE_ERROR);
15416349Sqs148142 	}
15426349Sqs148142 	ndmas = dma_buf_poolp->ndmas;
15436349Sqs148142 	if (!ndmas) {
15446349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
15456349Sqs148142 		    "<== hxge_map_txdma: no dma allocated"));
15466349Sqs148142 		return (HXGE_ERROR);
15476349Sqs148142 	}
15486349Sqs148142 	dma_buf_p = dma_buf_poolp->dma_buf_pool_p;
15496349Sqs148142 	dma_cntl_p = dma_cntl_poolp->dma_buf_pool_p;
15506349Sqs148142 
15516349Sqs148142 	tx_rings = (p_tx_rings_t)KMEM_ZALLOC(sizeof (tx_rings_t), KM_SLEEP);
15526349Sqs148142 	tx_desc_rings = (p_tx_ring_t *)KMEM_ZALLOC(
15536349Sqs148142 	    sizeof (p_tx_ring_t) * ndmas, KM_SLEEP);
15546349Sqs148142 
15556349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_map_txdma: "
15566349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p", tx_rings, tx_desc_rings));
15576349Sqs148142 
15586349Sqs148142 	tx_mbox_areas_p = (p_tx_mbox_areas_t)
15596349Sqs148142 	    KMEM_ZALLOC(sizeof (tx_mbox_areas_t), KM_SLEEP);
15606349Sqs148142 	tx_mbox_p = (p_tx_mbox_t *)KMEM_ZALLOC(
15616349Sqs148142 	    sizeof (p_tx_mbox_t) * ndmas, KM_SLEEP);
15626349Sqs148142 
15636349Sqs148142 	/*
15646349Sqs148142 	 * Map descriptors from the buffer pools for each dma channel.
15656349Sqs148142 	 */
15666349Sqs148142 	for (i = 0; i < ndmas; i++) {
15676349Sqs148142 		/*
15686349Sqs148142 		 * Set up and prepare buffer blocks, descriptors and mailbox.
15696349Sqs148142 		 */
15706349Sqs148142 		channel = ((p_hxge_dma_common_t)dma_buf_p[i])->dma_channel;
15716349Sqs148142 		status = hxge_map_txdma_channel(hxgep, channel,
15726349Sqs148142 		    (p_hxge_dma_common_t *)&dma_buf_p[i],
15736349Sqs148142 		    (p_tx_ring_t *)&tx_desc_rings[i],
15746349Sqs148142 		    dma_buf_poolp->num_chunks[i],
15756349Sqs148142 		    (p_hxge_dma_common_t *)&dma_cntl_p[i],
15766349Sqs148142 		    (p_tx_mbox_t *)&tx_mbox_p[i]);
15776349Sqs148142 		if (status != HXGE_OK) {
15786349Sqs148142 			goto hxge_map_txdma_fail1;
15796349Sqs148142 		}
15806349Sqs148142 		tx_desc_rings[i]->index = (uint16_t)i;
15816349Sqs148142 		tx_desc_rings[i]->tdc_stats = &hxgep->statsp->tdc_stats[i];
15826349Sqs148142 	}
15836349Sqs148142 
15846349Sqs148142 	tx_rings->ndmas = ndmas;
15856349Sqs148142 	tx_rings->rings = tx_desc_rings;
15866349Sqs148142 	hxgep->tx_rings = tx_rings;
15876349Sqs148142 	tx_mbox_areas_p->txmbox_areas_p = tx_mbox_p;
15886349Sqs148142 	hxgep->tx_mbox_areas_p = tx_mbox_areas_p;
15896349Sqs148142 
15906349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_map_txdma: "
15916349Sqs148142 	    "tx_rings $%p rings $%p", hxgep->tx_rings, hxgep->tx_rings->rings));
15926349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_map_txdma: "
15936349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p",
15946349Sqs148142 	    hxgep->tx_rings, tx_desc_rings));
15956349Sqs148142 
15966349Sqs148142 	goto hxge_map_txdma_exit;
15976349Sqs148142 
15986349Sqs148142 hxge_map_txdma_fail1:
15996349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
16006349Sqs148142 	    "==> hxge_map_txdma: uninit tx desc "
16016349Sqs148142 	    "(status 0x%x channel %d i %d)", status, channel, i));
16026349Sqs148142 	i--;
16036349Sqs148142 	for (; i >= 0; i--) {
16046349Sqs148142 		channel = ((p_hxge_dma_common_t)dma_buf_p[i])->dma_channel;
16056349Sqs148142 		hxge_unmap_txdma_channel(hxgep, channel, tx_desc_rings[i],
16066349Sqs148142 		    tx_mbox_p[i]);
16076349Sqs148142 	}
16086349Sqs148142 
16096349Sqs148142 	KMEM_FREE(tx_desc_rings, sizeof (p_tx_ring_t) * ndmas);
16106349Sqs148142 	KMEM_FREE(tx_rings, sizeof (tx_rings_t));
16116349Sqs148142 	KMEM_FREE(tx_mbox_p, sizeof (p_tx_mbox_t) * ndmas);
16126349Sqs148142 	KMEM_FREE(tx_mbox_areas_p, sizeof (tx_mbox_areas_t));
16136349Sqs148142 
16146349Sqs148142 hxge_map_txdma_exit:
16156349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
16166349Sqs148142 	    "==> hxge_map_txdma: (status 0x%x channel %d)", status, channel));
16176349Sqs148142 
16186349Sqs148142 	return (status);
16196349Sqs148142 }
16206349Sqs148142 
16216349Sqs148142 static void
16226349Sqs148142 hxge_unmap_txdma(p_hxge_t hxgep)
16236349Sqs148142 {
16246349Sqs148142 	int			i, ndmas;
16256349Sqs148142 	uint8_t			channel;
16266349Sqs148142 	p_tx_rings_t		tx_rings;
16276349Sqs148142 	p_tx_ring_t		*tx_desc_rings;
16286349Sqs148142 	p_tx_mbox_areas_t	tx_mbox_areas_p;
16296349Sqs148142 	p_tx_mbox_t		*tx_mbox_p;
16306349Sqs148142 	p_hxge_dma_pool_t	dma_buf_poolp;
16316349Sqs148142 
16326349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_unmap_txdma"));
16336349Sqs148142 
16346349Sqs148142 	dma_buf_poolp = hxgep->tx_buf_pool_p;
16356349Sqs148142 	if (!dma_buf_poolp->buf_allocated) {
16366349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
16376349Sqs148142 		    "==> hxge_unmap_txdma: buf not allocated"));
16386349Sqs148142 		return;
16396349Sqs148142 	}
16406349Sqs148142 	ndmas = dma_buf_poolp->ndmas;
16416349Sqs148142 	if (!ndmas) {
16426349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
16436349Sqs148142 		    "<== hxge_unmap_txdma: no dma allocated"));
16446349Sqs148142 		return;
16456349Sqs148142 	}
16466349Sqs148142 	tx_rings = hxgep->tx_rings;
16486349Sqs148142 	if (tx_rings == NULL) {
16496349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
16506349Sqs148142 		    "<== hxge_unmap_txdma: NULL ring pointer"));
16516349Sqs148142 		return;
16526349Sqs148142 	}
16536349Sqs148142 	tx_desc_rings = tx_rings->rings;
16546349Sqs148142 	if (tx_desc_rings == NULL) {
16556349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
16566349Sqs148142 		    "<== hxge_unmap_txdma: NULL ring pointers"));
16576349Sqs148142 		return;
16586349Sqs148142 	}
16596349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_unmap_txdma: "
16606349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
16616349Sqs148142 	    tx_rings, tx_desc_rings, ndmas));
16626349Sqs148142 
16636349Sqs148142 	tx_mbox_areas_p = hxgep->tx_mbox_areas_p;
16646349Sqs148142 	tx_mbox_p = tx_mbox_areas_p->txmbox_areas_p;
16656349Sqs148142 
16666349Sqs148142 	for (i = 0; i < ndmas; i++) {
16676349Sqs148142 		channel = tx_desc_rings[i]->tdc;
16686349Sqs148142 		(void) hxge_unmap_txdma_channel(hxgep, channel,
16696349Sqs148142 		    (p_tx_ring_t)tx_desc_rings[i],
16706349Sqs148142 		    (p_tx_mbox_t)tx_mbox_p[i]);
16716349Sqs148142 	}
16726349Sqs148142 
16736349Sqs148142 	KMEM_FREE(tx_desc_rings, sizeof (p_tx_ring_t) * ndmas);
16746349Sqs148142 	KMEM_FREE(tx_rings, sizeof (tx_rings_t));
16756349Sqs148142 	KMEM_FREE(tx_mbox_p, sizeof (p_tx_mbox_t) * ndmas);
16766349Sqs148142 	KMEM_FREE(tx_mbox_areas_p, sizeof (tx_mbox_areas_t));
16776349Sqs148142 
16786349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_unmap_txdma"));
16796349Sqs148142 }
16806349Sqs148142 
16816349Sqs148142 static hxge_status_t
16826349Sqs148142 hxge_map_txdma_channel(p_hxge_t hxgep, uint16_t channel,
16836349Sqs148142     p_hxge_dma_common_t *dma_buf_p, p_tx_ring_t *tx_desc_p,
16846349Sqs148142     uint32_t num_chunks, p_hxge_dma_common_t *dma_cntl_p,
16856349Sqs148142     p_tx_mbox_t *tx_mbox_p)
16866349Sqs148142 {
16876349Sqs148142 	int status = HXGE_OK;
16886349Sqs148142 
16896349Sqs148142 	/*
16906349Sqs148142 	 * Set up and prepare buffer blocks, descriptors and mailbox.
16916349Sqs148142 	 */
16926349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
16936349Sqs148142 	    "==> hxge_map_txdma_channel (channel %d)", channel));
16946349Sqs148142 
16956349Sqs148142 	/*
16966349Sqs148142 	 * Transmit buffer blocks
16976349Sqs148142 	 */
16986349Sqs148142 	status = hxge_map_txdma_channel_buf_ring(hxgep, channel,
16996349Sqs148142 	    dma_buf_p, tx_desc_p, num_chunks);
17006349Sqs148142 	if (status != HXGE_OK) {
17016349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
17026349Sqs148142 		    "==> hxge_map_txdma_channel (channel %d): "
17036349Sqs148142 		    "map buffer failed 0x%x", channel, status));
17046349Sqs148142 		goto hxge_map_txdma_channel_exit;
17056349Sqs148142 	}
17066349Sqs148142 	/*
17076349Sqs148142 	 * Transmit block ring, and mailbox.
17086349Sqs148142 	 */
17096349Sqs148142 	hxge_map_txdma_channel_cfg_ring(hxgep, channel, dma_cntl_p, *tx_desc_p,
17106349Sqs148142 	    tx_mbox_p);
17116349Sqs148142 
17126349Sqs148142 	goto hxge_map_txdma_channel_exit;
17136349Sqs148142 
17146349Sqs148142 hxge_map_txdma_channel_fail1:
17156349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17166349Sqs148142 	    "==> hxge_map_txdma_channel: unmap buf"
17176349Sqs148142 	    "(status 0x%x channel %d)", status, channel));
17186349Sqs148142 	hxge_unmap_txdma_channel_buf_ring(hxgep, *tx_desc_p);
17196349Sqs148142 
17206349Sqs148142 hxge_map_txdma_channel_exit:
17216349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17226349Sqs148142 	    "<== hxge_map_txdma_channel: (status 0x%x channel %d)",
17236349Sqs148142 	    status, channel));
17246349Sqs148142 
17256349Sqs148142 	return (status);
17266349Sqs148142 }
17276349Sqs148142 
17286349Sqs148142 /*ARGSUSED*/
17296349Sqs148142 static void
17306349Sqs148142 hxge_unmap_txdma_channel(p_hxge_t hxgep, uint16_t channel,
17316349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p)
17326349Sqs148142 {
17336349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17346349Sqs148142 	    "==> hxge_unmap_txdma_channel (channel %d)", channel));
17356349Sqs148142 
17366349Sqs148142 	/* unmap tx block ring, and mailbox.  */
17376349Sqs148142 	(void) hxge_unmap_txdma_channel_cfg_ring(hxgep, tx_ring_p, tx_mbox_p);
17386349Sqs148142 
17396349Sqs148142 	/* unmap buffer blocks */
17406349Sqs148142 	(void) hxge_unmap_txdma_channel_buf_ring(hxgep, tx_ring_p);
17416349Sqs148142 
17426349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_unmap_txdma_channel"));
17436349Sqs148142 }
17446349Sqs148142 
17456349Sqs148142 /*ARGSUSED*/
17466349Sqs148142 static void
17476349Sqs148142 hxge_map_txdma_channel_cfg_ring(p_hxge_t hxgep, uint16_t dma_channel,
17486349Sqs148142     p_hxge_dma_common_t *dma_cntl_p, p_tx_ring_t tx_ring_p,
17496349Sqs148142     p_tx_mbox_t *tx_mbox_p)
17506349Sqs148142 {
17516349Sqs148142 	p_tx_mbox_t		mboxp;
17526349Sqs148142 	p_hxge_dma_common_t	cntl_dmap;
17536349Sqs148142 	p_hxge_dma_common_t	dmap;
17546349Sqs148142 	tdc_tdr_cfg_t		*tx_ring_cfig_p;
17556349Sqs148142 	tdc_tdr_kick_t		*tx_ring_kick_p;
17566349Sqs148142 	tdc_tdr_cfg_t		*tx_cs_p;
17576349Sqs148142 	tdc_int_mask_t		*tx_evmask_p;
17586349Sqs148142 	tdc_mbh_t		*mboxh_p;
17596349Sqs148142 	tdc_mbl_t		*mboxl_p;
17606349Sqs148142 	uint64_t		tx_desc_len;
17616349Sqs148142 
17626349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17636349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring"));
17646349Sqs148142 
17656349Sqs148142 	cntl_dmap = *dma_cntl_p;
17666349Sqs148142 
17676349Sqs148142 	dmap = (p_hxge_dma_common_t)&tx_ring_p->tdc_desc;
17686349Sqs148142 	hxge_setup_dma_common(dmap, cntl_dmap, tx_ring_p->tx_ring_size,
17696349Sqs148142 	    sizeof (tx_desc_t));
17706349Sqs148142 
17716349Sqs148142 	/*
17726349Sqs148142 	 * Zero out transmit ring descriptors.
17736349Sqs148142 	 */
17746349Sqs148142 	bzero((caddr_t)dmap->kaddrp, dmap->alength);
17756349Sqs148142 	tx_ring_cfig_p = &(tx_ring_p->tx_ring_cfig);
17766349Sqs148142 	tx_ring_kick_p = &(tx_ring_p->tx_ring_kick);
17776349Sqs148142 	tx_cs_p = &(tx_ring_p->tx_cs);
17786349Sqs148142 	tx_evmask_p = &(tx_ring_p->tx_evmask);
17796349Sqs148142 	tx_ring_cfig_p->value = 0;
17806349Sqs148142 	tx_ring_kick_p->value = 0;
17816349Sqs148142 	tx_cs_p->value = 0;
17826349Sqs148142 	tx_evmask_p->value = 0;
17836349Sqs148142 
17846349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17856349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring: channel %d des $%p",
17866349Sqs148142 	    dma_channel, dmap->dma_cookie.dmac_laddress));
17876349Sqs148142 
17886349Sqs148142 	tx_ring_cfig_p->value = 0;
17896349Sqs148142 
17906349Sqs148142 	/* Hydra len is 11 bits and the lower 5 bits are 0s */
17916349Sqs148142 	tx_desc_len = (uint64_t)(tx_ring_p->tx_ring_size >> 5);
17926349Sqs148142 	tx_ring_cfig_p->value =
17936349Sqs148142 	    (dmap->dma_cookie.dmac_laddress & TDC_TDR_CFG_ADDR_MASK) |
17946349Sqs148142 	    (tx_desc_len << TDC_TDR_CFG_LEN_SHIFT);
17956349Sqs148142 
17966349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
17976349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring: channel %d cfg 0x%llx",
17986349Sqs148142 	    dma_channel, tx_ring_cfig_p->value));
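	/*
	 * Illustrative example (ring size assumed, not read from hardware):
	 * with a 1024-entry descriptor ring, tx_desc_len = 1024 >> 5 = 32,
	 * so the length field programmed above counts descriptors in units
	 * of 32.  The register therefore holds the ring base address
	 * (masked with TDC_TDR_CFG_ADDR_MASK) OR-ed with the length shifted
	 * to TDC_TDR_CFG_LEN_SHIFT.
	 */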
17996349Sqs148142 
18006349Sqs148142 	tx_cs_p->bits.reset = 1;
18016349Sqs148142 
18026349Sqs148142 	/* Map in mailbox */
18036349Sqs148142 	mboxp = (p_tx_mbox_t)KMEM_ZALLOC(sizeof (tx_mbox_t), KM_SLEEP);
18046349Sqs148142 	dmap = (p_hxge_dma_common_t)&mboxp->tx_mbox;
18056349Sqs148142 	hxge_setup_dma_common(dmap, cntl_dmap, 1, sizeof (txdma_mailbox_t));
18066349Sqs148142 	mboxh_p = (tdc_mbh_t *)&tx_ring_p->tx_mbox_mbh;
18076349Sqs148142 	mboxl_p = (tdc_mbl_t *)&tx_ring_p->tx_mbox_mbl;
18086349Sqs148142 	mboxh_p->value = mboxl_p->value = 0;
18096349Sqs148142 
18106349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18116349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring: mbox 0x%lx",
18126349Sqs148142 	    dmap->dma_cookie.dmac_laddress));
18136349Sqs148142 
18146349Sqs148142 	mboxh_p->bits.mbaddr = ((dmap->dma_cookie.dmac_laddress >>
18156349Sqs148142 	    TDC_MBH_ADDR_SHIFT) & TDC_MBH_MASK);
18166349Sqs148142 	mboxl_p->bits.mbaddr = ((dmap->dma_cookie.dmac_laddress &
18176349Sqs148142 	    TDC_MBL_MASK) >> TDC_MBL_SHIFT);
18186349Sqs148142 
18196349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18206349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring: mbox 0x%lx",
18216349Sqs148142 	    dmap->dma_cookie.dmac_laddress));
18226349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18236349Sqs148142 	    "==> hxge_map_txdma_channel_cfg_ring: hmbox $%p mbox $%p",
18246349Sqs148142 	    mboxh_p->bits.mbaddr, mboxl_p->bits.mbaddr));
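	/*
	 * Sketch of the mailbox address split above (the actual shift and
	 * mask values live in the TDC register definitions and are not
	 * restated here): the 64-bit DMA cookie address is divided into a
	 * high portion and a low portion, e.g.
	 *
	 *	mbh = (addr >> TDC_MBH_ADDR_SHIFT) & TDC_MBH_MASK;
	 *	mbl = (addr & TDC_MBL_MASK) >> TDC_MBL_SHIFT;
	 *
	 * and the two halves are later written to the mailbox-high and
	 * mailbox-low registers for this channel.
	 */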
18256349Sqs148142 
18266349Sqs148142 	/*
18276349Sqs148142 	 * Set page valid and no mask
18286349Sqs148142 	 */
18296349Sqs148142 	tx_ring_p->page_hdl.value = 0;
18306349Sqs148142 
18316349Sqs148142 	*tx_mbox_p = mboxp;
18326349Sqs148142 
18336349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18346349Sqs148142 	    "<== hxge_map_txdma_channel_cfg_ring"));
18356349Sqs148142 }
18366349Sqs148142 
18376349Sqs148142 /*ARGSUSED*/
18386349Sqs148142 static void
18396349Sqs148142 hxge_unmap_txdma_channel_cfg_ring(p_hxge_t hxgep,
18406349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p)
18416349Sqs148142 {
18426349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18436349Sqs148142 	    "==> hxge_unmap_txdma_channel_cfg_ring: channel %d",
18446349Sqs148142 	    tx_ring_p->tdc));
18456349Sqs148142 
18466349Sqs148142 	KMEM_FREE(tx_mbox_p, sizeof (tx_mbox_t));
18476349Sqs148142 
18486349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18496349Sqs148142 	    "<== hxge_unmap_txdma_channel_cfg_ring"));
18506349Sqs148142 }
18516349Sqs148142 
18526349Sqs148142 static hxge_status_t
18536349Sqs148142 hxge_map_txdma_channel_buf_ring(p_hxge_t hxgep, uint16_t channel,
18546349Sqs148142     p_hxge_dma_common_t *dma_buf_p,
18556349Sqs148142     p_tx_ring_t *tx_desc_p, uint32_t num_chunks)
18566349Sqs148142 {
18576349Sqs148142 	p_hxge_dma_common_t	dma_bufp, tmp_bufp;
18586349Sqs148142 	p_hxge_dma_common_t	dmap;
18596349Sqs148142 	hxge_os_dma_handle_t	tx_buf_dma_handle;
18606349Sqs148142 	p_tx_ring_t		tx_ring_p;
18616349Sqs148142 	p_tx_msg_t		tx_msg_ring;
18626349Sqs148142 	hxge_status_t		status = HXGE_OK;
18636349Sqs148142 	int			ddi_status = DDI_SUCCESS;
18646349Sqs148142 	int			i, j, index;
18656349Sqs148142 	uint32_t		size, bsize;
18666349Sqs148142 	uint32_t		nblocks, nmsgs;
18678718SMichael.Speer@Sun.COM 	char			qname[TASKQ_NAMELEN];
18686349Sqs148142 
18696349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18706349Sqs148142 	    "==> hxge_map_txdma_channel_buf_ring"));
18716349Sqs148142 
18726349Sqs148142 	dma_bufp = tmp_bufp = *dma_buf_p;
18736349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18746349Sqs148142 	    " hxge_map_txdma_channel_buf_ring: channel %d to map %d "
18756349Sqs148142 	    "chunks bufp $%p", channel, num_chunks, dma_bufp));
18766349Sqs148142 
18776349Sqs148142 	nmsgs = 0;
18786349Sqs148142 	for (i = 0; i < num_chunks; i++, tmp_bufp++) {
18796349Sqs148142 		nmsgs += tmp_bufp->nblocks;
18806349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18816349Sqs148142 		    "==> hxge_map_txdma_channel_buf_ring: channel %d "
18826349Sqs148142 		    "bufp $%p nblocks %d nmsgs %d",
18836349Sqs148142 		    channel, tmp_bufp, tmp_bufp->nblocks, nmsgs));
18846349Sqs148142 	}
18856349Sqs148142 	if (!nmsgs) {
18866349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
18876349Sqs148142 		    "<== hxge_map_txdma_channel_buf_ring: channel %d "
18886349Sqs148142 		    "no msg blocks", channel));
18896349Sqs148142 		status = HXGE_ERROR;
18906349Sqs148142 
18916349Sqs148142 		goto hxge_map_txdma_channel_buf_ring_exit;
18926349Sqs148142 	}
18938718SMichael.Speer@Sun.COM 
18946349Sqs148142 	tx_ring_p = (p_tx_ring_t)KMEM_ZALLOC(sizeof (tx_ring_t), KM_SLEEP);
18958718SMichael.Speer@Sun.COM 	tx_ring_p->hxgep = hxgep;
18968718SMichael.Speer@Sun.COM 	(void) snprintf(qname, TASKQ_NAMELEN, "hxge_%d_%d",
18978718SMichael.Speer@Sun.COM 	    hxgep->instance, channel);
18988718SMichael.Speer@Sun.COM 	tx_ring_p->taskq = ddi_taskq_create(hxgep->dip, qname, 1,
18998718SMichael.Speer@Sun.COM 	    TASKQ_DEFAULTPRI, 0);
19008718SMichael.Speer@Sun.COM 	if (tx_ring_p->taskq == NULL) {
19018718SMichael.Speer@Sun.COM 		goto hxge_map_txdma_channel_buf_ring_fail1;
19028718SMichael.Speer@Sun.COM 	}
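	/*
	 * The taskq name built above takes the form "hxge_<instance>_<channel>",
	 * e.g. "hxge_0_2" for instance 0, channel 2 (example values only).
	 */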
19038718SMichael.Speer@Sun.COM 
19046349Sqs148142 	MUTEX_INIT(&tx_ring_p->lock, NULL, MUTEX_DRIVER,
19056349Sqs148142 	    (void *) hxgep->interrupt_cookie);
19066349Sqs148142 	/*
19076349Sqs148142 	 * Allocate transmit message rings and handles for packets not to be
19086349Sqs148142 	 * copied to premapped buffers.
19096349Sqs148142 	 */
19106349Sqs148142 	size = nmsgs * sizeof (tx_msg_t);
19116349Sqs148142 	tx_msg_ring = KMEM_ZALLOC(size, KM_SLEEP);
19126349Sqs148142 	for (i = 0; i < nmsgs; i++) {
19136349Sqs148142 		ddi_status = ddi_dma_alloc_handle(hxgep->dip, &hxge_tx_dma_attr,
19146349Sqs148142 		    DDI_DMA_DONTWAIT, 0, &tx_msg_ring[i].dma_handle);
19156349Sqs148142 		if (ddi_status != DDI_SUCCESS) {
19166349Sqs148142 			status |= HXGE_DDI_FAILED;
19176349Sqs148142 			break;
19186349Sqs148142 		}
19196349Sqs148142 	}
19206349Sqs148142 
19216349Sqs148142 	if (i < nmsgs) {
19226349Sqs148142 		HXGE_DEBUG_MSG((hxgep, HXGE_ERR_CTL,
19236349Sqs148142 		    "Allocate handles failed."));
19246349Sqs148142 
19256349Sqs148142 		goto hxge_map_txdma_channel_buf_ring_fail1;
19266349Sqs148142 	}
19276349Sqs148142 	tx_ring_p->tdc = channel;
19286349Sqs148142 	tx_ring_p->tx_msg_ring = tx_msg_ring;
19296349Sqs148142 	tx_ring_p->tx_ring_size = nmsgs;
19306349Sqs148142 	tx_ring_p->num_chunks = num_chunks;
19316349Sqs148142 	if (!hxge_tx_intr_thres) {
19326349Sqs148142 		hxge_tx_intr_thres = tx_ring_p->tx_ring_size / 4;
19336349Sqs148142 	}
19346349Sqs148142 	tx_ring_p->tx_wrap_mask = tx_ring_p->tx_ring_size - 1;
19356349Sqs148142 	tx_ring_p->rd_index = 0;
19366349Sqs148142 	tx_ring_p->wr_index = 0;
19376349Sqs148142 	tx_ring_p->ring_head.value = 0;
19386349Sqs148142 	tx_ring_p->ring_kick_tail.value = 0;
19396349Sqs148142 	tx_ring_p->descs_pending = 0;
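	/*
	 * Note (assumes the ring size is a power of two, which the mask
	 * arithmetic relies on): with tx_ring_size = 1024, tx_wrap_mask is
	 * 0x3ff, so indexes can advance as (index + 1) & tx_wrap_mask
	 * instead of a modulo operation.
	 */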
19406349Sqs148142 
19416349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
19426349Sqs148142 	    "==> hxge_map_txdma_channel_buf_ring: channel %d "
19436349Sqs148142 	    "actual tx desc max %d nmsgs %d (config hxge_tx_ring_size %d)",
19446349Sqs148142 	    channel, tx_ring_p->tx_ring_size, nmsgs, hxge_tx_ring_size));
19456349Sqs148142 
19466349Sqs148142 	/*
19476349Sqs148142 	 * Map in buffers from the buffer pool.
19486349Sqs148142 	 */
19496349Sqs148142 	index = 0;
19506349Sqs148142 	bsize = dma_bufp->block_size;
19516349Sqs148142 
19526349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_map_txdma_channel_buf_ring: "
19536349Sqs148142 	    "dma_bufp $%p tx_rng_p $%p tx_msg_rng_p $%p bsize %d",
19546349Sqs148142 	    dma_bufp, tx_ring_p, tx_msg_ring, bsize));
19556349Sqs148142 
19566349Sqs148142 	for (i = 0; i < num_chunks; i++, dma_bufp++) {
19576349Sqs148142 		bsize = dma_bufp->block_size;
19586349Sqs148142 		nblocks = dma_bufp->nblocks;
19598476SMichael.Speer@Sun.COM 		tx_buf_dma_handle = dma_bufp->dma_handle;
19606349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
19616349Sqs148142 		    "==> hxge_map_txdma_channel_buf_ring: dma chunk %d "
19626349Sqs148142 		    "size %d dma_bufp $%p",
19636349Sqs148142 		    i, sizeof (hxge_dma_common_t), dma_bufp));
19646349Sqs148142 
19656349Sqs148142 		for (j = 0; j < nblocks; j++) {
19666349Sqs148142 			tx_msg_ring[index].buf_dma_handle = tx_buf_dma_handle;
19678476SMichael.Speer@Sun.COM 			tx_msg_ring[index].offset_index = j;
19686349Sqs148142 			dmap = &tx_msg_ring[index++].buf_dma;
19696349Sqs148142 			HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
19706349Sqs148142 			    "==> hxge_map_txdma_channel_buf_ring: j %d "
19716349Sqs148142 			    "dmap $%p", j, dmap));
19726349Sqs148142 			hxge_setup_dma_common(dmap, dma_bufp, 1, bsize);
19736349Sqs148142 		}
19746349Sqs148142 	}
19756349Sqs148142 
19766349Sqs148142 	if (i < num_chunks) {
19776349Sqs148142 		status = HXGE_ERROR;
19786349Sqs148142 
19796349Sqs148142 		goto hxge_map_txdma_channel_buf_ring_fail1;
19806349Sqs148142 	}
19816349Sqs148142 
19826349Sqs148142 	*tx_desc_p = tx_ring_p;
19836349Sqs148142 
19846349Sqs148142 	goto hxge_map_txdma_channel_buf_ring_exit;
19856349Sqs148142 
19866349Sqs148142 hxge_map_txdma_channel_buf_ring_fail1:
19878718SMichael.Speer@Sun.COM 	if (tx_ring_p->taskq) {
19888718SMichael.Speer@Sun.COM 		ddi_taskq_destroy(tx_ring_p->taskq);
19898718SMichael.Speer@Sun.COM 		tx_ring_p->taskq = NULL;
19908718SMichael.Speer@Sun.COM 	}
19918718SMichael.Speer@Sun.COM 
19926349Sqs148142 	index--;
19936349Sqs148142 	for (; index >= 0; index--) {
19946349Sqs148142 		if (tx_msg_ring[index].dma_handle != NULL) {
19956349Sqs148142 			ddi_dma_free_handle(&tx_msg_ring[index].dma_handle);
19966349Sqs148142 		}
19976349Sqs148142 	}
19986349Sqs148142 	MUTEX_DESTROY(&tx_ring_p->lock);
19996349Sqs148142 	KMEM_FREE(tx_msg_ring, size);
20006349Sqs148142 	KMEM_FREE(tx_ring_p, sizeof (tx_ring_t));
20016349Sqs148142 
20026349Sqs148142 	status = HXGE_ERROR;
20036349Sqs148142 
20046349Sqs148142 hxge_map_txdma_channel_buf_ring_exit:
20056349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
20066349Sqs148142 	    "<== hxge_map_txdma_channel_buf_ring status 0x%x", status));
20076349Sqs148142 
20086349Sqs148142 	return (status);
20096349Sqs148142 }
20106349Sqs148142 
20116349Sqs148142 /*ARGSUSED*/
20126349Sqs148142 static void
20136349Sqs148142 hxge_unmap_txdma_channel_buf_ring(p_hxge_t hxgep, p_tx_ring_t tx_ring_p)
20146349Sqs148142 {
20156349Sqs148142 	p_tx_msg_t	tx_msg_ring;
20166349Sqs148142 	p_tx_msg_t	tx_msg_p;
20176349Sqs148142 	int		i;
20186349Sqs148142 
20196349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
20206349Sqs148142 	    "==> hxge_unmap_txdma_channel_buf_ring"));
20216349Sqs148142 	if (tx_ring_p == NULL) {
20226349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
20236349Sqs148142 		    "<== hxge_unmap_txdma_channel_buf_ring: NULL ringp"));
20246349Sqs148142 		return;
20256349Sqs148142 	}
20266349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
20276349Sqs148142 	    "==> hxge_unmap_txdma_channel_buf_ring: channel %d",
20286349Sqs148142 	    tx_ring_p->tdc));
20296349Sqs148142 
20308718SMichael.Speer@Sun.COM 	MUTEX_ENTER(&tx_ring_p->lock);
20316349Sqs148142 	tx_msg_ring = tx_ring_p->tx_msg_ring;
20326349Sqs148142 	for (i = 0; i < tx_ring_p->tx_ring_size; i++) {
20336349Sqs148142 		tx_msg_p = &tx_msg_ring[i];
20346349Sqs148142 		if (tx_msg_p->flags.dma_type == USE_DVMA) {
20356349Sqs148142 			HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "entry = %d", i));
20366349Sqs148142 			(void) dvma_unload(tx_msg_p->dvma_handle, 0, -1);
20376349Sqs148142 			tx_msg_p->dvma_handle = NULL;
20386349Sqs148142 			if (tx_ring_p->dvma_wr_index ==
20396349Sqs148142 			    tx_ring_p->dvma_wrap_mask) {
20406349Sqs148142 				tx_ring_p->dvma_wr_index = 0;
20416349Sqs148142 			} else {
20426349Sqs148142 				tx_ring_p->dvma_wr_index++;
20436349Sqs148142 			}
20446349Sqs148142 			tx_ring_p->dvma_pending--;
20456349Sqs148142 		} else if (tx_msg_p->flags.dma_type == USE_DMA) {
20466349Sqs148142 			if (ddi_dma_unbind_handle(tx_msg_p->dma_handle)) {
20476349Sqs148142 				cmn_err(CE_WARN, "hxge_unmap_txdma_channel_buf_ring: "
20486349Sqs148142 				    "ddi_dma_unbind_handle failed.");
20496349Sqs148142 			}
20506349Sqs148142 		}
20516349Sqs148142 		if (tx_msg_p->tx_message != NULL) {
20526349Sqs148142 			freemsg(tx_msg_p->tx_message);
20536349Sqs148142 			tx_msg_p->tx_message = NULL;
20546349Sqs148142 		}
20556349Sqs148142 	}
20566349Sqs148142 
20576349Sqs148142 	for (i = 0; i < tx_ring_p->tx_ring_size; i++) {
20586349Sqs148142 		if (tx_msg_ring[i].dma_handle != NULL) {
20596349Sqs148142 			ddi_dma_free_handle(&tx_msg_ring[i].dma_handle);
20606349Sqs148142 		}
20616349Sqs148142 	}
20628718SMichael.Speer@Sun.COM 	MUTEX_EXIT(&tx_ring_p->lock);
20638718SMichael.Speer@Sun.COM 
20648718SMichael.Speer@Sun.COM 	if (tx_ring_p->taskq) {
20658718SMichael.Speer@Sun.COM 		ddi_taskq_destroy(tx_ring_p->taskq);
20668718SMichael.Speer@Sun.COM 		tx_ring_p->taskq = NULL;
20678718SMichael.Speer@Sun.COM 	}
20686349Sqs148142 
20696349Sqs148142 	MUTEX_DESTROY(&tx_ring_p->lock);
20706349Sqs148142 	KMEM_FREE(tx_msg_ring, sizeof (tx_msg_t) * tx_ring_p->tx_ring_size);
20716349Sqs148142 	KMEM_FREE(tx_ring_p, sizeof (tx_ring_t));
20726349Sqs148142 
20736349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
20746349Sqs148142 	    "<== hxge_unmap_txdma_channel_buf_ring"));
20756349Sqs148142 }
20766349Sqs148142 
20776349Sqs148142 static hxge_status_t
20786349Sqs148142 hxge_txdma_hw_start(p_hxge_t hxgep)
20796349Sqs148142 {
20806349Sqs148142 	int			i, ndmas;
20816349Sqs148142 	uint16_t		channel;
20826349Sqs148142 	p_tx_rings_t		tx_rings;
20836349Sqs148142 	p_tx_ring_t		*tx_desc_rings;
20846349Sqs148142 	p_tx_mbox_areas_t	tx_mbox_areas_p;
20856349Sqs148142 	p_tx_mbox_t		*tx_mbox_p;
20866349Sqs148142 	hxge_status_t		status = HXGE_OK;
20876349Sqs148142 	uint64_t		tmp;
20886349Sqs148142 
20896349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_start"));
20906349Sqs148142 
20916349Sqs148142 	/*
20926349Sqs148142 	 * Initialize the REORD Table: 1. Disable VMAC. 2. Reset the FIFO Err
20936349Sqs148142 	 * Stat. 3. Scrub memory and check for errors.
20946349Sqs148142 	 */
20956349Sqs148142 	(void) hxge_tx_vmac_disable(hxgep);
20966349Sqs148142 
20976349Sqs148142 	/*
20986349Sqs148142 	 * Clear the error status
20996349Sqs148142 	 */
21006349Sqs148142 	HXGE_REG_WR64(hxgep->hpi_handle, TDC_FIFO_ERR_STAT, 0x7);
21016349Sqs148142 
21026349Sqs148142 	/*
21036349Sqs148142 	 * Scrub the rtab memory for the TDC and reset the TDC.
21046349Sqs148142 	 */
21056349Sqs148142 	HXGE_REG_WR64(hxgep->hpi_handle, TDC_REORD_TBL_DATA_HI, 0x0ULL);
21066349Sqs148142 	HXGE_REG_WR64(hxgep->hpi_handle, TDC_REORD_TBL_DATA_LO, 0x0ULL);
21076349Sqs148142 
21086349Sqs148142 	for (i = 0; i < 256; i++) {
21096349Sqs148142 		HXGE_REG_WR64(hxgep->hpi_handle, TDC_REORD_TBL_CMD,
21106349Sqs148142 		    (uint64_t)i);
21116349Sqs148142 
21126349Sqs148142 		/*
21136349Sqs148142 		 * Write the command register with an indirect read instruction
21146349Sqs148142 		 */
21156349Sqs148142 		tmp = (0x1ULL << 30) | i;
21166349Sqs148142 		HXGE_REG_WR64(hxgep->hpi_handle, TDC_REORD_TBL_CMD, tmp);
21176349Sqs148142 
21186349Sqs148142 		/*
21196349Sqs148142 		 * Wait for status done
21206349Sqs148142 		 */
21216349Sqs148142 		tmp = 0;
21226349Sqs148142 		do {
21236349Sqs148142 			HXGE_REG_RD64(hxgep->hpi_handle, TDC_REORD_TBL_CMD,
21246349Sqs148142 			    &tmp);
21256349Sqs148142 		} while (((tmp >> 31) & 0x1ULL) == 0x0);
21266349Sqs148142 	}
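	/*
	 * Sketch of the indirect command format used above, as inferred
	 * from this code (the authoritative layout is in the TDC register
	 * definitions): the low bits of TDC_REORD_TBL_CMD select the table
	 * entry, bit 30 set requests a read (clear means write), and the
	 * hardware sets bit 31 when the access is complete, e.g.
	 *
	 *	write entry i:	cmd = i;
	 *	read entry i:	cmd = (0x1ULL << 30) | i;  then poll bit 31
	 */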
21276349Sqs148142 
21286349Sqs148142 	for (i = 0; i < 256; i++) {
21296349Sqs148142 		/*
21306349Sqs148142 		 * Write the command register with an indirect read instruction
21316349Sqs148142 		 */
21326349Sqs148142 		tmp = (0x1ULL << 30) | i;
21336349Sqs148142 		HXGE_REG_WR64(hxgep->hpi_handle, TDC_REORD_TBL_CMD, tmp);
21346349Sqs148142 
21356349Sqs148142 		/*
21366349Sqs148142 		 * Wait for status done
21376349Sqs148142 		 */
21386349Sqs148142 		tmp = 0;
21396349Sqs148142 		do {
21406349Sqs148142 			HXGE_REG_RD64(hxgep->hpi_handle, TDC_REORD_TBL_CMD,
21416349Sqs148142 			    &tmp);
21426349Sqs148142 		} while (((tmp >> 31) & 0x1ULL) == 0x0);
21436349Sqs148142 
21446349Sqs148142 		HXGE_REG_RD64(hxgep->hpi_handle, TDC_REORD_TBL_DATA_HI, &tmp);
21456349Sqs148142 		if (0x1ff00ULL != (0x1ffffULL & tmp)) {
21466349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "PANIC ReordTbl "
21476349Sqs148142 			    "unexpected data (hi), entry: %x, value: 0x%0llx\n",
21486349Sqs148142 			    i, (unsigned long long)tmp));
21496864Sqs148142 			status = HXGE_ERROR;
21506349Sqs148142 		}
21516349Sqs148142 
21526349Sqs148142 		HXGE_REG_RD64(hxgep->hpi_handle, TDC_REORD_TBL_DATA_LO, &tmp);
21536349Sqs148142 		if (tmp != 0) {
21546349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "PANIC ReordTbl "
21556349Sqs148142 			    "unexpected data (lo), entry: %x\n", i));
21566864Sqs148142 			status = HXGE_ERROR;
21576349Sqs148142 		}
21586349Sqs148142 
21596349Sqs148142 		HXGE_REG_RD64(hxgep->hpi_handle, TDC_FIFO_ERR_STAT, &tmp);
21606349Sqs148142 		if (tmp != 0) {
21616349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "PANIC ReordTbl "
21626349Sqs148142 			    "parity error, entry: %x, val 0x%llx\n",
21636349Sqs148142 			    i, (unsigned long long)tmp));
21646864Sqs148142 			status = HXGE_ERROR;
21656349Sqs148142 		}
21666349Sqs148142 
21676349Sqs148142 		HXGE_REG_RD64(hxgep->hpi_handle, TDC_FIFO_ERR_STAT, &tmp);
21686349Sqs148142 		if (tmp != 0) {
21696349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "PANIC ReordTbl "
21706349Sqs148142 			    "parity error, entry: %x\n", i));
21716864Sqs148142 			status = HXGE_ERROR;
21726349Sqs148142 		}
21736349Sqs148142 	}
21746349Sqs148142 
21756864Sqs148142 	if (status != HXGE_OK)
21766864Sqs148142 		goto hxge_txdma_hw_start_exit;
21776864Sqs148142 
21786349Sqs148142 	/*
21796349Sqs148142 	 * Reset FIFO Error Status for the TDC and enable FIFO error events.
21806349Sqs148142 	 */
21816349Sqs148142 	HXGE_REG_WR64(hxgep->hpi_handle, TDC_FIFO_ERR_STAT, 0x7);
21826349Sqs148142 	HXGE_REG_WR64(hxgep->hpi_handle, TDC_FIFO_ERR_MASK, 0x0);
21836349Sqs148142 
21846349Sqs148142 	/*
21856349Sqs148142 	 * Initialize the Transmit DMAs.
21866349Sqs148142 	 */
21876349Sqs148142 	tx_rings = hxgep->tx_rings;
21886349Sqs148142 	if (tx_rings == NULL) {
21896349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
21906349Sqs148142 		    "<== hxge_txdma_hw_start: NULL ring pointer"));
21916349Sqs148142 		return (HXGE_ERROR);
21926349Sqs148142 	}
21936864Sqs148142 
21946349Sqs148142 	tx_desc_rings = tx_rings->rings;
21956349Sqs148142 	if (tx_desc_rings == NULL) {
21966349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
21976349Sqs148142 		    "<== hxge_txdma_hw_start: NULL ring pointers"));
21986349Sqs148142 		return (HXGE_ERROR);
21996349Sqs148142 	}
22006349Sqs148142 	ndmas = tx_rings->ndmas;
22016349Sqs148142 	if (!ndmas) {
22026349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
22036349Sqs148142 		    "<== hxge_txdma_hw_start: no dma channel allocated"));
22046349Sqs148142 		return (HXGE_ERROR);
22056349Sqs148142 	}
22066349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_start: "
22076349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
22086349Sqs148142 	    tx_rings, tx_desc_rings, ndmas));
22096349Sqs148142 
22106349Sqs148142 	tx_mbox_areas_p = hxgep->tx_mbox_areas_p;
22116349Sqs148142 	tx_mbox_p = tx_mbox_areas_p->txmbox_areas_p;
22126349Sqs148142 
22136349Sqs148142 	/*
22146349Sqs148142 	 * Init the DMAs.
22156349Sqs148142 	 */
22166349Sqs148142 	for (i = 0; i < ndmas; i++) {
22176349Sqs148142 		channel = tx_desc_rings[i]->tdc;
22186349Sqs148142 		status = hxge_txdma_start_channel(hxgep, channel,
22196349Sqs148142 		    (p_tx_ring_t)tx_desc_rings[i],
22206349Sqs148142 		    (p_tx_mbox_t)tx_mbox_p[i]);
22216349Sqs148142 		if (status != HXGE_OK) {
22226349Sqs148142 			goto hxge_txdma_hw_start_fail1;
22236349Sqs148142 		}
22246349Sqs148142 	}
22256349Sqs148142 
22266349Sqs148142 	(void) hxge_tx_vmac_enable(hxgep);
22276349Sqs148142 
22286349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
22296349Sqs148142 	    "==> hxge_txdma_hw_start: tx_rings $%p rings $%p",
22306349Sqs148142 	    hxgep->tx_rings, hxgep->tx_rings->rings));
22316349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
22326349Sqs148142 	    "==> hxge_txdma_hw_start: tx_rings $%p tx_desc_rings $%p",
22336349Sqs148142 	    hxgep->tx_rings, tx_desc_rings));
22346349Sqs148142 
22356349Sqs148142 	goto hxge_txdma_hw_start_exit;
22366349Sqs148142 
22376349Sqs148142 hxge_txdma_hw_start_fail1:
22386349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
22396349Sqs148142 	    "==> hxge_txdma_hw_start: disable (status 0x%x channel %d i %d)",
22406349Sqs148142 	    status, channel, i));
22416349Sqs148142 
22426349Sqs148142 	for (; i >= 0; i--) {
22436349Sqs148142 		channel = tx_desc_rings[i]->tdc;
22446349Sqs148142 		(void) hxge_txdma_stop_channel(hxgep, channel,
22456349Sqs148142 		    (p_tx_ring_t)tx_desc_rings[i],
22466349Sqs148142 		    (p_tx_mbox_t)tx_mbox_p[i]);
22476349Sqs148142 	}
22486349Sqs148142 
22496349Sqs148142 hxge_txdma_hw_start_exit:
22506349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
22516349Sqs148142 	    "==> hxge_txdma_hw_start: (status 0x%x)", status));
22526349Sqs148142 
22536349Sqs148142 	return (status);
22546349Sqs148142 }
22556349Sqs148142 
22566349Sqs148142 static void
22576349Sqs148142 hxge_txdma_hw_stop(p_hxge_t hxgep)
22586349Sqs148142 {
22596349Sqs148142 	int			i, ndmas;
22606349Sqs148142 	uint16_t		channel;
22616349Sqs148142 	p_tx_rings_t		tx_rings;
22626349Sqs148142 	p_tx_ring_t		*tx_desc_rings;
22636349Sqs148142 	p_tx_mbox_areas_t	tx_mbox_areas_p;
22646349Sqs148142 	p_tx_mbox_t		*tx_mbox_p;
22656349Sqs148142 
22666349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_stop"));
22676349Sqs148142 
22686349Sqs148142 	tx_rings = hxgep->tx_rings;
22696349Sqs148142 	if (tx_rings == NULL) {
22706349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
22716349Sqs148142 		    "<== hxge_txdma_hw_stop: NULL ring pointer"));
22726349Sqs148142 		return;
22736349Sqs148142 	}
22746349Sqs148142 
22756349Sqs148142 	tx_desc_rings = tx_rings->rings;
22766349Sqs148142 	if (tx_desc_rings == NULL) {
22776349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
22786349Sqs148142 		    "<== hxge_txdma_hw_stop: NULL ring pointers"));
22796349Sqs148142 		return;
22806349Sqs148142 	}
22816349Sqs148142 
22826349Sqs148142 	ndmas = tx_rings->ndmas;
22836349Sqs148142 	if (!ndmas) {
22846349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
22856349Sqs148142 		    "<== hxge_txdma_hw_stop: no dma channel allocated"));
22866349Sqs148142 		return;
22876349Sqs148142 	}
22886349Sqs148142 
22896349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_stop: "
22906349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p", tx_rings, tx_desc_rings));
22916349Sqs148142 
22926349Sqs148142 	tx_mbox_areas_p = hxgep->tx_mbox_areas_p;
22936349Sqs148142 	tx_mbox_p = tx_mbox_areas_p->txmbox_areas_p;
22946349Sqs148142 
22956349Sqs148142 	for (i = 0; i < ndmas; i++) {
22966349Sqs148142 		channel = tx_desc_rings[i]->tdc;
22976349Sqs148142 		(void) hxge_txdma_stop_channel(hxgep, channel,
22986349Sqs148142 		    (p_tx_ring_t)tx_desc_rings[i],
22996349Sqs148142 		    (p_tx_mbox_t)tx_mbox_p[i]);
23006349Sqs148142 	}
23016349Sqs148142 
23026349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_hw_stop: "
23036349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p", tx_rings, tx_desc_rings));
23046349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_txdma_hw_stop"));
23056349Sqs148142 }
23066349Sqs148142 
23076349Sqs148142 static hxge_status_t
23086349Sqs148142 hxge_txdma_start_channel(p_hxge_t hxgep, uint16_t channel,
23096349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p)
23106349Sqs148142 {
23116349Sqs148142 	hxge_status_t status = HXGE_OK;
23126349Sqs148142 
23136349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
23146349Sqs148142 	    "==> hxge_txdma_start_channel (channel %d)", channel));
23156349Sqs148142 	/*
23166349Sqs148142 	 * TXDMA/TXC must be in stopped state.
23176349Sqs148142 	 */
23186349Sqs148142 	(void) hxge_txdma_stop_inj_err(hxgep, channel);
23196349Sqs148142 
23206349Sqs148142 	/*
23216349Sqs148142 	 * Reset TXDMA channel
23226349Sqs148142 	 */
23236349Sqs148142 	tx_ring_p->tx_cs.value = 0;
23246349Sqs148142 	tx_ring_p->tx_cs.bits.reset = 1;
23256349Sqs148142 	status = hxge_reset_txdma_channel(hxgep, channel,
23266349Sqs148142 	    tx_ring_p->tx_cs.value);
23276349Sqs148142 	if (status != HXGE_OK) {
23286349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
23296349Sqs148142 		    "==> hxge_txdma_start_channel (channel %d)"
23306349Sqs148142 		    " reset channel failed 0x%x", channel, status));
23316349Sqs148142 
23326349Sqs148142 		goto hxge_txdma_start_channel_exit;
23336349Sqs148142 	}
23346349Sqs148142 
23356349Sqs148142 	/*
23366349Sqs148142 	 * Initialize the TXDMA channel specific FZC control configurations.
23376349Sqs148142 	 * These FZC registers are pertaining to each TX channel (i.e. logical
23386349Sqs148142 	 * pages).
23396349Sqs148142 	 */
23406349Sqs148142 	status = hxge_init_fzc_txdma_channel(hxgep, channel,
23416349Sqs148142 	    tx_ring_p, tx_mbox_p);
23426349Sqs148142 	if (status != HXGE_OK) {
23436349Sqs148142 		goto hxge_txdma_start_channel_exit;
23446349Sqs148142 	}
23456349Sqs148142 
23466349Sqs148142 	/*
23476349Sqs148142 	 * Initialize the event masks.
23486349Sqs148142 	 */
23496349Sqs148142 	tx_ring_p->tx_evmask.value = 0;
23506349Sqs148142 	status = hxge_init_txdma_channel_event_mask(hxgep,
23516349Sqs148142 	    channel, &tx_ring_p->tx_evmask);
23526349Sqs148142 	if (status != HXGE_OK) {
23536349Sqs148142 		goto hxge_txdma_start_channel_exit;
23546349Sqs148142 	}
23556349Sqs148142 
23566349Sqs148142 	/*
23576349Sqs148142 	 * Load TXDMA descriptors, buffers, mailbox, initialise the DMA
23586349Sqs148142 	 * channels and enable each DMA channel.
23596349Sqs148142 	 */
23606349Sqs148142 	status = hxge_enable_txdma_channel(hxgep, channel,
23616349Sqs148142 	    tx_ring_p, tx_mbox_p);
23626349Sqs148142 	if (status != HXGE_OK) {
23636349Sqs148142 		goto hxge_txdma_start_channel_exit;
23646349Sqs148142 	}
23656349Sqs148142 
23666349Sqs148142 hxge_txdma_start_channel_exit:
23676349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_txdma_start_channel"));
23686349Sqs148142 
23696349Sqs148142 	return (status);
23706349Sqs148142 }
23716349Sqs148142 
23726349Sqs148142 /*ARGSUSED*/
23736349Sqs148142 static hxge_status_t
23746349Sqs148142 hxge_txdma_stop_channel(p_hxge_t hxgep, uint16_t channel,
23756349Sqs148142     p_tx_ring_t tx_ring_p, p_tx_mbox_t tx_mbox_p)
23766349Sqs148142 {
23776349Sqs148142 	int status = HXGE_OK;
23786349Sqs148142 
23796349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
23806349Sqs148142 	    "==> hxge_txdma_stop_channel: channel %d", channel));
23816349Sqs148142 
23826349Sqs148142 	/*
23836349Sqs148142 	 * Stop (disable) TXDMA and TXC. If the stop bit is set and the
23846349Sqs148142 	 * STOP_N_GO bit is not set, resetting TXDMA will not set the reset state.
23856349Sqs148142 	 */
23866349Sqs148142 	(void) hxge_txdma_stop_inj_err(hxgep, channel);
23876349Sqs148142 
23886349Sqs148142 	/*
23896349Sqs148142 	 * Reset TXDMA channel
23906349Sqs148142 	 */
23916349Sqs148142 	tx_ring_p->tx_cs.value = 0;
23926349Sqs148142 	tx_ring_p->tx_cs.bits.reset = 1;
23936349Sqs148142 	status = hxge_reset_txdma_channel(hxgep, channel,
23946349Sqs148142 	    tx_ring_p->tx_cs.value);
23956349Sqs148142 	if (status != HXGE_OK) {
23966349Sqs148142 		goto hxge_txdma_stop_channel_exit;
23976349Sqs148142 	}
23986349Sqs148142 
23996349Sqs148142 hxge_txdma_stop_channel_exit:
24006349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "<== hxge_txdma_stop_channel"));
24016349Sqs148142 
24026349Sqs148142 	return (status);
24036349Sqs148142 }
24046349Sqs148142 
24056349Sqs148142 static p_tx_ring_t
24066349Sqs148142 hxge_txdma_get_ring(p_hxge_t hxgep, uint16_t channel)
24076349Sqs148142 {
24086349Sqs148142 	int		index, ndmas;
24096349Sqs148142 	uint16_t	tdc;
24106349Sqs148142 	p_tx_rings_t	tx_rings;
24116349Sqs148142 
24126349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_get_ring"));
24136349Sqs148142 
24146349Sqs148142 	tx_rings = hxgep->tx_rings;
24156349Sqs148142 	if (tx_rings == NULL) {
24166349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
24176349Sqs148142 		    "<== hxge_txdma_get_ring: NULL ring pointer"));
24186349Sqs148142 		return (NULL);
24196349Sqs148142 	}
24206349Sqs148142 	ndmas = tx_rings->ndmas;
24216349Sqs148142 	if (!ndmas) {
24226349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
24236349Sqs148142 		    "<== hxge_txdma_get_ring: no channel allocated"));
24246349Sqs148142 		return (NULL);
24256349Sqs148142 	}
24266349Sqs148142 	if (tx_rings->rings == NULL) {
24276349Sqs148142 		HXGE_DEBUG_MSG((hxgep, TX_CTL,
24286349Sqs148142 		    "<== hxge_txdma_get_ring: NULL rings pointer"));
24296349Sqs148142 		return (NULL);
24306349Sqs148142 	}
24316349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_get_ring: "
24326349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
24336349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
24346349Sqs148142 
24356349Sqs148142 	for (index = 0; index < ndmas; index++) {
24366349Sqs148142 		tdc = tx_rings->rings[index]->tdc;
24376349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24386349Sqs148142 		    "==> hxge_txdma_get_ring: channel %d", tdc));
24396349Sqs148142 		if (channel == tdc) {
24406349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
24416349Sqs148142 			    "<== hxge_txdma_get_ring: tdc %d ring $%p",
24426349Sqs148142 			    tdc, tx_rings->rings[index]));
24436349Sqs148142 			return (p_tx_ring_t)(tx_rings->rings[index]);
24446349Sqs148142 		}
24456349Sqs148142 	}
24466349Sqs148142 
24476349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_get_ring"));
24486349Sqs148142 
24496349Sqs148142 	return (NULL);
24506349Sqs148142 }
24516349Sqs148142 
24526349Sqs148142 static p_tx_mbox_t
24536349Sqs148142 hxge_txdma_get_mbox(p_hxge_t hxgep, uint16_t channel)
24546349Sqs148142 {
24556349Sqs148142 	int			index, tdc, ndmas;
24566349Sqs148142 	p_tx_rings_t		tx_rings;
24576349Sqs148142 	p_tx_mbox_areas_t	tx_mbox_areas_p;
24586349Sqs148142 	p_tx_mbox_t		*tx_mbox_p;
24596349Sqs148142 
24606349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_get_mbox"));
24616349Sqs148142 
24626349Sqs148142 	tx_rings = hxgep->tx_rings;
24636349Sqs148142 	if (tx_rings == NULL) {
24646349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24656349Sqs148142 		    "<== hxge_txdma_get_mbox: NULL ring pointer"));
24666349Sqs148142 		return (NULL);
24676349Sqs148142 	}
24686349Sqs148142 	tx_mbox_areas_p = hxgep->tx_mbox_areas_p;
24696349Sqs148142 	if (tx_mbox_areas_p == NULL) {
24706349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24716349Sqs148142 		    "<== hxge_txdma_get_mbox: NULL mbox pointer"));
24726349Sqs148142 		return (NULL);
24736349Sqs148142 	}
24746349Sqs148142 	tx_mbox_p = tx_mbox_areas_p->txmbox_areas_p;
24756349Sqs148142 
24766349Sqs148142 	ndmas = tx_rings->ndmas;
24776349Sqs148142 	if (!ndmas) {
24786349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24796349Sqs148142 		    "<== hxge_txdma_get_mbox: no channel allocated"));
24806349Sqs148142 		return (NULL);
24816349Sqs148142 	}
24826349Sqs148142 	if (tx_rings->rings == NULL) {
24836349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24846349Sqs148142 		    "<== hxge_txdma_get_mbox: NULL rings pointer"));
24856349Sqs148142 		return (NULL);
24866349Sqs148142 	}
24876349Sqs148142 	HXGE_DEBUG_MSG((hxgep, MEM3_CTL, "==> hxge_txdma_get_mbox: "
24886349Sqs148142 	    "tx_rings $%p tx_desc_rings $%p ndmas %d",
24896349Sqs148142 	    tx_rings, tx_rings->rings, ndmas));
24906349Sqs148142 
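	/*
	 * The mailbox areas are allocated in parallel with the ring array, so
	 * the index of the ring whose tdc matches the requested channel is
	 * also the index of that channel's mailbox.
	 */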
24916349Sqs148142 	for (index = 0; index < ndmas; index++) {
24926349Sqs148142 		tdc = tx_rings->rings[index]->tdc;
24936349Sqs148142 		HXGE_DEBUG_MSG((hxgep, MEM3_CTL,
24946349Sqs148142 		    "==> hxge_txdma_get_mbox: channel %d", tdc));
24956349Sqs148142 		if (channel == tdc) {
24966349Sqs148142 			HXGE_DEBUG_MSG((hxgep, TX_CTL,
24976349Sqs148142 			    "<== hxge_txdma_get_mbox: tdc %d ring $%p",
24986349Sqs148142 			    tdc, tx_rings->rings[index]));
24996349Sqs148142 			return (p_tx_mbox_t)(tx_mbox_p[index]);
25006349Sqs148142 		}
25016349Sqs148142 	}
25026349Sqs148142 
25036349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_get_mbox"));
25046349Sqs148142 
25056349Sqs148142 	return (NULL);
25066349Sqs148142 }
25076349Sqs148142 
25086349Sqs148142 /*ARGSUSED*/
25096349Sqs148142 static hxge_status_t
25106349Sqs148142 hxge_tx_err_evnts(p_hxge_t hxgep, uint_t index, p_hxge_ldv_t ldvp,
25116349Sqs148142     tdc_stat_t cs)
25126349Sqs148142 {
25136349Sqs148142 	hpi_handle_t		handle;
25146349Sqs148142 	uint8_t			channel;
25156349Sqs148142 	p_tx_ring_t		*tx_rings;
25166349Sqs148142 	p_tx_ring_t		tx_ring_p;
25176349Sqs148142 	p_hxge_tx_ring_stats_t	tdc_stats;
25186349Sqs148142 	boolean_t		txchan_fatal = B_FALSE;
25196349Sqs148142 	hxge_status_t		status = HXGE_OK;
25206349Sqs148142 	tdc_drop_cnt_t		drop_cnt;
25216349Sqs148142 
25226349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "==> hxge_tx_err_evnts"));
25236349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
25246349Sqs148142 	channel = ldvp->channel;
25256349Sqs148142 
25266349Sqs148142 	tx_rings = hxgep->tx_rings->rings;
25276349Sqs148142 	tx_ring_p = tx_rings[index];
25286349Sqs148142 	tdc_stats = tx_ring_p->tdc_stats;
25296349Sqs148142 
25306349Sqs148142 	/* Get the error counts if any */
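	/*
	 * TDC_DROP_CNT packs the per-channel header-size-error, runt and
	 * abort drop counters; accumulate them into the soft statistics.
	 */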
25316349Sqs148142 	TXDMA_REG_READ64(handle, TDC_DROP_CNT, channel, &drop_cnt.value);
25326349Sqs148142 	tdc_stats->count_hdr_size_err += drop_cnt.bits.hdr_size_error_count;
25336349Sqs148142 	tdc_stats->count_runt += drop_cnt.bits.runt_count;
25346349Sqs148142 	tdc_stats->count_abort += drop_cnt.bits.abort_count;
25356349Sqs148142 
25366349Sqs148142 	if (cs.bits.peu_resp_err) {
25376349Sqs148142 		tdc_stats->peu_resp_err++;
25386349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
25396349Sqs148142 		    HXGE_FM_EREPORT_TDMC_PEU_RESP_ERR);
25406349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25416349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
25426349Sqs148142 		    "fatal error: peu_resp_err", channel));
25436349Sqs148142 		txchan_fatal = B_TRUE;
25446349Sqs148142 	}
25456349Sqs148142 
25466349Sqs148142 	if (cs.bits.pkt_size_hdr_err) {
25476349Sqs148142 		tdc_stats->pkt_size_hdr_err++;
25486349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
25496349Sqs148142 		    HXGE_FM_EREPORT_TDMC_PKT_SIZE_HDR_ERR);
25506349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25516349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
25526349Sqs148142 		    "fatal error: pkt_size_hdr_err", channel));
25536349Sqs148142 		txchan_fatal = B_TRUE;
25546349Sqs148142 	}
25556349Sqs148142 
25566349Sqs148142 	if (cs.bits.runt_pkt_drop_err) {
25576349Sqs148142 		tdc_stats->runt_pkt_drop_err++;
25586349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
25596349Sqs148142 		    HXGE_FM_EREPORT_TDMC_RUNT_PKT_DROP_ERR);
25606349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25616349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
25626349Sqs148142 		    "fatal error: runt_pkt_drop_err", channel));
25636349Sqs148142 		txchan_fatal = B_TRUE;
25646349Sqs148142 	}
25656349Sqs148142 
25666349Sqs148142 	if (cs.bits.pkt_size_err) {
25676349Sqs148142 		tdc_stats->pkt_size_err++;
25686349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
25696349Sqs148142 		    HXGE_FM_EREPORT_TDMC_PKT_SIZE_ERR);
25706349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25716349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
25726349Sqs148142 		    "fatal error: pkt_size_err", channel));
25736349Sqs148142 		txchan_fatal = B_TRUE;
25746349Sqs148142 	}
25756349Sqs148142 
25766349Sqs148142 	if (cs.bits.tx_rng_oflow) {
25776349Sqs148142 		tdc_stats->tx_rng_oflow++;
25786349Sqs148142 		if (tdc_stats->tx_rng_oflow == 1)
25796349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25806349Sqs148142 			    "==> hxge_tx_err_evnts(channel %d): "
25816349Sqs148142 			    "fatal error: tx_rng_oflow", channel));
25826349Sqs148142 	}
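	/*
	 * Unlike the errors handled above and below, a ring overflow is only
	 * counted and logged here; it does not set txchan_fatal and therefore
	 * does not by itself trigger the channel recovery at the end of this
	 * routine.
	 */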
25836349Sqs148142 
25846349Sqs148142 	if (cs.bits.pref_par_err) {
25856349Sqs148142 		tdc_stats->pref_par_err++;
25866349Sqs148142 
25876349Sqs148142 		/* Get the address of parity error read data */
25886349Sqs148142 		TXDMA_REG_READ64(hxgep->hpi_handle, TDC_PREF_PAR_LOG,
25896349Sqs148142 		    channel, &tdc_stats->errlog.value);
25906349Sqs148142 
25916349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
25926349Sqs148142 		    HXGE_FM_EREPORT_TDMC_PREF_PAR_ERR);
25936349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
25946349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
25956349Sqs148142 		    "fatal error: pref_par_err", channel));
25966349Sqs148142 		txchan_fatal = B_TRUE;
25976349Sqs148142 	}
25986349Sqs148142 
25996349Sqs148142 	if (cs.bits.tdr_pref_cpl_to) {
26006349Sqs148142 		tdc_stats->tdr_pref_cpl_to++;
26016349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
26026349Sqs148142 		    HXGE_FM_EREPORT_TDMC_TDR_PREF_CPL_TO);
26036349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26046349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
26058103SQiyan.Sun@Sun.COM 		    "fatal error: tdr_pref_cpl_to", channel));
26066349Sqs148142 		txchan_fatal = B_TRUE;
26076349Sqs148142 	}
26086349Sqs148142 
26096349Sqs148142 	if (cs.bits.pkt_cpl_to) {
26106349Sqs148142 		tdc_stats->pkt_cpl_to++;
26116349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
26126349Sqs148142 		    HXGE_FM_EREPORT_TDMC_PKT_CPL_TO);
26136349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26146349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
26156349Sqs148142 		    "fatal error: pkt_cpl_to", channel));
26166349Sqs148142 		txchan_fatal = B_TRUE;
26176349Sqs148142 	}
26186349Sqs148142 
26196349Sqs148142 	if (cs.bits.invalid_sop) {
26206349Sqs148142 		tdc_stats->invalid_sop++;
26216349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
26226349Sqs148142 		    HXGE_FM_EREPORT_TDMC_INVALID_SOP);
26236349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26246349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
26256349Sqs148142 		    "fatal error: invalid_sop", channel));
26266349Sqs148142 		txchan_fatal = B_TRUE;
26276349Sqs148142 	}
26286349Sqs148142 
26296349Sqs148142 	if (cs.bits.unexpected_sop) {
26306349Sqs148142 		tdc_stats->unexpected_sop++;
26316349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, channel,
26326349Sqs148142 		    HXGE_FM_EREPORT_TDMC_UNEXPECTED_SOP);
26336349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26346349Sqs148142 		    "==> hxge_tx_err_evnts(channel %d): "
26356349Sqs148142 		    "fatal error: unexpected_sop", channel));
26366349Sqs148142 		txchan_fatal = B_TRUE;
26376349Sqs148142 	}
26386349Sqs148142 
26396349Sqs148142 	/* Clear error injection source in case this is an injected error */
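	/*
	 * Writing 0 to TDC_STAT_INT_DBG removes any injected error source so
	 * that an error injected for debugging does not simply re-assert once
	 * the channel has been recovered below.
	 */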
26406349Sqs148142 	TXDMA_REG_WRITE64(hxgep->hpi_handle, TDC_STAT_INT_DBG, channel, 0);
26416349Sqs148142 
26426349Sqs148142 	if (txchan_fatal) {
26436349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26446349Sqs148142 		    " hxge_tx_err_evnts: "
26456349Sqs148142 		    " fatal error on channel %d cs 0x%llx\n",
26466349Sqs148142 		    channel, cs.value));
26476349Sqs148142 		status = hxge_txdma_fatal_err_recover(hxgep, channel,
26486349Sqs148142 		    tx_ring_p);
26496349Sqs148142 		if (status == HXGE_OK) {
26506349Sqs148142 			FM_SERVICE_RESTORED(hxgep);
26516349Sqs148142 		}
26526349Sqs148142 	}
26536349Sqs148142 
26546349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "<== hxge_tx_err_evnts"));
26556349Sqs148142 
26566349Sqs148142 	return (status);
26576349Sqs148142 }
26586349Sqs148142 
26596349Sqs148142 hxge_status_t
26606349Sqs148142 hxge_txdma_handle_sys_errors(p_hxge_t hxgep)
26616349Sqs148142 {
26626349Sqs148142 	hpi_handle_t		handle;
26636349Sqs148142 	hxge_status_t		status = HXGE_OK;
26646349Sqs148142 	tdc_fifo_err_stat_t	fifo_stat;
26656349Sqs148142 	hxge_tdc_sys_stats_t	*tdc_sys_stats;
26666349Sqs148142 
26676349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "==> hxge_txdma_handle_sys_errors"));
26686349Sqs148142 
26696349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
26706349Sqs148142 
26716349Sqs148142 	/*
26726349Sqs148142 	 * The FIFO is shared by all channels.
26736349Sqs148142 	 * Get the status of Reorder Buffer and Reorder Table Buffer Errors
26746349Sqs148142 	 */
26756349Sqs148142 	HXGE_REG_RD64(handle, TDC_FIFO_ERR_STAT, &fifo_stat.value);
26766349Sqs148142 
26776349Sqs148142 	/*
26786349Sqs148142 	 * Clear the error bits. Note that writing a 1 clears the bit. Writing
26796349Sqs148142 	 * a 0 does nothing.
26806349Sqs148142 	 */
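	/*
	 * Writing back the value just read therefore acknowledges exactly the
	 * error bits captured above and leaves the rest untouched.  For
	 * example, to acknowledge only the reorder table parity error, one
	 * could write a value with just that bit set:
	 *
	 *	tdc_fifo_err_stat_t ack;
	 *	ack.value = 0;
	 *	ack.bits.reord_tbl_par_err = 1;
	 *	HXGE_REG_WR64(handle, TDC_FIFO_ERR_STAT, ack.value);
	 */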
26816349Sqs148142 	HXGE_REG_WR64(handle, TDC_FIFO_ERR_STAT, fifo_stat.value);
26826349Sqs148142 
26836349Sqs148142 	tdc_sys_stats = &hxgep->statsp->tdc_sys_stats;
26846349Sqs148142 	if (fifo_stat.bits.reord_tbl_par_err) {
26856349Sqs148142 		tdc_sys_stats->reord_tbl_par_err++;
26867918SQiyan.Sun@Sun.COM 		HXGE_FM_REPORT_ERROR(hxgep, NULL,
26877918SQiyan.Sun@Sun.COM 		    HXGE_FM_EREPORT_TDMC_REORD_TBL_PAR);
26887918SQiyan.Sun@Sun.COM 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26897918SQiyan.Sun@Sun.COM 		    "==> hxge_txdma_handle_sys_errors: fatal error: "
26907918SQiyan.Sun@Sun.COM 		    "reord_tbl_par_err"));
26916349Sqs148142 	}
26926349Sqs148142 
26936349Sqs148142 	if (fifo_stat.bits.reord_buf_ded_err) {
26946349Sqs148142 		tdc_sys_stats->reord_buf_ded_err++;
26956349Sqs148142 		HXGE_FM_REPORT_ERROR(hxgep, NULL,
26966349Sqs148142 		    HXGE_FM_EREPORT_TDMC_REORD_BUF_DED);
26976349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
26986349Sqs148142 		    "==> hxge_txdma_handle_sys_errors: "
26996349Sqs148142 		    "fatal error: reord_buf_ded_err"));
27006349Sqs148142 	}
27016349Sqs148142 
27026349Sqs148142 	if (fifo_stat.bits.reord_buf_sec_err) {
27036349Sqs148142 		tdc_sys_stats->reord_buf_sec_err++;
27046349Sqs148142 		if (tdc_sys_stats->reord_buf_sec_err == 1)
27056349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
27066349Sqs148142 			    "==> hxge_txdma_handle_sys_errors: "
27076349Sqs148142 			    "reord_buf_sec_err"));
27086349Sqs148142 	}
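	/*
	 * Single-bit (SEC) reorder buffer errors are only counted, and logged
	 * on their first occurrence; only the reorder table parity and
	 * double-bit (DED) errors above force the TDC restart below.
	 */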
27096349Sqs148142 
27107918SQiyan.Sun@Sun.COM 	if (fifo_stat.bits.reord_tbl_par_err ||
27117918SQiyan.Sun@Sun.COM 	    fifo_stat.bits.reord_buf_ded_err) {
27126349Sqs148142 		status = hxge_tx_port_fatal_err_recover(hxgep);
27136349Sqs148142 		if (status == HXGE_OK) {
27146349Sqs148142 			FM_SERVICE_RESTORED(hxgep);
27156349Sqs148142 		}
27166349Sqs148142 	}
27176349Sqs148142 
27186349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_CTL, "<== hxge_txdma_handle_sys_errors"));
27196349Sqs148142 
27206349Sqs148142 	return (status);
27216349Sqs148142 }
27226349Sqs148142 
27236349Sqs148142 static hxge_status_t
27246349Sqs148142 hxge_txdma_fatal_err_recover(p_hxge_t hxgep, uint16_t channel,
27256349Sqs148142     p_tx_ring_t tx_ring_p)
27266349Sqs148142 {
27276349Sqs148142 	hpi_handle_t	handle;
27286349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
27296349Sqs148142 	p_tx_mbox_t	tx_mbox_p;
27306349Sqs148142 	hxge_status_t	status = HXGE_OK;
27316349Sqs148142 
27326349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "==> hxge_txdma_fatal_err_recover"));
27336349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
27346349Sqs148142 	    "Recovering from TxDMAChannel#%d error...", channel));
27356349Sqs148142 
27366349Sqs148142 	/*
27376349Sqs148142 	 * Stop the DMA channel and wait for the stop to complete.  If the
27386349Sqs148142 	 * stop done bit is not set, then report an error.
27396349Sqs148142 	 */
27406349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
27416349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "stopping txdma channel(%d)",
27426349Sqs148142 	    channel));
27436349Sqs148142 	MUTEX_ENTER(&tx_ring_p->lock);
27446349Sqs148142 	rs = hpi_txdma_channel_control(handle, TXDMA_STOP, channel);
27456349Sqs148142 	if (rs != HPI_SUCCESS) {
27466349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
27476349Sqs148142 		    "==> hxge_txdma_fatal_err_recover (channel %d): "
27486349Sqs148142 		    "stop failed ", channel));
27496349Sqs148142 
27506349Sqs148142 		goto fail;
27516349Sqs148142 	}
27526349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "reclaiming txdma channel(%d)",
27536349Sqs148142 	    channel));
27546349Sqs148142 	(void) hxge_txdma_reclaim(hxgep, tx_ring_p, 0);
27556349Sqs148142 
27566349Sqs148142 	/*
27576349Sqs148142 	 * Reset TXDMA channel
27586349Sqs148142 	 */
27596349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "resetting txdma channel(%d)",
27606349Sqs148142 	    channel));
27616349Sqs148142 	if ((rs = hpi_txdma_channel_control(handle, TXDMA_RESET, channel)) !=
27626349Sqs148142 	    HPI_SUCCESS) {
27636349Sqs148142 		HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
27646349Sqs148142 		    "==> hxge_txdma_fatal_err_recover (channel %d)"
27656349Sqs148142 		    " reset channel failed 0x%x", channel, rs));
27666349Sqs148142 
27676349Sqs148142 		goto fail;
27686349Sqs148142 	}
27696349Sqs148142 	/*
27706349Sqs148142 	 * Reset the tail (kick) register to 0.  Hardware will not reset it,
27716349Sqs148142 	 * and a Tx ring overflow fatal error results if the tail is not 0
	 * after the reset.
27726349Sqs148142 	 */
27736349Sqs148142 	TXDMA_REG_WRITE64(handle, TDC_TDR_KICK, channel, 0);
27746349Sqs148142 
27756349Sqs148142 	/*
27766349Sqs148142 	 * Restart TXDMA channel
27776349Sqs148142 	 *
27786349Sqs148142 	 * Initialize the TXDMA channel specific FZC control configurations.
27796349Sqs148142 	 * These FZC registers pertain to each TX channel (i.e. logical
27806349Sqs148142 	 * pages).
27816349Sqs148142 	 */
27826349Sqs148142 	tx_mbox_p = hxge_txdma_get_mbox(hxgep, channel);
27836349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "restarting txdma channel(%d)",
27846349Sqs148142 	    channel));
27856349Sqs148142 	status = hxge_init_fzc_txdma_channel(hxgep, channel,
27866349Sqs148142 	    tx_ring_p, tx_mbox_p);
27876349Sqs148142 	if (status != HXGE_OK)
27886349Sqs148142 		goto fail;
27896349Sqs148142 
27906349Sqs148142 	/*
27916349Sqs148142 	 * Initialize the event masks.
27926349Sqs148142 	 */
27936349Sqs148142 	tx_ring_p->tx_evmask.value = 0;
27946349Sqs148142 	status = hxge_init_txdma_channel_event_mask(hxgep, channel,
27956349Sqs148142 	    &tx_ring_p->tx_evmask);
27966349Sqs148142 	if (status != HXGE_OK)
27976349Sqs148142 		goto fail;
27986349Sqs148142 
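	/*
	 * Bring the driver's shadow indexes back in step with the hardware
	 * ring, whose tail (kick) register was cleared to 0 above after the
	 * channel reset.
	 */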
27996349Sqs148142 	tx_ring_p->wr_index_wrap = B_FALSE;
28006349Sqs148142 	tx_ring_p->wr_index = 0;
28016349Sqs148142 	tx_ring_p->rd_index = 0;
28026349Sqs148142 
28036349Sqs148142 	/*
28046349Sqs148142 	 * Load the TXDMA descriptors, buffers and mailbox, then initialize
28056349Sqs148142 	 * and enable the DMA channel.
28066349Sqs148142 	 */
28076349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "enabling txdma channel(%d)",
28086349Sqs148142 	    channel));
28096349Sqs148142 	status = hxge_enable_txdma_channel(hxgep, channel,
28106349Sqs148142 	    tx_ring_p, tx_mbox_p);
28116349Sqs148142 	MUTEX_EXIT(&tx_ring_p->lock);
28126349Sqs148142 	if (status != HXGE_OK)
28136349Sqs148142 		goto fail;
28146349Sqs148142 
28156349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
28166349Sqs148142 	    "Recovery Successful, TxDMAChannel#%d Restored", channel));
28176349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "==> hxge_txdma_fatal_err_recover"));
28186349Sqs148142 
28196349Sqs148142 	return (HXGE_OK);
28206349Sqs148142 
28216349Sqs148142 fail:
28226349Sqs148142 	MUTEX_EXIT(&tx_ring_p->lock);
28236349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL,
28246349Sqs148142 	    "hxge_txdma_fatal_err_recover (channel %d): "
28256349Sqs148142 	    "failed to recover this txdma channel", channel));
28266349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "Recovery failed"));
28276349Sqs148142 
28286349Sqs148142 	return (status);
28296349Sqs148142 }
28306349Sqs148142 
28316349Sqs148142 static hxge_status_t
28326349Sqs148142 hxge_tx_port_fatal_err_recover(p_hxge_t hxgep)
28336349Sqs148142 {
28346349Sqs148142 	hpi_handle_t	handle;
28356349Sqs148142 	hpi_status_t	rs = HPI_SUCCESS;
28366349Sqs148142 	hxge_status_t	status = HXGE_OK;
28376349Sqs148142 	p_tx_ring_t	*tx_desc_rings;
28386349Sqs148142 	p_tx_rings_t	tx_rings;
28396349Sqs148142 	p_tx_ring_t	tx_ring_p;
28406349Sqs148142 	int		i, ndmas;
28416349Sqs148142 	uint16_t	channel;
28426349Sqs148142 	block_reset_t	reset_reg;
28436349Sqs148142 
28446349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL,
28456349Sqs148142 	    "==> hxge_tx_port_fatal_err_recover"));
28466349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
28476349Sqs148142 	    "Recovering from TxPort error..."));
28486349Sqs148142 
28496349Sqs148142 	handle = HXGE_DEV_HPI_HANDLE(hxgep);
28506349Sqs148142 
28516349Sqs148142 	/* Reset TDC block from PEU for this fatal error */
28526349Sqs148142 	reset_reg.value = 0;
28536349Sqs148142 	reset_reg.bits.tdc_rst = 1;
28546349Sqs148142 	HXGE_REG_WR32(handle, BLOCK_RESET, reset_reg.value);
28556349Sqs148142 
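	/*
	 * Give the TDC block reset time to take effect before the individual
	 * channels are stopped, reclaimed and reprogrammed below.
	 */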
28566349Sqs148142 	HXGE_DELAY(1000);
28576349Sqs148142 
28586349Sqs148142 	/*
28596349Sqs148142 	 * Stop each DMA channel and wait for the stop to complete.  If the
28606349Sqs148142 	 * stop done bit is not set, then report an error.
28616349Sqs148142 	 */
28626349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "stopping all DMA channels..."));
28636349Sqs148142 
28646349Sqs148142 	tx_rings = hxgep->tx_rings;
28656349Sqs148142 	tx_desc_rings = tx_rings->rings;
28666349Sqs148142 	ndmas = tx_rings->ndmas;
28676349Sqs148142 
28686349Sqs148142 	for (i = 0; i < ndmas; i++) {
28696349Sqs148142 		if (tx_desc_rings[i] == NULL) {
28706349Sqs148142 			continue;
28716349Sqs148142 		}
28726349Sqs148142 		tx_ring_p = tx_rings->rings[i];
28736349Sqs148142 		MUTEX_ENTER(&tx_ring_p->lock);
28746349Sqs148142 	}
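	/*
	 * All ring locks are held from this point on so that no transmit path
	 * can touch any channel while the whole TDC block is stopped,
	 * reclaimed and restarted; they are released once the restart
	 * completes, or in the fail path.
	 */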
28756349Sqs148142 
28766349Sqs148142 	for (i = 0; i < ndmas; i++) {
28776349Sqs148142 		if (tx_desc_rings[i] == NULL) {
28786349Sqs148142 			continue;
28796349Sqs148142 		}
28806349Sqs148142 		channel = tx_desc_rings[i]->tdc;
28816349Sqs148142 		tx_ring_p = tx_rings->rings[i];
28826349Sqs148142 		rs = hpi_txdma_channel_control(handle, TXDMA_STOP, channel);
28836349Sqs148142 		if (rs != HPI_SUCCESS) {
28846349Sqs148142 			HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
28856349Sqs148142 			    "==> hxge_txdma_fatal_err_recover (channel %d): "
28866349Sqs148142 			    "stop failed ", channel));
28876349Sqs148142 
28886349Sqs148142 			goto fail;
28896349Sqs148142 		}
28906349Sqs148142 	}
28916349Sqs148142 
28926349Sqs148142 	/*
28936349Sqs148142 	 * Do reclaim on all of the DMA channels.
28946349Sqs148142 	 */
28956349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL, "reclaiming all DMA channels..."));
28966349Sqs148142 	for (i = 0; i < ndmas; i++) {
28976349Sqs148142 		if (tx_desc_rings[i] == NULL) {
28986349Sqs148142 			continue;
28996349Sqs148142 		}
29006349Sqs148142 		tx_ring_p = tx_rings->rings[i];
29016349Sqs148142 		(void) hxge_txdma_reclaim(hxgep, tx_ring_p, 0);
29026349Sqs148142 	}
29036349Sqs148142 
29046349Sqs148142 	/* Restart the TDC */
29056349Sqs148142 	if ((status = hxge_txdma_hw_start(hxgep)) != HXGE_OK)
29066349Sqs148142 		goto fail;
29076349Sqs148142 
29086349Sqs148142 	for (i = 0; i < ndmas; i++) {
29096349Sqs148142 		if (tx_desc_rings[i] == NULL) {
29106349Sqs148142 			continue;
29116349Sqs148142 		}
29126349Sqs148142 		tx_ring_p = tx_rings->rings[i];
29136349Sqs148142 		MUTEX_EXIT(&tx_ring_p->lock);
29146349Sqs148142 	}
29156349Sqs148142 
29166349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL,
29176349Sqs148142 	    "Recovery Successful, TxPort Restored"));
29186349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL,
29196349Sqs148142 	    "<== hxge_tx_port_fatal_err_recover"));
29206349Sqs148142 	return (HXGE_OK);
29216349Sqs148142 
29226349Sqs148142 fail:
29236349Sqs148142 	for (i = 0; i < ndmas; i++) {
29246349Sqs148142 		if (tx_desc_rings[i] == NULL) {
29256349Sqs148142 			continue;
29266349Sqs148142 		}
29276349Sqs148142 		tx_ring_p = tx_rings->rings[i];
29286349Sqs148142 		MUTEX_EXIT(&tx_ring_p->lock);
29296349Sqs148142 	}
29306349Sqs148142 
29316349Sqs148142 	HXGE_ERROR_MSG((hxgep, HXGE_ERR_CTL, "Recovery failed"));
29326349Sqs148142 	HXGE_DEBUG_MSG((hxgep, TX_ERR_CTL,
29336349Sqs148142 	    "hxge_tx_port_fatal_err_recover: "
29346349Sqs148142 	    "failed to recover the tx port"));
29356349Sqs148142 
29366349Sqs148142 	return (status);
29376349Sqs148142 }
2938