/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016 Cavium, Inc
 */

#include <unistd.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include <rte_atomic.h>
#include <rte_branch_prediction.h>
#include <rte_byteorder.h>
#include <rte_common.h>
#include <rte_cycles.h>
#include <rte_errno.h>
#include <ethdev_driver.h>
#include <rte_ether.h>
#include <rte_log.h>
#include <rte_mbuf.h>
#include <rte_prefetch.h>

#include "base/nicvf_plat.h"

#include "nicvf_ethdev.h"
#include "nicvf_rxtx.h"
#include "nicvf_logs.h"

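/*
 * Build the SEND HEADER sub-descriptor for a packet in a local copy and
 * write back only word 0 of the queue entry. When a Tx offload flag is set
 * on the mbuf, the L4 checksum type, header offsets and L3 checksum enable
 * bit are filled from the mbuf metadata; otherwise those fields stay zero.
 */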
static inline void __rte_hot
fill_sq_desc_header(union sq_entry_t *entry, struct rte_mbuf *pkt)
{
	/* Local variable sqe to avoid read from sq desc memory */
	union sq_entry_t sqe;
	uint64_t ol_flags;

	/* Fill SQ header descriptor */
	sqe.buff[0] = 0;
	sqe.hdr.subdesc_type = SQ_DESC_TYPE_HEADER;
	/* Number of sub-descriptors following this one */
	sqe.hdr.subdesc_cnt = pkt->nb_segs;
	sqe.hdr.tot_len = pkt->pkt_len;

	ol_flags = pkt->ol_flags & NICVF_TX_OFFLOAD_MASK;
	if (unlikely(ol_flags)) {
		/* L4 cksum */
		uint64_t l4_flags = ol_flags & RTE_MBUF_F_TX_L4_MASK;
		if (l4_flags == RTE_MBUF_F_TX_TCP_CKSUM)
			sqe.hdr.csum_l4 = SEND_L4_CSUM_TCP;
		else if (l4_flags == RTE_MBUF_F_TX_UDP_CKSUM)
			sqe.hdr.csum_l4 = SEND_L4_CSUM_UDP;
		else
			sqe.hdr.csum_l4 = SEND_L4_CSUM_DISABLE;

		sqe.hdr.l3_offset = pkt->l2_len;
		sqe.hdr.l4_offset = pkt->l3_len + pkt->l2_len;

		/* L3 cksum */
		if (ol_flags & RTE_MBUF_F_TX_IP_CKSUM)
			sqe.hdr.csum_l3 = 1;
	}

	entry->buff[0] = sqe.buff[0];
}

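/*
 * Variant of fill_sq_desc_header() that also clears word 1 of the entry;
 * used on the multi-segment Tx path, presumably so stale contents of a
 * previously used descriptor slot are not interpreted by the hardware.
 */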
static inline void __rte_hot
fill_sq_desc_header_zero_w1(union sq_entry_t *entry,
			    struct rte_mbuf *pkt)
{
	fill_sq_desc_header(entry, pkt);
	entry->buff[1] = 0ULL;
}

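/*
 * Reclaim transmitted buffers when every Tx mbuf comes from one mempool:
 * walk from the software head up to the hardware head, collect non-NULL
 * buffer pointers and return them to the pool in a single bulk put. The
 * hardware head register appears to be byte-based, hence the >> 4 shift
 * to convert it into a 16-byte descriptor index.
 */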
void __rte_hot
nicvf_single_pool_free_xmited_buffers(struct nicvf_txq *sq)
{
	int j = 0;
	uint32_t curr_head;
	uint32_t head = sq->head;
	struct rte_mbuf **txbuffs = sq->txbuffs;
	alignas(RTE_CACHE_LINE_SIZE) void *obj_p[NICVF_MAX_TX_FREE_THRESH];

	curr_head = nicvf_addr_read(sq->sq_head) >> 4;
	while (head != curr_head) {
		if (txbuffs[head])
			obj_p[j++] = txbuffs[head];

		head = (head + 1) & sq->qlen_mask;
	}

	rte_mempool_put_bulk(sq->pool, obj_p, j);
	sq->head = curr_head;
	sq->xmit_bufs -= j;
	NICVF_TX_ASSERT(sq->xmit_bufs >= 0);
}

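/*
 * Reclaim transmitted buffers when Tx mbufs may belong to different
 * mempools: each segment is released individually with
 * rte_pktmbuf_free_seg() instead of a single bulk put.
 */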
void __rte_hot
nicvf_multi_pool_free_xmited_buffers(struct nicvf_txq *sq)
{
	uint32_t n = 0;
	uint32_t curr_head;
	uint32_t head = sq->head;
	struct rte_mbuf **txbuffs = sq->txbuffs;

	curr_head = nicvf_addr_read(sq->sq_head) >> 4;
	while (head != curr_head) {
		if (txbuffs[head]) {
			rte_pktmbuf_free_seg(txbuffs[head]);
			n++;
		}

		head = (head + 1) & sq->qlen_mask;
	}

	sq->head = curr_head;
	sq->xmit_bufs -= n;
	NICVF_TX_ASSERT(sq->xmit_bufs >= 0);
}

static inline uint32_t __rte_hot
nicvf_free_tx_desc(struct nicvf_txq *sq)
{
	return ((sq->head - sq->tail - 1) & sq->qlen_mask);
}

/* Each packet needs one send header and one gather descriptor */
#define TX_DESC_PER_PKT 2

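/*
 * Return the number of free SQ descriptors, first reclaiming completed Tx
 * buffers when the burst would not fit or when the count of outstanding
 * transmitted buffers exceeds tx_free_thresh. The mempool pointer is
 * captured lazily from the first packet for the single-pool reclaim path.
 */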
static inline uint32_t __rte_hot
nicvf_free_xmitted_buffers(struct nicvf_txq *sq, struct rte_mbuf **tx_pkts,
			   uint16_t nb_pkts)
{
	uint32_t free_desc = nicvf_free_tx_desc(sq);

	if (free_desc < nb_pkts * TX_DESC_PER_PKT ||
			sq->xmit_bufs > sq->tx_free_thresh) {
		if (unlikely(sq->pool == NULL))
			sq->pool = tx_pkts[0]->pool;

		sq->pool_free(sq);
		/* Buffers freed; check the number of free descriptors again */
		free_desc = nicvf_free_tx_desc(sq);
	}
	return free_desc;
}

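/*
 * Single-segment transmit burst: each packet is written as a HEADER
 * sub-descriptor followed by one GATHER sub-descriptor, then the doorbell
 * is rung once with the total descriptor count. Returns the number of
 * packets queued.
 */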
uint16_t __rte_hot
nicvf_xmit_pkts(void *tx_queue, struct rte_mbuf **tx_pkts, uint16_t nb_pkts)
{
	int i;
	uint32_t free_desc;
	uint32_t tail;
	struct nicvf_txq *sq = tx_queue;
	union sq_entry_t *desc_ptr = sq->desc;
	struct rte_mbuf **txbuffs = sq->txbuffs;
	struct rte_mbuf *pkt;
	uint32_t qlen_mask = sq->qlen_mask;

	tail = sq->tail;
	free_desc = nicvf_free_xmitted_buffers(sq, tx_pkts, nb_pkts);

	for (i = 0; i < nb_pkts && (int)free_desc >= TX_DESC_PER_PKT; i++) {
		pkt = tx_pkts[i];

		txbuffs[tail] = NULL;
		fill_sq_desc_header(desc_ptr + tail, pkt);
		tail = (tail + 1) & qlen_mask;

		txbuffs[tail] = pkt;
		fill_sq_desc_gather(desc_ptr + tail, pkt);
		tail = (tail + 1) & qlen_mask;
		free_desc -= TX_DESC_PER_PKT;
	}

	if (likely(i)) {
		sq->tail = tail;
		sq->xmit_bufs += i;
		rte_wmb();

		/* Inform HW to xmit the packets */
		nicvf_addr_write(sq->sq_door, i * TX_DESC_PER_PKT);
	}
	return i;
}

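/*
 * Multi-segment transmit burst: a packet with N segments consumes one
 * HEADER sub-descriptor plus N GATHER sub-descriptors. The loop stops as
 * soon as the next packet would exceed the free descriptors counted at
 * entry, and the doorbell is rung once with the descriptors actually used.
 */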
uint16_t __rte_hot
nicvf_xmit_pkts_multiseg(void *tx_queue, struct rte_mbuf **tx_pkts,
			 uint16_t nb_pkts)
{
	int i, k;
	uint32_t used_desc, next_used_desc, used_bufs, free_desc, tail;
	struct nicvf_txq *sq = tx_queue;
	union sq_entry_t *desc_ptr = sq->desc;
	struct rte_mbuf **txbuffs = sq->txbuffs;
	struct rte_mbuf *pkt, *seg;
	uint32_t qlen_mask = sq->qlen_mask;
	uint16_t nb_segs;

	tail = sq->tail;
	used_desc = 0;
	used_bufs = 0;

	free_desc = nicvf_free_xmitted_buffers(sq, tx_pkts, nb_pkts);

	for (i = 0; i < nb_pkts; i++) {
		pkt = tx_pkts[i];

		nb_segs = pkt->nb_segs;

		next_used_desc = used_desc + nb_segs + 1;
		if (next_used_desc > free_desc)
			break;
		used_desc = next_used_desc;
		used_bufs += nb_segs;

		txbuffs[tail] = NULL;
		fill_sq_desc_header_zero_w1(desc_ptr + tail, pkt);
		tail = (tail + 1) & qlen_mask;

		txbuffs[tail] = pkt;
		fill_sq_desc_gather(desc_ptr + tail, pkt);
		tail = (tail + 1) & qlen_mask;

		seg = pkt->next;
		for (k = 1; k < nb_segs; k++) {
			txbuffs[tail] = seg;
			fill_sq_desc_gather(desc_ptr + tail, seg);
			tail = (tail + 1) & qlen_mask;
			seg = seg->next;
		}
	}

	if (likely(used_desc)) {
		sq->tail = tail;
		sq->xmit_bufs += used_bufs;
		rte_wmb();

		/* Inform HW to xmit the packets */
		nicvf_addr_write(sq->sq_door, used_desc);
	}
	return i;
}

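/*
 * Map the (L3 type, L4 type) pair reported in CQE_RX word 0 to an rte_mbuf
 * packet type. Combinations not listed below resolve to RTE_PTYPE_UNKNOWN.
 */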
static const alignas(RTE_CACHE_LINE_SIZE) uint32_t ptype_table[16][16] = {
	[L3_NONE][L4_NONE] = RTE_PTYPE_UNKNOWN,
	[L3_NONE][L4_IPSEC_ESP] = RTE_PTYPE_UNKNOWN,
	[L3_NONE][L4_IPFRAG] = RTE_PTYPE_L4_FRAG,
	[L3_NONE][L4_IPCOMP] = RTE_PTYPE_UNKNOWN,
	[L3_NONE][L4_TCP] = RTE_PTYPE_L4_TCP,
	[L3_NONE][L4_UDP_PASS1] = RTE_PTYPE_L4_UDP,
	[L3_NONE][L4_GRE] = RTE_PTYPE_TUNNEL_GRE,
	[L3_NONE][L4_UDP_PASS2] = RTE_PTYPE_L4_UDP,
	[L3_NONE][L4_UDP_GENEVE] = RTE_PTYPE_TUNNEL_GENEVE,
	[L3_NONE][L4_UDP_VXLAN] = RTE_PTYPE_TUNNEL_VXLAN,
	[L3_NONE][L4_NVGRE] = RTE_PTYPE_TUNNEL_NVGRE,

	[L3_IPV4][L4_NONE] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_UNKNOWN,
	[L3_IPV4][L4_IPSEC_ESP] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L3_IPV4,
	[L3_IPV4][L4_IPFRAG] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L4_FRAG,
	[L3_IPV4][L4_IPCOMP] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_UNKNOWN,
	[L3_IPV4][L4_TCP] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L4_TCP,
	[L3_IPV4][L4_UDP_PASS1] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L4_UDP,
	[L3_IPV4][L4_GRE] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_TUNNEL_GRE,
	[L3_IPV4][L4_UDP_PASS2] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_L4_UDP,
	[L3_IPV4][L4_UDP_GENEVE] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_TUNNEL_GENEVE,
	[L3_IPV4][L4_UDP_VXLAN] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_TUNNEL_VXLAN,
	[L3_IPV4][L4_NVGRE] = RTE_PTYPE_L3_IPV4 | RTE_PTYPE_TUNNEL_NVGRE,

	[L3_IPV4_OPT][L4_NONE] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_UNKNOWN,
	[L3_IPV4_OPT][L4_IPSEC_ESP] = RTE_PTYPE_L3_IPV4_EXT |
				RTE_PTYPE_L3_IPV4,
	[L3_IPV4_OPT][L4_IPFRAG] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_L4_FRAG,
	[L3_IPV4_OPT][L4_IPCOMP] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_UNKNOWN,
	[L3_IPV4_OPT][L4_TCP] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_L4_TCP,
	[L3_IPV4_OPT][L4_UDP_PASS1] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_L4_UDP,
	[L3_IPV4_OPT][L4_GRE] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_TUNNEL_GRE,
	[L3_IPV4_OPT][L4_UDP_PASS2] = RTE_PTYPE_L3_IPV4_EXT | RTE_PTYPE_L4_UDP,
	[L3_IPV4_OPT][L4_UDP_GENEVE] = RTE_PTYPE_L3_IPV4_EXT |
				RTE_PTYPE_TUNNEL_GENEVE,
	[L3_IPV4_OPT][L4_UDP_VXLAN] = RTE_PTYPE_L3_IPV4_EXT |
				RTE_PTYPE_TUNNEL_VXLAN,
	[L3_IPV4_OPT][L4_NVGRE] = RTE_PTYPE_L3_IPV4_EXT |
				RTE_PTYPE_TUNNEL_NVGRE,

	[L3_IPV6][L4_NONE] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_UNKNOWN,
	[L3_IPV6][L4_IPSEC_ESP] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_L3_IPV4,
	[L3_IPV6][L4_IPFRAG] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_L4_FRAG,
	[L3_IPV6][L4_IPCOMP] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_UNKNOWN,
	[L3_IPV6][L4_TCP] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_L4_TCP,
	[L3_IPV6][L4_UDP_PASS1] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_L4_UDP,
	[L3_IPV6][L4_GRE] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_TUNNEL_GRE,
	[L3_IPV6][L4_UDP_PASS2] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_L4_UDP,
	[L3_IPV6][L4_UDP_GENEVE] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_TUNNEL_GENEVE,
	[L3_IPV6][L4_UDP_VXLAN] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_TUNNEL_VXLAN,
	[L3_IPV6][L4_NVGRE] = RTE_PTYPE_L3_IPV6 | RTE_PTYPE_TUNNEL_NVGRE,

	[L3_IPV6_OPT][L4_NONE] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_UNKNOWN,
	[L3_IPV6_OPT][L4_IPSEC_ESP] = RTE_PTYPE_L3_IPV6_EXT |
				RTE_PTYPE_L3_IPV4,
	[L3_IPV6_OPT][L4_IPFRAG] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_L4_FRAG,
	[L3_IPV6_OPT][L4_IPCOMP] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_UNKNOWN,
	[L3_IPV6_OPT][L4_TCP] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_L4_TCP,
	[L3_IPV6_OPT][L4_UDP_PASS1] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_L4_UDP,
	[L3_IPV6_OPT][L4_GRE] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_TUNNEL_GRE,
	[L3_IPV6_OPT][L4_UDP_PASS2] = RTE_PTYPE_L3_IPV6_EXT | RTE_PTYPE_L4_UDP,
	[L3_IPV6_OPT][L4_UDP_GENEVE] = RTE_PTYPE_L3_IPV6_EXT |
				RTE_PTYPE_TUNNEL_GENEVE,
	[L3_IPV6_OPT][L4_UDP_VXLAN] = RTE_PTYPE_L3_IPV6_EXT |
				RTE_PTYPE_TUNNEL_VXLAN,
	[L3_IPV6_OPT][L4_NVGRE] = RTE_PTYPE_L3_IPV6_EXT |
				RTE_PTYPE_TUNNEL_NVGRE,

	[L3_ET_STOP][L4_NONE] = RTE_PTYPE_UNKNOWN,
	[L3_ET_STOP][L4_IPSEC_ESP] = RTE_PTYPE_UNKNOWN,
	[L3_ET_STOP][L4_IPFRAG] = RTE_PTYPE_L4_FRAG,
	[L3_ET_STOP][L4_IPCOMP] = RTE_PTYPE_UNKNOWN,
	[L3_ET_STOP][L4_TCP] = RTE_PTYPE_L4_TCP,
	[L3_ET_STOP][L4_UDP_PASS1] = RTE_PTYPE_L4_UDP,
	[L3_ET_STOP][L4_GRE] = RTE_PTYPE_TUNNEL_GRE,
	[L3_ET_STOP][L4_UDP_PASS2] = RTE_PTYPE_L4_UDP,
	[L3_ET_STOP][L4_UDP_GENEVE] = RTE_PTYPE_TUNNEL_GENEVE,
	[L3_ET_STOP][L4_UDP_VXLAN] = RTE_PTYPE_TUNNEL_VXLAN,
	[L3_ET_STOP][L4_NVGRE] = RTE_PTYPE_TUNNEL_NVGRE,

	[L3_OTHER][L4_NONE] = RTE_PTYPE_UNKNOWN,
	[L3_OTHER][L4_IPSEC_ESP] = RTE_PTYPE_UNKNOWN,
	[L3_OTHER][L4_IPFRAG] = RTE_PTYPE_L4_FRAG,
	[L3_OTHER][L4_IPCOMP] = RTE_PTYPE_UNKNOWN,
	[L3_OTHER][L4_TCP] = RTE_PTYPE_L4_TCP,
	[L3_OTHER][L4_UDP_PASS1] = RTE_PTYPE_L4_UDP,
	[L3_OTHER][L4_GRE] = RTE_PTYPE_TUNNEL_GRE,
	[L3_OTHER][L4_UDP_PASS2] = RTE_PTYPE_L4_UDP,
	[L3_OTHER][L4_UDP_GENEVE] = RTE_PTYPE_TUNNEL_GENEVE,
	[L3_OTHER][L4_UDP_VXLAN] = RTE_PTYPE_TUNNEL_VXLAN,
	[L3_OTHER][L4_NVGRE] = RTE_PTYPE_TUNNEL_NVGRE,
};

static inline uint32_t __rte_hot
nicvf_rx_classify_pkt(cqe_rx_word0_t cqe_rx_w0)
{
	return ptype_table[cqe_rx_w0.l3_type][cqe_rx_w0.l4_type];
}

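/*
 * Derive Rx checksum ol_flags from the CQE error opcode: no error reports
 * both IP and L4 checksum good, CQE_RX_ERR_IP_CHK marks the IP checksum
 * bad, CQE_RX_ERR_L4_CHK marks the L4 checksum bad. Any other opcode falls
 * back to the "good" entry of the lookup table.
 */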
static inline uint64_t __rte_hot
nicvf_set_olflags(const cqe_rx_word0_t cqe_rx_w0)
{
	static const alignas(RTE_CACHE_LINE_SIZE) uint64_t flag_table[3] = {
		RTE_MBUF_F_RX_IP_CKSUM_GOOD | RTE_MBUF_F_RX_L4_CKSUM_GOOD,
		RTE_MBUF_F_RX_IP_CKSUM_BAD | RTE_MBUF_F_RX_L4_CKSUM_UNKNOWN,
		RTE_MBUF_F_RX_IP_CKSUM_GOOD | RTE_MBUF_F_RX_L4_CKSUM_BAD,
	};

	const uint8_t idx = (cqe_rx_w0.err_opcode == CQE_RX_ERR_L4_CHK) << 1 |
		(cqe_rx_w0.err_opcode == CQE_RX_ERR_IP_CHK);
	return flag_table[idx];
}

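/*
 * Refill the shared receive buffer descriptor ring (RBDR) with 'to_fill'
 * buffers taken from the queue's mempool in one bulk get. The tail slot is
 * reserved with an atomic fetch-add so Rx queues sharing the RBDR can
 * refill concurrently; each caller then waits for its turn before
 * publishing the new tail and ringing the doorbell. Returns the number of
 * buffers posted, or 0 if the mempool ran dry.
 */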
static inline int __rte_hot
nicvf_fill_rbdr(struct nicvf_rxq *rxq, int to_fill)
{
	int i;
	uint32_t ltail, next_tail;
	struct nicvf_rbdr *rbdr = rxq->shared_rbdr;
	uint64_t mbuf_phys_off = rxq->mbuf_phys_off;
	struct rbdr_entry_t *desc = rbdr->desc;
	uint32_t qlen_mask = rbdr->qlen_mask;
	uintptr_t door = rbdr->rbdr_door;
	alignas(RTE_CACHE_LINE_SIZE) void *obj_p[NICVF_MAX_RX_FREE_THRESH];

	if (unlikely(rte_mempool_get_bulk(rxq->pool, obj_p, to_fill) < 0)) {
		rte_eth_devices[rxq->port_id].data->rx_mbuf_alloc_failed +=
			to_fill;
		return 0;
	}

	NICVF_RX_ASSERT((unsigned int)to_fill <= (qlen_mask -
		(nicvf_addr_read(rbdr->rbdr_status) & NICVF_RBDR_COUNT_MASK)));

	next_tail = rte_atomic_fetch_add_explicit(&rbdr->next_tail, to_fill,
			rte_memory_order_acquire);
	ltail = next_tail;
	for (i = 0; i < to_fill; i++) {
		struct rbdr_entry_t *entry = desc + (ltail & qlen_mask);

		entry->full_addr = nicvf_mbuff_virt2phy((uintptr_t)obj_p[i],
					mbuf_phys_off);
		ltail++;
	}

	rte_wait_until_equal_32((uint32_t *)(uintptr_t)&rbdr->tail, next_tail,
			rte_memory_order_relaxed);

	rte_atomic_store_explicit(&rbdr->tail, ltail, rte_memory_order_release);
	nicvf_addr_write(door, to_fill);
	return to_fill;
}

static inline int32_t __rte_hot
nicvf_rx_pkts_to_process(struct nicvf_rxq *rxq, uint16_t nb_pkts,
			 int32_t available_space)
{
	if (unlikely(available_space < nb_pkts))
		rxq->available_space = nicvf_addr_read(rxq->cq_status)
						& NICVF_CQ_CQE_COUNT_MASK;

	return RTE_MIN(nb_pkts, available_space);
}

static inline void __rte_hot
nicvf_rx_offload(cqe_rx_word0_t cqe_rx_w0, cqe_rx_word2_t cqe_rx_w2,
		 struct rte_mbuf *pkt)
{
	if (likely(cqe_rx_w0.rss_alg)) {
		pkt->hash.rss = cqe_rx_w2.rss_tag;
		pkt->ol_flags |= RTE_MBUF_F_RX_RSS_HASH;
	}
}

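/*
 * Single-segment receive burst, specialized at compile time through 'flag'
 * (no offload, checksum, VLAN strip). Each completion entry is read with
 * NICVF_LOAD_PAIR, the mbuf is recovered from the receive buffer pointer,
 * and the CQ doorbell is rung with the number of entries consumed. The
 * RBDR is refilled once recv_buffers crosses rx_free_thresh.
 */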
static __rte_always_inline uint16_t
nicvf_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts, uint16_t nb_pkts,
		const uint32_t flag)
{
	uint32_t i, to_process;
	struct cqe_rx_t *cqe_rx;
	struct rte_mbuf *pkt;
	cqe_rx_word0_t cqe_rx_w0;
	cqe_rx_word1_t cqe_rx_w1;
	cqe_rx_word2_t cqe_rx_w2;
	cqe_rx_word3_t cqe_rx_w3;
	struct nicvf_rxq *rxq = rx_queue;
	union cq_entry_t *desc = rxq->desc;
	const uint64_t cqe_mask = rxq->qlen_mask;
	uint64_t rb0_ptr, mbuf_phys_off = rxq->mbuf_phys_off;
	const uint64_t mbuf_init = rxq->mbuf_initializer.value;
	uint32_t cqe_head = rxq->head & cqe_mask;
	int32_t available_space = rxq->available_space;
	const uint8_t rbptr_offset = rxq->rbptr_offset;

	to_process = nicvf_rx_pkts_to_process(rxq, nb_pkts, available_space);

	for (i = 0; i < to_process; i++) {
		rte_prefetch_non_temporal(&desc[cqe_head + 2]);
		cqe_rx = (struct cqe_rx_t *)&desc[cqe_head];
		NICVF_RX_ASSERT(((struct cq_entry_type_t *)cqe_rx)->cqe_type
				== CQE_TYPE_RX);

		NICVF_LOAD_PAIR(cqe_rx_w0.u64, cqe_rx_w1.u64, cqe_rx);
		NICVF_LOAD_PAIR(cqe_rx_w2.u64, cqe_rx_w3.u64, &cqe_rx->word2);
		rb0_ptr = *((uint64_t *)cqe_rx + rbptr_offset);
		pkt = (struct rte_mbuf *)nicvf_mbuff_phy2virt
			(rb0_ptr - cqe_rx_w1.align_pad, mbuf_phys_off);

		if (flag & NICVF_RX_OFFLOAD_NONE)
			pkt->ol_flags = 0;
		if (flag & NICVF_RX_OFFLOAD_CKSUM)
			pkt->ol_flags = nicvf_set_olflags(cqe_rx_w0);
		if (flag & NICVF_RX_OFFLOAD_VLAN_STRIP) {
			if (unlikely(cqe_rx_w0.vlan_stripped)) {
				pkt->ol_flags |= RTE_MBUF_F_RX_VLAN
					| RTE_MBUF_F_RX_VLAN_STRIPPED;
				pkt->vlan_tci =
					rte_cpu_to_be_16(cqe_rx_w2.vlan_tci);
			}
		}
		pkt->data_len = cqe_rx_w3.rb0_sz;
		pkt->pkt_len = cqe_rx_w3.rb0_sz;
		pkt->packet_type = nicvf_rx_classify_pkt(cqe_rx_w0);
		nicvf_mbuff_init_update(pkt, mbuf_init, cqe_rx_w1.align_pad);
		nicvf_rx_offload(cqe_rx_w0, cqe_rx_w2, pkt);
		rx_pkts[i] = pkt;
		cqe_head = (cqe_head + 1) & cqe_mask;
		nicvf_prefetch_store_keep(pkt);
	}

	if (likely(to_process)) {
		rxq->available_space -= to_process;
		rxq->head = cqe_head;
		nicvf_addr_write(rxq->cq_door, to_process);
		rxq->recv_buffers += to_process;
	}
	if (rxq->recv_buffers > rxq->rx_free_thresh) {
		rxq->recv_buffers -= nicvf_fill_rbdr(rxq, rxq->rx_free_thresh);
		NICVF_RX_ASSERT(rxq->recv_buffers >= 0);
	}

	return to_process;
}

uint16_t __rte_hot
nicvf_recv_pkts_no_offload(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_NONE);
}

uint16_t __rte_hot
nicvf_recv_pkts_cksum(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_CKSUM);
}

uint16_t __rte_hot
nicvf_recv_pkts_vlan_strip(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_NONE | NICVF_RX_OFFLOAD_VLAN_STRIP);
}

uint16_t __rte_hot
nicvf_recv_pkts_cksum_vlan_strip(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_CKSUM | NICVF_RX_OFFLOAD_VLAN_STRIP);
}

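/*
 * Parse one multi-segment CQE_RX: rebuild the head mbuf from the first
 * receive buffer pointer, apply the requested offload flags, then chain
 * the remaining buffers as segments. Returns the number of receive
 * buffers consumed by this completion entry.
 */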
static __rte_always_inline uint16_t __rte_hot
nicvf_process_cq_mseg_entry(struct cqe_rx_t *cqe_rx,
			    uint64_t mbuf_phys_off,
			    struct rte_mbuf **rx_pkt, uint8_t rbptr_offset,
			    uint64_t mbuf_init, const uint32_t flag)
{
	struct rte_mbuf *pkt, *seg, *prev;
	cqe_rx_word0_t cqe_rx_w0;
	cqe_rx_word1_t cqe_rx_w1;
	cqe_rx_word2_t cqe_rx_w2;
	uint16_t *rb_sz, nb_segs, seg_idx;
	uint64_t *rb_ptr;

	NICVF_LOAD_PAIR(cqe_rx_w0.u64, cqe_rx_w1.u64, cqe_rx);
	NICVF_RX_ASSERT(cqe_rx_w0.cqe_type == CQE_TYPE_RX);
	cqe_rx_w2 = cqe_rx->word2;
	rb_sz = &cqe_rx->word3.rb0_sz;
	rb_ptr = (uint64_t *)cqe_rx + rbptr_offset;
	nb_segs = cqe_rx_w0.rb_cnt;
	pkt = (struct rte_mbuf *)nicvf_mbuff_phy2virt
		(rb_ptr[0] - cqe_rx_w1.align_pad, mbuf_phys_off);

	pkt->pkt_len = cqe_rx_w1.pkt_len;
	pkt->data_len = rb_sz[nicvf_frag_num(0)];
	nicvf_mbuff_init_mseg_update(
				pkt, mbuf_init, cqe_rx_w1.align_pad, nb_segs);
	pkt->packet_type = nicvf_rx_classify_pkt(cqe_rx_w0);
	if (flag & NICVF_RX_OFFLOAD_NONE)
		pkt->ol_flags = 0;
	if (flag & NICVF_RX_OFFLOAD_CKSUM)
		pkt->ol_flags = nicvf_set_olflags(cqe_rx_w0);
	if (flag & NICVF_RX_OFFLOAD_VLAN_STRIP) {
		if (unlikely(cqe_rx_w0.vlan_stripped)) {
			pkt->ol_flags |= RTE_MBUF_F_RX_VLAN
				| RTE_MBUF_F_RX_VLAN_STRIPPED;
			pkt->vlan_tci = rte_cpu_to_be_16(cqe_rx_w2.vlan_tci);
		}
	}
	nicvf_rx_offload(cqe_rx_w0, cqe_rx_w2, pkt);

	*rx_pkt = pkt;
	prev = pkt;
	for (seg_idx = 1; seg_idx < nb_segs; seg_idx++) {
		seg = (struct rte_mbuf *)nicvf_mbuff_phy2virt
			(rb_ptr[seg_idx], mbuf_phys_off);

		prev->next = seg;
		seg->data_len = rb_sz[nicvf_frag_num(seg_idx)];
		nicvf_mbuff_init_update(seg, mbuf_init, 0);

		prev = seg;
	}
	prev->next = NULL;
	return nb_segs;
}

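/*
 * Multi-segment receive burst: one completion entry per packet, with the
 * consumed receive buffers accumulated separately so the RBDR refill
 * accounting is kept in buffers rather than packets.
 */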
static __rte_always_inline uint16_t __rte_hot
nicvf_recv_pkts_multiseg(void *rx_queue, struct rte_mbuf **rx_pkts,
			 uint16_t nb_pkts, const uint32_t flag)
{
	union cq_entry_t *cq_entry;
	struct cqe_rx_t *cqe_rx;
	struct nicvf_rxq *rxq = rx_queue;
	union cq_entry_t *desc = rxq->desc;
	const uint64_t cqe_mask = rxq->qlen_mask;
	uint64_t mbuf_phys_off = rxq->mbuf_phys_off;
	uint32_t i, to_process, cqe_head, buffers_consumed = 0;
	int32_t available_space = rxq->available_space;
	uint16_t nb_segs;
	const uint64_t mbuf_init = rxq->mbuf_initializer.value;
	const uint8_t rbptr_offset = rxq->rbptr_offset;

	cqe_head = rxq->head & cqe_mask;
	to_process = nicvf_rx_pkts_to_process(rxq, nb_pkts, available_space);

	for (i = 0; i < to_process; i++) {
		rte_prefetch_non_temporal(&desc[cqe_head + 2]);
		cq_entry = &desc[cqe_head];
		cqe_rx = (struct cqe_rx_t *)cq_entry;
		nb_segs = nicvf_process_cq_mseg_entry(cqe_rx, mbuf_phys_off,
			rx_pkts + i, rbptr_offset, mbuf_init, flag);
		buffers_consumed += nb_segs;
		cqe_head = (cqe_head + 1) & cqe_mask;
		nicvf_prefetch_store_keep(rx_pkts[i]);
	}

	if (likely(to_process)) {
		rxq->available_space -= to_process;
		rxq->head = cqe_head;
		nicvf_addr_write(rxq->cq_door, to_process);
		rxq->recv_buffers += buffers_consumed;
	}
	if (rxq->recv_buffers > rxq->rx_free_thresh) {
		rxq->recv_buffers -= nicvf_fill_rbdr(rxq, rxq->rx_free_thresh);
		NICVF_RX_ASSERT(rxq->recv_buffers >= 0);
	}

	return to_process;
}

uint16_t __rte_hot
nicvf_recv_pkts_multiseg_no_offload(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_NONE);
}

uint16_t __rte_hot
nicvf_recv_pkts_multiseg_cksum(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_CKSUM);
}

uint16_t __rte_hot
nicvf_recv_pkts_multiseg_vlan_strip(void *rx_queue, struct rte_mbuf **rx_pkts,
		uint16_t nb_pkts)
{
	return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_NONE | NICVF_RX_OFFLOAD_VLAN_STRIP);
}

uint16_t __rte_hot
nicvf_recv_pkts_multiseg_cksum_vlan_strip(void *rx_queue,
		struct rte_mbuf **rx_pkts, uint16_t nb_pkts)
{
	return nicvf_recv_pkts_multiseg(rx_queue, rx_pkts, nb_pkts,
			NICVF_RX_OFFLOAD_CKSUM | NICVF_RX_OFFLOAD_VLAN_STRIP);
}

uint32_t
nicvf_dev_rx_queue_count(void *rx_queue)
{
	struct nicvf_rxq *rxq;

	rxq = rx_queue;
	return nicvf_addr_read(rxq->cq_status) & NICVF_CQ_CQE_COUNT_MASK;
}

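/*
 * Push all outstanding receive-buffer credits back into the RBDR in chunks
 * of at most NICVF_MAX_RX_FREE_THRESH and return the number of buffers
 * that were pending on entry.
 */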
uint32_t
nicvf_dev_rbdr_refill(struct rte_eth_dev *dev, uint16_t queue_idx)
{
	struct nicvf_rxq *rxq;
	uint32_t to_process;
	uint32_t rx_free;

	rxq = dev->data->rx_queues[queue_idx];
	to_process = rxq->recv_buffers;
	while (rxq->recv_buffers > 0) {
		rx_free = RTE_MIN(rxq->recv_buffers, NICVF_MAX_RX_FREE_THRESH);
		rxq->recv_buffers -= nicvf_fill_rbdr(rxq, rx_free);
	}

	assert(rxq->recv_buffers == 0);
	return to_process;
}