/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright 2018-2022 Advanced Micro Devices, Inc.
 */

#ifndef _IONIC_RXTX_H_
#define _IONIC_RXTX_H_

#include <stdint.h>

#include <rte_memcpy.h>

#include "ionic_if.h"
#include "ionic_dev.h"

struct ionic_rx_qcq;
struct ionic_tx_qcq;
struct rte_eth_dev;
struct rte_eth_rxconf;
struct rte_eth_rxq_info;
struct rte_eth_txconf;
struct rte_eth_txq_info;
struct rte_mbuf;
struct rte_mempool;

struct ionic_rx_service {
	/* cb in */
	struct rte_mbuf **rx_pkts;
	/* cb out */
	uint16_t nb_rx;
};

#define IONIC_CSUM_FLAG_MASK	(IONIC_RXQ_COMP_CSUM_F_VLAN - 1)

extern const uint64_t ionic_csum_flags[IONIC_CSUM_FLAG_MASK];
extern const uint32_t ionic_ptype_table[IONIC_RXQ_COMP_PKT_TYPE_MASK];

/* ionic_rxtx.c */
int ionic_dev_rx_queue_setup(struct rte_eth_dev *dev, uint16_t rx_queue_id,
	uint16_t nb_desc, uint32_t socket_id,
	const struct rte_eth_rxconf *rx_conf, struct rte_mempool *mp);
void ionic_dev_rx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_rx_queue_start(struct rte_eth_dev *dev, uint16_t rx_queue_id);
int ionic_dev_rx_queue_stop(struct rte_eth_dev *eth_dev, uint16_t rx_queue_id);

int ionic_dev_tx_queue_setup(struct rte_eth_dev *dev, uint16_t tx_queue_id,
	uint16_t nb_desc, uint32_t socket_id,
	const struct rte_eth_txconf *tx_conf);
void ionic_dev_tx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_tx_queue_stop(struct rte_eth_dev *eth_dev, uint16_t tx_queue_id);
int ionic_dev_tx_queue_start(struct rte_eth_dev *dev, uint16_t tx_queue_id);

void ionic_rxq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
	struct rte_eth_rxq_info *qinfo);
void ionic_txq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
	struct rte_eth_txq_info *qinfo);

int ionic_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
int ionic_dev_tx_descriptor_status(void *tx_queue, uint16_t offset);

const uint32_t *ionic_dev_supported_ptypes_get(struct rte_eth_dev *dev,
	size_t *no_of_elements);

int ionic_tx_tso(struct ionic_tx_qcq *txq, struct rte_mbuf *txm);

uint16_t ionic_prep_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

/* ionic_rxtx_simple.c */
uint16_t ionic_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
	uint16_t nb_pkts);
uint16_t ionic_xmit_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

int ionic_rx_fill(struct ionic_rx_qcq *rxq);

/* ionic_rxtx_sg.c */
uint16_t ionic_recv_pkts_sg(void *rx_queue, struct rte_mbuf **rx_pkts,
	uint16_t nb_pkts);
uint16_t ionic_xmit_pkts_sg(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

int ionic_rx_fill_sg(struct ionic_rx_qcq *rxq);

/*
 * If the Rx queue uses a controller memory buffer (CMB), copy the
 * descriptors posted since the last flush from the host shadow ring
 * into the CMB, handling wrap-around, then notify the device via
 * ionic_q_flush().
 */
static inline void
ionic_rxq_flush(struct ionic_queue *q)
{
	struct ionic_rxq_desc *desc_base = q->base;
	struct ionic_rxq_desc *cmb_desc_base = q->cmb_base;

	if (q->cmb_base) {
		if (q->head_idx < q->cmb_head_idx) {
			/* copy [cmb_head, num_descs) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
			/* copy [0, head) */
			rte_memcpy((void *)&cmb_desc_base[0],
				(void *)&desc_base[0],
				q->head_idx * sizeof(*desc_base));
		} else {
			/* copy [cmb_head, head) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
		}
		q->cmb_head_idx = q->head_idx;
	}

	ionic_q_flush(q);
}

/* Same as ionic_rxq_flush(), but for Tx descriptors. */
static inline void
ionic_txq_flush(struct ionic_queue *q)
{
	struct ionic_txq_desc *desc_base = q->base;
	struct ionic_txq_desc *cmb_desc_base = q->cmb_base;

	if (q->cmb_base) {
		if (q->head_idx < q->cmb_head_idx) {
			/* copy [cmb_head, num_descs) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
			/* copy [0, head) */
			rte_memcpy((void *)&cmb_desc_base[0],
				(void *)&desc_base[0],
				q->head_idx * sizeof(*desc_base));
		} else {
			/* copy [cmb_head, head) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
		}
		q->cmb_head_idx = q->head_idx;
	}

	ionic_q_flush(q);
}

#endif /* _IONIC_RXTX_H_ */