/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright 2018-2022 Advanced Micro Devices, Inc.
 */

#ifndef _IONIC_RXTX_H_
#define _IONIC_RXTX_H_

#include <stdint.h>

#include "ionic_if.h"

struct ionic_rx_qcq;
struct ionic_tx_qcq;
struct rte_eth_dev;
struct rte_eth_rxconf;
struct rte_eth_rxq_info;
struct rte_eth_txconf;
struct rte_eth_txq_info;
struct rte_mbuf;
struct rte_mempool;

struct ionic_rx_service {
	/* cb in */
	struct rte_mbuf **rx_pkts;
	/* cb out */
	uint16_t nb_rx;
};

#define IONIC_CSUM_FLAG_MASK	(IONIC_RXQ_COMP_CSUM_F_VLAN - 1)

extern const uint64_t ionic_csum_flags[IONIC_CSUM_FLAG_MASK];
extern const uint32_t ionic_ptype_table[IONIC_RXQ_COMP_PKT_TYPE_MASK];

/* ionic_rxtx.c */
int ionic_dev_rx_queue_setup(struct rte_eth_dev *dev, uint16_t rx_queue_id,
	uint16_t nb_desc, uint32_t socket_id,
	const struct rte_eth_rxconf *rx_conf, struct rte_mempool *mp);
void ionic_dev_rx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_rx_queue_start(struct rte_eth_dev *dev, uint16_t rx_queue_id);
int ionic_dev_rx_queue_stop(struct rte_eth_dev *dev, uint16_t rx_queue_id);

int ionic_dev_tx_queue_setup(struct rte_eth_dev *dev, uint16_t tx_queue_id,
	uint16_t nb_desc, uint32_t socket_id,
	const struct rte_eth_txconf *tx_conf);
void ionic_dev_tx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_tx_queue_start(struct rte_eth_dev *dev, uint16_t tx_queue_id);
int ionic_dev_tx_queue_stop(struct rte_eth_dev *dev, uint16_t tx_queue_id);

/* Helpers for optimized dev_stop() */
void ionic_dev_rx_queue_stop_firsthalf(struct rte_eth_dev *dev,
	uint16_t rx_queue_id);
void ionic_dev_rx_queue_stop_secondhalf(struct rte_eth_dev *dev,
	uint16_t rx_queue_id);
void ionic_dev_tx_queue_stop_firsthalf(struct rte_eth_dev *dev,
	uint16_t tx_queue_id);
void ionic_dev_tx_queue_stop_secondhalf(struct rte_eth_dev *dev,
	uint16_t tx_queue_id);
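/*
 * Illustrative sketch only, not part of the driver API: the firsthalf/
 * secondhalf helpers above let dev_stop() post the stop for every queue
 * before completing any of them, overlapping the per-queue stop latency
 * instead of serializing it. The function name and the caller-supplied
 * queue counts below are hypothetical.
 */
static inline void
ionic_example_stop_all_queues(struct rte_eth_dev *dev,
	uint16_t nb_rxq, uint16_t nb_txq)
{
	uint16_t i;

	/* Kick off the stop on every Tx and Rx queue first... */
	for (i = 0; i < nb_txq; i++)
		ionic_dev_tx_queue_stop_firsthalf(dev, i);
	for (i = 0; i < nb_rxq; i++)
		ionic_dev_rx_queue_stop_firsthalf(dev, i);

	/* ...then complete each stop once they are all in flight. */
	for (i = 0; i < nb_txq; i++)
		ionic_dev_tx_queue_stop_secondhalf(dev, i);
	for (i = 0; i < nb_rxq; i++)
		ionic_dev_rx_queue_stop_secondhalf(dev, i);
}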
void ionic_rxq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
	struct rte_eth_rxq_info *qinfo);
void ionic_txq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
	struct rte_eth_txq_info *qinfo);

int ionic_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
int ionic_dev_tx_descriptor_status(void *tx_queue, uint16_t offset);

const uint32_t *ionic_dev_supported_ptypes_get(struct rte_eth_dev *dev,
	size_t *no_of_elements);

int ionic_tx_tso(struct ionic_tx_qcq *txq, struct rte_mbuf *txm);

uint16_t ionic_prep_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

/* ionic_rxtx_simple.c */
uint16_t ionic_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
	uint16_t nb_pkts);
uint16_t ionic_xmit_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

int ionic_rx_fill(struct ionic_rx_qcq *rxq);

/* ionic_rxtx_sg.c */
uint16_t ionic_recv_pkts_sg(void *rx_queue, struct rte_mbuf **rx_pkts,
	uint16_t nb_pkts);
uint16_t ionic_xmit_pkts_sg(void *tx_queue, struct rte_mbuf **tx_pkts,
	uint16_t nb_pkts);

int ionic_rx_fill_sg(struct ionic_rx_qcq *rxq);

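/*
 * Descriptor ring doorbell helpers. When the queue has a device-resident
 * CMB (controller memory buffer) descriptor ring (q->cmb_base != NULL),
 * the descriptors staged in the host shadow ring (q->base) since the last
 * flush are first copied into the CMB ring, handling wrap-around at
 * num_descs, and cmb_head_idx is advanced to head_idx. The doorbell is
 * then rung via ionic_q_flush().
 */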
static inline void
ionic_rxq_flush(struct ionic_queue *q)
{
	struct ionic_rxq_desc *desc_base = q->base;
	struct ionic_rxq_desc *cmb_desc_base = q->cmb_base;

	if (q->cmb_base) {
		if (q->head_idx < q->cmb_head_idx) {
			/* copy [cmb_head, num_descs) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
			/* copy [0, head) */
			rte_memcpy((void *)&cmb_desc_base[0],
				(void *)&desc_base[0],
				q->head_idx * sizeof(*desc_base));
		} else {
			/* copy [cmb_head, head) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
		}
		q->cmb_head_idx = q->head_idx;
	}

	ionic_q_flush(q);
}

static inline void
ionic_txq_flush(struct ionic_queue *q)
{
	struct ionic_txq_desc *desc_base = q->base;
	struct ionic_txq_desc *cmb_desc_base = q->cmb_base;

	if (q->cmb_base) {
		if (q->head_idx < q->cmb_head_idx) {
			/* copy [cmb_head, num_descs) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
			/* copy [0, head) */
			rte_memcpy((void *)&cmb_desc_base[0],
				(void *)&desc_base[0],
				q->head_idx * sizeof(*desc_base));
		} else {
			/* copy [cmb_head, head) */
			rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
				(void *)&desc_base[q->cmb_head_idx],
				(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
		}
		q->cmb_head_idx = q->head_idx;
	}

	ionic_q_flush(q);
}

#endif /* _IONIC_RXTX_H_ */