/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright 2017 6WIND S.A.
 * Copyright 2017 Mellanox Technologies, Ltd
 */

#ifndef RTE_PMD_MLX5_RXTX_VEC_H_
#define RTE_PMD_MLX5_RXTX_VEC_H_

#include <rte_common.h>
#include <rte_mbuf.h>

#include <mlx5_prm.h>

#include "mlx5_autoconf.h"

/* HW checksum offload capabilities of vectorized Tx. */
#define MLX5_VEC_TX_CKSUM_OFFLOAD_CAP \
	(RTE_ETH_TX_OFFLOAD_IPV4_CKSUM | \
	 RTE_ETH_TX_OFFLOAD_UDP_CKSUM | \
	 RTE_ETH_TX_OFFLOAD_TCP_CKSUM | \
	 RTE_ETH_TX_OFFLOAD_OUTER_IPV4_CKSUM)

/*
 * Compile time sanity check for vectorized functions.
 */

#define S_ASSERT_RTE_MBUF(s) \
	static_assert(s, "A field of struct rte_mbuf is changed")
#define S_ASSERT_MLX5_CQE(s) \
	static_assert(s, "A field of struct mlx5_cqe is changed")
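/*
 * Why the offsets below are pinned (illustrative sketch, not driver code;
 * names are placeholders): the vectorized Rx routines shuffle CQE fields
 * and compose packet_type, pkt_len, data_len, vlan_tci and hash.rss in
 * vector registers, then flush them with a single wide store at
 * rx_descriptor_fields1, e.g. on x86 something along the lines of:
 *
 *	_mm_storeu_si128((__m128i *)&pkt->rx_descriptor_fields1, desc_fields);
 *
 * A change in the relative layout of struct rte_mbuf or struct mlx5_cqe
 * would silently corrupt mbufs, hence the static_assert() checks below.
 */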
/* rxq_cq_decompress_v() */
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, pkt_len) ==
		  offsetof(struct rte_mbuf, rx_descriptor_fields1) + 4);
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, data_len) ==
		  offsetof(struct rte_mbuf, rx_descriptor_fields1) + 8);
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, hash) ==
		  offsetof(struct rte_mbuf, rx_descriptor_fields1) + 12);

/* rxq_cq_to_ptype_oflags_v() */
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, ol_flags) ==
		  offsetof(struct rte_mbuf, rearm_data) + 8);
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, rearm_data) ==
		  RTE_ALIGN(offsetof(struct rte_mbuf, rearm_data), 16));

/* rxq_burst_v() */
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, pkt_len) ==
		  offsetof(struct rte_mbuf, rx_descriptor_fields1) + 4);
S_ASSERT_RTE_MBUF(offsetof(struct rte_mbuf, data_len) ==
		  offsetof(struct rte_mbuf, rx_descriptor_fields1) + 8);
#if (RTE_CACHE_LINE_SIZE == 128)
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, pkt_info) == 64);
#else
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, pkt_info) == 0);
#endif
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, rx_hash_res) ==
		  offsetof(struct mlx5_cqe, pkt_info) + 12);
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, rsvd1) + 11 ==
		  offsetof(struct mlx5_cqe, hdr_type_etc));
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, vlan_info) ==
		  offsetof(struct mlx5_cqe, hdr_type_etc) + 2);
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, lro_num_seg) + 12 ==
		  offsetof(struct mlx5_cqe, byte_cnt));
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, sop_drop_qpn) ==
		  RTE_ALIGN(offsetof(struct mlx5_cqe, sop_drop_qpn), 8));
S_ASSERT_MLX5_CQE(offsetof(struct mlx5_cqe, op_own) ==
		  offsetof(struct mlx5_cqe, sop_drop_qpn) + 7);

#endif /* RTE_PMD_MLX5_RXTX_VEC_H_ */