/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Huawei Technologies Co., Ltd
 */

#ifndef _HINIC_PMD_NICIO_H_
#define _HINIC_PMD_NICIO_H_

#define RX_BUF_LEN_16K			16384
#define RX_BUF_LEN_1_5K			1536

/* vhd type */
#define HINIC_VHD_TYPE_0B		2
#define HINIC_VHD_TYPE_10B		1
#define HINIC_VHD_TYPE_12B		0

#define HINIC_Q_CTXT_MAX		42

/* performance: ci addr RTE_CACHE_SIZE(64B) alignment */
#define HINIC_CI_Q_ADDR_SIZE		64

#define CI_TABLE_SIZE(num_qps, pg_sz)	\
	(ALIGN((num_qps) * HINIC_CI_Q_ADDR_SIZE, pg_sz))

#define HINIC_CI_VADDR(base_addr, q_id)	\
	((u8 *)(base_addr) + (q_id) * HINIC_CI_Q_ADDR_SIZE)

#define HINIC_CI_PADDR(base_paddr, q_id)	\
	((base_paddr) + (q_id) * HINIC_CI_Q_ADDR_SIZE)

#define Q_CTXT_SIZE			48
#define TSO_LRO_CTXT_SIZE		240

#define SQ_CTXT_OFFSET(max_sqs, max_rqs, q_id)	\
	(((max_rqs) + (max_sqs)) * TSO_LRO_CTXT_SIZE +	\
	 (q_id) * Q_CTXT_SIZE)

#define RQ_CTXT_OFFSET(max_sqs, max_rqs, q_id)	\
	(((max_rqs) + (max_sqs)) * TSO_LRO_CTXT_SIZE +	\
	 (max_sqs) * Q_CTXT_SIZE + (q_id) * Q_CTXT_SIZE)

#define SQ_CTXT_SIZE(num_sqs)	\
	((u16)(sizeof(struct hinic_qp_ctxt_header) +	\
	       (num_sqs) * sizeof(struct hinic_sq_ctxt)))

#define RQ_CTXT_SIZE(num_rqs)	\
	((u16)(sizeof(struct hinic_qp_ctxt_header) +	\
	       (num_rqs) * sizeof(struct hinic_rq_ctxt)))

#define SQ_CTXT_CEQ_ATTR_CEQ_ID_SHIFT		8
#define SQ_CTXT_CEQ_ATTR_GLOBAL_SQ_ID_SHIFT	13
#define SQ_CTXT_CEQ_ATTR_EN_SHIFT		23
#define SQ_CTXT_CEQ_ATTR_ARM_SHIFT		31

#define SQ_CTXT_CEQ_ATTR_CEQ_ID_MASK		0x1FU
#define SQ_CTXT_CEQ_ATTR_GLOBAL_SQ_ID_MASK	0x3FFU
#define SQ_CTXT_CEQ_ATTR_EN_MASK		0x1U
#define SQ_CTXT_CEQ_ATTR_ARM_MASK		0x1U

#define SQ_CTXT_CEQ_ATTR_SET(val, member)	\
	(((val) & SQ_CTXT_CEQ_ATTR_##member##_MASK) <<	\
	 SQ_CTXT_CEQ_ATTR_##member##_SHIFT)

#define SQ_CTXT_CI_IDX_SHIFT			11
#define SQ_CTXT_CI_OWNER_SHIFT			23

#define SQ_CTXT_CI_IDX_MASK			0xFFFU
#define SQ_CTXT_CI_OWNER_MASK			0x1U

#define SQ_CTXT_CI_SET(val, member)	\
	(((val) & SQ_CTXT_CI_##member##_MASK) << SQ_CTXT_CI_##member##_SHIFT)

#define SQ_CTXT_WQ_PAGE_HI_PFN_SHIFT		0
#define SQ_CTXT_WQ_PAGE_PI_SHIFT		20

#define SQ_CTXT_WQ_PAGE_HI_PFN_MASK		0xFFFFFU
#define SQ_CTXT_WQ_PAGE_PI_MASK			0xFFFU

#define SQ_CTXT_WQ_PAGE_SET(val, member)	\
	(((val) & SQ_CTXT_WQ_PAGE_##member##_MASK) <<	\
	 SQ_CTXT_WQ_PAGE_##member##_SHIFT)

#define SQ_CTXT_PREF_CACHE_THRESHOLD_SHIFT	0
#define SQ_CTXT_PREF_CACHE_MAX_SHIFT		14
#define SQ_CTXT_PREF_CACHE_MIN_SHIFT		25

#define SQ_CTXT_PREF_CACHE_THRESHOLD_MASK	0x3FFFU
#define SQ_CTXT_PREF_CACHE_MAX_MASK		0x7FFU
#define SQ_CTXT_PREF_CACHE_MIN_MASK		0x7FU

#define SQ_CTXT_PREF_WQ_PFN_HI_SHIFT		0
#define SQ_CTXT_PREF_CI_SHIFT			20

#define SQ_CTXT_PREF_WQ_PFN_HI_MASK		0xFFFFFU
#define SQ_CTXT_PREF_CI_MASK			0xFFFU

#define SQ_CTXT_PREF_SET(val, member)	\
	(((val) & SQ_CTXT_PREF_##member##_MASK) <<	\
	 SQ_CTXT_PREF_##member##_SHIFT)

#define SQ_CTXT_WQ_BLOCK_PFN_HI_SHIFT		0

#define SQ_CTXT_WQ_BLOCK_PFN_HI_MASK		0x7FFFFFU

#define SQ_CTXT_WQ_BLOCK_SET(val, member)	\
	(((val) & SQ_CTXT_WQ_BLOCK_##member##_MASK) <<	\
	 SQ_CTXT_WQ_BLOCK_##member##_SHIFT)
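/*
 * Illustrative sketch only (not part of this header): the driver composes
 * each 32-bit queue-context word by OR-ing the *_SET field macros above,
 * each of which masks a value and shifts it to its bit position. Assuming
 * a local variable global_sq_id (not defined here), an SQ ceq_attr word
 * could be built roughly as:
 *
 *	u32 ceq_attr = SQ_CTXT_CEQ_ATTR_SET(global_sq_id, GLOBAL_SQ_ID) |
 *		       SQ_CTXT_CEQ_ATTR_SET(0, EN) |
 *		       SQ_CTXT_CEQ_ATTR_SET(0, ARM);
 *
 * The RQ_CTXT_*_SET macros below follow the same pattern for RQ contexts.
 */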
#define RQ_CTXT_CEQ_ATTR_EN_SHIFT		0
#define RQ_CTXT_CEQ_ATTR_OWNER_SHIFT		1

#define RQ_CTXT_CEQ_ATTR_EN_MASK		0x1U
#define RQ_CTXT_CEQ_ATTR_OWNER_MASK		0x1U

#define RQ_CTXT_CEQ_ATTR_SET(val, member)	\
	(((val) & RQ_CTXT_CEQ_ATTR_##member##_MASK) <<	\
	 RQ_CTXT_CEQ_ATTR_##member##_SHIFT)

#define RQ_CTXT_PI_IDX_SHIFT			0
#define RQ_CTXT_PI_INTR_SHIFT			22
#define RQ_CTXT_PI_CEQ_ARM_SHIFT		31

#define RQ_CTXT_PI_IDX_MASK			0xFFFU
#define RQ_CTXT_PI_INTR_MASK			0x3FFU
#define RQ_CTXT_PI_CEQ_ARM_MASK			0x1U

#define RQ_CTXT_PI_SET(val, member)	\
	(((val) & RQ_CTXT_PI_##member##_MASK) << RQ_CTXT_PI_##member##_SHIFT)

#define RQ_CTXT_WQ_PAGE_HI_PFN_SHIFT		0
#define RQ_CTXT_WQ_PAGE_CI_SHIFT		20

#define RQ_CTXT_WQ_PAGE_HI_PFN_MASK		0xFFFFFU
#define RQ_CTXT_WQ_PAGE_CI_MASK			0xFFFU

#define RQ_CTXT_WQ_PAGE_SET(val, member)	\
	(((val) & RQ_CTXT_WQ_PAGE_##member##_MASK) <<	\
	 RQ_CTXT_WQ_PAGE_##member##_SHIFT)

#define RQ_CTXT_PREF_CACHE_THRESHOLD_SHIFT	0
#define RQ_CTXT_PREF_CACHE_MAX_SHIFT		14
#define RQ_CTXT_PREF_CACHE_MIN_SHIFT		25

#define RQ_CTXT_PREF_CACHE_THRESHOLD_MASK	0x3FFFU
#define RQ_CTXT_PREF_CACHE_MAX_MASK		0x7FFU
#define RQ_CTXT_PREF_CACHE_MIN_MASK		0x7FU

#define RQ_CTXT_PREF_WQ_PFN_HI_SHIFT		0
#define RQ_CTXT_PREF_CI_SHIFT			20

#define RQ_CTXT_PREF_WQ_PFN_HI_MASK		0xFFFFFU
#define RQ_CTXT_PREF_CI_MASK			0xFFFU

#define RQ_CTXT_PREF_SET(val, member)	\
	(((val) & RQ_CTXT_PREF_##member##_MASK) <<	\
	 RQ_CTXT_PREF_##member##_SHIFT)

#define RQ_CTXT_WQ_BLOCK_PFN_HI_SHIFT		0

#define RQ_CTXT_WQ_BLOCK_PFN_HI_MASK		0x7FFFFFU

#define RQ_CTXT_WQ_BLOCK_SET(val, member)	\
	(((val) & RQ_CTXT_WQ_BLOCK_##member##_MASK) <<	\
	 RQ_CTXT_WQ_BLOCK_##member##_SHIFT)

#define SIZE_16BYTES(size)	(ALIGN((size), 16) >> 4)

enum hinic_qp_ctxt_type {
	HINIC_QP_CTXT_TYPE_SQ,
	HINIC_QP_CTXT_TYPE_RQ,
};

struct hinic_sq {
	struct hinic_wq		*wq;
	volatile u16		*cons_idx_addr;
	void __iomem		*db_addr;

	u16			q_id;
	u16			owner;
	u16			sq_depth;
};

struct hinic_rq {
	struct hinic_wq		*wq;
	volatile u16		*pi_virt_addr;
	dma_addr_t		pi_dma_addr;

	u16			irq_id;
	u16			msix_entry_idx;
	u16			q_id;
	u16			rq_depth;
};

struct hinic_qp {
	struct hinic_sq		sq;
	struct hinic_rq		rq;
};

struct hinic_event {
	void (*tx_ack)(void *handle, u16 q_id);
	/* status: 0 - link down; 1 - link up */
	void (*link_change)(void *handle, int status);
};

struct hinic_nic_io {
	struct hinic_hwdev	*hwdev;

	u16			global_qpn;

	struct hinic_wq		*sq_wq;
	struct hinic_wq		*rq_wq;

	u16			max_qps;
	u16			num_qps;

	u16			num_sqs;
	u16			num_rqs;

	u16			sq_depth;
	u16			rq_depth;

	u16			rq_buf_size;
	u16			vhd_mode;

	struct hinic_qp		*qps;
	/* sq ci mem base addr of the function */
	void			*ci_vaddr_base;
	dma_addr_t		ci_dma_base;

	struct hinic_event	event;
	void			*event_handle;
};

struct hinic_sq_db {
	u32	db_info;
};

int hinic_init_qp_ctxts(struct hinic_hwdev *hwdev);

void hinic_free_qp_ctxts(struct hinic_hwdev *hwdev);

int hinic_rx_tx_flush(struct hinic_hwdev *hwdev);

int hinic_get_sq_free_wqebbs(struct hinic_hwdev *hwdev, u16 q_id);

u16 hinic_get_sq_local_ci(struct hinic_hwdev *hwdev, u16 q_id);

void hinic_update_sq_local_ci(struct hinic_hwdev *hwdev, u16 q_id,
			      int wqebb_cnt);

void hinic_return_sq_wqe(struct hinic_hwdev *hwdev, u16 q_id,
			 int num_wqebbs, u16 owner);

int hinic_get_rq_free_wqebbs(struct hinic_hwdev *hwdev, u16 q_id);

void *hinic_get_rq_wqe(struct hinic_hwdev *hwdev, u16 q_id, u16 *pi);

void hinic_return_rq_wqe(struct hinic_hwdev *hwdev, u16 q_id, int num_wqebbs);

u16 hinic_get_rq_local_ci(struct hinic_hwdev *hwdev, u16 q_id);

void hinic_update_rq_local_ci(struct hinic_hwdev *hwdev, u16 q_id, int wqe_cnt);

int hinic_init_nicio(struct hinic_hwdev *hwdev);

void hinic_deinit_nicio(struct hinic_hwdev *hwdev);

int hinic_convert_rx_buf_size(u32 rx_buf_sz, u32 *match_sz);
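/*
 * Usage sketch only (illustrative; the local variables and the exact
 * caller-side flow are assumptions, not defined by this header): a receive
 * path typically reserves an RQ WQE, fills it at the returned producer
 * index, and advances the local consumer index once the completion has
 * been processed:
 *
 *	u16 pi;
 *	void *rq_wqe = hinic_get_rq_wqe(hwdev, q_id, &pi);
 *	if (rq_wqe == NULL)
 *		return;		// no free WQEBBs in the RQ work queue
 *	// ... fill the WQE with the rx buffer address at index pi ...
 *	// ... after the completion for this WQE is handled ...
 *	hinic_update_rq_local_ci(hwdev, q_id, 1);
 *
 * hinic_return_rq_wqe() hands WQEBBs back without posting them, e.g. when
 * buffer setup fails after a WQE has already been reserved.
 */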
#endif /* _HINIC_PMD_NICIO_H_ */