xref: /dpdk/lib/eal/loongarch/include/rte_vect.h (revision c6552d9a8deffa448de2d5e2e726f50508c1efd2)
129631ee5SMin Zhou /* SPDX-License-Identifier: BSD-3-Clause
229631ee5SMin Zhou  * Copyright(c) 2022 Loongson Technology Corporation Limited
329631ee5SMin Zhou  */
429631ee5SMin Zhou 
529631ee5SMin Zhou #ifndef RTE_VECT_LOONGARCH_H
629631ee5SMin Zhou #define RTE_VECT_LOONGARCH_H
729631ee5SMin Zhou 
#include <stdint.h>
#include <string.h>

#include "generic/rte_vect.h"
#include "rte_common.h"
1129631ee5SMin Zhou 
1229631ee5SMin Zhou #ifdef __cplusplus
1329631ee5SMin Zhou extern "C" {
1429631ee5SMin Zhou #endif
1529631ee5SMin Zhou 
1629631ee5SMin Zhou #define RTE_VECT_DEFAULT_SIMD_BITWIDTH RTE_VECT_SIMD_DISABLED
1729631ee5SMin Zhou 
/*
 * 128-bit vector register type, modeled as a 16-byte-aligned union of
 * scalar lane views (LoongArch backend uses plain scalar code rather
 * than SIMD intrinsics here).
 */
typedef union __rte_aligned(16) xmm {
	int8_t   i8[16];  /* 16 x  8-bit signed lanes */
	int16_t  i16[8];  /*  8 x 16-bit signed lanes */
	int32_t  i32[4];  /*  4 x 32-bit signed lanes */
	int64_t  i64[2];  /*  2 x 64-bit signed lanes */
	uint8_t  u8[16];  /* 16 x  8-bit unsigned lanes */
	uint16_t u16[8];  /*  8 x 16-bit unsigned lanes */
	uint32_t u32[4];  /*  4 x 32-bit unsigned lanes */
	uint64_t u64[2];  /*  2 x 64-bit unsigned lanes */
	double   pd[2];   /*  2 x double-precision lanes */
} xmm_t;
2929631ee5SMin Zhou 
/* Size in bytes of one xmm_t vector (16). */
#define XMM_SIZE        (sizeof(xmm_t))
/* Low-bit mask for XMM_SIZE-granular offset/alignment arithmetic. */
#define XMM_MASK        (XMM_SIZE - 1)
3229631ee5SMin Zhou 
/*
 * Convenience wrapper around xmm_t: exposes the same storage both as the
 * vector type (.x) and as lane arrays sized from XMM_SIZE, so code can
 * manipulate individual lanes of a vector value.
 */
typedef union __rte_aligned(16) rte_xmm {
	xmm_t	 x;                                 /* whole-vector view */
	uint8_t	 u8[XMM_SIZE / sizeof(uint8_t)];    /* byte lanes */
	uint16_t u16[XMM_SIZE / sizeof(uint16_t)];  /* 16-bit lanes */
	uint32_t u32[XMM_SIZE / sizeof(uint32_t)];  /* 32-bit lanes */
	uint64_t u64[XMM_SIZE / sizeof(uint64_t)];  /* 64-bit lanes */
	double   pd[XMM_SIZE / sizeof(double)];     /* double lanes */
} rte_xmm_t;
4129631ee5SMin Zhou 
4229631ee5SMin Zhou static inline xmm_t
vect_load_128(void * p)4329631ee5SMin Zhou vect_load_128(void *p)
4429631ee5SMin Zhou {
4529631ee5SMin Zhou 	xmm_t ret = *((xmm_t *)p);
4629631ee5SMin Zhou 
4729631ee5SMin Zhou 	return ret;
4829631ee5SMin Zhou }
4929631ee5SMin Zhou 
5029631ee5SMin Zhou static inline xmm_t
vect_and(xmm_t data,xmm_t mask)5129631ee5SMin Zhou vect_and(xmm_t data, xmm_t mask)
5229631ee5SMin Zhou {
5329631ee5SMin Zhou 	rte_xmm_t ret = {.x = data };
5429631ee5SMin Zhou 	rte_xmm_t m = {.x = mask };
5529631ee5SMin Zhou 	ret.u64[0] &= m.u64[0];
5629631ee5SMin Zhou 	ret.u64[1] &= m.u64[1];
5729631ee5SMin Zhou 
5829631ee5SMin Zhou 	return ret.x;
5929631ee5SMin Zhou }
6029631ee5SMin Zhou 
6129631ee5SMin Zhou #ifdef __cplusplus
6229631ee5SMin Zhou }
6329631ee5SMin Zhou #endif
6429631ee5SMin Zhou 
6529631ee5SMin Zhou #endif /* RTE_VECT_LOONGARCH_H */
66