/*
 * Armv7-A specific checksum implementation using NEON
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 */

#include "networking.h"
#include "../chksum_common.h"

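/*
 * Ask the compiler to enable NEON for this file when the command line
 * has not already done so, so the arm_neon.h intrinsics are available.
 */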
#ifndef __ARM_NEON
#pragma GCC target("+simd")
#endif

#include <arm_neon.h>

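/*
 * Ones'-complement (Internet) checksum over an arbitrary buffer.
 * slurp_small() and fold_and_swap() are helpers from ../chksum_common.h;
 * the short-buffer path assumes slurp_small() handles any alignment.
 */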
unsigned short
__chksum_arm_simd(const void *ptr, unsigned int nbytes)
{
    bool swap = (uintptr_t) ptr & 1;
    uint64x1_t vsum = { 0 };

    if (unlikely(nbytes < 40))
    {
        uint64_t sum = slurp_small(ptr, nbytes);
        return fold_and_swap(sum, false);
    }

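    /*
     * The head alignment below relies on two properties: an aligned
     * 8-byte load cannot cross a page boundary, so reading the whole
     * word containing ptr is safe, and masking the leading bytes keeps
     * every remaining byte at its original even/odd offset, which the
     * ones'-complement 16-bit word sum depends on.
     */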
    /* 8-byte align pointer */
    /* Inline slurp_head-like code since we use NEON here */
    Assert(nbytes >= 8);
    uint32_t off = (uintptr_t) ptr & 7;
    if (likely(off != 0))
    {
        const uint64_t *may_alias ptr64 = align_ptr(ptr, 8);
        uint64x1_t vword64 = vld1_u64(ptr64);
        /* Get rid of bytes 0..off-1 */
        uint64x1_t vmask = vdup_n_u64(ALL_ONES);
        int64x1_t vshiftl = vdup_n_s64(CHAR_BIT * off);
        vmask = vshl_u64(vmask, vshiftl);
        vword64 = vand_u64(vword64, vmask);
        uint32x2_t vtmp = vreinterpret_u32_u64(vword64);
        /* Set accumulator */
        vsum = vpaddl_u32(vtmp);
        /* Update pointer and remaining size */
        ptr = (char *) ptr64 + 8;
        nbytes -= 8 - off;
    }
    Assert(((uintptr_t) ptr & 7) == 0);

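    /*
     * Four independent accumulators keep the vpadalq_u32 results in
     * separate dependency chains, so the accumulate latency is hidden
     * across the unrolled 64-byte iterations.
     */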
    /* Sum groups of 64 bytes */
    uint64x2_t vsum0 = { 0, 0 };
    uint64x2_t vsum1 = { 0, 0 };
    uint64x2_t vsum2 = { 0, 0 };
    uint64x2_t vsum3 = { 0, 0 };
    const uint32_t *may_alias ptr32 = ptr;
    for (uint32_t i = 0; i < nbytes / 64; i++)
    {
        uint32x4_t vtmp0 = vld1q_u32(ptr32);
        uint32x4_t vtmp1 = vld1q_u32(ptr32 + 4);
        uint32x4_t vtmp2 = vld1q_u32(ptr32 + 8);
        uint32x4_t vtmp3 = vld1q_u32(ptr32 + 12);
        vsum0 = vpadalq_u32(vsum0, vtmp0);
        vsum1 = vpadalq_u32(vsum1, vtmp1);
        vsum2 = vpadalq_u32(vsum2, vtmp2);
        vsum3 = vpadalq_u32(vsum3, vtmp3);
        ptr32 += 16;
    }
    nbytes %= 64;

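    /*
     * Reinterpreting a 64-bit accumulator as two 32-bit halves and
     * pairwise-accumulating adds the high half at weight 2^0 instead of
     * 2^32. This preserves the result because the checksum is reduced
     * mod 2^16 - 1 and 2^32 == 1 (mod 2^16 - 1).
     */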
    /* Fold vsum1/vsum2/vsum3 into vsum0 */
    vsum0 = vpadalq_u32(vsum0, vreinterpretq_u32_u64(vsum2));
    vsum1 = vpadalq_u32(vsum1, vreinterpretq_u32_u64(vsum3));
    vsum0 = vpadalq_u32(vsum0, vreinterpretq_u32_u64(vsum1));

    /* Add any trailing 16-byte groups */
    while (likely(nbytes >= 16))
    {
        uint32x4_t vtmp0 = vld1q_u32(ptr32);
        vsum0 = vpadalq_u32(vsum0, vtmp0);
        ptr32 += 4;
        nbytes -= 16;
    }
    Assert(nbytes < 16);

    /* Fold vsum0 into vsum */
    {
        /* 4xu32 (4x32b) -> 2xu64 (2x33b) */
        vsum0 = vpaddlq_u32(vreinterpretq_u32_u64(vsum0));
        /* 4xu32 (2x(1b+32b)) -> 2xu64 (2x(0b+32b)) */
        vsum0 = vpaddlq_u32(vreinterpretq_u32_u64(vsum0));
        /* Both lanes now fit in 32 bits, so narrowing is lossless */
        Assert((vgetq_lane_u64(vsum0, 0) >> 32) == 0);
        Assert((vgetq_lane_u64(vsum0, 1) >> 32) == 0);
        uint32x2_t vtmp = vmovn_u64(vsum0);
        /* Add to accumulator */
        vsum = vpadal_u32(vsum, vtmp);
    }

    /* Add any trailing group of 8 bytes */
    if (nbytes & 8)
    {
        uint32x2_t vtmp = vld1_u32(ptr32);
        /* Add to accumulator */
        vsum = vpadal_u32(vsum, vtmp);
        ptr32 += 2;
        nbytes -= 8;
    }
    Assert(nbytes < 8);

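    /*
     * As with the head, the 8-byte load below stays within a single
     * aligned 8-byte granule, so it cannot fault even though fewer than
     * 8 buffer bytes remain; the mask discards the bytes past the end.
     */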
    /* Handle any trailing 1..7 bytes */
    if (likely(nbytes != 0))
    {
        Assert(((uintptr_t) ptr32 & 7) == 0);
        Assert(nbytes < 8);
        uint64x1_t vword64 = vld1_u64((const uint64_t *) ptr32);
        /* Get rid of bytes nbytes..7 */
        uint64x1_t vmask = vdup_n_u64(ALL_ONES);
        int64x1_t vshiftr = vdup_n_s64(-CHAR_BIT * (8 - nbytes));
        vmask = vshl_u64(vmask, vshiftr); /* Negative shift count shifts right */
        vword64 = vand_u64(vword64, vmask);
        /* Fold 64-bit sum to 33 bits */
        vword64 = vpaddl_u32(vreinterpret_u32_u64(vword64));
        /* Add to accumulator */
        vsum = vpadal_u32(vsum, vreinterpret_u32_u64(vword64));
    }

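    /*
     * Two pairwise-add passes fold the 64-bit sum to at most 33 bits
     * and then to 32 bits; moving the high half to weight 2^0 is again
     * harmless mod 2^16 - 1.
     */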
    /* Fold 64-bit vsum to 32 bits */
    vsum = vpaddl_u32(vreinterpret_u32_u64(vsum));
    vsum = vpaddl_u32(vreinterpret_u32_u64(vsum));
    Assert(vget_lane_u32(vreinterpret_u32_u64(vsum), 1) == 0);

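    /*
     * The same argument folds 32 bits to 16: 2^16 == 1 (mod 2^16 - 1),
     * so pairwise-adding the 16-bit halves twice leaves the checksum in
     * the lowest 16-bit lane with all upper lanes zero.
     */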
    /* Fold 32-bit vsum to 16 bits */
    uint32x2_t vsum32 = vreinterpret_u32_u64(vsum);
    vsum32 = vpaddl_u16(vreinterpret_u16_u32(vsum32));
    vsum32 = vpaddl_u16(vreinterpret_u16_u32(vsum32));
    Assert(vget_lane_u16(vreinterpret_u16_u32(vsum32), 1) == 0);
    Assert(vget_lane_u16(vreinterpret_u16_u32(vsum32), 2) == 0);
    Assert(vget_lane_u16(vreinterpret_u16_u32(vsum32), 3) == 0);

    /* Convert to 16-bit scalar */
    uint16_t sum = vget_lane_u16(vreinterpret_u16_u32(vsum32), 0);

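    /*
     * An odd base pointer puts every byte in the opposite half of its
     * 16-bit word relative to the word-aligned sums computed above;
     * byte-swapping the folded result restores the correct checksum.
     */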
    if (unlikely(swap)) /* Odd base pointer is unexpected */
    {
        sum = bswap16(sum);
    }
    return sum;
}