/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (C) 2016 Romain Dolbeau. All rights reserved.
 * Copyright (C) 2016 Gvozden Nešković. All rights reserved.
 */

#include <sys/isa_defs.h>

#if defined(__x86_64) && defined(HAVE_AVX512F)

#include <sys/types.h>
#include <sys/simd.h>
#include <sys/debug.h>

#ifdef __linux__
#define __asm __asm__ __volatile__
#endif

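/*
 * REG_CNT(r...) expands to the number of register arguments passed (1-8).
 * VR0()..VR7() pick the Nth argument out of such a register list and paste
 * it into a "zmmN" register name for the inline assembly below; the
 * VRy0()..VRy7() variants produce the matching "ymmN" names. The extra
 * 1, 2, ... arguments are only padding so the helper macros always receive
 * enough parameters.
 */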
#define _REG_CNT(_0, _1, _2, _3, _4, _5, _6, _7, N, ...) N
#define REG_CNT(r...) _REG_CNT(r, 8, 7, 6, 5, 4, 3, 2, 1)

#define VR0_(REG, ...) "zmm"#REG
#define VR1_(_1, REG, ...) "zmm"#REG
#define VR2_(_1, _2, REG, ...) "zmm"#REG
#define VR3_(_1, _2, _3, REG, ...) "zmm"#REG
#define VR4_(_1, _2, _3, _4, REG, ...) "zmm"#REG
#define VR5_(_1, _2, _3, _4, _5, REG, ...) "zmm"#REG
#define VR6_(_1, _2, _3, _4, _5, _6, REG, ...) "zmm"#REG
#define VR7_(_1, _2, _3, _4, _5, _6, _7, REG, ...) "zmm"#REG

#define VR0(r...) VR0_(r)
#define VR1(r...) VR1_(r)
#define VR2(r...) VR2_(r, 1)
#define VR3(r...) VR3_(r, 1, 2)
#define VR4(r...) VR4_(r, 1, 2)
#define VR5(r...) VR5_(r, 1, 2, 3)
#define VR6(r...) VR6_(r, 1, 2, 3, 4)
#define VR7(r...) VR7_(r, 1, 2, 3, 4, 5)

#define VRy0_(REG, ...) "ymm"#REG
#define VRy1_(_1, REG, ...) "ymm"#REG
#define VRy2_(_1, _2, REG, ...) "ymm"#REG
#define VRy3_(_1, _2, _3, REG, ...) "ymm"#REG
#define VRy4_(_1, _2, _3, _4, REG, ...) "ymm"#REG
#define VRy5_(_1, _2, _3, _4, _5, REG, ...) "ymm"#REG
#define VRy6_(_1, _2, _3, _4, _5, _6, REG, ...) "ymm"#REG
#define VRy7_(_1, _2, _3, _4, _5, _6, _7, REG, ...) "ymm"#REG

#define VRy0(r...) VRy0_(r)
#define VRy1(r...) VRy1_(r)
#define VRy2(r...) VRy2_(r, 1)
#define VRy3(r...) VRy3_(r, 1, 2)
#define VRy4(r...) VRy4_(r, 1, 2)
#define VRy5(r...) VRy5_(r, 1, 2, 3)
#define VRy6(r...) VRy6_(r, 1, 2, 3, 4)
#define VRy7(r...) VRy7_(r, 1, 2, 3, 4, 5)

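/* R_01()/R_23() split a four-register list into its first and second pair. */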
#define R_01(REG1, REG2, ...) REG1, REG2
#define _R_23(_0, _1, REG2, REG3, ...) REG2, REG3
#define R_23(REG...) _R_23(REG, 1, 2, 3)

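/*
 * One data element is 64 bytes, the width of a zmm register; the alignment
 * keeps the vmovdqa64 (aligned) loads and stores below legal.
 */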
#define ELEM_SIZE 64

typedef struct v {
	uint8_t b[ELEM_SIZE] __attribute__((aligned(ELEM_SIZE)));
} v_t;

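/*
 * The macros below wrap short blocks of AVX-512 inline assembly operating on
 * the register lists named via VR0()..VR7(): XOR_ACC() xors four 64-byte
 * source blocks into the named registers, XOR()/COPY()/ZERO() combine or
 * initialize register groups, and LOAD()/STORE() move a whole 256-byte
 * stripe between memory and four registers. Each switch only handles the
 * argument counts the generic implementation actually uses.
 */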
#define XOR_ACC(src, r...) \
{ \
	switch (REG_CNT(r)) { \
	case 4: \
		__asm( \
		    "vpxorq 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
		    "vpxorq 0x40(%[SRC]), %%" VR1(r)", %%" VR1(r) "\n" \
		    "vpxorq 0x80(%[SRC]), %%" VR2(r)", %%" VR2(r) "\n" \
		    "vpxorq 0xc0(%[SRC]), %%" VR3(r)", %%" VR3(r) "\n" \
		    : : [SRC] "r" (src)); \
		break; \
	} \
}

#define XOR(r...) \
{ \
	switch (REG_CNT(r)) { \
	case 8: \
		__asm( \
		    "vpxorq %" VR0(r) ", %" VR4(r)", %" VR4(r) "\n" \
		    "vpxorq %" VR1(r) ", %" VR5(r)", %" VR5(r) "\n" \
		    "vpxorq %" VR2(r) ", %" VR6(r)", %" VR6(r) "\n" \
		    "vpxorq %" VR3(r) ", %" VR7(r)", %" VR7(r)); \
		break; \
	case 4: \
		__asm( \
		    "vpxorq %" VR0(r) ", %" VR2(r)", %" VR2(r) "\n" \
		    "vpxorq %" VR1(r) ", %" VR3(r)", %" VR3(r)); \
		break; \
	} \
}


#define ZERO(r...) XOR(r, r)


#define COPY(r...) \
{ \
	switch (REG_CNT(r)) { \
	case 8: \
		__asm( \
		    "vmovdqa64 %" VR0(r) ", %" VR4(r) "\n" \
		    "vmovdqa64 %" VR1(r) ", %" VR5(r) "\n" \
		    "vmovdqa64 %" VR2(r) ", %" VR6(r) "\n" \
		    "vmovdqa64 %" VR3(r) ", %" VR7(r)); \
		break; \
	case 4: \
		__asm( \
		    "vmovdqa64 %" VR0(r) ", %" VR2(r) "\n" \
		    "vmovdqa64 %" VR1(r) ", %" VR3(r)); \
		break; \
	} \
}

#define LOAD(src, r...) \
{ \
	switch (REG_CNT(r)) { \
	case 4: \
		__asm( \
		    "vmovdqa64 0x00(%[SRC]), %%" VR0(r) "\n" \
		    "vmovdqa64 0x40(%[SRC]), %%" VR1(r) "\n" \
		    "vmovdqa64 0x80(%[SRC]), %%" VR2(r) "\n" \
		    "vmovdqa64 0xc0(%[SRC]), %%" VR3(r) "\n" \
		    : : [SRC] "r" (src)); \
		break; \
	} \
}

#define STORE(dst, r...) \
{ \
	switch (REG_CNT(r)) { \
	case 4: \
		__asm( \
		    "vmovdqa64 %%" VR0(r) ", 0x00(%[DST])\n" \
		    "vmovdqa64 %%" VR1(r) ", 0x40(%[DST])\n" \
		    "vmovdqa64 %%" VR2(r) ", 0x80(%[DST])\n" \
		    "vmovdqa64 %%" VR3(r) ", 0xc0(%[DST])\n" \
		    : : [DST] "r" (dst)); \
		break; \
	} \
}

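/*
 * Multiplication by 2 in GF(2^8) (the RAID-Z generator polynomial is 0x11d).
 * MUL2_SETUP() broadcasts three per-byte masks into scratch registers:
 * zmm31 = 0x1d (the reduction term), zmm30 = 0x80 (the per-byte high bit)
 * and zmm29 = 0xfe (clears the bit the 64-bit shift drags in from the
 * neighboring byte). _MUL2() then doubles every byte of two registers:
 * build a 0xff mask for bytes whose high bit is set, shift the data left by
 * one, and let vpternlogd compute (reg & 0xfe) ^ (mask & 0x1d) in one step.
 * MUL2() applies this to two or four registers; MUL4() is two doublings.
 */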
#define MUL2_SETUP() \
{ \
	__asm("vmovq %0, %%xmm31" :: "r"(0x1d1d1d1d1d1d1d1d)); \
	__asm("vpbroadcastq %xmm31, %zmm31"); \
	__asm("vmovq %0, %%xmm30" :: "r"(0x8080808080808080)); \
	__asm("vpbroadcastq %xmm30, %zmm30"); \
	__asm("vmovq %0, %%xmm29" :: "r"(0xfefefefefefefefe)); \
	__asm("vpbroadcastq %xmm29, %zmm29"); \
}

#define _MUL2(r...) \
{ \
	switch (REG_CNT(r)) { \
	case 2: \
		__asm( \
		    "vpandq %" VR0(r)", %zmm30, %zmm26\n" \
		    "vpandq %" VR1(r)", %zmm30, %zmm25\n" \
		    "vpsrlq $7, %zmm26, %zmm28\n" \
		    "vpsrlq $7, %zmm25, %zmm27\n" \
		    "vpsllq $1, %zmm26, %zmm26\n" \
		    "vpsllq $1, %zmm25, %zmm25\n" \
		    "vpsubq %zmm28, %zmm26, %zmm26\n" \
		    "vpsubq %zmm27, %zmm25, %zmm25\n" \
		    "vpsllq $1, %" VR0(r)", %" VR0(r) "\n" \
		    "vpsllq $1, %" VR1(r)", %" VR1(r) "\n" \
		    "vpandq %zmm26, %zmm31, %zmm26\n" \
		    "vpandq %zmm25, %zmm31, %zmm25\n" \
		    "vpternlogd $0x6c, %zmm29, %zmm26, %" VR0(r) "\n" \
		    "vpternlogd $0x6c, %zmm29, %zmm25, %" VR1(r)); \
		break; \
	default: \
		VERIFY(0); \
	} \
}

#define MUL2(r...) \
{ \
	switch (REG_CNT(r)) { \
	case 4: \
		_MUL2(R_01(r)); \
		_MUL2(R_23(r)); \
		break; \
	case 2: \
		_MUL2(r); \
		break; \
	} \
}

#define MUL4(r...) \
{ \
	MUL2(r); \
	MUL2(r); \
}


/* General multiplication by adding powers of two */

#define _mul_x2_in 21, 22
#define _mul_x2_acc 23, 24

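/*
 * _MUL_PARAM(x, in, acc) multiplies the registers in `in` by the
 * compile-time constant x: `in` is repeatedly doubled with MUL2() and xored
 * into `acc` for every bit set in x, so `acc` ends up holding x * in in
 * GF(2^8). The 0xfe/0xfc/... guards skip doublings once no higher bits of x
 * remain.
 */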
#define _MUL_PARAM(x, in, acc) \
{ \
	if (x & 0x01) { COPY(in, acc); } else { ZERO(acc); } \
	if (x & 0xfe) { MUL2(in); } \
	if (x & 0x02) { XOR(in, acc); } \
	if (x & 0xfc) { MUL2(in); } \
	if (x & 0x04) { XOR(in, acc); } \
	if (x & 0xf8) { MUL2(in); } \
	if (x & 0x08) { XOR(in, acc); } \
	if (x & 0xf0) { MUL2(in); } \
	if (x & 0x10) { XOR(in, acc); } \
	if (x & 0xe0) { MUL2(in); } \
	if (x & 0x20) { XOR(in, acc); } \
	if (x & 0xc0) { MUL2(in); } \
	if (x & 0x40) { XOR(in, acc); } \
	if (x & 0x80) { MUL2(in); XOR(in, acc); } \
}

#define MUL_x2_DEFINE(x) \
static void \
mul_x2_ ## x(void) { _MUL_PARAM(x, _mul_x2_in, _mul_x2_acc); }


MUL_x2_DEFINE(0); MUL_x2_DEFINE(1); MUL_x2_DEFINE(2); MUL_x2_DEFINE(3);
MUL_x2_DEFINE(4); MUL_x2_DEFINE(5); MUL_x2_DEFINE(6); MUL_x2_DEFINE(7);
MUL_x2_DEFINE(8); MUL_x2_DEFINE(9); MUL_x2_DEFINE(10); MUL_x2_DEFINE(11);
MUL_x2_DEFINE(12); MUL_x2_DEFINE(13); MUL_x2_DEFINE(14); MUL_x2_DEFINE(15);
MUL_x2_DEFINE(16); MUL_x2_DEFINE(17); MUL_x2_DEFINE(18); MUL_x2_DEFINE(19);
MUL_x2_DEFINE(20); MUL_x2_DEFINE(21); MUL_x2_DEFINE(22); MUL_x2_DEFINE(23);
MUL_x2_DEFINE(24); MUL_x2_DEFINE(25); MUL_x2_DEFINE(26); MUL_x2_DEFINE(27);
MUL_x2_DEFINE(28); MUL_x2_DEFINE(29); MUL_x2_DEFINE(30); MUL_x2_DEFINE(31);
MUL_x2_DEFINE(32); MUL_x2_DEFINE(33); MUL_x2_DEFINE(34); MUL_x2_DEFINE(35);
MUL_x2_DEFINE(36); MUL_x2_DEFINE(37); MUL_x2_DEFINE(38); MUL_x2_DEFINE(39);
MUL_x2_DEFINE(40); MUL_x2_DEFINE(41); MUL_x2_DEFINE(42); MUL_x2_DEFINE(43);
MUL_x2_DEFINE(44); MUL_x2_DEFINE(45); MUL_x2_DEFINE(46); MUL_x2_DEFINE(47);
MUL_x2_DEFINE(48); MUL_x2_DEFINE(49); MUL_x2_DEFINE(50); MUL_x2_DEFINE(51);
MUL_x2_DEFINE(52); MUL_x2_DEFINE(53); MUL_x2_DEFINE(54); MUL_x2_DEFINE(55);
MUL_x2_DEFINE(56); MUL_x2_DEFINE(57); MUL_x2_DEFINE(58); MUL_x2_DEFINE(59);
MUL_x2_DEFINE(60); MUL_x2_DEFINE(61); MUL_x2_DEFINE(62); MUL_x2_DEFINE(63);
MUL_x2_DEFINE(64); MUL_x2_DEFINE(65); MUL_x2_DEFINE(66); MUL_x2_DEFINE(67);
MUL_x2_DEFINE(68); MUL_x2_DEFINE(69); MUL_x2_DEFINE(70); MUL_x2_DEFINE(71);
MUL_x2_DEFINE(72); MUL_x2_DEFINE(73); MUL_x2_DEFINE(74); MUL_x2_DEFINE(75);
MUL_x2_DEFINE(76); MUL_x2_DEFINE(77); MUL_x2_DEFINE(78); MUL_x2_DEFINE(79);
MUL_x2_DEFINE(80); MUL_x2_DEFINE(81); MUL_x2_DEFINE(82); MUL_x2_DEFINE(83);
MUL_x2_DEFINE(84); MUL_x2_DEFINE(85); MUL_x2_DEFINE(86); MUL_x2_DEFINE(87);
MUL_x2_DEFINE(88); MUL_x2_DEFINE(89); MUL_x2_DEFINE(90); MUL_x2_DEFINE(91);
MUL_x2_DEFINE(92); MUL_x2_DEFINE(93); MUL_x2_DEFINE(94); MUL_x2_DEFINE(95);
MUL_x2_DEFINE(96); MUL_x2_DEFINE(97); MUL_x2_DEFINE(98); MUL_x2_DEFINE(99);
MUL_x2_DEFINE(100); MUL_x2_DEFINE(101); MUL_x2_DEFINE(102); MUL_x2_DEFINE(103);
MUL_x2_DEFINE(104); MUL_x2_DEFINE(105); MUL_x2_DEFINE(106); MUL_x2_DEFINE(107);
MUL_x2_DEFINE(108); MUL_x2_DEFINE(109); MUL_x2_DEFINE(110); MUL_x2_DEFINE(111);
MUL_x2_DEFINE(112); MUL_x2_DEFINE(113); MUL_x2_DEFINE(114); MUL_x2_DEFINE(115);
MUL_x2_DEFINE(116); MUL_x2_DEFINE(117); MUL_x2_DEFINE(118); MUL_x2_DEFINE(119);
MUL_x2_DEFINE(120); MUL_x2_DEFINE(121); MUL_x2_DEFINE(122); MUL_x2_DEFINE(123);
MUL_x2_DEFINE(124); MUL_x2_DEFINE(125); MUL_x2_DEFINE(126); MUL_x2_DEFINE(127);
MUL_x2_DEFINE(128); MUL_x2_DEFINE(129); MUL_x2_DEFINE(130); MUL_x2_DEFINE(131);
MUL_x2_DEFINE(132); MUL_x2_DEFINE(133); MUL_x2_DEFINE(134); MUL_x2_DEFINE(135);
MUL_x2_DEFINE(136); MUL_x2_DEFINE(137); MUL_x2_DEFINE(138); MUL_x2_DEFINE(139);
MUL_x2_DEFINE(140); MUL_x2_DEFINE(141); MUL_x2_DEFINE(142); MUL_x2_DEFINE(143);
MUL_x2_DEFINE(144); MUL_x2_DEFINE(145); MUL_x2_DEFINE(146); MUL_x2_DEFINE(147);
MUL_x2_DEFINE(148); MUL_x2_DEFINE(149); MUL_x2_DEFINE(150); MUL_x2_DEFINE(151);
MUL_x2_DEFINE(152); MUL_x2_DEFINE(153); MUL_x2_DEFINE(154); MUL_x2_DEFINE(155);
MUL_x2_DEFINE(156); MUL_x2_DEFINE(157); MUL_x2_DEFINE(158); MUL_x2_DEFINE(159);
MUL_x2_DEFINE(160); MUL_x2_DEFINE(161); MUL_x2_DEFINE(162); MUL_x2_DEFINE(163);
MUL_x2_DEFINE(164); MUL_x2_DEFINE(165); MUL_x2_DEFINE(166); MUL_x2_DEFINE(167);
MUL_x2_DEFINE(168); MUL_x2_DEFINE(169); MUL_x2_DEFINE(170); MUL_x2_DEFINE(171);
MUL_x2_DEFINE(172); MUL_x2_DEFINE(173); MUL_x2_DEFINE(174); MUL_x2_DEFINE(175);
MUL_x2_DEFINE(176); MUL_x2_DEFINE(177); MUL_x2_DEFINE(178); MUL_x2_DEFINE(179);
MUL_x2_DEFINE(180); MUL_x2_DEFINE(181); MUL_x2_DEFINE(182); MUL_x2_DEFINE(183);
MUL_x2_DEFINE(184); MUL_x2_DEFINE(185); MUL_x2_DEFINE(186); MUL_x2_DEFINE(187);
MUL_x2_DEFINE(188); MUL_x2_DEFINE(189); MUL_x2_DEFINE(190); MUL_x2_DEFINE(191);
MUL_x2_DEFINE(192); MUL_x2_DEFINE(193); MUL_x2_DEFINE(194); MUL_x2_DEFINE(195);
MUL_x2_DEFINE(196); MUL_x2_DEFINE(197); MUL_x2_DEFINE(198); MUL_x2_DEFINE(199);
MUL_x2_DEFINE(200); MUL_x2_DEFINE(201); MUL_x2_DEFINE(202); MUL_x2_DEFINE(203);
MUL_x2_DEFINE(204); MUL_x2_DEFINE(205); MUL_x2_DEFINE(206); MUL_x2_DEFINE(207);
MUL_x2_DEFINE(208); MUL_x2_DEFINE(209); MUL_x2_DEFINE(210); MUL_x2_DEFINE(211);
MUL_x2_DEFINE(212); MUL_x2_DEFINE(213); MUL_x2_DEFINE(214); MUL_x2_DEFINE(215);
MUL_x2_DEFINE(216); MUL_x2_DEFINE(217); MUL_x2_DEFINE(218); MUL_x2_DEFINE(219);
MUL_x2_DEFINE(220); MUL_x2_DEFINE(221); MUL_x2_DEFINE(222); MUL_x2_DEFINE(223);
MUL_x2_DEFINE(224); MUL_x2_DEFINE(225); MUL_x2_DEFINE(226); MUL_x2_DEFINE(227);
MUL_x2_DEFINE(228); MUL_x2_DEFINE(229); MUL_x2_DEFINE(230); MUL_x2_DEFINE(231);
MUL_x2_DEFINE(232); MUL_x2_DEFINE(233); MUL_x2_DEFINE(234); MUL_x2_DEFINE(235);
MUL_x2_DEFINE(236); MUL_x2_DEFINE(237); MUL_x2_DEFINE(238); MUL_x2_DEFINE(239);
MUL_x2_DEFINE(240); MUL_x2_DEFINE(241); MUL_x2_DEFINE(242); MUL_x2_DEFINE(243);
MUL_x2_DEFINE(244); MUL_x2_DEFINE(245); MUL_x2_DEFINE(246); MUL_x2_DEFINE(247);
MUL_x2_DEFINE(248); MUL_x2_DEFINE(249); MUL_x2_DEFINE(250); MUL_x2_DEFINE(251);
MUL_x2_DEFINE(252); MUL_x2_DEFINE(253); MUL_x2_DEFINE(254); MUL_x2_DEFINE(255);

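/*
 * Jump table of the 256 specialized multiply routines above, indexed by the
 * GF(2^8) constant. Each routine reads the fixed register pair _mul_x2_in
 * and leaves the product in _mul_x2_acc.
 */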
typedef void (*mul_fn_ptr_t)(void);

static const mul_fn_ptr_t __attribute__((aligned(256)))
gf_x2_mul_fns[256] = {
	mul_x2_0, mul_x2_1, mul_x2_2, mul_x2_3, mul_x2_4, mul_x2_5,
	mul_x2_6, mul_x2_7, mul_x2_8, mul_x2_9, mul_x2_10, mul_x2_11,
	mul_x2_12, mul_x2_13, mul_x2_14, mul_x2_15, mul_x2_16, mul_x2_17,
	mul_x2_18, mul_x2_19, mul_x2_20, mul_x2_21, mul_x2_22, mul_x2_23,
	mul_x2_24, mul_x2_25, mul_x2_26, mul_x2_27, mul_x2_28, mul_x2_29,
	mul_x2_30, mul_x2_31, mul_x2_32, mul_x2_33, mul_x2_34, mul_x2_35,
	mul_x2_36, mul_x2_37, mul_x2_38, mul_x2_39, mul_x2_40, mul_x2_41,
	mul_x2_42, mul_x2_43, mul_x2_44, mul_x2_45, mul_x2_46, mul_x2_47,
	mul_x2_48, mul_x2_49, mul_x2_50, mul_x2_51, mul_x2_52, mul_x2_53,
	mul_x2_54, mul_x2_55, mul_x2_56, mul_x2_57, mul_x2_58, mul_x2_59,
	mul_x2_60, mul_x2_61, mul_x2_62, mul_x2_63, mul_x2_64, mul_x2_65,
	mul_x2_66, mul_x2_67, mul_x2_68, mul_x2_69, mul_x2_70, mul_x2_71,
	mul_x2_72, mul_x2_73, mul_x2_74, mul_x2_75, mul_x2_76, mul_x2_77,
	mul_x2_78, mul_x2_79, mul_x2_80, mul_x2_81, mul_x2_82, mul_x2_83,
	mul_x2_84, mul_x2_85, mul_x2_86, mul_x2_87, mul_x2_88, mul_x2_89,
	mul_x2_90, mul_x2_91, mul_x2_92, mul_x2_93, mul_x2_94, mul_x2_95,
	mul_x2_96, mul_x2_97, mul_x2_98, mul_x2_99, mul_x2_100, mul_x2_101,
	mul_x2_102, mul_x2_103, mul_x2_104, mul_x2_105, mul_x2_106, mul_x2_107,
	mul_x2_108, mul_x2_109, mul_x2_110, mul_x2_111, mul_x2_112, mul_x2_113,
	mul_x2_114, mul_x2_115, mul_x2_116, mul_x2_117, mul_x2_118, mul_x2_119,
	mul_x2_120, mul_x2_121, mul_x2_122, mul_x2_123, mul_x2_124, mul_x2_125,
	mul_x2_126, mul_x2_127, mul_x2_128, mul_x2_129, mul_x2_130, mul_x2_131,
	mul_x2_132, mul_x2_133, mul_x2_134, mul_x2_135, mul_x2_136, mul_x2_137,
	mul_x2_138, mul_x2_139, mul_x2_140, mul_x2_141, mul_x2_142, mul_x2_143,
	mul_x2_144, mul_x2_145, mul_x2_146, mul_x2_147, mul_x2_148, mul_x2_149,
	mul_x2_150, mul_x2_151, mul_x2_152, mul_x2_153, mul_x2_154, mul_x2_155,
	mul_x2_156, mul_x2_157, mul_x2_158, mul_x2_159, mul_x2_160, mul_x2_161,
	mul_x2_162, mul_x2_163, mul_x2_164, mul_x2_165, mul_x2_166, mul_x2_167,
	mul_x2_168, mul_x2_169, mul_x2_170, mul_x2_171, mul_x2_172, mul_x2_173,
	mul_x2_174, mul_x2_175, mul_x2_176, mul_x2_177, mul_x2_178, mul_x2_179,
	mul_x2_180, mul_x2_181, mul_x2_182, mul_x2_183, mul_x2_184, mul_x2_185,
	mul_x2_186, mul_x2_187, mul_x2_188, mul_x2_189, mul_x2_190, mul_x2_191,
	mul_x2_192, mul_x2_193, mul_x2_194, mul_x2_195, mul_x2_196, mul_x2_197,
	mul_x2_198, mul_x2_199, mul_x2_200, mul_x2_201, mul_x2_202, mul_x2_203,
	mul_x2_204, mul_x2_205, mul_x2_206, mul_x2_207, mul_x2_208, mul_x2_209,
	mul_x2_210, mul_x2_211, mul_x2_212, mul_x2_213, mul_x2_214, mul_x2_215,
	mul_x2_216, mul_x2_217, mul_x2_218, mul_x2_219, mul_x2_220, mul_x2_221,
	mul_x2_222, mul_x2_223, mul_x2_224, mul_x2_225, mul_x2_226, mul_x2_227,
	mul_x2_228, mul_x2_229, mul_x2_230, mul_x2_231, mul_x2_232, mul_x2_233,
	mul_x2_234, mul_x2_235, mul_x2_236, mul_x2_237, mul_x2_238, mul_x2_239,
	mul_x2_240, mul_x2_241, mul_x2_242, mul_x2_243, mul_x2_244, mul_x2_245,
	mul_x2_246, mul_x2_247, mul_x2_248, mul_x2_249, mul_x2_250, mul_x2_251,
	mul_x2_252, mul_x2_253, mul_x2_254, mul_x2_255
};

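/*
 * MUL(c, r...) multiplies four registers by the constant c, two at a time:
 * copy a pair into _mul_x2_in, run the per-constant routine, and copy
 * _mul_x2_acc back over the original pair.
 */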
#define MUL(c, r...) \
{ \
	switch (REG_CNT(r)) { \
	case 4: \
		COPY(R_01(r), _mul_x2_in); \
		gf_x2_mul_fns[c](); \
		COPY(_mul_x2_acc, R_01(r)); \
		COPY(R_23(r), _mul_x2_in); \
		gf_x2_mul_fns[c](); \
		COPY(_mul_x2_acc, R_23(r)); \
		break; \
	default: \
		VERIFY(0); \
	} \
}


#define raidz_math_begin() kfpu_begin()
#define raidz_math_end() kfpu_end()

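/*
 * Strides and register assignments consumed by the generic RAID-Z parity
 * generation and reconstruction templates in vdev_raidz_math_impl.h. The
 * stride of 4 means each loop iteration handles four 64-byte elements; the
 * *_DEFINE() hooks run MUL2_SETUP() wherever GF multiplication is needed.
 */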
#define SYN_STRIDE 4

#define ZERO_STRIDE 4
#define ZERO_DEFINE() {}
#define ZERO_D 0, 1, 2, 3

#define COPY_STRIDE 4
#define COPY_DEFINE() {}
#define COPY_D 0, 1, 2, 3

#define ADD_STRIDE 4
#define ADD_DEFINE() {}
#define ADD_D 0, 1, 2, 3

#define MUL_STRIDE 4
#define MUL_DEFINE() MUL2_SETUP()
#define MUL_D 0, 1, 2, 3

#define GEN_P_STRIDE 4
#define GEN_P_DEFINE() {}
#define GEN_P_P 0, 1, 2, 3

#define GEN_PQ_STRIDE 4
#define GEN_PQ_DEFINE() {}
#define GEN_PQ_D 0, 1, 2, 3
#define GEN_PQ_C 4, 5, 6, 7

#define GEN_PQR_STRIDE 4
#define GEN_PQR_DEFINE() {}
#define GEN_PQR_D 0, 1, 2, 3
#define GEN_PQR_C 4, 5, 6, 7

#define SYN_Q_DEFINE() {}
#define SYN_Q_D 0, 1, 2, 3
#define SYN_Q_X 4, 5, 6, 7

#define SYN_R_DEFINE() {}
#define SYN_R_D 0, 1, 2, 3
#define SYN_R_X 4, 5, 6, 7

#define SYN_PQ_DEFINE() {}
#define SYN_PQ_D 0, 1, 2, 3
#define SYN_PQ_X 4, 5, 6, 7

#define REC_PQ_STRIDE 4
#define REC_PQ_DEFINE() MUL2_SETUP()
#define REC_PQ_X 0, 1, 2, 3
#define REC_PQ_Y 4, 5, 6, 7
#define REC_PQ_T 8, 9, 10, 11

#define SYN_PR_DEFINE() {}
#define SYN_PR_D 0, 1, 2, 3
#define SYN_PR_X 4, 5, 6, 7

#define REC_PR_STRIDE 4
#define REC_PR_DEFINE() MUL2_SETUP()
#define REC_PR_X 0, 1, 2, 3
#define REC_PR_Y 4, 5, 6, 7
#define REC_PR_T 8, 9, 10, 11

#define SYN_QR_DEFINE() {}
#define SYN_QR_D 0, 1, 2, 3
#define SYN_QR_X 4, 5, 6, 7

#define REC_QR_STRIDE 4
#define REC_QR_DEFINE() MUL2_SETUP()
#define REC_QR_X 0, 1, 2, 3
#define REC_QR_Y 4, 5, 6, 7
#define REC_QR_T 8, 9, 10, 11

#define SYN_PQR_DEFINE() {}
#define SYN_PQR_D 0, 1, 2, 3
#define SYN_PQR_X 4, 5, 6, 7

#define REC_PQR_STRIDE 4
#define REC_PQR_DEFINE() MUL2_SETUP()
#define REC_PQR_X 0, 1, 2, 3
#define REC_PQR_Y 4, 5, 6, 7
#define REC_PQR_Z 8, 9, 10, 11
#define REC_PQR_XS 12, 13, 14, 15
#define REC_PQR_YS 16, 17, 18, 19


#include <sys/vdev_raidz_impl.h>
#include "vdev_raidz_math_impl.h"

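/* Instantiate the avx512f parity generation and reconstruction methods. */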
DEFINE_GEN_METHODS(avx512f);
DEFINE_REC_METHODS(avx512f);

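/*
 * This implementation requires kernel FPU access and the AVX, AVX2 and
 * AVX-512F CPU features.
 */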
static boolean_t
raidz_will_avx512f_work(void)
{
	return (kfpu_allowed() && zfs_avx_available() &&
	    zfs_avx2_available() && zfs_avx512f_available());
}

const raidz_impl_ops_t vdev_raidz_avx512f_impl = {
	.init = NULL,
	.fini = NULL,
	.gen = RAIDZ_GEN_METHODS(avx512f),
	.rec = RAIDZ_REC_METHODS(avx512f),
	.is_supported = &raidz_will_avx512f_work,
	.name = "avx512f"
};

#endif /* defined(__x86_64) && defined(HAVE_AVX512F) */