xref: /dpdk/lib/eal/arm/include/rte_pause_64.h (revision 2f1a90f0455b4920df3a767ab5d9be37dcbf0d12)
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 * Copyright(c) 2019 Arm Limited
 */

#ifndef _RTE_PAUSE_ARM64_H_
#define _RTE_PAUSE_ARM64_H_

#include <rte_common.h>
#include <rte_stdatomic.h>

#ifdef RTE_ARM_USE_WFE
#define RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
#endif

#include "generic/rte_pause.h"

#ifdef __cplusplus
extern "C" {
#endif

static inline void rte_pause(void)
{
	asm volatile("yield" ::: "memory");
}
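
/*
 * Illustrative sketch (not part of this header): rte_pause() is intended to
 * be called from a busy-wait loop to hint that the core is spinning.  The
 * function and flag names below are hypothetical.
 *
 *   static void spin_until_set(volatile uint32_t *flag)
 *   {
 *       while (__atomic_load_n(flag, __ATOMIC_ACQUIRE) == 0)
 *           rte_pause();
 *   }
 */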

/* Send a local event to quit WFE/WFxT. */
#define __RTE_ARM_SEVL() { asm volatile("sevl" : : : "memory"); }

/* Send a global event to quit WFE/WFxT for all cores. */
#define __RTE_ARM_SEV() { asm volatile("sev" : : : "memory"); }

/* Put processor into low power WFE (Wait For Event) state. */
#define __RTE_ARM_WFE() { asm volatile("wfe" : : : "memory"); }

/* Put processor into low power WFET (WFE with Timeout) state. */
#ifdef RTE_ARM_FEATURE_WFXT
#define __RTE_ARM_WFET(t) {                               \
	asm volatile("wfet %x[to]"                        \
			:                                 \
			: [to] "r" (t)                    \
			: "memory");                      \
	}
#else
#define __RTE_ARM_WFET(t) { RTE_SET_USED(t); }
#endif
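
/*
 * Illustrative sketch (not part of this header) of how these helpers compose
 * with the exclusive-load macros defined below: an exclusive load arms the
 * monitor on the address, SEVL pre-arms the event register so the first WFE
 * falls through, and each following WFE sleeps until the monitored location
 * is written (or any other event arrives).  'flag' is a hypothetical
 * volatile uint32_t pointer.
 *
 *   uint32_t v;
 *
 *   __RTE_ARM_LOAD_EXC_32(flag, v, rte_memory_order_acquire)
 *   if (v == 0) {
 *       __RTE_ARM_SEVL()
 *       do {
 *           __RTE_ARM_WFE()
 *           __RTE_ARM_LOAD_EXC_32(flag, v, rte_memory_order_acquire)
 *       } while (v == 0);
 *   }
 */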

/*
 * Atomic exclusive load from addr: it returns the 8-bit content of
 * *addr while making it 'monitored'. When someone else writes to it,
 * the 'monitored' state is cleared and an event is generated
 * implicitly to exit WFE.
 */
#define __RTE_ARM_LOAD_EXC_8(src, dst, memorder) {        \
	if (memorder == rte_memory_order_relaxed) {       \
		asm volatile("ldxrb %w[tmp], [%x[addr]]"  \
			: [tmp] "=&r" (dst)               \
			: [addr] "r" (src)                \
			: "memory");                      \
	} else {                                          \
		asm volatile("ldaxrb %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst)               \
			: [addr] "r" (src)                \
			: "memory");                      \
	} }

/*
 * Atomic exclusive load from addr: it returns the 16-bit content of
 * *addr while making it 'monitored'. When someone else writes to it,
 * the 'monitored' state is cleared and an event is generated
 * implicitly to exit WFE.
 */
#define __RTE_ARM_LOAD_EXC_16(src, dst, memorder) {       \
	if (memorder == rte_memory_order_relaxed) {       \
		asm volatile("ldxrh %w[tmp], [%x[addr]]"  \
			: [tmp] "=&r" (dst)               \
			: [addr] "r" (src)                \
			: "memory");                      \
	} else {                                          \
		asm volatile("ldaxrh %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst)               \
			: [addr] "r" (src)                \
			: "memory");                      \
	} }

/*
 * Atomic exclusive load from addr: it returns the 32-bit content of
 * *addr while making it 'monitored'. When someone else writes to it,
 * the 'monitored' state is cleared and an event is generated
 * implicitly to exit WFE.
 */
#define __RTE_ARM_LOAD_EXC_32(src, dst, memorder) {      \
	if (memorder == rte_memory_order_relaxed) {      \
		asm volatile("ldxr %w[tmp], [%x[addr]]"  \
			: [tmp] "=&r" (dst)              \
			: [addr] "r" (src)               \
			: "memory");                     \
	} else {                                         \
		asm volatile("ldaxr %w[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst)              \
			: [addr] "r" (src)               \
			: "memory");                     \
	} }

/*
 * Atomic exclusive load from addr: it returns the 64-bit content of
 * *addr while making it 'monitored'. When someone else writes to it,
 * the 'monitored' state is cleared and an event is generated
 * implicitly to exit WFE.
 */
#define __RTE_ARM_LOAD_EXC_64(src, dst, memorder) {      \
	if (memorder == rte_memory_order_relaxed) {      \
		asm volatile("ldxr %x[tmp], [%x[addr]]"  \
			: [tmp] "=&r" (dst)              \
			: [addr] "r" (src)               \
			: "memory");                     \
	} else {                                         \
		asm volatile("ldaxr %x[tmp], [%x[addr]]" \
			: [tmp] "=&r" (dst)              \
			: [addr] "r" (src)               \
			: "memory");                     \
	} }

/*
 * Atomic exclusive load from addr: it returns the 128-bit content of
 * *addr while making it 'monitored'. When someone else writes to it,
 * the 'monitored' state is cleared and an event is generated
 * implicitly to exit WFE.
 */
#define __RTE_ARM_LOAD_EXC_128(src, dst, memorder) {                    \
	volatile rte_int128_t *dst_128 = (volatile rte_int128_t *)&dst; \
	if (memorder == rte_memory_order_relaxed) {                     \
		asm volatile("ldxp %x[tmp0], %x[tmp1], [%x[addr]]"      \
			: [tmp0] "=&r" (dst_128->val[0]),               \
			  [tmp1] "=&r" (dst_128->val[1])                \
			: [addr] "r" (src)                              \
			: "memory");                                    \
	} else {                                                        \
		asm volatile("ldaxp %x[tmp0], %x[tmp1], [%x[addr]]"     \
			: [tmp0] "=&r" (dst_128->val[0]),               \
			  [tmp1] "=&r" (dst_128->val[1])                \
			: [addr] "r" (src)                              \
			: "memory");                                    \
	} }

#define __RTE_ARM_LOAD_EXC(src, dst, memorder, size) {      \
	RTE_BUILD_BUG_ON(size != 8 && size != 16 &&         \
		size != 32 && size != 64 && size != 128);    \
	if (size == 8)                                       \
		__RTE_ARM_LOAD_EXC_8(src, dst, memorder)     \
	else if (size == 16)                                 \
		__RTE_ARM_LOAD_EXC_16(src, dst, memorder)    \
	else if (size == 32)                                 \
		__RTE_ARM_LOAD_EXC_32(src, dst, memorder)    \
	else if (size == 64)                                 \
		__RTE_ARM_LOAD_EXC_64(src, dst, memorder)    \
	else if (size == 128)                                \
		__RTE_ARM_LOAD_EXC_128(src, dst, memorder)   \
}
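
/*
 * Illustrative note (not part of this header): 'size' must be a compile-time
 * constant (RTE_BUILD_BUG_ON requires a constant expression), so the compiler
 * can drop the untaken branches above.  For a hypothetical volatile uint64_t
 * pointer 'addr', the dispatch reduces to the 64-bit exclusive load:
 *
 *   uint64_t v;
 *
 *   __RTE_ARM_LOAD_EXC(addr, v, rte_memory_order_acquire, sizeof(*addr) << 3)
 *
 * which is how RTE_WAIT_UNTIL_MASKED below derives the access width.
 */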

#ifdef RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED

static __rte_always_inline void
rte_wait_until_equal_16(volatile uint16_t *addr, uint16_t expected,
		rte_memory_order memorder)
{
	uint16_t value;

	RTE_BUILD_BUG_ON(memorder != rte_memory_order_acquire &&
		memorder != rte_memory_order_relaxed);

	__RTE_ARM_LOAD_EXC_16(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_16(addr, value, memorder)
		} while (value != expected);
	}
}

static __rte_always_inline void
rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected,
		rte_memory_order memorder)
{
	uint32_t value;

	RTE_BUILD_BUG_ON(memorder != rte_memory_order_acquire &&
		memorder != rte_memory_order_relaxed);

	__RTE_ARM_LOAD_EXC_32(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_32(addr, value, memorder)
		} while (value != expected);
	}
}
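
/*
 * Illustrative usage (not part of this header): block until another core
 * sets a hypothetical 32-bit doorbell, sleeping in WFE between checks.
 *
 *   volatile uint32_t doorbell;
 *
 *   rte_wait_until_equal_32(&doorbell, 1, rte_memory_order_acquire);
 */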

static __rte_always_inline void
rte_wait_until_equal_64(volatile uint64_t *addr, uint64_t expected,
		rte_memory_order memorder)
{
	uint64_t value;

	RTE_BUILD_BUG_ON(memorder != rte_memory_order_acquire &&
		memorder != rte_memory_order_relaxed);

	__RTE_ARM_LOAD_EXC_64(addr, value, memorder)
	if (value != expected) {
		__RTE_ARM_SEVL()
		do {
			__RTE_ARM_WFE()
			__RTE_ARM_LOAD_EXC_64(addr, value, memorder)
		} while (value != expected);
	}
}

#define RTE_WAIT_UNTIL_MASKED(addr, mask, cond, expected, memorder) do {  \
	RTE_BUILD_BUG_ON(!__builtin_constant_p(memorder));                \
	RTE_BUILD_BUG_ON(memorder != rte_memory_order_acquire &&          \
		memorder != rte_memory_order_relaxed);                    \
	const uint32_t size = sizeof(*(addr)) << 3;                       \
	typeof(*(addr)) expected_value = (expected);                      \
	typeof(*(addr)) value;                                            \
	__RTE_ARM_LOAD_EXC((addr), value, memorder, size)                 \
	if (!((value & (mask)) cond expected_value)) {                    \
		__RTE_ARM_SEVL()                                          \
		do {                                                      \
			__RTE_ARM_WFE()                                   \
			__RTE_ARM_LOAD_EXC((addr), value, memorder, size) \
		} while (!((value & (mask)) cond expected_value));        \
	}                                                                 \
} while (0)
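
/*
 * Illustrative usage (not part of this header): wait until bit 0 (a
 * hypothetical BUSY flag) of a 32-bit status word is cleared, checking only
 * that bit.
 *
 *   volatile uint32_t status;
 *
 *   RTE_WAIT_UNTIL_MASKED(&status, 0x1, ==, 0, rte_memory_order_acquire);
 */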

#endif /* RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED */

#ifdef __cplusplus
}
#endif

#endif /* _RTE_PAUSE_ARM64_H_ */