/*	$NetBSD: macros.h,v 1.31 2004/07/01 22:58:13 kleink Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(__lint__)
#define _VAX_MACROS_H_

void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

/* Here general macros are supposed to be stored */

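/*
 * ffs(): find the first (least significant) set bit, using the VAX FFS
 * instruction.  Returns the 1-based bit number, or 0 if no bit is set,
 * matching the ffs(3) convention.
 */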
static __inline__ int __attribute__((__unused__))
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0;"
			    "bneq 1f;"
			    "mnegl $1,%0;"
			    "1:;"
			    "incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}

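/*
 * _remque(): remove an element from an absolute (doubly linked) queue
 * with the REMQUE instruction, then clear the removed element's second
 * link longword.
 */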
static __inline__ void __attribute__((__unused__))
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

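/*
 * _insque(): insert element p after element q in an absolute queue,
 * using the INSQUE instruction.
 */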
static __inline__ void  __attribute__((__unused__))
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

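/*
 * MOVC3/MOVC5 take a 16-bit length operand, so requests larger than
 * 65535 bytes are punted to the out-of-line __blkcpy()/__blkset()
 * helpers declared above.  MOVC3 is defined to handle overlapping
 * operands correctly, which is why memcpy() and memmove() can share
 * one implementation here.
 */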
static __inline__ void * __attribute__((__unused__))
memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}

static __inline__ void * __attribute__((__unused__))
memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}

#ifdef notdef /* bcopy() is obsolete in the kernel */
static __inline__ void __attribute__((__unused__))
bcopy(const void *from, void *to, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
}
#endif

static __inline__ void * __attribute__((__unused__))
memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm__ __volatile ("movc5 $0,(%%sp),%2,%1,%0"
			:
			: "m" (*(char *)block), "g" (len), "g" (c)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}

#ifdef notdef /* bzero() is obsolete in the kernel */
static __inline__ void __attribute__((__unused__))
bzero(void *block, size_t len)
{
	if (len > 65535)
		__blkset(block, 0, len);
	else {
		__asm__ __volatile ("movc5 $0,(%%sp),$0,%1,%0"
			:
			: "m" (*(char *)block), "g" (len)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
}
#endif

#ifdef notdef
/* XXX - the return semantics of memcmp are wrong */
static __inline__ int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline__ int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new ("nya") */
static __inline__ size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return	ret;
}

static __inline__ char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "incl %%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);"
			   "subl3 %%r0,%2,%%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "movc3 %%r2,(%1),(%0);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,%%r2;"
			   "locc $0,%%r2,(%1);"
			   "beql 1f;"
			   "subl3 %%r0,%2,%%r2;"
			   "clrb (%0)[%%r2];"
			   "1:;"
			   "movc3 %%r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm__ __volatile("locc %2,%3,(%1);"
			   "bneq 1f;"
			   "clrl %%r1;"
			   "1:;"
			   "movl %%r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return	ret;
}

static __inline__ int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r0;"
			   "incl %%r0;"
			   "cmpc3 %%r0,(%1),(%2);"
			   "beql 1f;"
			   "movl $1,%%r2;"
			   "cmpb (%%r1),(%%r3);"
			   "bcc 1f;"
			   "mnegl $1,%%r2;"
			   "1:;"
			   "movl %%r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return	ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

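/*
 * scanc(): wrapper for the SCANC instruction.  Each byte of the string
 * is used to index table[]; the scan stops when (table[byte] & mask) is
 * non-zero.  Returns the number of bytes left unscanned, or 0 if the
 * whole string was scanned without a match.
 */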
static __inline__ int __attribute__((__unused__))
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc %1,(%2),(%3),%4;"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}

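/*
 * skpc(): wrapper for the SKPC instruction.  Skips leading bytes equal
 * to mask and returns the number of bytes remaining from the first
 * mismatch, or 0 if the entire string consisted of mask bytes.
 */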
static __inline__ int __attribute__((__unused__))
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}

/*
 * Set (bbssi) or clear (bbcci) a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline__ int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			   "bbssi %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

static __inline__ int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			   "bbcci %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}
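
/*
 * Example (illustrative only, not part of the original header): a
 * minimal interlocked busy-wait lock built on bbssi()/bbcci().  Since
 * bbssi() returns 1 only when it changed the bit from clear to set,
 * spinning until it does acquires the lock:
 *
 *	static long lockword;			// hypothetical lock word
 *
 *	while (bbssi(0, &lockword) == 0)
 *		;				// bit already set; spin
 *	// ...critical section...
 *	bbcci(0, &lockword);			// release: clear the bit
 */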

#define setrunqueue(p)	\
	__asm__ __volatile("movl %0,%%r0;jsb Setrq" :: "g"(p):"r0","r1","r2")

#define remrunqueue(p)	\
	__asm__ __volatile("movl %0,%%r0;jsb Remrq" :: "g"(p):"r0","r1","r2")

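/*
 * Context switching is done in assembler: both macros push the PSL and
 * enter the assembler switch routines, cpu_switch() returning whatever
 * Swtch leaves in r0, cpu_switchto() passing newp to Swtchto in r2.
 */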
#define cpu_switch(p, newp) ({						\
	register int ret;						\
	__asm__ __volatile("movpsl -(%%sp);jsb Swtch; movl %%r0,%0"	\
	    : "=g"(ret) ::"r0","r1","r2","r3","r4","r5");		\
	ret; })

#define	cpu_switchto(p, newp)						\
	__asm __volatile("movpsl -(%%sp); movl %0,%%r2; jsb Swtchto"	\
	    :: "g" (newp) : "r0", "r1", "r2", "r3", "r4", "r5")

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */

/*
 * Insqti() locks and inserts an element at the tail of a queue.
 * Returns -1 if the interlock failed, 1 if inserted OK, and 0 if it was
 * the first entry on the queue.
 */
static __inline__ int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm__ __volatile(
		"	mnegl $1,%0;"
		"	insqti (%1),(%2);"
		"	bcs 1f;"		/* failed insert */
		"	beql 2f;"		/* jump if first entry */
		"	movl $1,%0;"
		"	brb 1f;"
		"2:	clrl %0;"
		"1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of a queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline__ void * __attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm__ __volatile(
		"	remqhi (%1),%0;"
		"	bcs 1f;"		/* failed interlock */
		"	bvs 2f;"		/* nothing was removed */
		"	brb 3f;"
		"1:	mnegl $1,%0;"
		"	brb 3f;"
		"2:	clrl %0;"
		"3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
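
/*
 * Example (illustrative only, not part of the original header): typical
 * use retries on ILCK_FAILED, since the interlock can fail transiently.
 * Here entry and header are hypothetical queue element/head pointers:
 *
 *	int r;
 *	void *elem;
 *
 *	while ((r = insqti(entry, header)) == ILCK_FAILED)
 *		;				// retry the insert
 *	if (r == Q_EMPTY)
 *		;				// entry was first on queue
 *
 *	while ((elem = remqhi(header)) == (void *)ILCK_FAILED)
 *		;				// retry the removal
 *	if (elem == NULL)
 *		;				// queue was empty
 */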

#endif	/* !_VAX_MACROS_H_ && !__lint__ */