/*	$NetBSD: macros.h,v 1.24 2002/02/10 22:06:12 thorpej Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(lint)
#define _VAX_MACROS_H_

/* Here general macros are supposed to be stored */

static __inline__ int __attribute__((__unused__))
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0;"
			"bneq 1f;"
			"mnegl $1,%0;"
			"1:	incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}

static __inline__ void __attribute__((__unused__))
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

static __inline__ void __attribute__((__unused__))
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

static __inline__ void * __attribute__((__unused__))
memcpy(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}
static __inline__ void * __attribute__((__unused__))
memmove(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}

static __inline__ void __attribute__((__unused__))
bcopy(const void *from, void *to, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
}

void	__blkset(void *, int, size_t);

static __inline__ void * __attribute__((__unused__))
memset(void *block, int c, size_t len)
{
	if (len > 65535)
		__blkset(block, c, len);
	else {
		__asm__ __volatile ("movc5 $0,(sp),%2,%1,%0"
			:
			: "m" (*(char *)block), "g" (len), "g" (c)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}

static __inline__ void __attribute__((__unused__))
bzero(void *block, size_t len)
{
	if (len > 65535)
		__blkset(block, 0, len);
	else {
		__asm__ __volatile ("movc5 $0,(sp),$0,%1,%0"
			:
			: "m" (*(char *)block), "g" (len)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
}

/*
 * XXX - the return value of memcmp is wrong: cmpc3 leaves the number of
 * bytes remaining in r0, not the signed difference the C standard requires,
 * so the result is only usable as an equal/not-equal test.
 */
static __inline__ int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}
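
/*
 * Illustrative only, not part of the original header: a plain C version
 * showing the return convention the XXX note above refers to (negative,
 * zero or positive according to the first differing byte).  The helper
 * name __memcmp_std is made up for this sketch.
 */
#if 0
static __inline__ int __attribute__((__unused__))
__memcmp_std(const void *b1, const void *b2, size_t len)
{
	const u_char *p1 = b1, *p2 = b2;

	for (; len != 0; len--, p1++, p2++)
		if (*p1 != *p2)
			return *p1 - *p2;
	return 0;
}
#endif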

static __inline__ int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin nya */
static __inline__ size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return	ret;
}

static __inline__ char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r2;incl r2;"
			"locc $0,$65535,(%0);movc3 r2,(%1),(r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);subl3 r0,%2,r2;"
			"locc $0,$65535,(%0);movc3 r2,(%1),(r1);movb $0,(r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r2;"
			"movc3 r2,(%1),(%0);movb $0,(r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,r2;locc $0,r2,(%1);beql 1f;subl3 r0,%2,r2;"
			"clrb (%0)[r2];1:;movc3 r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;
	__asm__ __volatile("locc %2,%3,(%1);bneq 1f;clrl r1;1:movl r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return	ret;
}

static __inline__ int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;
	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r0;incl r0;"
			"cmpc3 r0,(%1),(%2);beql 1f;movl $1,r2;"
			"cmpb (r1),(r3);bcc 1f;movl $-1,r2;1:movl r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return	ret;
}
/* End nya */

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

static __inline__ int __attribute__((__unused__))
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc	%1,(%2),(%3),%4;movl r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline__ int __attribute__((__unused__))
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}

/*
 * Set/clear a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline__ int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl r0;bbssi %1,%2,1f;incl r0;1:movl r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

static __inline__ int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl r0;bbcci %1,%2,1f;incl r0;1:movl r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}
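
/*
 * Illustrative only, not part of the original header: a minimal sketch of
 * using bbssi()/bbcci() as an interlocked flag.  The helper name and the
 * choice of bit 0 in the lock word are made up for this example.
 */
#if 0
static __inline__ void __attribute__((__unused__))
__bitlock_sketch(long *lock)
{
	while (bbssi(0, lock) == 0)	/* 0: bit already set, someone holds it */
		;			/* spin until we set it ourselves */
	/* ... critical section ... */
	(void)bbcci(0, lock);		/* release by clearing the bit again */
}
#endif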

#define setrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");

#define remrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");

#define cpu_switch(p) \
	__asm__ __volatile("movl %0,r6;movpsl -(sp);jsb Swtch" \
	    ::"g"(p):"r0","r1","r2","r3","r4","r5","r6");

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * Insqti() locks and inserts an element into the end of a queue.
 * Returns -1 if interlock failed, 1 if inserted OK and 0 if first in queue.
 */
static __inline__ int __attribute__((__unused__))
insqti(void *entry, void *header) {
	register int ret;

	__asm__ __volatile(
			"mnegl $1,%0;"
			"insqti (%1),(%2);"
			"bcs 1f;"		/* failed insert */
			"beql 2f;"		/* jump if first entry */
			"movl $1,%0;"
			"brb 1f;"
			"2:	clrl %0;"
			"1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of the queue.
 * Returns -1 if interlock failed, 0 if queue empty, address of the
 * removed element otherwise.
 */
static __inline__ void * __attribute__((__unused__))
remqhi(void *header) {
	register void *ret;

	__asm__ __volatile(
			"remqhi (%1),%0;"
			"bcs 1f;"		/* failed interlock */
			"bvs 2f;"		/* nothing was removed */
			"brb 3f;"
			"1:	mnegl $1,%0;"
			"brb 3f;"
			"2:	clrl %0;"
			"3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
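
/*
 * Illustrative only, not part of the original header: a minimal sketch of
 * how insqti()/remqhi() and the return codes above fit together.  The
 * helper name and the "workq"/"elem" parameters are made up; real callers
 * pass self-relative queue headers/entries as the interlocked VAX queue
 * instructions expect.
 */
#if 0
static __inline__ void __attribute__((__unused__))
__ilckq_sketch(void *workq, void *elem)
{
	void *removed;

	/* Insert at the tail, retrying while the secondary interlock is held. */
	while (insqti(elem, workq) == ILCK_FAILED)
		;

	/* Remove from the head; failure codes come back as fake "addresses". */
	do
		removed = remqhi(workq);
	while (removed == (void *)ILCK_FAILED);

	if (removed == (void *)Q_EMPTY)
		return;			/* nothing was queued */
	/* ... process "removed" ... */
}
#endif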

#endif	/* _VAX_MACROS_H_ */