/*	$NetBSD: cache_ls2.h,v 1.4 2020/07/26 08:08:41 simonb Exp $	*/

/*-
 * Copyright (c) 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _MIPS_CACHE_LS2_H_
#define	_MIPS_CACHE_LS2_H_

/*
 * Cache definitions/operations for Loongson-style caches.
 */
#define	CACHEOP_LS2_I_INDEX_INV		0
#define	CACHEOP_LS2_D_INDEX_WB_INV	1
#define	CACHEOP_LS2_S_INDEX_WB_INV	3
#define	CACHEOP_LS2_D_HIT_INV		17
#define	CACHEOP_LS2_S_HIT_INV		19
#define	CACHEOP_LS2_D_HIT_WB_INV	21
#define	CACHEOP_LS2_S_HIT_WB_INV	23

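/*
 * These values follow the standard MIPS CACHE instruction op encoding:
 * the low two bits select the cache (0 = primary instruction,
 * 1 = primary data, 3 = secondary) and the upper three bits select the
 * operation (0 = index (writeback) invalidate, 4 = hit invalidate,
 * 5 = hit writeback invalidate).  E.g. CACHEOP_LS2_D_HIT_WB_INV is
 * 21 = (5 << 2) | 1.
 */
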
#if !defined(_LOCORE)
/*
 * For the index operations, the way is encoded in the bottom 2 bits
 * of the VA (the caches are 4-way set-associative).
 */

#define	cache_op_ls2_8line_4way(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0x00(%0); cache %1, 0x20(%0)		\n\t"	\
                "cache %1, 0x40(%0); cache %1, 0x60(%0)		\n\t"	\
                "cache %1, 0x80(%0); cache %1, 0xa0(%0)		\n\t"	\
                "cache %1, 0xc0(%0); cache %1, 0xe0(%0)		\n\t"	\
                "cache %1, 0x01(%0); cache %1, 0x21(%0)		\n\t"	\
                "cache %1, 0x41(%0); cache %1, 0x61(%0)		\n\t"	\
                "cache %1, 0x81(%0); cache %1, 0xa1(%0)		\n\t"	\
                "cache %1, 0xc1(%0); cache %1, 0xe1(%0)		\n\t"	\
                "cache %1, 0x02(%0); cache %1, 0x22(%0)		\n\t"	\
                "cache %1, 0x42(%0); cache %1, 0x62(%0)		\n\t"	\
                "cache %1, 0x82(%0); cache %1, 0xa2(%0)		\n\t"	\
                "cache %1, 0xc2(%0); cache %1, 0xe2(%0)		\n\t"	\
                "cache %1, 0x03(%0); cache %1, 0x23(%0)		\n\t"	\
                "cache %1, 0x43(%0); cache %1, 0x63(%0)		\n\t"	\
                "cache %1, 0x83(%0); cache %1, 0xa3(%0)		\n\t"	\
                "cache %1, 0xc3(%0); cache %1, 0xe3(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

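/*
 * For example, the entire (4-way) primary data cache can be written
 * back and invalidated by walking the index range of a single way and
 * letting the macro hit all four ways at each index.  A minimal
 * sketch, assuming 32-byte lines and a hypothetical way_size variable
 * for the per-way cache size (the real routines live in cache_ls2.c):
 *
 *	for (va = MIPS_KSEG0_START;
 *	     va < MIPS_KSEG0_START + way_size; va += 8 * 32)
 *		cache_op_ls2_8line_4way(va, CACHEOP_LS2_D_INDEX_WB_INV);
 *
 * Each cache_op_ls2_8line_4way() invocation touches 8 consecutive
 * lines in each of the 4 ways, i.e. 32 cache lines per call.
 */
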
#define	cache_op_ls2_line_4way(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0(%0); cache %1, 1(%0)		\n\t"	\
                "cache %1, 2(%0); cache %1, 3(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

#define	cache_op_ls2_8line(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0x00(%0); cache %1, 0x20(%0)		\n\t"	\
                "cache %1, 0x40(%0); cache %1, 0x60(%0)		\n\t"	\
                "cache %1, 0x80(%0); cache %1, 0xa0(%0)		\n\t"	\
                "cache %1, 0xc0(%0); cache %1, 0xe0(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

#define	cache_op_ls2_line(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0(%0)				\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

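/*
 * In the prototypes below, "icache" is the primary instruction cache,
 * "pdcache" the primary data cache and "sdcache" the secondary cache.
 * "wbinv" writes dirty lines back and then invalidates, "inv"
 * invalidates without writing back, "wb" only writes back, and "sync"
 * makes instruction fetch coherent with prior stores.  The "_index"
 * variants take a VA used only to derive a cache index, for use when
 * no valid mapping of the target address is at hand.
 */
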
void	ls2_icache_sync_all(void);
void	ls2_icache_sync_range(register_t, vsize_t);
void	ls2_icache_sync_range_index(vaddr_t, vsize_t);

void	ls2_pdcache_wbinv_all(void);
void	ls2_pdcache_wbinv_range(register_t, vsize_t);
void	ls2_pdcache_wbinv_range_index(vaddr_t, vsize_t);

void	ls2_pdcache_inv_range(register_t, vsize_t);
void	ls2_pdcache_wb_range(register_t, vsize_t);

void	ls2_sdcache_wbinv_all(void);
void	ls2_sdcache_wbinv_range(register_t, vsize_t);
void	ls2_sdcache_wbinv_range_index(vaddr_t, vsize_t);

void	ls2_sdcache_inv_range(register_t, vsize_t);
void	ls2_sdcache_wb_range(register_t, vsize_t);

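/*
 * A minimal sketch of how a hit-type range operation can be composed
 * from the macros above, assuming 32-byte lines (the actual
 * implementations in cache_ls2.c may differ):
 *
 *	void
 *	ls2_pdcache_wbinv_range(register_t va, vsize_t size)
 *	{
 *		register_t eva = (va + size + 31) & ~(register_t)31;
 *
 *		for (va &= ~(register_t)31; va + 8 * 32 <= eva; va += 8 * 32)
 *			cache_op_ls2_8line(va, CACHEOP_LS2_D_HIT_WB_INV);
 *		for (; va < eva; va += 32)
 *			cache_op_ls2_line(va, CACHEOP_LS2_D_HIT_WB_INV);
 *	}
 */
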
#endif /* !_LOCORE */
#endif /* !_MIPS_CACHE_LS2_H_ */