/*	$NetBSD: cpufunc_asm_arm10.S,v 1.12 2022/10/20 06:58:38 skrll Exp $	*/

/*
 * Copyright (c) 2002 ARM Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the company may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * ARM10 assembly functions for CPU / MMU / TLB specific operations
 */

#include <machine/asm.h>
#include <arm/locore.h>
#include "assym.h"

/*
 * TLB functions
 */
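/*
 * Both single-entry flush routines below take, in r0, the virtual address
 * of the page whose TLB entry is to be invalidated.  When the kernel
 * PAGE_SIZE is made up of two L2 small pages, the second half of the page
 * is flushed as well.
 */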
ENTRY(arm10_tlb_flushID_SE)
	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
#if PAGE_SIZE == 2 * L2_S_SIZE
	add	r0, r0, #L2_S_SIZE
	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
#endif
	RET
END(arm10_tlb_flushID_SE)
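
/*
 * Usage sketch (illustrative only, not part of the original file): a
 * hypothetical assembly caller holding the virtual address of a page in r4
 * might invalidate that page's I+D TLB entries with something like:
 *
 *	mov	r0, r4			@ va of the page to flush
 *	bl	_C_LABEL(arm10_tlb_flushID_SE)
 *
 * In the kernel these routines are normally reached indirectly through the
 * cpufuncs switch table rather than called by name.
 */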

ENTRY(arm10_tlb_flushI_SE)
	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
#if PAGE_SIZE == 2 * L2_S_SIZE
	add	r0, r0, #L2_S_SIZE
	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
#endif
	RET
END(arm10_tlb_flushI_SE)


/*
 * Context switch.
 *
 * These are the CPU-specific parts of the context switcher, cpu_switch().
 * These functions actually perform the TTB reload.
 */
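/*
 * On entry, r0 holds the new translation table base, i.e. the physical
 * address of the incoming L1 translation table; the same register is also
 * passed (and its value ignored) to the write-buffer drain and the
 * flush-entire-TLB operations.
 */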
ENTRY(arm10_context_switch)
	/*
	 * We can assume that the caches will only contain kernel addresses
	 * at this point, so there is no need to flush them again.
	 */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	mcr	p15, 0, r0, c2, c0, 0	/* set the new TTB */
	mcr	p15, 0, r0, c8, c7, 0	/* and flush the I+D tlbs */

	/* Paranoia -- make sure the pipeline is empty. */
	nop
	nop
	nop
	RET
END(arm10_context_switch)
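
/*
 * Usage sketch (illustrative only, not part of the original file): a
 * hypothetical caller switching address spaces would load r0 with the
 * physical address of the new L1 table before branching here:
 *
 *	mov	r0, r5			@ pa of the new L1 translation table
 *	bl	_C_LABEL(arm10_context_switch)
 *
 * In practice this entry point is invoked through the cpufuncs switch
 * table from the machine-dependent context-switch code rather than
 * called directly.
 */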