/*	$NetBSD: start.S,v 1.14 2024/02/07 04:20:26 msaitoh Exp $	*/

/*
 * Copyright (c) 2017 Ryo Shimizu
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_arm_debug.h"	/* VERBOSE_INIT_ARM and EARLYCONS */

#include <sys/cdefs.h>

#include <aarch64/asm.h>
#include "assym.h"

RCSID("$NetBSD: start.S,v 1.14 2024/02/07 04:20:26 msaitoh Exp $")

/*
 * Padding at start of kernel image to make room for 64-byte header
 * (non-ELF booting)
 */
.header:
	.space	64, 0x0

/*
 * Kernel start routine for aarch64 boards.
 */
	.global start
start:
	/* DON'T CLOBBER X0-X3 REGISTERS. THEY ARE UBOOT ARGUMENTS */

	/*
	 * Relocate the kernel to an L2_SIZE (2MB) aligned address if necessary.
	 *
	 * x8 = currently loaded address
	 * x9 = (x8 + L2_SIZE - 1) & -L2_SIZE = new (aligned) loaded address
	 */
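	/*
	 * adrl (a macro from <aarch64/asm.h>, presumably an adrp/add pair)
	 * forms a PC-relative address, so x8 receives the address .header
	 * is currently loaded at, not its link-time address.
	 */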
	adrl	x8, .header
	mov	x9, #(L2_SIZE-1)
	add	x9, x9, x8
	and	x9, x9, #-L2_SIZE
	cmp	x8, x9
	b.eq	9f			/* already aligned; skip the relocation */

	/* x10 = size = (_edata - __kernel_text) */
	adrl	x10, _edata
	adrl	x11, __kernel_text
	sub	x10, x10, x11

	/* do memmove(x9, x8, x10) */
	add	x8, x8, x10
	add	x13, x9, x10
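	/*
	 * Copy backwards, 16 bytes per iteration: the destination (x9) is
	 * above the source (x8), so a descending copy is safe even though
	 * the two ranges may overlap.
	 */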
1:
	ldp	x11, x12, [x8, #-16]!
	stp	x11, x12, [x13, #-16]!
	cmp	x13, x9
	b.hi	1b

	/* jump to new (aligned) loaded address */
	add	x9, x9, #(start - .header)	/* skip header */
	br	x9
9:


	/*
	 * Zero the BSS
	 */
	adrl	x8, __bss_start__
	adrl	x9, __bss_end__
	/* while (x8 < x9) *(uint128_t *)x8++ = 0; */
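	/*
	 * 16 bytes are cleared per store; if __bss_end__ is not 16-byte
	 * aligned, the final store rounds up slightly past it.
	 */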
	b	2f
1:	stp	xzr, xzr, [x8], #16
2:	cmp	x8, x9
	b.lo	1b

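	/*
	 * CurrentEL[3:2] holds the exception level we were entered at.
	 * If we were entered at EL1, skip the EL2 setup below.
	 */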
	mrs	x8, CurrentEL
	lsr	x8, x8, #2
	cmp	x8, #0x2
	b.lo	1f

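	/*
	 * Entered at EL2 (or EL3): select the kernel's byte order for
	 * data accesses at EL2.
	 */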
	mrs	x8, sctlr_el2
#ifdef __AARCH64EB__
	orr	x8, x8, #SCTLR_EE	/* set: Big Endian */
#else
	bic	x8, x8, #SCTLR_EE	/* clear: Little Endian */
#endif
	msr	sctlr_el2, x8
	isb

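	/*
	 * Set the matching byte order for EL1 (SCTLR_EE, kernel data
	 * accesses) and EL0 (SCTLR_E0E, user data accesses).
	 */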
1:
	mrs	x8, sctlr_el1
#ifdef __AARCH64EB__
	orr	x8, x8, #(SCTLR_EE | SCTLR_E0E)	/* set: Big Endian */
#else
	bic	x8, x8, #(SCTLR_EE | SCTLR_E0E)	/* clear: Little Endian */
#endif
	msr	sctlr_el1, x8
	isb


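	/*
	 * Compute kern_vtopdiff, the offset between the kernel's link-time
	 * (virtual) and load-time (physical) addresses:
	 *   adr  x9, start   - PC-relative, the physical address of start
	 *   ldr  x10, =start - from the literal pool, the virtual address
	 */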
	adr	x9, start
	ldr	x10, =start

	sub	x10, x10, x9

	/* address of kern_vtopdiff (relative) */
	adrl	x8, kern_vtopdiff
	str	x10, [x8]	/* kern_vtopdiff = start(virt) - start(phys) */

	/*
	 * store the u-boot arguments (x0-x3) in uboot_args[4]
	 */

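	/*
	 * In the arm64 boot protocol only x0 (the FDT blob) is meaningful;
	 * x1-x3 are reserved and expected to be zero, but all four register
	 * values are preserved here anyway.
	 */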
	/* address of uboot_args (relative) */
	adrl	x8, uboot_args
	str	x0, [x8, #(8*0)]
	str	x1, [x8, #(8*1)]
	str	x2, [x8, #(8*2)]
	str	x3, [x8, #(8*3)]

	/*
	 * The ARM64 boot protocol passes the FDT address in x0.
	 */
	adrl	x8, fdt_addr_r
	str	x0, [x8]

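	/*
	 * Record where the bootstrap page tables live: ARM_BOOTSTRAP_LxPT
	 * is a link-time (virtual) address, so subtract kern_vtopdiff
	 * (still in x10) to store its physical address in pmapboot_pagebase.
	 */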
	adrl	x8, pmapboot_pagebase
	ldr	x9, =ARM_BOOTSTRAP_LxPT
	sub	x9, x9, x10
	str	x9, [x8]

	b	aarch64_start		/* aarch64_start() @ aarch64/locore.S */

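/*
 * uartputc(c) - early console output helper.  When EARLYCONS is defined
 * this tail-calls <EARLYCONS>_platform_early_putchar(); otherwise it is
 * a no-op that simply returns.
 */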
ENTRY_NP(uartputc)
#ifdef EARLYCONS
	b	___CONCAT(EARLYCONS, _platform_early_putchar)
#endif
	ret
160