Fri Aug 24 19:06:30 2018 UTC
Set TCR_EL1 correctly for inner shareable when MULTIPROCESSOR.


(ryo)
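In short: rev 1.18 loaded the inner-shareable setting (tcr_setting_inner_shareable, i.e. TCR_SH0_INNER | TCR_SH1_INNER) but OR'd it into the SCTLR_EL1 value in the MULTIPROCESSOR case, so the translation-table walks stayed non-shareable and unrelated SCTLR_EL1 bits were set instead. Rev 1.19 applies that OR to the TCR_EL1 value, just before "msr tcr_el1, x0". A minimal excerpt of the corrected mmu_enable sequence from the new revision (comments added here for orientation only):

	/* TCR_EL1:IPS[34:32] = AA64MMFR0:PARange[3:0] */
	ldr	x0, tcr_setting
	mrs	x1, id_aa64mmfr0_el1
	bfi	x0, x1, #32, #3
#ifdef MULTIPROCESSOR
	/* MP kernels: make TTBR0/TTBR1 table walks Inner Shareable */
	ldr	x1, tcr_setting_inner_shareable	/* TCR_SH0_INNER | TCR_SH1_INNER */
	orr	x0, x0, x1
#endif
	msr	tcr_el1, x0	/* program TCR_EL1 with IPS and SH fields set */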
diff -r1.18 -r1.19 src/sys/arch/aarch64/aarch64/locore.S

cvs diff -r1.18 -r1.19 src/sys/arch/aarch64/aarch64/locore.S

--- src/sys/arch/aarch64/aarch64/locore.S 2018/08/10 21:06:42 1.18
+++ src/sys/arch/aarch64/aarch64/locore.S 2018/08/24 19:06:30 1.19
@@ -1,1063 +1,1063 @@
1/* $NetBSD: locore.S,v 1.18 2018/08/10 21:06:42 ryo Exp $ */ 1/* $NetBSD: locore.S,v 1.19 2018/08/24 19:06:30 ryo Exp $ */
2 2
3/* 3/*
4 * Copyright (c) 2017 Ryo Shimizu <ryo@nerv.org> 4 * Copyright (c) 2017 Ryo Shimizu <ryo@nerv.org>
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * Redistribution and use in source and binary forms, with or without 7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions 8 * modification, are permitted provided that the following conditions
9 * are met: 9 * are met:
10 * 1. Redistributions of source code must retain the above copyright 10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer. 11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright 12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the 13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution. 14 * documentation and/or other materials provided with the distribution.
15 * 15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, 19 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
20 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 21 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 22 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, 23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
24 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING 24 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
25 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 25 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26 * POSSIBILITY OF SUCH DAMAGE. 26 * POSSIBILITY OF SUCH DAMAGE.
27 */ 27 */
28 28
29#include "opt_cpuoptions.h" 29#include "opt_cpuoptions.h"
30#include "opt_multiprocessor.h" 30#include "opt_multiprocessor.h"
31#include "opt_ddb.h" 31#include "opt_ddb.h"
32#include "opt_arm_debug.h" 32#include "opt_arm_debug.h"
33 33
34#include <aarch64/asm.h> 34#include <aarch64/asm.h>
35#include <aarch64/hypervisor.h> 35#include <aarch64/hypervisor.h>
36#include "assym.h" 36#include "assym.h"
37 37
38RCSID("$NetBSD: locore.S,v 1.18 2018/08/10 21:06:42 ryo Exp $") 38RCSID("$NetBSD: locore.S,v 1.19 2018/08/24 19:06:30 ryo Exp $")
39 39
40/* #define DEBUG_LOCORE */ 40/* #define DEBUG_LOCORE */
41/* #define DEBUG_MMU */ 41/* #define DEBUG_MMU */
42 42
43#if (defined(VERBOSE_INIT_ARM) || defined(DEBUG_LOCORE)) && defined(EARLYCONS) 43#if (defined(VERBOSE_INIT_ARM) || defined(DEBUG_LOCORE)) && defined(EARLYCONS)
44#define VERBOSE_LOCORE 44#define VERBOSE_LOCORE
45#endif 45#endif
46 46
47#define LOCORE_EL2 47#define LOCORE_EL2
48 48
49/* attributes are defined in MAIR_EL1 */ 49/* attributes are defined in MAIR_EL1 */
50#define L2_BLKPAG_ATTR_NORMAL_WB LX_BLKPAG_ATTR_INDX_0 50#define L2_BLKPAG_ATTR_NORMAL_WB LX_BLKPAG_ATTR_INDX_0
51#define L2_BLKPAG_ATTR_NORMAL_NC LX_BLKPAG_ATTR_INDX_1 51#define L2_BLKPAG_ATTR_NORMAL_NC LX_BLKPAG_ATTR_INDX_1
52#define L2_BLKPAG_ATTR_NORMAL_WT LX_BLKPAG_ATTR_INDX_2 52#define L2_BLKPAG_ATTR_NORMAL_WT LX_BLKPAG_ATTR_INDX_2
53#define L2_BLKPAG_ATTR_DEVICE_MEM LX_BLKPAG_ATTR_INDX_3 53#define L2_BLKPAG_ATTR_DEVICE_MEM LX_BLKPAG_ATTR_INDX_3
54 54
55#define PRINT(string) bl xprint;.asciz string;.align 2 55#define PRINT(string) bl xprint;.asciz string;.align 2
56 56
57#ifdef VERBOSE_LOCORE 57#ifdef VERBOSE_LOCORE
58#define VERBOSE(string) PRINT(string) 58#define VERBOSE(string) PRINT(string)
59#else 59#else
60#define VERBOSE(string) 60#define VERBOSE(string)
61#endif 61#endif
62 62
63/* load far effective address (pc relative) */ 63/* load far effective address (pc relative) */
64.macro ADDR, reg, addr 64.macro ADDR, reg, addr
65 adrp \reg, \addr 65 adrp \reg, \addr
66 add \reg, \reg, #:lo12:\addr 66 add \reg, \reg, #:lo12:\addr
67.endm 67.endm
68 68
69ENTRY_NP(aarch64_start) 69ENTRY_NP(aarch64_start)
70 /* Zero the BSS. The size must be aligned 16, usually it should be. */ 70 /* Zero the BSS. The size must be aligned 16, usually it should be. */
71 ADDR x0, __bss_start__ 71 ADDR x0, __bss_start__
72 ADDR x1, __bss_end__ 72 ADDR x1, __bss_end__
73 b 2f 73 b 2f
741: stp xzr, xzr, [x0], #16 741: stp xzr, xzr, [x0], #16
752: cmp x0, x1 752: cmp x0, x1
76 b.lo 1b 76 b.lo 1b
77 77
78 /* set stack pointer for boot */ 78 /* set stack pointer for boot */
79 ADDR x0, bootstk 79 ADDR x0, bootstk
80 mov sp, x0 80 mov sp, x0
81 81
82#ifdef DEBUG_LOCORE 82#ifdef DEBUG_LOCORE
83 PRINT("PC = ") 83 PRINT("PC = ")
84 bl 1f 84 bl 1f
851: mov x0, lr 851: mov x0, lr
86 bl print_x0 86 bl print_x0
87 87
88 PRINT("SP = ") 88 PRINT("SP = ")
89 bl 1f 89 bl 1f
901: mov x0, sp 901: mov x0, sp
91 bl print_x0 91 bl print_x0
92 92
93 PRINT("CurrentEL = ") 93 PRINT("CurrentEL = ")
94 mrs x0, CurrentEL 94 mrs x0, CurrentEL
95 lsr x0, x0, #2 95 lsr x0, x0, #2
96 bl print_x0 96 bl print_x0
97 97
98 cmp x0, #2 98 cmp x0, #2
99 bne 1f 99 bne 1f
100 100
101 /* EL2 registers can be accessed in EL2 or higher */ 101 /* EL2 registers can be accessed in EL2 or higher */
102 PRINT("SCTLR_EL2 = ") 102 PRINT("SCTLR_EL2 = ")
103 mrs x0, sctlr_el2 103 mrs x0, sctlr_el2
104 bl print_x0 104 bl print_x0
105 105
106 PRINT("HCR_EL2 = ") 106 PRINT("HCR_EL2 = ")
107 mrs x0, hcr_el2 107 mrs x0, hcr_el2
108 bl print_x0 108 bl print_x0
1091: 1091:
110 110
111 PRINT("CNTFREQ_EL0 = ") 111 PRINT("CNTFREQ_EL0 = ")
112 mrs x0, cntfrq_el0 112 mrs x0, cntfrq_el0
113 bl print_x0 113 bl print_x0
114 114
115 PRINT("DAIF = ") 115 PRINT("DAIF = ")
116 mrs x0, daif 116 mrs x0, daif
117 bl print_x0 117 bl print_x0
118 118
119 PRINT("MPIDR_EL1 = ") 119 PRINT("MPIDR_EL1 = ")
120 mrs x0, mpidr_el1 120 mrs x0, mpidr_el1
121 bl print_x0 121 bl print_x0
122 122
123 PRINT("L2CTLR_EL1 = ") 123 PRINT("L2CTLR_EL1 = ")
124 mrs x0, s3_1_c11_c0_2 124 mrs x0, s3_1_c11_c0_2
125 bl print_x0 125 bl print_x0
126 126
127 PRINT("ID_AA64MPFR0_EL1 = ") 127 PRINT("ID_AA64MPFR0_EL1 = ")
128 mrs x0, id_aa64pfr0_el1 128 mrs x0, id_aa64pfr0_el1
129 bl print_x0 129 bl print_x0
130 130
131 PRINT("ID_AA64MPFR1_EL1 = ") 131 PRINT("ID_AA64MPFR1_EL1 = ")
132 mrs x0, id_aa64pfr1_el1 132 mrs x0, id_aa64pfr1_el1
133 bl print_x0 133 bl print_x0
134 134
135 PRINT("ID_AA64ISAR0_EL1 = ") 135 PRINT("ID_AA64ISAR0_EL1 = ")
136 mrs x0, id_aa64isar0_el1 136 mrs x0, id_aa64isar0_el1
137 bl print_x0 137 bl print_x0
138 138
139 PRINT("ID_AA64ISAR1_EL1 = ") 139 PRINT("ID_AA64ISAR1_EL1 = ")
140 mrs x0, id_aa64isar1_el1 140 mrs x0, id_aa64isar1_el1
141 bl print_x0 141 bl print_x0
142 142
143 143
144 PRINT("ID_AA64MMFR0_EL1 = ") 144 PRINT("ID_AA64MMFR0_EL1 = ")
145 mrs x0, id_aa64mmfr0_el1 145 mrs x0, id_aa64mmfr0_el1
146 bl print_x0 146 bl print_x0
147 147
148 PRINT("ID_AA64MMFR1_EL1 = ") 148 PRINT("ID_AA64MMFR1_EL1 = ")
149 mrs x0, id_aa64mmfr1_el1 149 mrs x0, id_aa64mmfr1_el1
150 bl print_x0 150 bl print_x0
151#endif 151#endif
152 152
153 153
154#ifdef LOCORE_EL2 154#ifdef LOCORE_EL2
155 VERBOSE("Drop to EL1...") 155 VERBOSE("Drop to EL1...")
156# include <aarch64/aarch64/locore_el2.S> 156# include <aarch64/aarch64/locore_el2.S>
157 VERBOSE("OK\r\n") 157 VERBOSE("OK\r\n")
158#ifdef DEBUG_LOCORE 158#ifdef DEBUG_LOCORE
159 PRINT("CurrentEL = ") 159 PRINT("CurrentEL = ")
160 mrs x0, CurrentEL 160 mrs x0, CurrentEL
161 lsr x0, x0, #2 161 lsr x0, x0, #2
162 bl print_x0 162 bl print_x0
163#endif /* DEBUG_LOCORE */ 163#endif /* DEBUG_LOCORE */
164#endif /* LOCORE_EL2 */ 164#endif /* LOCORE_EL2 */
165 165
166#ifdef DEBUG_LOCORE 166#ifdef DEBUG_LOCORE
167 PRINT("DAIF = ") 167 PRINT("DAIF = ")
168 mrs x0, daif 168 mrs x0, daif
169 bl print_x0 169 bl print_x0
170#endif 170#endif
171 171
172 bl mmu_disable 172 bl mmu_disable
173 173
174 bl init_sysregs 174 bl init_sysregs
175 175
176 bl arm_boot_l0pt_init 176 bl arm_boot_l0pt_init
177 177
178 VERBOSE("MMU Enable...") 178 VERBOSE("MMU Enable...")
179 bl mmu_enable 179 bl mmu_enable
180 VERBOSE("OK\r\n") 180 VERBOSE("OK\r\n")
181 181
182 /* set exception vector */ 182 /* set exception vector */
183 ldr x2, =el1_vectors /* el1_vectors is in kva */ 183 ldr x2, =el1_vectors /* el1_vectors is in kva */
184 msr vbar_el1, x2 184 msr vbar_el1, x2
185 185
186#ifdef DEBUG_LOCORE 186#ifdef DEBUG_LOCORE
187 PRINT("SPSR_EL1 = ") 187 PRINT("SPSR_EL1 = ")
188 mrs x0, spsr_el1 188 mrs x0, spsr_el1
189 bl print_x0 189 bl print_x0
190 190
191 PRINT("DAIF = ") 191 PRINT("DAIF = ")
192 mrs x0, daif 192 mrs x0, daif
193 bl print_x0 193 bl print_x0
194 194
195 PRINT("VSTART = ") 195 PRINT("VSTART = ")
196 ldr x0, =vstart /* virtual address of vstart */ 196 ldr x0, =vstart /* virtual address of vstart */
197 bl print_x0 197 bl print_x0
198#endif 198#endif
199 199
200 ldr x0, =vstart /* virtual address of vstart */ 200 ldr x0, =vstart /* virtual address of vstart */
201 br x0 /* jump to the kernel virtual address */ 201 br x0 /* jump to the kernel virtual address */
202 202
203/* 203/*
204 * vstart is in kernel virtual address 204 * vstart is in kernel virtual address
205 */ 205 */
206vstart: 206vstart:
207 ADDR x0, lwp0uspace 207 ADDR x0, lwp0uspace
208 add x0, x0, #(UPAGES * PAGE_SIZE) 208 add x0, x0, #(UPAGES * PAGE_SIZE)
209 sub x0, x0, #TF_SIZE /* lwp0space + USPACE - TF_SIZE */ 209 sub x0, x0, #TF_SIZE /* lwp0space + USPACE - TF_SIZE */
210 mov sp, x0 /* define lwp0 ksp bottom */ 210 mov sp, x0 /* define lwp0 ksp bottom */
211 211
212#ifdef DEBUG_LOCORE 212#ifdef DEBUG_LOCORE
213 PRINT("VSP = ") 213 PRINT("VSP = ")
214 mov x0, sp 214 mov x0, sp
215 bl print_x0 215 bl print_x0
216#endif 216#endif
217 217
218 msr tpidr_el0, xzr /* tpidr_el0 (for TLS) = NULL */ 218 msr tpidr_el0, xzr /* tpidr_el0 (for TLS) = NULL */
219 ADDR x0, cpu_info_store /* cpu_info_store is cpu_info[0] */ 219 ADDR x0, cpu_info_store /* cpu_info_store is cpu_info[0] */
220 msr tpidr_el1, x0 /* curcpu is cpu_info[0] */ 220 msr tpidr_el1, x0 /* curcpu is cpu_info[0] */
221 221
222 mov fp, #0 /* trace back starts here */ 222 mov fp, #0 /* trace back starts here */
223 PRINT("initarm\r\n") 223 PRINT("initarm\r\n")
224 bl _C_LABEL(initarm) /* Off we go */ 224 bl _C_LABEL(initarm) /* Off we go */
225 225
226 PRINT("main\r\n") 226 PRINT("main\r\n")
227 bl _C_LABEL(main) /* call main() */ 227 bl _C_LABEL(main) /* call main() */
228 228
229 adr x0, .Lmainreturned 229 adr x0, .Lmainreturned
230 b _C_LABEL(panic) 230 b _C_LABEL(panic)
231 /* NOTREACHED */ 231 /* NOTREACHED */
232END(aarch64_start) 232END(aarch64_start)
233 233
234.Lmainreturned: 234.Lmainreturned:
235 .asciz "main() returned" 235 .asciz "main() returned"
236 236
237 .align 3 237 .align 3
238 .text 238 .text
239 239
240#ifdef MULTIPROCESSOR 240#ifdef MULTIPROCESSOR
241 241
242#if defined(VERBOSE_LOCORE) || defined(DEBUG_LOCORE) 242#if defined(VERBOSE_LOCORE) || defined(DEBUG_LOCORE)
243/* 243/*
244 * print "[CPU$x27] " (x27 as cpuid) 244 * print "[CPU$x27] " (x27 as cpuid)
245 * XXX: max 4 digit 245 * XXX: max 4 digit
246 */ 246 */
247printcpu: 247printcpu:
248 stp x0, lr, [sp, #-16]! 248 stp x0, lr, [sp, #-16]!
249 stp x25, x26, [sp, #-16]! 249 stp x25, x26, [sp, #-16]!
250 PRINT("[CPU") 250 PRINT("[CPU")
251 mov x26, x27 /* n = cpuid */ 251 mov x26, x27 /* n = cpuid */
252 mov x25, xzr /* zeropad = 0 */ 252 mov x25, xzr /* zeropad = 0 */
253 mov x1, #1000 253 mov x1, #1000
254 udiv x0, x26, x1 /* x0 = n / 1000 */ 254 udiv x0, x26, x1 /* x0 = n / 1000 */
255 msub x26, x0, x1, x26 /* n %= 1000 */ 255 msub x26, x0, x1, x26 /* n %= 1000 */
256 cbz x0, 1f /* if (x0 == 0) goto 1f */ 256 cbz x0, 1f /* if (x0 == 0) goto 1f */
257 add x0, x0, #'0' 257 add x0, x0, #'0'
258 bl uartputc 258 bl uartputc
259 mov x25, #1 /* zeropad = 1 */ 259 mov x25, #1 /* zeropad = 1 */
2601: 2601:
261 mov x1, #100 261 mov x1, #100
262 udiv x0, x26, x1 /* x0 = n / 100 */ 262 udiv x0, x26, x1 /* x0 = n / 100 */
263 msub x26, x0, x1, x26 /* n %= 100 */ 263 msub x26, x0, x1, x26 /* n %= 100 */
264 adds x25, x25, x0 /* if ((zeropad + x0) == 0) */ 264 adds x25, x25, x0 /* if ((zeropad + x0) == 0) */
265 beq 1f /* goto 1f */ 265 beq 1f /* goto 1f */
266 add x0, x0, #'0' 266 add x0, x0, #'0'
267 bl uartputc 267 bl uartputc
268 mov x25, #1 /* zeropad = 1 */ 268 mov x25, #1 /* zeropad = 1 */
2691: 2691:
270 mov x1, #10 270 mov x1, #10
271 udiv x0, x26, x1 /* x0 = n / 10 */ 271 udiv x0, x26, x1 /* x0 = n / 10 */
272 msub x26, x0, x1, x26 /* n %= 10 */ 272 msub x26, x0, x1, x26 /* n %= 10 */
273 adds x25, x25, x0 /* if ((zeropad + x0) == 0) */ 273 adds x25, x25, x0 /* if ((zeropad + x0) == 0) */
274 beq 1f /* goto 1f */ 274 beq 1f /* goto 1f */
275 add x0, x0, #'0' 275 add x0, x0, #'0'
276 bl uartputc 276 bl uartputc
2771: 2771:
278 add x0, x26, #'0' 278 add x0, x26, #'0'
279 bl uartputc 279 bl uartputc
280 PRINT("] ") 280 PRINT("] ")
281 ldp x25, x26, [sp], #16 281 ldp x25, x26, [sp], #16
282 ldp x0, lr, [sp], #16 282 ldp x0, lr, [sp], #16
283 ret 283 ret
284#define PRINTCPU() bl printcpu 284#define PRINTCPU() bl printcpu
285#else 285#else
286#define PRINTCPU() 286#define PRINTCPU()
287#endif /* VERBOSE_LOCORE || DEBUG_LOCORE */ 287#endif /* VERBOSE_LOCORE || DEBUG_LOCORE */
288 288
289#ifdef VERBOSE_LOCORE 289#ifdef VERBOSE_LOCORE
290#define VERBOSE_PRINTCPU() PRINTCPU() 290#define VERBOSE_PRINTCPU() PRINTCPU()
291#else 291#else
292#define VERBOSE_PRINTCPU() 292#define VERBOSE_PRINTCPU()
293#endif 293#endif
294 294
295ENTRY_NP(aarch64_mpstart) 295ENTRY_NP(aarch64_mpstart)
296ENTRY_NP(cortex_mpstart) /* compat arm */ 296ENTRY_NP(cortex_mpstart) /* compat arm */
297 /* 297 /*
298 * XXX: 298 * XXX:
299 * cpuid(index) is read from MPIDR_EL1.AFF0. AFF1,2,3 are ignored. 299 * cpuid(index) is read from MPIDR_EL1.AFF0. AFF1,2,3 are ignored.
300 * cpuid should be passed from primary processor... 300 * cpuid should be passed from primary processor...
301 */ 301 */
302 mrs x27, mpidr_el1 302 mrs x27, mpidr_el1
303 and x27, x27, #MPIDR_AFF0 /* XXX: cpuid = mpidr_el1 & Aff0 */ 303 and x27, x27, #MPIDR_AFF0 /* XXX: cpuid = mpidr_el1 & Aff0 */
304 mov x0, #1 304 mov x0, #1
305 lsl x28, x0, x27 /* x28 = 1 << cpuid */ 305 lsl x28, x0, x27 /* x28 = 1 << cpuid */
306 mov x0, x28 306 mov x0, x28
307 307
308 /* x27 = cpuid, x28 = (1 << cpuid) */ 308 /* x27 = cpuid, x28 = (1 << cpuid) */
309 309
310 /* set stack pointer for boot */ 310 /* set stack pointer for boot */
311#define BOOT_STACKSIZE 256 311#define BOOT_STACKSIZE 256
312 mov x1, #BOOT_STACKSIZE 312 mov x1, #BOOT_STACKSIZE
313 mul x1, x1, x27 313 mul x1, x1, x27
314 ADDR x0, bootstk_cpus 314 ADDR x0, bootstk_cpus
315 sub sp, x0, x1 /* sp = bootstk_cpus - BOOT_STACKSIZE * cpuid */ 315 sub sp, x0, x1 /* sp = bootstk_cpus - BOOT_STACKSIZE * cpuid */
316 316
317#ifdef DEBUG_LOCORE 317#ifdef DEBUG_LOCORE
318 PRINTCPU() 318 PRINTCPU()
319 PRINT("PC = ") 319 PRINT("PC = ")
320 bl 1f 320 bl 1f
3211: mov x0, lr 3211: mov x0, lr
322 bl print_x0 322 bl print_x0
323 323
324 PRINTCPU() 324 PRINTCPU()
325 PRINT("SP = ") 325 PRINT("SP = ")
326 bl 1f 326 bl 1f
3271: mov x0, sp 3271: mov x0, sp
328 bl print_x0 328 bl print_x0
329 329
330 PRINTCPU() 330 PRINTCPU()
331 PRINT("CurrentEL = ") 331 PRINT("CurrentEL = ")
332 mrs x0, CurrentEL 332 mrs x0, CurrentEL
333 lsr x0, x0, #2 333 lsr x0, x0, #2
334 bl print_x0 334 bl print_x0
335#endif 335#endif
336 336
337#ifdef LOCORE_EL2 337#ifdef LOCORE_EL2
338#ifdef DEBUG_LOCORE 338#ifdef DEBUG_LOCORE
339 VERBOSE_PRINTCPU() 339 VERBOSE_PRINTCPU()
340 VERBOSE("Drop to EL1...") 340 VERBOSE("Drop to EL1...")
341#endif 341#endif
342 bl drop_to_el1 342 bl drop_to_el1
343#ifdef DEBUG_LOCORE 343#ifdef DEBUG_LOCORE
344 VERBOSE("OK\r\n") 344 VERBOSE("OK\r\n")
345#endif 345#endif
346#ifdef DEBUG_LOCORE 346#ifdef DEBUG_LOCORE
347 PRINTCPU() 347 PRINTCPU()
348 PRINT("CurrentEL = ") 348 PRINT("CurrentEL = ")
349 mrs x0, CurrentEL 349 mrs x0, CurrentEL
350 lsr x0, x0, #2 350 lsr x0, x0, #2
351 bl print_x0 351 bl print_x0
352#endif /* DEBUG_LOCORE */ 352#endif /* DEBUG_LOCORE */
353#endif /* LOCORE_EL2 */ 353#endif /* LOCORE_EL2 */
354 354
355 bl mmu_disable 355 bl mmu_disable
356 356
357 bl init_sysregs 357 bl init_sysregs
358 358
359#ifdef DEBUG_LOCORE 359#ifdef DEBUG_LOCORE
360 VERBOSE_PRINTCPU() 360 VERBOSE_PRINTCPU()
361 VERBOSE("MMU Enable...") 361 VERBOSE("MMU Enable...")
362#endif 362#endif
363 bl mmu_enable 363 bl mmu_enable
364#ifdef DEBUG_LOCORE 364#ifdef DEBUG_LOCORE
365 VERBOSE("OK\r\n") 365 VERBOSE("OK\r\n")
366#endif 366#endif
367 367
368 /* jump to virtual address */ 368 /* jump to virtual address */
369 ldr x0, =mp_vstart 369 ldr x0, =mp_vstart
370 br x0 370 br x0
371 371
372mp_vstart: 372mp_vstart:
373 /* set exception vector */ 373 /* set exception vector */
374 ADDR x0, el1_vectors 374 ADDR x0, el1_vectors
375 msr vbar_el1, x0 375 msr vbar_el1, x0
376 376
377#ifdef DEBUG_LOCORE 377#ifdef DEBUG_LOCORE
378 PRINTCPU() 378 PRINTCPU()
379 PRINT("PC = ") 379 PRINT("PC = ")
380 bl 1f 380 bl 1f
3811: mov x0, lr 3811: mov x0, lr
382 bl print_x0 382 bl print_x0
383 383
384 PRINTCPU() 384 PRINTCPU()
385 PRINT("arm_cpu_hatched = ") 385 PRINT("arm_cpu_hatched = ")
386 ADDR x0, _C_LABEL(arm_cpu_hatched) 386 ADDR x0, _C_LABEL(arm_cpu_hatched)
387 ldr x0, [x0] 387 ldr x0, [x0]
388 bl print_x0 388 bl print_x0
389 389
390 PRINTCPU() 390 PRINTCPU()
391 PRINT("my cpubit = ") 391 PRINT("my cpubit = ")
392 mov x0, x28 392 mov x0, x28
393 bl print_x0 393 bl print_x0
394#endif 394#endif
395 395
396 ADDR x0, _C_LABEL(cpus_midr) 396 ADDR x0, _C_LABEL(cpus_midr)
397 mrs x1, midr_el1 397 mrs x1, midr_el1
398 str w1, [x0, x27, lsl #2] /* cpu_midr[cpuid] = midr_el1 */ 398 str w1, [x0, x27, lsl #2] /* cpu_midr[cpuid] = midr_el1 */
399 399
400 ADDR x0, _C_LABEL(cpus_mpidr) 400 ADDR x0, _C_LABEL(cpus_mpidr)
401 mrs x1, mpidr_el1 401 mrs x1, mpidr_el1
402 str x1, [x0, x27, lsl #3] /* cpu_mpidr[cpuid] = mpidr_el1 */ 402 str x1, [x0, x27, lsl #3] /* cpu_mpidr[cpuid] = mpidr_el1 */
403 403
404 404
405 /* 405 /*
406 * atomic_or_32(&arm_cpu_hatched, 1 << cpuid) 406 * atomic_or_32(&arm_cpu_hatched, 1 << cpuid)
407 * to tell my activity to primary processor. 407 * to tell my activity to primary processor.
408 */ 408 */
409 ADDR x0, _C_LABEL(arm_cpu_hatched) 409 ADDR x0, _C_LABEL(arm_cpu_hatched)
410 mov x1, x28 410 mov x1, x28
411 bl _C_LABEL(atomic_or_32) /* hatched! */ 411 bl _C_LABEL(atomic_or_32) /* hatched! */
412 sev 412 sev
413 413
414#ifdef DEBUG_LOCORE 414#ifdef DEBUG_LOCORE
415 PRINTCPU() 415 PRINTCPU()
416 PRINT("arm_cpu_hatched -> ") 416 PRINT("arm_cpu_hatched -> ")
417 ADDR x0, _C_LABEL(arm_cpu_hatched) 417 ADDR x0, _C_LABEL(arm_cpu_hatched)
418 ldr x0, [x0] 418 ldr x0, [x0]
419 bl print_x0 419 bl print_x0
420#endif 420#endif
421 421
422#ifdef DEBUG_LOCORE 422#ifdef DEBUG_LOCORE
423 PRINTCPU() 423 PRINTCPU()
424 PRINT("Hatched.\r\n") 424 PRINT("Hatched.\r\n")
425#endif 425#endif
426 426
427 /* wait for my bit of arm_cpu_mbox become true */ 427 /* wait for my bit of arm_cpu_mbox become true */
428 ADDR x1, _C_LABEL(arm_cpu_mbox) 428 ADDR x1, _C_LABEL(arm_cpu_mbox)
4291: 4291:
430 dmb sy 430 dmb sy
431 ldr x0, [x1] 431 ldr x0, [x1]
432 tst x0, x28 432 tst x0, x28
433 bne 9f 433 bne 9f
434 wfe 434 wfe
435 b 1b 435 b 1b
4369: 4369:
437 437
438#ifdef DEBUG_LOCORE 438#ifdef DEBUG_LOCORE
439 /* XXX: delay to prevent the mixing of console output */ 439 /* XXX: delay to prevent the mixing of console output */
440 mov x0, #0x4000000 440 mov x0, #0x4000000
441 mul x0, x0, x27 /* delay (cpuid * 0x4000000) */ 441 mul x0, x0, x27 /* delay (cpuid * 0x4000000) */
4421: subs x0, x0, #1 4421: subs x0, x0, #1
443 bne 1b 443 bne 1b
444 444
445 PRINTCPU() 445 PRINTCPU()
446 PRINT("MBOX received\r\n") 446 PRINT("MBOX received\r\n")
447 447
448 PRINTCPU() 448 PRINTCPU()
449 PRINT("arm_cpu_mbox = ") 449 PRINT("arm_cpu_mbox = ")
450 ADDR x0, _C_LABEL(arm_cpu_mbox) 450 ADDR x0, _C_LABEL(arm_cpu_mbox)
451 ldr x0, [x0] 451 ldr x0, [x0]
452 bl print_x0 452 bl print_x0
453#endif 453#endif
454 454
455 msr tpidr_el0, xzr /* tpidr_el0 (for TLS) = NULL */ 455 msr tpidr_el0, xzr /* tpidr_el0 (for TLS) = NULL */
456 456
457 /* fill my cpu_info */ 457 /* fill my cpu_info */
458 ADDR x0, _C_LABEL(cpu_info) 458 ADDR x0, _C_LABEL(cpu_info)
459 ldr x0, [x0, x27, lsl #3] /* x0 = cpu_info[cpuid] */ 459 ldr x0, [x0, x27, lsl #3] /* x0 = cpu_info[cpuid] */
460 msr tpidr_el1, x0 /* tpidr_el1 = my cpu_info */ 460 msr tpidr_el1, x0 /* tpidr_el1 = my cpu_info */
461 461
462 ldr x1, [x0, #CI_IDLELWP] /* x1 = curcpu()->ci_data.cpu_idlelwp */ 462 ldr x1, [x0, #CI_IDLELWP] /* x1 = curcpu()->ci_data.cpu_idlelwp */
463 str x1, [x0, #CI_CURLWP] /* curlwp is idlelwp */ 463 str x1, [x0, #CI_CURLWP] /* curlwp is idlelwp */
464 464
465 ldr x2, [x1, #L_PCB] /* x2 = lwp_getpcb(idlelwp) */ 465 ldr x2, [x1, #L_PCB] /* x2 = lwp_getpcb(idlelwp) */
466 add x2, x2, #(UPAGES * PAGE_SIZE) 466 add x2, x2, #(UPAGES * PAGE_SIZE)
467 sub sp, x2, #TF_SIZE /* sp = pcb + USPACE - TF_SIZE */ 467 sub sp, x2, #TF_SIZE /* sp = pcb + USPACE - TF_SIZE */
468 468
469 469
470 mov fp, xzr /* trace back starts here */ 470 mov fp, xzr /* trace back starts here */
471 bl _C_LABEL(cpu_hatch) 471 bl _C_LABEL(cpu_hatch)
472 mov x0, xzr 472 mov x0, xzr
473 b _C_LABEL(idle_loop) /* never to return */ 473 b _C_LABEL(idle_loop) /* never to return */
474END(aarch64_mpstart) 474END(aarch64_mpstart)
475 475
476#else /* MULTIPROCESSOR */ 476#else /* MULTIPROCESSOR */
477 477
478ENTRY_NP(aarch64_mpstart) 478ENTRY_NP(aarch64_mpstart)
479ENTRY_NP(cortex_mpstart) /* compat arm */ 479ENTRY_NP(cortex_mpstart) /* compat arm */
4801: wfi 4801: wfi
481 b 1b 481 b 1b
482END(aarch64_mpstart) 482END(aarch64_mpstart)
483 483
484#endif /* MULTIPROCESSOR */ 484#endif /* MULTIPROCESSOR */
485 485
486/* 486/*
487 * xprint - print strings pointed by $PC(LR) 487 * xprint - print strings pointed by $PC(LR)
488 * and return to the end of string. 488 * and return to the end of string.
489 * e.g.) 489 * e.g.)
490 * bl xprint <- call 490 * bl xprint <- call
491 * .ascii "Hello\r\n\0" <- wouldn't return here 491 * .ascii "Hello\r\n\0" <- wouldn't return here
492 * .align 2 492 * .align 2
493 * nop <- return to here 493 * nop <- return to here
494 */ 494 */
495 .global xprint 495 .global xprint
496xprint: 496xprint:
497 mov x11, lr 497 mov x11, lr
498 mov x12, x0 498 mov x12, x0
499 ldrb w0, [x11], #1 499 ldrb w0, [x11], #1
500 cbz w0, 2f 500 cbz w0, 2f
501 501
5021: 5021:
503 bl uartputc 503 bl uartputc
504 ldrb w0, [x11], #1 504 ldrb w0, [x11], #1
505 cbnz w0, 1b 505 cbnz w0, 1b
506 506
5072: 5072:
508 add x11, x11, #3 508 add x11, x11, #3
509 bic lr, x11, #3 509 bic lr, x11, #3
510 mov x0, x12 510 mov x0, x12
511 ret 511 ret
512END(xprint) 512END(xprint)
513 513
514 .global _C_LABEL(uartputs) 514 .global _C_LABEL(uartputs)
515_C_LABEL(uartputs): 515_C_LABEL(uartputs):
516 mov x11, x0 516 mov x11, x0
517 ldrb w0, [x11], #1 517 ldrb w0, [x11], #1
518 cbz w0, 9f 518 cbz w0, 9f
5191: bl uartputc 5191: bl uartputc
520 ldrb w0, [x11], #1 520 ldrb w0, [x11], #1
521 cbnz w0, 1b 521 cbnz w0, 1b
5229: 5229:
523 mov x0, x11 523 mov x0, x11
524 ret 524 ret
525END(_C_LABEL(uartputs)) 525END(_C_LABEL(uartputs))
526 526
527 .global _print_x0 527 .global _print_x0
528_print_x0: 528_print_x0:
529 stp x0, lr, [sp, #-16]! 529 stp x0, lr, [sp, #-16]!
530 stp x4, x5, [sp, #-16]! 530 stp x4, x5, [sp, #-16]!
531 stp x6, x7, [sp, #-16]! 531 stp x6, x7, [sp, #-16]!
532 532
533 mov x7, x0 /* number to display */ 533 mov x7, x0 /* number to display */
534 mov x4, #60 /* num of shift */ 534 mov x4, #60 /* num of shift */
535 mov x5, #0xf /* mask */ 535 mov x5, #0xf /* mask */
5361: 5361:
537 ror x0, x7, x4 537 ror x0, x7, x4
538 and x0, x0, x5 538 and x0, x0, x5
539 cmp x0, #10 539 cmp x0, #10
540 blt 2f 540 blt 2f
541 add x0, x0, #('a' - 10 - '0') 541 add x0, x0, #('a' - 10 - '0')
5422: add x0, x0, #'0' 5422: add x0, x0, #'0'
543 bl uartputc 543 bl uartputc
544 subs x4, x4, #4 544 subs x4, x4, #4
545 bge 1b 545 bge 1b
546 546
547 ldp x6, x7, [sp], #16 547 ldp x6, x7, [sp], #16
548 ldp x4, x5, [sp], #16 548 ldp x4, x5, [sp], #16
549 ldp x0, lr, [sp], #16 549 ldp x0, lr, [sp], #16
550 ret 550 ret
551END(_print_x0) 551END(_print_x0)
552 552
553 .global _C_LABEL(print_x0) 553 .global _C_LABEL(print_x0)
554_C_LABEL(print_x0): 554_C_LABEL(print_x0):
555 stp x0, lr, [sp, #-16]! 555 stp x0, lr, [sp, #-16]!
556 bl _print_x0 556 bl _print_x0
557 PRINT("\r\n") 557 PRINT("\r\n")
558 ldp x0, lr, [sp], #16 558 ldp x0, lr, [sp], #16
559 ret 559 ret
560END(_C_LABEL(print_x0)) 560END(_C_LABEL(print_x0))
561 561
562printn_x1: 562printn_x1:
563 stp x0, lr, [sp, #-16]! 563 stp x0, lr, [sp, #-16]!
564 mov x0, x1 564 mov x0, x1
565 bl _print_x0 565 bl _print_x0
566 ldp x0, lr, [sp], #16 566 ldp x0, lr, [sp], #16
567 ret 567 ret
568 568
569print_x2: 569print_x2:
570 stp x0, lr, [sp, #-16]! 570 stp x0, lr, [sp, #-16]!
571 mov x0, x2 571 mov x0, x2
572 bl _print_x0 572 bl _print_x0
573 PRINT("\r\n") 573 PRINT("\r\n")
574 ldp x0, lr, [sp], #16 574 ldp x0, lr, [sp], #16
575 ret 575 ret
576 576
577arm_boot_l0pt_init: 577arm_boot_l0pt_init:
578 stp x0, lr, [sp, #-16]! 578 stp x0, lr, [sp, #-16]!
579 579
580 /* Clean the page table */ 580 /* Clean the page table */
581 ADDR x0, mmutables_start 581 ADDR x0, mmutables_start
582 ADDR x1, mmutables_end 582 ADDR x1, mmutables_end
5831: 5831:
584 stp xzr, xzr, [x0], #16 584 stp xzr, xzr, [x0], #16
585 stp xzr, xzr, [x0], #16 585 stp xzr, xzr, [x0], #16
586 stp xzr, xzr, [x0], #16 586 stp xzr, xzr, [x0], #16
587 stp xzr, xzr, [x0], #16 587 stp xzr, xzr, [x0], #16
588 cmp x0, x1 588 cmp x0, x1
589 b.lo 1b 589 b.lo 1b
590 590
591 VERBOSE("Creating VA=PA tables\r\n") 591 VERBOSE("Creating VA=PA tables\r\n")
592 592
593 /* VA=PA table, link L0->L1 */ 593 /* VA=PA table, link L0->L1 */
594 ADDR x0, ttbr0_l0table 594 ADDR x0, ttbr0_l0table
595 mov x1, #0 595 mov x1, #0
596 ADDR x2, ttbr0_l1table 596 ADDR x2, ttbr0_l1table
597 bl l0_settable 597 bl l0_settable
598 598
599 /* VA=PA L1 blocks */ 599 /* VA=PA L1 blocks */
600 ADDR x0, ttbr0_l1table 600 ADDR x0, ttbr0_l1table
601 mov x1, #0 /* VA */ 601 mov x1, #0 /* VA */
602 mov x2, #0 /* PA */ 602 mov x2, #0 /* PA */
603 mov x3, #L2_BLKPAG_ATTR_DEVICE_MEM 603 mov x3, #L2_BLKPAG_ATTR_DEVICE_MEM
604 mov x4, #4 /* 4GB = whole 32bit */ 604 mov x4, #4 /* 4GB = whole 32bit */
605 bl l1_setblocks 605 bl l1_setblocks
606 606
607 VERBOSE("Creating KSEG tables\r\n") 607 VERBOSE("Creating KSEG tables\r\n")
608 608
609 /* KSEG table, link L0->L1 */ 609 /* KSEG table, link L0->L1 */
610 ADDR x0, ttbr1_l0table 610 ADDR x0, ttbr1_l0table
611 mov x1, #AARCH64_KSEG_START 611 mov x1, #AARCH64_KSEG_START
612 ADDR x2, ttbr1_l1table_kseg 612 ADDR x2, ttbr1_l1table_kseg
613 bl l0_settable 613 bl l0_settable
614 614
615 /* KSEG L1 blocks */ 615 /* KSEG L1 blocks */
616 ADDR x0, ttbr1_l1table_kseg 616 ADDR x0, ttbr1_l1table_kseg
617 mov x1, #AARCH64_KSEG_START 617 mov x1, #AARCH64_KSEG_START
618 mov x2, #0 618 mov x2, #0
619 mov x3, #L2_BLKPAG_ATTR_NORMAL_WB 619 mov x3, #L2_BLKPAG_ATTR_NORMAL_WB
620 orr x3, x3, #(LX_BLKPAG_PXN|LX_BLKPAG_UXN) 620 orr x3, x3, #(LX_BLKPAG_PXN|LX_BLKPAG_UXN)
621 mov x4, #Ln_ENTRIES /* whole l1 table */ 621 mov x4, #Ln_ENTRIES /* whole l1 table */
622 bl l1_setblocks 622 bl l1_setblocks
623 623
624 VERBOSE("Creating KVA=PA tables\r\n") 624 VERBOSE("Creating KVA=PA tables\r\n")
625 625
626 /* KVA=PA table, link L0->L1 */ 626 /* KVA=PA table, link L0->L1 */
627 ADDR x0, ttbr1_l0table 627 ADDR x0, ttbr1_l0table
628 mov x1, #VM_MIN_KERNEL_ADDRESS 628 mov x1, #VM_MIN_KERNEL_ADDRESS
629 ADDR x2, ttbr1_l1table_kva 629 ADDR x2, ttbr1_l1table_kva
630 bl l0_settable 630 bl l0_settable
631 631
632 /* KVA=PA table, link L1->L2 */ 632 /* KVA=PA table, link L1->L2 */
633 ADDR x0, ttbr1_l1table_kva 633 ADDR x0, ttbr1_l1table_kva
634 mov x1, #VM_MIN_KERNEL_ADDRESS 634 mov x1, #VM_MIN_KERNEL_ADDRESS
635 ADDR x2, ttbr1_l2table_kva 635 ADDR x2, ttbr1_l2table_kva
636 bl l1_settable 636 bl l1_settable
637 637
638 /* KVA=PA L2 blocks */ 638 /* KVA=PA L2 blocks */
639 ADDR x0, ttbr1_l2table_kva 639 ADDR x0, ttbr1_l2table_kva
640 adr x2, start /* physical addr. before MMU */ 640 adr x2, start /* physical addr. before MMU */
641 and x2, x2, #L2_BLK_OA /* L2 block size aligned (2MB) */ 641 and x2, x2, #L2_BLK_OA /* L2 block size aligned (2MB) */
642 mov x1, #VM_MIN_KERNEL_ADDRESS 642 mov x1, #VM_MIN_KERNEL_ADDRESS
643 mov x3, #(L2_BLKPAG_ATTR_NORMAL_WB|LX_BLKPAG_UXN) 643 mov x3, #(L2_BLKPAG_ATTR_NORMAL_WB|LX_BLKPAG_UXN)
644 644
645 /* kernelsize = _end - start */ 645 /* kernelsize = _end - start */
646 ldr x1, =start 646 ldr x1, =start
647 ldr x4, =_end 647 ldr x4, =_end
648 sub x4, x4, x1 648 sub x4, x4, x1
649 649
650 /* round up kernelsize to L2_SIZE (2MB) */ 650 /* round up kernelsize to L2_SIZE (2MB) */
651 add x4, x4, #L2_SIZE 651 add x4, x4, #L2_SIZE
652 sub x4, x4, #1 652 sub x4, x4, #1
653 lsr x4, x4, #L2_SHIFT 653 lsr x4, x4, #L2_SHIFT
654 bl l2_setblocks 654 bl l2_setblocks
655 655
656 /* map READONLY from VM_MIN_KERNEL_ADDRESS to __data_start */ 656 /* map READONLY from VM_MIN_KERNEL_ADDRESS to __data_start */
657 VERBOSE("Set kernel text/rodata READONLY\r\n") 657 VERBOSE("Set kernel text/rodata READONLY\r\n")
658 ldr x3, =__data_start 658 ldr x3, =__data_start
659 ands x0, x3, #(L2_SIZE - 1) 659 ands x0, x3, #(L2_SIZE - 1)
660 beq 1f 660 beq 1f
661 ldr x1, =_erodata 661 ldr x1, =_erodata
662 and x1, x1, #L2_ADDR_BITS /* _erodata & L2_ADDR_BIT */ 662 and x1, x1, #L2_ADDR_BITS /* _erodata & L2_ADDR_BIT */
663 and x0, x3, #L2_ADDR_BITS /* __data_start & L2_ADDR_BIT */ 663 and x0, x3, #L2_ADDR_BITS /* __data_start & L2_ADDR_BIT */
664 cmp x0, x1 664 cmp x0, x1
665 bne 1f 665 bne 1f
666 /* __data_start and _erodata are in same L2 block */ 666 /* __data_start and _erodata are in same L2 block */
667 PRINT("Warning: data section not aligned on size of L2 block\r\n") 667 PRINT("Warning: data section not aligned on size of L2 block\r\n")
6681: 6681:
669 /* x3 = l2pde_index(__data_start) */ 669 /* x3 = l2pde_index(__data_start) */
670 and x3, x3, #L2_ADDR_BITS 670 and x3, x3, #L2_ADDR_BITS
671 lsr x3, x3, #L2_SHIFT 671 lsr x3, x3, #L2_SHIFT
672 672
673 /* x2 = l2pde_inex(VM_MIN_KERNEL_ADDRESS) */ 673 /* x2 = l2pde_inex(VM_MIN_KERNEL_ADDRESS) */
674 mov x2, #VM_MIN_KERNEL_ADDRESS 674 mov x2, #VM_MIN_KERNEL_ADDRESS
675 and x2, x2, #L2_ADDR_BITS 675 and x2, x2, #L2_ADDR_BITS
676 lsr x2, x2, #L2_SHIFT 676 lsr x2, x2, #L2_SHIFT
677 677
678 ADDR x1, ttbr1_l2table_kva 678 ADDR x1, ttbr1_l2table_kva
679 b 9f 679 b 9f
6801: 6801:
681 ldr x0, [x1, x2, lsl #3] /* x0 = l2table[x2] */ 681 ldr x0, [x1, x2, lsl #3] /* x0 = l2table[x2] */
682 and x0, x0, #~LX_BLKPAG_AP 682 and x0, x0, #~LX_BLKPAG_AP
683 orr x0, x0, #LX_BLKPAG_AP_RO 683 orr x0, x0, #LX_BLKPAG_AP_RO
684 str x0, [x1, x2, lsl #3] /* l2table[x2] = x0 */ 684 str x0, [x1, x2, lsl #3] /* l2table[x2] = x0 */
685 add x2, x2, #1 685 add x2, x2, #1
6869: 6869:
687 cmp x2, x3 687 cmp x2, x3
688 blo 1b 688 blo 1b
689 689
690 690
691 /* add eXecute Never bit from _rodata to _end */ 691 /* add eXecute Never bit from _rodata to _end */
692 VERBOSE("Set kernel rodata/data non-Executable\r\n") 692 VERBOSE("Set kernel rodata/data non-Executable\r\n")
693 ldr x0, =__rodata_start 693 ldr x0, =__rodata_start
694 ands x0, x0, #(L2_SIZE - 1) 694 ands x0, x0, #(L2_SIZE - 1)
695 beq 1f 695 beq 1f
696 PRINT("Warning: rodata section not aligned on size of L2 block\r\n") 696 PRINT("Warning: rodata section not aligned on size of L2 block\r\n")
6971: 6971:
698 /* x2 = l2pde_index(__rodata_start) */ 698 /* x2 = l2pde_index(__rodata_start) */
699 ldr x2, =__rodata_start 699 ldr x2, =__rodata_start
700 mov x0, #(L2_SIZE - 1) 700 mov x0, #(L2_SIZE - 1)
701 add x2, x2, x0 /* round block */ 701 add x2, x2, x0 /* round block */
702 and x2, x2, #L2_ADDR_BITS 702 and x2, x2, #L2_ADDR_BITS
703 lsr x2, x2, #L2_SHIFT 703 lsr x2, x2, #L2_SHIFT
704 704
705 /* x3 = l2pde_inex(_end) */ 705 /* x3 = l2pde_inex(_end) */
706 ldr x3, =_end 706 ldr x3, =_end
707 and x3, x3, #L2_ADDR_BITS 707 and x3, x3, #L2_ADDR_BITS
708 lsr x3, x3, #L2_SHIFT 708 lsr x3, x3, #L2_SHIFT
709 709
710 ADDR x1, ttbr1_l2table_kva 710 ADDR x1, ttbr1_l2table_kva
711 b 9f 711 b 9f
7121: 7121:
713 ldr x0, [x1, x2, lsl #3] /* x0 = l2table[x2] */ 713 ldr x0, [x1, x2, lsl #3] /* x0 = l2table[x2] */
714 orr x0, x0, #(LX_BLKPAG_UXN|LX_BLKPAG_PXN) 714 orr x0, x0, #(LX_BLKPAG_UXN|LX_BLKPAG_PXN)
715 str x0, [x1, x2, lsl #3] /* l2table[x2] = x0 */ 715 str x0, [x1, x2, lsl #3] /* l2table[x2] = x0 */
716 add x2, x2, #1 716 add x2, x2, #1
7179: 7179:
718 cmp x2, x3 /* including the L2 block of _end[] */ 718 cmp x2, x3 /* including the L2 block of _end[] */
719 bls 1b 719 bls 1b
720 720
721 721
722 VERBOSE("Creating devmap tables\r\n") 722 VERBOSE("Creating devmap tables\r\n")
723 /* devmap=PA table, link L1->L2 */ 723 /* devmap=PA table, link L1->L2 */
724 ADDR x0, ttbr1_l1table_kva 724 ADDR x0, ttbr1_l1table_kva
725 ldr x1, .L_devmap_addr 725 ldr x1, .L_devmap_addr
726 ADDR x2, ttbr1_l2table_devmap 726 ADDR x2, ttbr1_l2table_devmap
727 bl l1_settable 727 bl l1_settable
728 728
729 ldp x0, lr, [sp], #16 729 ldp x0, lr, [sp], #16
730 ret 730 ret
731 731
732 .align 3 732 .align 3
733.L_devmap_addr: 733.L_devmap_addr:
734 .quad VM_KERNEL_IO_ADDRESS 734 .quad VM_KERNEL_IO_ADDRESS
735 735
736/* 736/*
737 * x0 = l0table 737 * x0 = l0table
738 * x1 = vaddr 738 * x1 = vaddr
739 * x2 = l1table 739 * x2 = l1table
740 */ 740 */
741l0_settable: 741l0_settable:
742 stp x0, lr, [sp, #-16]! 742 stp x0, lr, [sp, #-16]!
743 743
744 and x2, x2, #~PAGE_MASK 744 and x2, x2, #~PAGE_MASK
745 mov x8, #L0_TABLE 745 mov x8, #L0_TABLE
746 orr x2, x2, x8 746 orr x2, x2, x8
747 and x1, x1, #L0_ADDR_BITS 747 and x1, x1, #L0_ADDR_BITS
748 lsr x1, x1, #L0_SHIFT 748 lsr x1, x1, #L0_SHIFT
749 str x2, [x0, x1, lsl #3] /* l0table[x1] = x2 */ 749 str x2, [x0, x1, lsl #3] /* l0table[x1] = x2 */
750 750
751#ifdef DEBUG_MMU 751#ifdef DEBUG_MMU
752 PRINT("L0 entry[") 752 PRINT("L0 entry[")
753 bl printn_x1 753 bl printn_x1
754 PRINT("]=") 754 PRINT("]=")
755 bl print_x2 755 bl print_x2
756#endif 756#endif
757 757
758 ldp x0, lr, [sp], #16 758 ldp x0, lr, [sp], #16
759 ret 759 ret
760 760
761/* 761/*
762 * x0 = l1table 762 * x0 = l1table
763 * x1 = vaddr 763 * x1 = vaddr
764 * x2 = paddr 764 * x2 = paddr
765 * x3 = attr 765 * x3 = attr
766 * x4 = N entries 766 * x4 = N entries
767 */ 767 */
768l1_setblocks: 768l1_setblocks:
769 stp x0, lr, [sp, #-16]! 769 stp x0, lr, [sp, #-16]!
770 770
771 and x2, x2, #L1_ADDR_BITS 771 and x2, x2, #L1_ADDR_BITS
772 mov x8, #L1_BLOCK 772 mov x8, #L1_BLOCK
773 orr x2, x2, x8 773 orr x2, x2, x8
774 orr x2, x2, x3 774 orr x2, x2, x3
775 mov x8, #(LX_BLKPAG_AF|LX_BLKPAG_AP_RW) 775 mov x8, #(LX_BLKPAG_AF|LX_BLKPAG_AP_RW)
776 orr x2, x2, x8 776 orr x2, x2, x8
777#ifdef MULTIPROCESSOR 777#ifdef MULTIPROCESSOR
778 orr x2, x2, #LX_BLKPAG_SH_IS 778 orr x2, x2, #LX_BLKPAG_SH_IS
779#endif 779#endif
780 and x1, x1, #L1_ADDR_BITS 780 and x1, x1, #L1_ADDR_BITS
781 lsr x1, x1, #L1_SHIFT 781 lsr x1, x1, #L1_SHIFT
7821: 7821:
783 str x2, [x0, x1, lsl #3] /* l1table[x1] = x2 */ 783 str x2, [x0, x1, lsl #3] /* l1table[x1] = x2 */
784#ifdef DEBUG_MMU 784#ifdef DEBUG_MMU
785 PRINT("L1 entry[") 785 PRINT("L1 entry[")
786 bl printn_x1 786 bl printn_x1
787 PRINT("]=") 787 PRINT("]=")
788 bl print_x2 788 bl print_x2
789#endif 789#endif
790 mov x3, #L1_SIZE 790 mov x3, #L1_SIZE
791 add x2, x2, x3 791 add x2, x2, x3
792 add x1, x1, #1 792 add x1, x1, #1
793 subs x4, x4, #1 793 subs x4, x4, #1
794 bne 1b 794 bne 1b
795 795
796 ldp x0, lr, [sp], #16 796 ldp x0, lr, [sp], #16
797 ret 797 ret
798 798
799/* 799/*
800 * x0 = l1table 800 * x0 = l1table
801 * x1 = vaddr 801 * x1 = vaddr
802 * x2 = l2table 802 * x2 = l2table
803 */ 803 */
804l1_settable: 804l1_settable:
805 stp x0, lr, [sp, #-16]! 805 stp x0, lr, [sp, #-16]!
806 806
807 and x2, x2, #~PAGE_MASK 807 and x2, x2, #~PAGE_MASK
808 mov x8, #L1_TABLE 808 mov x8, #L1_TABLE
809 orr x2, x2, x8 809 orr x2, x2, x8
810 and x1, x1, #L1_ADDR_BITS 810 and x1, x1, #L1_ADDR_BITS
811 lsr x1, x1, #L1_SHIFT 811 lsr x1, x1, #L1_SHIFT
812 str x2, [x0, x1, lsl #3] /* l1table[x1] = x2 */ 812 str x2, [x0, x1, lsl #3] /* l1table[x1] = x2 */
813 813
814#ifdef DEBUG_MMU 814#ifdef DEBUG_MMU
815 PRINT("L1 entry[") 815 PRINT("L1 entry[")
816 bl printn_x1 816 bl printn_x1
817 PRINT("]=") 817 PRINT("]=")
818 bl print_x2 818 bl print_x2
819#endif 819#endif
820 820
821 ldp x0, lr, [sp], #16 821 ldp x0, lr, [sp], #16
822 ret 822 ret
823 823
824/* 824/*
825 * x0 = l2table 825 * x0 = l2table
826 * x1 = vaddr 826 * x1 = vaddr
827 * x2 = paddr 827 * x2 = paddr
828 * x3 = attr 828 * x3 = attr
829 * x4 = N entries 829 * x4 = N entries
830 */ 830 */
831l2_setblocks: 831l2_setblocks:
832 stp x0, lr, [sp, #-16]! 832 stp x0, lr, [sp, #-16]!
833 833
834 and x2, x2, #L2_BLOCK_MASK 834 and x2, x2, #L2_BLOCK_MASK
835 mov x8, #L2_BLOCK 835 mov x8, #L2_BLOCK
836 orr x2, x2, x8 836 orr x2, x2, x8
837 orr x2, x2, x3 837 orr x2, x2, x3
838 mov x8, #(LX_BLKPAG_AF|LX_BLKPAG_AP_RW) 838 mov x8, #(LX_BLKPAG_AF|LX_BLKPAG_AP_RW)
839 orr x2, x2, x8 839 orr x2, x2, x8
840#ifdef MULTIPROCESSOR 840#ifdef MULTIPROCESSOR
841 orr x2, x2, #LX_BLKPAG_SH_IS 841 orr x2, x2, #LX_BLKPAG_SH_IS
842#endif 842#endif
843 and x1, x1, #L2_ADDR_BITS 843 and x1, x1, #L2_ADDR_BITS
844 lsr x1, x1, #L2_SHIFT 844 lsr x1, x1, #L2_SHIFT
8451: 8451:
846 str x2, [x0, x1, lsl #3] /* l2table[x1] = x2 */ 846 str x2, [x0, x1, lsl #3] /* l2table[x1] = x2 */
847#ifdef DEBUG_MMU 847#ifdef DEBUG_MMU
848 PRINT("L2 entry[") 848 PRINT("L2 entry[")
849 bl printn_x1 849 bl printn_x1
850 PRINT("]=") 850 PRINT("]=")
851 bl print_x2 851 bl print_x2
852#endif 852#endif
853 mov x3, #L2_SIZE 853 mov x3, #L2_SIZE
854 add x2, x2, x3 854 add x2, x2, x3
855 add x1, x1, #1 855 add x1, x1, #1
856 subs x4, x4, #1 856 subs x4, x4, #1
857 bne 1b 857 bne 1b
858 858
859 ldp x0, lr, [sp], #16 859 ldp x0, lr, [sp], #16
860 ret 860 ret
861 861
862init_sysregs: 862init_sysregs:
863 stp x0, lr, [sp, #-16]! 863 stp x0, lr, [sp, #-16]!
864 864
865 /* Disable debug event */ 865 /* Disable debug event */
866 msr mdscr_el1, xzr 866 msr mdscr_el1, xzr
867 867
868 /* Clear context id register */ 868 /* Clear context id register */
869 msr contextidr_el1, xzr 869 msr contextidr_el1, xzr
870 870
871 /* No trap system register access, and Trap FP/SIMD access */ 871 /* No trap system register access, and Trap FP/SIMD access */
872 msr cpacr_el1, xzr 872 msr cpacr_el1, xzr
873 873
874 /* allow to read CNTVCT_EL0 and CNTFRQ_EL0 from EL0 */ 874 /* allow to read CNTVCT_EL0 and CNTFRQ_EL0 from EL0 */
875 mrs x0, cntkctl_el1 875 mrs x0, cntkctl_el1
876 orr x0, x0, #CNTKCTL_EL0VCTEN 876 orr x0, x0, #CNTKCTL_EL0VCTEN
877 msr cntkctl_el1, x0 877 msr cntkctl_el1, x0
878 878
879 /* any exception not masked */ 879 /* any exception not masked */
880 msr daif, xzr 880 msr daif, xzr
881 881
882 ldp x0, lr, [sp], #16 882 ldp x0, lr, [sp], #16
883 ret 883 ret
884 884
885mmu_disable: 885mmu_disable:
886 dsb sy 886 dsb sy
887 mrs x0, sctlr_el1 887 mrs x0, sctlr_el1
888 bic x0, x0, SCTLR_M /* clear MMU enable bit */ 888 bic x0, x0, SCTLR_M /* clear MMU enable bit */
889 msr sctlr_el1, x0 889 msr sctlr_el1, x0
890 isb 890 isb
891 ret 891 ret
892 892
893mmu_enable: 893mmu_enable:
894 dsb sy 894 dsb sy
895 895
896 ADDR x0, ttbr0_l0table 896 ADDR x0, ttbr0_l0table
897 msr ttbr0_el1, x0 897 msr ttbr0_el1, x0
898 ADDR x0, ttbr1_l0table 898 ADDR x0, ttbr1_l0table
899 msr ttbr1_el1, x0 899 msr ttbr1_el1, x0
900 isb 900 isb
901 901
902 /* Invalidate all TLB */ 902 /* Invalidate all TLB */
903 dsb ishst 903 dsb ishst
904#ifdef MULTIPROCESSOR 904#ifdef MULTIPROCESSOR
905 tlbi vmalle1is 905 tlbi vmalle1is
906#else 906#else
907 tlbi vmalle1 907 tlbi vmalle1
908#endif 908#endif
909 dsb ish 909 dsb ish
910 isb 910 isb
911 911
912 ldr x0, mair_setting 912 ldr x0, mair_setting
913 msr mair_el1, x0 913 msr mair_el1, x0
914 914
915 915
916 /* TCR_EL1:IPS[34:32] = AA64MMFR0:PARange[3:0] */ 916 /* TCR_EL1:IPS[34:32] = AA64MMFR0:PARange[3:0] */
917 ldr x0, tcr_setting 917 ldr x0, tcr_setting
918 mrs x1, id_aa64mmfr0_el1 918 mrs x1, id_aa64mmfr0_el1
919 bfi x0, x1, #32, #3 919 bfi x0, x1, #32, #3
 920#ifdef MULTIPROCESSOR
 921 ldr x1, tcr_setting_inner_shareable
 922 orr x0, x0, x1
 923#endif
920 msr tcr_el1, x0 924 msr tcr_el1, x0
921 925
922 /* 926 /*
923 * configure SCTLR 927 * configure SCTLR
924 */ 928 */
925 mrs x0, sctlr_el1 929 mrs x0, sctlr_el1
926 ldr x1, sctlr_clear 930 ldr x1, sctlr_clear
927 bic x0, x0, x1 931 bic x0, x0, x1
928 ldr x1, sctlr_set 932 ldr x1, sctlr_set
929 orr x0, x0, x1 933 orr x0, x0, x1
930 934
931 ldr x1, sctlr_ee 935 ldr x1, sctlr_ee
932#ifdef __AARCH64EB__ 936#ifdef __AARCH64EB__
933 orr x0, x0, x1 /* set: BigEndian */ 937 orr x0, x0, x1 /* set: BigEndian */
934#else 938#else
935 bic x0, x0, x1 /* clear: LittleEndian */ 939 bic x0, x0, x1 /* clear: LittleEndian */
936#endif 940#endif
937#ifdef MULTIPROCESSOR 
938 ldr x1, tcr_setting_inner_shareable 
939 orr x0, x0, x1 
940#endif 
941 msr sctlr_el1, x0 /* enabling MMU! */ 941 msr sctlr_el1, x0 /* enabling MMU! */
942 isb 942 isb
943 943
944 ret 944 ret
945 945
946 .align 3 946 .align 3
947mair_setting: 947mair_setting:
948 .quad ( \ 948 .quad ( \
949 __SHIFTIN(MAIR_NORMAL_WB, MAIR_ATTR0) | \ 949 __SHIFTIN(MAIR_NORMAL_WB, MAIR_ATTR0) | \
950 __SHIFTIN(MAIR_NORMAL_NC, MAIR_ATTR1) | \ 950 __SHIFTIN(MAIR_NORMAL_NC, MAIR_ATTR1) | \
951 __SHIFTIN(MAIR_NORMAL_WT, MAIR_ATTR2) | \ 951 __SHIFTIN(MAIR_NORMAL_WT, MAIR_ATTR2) | \
952 __SHIFTIN(MAIR_DEVICE_nGnRnE, MAIR_ATTR3)) 952 __SHIFTIN(MAIR_DEVICE_nGnRnE, MAIR_ATTR3))
953 953
954#define VIRT_BIT 48 954#define VIRT_BIT 48
955tcr_setting: 955tcr_setting:
956 .quad ( \ 956 .quad ( \
957 __SHIFTIN(64 - VIRT_BIT, TCR_T1SZ) | \ 957 __SHIFTIN(64 - VIRT_BIT, TCR_T1SZ) | \
958 __SHIFTIN(64 - VIRT_BIT, TCR_T0SZ) | \ 958 __SHIFTIN(64 - VIRT_BIT, TCR_T0SZ) | \
959 TCR_AS64K | \ 959 TCR_AS64K | \
960 TCR_TG1_4KB | TCR_TG0_4KB | \ 960 TCR_TG1_4KB | TCR_TG0_4KB | \
961 TCR_ORGN0_WB_WA | \ 961 TCR_ORGN0_WB_WA | \
962 TCR_IRGN0_WB_WA | \ 962 TCR_IRGN0_WB_WA | \
963 TCR_ORGN1_WB_WA | \ 963 TCR_ORGN1_WB_WA | \
964 TCR_IRGN1_WB_WA) 964 TCR_IRGN1_WB_WA)
965#ifdef MULTIPROCESSOR 965#ifdef MULTIPROCESSOR
966tcr_setting_inner_shareable: 966tcr_setting_inner_shareable:
967 .quad (TCR_SH0_INNER | TCR_SH1_INNER) 967 .quad (TCR_SH0_INNER | TCR_SH1_INNER)
968#endif 968#endif
969 969
970 970
971#ifdef AARCH64_ALIGNMENT_CHECK 971#ifdef AARCH64_ALIGNMENT_CHECK
972#define SCTLR_A_CONFIG SCTLR_A 972#define SCTLR_A_CONFIG SCTLR_A
973#else 973#else
974#define SCTLR_A_CONFIG 0 974#define SCTLR_A_CONFIG 0
975#endif 975#endif
976 976
977#ifdef AARCH64_EL0_STACK_ALIGNMENT_CHECK 977#ifdef AARCH64_EL0_STACK_ALIGNMENT_CHECK
978#define SCTLR_SA0_CONFIG SCTLR_SA0 978#define SCTLR_SA0_CONFIG SCTLR_SA0
979#else 979#else
980#define SCTLR_SA0_CONFIG 0 980#define SCTLR_SA0_CONFIG 0
981#endif 981#endif
982 982
983#ifdef AARCH64_EL1_STACK_ALIGNMENT_CHECK 983#ifdef AARCH64_EL1_STACK_ALIGNMENT_CHECK
984#define SCTLR_SA_CONFIG SCTLR_SA 984#define SCTLR_SA_CONFIG SCTLR_SA
985#else 985#else
986#define SCTLR_SA_CONFIG 0 986#define SCTLR_SA_CONFIG 0
987#endif 987#endif
988 988
989 989
990sctlr_ee: 990sctlr_ee:
991 .quad (SCTLR_EE | SCTLR_EOE) /* Endiannes of Exception and EL0 */ 991 .quad (SCTLR_EE | SCTLR_EOE) /* Endiannes of Exception and EL0 */
992sctlr_set: 992sctlr_set:
993 .quad ( \ 993 .quad ( \
994 SCTLR_LSMAOE | /* Load/Store Multiple Atomicity and Ordering */ \ 994 SCTLR_LSMAOE | /* Load/Store Multiple Atomicity and Ordering */ \
995 SCTLR_nTLSMD | /* no Trap Load/Store Multiple to Device */ \ 995 SCTLR_nTLSMD | /* no Trap Load/Store Multiple to Device */ \
996 SCTLR_UCI | /* Enables EL0 DC {CVAU,CIVAC,CVAC}, IC IVAU */ \ 996 SCTLR_UCI | /* Enables EL0 DC {CVAU,CIVAC,CVAC}, IC IVAU */ \
997 SCTLR_SPAN | /* This field resets to 1 */ \ 997 SCTLR_SPAN | /* This field resets to 1 */ \
998 SCTLR_UCT | /* Enables EL0 access to the CTR_EL0 */ \ 998 SCTLR_UCT | /* Enables EL0 access to the CTR_EL0 */ \
999 SCTLR_nTWE | /* EL0 WFE non-trapping */ \ 999 SCTLR_nTWE | /* EL0 WFE non-trapping */ \
1000 SCTLR_nTWI | /* EL0 WFI non-trapping */ \ 1000 SCTLR_nTWI | /* EL0 WFI non-trapping */ \
1001 SCTLR_DZE | /* Enables access to the DC ZVA instruction */ \ 1001 SCTLR_DZE | /* Enables access to the DC ZVA instruction */ \
1002 SCTLR_I | /* Instruction cache enable */ \ 1002 SCTLR_I | /* Instruction cache enable */ \
1003 SCTLR_SED | /* SETEND instruction disable */ \ 1003 SCTLR_SED | /* SETEND instruction disable */ \
1004 SCTLR_C | /* Cache enable */ \ 1004 SCTLR_C | /* Cache enable */ \
1005 SCTLR_M | /* MMU Enable */ \ 1005 SCTLR_M | /* MMU Enable */ \
1006 SCTLR_SA0_CONFIG | \ 1006 SCTLR_SA0_CONFIG | \
1007 SCTLR_SA_CONFIG | \ 1007 SCTLR_SA_CONFIG | \
1008 SCTLR_A_CONFIG | \ 1008 SCTLR_A_CONFIG | \
1009 0) 1009 0)
1010sctlr_clear: 1010sctlr_clear:
1011 .quad ( \ 1011 .quad ( \
1012 SCTLR_IESB | /* Enable Implicit ErrorSynchronizationBarrier */ \ 1012 SCTLR_IESB | /* Enable Implicit ErrorSynchronizationBarrier */ \
1013 SCTLR_WXN | /* Write permission implies Execute Never (W^X) */ \ 1013 SCTLR_WXN | /* Write permission implies Execute Never (W^X) */ \
1014 SCTLR_UMA | /* EL0 Controls access to interrupt masks */ \ 1014 SCTLR_UMA | /* EL0 Controls access to interrupt masks */ \
1015 SCTLR_ITD | /* IT instruction disable */ \ 1015 SCTLR_ITD | /* IT instruction disable */ \
1016 SCTLR_THEE | /* T32EE is not implemented */ \ 1016 SCTLR_THEE | /* T32EE is not implemented */ \
1017 SCTLR_CP15BEN | /* CP15 barrier enable */ \ 1017 SCTLR_CP15BEN | /* CP15 barrier enable */ \
1018 SCTLR_SA0 | /* Enable EL0 stack alignment check */ \ 1018 SCTLR_SA0 | /* Enable EL0 stack alignment check */ \
1019 SCTLR_SA | /* Enable SP alignment check */ \ 1019 SCTLR_SA | /* Enable SP alignment check */ \
1020 SCTLR_A | /* Alignment check enable */ \ 1020 SCTLR_A | /* Alignment check enable */ \
1021 0) 1021 0)
1022 1022
1023 1023
1024 .bss 1024 .bss
1025 1025
1026 .align PGSHIFT 1026 .align PGSHIFT
1027 .global _C_LABEL(lwp0uspace) 1027 .global _C_LABEL(lwp0uspace)
1028_C_LABEL(lwp0uspace): 1028_C_LABEL(lwp0uspace):
1029 .space UPAGES * PAGE_SIZE 1029 .space UPAGES * PAGE_SIZE
1030bootstk: 1030bootstk:
1031 1031
1032#ifdef MULTIPROCESSOR 1032#ifdef MULTIPROCESSOR
1033 .space BOOT_STACKSIZE * (MAXCPUS - 1) 1033 .space BOOT_STACKSIZE * (MAXCPUS - 1)
1034bootstk_cpus: 1034bootstk_cpus:
1035#endif 1035#endif
1036 1036
1037 1037
1038 .align PGSHIFT 1038 .align PGSHIFT
1039mmutables_start: 1039mmutables_start:
1040/* 1040/*
1041 * PA == VA mapping using L1 1G block (whole 32bit) 1041 * PA == VA mapping using L1 1G block (whole 32bit)
1042 */ 1042 */
1043ttbr0_l0table: 1043ttbr0_l0table:
1044 .space PAGE_SIZE 1044 .space PAGE_SIZE
1045ttbr0_l1table: 1045ttbr0_l1table:
1046 .space PAGE_SIZE 1046 .space PAGE_SIZE
1047 1047
1048/* 1048/*
1049 * KVA => PA mapping using L2 2MB block (kernelsize, max 2MB*512=2Gbyte) 1049 * KVA => PA mapping using L2 2MB block (kernelsize, max 2MB*512=2Gbyte)
1050 * DEVMAP => PA mapping using L2 2MB block (devmap size, max 2MB*512=2Gbyte) 1050 * DEVMAP => PA mapping using L2 2MB block (devmap size, max 2MB*512=2Gbyte)
1051 * KSEG => PA mapping using L1 1GB block * 512 1051 * KSEG => PA mapping using L1 1GB block * 512
1052 */ 1052 */
1053ttbr1_l0table: 1053ttbr1_l0table:
1054 .space PAGE_SIZE 1054 .space PAGE_SIZE
1055ttbr1_l1table_kseg: 1055ttbr1_l1table_kseg:
1056 .space PAGE_SIZE 1056 .space PAGE_SIZE
1057ttbr1_l1table_kva: 1057ttbr1_l1table_kva:
1058 .space PAGE_SIZE 1058 .space PAGE_SIZE
1059ttbr1_l2table_kva: 1059ttbr1_l2table_kva:
1060 .space PAGE_SIZE 1060 .space PAGE_SIZE
1061ttbr1_l2table_devmap: 1061ttbr1_l2table_devmap:
1062 .space PAGE_SIZE 1062 .space PAGE_SIZE
1063mmutables_end: 1063mmutables_end: