Fri Aug 28 13:00:29 2020 UTC ()
Change to the KVA stack address straight after the MMU is turned on


(skrll)
diff -r1.24 -r1.25 src/sys/arch/arm/arm/armv6_start.S

cvs diff -r1.24 -r1.25 src/sys/arch/arm/arm/armv6_start.S (expand / switch to unified diff)

--- src/sys/arch/arm/arm/armv6_start.S 2020/08/28 12:56:19 1.24
+++ src/sys/arch/arm/arm/armv6_start.S 2020/08/28 13:00:29 1.25
@@ -1,14 +1,14 @@ @@ -1,14 +1,14 @@
1/* $NetBSD: armv6_start.S,v 1.24 2020/08/28 12:56:19 skrll Exp $ */ 1/* $NetBSD: armv6_start.S,v 1.25 2020/08/28 13:00:29 skrll Exp $ */
2 2
3/*- 3/*-
4 * Copyright (c) 2012, 2017, 2018 The NetBSD Foundation, Inc. 4 * Copyright (c) 2012, 2017, 2018 The NetBSD Foundation, Inc.
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * This code is derived from software contributed to The NetBSD Foundation 7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Matt Thomas of 3am Software Foundry and Nick Hudson. 8 * by Matt Thomas of 3am Software Foundry and Nick Hudson.
9 * 9 *
10 * Redistribution and use in source and binary forms, with or without 10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions 11 * modification, are permitted provided that the following conditions
12 * are met: 12 * are met:
13 * 1. Redistributions of source code must retain the above copyright 13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer. 14 * notice, this list of conditions and the following disclaimer.
@@ -457,26 +457,29 @@ generic_startv7: @@ -457,26 +457,29 @@ generic_startv7:
457 /* 457 /*
458 * Turn on the MMU. Return to virtual address space. 458 * Turn on the MMU. Return to virtual address space.
459 */ 459 */
460 movw r0, #:lower16:TEMP_L1_TABLE 460 movw r0, #:lower16:TEMP_L1_TABLE
461 movt r0, #:upper16:TEMP_L1_TABLE 461 movt r0, #:upper16:TEMP_L1_TABLE
462 sub r0, R_VTOPDIFF 462 sub r0, R_VTOPDIFF
463 463
 464 // Return to virtual address after the call to armv7_mmuinit 464 // Return to virtual address after the call to armv7_mmuinit
465 movw lr, #:lower16:generic_vstartv7 465 movw lr, #:lower16:generic_vstartv7
466 movt lr, #:upper16:generic_vstartv7 466 movt lr, #:upper16:generic_vstartv7
467 b armv7_mmuinit 467 b armv7_mmuinit
468 468
469generic_vstartv7: 469generic_vstartv7:
 470 // Stack to KVA address
 471 add sp, sp, R_VTOPDIFF
 472
470 VPRINTF("virtual\n\r") 473 VPRINTF("virtual\n\r")
471 474
472#if defined(KASAN) 475#if defined(KASAN)
473 ldr r0, =start_stacks_bottom 476 ldr r0, =start_stacks_bottom
474 bl _C_LABEL(kasan_early_init) 477 bl _C_LABEL(kasan_early_init)
475 478
476 VPRINTF("kasan\n\r") 479 VPRINTF("kasan\n\r")
477#endif 480#endif
478 481
479 /* r0 = &cpu_info_store[0] */ 482 /* r0 = &cpu_info_store[0] */
480 movw r0, #:lower16:cpu_info_store 483 movw r0, #:lower16:cpu_info_store
481 movt r0, #:upper16:cpu_info_store 484 movt r0, #:upper16:cpu_info_store
482 485
@@ -511,26 +514,29 @@ generic_startv6: @@ -511,26 +514,29 @@ generic_startv6:
511 bl arm_build_translation_table 514 bl arm_build_translation_table
512 515
513 XPUTC(#'E') 516 XPUTC(#'E')
514 /* 517 /*
515 * Turn on the MMU. Return to new enabled address space. 518 * Turn on the MMU. Return to new enabled address space.
516 */ 519 */
517 ldr r0, =TEMP_L1_TABLE 520 ldr r0, =TEMP_L1_TABLE
518 sub r0, R_VTOPDIFF 521 sub r0, R_VTOPDIFF
519 522
520 ldr lr, =generic_vstartv6 523 ldr lr, =generic_vstartv6
521 b armv6_mmuinit 524 b armv6_mmuinit
522 525
523generic_vstartv6: 526generic_vstartv6:
 527 // Stack to KVA address
 528 add sp, sp, R_VTOPDIFF
 529
524 VPRINTF("virtual\n\r") 530 VPRINTF("virtual\n\r")
525 531
526#if defined(KASAN) 532#if defined(KASAN)
527 ldr r0, =start_stacks_bottom 533 ldr r0, =start_stacks_bottom
528 bl _C_LABEL(kasan_early_init) 534 bl _C_LABEL(kasan_early_init)
529 535
530 VPRINTF("kasan\n\r") 536 VPRINTF("kasan\n\r")
531#endif 537#endif
532 538
533 VPRINTF("go\n\r") 539 VPRINTF("go\n\r")
534 540
535 /* 541 /*
536 * Jump to start in locore.S, which in turn will call initarm and main. 542 * Jump to start in locore.S, which in turn will call initarm and main.
@@ -872,27 +878,27 @@ ENTRY_NP(cpu_mpstart) @@ -872,27 +878,27 @@ ENTRY_NP(cpu_mpstart)
872 movw r0, #:lower16:TEMP_L1_TABLE 878 movw r0, #:lower16:TEMP_L1_TABLE
873 movt r0, #:upper16:TEMP_L1_TABLE 879 movt r0, #:upper16:TEMP_L1_TABLE
874 sub r0, R_VTOPDIFF 880 sub r0, R_VTOPDIFF
875 881
876 movw lr, #:lower16:armv7_mpcontinuation 882 movw lr, #:lower16:armv7_mpcontinuation
877 movt lr, #:upper16:armv7_mpcontinuation 883 movt lr, #:upper16:armv7_mpcontinuation
878 b armv7_mmuinit 884 b armv7_mmuinit
879ASEND(cpu_mpstart) 885ASEND(cpu_mpstart)
880 886
881/* 887/*
882 * Now running with real kernel VA via bootstrap tables 888 * Now running with real kernel VA via bootstrap tables
883 */ 889 */
884armv7_mpcontinuation: 890armv7_mpcontinuation:
885 // Adjust stack back to KVA address 891 // Stack to KVA address
886 add sp, sp, R_VTOPDIFF 892 add sp, sp, R_VTOPDIFF
887 893
888 VPRINTF("virtual\n\r") 894 VPRINTF("virtual\n\r")
889 895
890 // index into cpu_mpidr[] or cpu_number if not found 896 // index into cpu_mpidr[] or cpu_number if not found
891 mov r0, R_INDEX 897 mov r0, R_INDEX
892 bl cpu_init_secondary_processor 898 bl cpu_init_secondary_processor
893 899
894 /* r0 = &cpu_info_store[0] */ 900 /* r0 = &cpu_info_store[0] */
895 movw r0, #:lower16:cpu_info_store 901 movw r0, #:lower16:cpu_info_store
896 movt r0, #:upper16:cpu_info_store 902 movt r0, #:upper16:cpu_info_store
897 903
898 mov r4, #CPU_INFO_SIZE 904 mov r4, #CPU_INFO_SIZE