Tue Aug 29 06:28:27 2017 UTC
Revert the *_PROT_RO_* define changes and do all the work in the *_PROT()
macros for RO kernel pages.

cats boots again - no idea how the breakage slipped through my testing previously


(skrll)
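
For illustration, a minimal standalone sketch of the selection logic the
reworked *_PROT() macros implement, with placeholder bit values standing in
for the real AP encodings from arm/arm32/pte.h (only the shape of the macro
matches the header; every constant below is made up for the sketch):

/*
 * Sketch only: mirrors the new L2_S_PROT() kernel/user selection so the
 * behaviour is easy to see in isolation.  On MMU classes whose
 * L2_S_PROT_RO define is 0 (generic, xscale) a kernel "read-only" request
 * still yields the writable encoding; armv6/armv7 get a real RO encoding.
 */
#include <stdio.h>

#define VM_PROT_READ	0x01		/* placeholder protection bits */
#define VM_PROT_WRITE	0x02
#define PTE_KERNEL	0
#define PTE_USER	1

#define L2_S_PROT_U	0x020		/* placeholder AP encodings */
#define L2_S_PROT_W	0x010
#define L2_S_PROT_RO	0x200		/* set to 0 to model generic/xscale */

#define	L2_S_PROT(ku, pr)	( \
	(((ku) == PTE_USER) ? \
	    L2_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : 0) \
	: \
	    (((L2_S_PROT_RO && \
		((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
		L2_S_PROT_RO : L2_S_PROT_W))) \
    )

int
main(void)
{
	/* Kernel read-only mapping: RO bits when the MMU class has them. */
	printf("kernel RO: %#x\n", L2_S_PROT(PTE_KERNEL, VM_PROT_READ));
	/* Kernel read/write mapping: always the writable encoding. */
	printf("kernel RW: %#x\n",
	    L2_S_PROT(PTE_KERNEL, VM_PROT_READ | VM_PROT_WRITE));
	/* User read-only mapping: user bit only, W added when writable. */
	printf("user RO:   %#x\n", L2_S_PROT(PTE_USER, VM_PROT_READ));
	return 0;
}
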
diff -r1.151 -r1.152 src/sys/arch/arm/include/arm32/pmap.h

--- src/sys/arch/arm/include/arm32/pmap.h 2017/07/11 20:42:17 1.151
+++ src/sys/arch/arm/include/arm32/pmap.h 2017/08/29 06:28:26 1.152
@@ -1,14 +1,14 @@
-/*	$NetBSD: pmap.h,v 1.151 2017/07/11 20:42:17 skrll Exp $	*/
+/*	$NetBSD: pmap.h,v 1.152 2017/08/29 06:28:26 skrll Exp $	*/
 
 /*
  * Copyright (c) 2002, 2003 Wasabi Systems, Inc.
  * All rights reserved.
  *
  * Written by Jason R. Thorpe & Steve C. Woodford for Wasabi Systems, Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright
@@ -718,84 +718,84 @@ extern void (*pmap_zero_page_func)(paddr
 #define	PMAP_DOMAIN_KERNEL	0	/* The kernel pmap uses domain #0 */
 #ifdef ARM_MMU_EXTENDED
 #define	PMAP_DOMAIN_USER	1	/* User pmaps use domain #1 */
 #endif
 
 /*
  * These macros define the various bit masks in the PTE.
  *
  * We use these macros since we use different bits on different processor
  * models.
  */
 #define	L1_S_PROT_U_generic	(L1_S_AP(AP_U))
 #define	L1_S_PROT_W_generic	(L1_S_AP(AP_W))
-#define	L1_S_PROT_RO_generic	(L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define	L1_S_PROT_RO_generic	(0)
 #define	L1_S_PROT_MASK_generic	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_PROT_U_xscale	(L1_S_AP(AP_U))
 #define	L1_S_PROT_W_xscale	(L1_S_AP(AP_W))
-#define	L1_S_PROT_RO_xscale	(L1_S_AP(AP_R)) /* AP_W == AP_R */
+#define	L1_S_PROT_RO_xscale	(0)
 #define	L1_S_PROT_MASK_xscale	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_PROT_U_armv6	(L1_S_AP(AP_R) | L1_S_AP(AP_U))
 #define	L1_S_PROT_W_armv6	(L1_S_AP(AP_W))
 #define	L1_S_PROT_RO_armv6	(L1_S_AP(AP_R) | L1_S_AP(AP_RO))
 #define	L1_S_PROT_MASK_armv6	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_PROT_U_armv7	(L1_S_AP(AP_R) | L1_S_AP(AP_U))
 #define	L1_S_PROT_W_armv7	(L1_S_AP(AP_W))
 #define	L1_S_PROT_RO_armv7	(L1_S_AP(AP_R) | L1_S_AP(AP_RO))
 #define	L1_S_PROT_MASK_armv7	(L1_S_PROT_U|L1_S_PROT_W|L1_S_PROT_RO)
 
 #define	L1_S_CACHE_MASK_generic	(L1_S_B|L1_S_C)
 #define	L1_S_CACHE_MASK_xscale	(L1_S_B|L1_S_C|L1_S_XS_TEX(TEX_XSCALE_X))
 #define	L1_S_CACHE_MASK_armv6	(L1_S_B|L1_S_C|L1_S_XS_TEX(TEX_ARMV6_TEX))
 #define	L1_S_CACHE_MASK_armv6n	(L1_S_B|L1_S_C|L1_S_XS_TEX(TEX_ARMV6_TEX)|L1_S_V6_S)
 #define	L1_S_CACHE_MASK_armv7	(L1_S_B|L1_S_C|L1_S_XS_TEX(TEX_ARMV6_TEX)|L1_S_V6_S)
 
 #define	L2_L_PROT_U_generic	(L2_AP(AP_U))
 #define	L2_L_PROT_W_generic	(L2_AP(AP_W))
-#define	L2_L_PROT_RO_generic	(L2_AP(AP_R))
+#define	L2_L_PROT_RO_generic	(0)
 #define	L2_L_PROT_MASK_generic	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_PROT_U_xscale	(L2_AP(AP_U))
 #define	L2_L_PROT_W_xscale	(L2_AP(AP_W))
-#define	L2_L_PROT_RO_xscale	(L2_AP(AP_R))
+#define	L2_L_PROT_RO_xscale	(0)
 #define	L2_L_PROT_MASK_xscale	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_PROT_U_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_U))
 #define	L2_L_PROT_W_armv6n	(L2_AP0(AP_W))
 #define	L2_L_PROT_RO_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_RO))
 #define	L2_L_PROT_MASK_armv6n	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_PROT_U_armv7	(L2_AP0(AP_R) | L2_AP0(AP_U))
 #define	L2_L_PROT_W_armv7	(L2_AP0(AP_W))
 #define	L2_L_PROT_RO_armv7	(L2_AP0(AP_R) | L2_AP0(AP_RO))
 #define	L2_L_PROT_MASK_armv7	(L2_L_PROT_U|L2_L_PROT_W|L2_L_PROT_RO)
 
 #define	L2_L_CACHE_MASK_generic	(L2_B|L2_C)
 #define	L2_L_CACHE_MASK_xscale	(L2_B|L2_C|L2_XS_L_TEX(TEX_XSCALE_X))
 #define	L2_L_CACHE_MASK_armv6	(L2_B|L2_C|L2_V6_L_TEX(TEX_ARMV6_TEX))
 #define	L2_L_CACHE_MASK_armv6n	(L2_B|L2_C|L2_V6_L_TEX(TEX_ARMV6_TEX)|L2_XS_S)
 #define	L2_L_CACHE_MASK_armv7	(L2_B|L2_C|L2_V6_L_TEX(TEX_ARMV6_TEX)|L2_XS_S)
 
 #define	L2_S_PROT_U_generic	(L2_AP(AP_U))
 #define	L2_S_PROT_W_generic	(L2_AP(AP_W))
-#define	L2_S_PROT_RO_generic	(L2_AP(AP_R))
+#define	L2_S_PROT_RO_generic	(0)
 #define	L2_S_PROT_MASK_generic	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_PROT_U_xscale	(L2_AP0(AP_U))
 #define	L2_S_PROT_W_xscale	(L2_AP0(AP_W))
-#define	L2_S_PROT_RO_xscale	(L2_AP(AP_R))
+#define	L2_S_PROT_RO_xscale	(0)
 #define	L2_S_PROT_MASK_xscale	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_PROT_U_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_U))
 #define	L2_S_PROT_W_armv6n	(L2_AP0(AP_W))
 #define	L2_S_PROT_RO_armv6n	(L2_AP0(AP_R) | L2_AP0(AP_RO))
 #define	L2_S_PROT_MASK_armv6n	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_PROT_U_armv7	(L2_AP0(AP_R) | L2_AP0(AP_U))
 #define	L2_S_PROT_W_armv7	(L2_AP0(AP_W))
 #define	L2_S_PROT_RO_armv7	(L2_AP0(AP_R) | L2_AP0(AP_RO))
 #define	L2_S_PROT_MASK_armv7	(L2_S_PROT_U|L2_S_PROT_W|L2_S_PROT_RO)
 
 #define	L2_S_CACHE_MASK_generic	(L2_B|L2_C)
@@ -1018,51 +1018,64 @@ extern void (*pmap_zero_page_func)(paddr
 #define	L1_C_PROTO		L1_C_PROTO_armv7
 #define	L2_S_PROTO		L2_S_PROTO_armv7
 
 #define	pmap_copy_page(s, d)	pmap_copy_page_generic((s), (d))
 #define	pmap_zero_page(d)	pmap_zero_page_generic((d))
 #endif /* ARM_NMMUS > 1 */
 
 /*
  * Macros to set and query the write permission on page descriptors.
  */
 #define l1pte_set_writable(pte)	(((pte) & ~L1_S_PROT_RO) | L1_S_PROT_W)
 #define l1pte_set_readonly(pte)	(((pte) & ~L1_S_PROT_W) | L1_S_PROT_RO)
 
-#define l2pte_set_writable(pte)	(L2_S_PROT_W == L2_S_PROT_RO ? \
-    ((pte) | L2_S_PROT_W) : (((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W))
-
-#define l2pte_set_readonly(pte)	(L2_S_PROT_W == L2_S_PROT_RO ? \
-    ((pte) & ~L2_S_PROT_RO) : (((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO))
+#define l2pte_set_writable(pte)	(((pte) & ~L2_S_PROT_RO) | L2_S_PROT_W)
+#define l2pte_set_readonly(pte)	(((pte) & ~L2_S_PROT_W) | L2_S_PROT_RO)
 
 #define l2pte_writable_p(pte)	(((pte) & L2_S_PROT_W) == L2_S_PROT_W && \
-				 (L2_S_PROT_W == L2_S_PROT_RO || \
+				 (L2_S_PROT_RO == 0 || \
 				  ((pte) & L2_S_PROT_RO) != L2_S_PROT_RO))
 
 /*
  * These macros return various bits based on kernel/user and protection.
  * Note that the compiler will usually fold these at compile time.
  */
-#define	L1_S_PROT(ku, pr)	((((ku) == PTE_USER) ? L1_S_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : \
-				  (L1_S_PROT_W == L1_S_PROT_RO ? 0 : L1_S_PROT_RO)))
-
-#define	L2_L_PROT(ku, pr)	((((ku) == PTE_USER) ? L2_L_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : \
-				  (L2_L_PROT_W == L2_L_PROT_RO ? 0 : L2_L_PROT_RO)))
-
-#define	L2_S_PROT(ku, pr)	((((ku) == PTE_USER) ? L2_S_PROT_U : 0) | \
-				 (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : \
-				  (L2_S_PROT_W == L2_S_PROT_RO ? 0 : L2_S_PROT_RO)))
+
+#define	L1_S_PROT(ku, pr)	( \
+	(((ku) == PTE_USER) ? \
+	    L1_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L1_S_PROT_W : 0) \
+	: \
+	    (((L1_S_PROT_RO && \
+		((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+		L1_S_PROT_RO : L1_S_PROT_W))) \
+    )
+
+#define	L2_L_PROT(ku, pr)	( \
+	(((ku) == PTE_USER) ? \
+	    L2_L_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_L_PROT_W : 0) \
+	: \
+	    (((L2_L_PROT_RO && \
+		((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+		L2_L_PROT_RO : L2_L_PROT_W))) \
+    )
+
+#define	L2_S_PROT(ku, pr)	( \
+	(((ku) == PTE_USER) ? \
+	    L2_S_PROT_U | (((pr) & VM_PROT_WRITE) ? L2_S_PROT_W : 0) \
+	: \
+	    (((L2_S_PROT_RO && \
+		((pr) & (VM_PROT_READ | VM_PROT_WRITE)) == VM_PROT_READ) ? \
+		L2_S_PROT_RO : L2_S_PROT_W))) \
+    )
 
 /*
  * Macros to test if a mapping is mappable with an L1 SuperSection,
  * L1 Section, or an L2 Large Page mapping.
  */
 #define	L1_SS_MAPPABLE_P(va, pa, size) \
 	((((va) | (pa)) & L1_SS_OFFSET) == 0 && (size) >= L1_SS_SIZE)
 
 #define	L1_S_MAPPABLE_P(va, pa, size) \
 	((((va) | (pa)) & L1_S_OFFSET) == 0 && (size) >= L1_S_SIZE)
 
 #define	L2_L_MAPPABLE_P(va, pa, size) \
 	((((va) | (pa)) & L2_L_OFFSET) == 0 && (size) >= L2_L_SIZE)
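
For illustration of how the reworked macro folds: with the reverted defines,
L2_S_PROT_RO expands to 0 for the generic and xscale MMU classes, so a kernel
read-only request short-circuits the && and still produces the writable
encoding, e.g.

	L2_S_PROT(PTE_KERNEL, VM_PROT_READ)
	    -> ((0 && ...) ? L2_S_PROT_RO : L2_S_PROT_W)
	    -> L2_S_PROT_W		/* no distinct RO encoding */

whereas on armv6n/armv7, where L2_S_PROT_RO is (L2_AP0(AP_R) | L2_AP0(AP_RO)),
the same expression folds to L2_S_PROT_RO, i.e. the read-only AP encoding the
log message refers to for RO kernel pages.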