Wed Apr 15 17:00:08 2020 UTC
Use large pages for the kASan shadow, same as kMSan.


(maxv)
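Some context on why large pages pay off here: kASan maps every 8 bytes of
kernel memory to one shadow byte, so a single 2MB L2 large page of shadow
covers 16MB of kernel address space and replaces 512 4KB PTEs plus the L1
page-table page holding them. A minimal sketch of that arithmetic, using
illustrative constants (only NBPD_L2 appears in the patch; the 8:1 scale
is KASAN's standard ratio, not something the diff defines):

#include <stdio.h>

/*
 * Illustrative constants; NBPD_L2 is the number of bytes mapped by one
 * L2 PTE on amd64, the 8:1 scale is KASAN's standard shadow ratio.
 */
#define NBPD_L2			(1UL << 21)	/* 2MB */
#define KASAN_SHADOW_SCALE	8UL		/* bytes of KVA per shadow byte */
#define PAGE_SIZE		4096UL

int
main(void)
{
	/* One 2MB shadow page tracks 16MB of kernel virtual address space. */
	printf("KVA covered per large shadow page: %lu MB\n",
	    (NBPD_L2 * KASAN_SHADOW_SCALE) >> 20);
	/* It replaces 512 4KB PTEs plus the L1 page holding them. */
	printf("4KB shadow pages replaced: %lu\n", NBPD_L2 / PAGE_SIZE);
	return 0;
}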
cvs diff -r1.3 -r1.4 src/sys/arch/amd64/include/asan.h

--- src/sys/arch/amd64/include/asan.h 2019/03/09 08:42:25 1.3
+++ src/sys/arch/amd64/include/asan.h 2020/04/15 17:00:07 1.4
@@ -1,14 +1,14 @@
-/*	$NetBSD: asan.h,v 1.3 2019/03/09 08:42:25 maxv Exp $	*/
+/*	$NetBSD: asan.h,v 1.4 2020/04/15 17:00:07 maxv Exp $	*/
 
 /*
  * Copyright (c) 2018 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Maxime Villard.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -82,49 +82,80 @@ __md_early_palloc(void)
 	return ret;
 }
 
 static paddr_t
 __md_palloc(void)
 {
 	paddr_t pa;
 
 	if (__predict_false(__md_early))
 		pa = __md_early_palloc();
 	else
 		pa = pmap_get_physpage();
 
+	/* The page is zeroed. */
 	return pa;
 }
 
+static inline paddr_t
+__md_palloc_large(void)
+{
+	struct pglist pglist;
+	int ret;
+
+	if (__predict_false(__md_early))
+		return 0;
+	if (!uvm.page_init_done)
+		return 0;
+
+	ret = uvm_pglistalloc(NBPD_L2, 0, ~0UL, NBPD_L2, 0,
+	    &pglist, 1, 0);
+	if (ret != 0)
+		return 0;
+
+	/* The page may not be zeroed. */
+	return VM_PAGE_TO_PHYS(TAILQ_FIRST(&pglist));
+}
+
 static void
 kasan_md_shadow_map_page(vaddr_t va)
 {
+	const pt_entry_t pteflags = PTE_W | pmap_pg_nx | PTE_P;
 	paddr_t pa;
 
 	if (!pmap_valid_entry(L4_BASE[pl4_i(va)])) {
 		pa = __md_palloc();
-		L4_BASE[pl4_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L4_BASE[pl4_i(va)] = pa | pteflags;
 	}
 	if (!pmap_valid_entry(L3_BASE[pl3_i(va)])) {
 		pa = __md_palloc();
-		L3_BASE[pl3_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L3_BASE[pl3_i(va)] = pa | pteflags;
 	}
 	if (!pmap_valid_entry(L2_BASE[pl2_i(va)])) {
+		if ((pa = __md_palloc_large()) != 0) {
+			L2_BASE[pl2_i(va)] = pa | pteflags | PTE_PS |
+			    pmap_pg_g;
+			__insn_barrier();
+			__builtin_memset((void *)va, 0, NBPD_L2);
+			return;
+		}
 		pa = __md_palloc();
-		L2_BASE[pl2_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L2_BASE[pl2_i(va)] = pa | pteflags;
+	} else if (L2_BASE[pl2_i(va)] & PTE_PS) {
+		return;
 	}
 	if (!pmap_valid_entry(L1_BASE[pl1_i(va)])) {
 		pa = __md_palloc();
-		L1_BASE[pl1_i(va)] = pa | PTE_W | pmap_pg_g | pmap_pg_nx | PTE_P;
+		L1_BASE[pl1_i(va)] = pa | pteflags | pmap_pg_g;
 	}
 }
 
 /*
  * Map only the current stack. We will map the rest in kasan_init.
  */
 static void
 kasan_md_early_init(void *stack)
 {
 	kasan_shadow_map(stack, USPACE);
 	__md_early = false;
 }
 
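A note on the new L2 path above: uvm_pglistalloc() is asked for one
physically contiguous, NBPD_L2-aligned 2MB chunk and, unlike __md_palloc(),
gives no zeroed-memory guarantee, so the code zeroes the shadow through the
freshly installed mapping, with __insn_barrier() ensuring the compiler emits
the PTE store before the memset touches the new mapping. The
"else if (... & PTE_PS)" early return matters too: a slot already backed by
a large page has no L1 level underneath, so the function must not fall
through to the 4KB logic. A self-contained sketch of this
try-large-then-fall-back shape, with hypothetical stand-ins (alloc_2mb_phys,
l2_slot) for the pmap internals, which are not NetBSD APIs:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PTE_P	0x001ULL		/* present */
#define PTE_W	0x002ULL		/* writable */
#define PTE_PS	0x080ULL		/* 2MB large-page bit in an L2 PTE */
#define NBPD_L2	(1UL << 21)

typedef uint64_t pt_entry_t;

static pt_entry_t l2_slot;		/* stands in for L2_BASE[pl2_i(va)] */

static uint64_t
alloc_2mb_phys(void)			/* stands in for __md_palloc_large() */
{
	/* Returns 0 on failure, e.g. no contiguous aligned 2MB chunk. */
	return 0x200000;
}

static void
shadow_map(void *shadow_va)
{
	uint64_t pa;

	if ((l2_slot & PTE_P) == 0 && (pa = alloc_2mb_phys()) != 0) {
		l2_slot = pa | PTE_P | PTE_W | PTE_PS;
		__asm__ __volatile__("" ::: "memory");	/* ~__insn_barrier() */
		/*
		 * No zeroed-memory guarantee: clear the whole 2MB through
		 * the freshly installed mapping before the shadow is used.
		 */
		memset(shadow_va, 0, NBPD_L2);
		return;
	}
	if (l2_slot & PTE_PS)
		return;			/* already backed by a large page */
	printf("fall back to 4KB pages\n");
}

int
main(void)
{
	void *va = malloc(NBPD_L2);	/* pretend: the shadow window */

	if (va == NULL)
		return 1;
	shadow_map(va);
	printf("L2 slot: %#llx\n", (unsigned long long)l2_slot);
	free(va);
	return 0;
}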