Wed Apr 15 16:28:29 2020 UTC
Use large pages for the kMSan shadows. This greatly improves performance
and slightly reduces memory consumption.


(maxv)
diff -r1.1 -r1.2 src/sys/arch/amd64/include/msan.h
diff -r1.9 -r1.10 src/sys/kern/subr_msan.c
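
For context on where the gains come from: each 2 MB-aligned, 2 MB-sized chunk of
the shadow can now be backed by a single L2 superpage (PTE_PS) instead of 512
individual 4 KB L1 entries, which also removes the 4 KB page-table page that would
otherwise hold those entries. The userland program below is only a sketch of that
arithmetic, using the amd64 constants (4 KB base pages, 2 MB L2 pages); the 64 MB
span and the local macro definitions are made up for the illustration and are not
taken from the kernel.

/*
 * Illustrative userland sketch (not kernel code): compare the number of
 * leaf entries and page-table pages needed to map a shadow region with
 * 4 KB pages versus 2 MB (PTE_PS) large pages on amd64.
 */
#include <stdio.h>

#define NBPG	4096UL			/* base page: 4 KB */
#define NBPD_L2	(512UL * NBPG)		/* L2 superpage: 2 MB */

int
main(void)
{
	unsigned long span = 64UL * 1024 * 1024;	/* example: 64 MB of shadow */

	unsigned long small_ptes = span / NBPG;		/* one L1 PTE per 4 KB */
	unsigned long l1_pages = small_ptes / 512;	/* 512 PTEs per 4 KB PT page */
	unsigned long large_pdes = span / NBPD_L2;	/* one L2 PDE per 2 MB */

	printf("4 KB pages: %lu L1 PTEs, %lu L1 page-table pages (%lu KB)\n",
	    small_ptes, l1_pages, l1_pages * NBPG / 1024);
	printf("2 MB pages: %lu L2 PDEs, no L1 page-table pages\n", large_pdes);
	return 0;
}

Fewer leaf entries means fewer TLB entries and shorter page walks on shadow
accesses (the performance win), and no L1 page-table pages behind the
large-mapped ranges (the small memory saving).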

cvs diff -r1.1 -r1.2 src/sys/arch/amd64/include/msan.h

--- src/sys/arch/amd64/include/msan.h 2019/11/14 16:23:52 1.1
+++ src/sys/arch/amd64/include/msan.h 2020/04/15 16:28:29 1.2
@@ -1,14 +1,14 @@
-/*	$NetBSD: msan.h,v 1.1 2019/11/14 16:23:52 maxv Exp $	*/
+/*	$NetBSD: msan.h,v 1.2 2020/04/15 16:28:29 maxv Exp $	*/
 
 /*
  * Copyright (c) 2019 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Maxime Villard.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -87,51 +87,82 @@ kmsan_md_addr_to_orig(const void *addr)
 	}
 }
 
 static inline bool
 kmsan_md_unsupported(vaddr_t addr)
 {
 	return (addr >= (vaddr_t)PTE_BASE &&
 	    addr < ((vaddr_t)PTE_BASE + NBPD_L4));
 }
 
 static inline paddr_t
 __md_palloc(void)
 {
+	/* The page is zeroed. */
 	return pmap_get_physpage();
 }
 
+static inline paddr_t
+__md_palloc_large(void)
+{
+	struct pglist pglist;
+	int ret;
+
+	if (!uvm.page_init_done)
+		return 0;
+
+	kmsan_init_arg(sizeof(psize_t) + 4 * sizeof(paddr_t) +
+	    sizeof(struct pglist *) + 2 * sizeof(int));
+	ret = uvm_pglistalloc(NBPD_L2, 0, ~0UL, NBPD_L2, 0,
+	    &pglist, 1, 0);
+	if (ret != 0)
+		return 0;
+
+	/* The page may not be zeroed. */
+	return VM_PAGE_TO_PHYS(TAILQ_FIRST(&pglist));
+}
+
 static void
 kmsan_md_shadow_map_page(vaddr_t va)
 {
+	const pt_entry_t pteflags = PTE_W | pmap_pg_nx | PTE_P;
 	paddr_t pa;
 
 	KASSERT(va >= __MD_SHADOW_START && va < __MD_SHADOW_END);
 
 	if (!pmap_valid_entry(L4_BASE[pl4_i(va)])) {
 		pa = __md_palloc();
-		L4_BASE[pl4_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L4_BASE[pl4_i(va)] = pa | pteflags;
 	}
 	if (!pmap_valid_entry(L3_BASE[pl3_i(va)])) {
 		pa = __md_palloc();
-		L3_BASE[pl3_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L3_BASE[pl3_i(va)] = pa | pteflags;
 	}
 	if (!pmap_valid_entry(L2_BASE[pl2_i(va)])) {
+		if ((pa = __md_palloc_large()) != 0) {
+			L2_BASE[pl2_i(va)] = pa | pteflags | PTE_PS |
+			    pmap_pg_g;
+			__insn_barrier();
+			__builtin_memset((void *)va, 0, NBPD_L2);
+			return;
+		}
 		pa = __md_palloc();
-		L2_BASE[pl2_i(va)] = pa | PTE_W | pmap_pg_nx | PTE_P;
+		L2_BASE[pl2_i(va)] = pa | pteflags;
+	} else if (L2_BASE[pl2_i(va)] & PTE_PS) {
+		return;
 	}
 	if (!pmap_valid_entry(L1_BASE[pl1_i(va)])) {
 		pa = __md_palloc();
-		L1_BASE[pl1_i(va)] = pa | PTE_W | pmap_pg_g | pmap_pg_nx | PTE_P;
+		L1_BASE[pl1_i(va)] = pa | pteflags | pmap_pg_g;
 	}
 }
 
 static void
 kmsan_md_init(void)
 {
 	extern struct bootspace bootspace;
 	size_t i;
 
 	CTASSERT((__MD_SHADOW_SIZE / NBPD_L4) == NL4_SLOT_KMSAN);
 
 	/* Kernel. */
 	for (i = 0; i < BTSPACE_NSEGS; i++) {
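
Two details in the msan.h change are worth spelling out. First, __md_palloc_large()
returns 0 when uvm_pglistalloc() cannot supply a physically contiguous chunk of
NBPD_L2 bytes aligned to NBPD_L2, and kmsan_md_shadow_map_page() then falls back
to 4 KB pages; since the pages from uvm_pglistalloc() may not be zeroed, the
freshly mapped 2 MB window is cleared with __builtin_memset() after the PTE write
and the barrier. Second, the kmsan_init_arg() call before uvm_pglistalloc() zeroes
the per-LWP argument-shadow area for exactly as many bytes as the callee's
parameters occupy: the KMSAN runtime (which this header is part of) is compiled
without instrumentation, so nothing else would populate that shadow before the
call. The byte count mirrors uvm_pglistalloc()'s parameter list: a psize_t size,
four paddr_t values (low, high, alignment, boundary), a struct pglist pointer, and
two ints (nsegs, waitok). Below is a small userland sketch of that sizing; the
typedefs are stand-ins assumed to match the amd64 kernel types, not the kernel's
own definitions.

/*
 * Hedged sketch of the kmsan_init_arg() size computation. The typedefs are
 * assumptions (64-bit psize_t/paddr_t on amd64) made only for this example.
 */
#include <assert.h>
#include <stddef.h>

typedef unsigned long psize_t;	/* assumption: 64-bit */
typedef unsigned long paddr_t;	/* assumption: 64-bit */
struct pglist;			/* opaque; only a pointer is passed */

/* uvm_pglistalloc(size, low, high, alignment, boundary, rlist, nsegs, waitok) */
static const size_t uvm_pglistalloc_argbytes =
    sizeof(psize_t) +		/* size */
    4 * sizeof(paddr_t) +	/* low, high, alignment, boundary */
    sizeof(struct pglist *) +	/* rlist */
    2 * sizeof(int);		/* nsegs, waitok */

int
main(void)
{
	/* 8 + 4*8 + 8 + 2*4 = 56 bytes of argument shadow to clear. */
	assert(uvm_pglistalloc_argbytes == 56);
	return 0;
}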

cvs diff -r1.9 -r1.10 src/sys/kern/subr_msan.c

--- src/sys/kern/subr_msan.c 2020/04/03 18:26:14 1.9
+++ src/sys/kern/subr_msan.c 2020/04/15 16:28:28 1.10
@@ -1,14 +1,14 @@
-/*	$NetBSD: subr_msan.c,v 1.9 2020/04/03 18:26:14 maxv Exp $	*/
+/*	$NetBSD: subr_msan.c,v 1.10 2020/04/15 16:28:28 maxv Exp $	*/
 
 /*
  * Copyright (c) 2019-2020 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Maxime Villard.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -20,46 +20,49 @@
  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: subr_msan.c,v 1.9 2020/04/03 18:26:14 maxv Exp $");
+__KERNEL_RCSID(0, "$NetBSD: subr_msan.c,v 1.10 2020/04/15 16:28:28 maxv Exp $");
 
 #include <sys/param.h>
 #include <sys/device.h>
 #include <sys/kernel.h>
 #include <sys/param.h>
 #include <sys/conf.h>
 #include <sys/systm.h>
 #include <sys/types.h>
 #include <sys/kprintf.h>
 #include <sys/kmem.h>
 #include <sys/mbuf.h>
 #include <sys/buf.h>
 #include <sys/cpu.h>
 #include <sys/msan.h>
 
 #include <uvm/uvm.h>
 
 static void kmsan_printf(const char *, ...);
 
+void kmsan_init_arg(size_t);
+void kmsan_init_ret(size_t);
+
 #ifdef KMSAN_PANIC
 #define	REPORT panic
 #else
 #define	REPORT kmsan_printf
 #endif
 
 /* -------------------------------------------------------------------------- */
 
 /*
  * Part of the compiler ABI.
  */
 
 typedef uint32_t msan_orig_t;
@@ -346,29 +349,26 @@ kmsan_shadow_check(const void *addr, siz
 		return;
 	if (__predict_false(kmsan_md_unsupported((vaddr_t)addr)))
 		return;
 
 	shad = kmsan_md_addr_to_shad(addr);
 	for (i = 0; i < size; i++) {
 		if (__predict_true(shad[i] == 0))
 			continue;
 		kmsan_report_hook((const char *)addr + i, size, i, hook);
 		break;
 	}
 }
 
-void kmsan_init_arg(size_t);
-void kmsan_init_ret(size_t);
-
 void
 kmsan_init_arg(size_t n)
 {
 	msan_lwp_t *lwp;
 	uint8_t *arg;
 
 	if (__predict_false(!kmsan_enabled))
 		return;
 	lwp = curlwp->l_kmsan;
 	arg = lwp->tls[lwp->ctx].param;
 	__builtin_memset(arg, 0, n);
 }
 
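
The subr_msan.c side of the change only hoists the kmsan_init_arg() and
kmsan_init_ret() prototypes above the point where the machine-dependent header is
pulled in, so that amd64's new __md_palloc_large() can call kmsan_init_arg(). As
the function body above shows, kmsan_init_arg(n) simply clears the first n bytes
of the current LWP's "param" shadow area, marking the next callee's arguments as
initialized. The userland model below illustrates that mechanism only; the buffer,
its size, and model_kmsan_init_arg() are made-up stand-ins for
lwp->tls[lwp->ctx].param, not kernel code.

/*
 * Minimal userland model of the kmsan_init_arg() pattern: an uninstrumented
 * caller clears exactly the callee's parameter bytes in a per-thread "param"
 * shadow so the callee sees its arguments as initialized (shadow == 0).
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define PARAM_SHADOW_SIZE 800	/* arbitrary size for the model */

static uint8_t param_shadow[PARAM_SHADOW_SIZE];	/* stands in for lwp->tls[ctx].param */

static void
model_kmsan_init_arg(size_t n)
{
	/* Shadow value 0 means "initialized"; nonzero means "uninitialized". */
	memset(param_shadow, 0, n);
}

int
main(void)
{
	/* Pretend a previous call left poisoned shadow behind. */
	memset(param_shadow, 0xff, sizeof(param_shadow));

	/* About to pass (void *, size_t, int): clear that many shadow bytes. */
	model_kmsan_init_arg(sizeof(void *) + sizeof(size_t) + sizeof(int));

	printf("first 24 shadow bytes: ");
	for (size_t i = 0; i < 24; i++)
		printf("%02x", param_shadow[i]);
	printf("\n");
	return 0;
}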