| @@ -1,361 +1,358 @@ | | | @@ -1,361 +1,358 @@ |
1 | /* $NetBSD: spl.S,v 1.46 2020/05/17 12:11:11 ad Exp $ */ | | 1 | /* $NetBSD: spl.S,v 1.47 2020/08/29 07:16:03 maxv Exp $ */ |
2 | | | 2 | |
3 | /* | | 3 | /* |
4 | * Copyright (c) 2003 Wasabi Systems, Inc. | | 4 | * Copyright (c) 2003 Wasabi Systems, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * Written by Frank van der Linden for Wasabi Systems, Inc. | | 7 | * Written by Frank van der Linden for Wasabi Systems, Inc. |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
15 | * notice, this list of conditions and the following disclaimer in the | | 15 | * notice, this list of conditions and the following disclaimer in the |
16 | * documentation and/or other materials provided with the distribution. | | 16 | * documentation and/or other materials provided with the distribution. |
17 | * 3. All advertising materials mentioning features or use of this software | | 17 | * 3. All advertising materials mentioning features or use of this software |
18 | * must display the following acknowledgement: | | 18 | * must display the following acknowledgement: |
19 | * This product includes software developed for the NetBSD Project by | | 19 | * This product includes software developed for the NetBSD Project by |
20 | * Wasabi Systems, Inc. | | 20 | * Wasabi Systems, Inc. |
21 | * 4. The name of Wasabi Systems, Inc. may not be used to endorse | | 21 | * 4. The name of Wasabi Systems, Inc. may not be used to endorse |
22 | * or promote products derived from this software without specific prior | | 22 | * or promote products derived from this software without specific prior |
23 | * written permission. | | 23 | * written permission. |
24 | * | | 24 | * |
25 | * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND | | 25 | * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND |
26 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED | | 26 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED |
27 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | | 27 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
28 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL WASABI SYSTEMS, INC | | 28 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL WASABI SYSTEMS, INC |
29 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | | 29 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
30 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 30 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
31 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 31 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
32 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 32 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
33 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 33 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
34 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 34 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
35 | * POSSIBILITY OF SUCH DAMAGE. | | 35 | * POSSIBILITY OF SUCH DAMAGE. |
36 | */ | | 36 | */ |
37 | | | 37 | |
38 | /* | | 38 | /* |
39 | * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc. | | 39 | * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc. |
40 | * All rights reserved. | | 40 | * All rights reserved. |
41 | * | | 41 | * |
42 | * This code is derived from software contributed to The NetBSD Foundation | | 42 | * This code is derived from software contributed to The NetBSD Foundation |
43 | * by Charles M. Hannum and Andrew Doran. | | 43 | * by Charles M. Hannum and Andrew Doran. |
44 | * | | 44 | * |
45 | * Redistribution and use in source and binary forms, with or without | | 45 | * Redistribution and use in source and binary forms, with or without |
46 | * modification, are permitted provided that the following conditions | | 46 | * modification, are permitted provided that the following conditions |
47 | * are met: | | 47 | * are met: |
48 | * 1. Redistributions of source code must retain the above copyright | | 48 | * 1. Redistributions of source code must retain the above copyright |
49 | * notice, this list of conditions and the following disclaimer. | | 49 | * notice, this list of conditions and the following disclaimer. |
50 | * 2. Redistributions in binary form must reproduce the above copyright | | 50 | * 2. Redistributions in binary form must reproduce the above copyright |
51 | * notice, this list of conditions and the following disclaimer in the | | 51 | * notice, this list of conditions and the following disclaimer in the |
52 | * documentation and/or other materials provided with the distribution. | | 52 | * documentation and/or other materials provided with the distribution. |
53 | * | | 53 | * |
54 | * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS | | 54 | * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS |
55 | * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED | | 55 | * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED |
56 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | | 56 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
57 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | | 57 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS |
58 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | | 58 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
59 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 59 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
60 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 60 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
61 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 61 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
62 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 62 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
63 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 63 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
64 | * POSSIBILITY OF SUCH DAMAGE. | | 64 | * POSSIBILITY OF SUCH DAMAGE. |
65 | */ | | 65 | */ |
66 | | | 66 | |
67 | #include "opt_ddb.h" | | 67 | #include "opt_ddb.h" |
68 | #include "opt_kasan.h" | | 68 | #include "opt_kasan.h" |
69 | #include "opt_kmsan.h" | | 69 | #include "opt_kmsan.h" |
70 | | | 70 | |
71 | #define ALIGN_TEXT .align 16,0x90 | | 71 | #define ALIGN_TEXT .align 16,0x90 |
72 | | | 72 | |
73 | #include <machine/asm.h> | | 73 | #include <machine/asm.h> |
74 | #include <machine/trap.h> | | 74 | #include <machine/trap.h> |
75 | #include <machine/segments.h> | | 75 | #include <machine/segments.h> |
76 | #include <machine/frameasm.h> | | 76 | #include <machine/frameasm.h> |
77 | | | 77 | |
78 | #include "assym.h" | | 78 | #include "assym.h" |
79 | | | 79 | |
80 | .text | | 80 | .text |
81 | | | 81 | |
82 | /* | | 82 | /* |
83 | * int splraise(int s); | | 83 | * int splraise(int s); |
84 | */ | | 84 | */ |
85 | ENTRY(splraise) | | 85 | ENTRY(splraise) |
86 | movl CPUVAR(ILEVEL),%eax | | 86 | movl CPUVAR(ILEVEL),%eax |
87 | cmpl %edi,%eax | | 87 | cmpl %edi,%eax |
88 | cmoval %eax,%edi | | 88 | cmoval %eax,%edi |
89 | movl %edi,CPUVAR(ILEVEL) | | 89 | movl %edi,CPUVAR(ILEVEL) |
90 | KMSAN_INIT_RET(4) | | 90 | KMSAN_INIT_RET(4) |
91 | ret | | 91 | ret |
92 | END(splraise) | | 92 | END(splraise) |
93 | | | 93 | |
94 | /* | | 94 | /* |
95 | * Xsoftintr() | | 95 | * Xsoftintr() |
96 | * | | 96 | * |
97 | * Switch to the LWP assigned to handle interrupts from the given | | 97 | * Switch to the LWP assigned to handle interrupts from the given |
98 | * source. We borrow the VM context from the interrupted LWP. | | 98 | * source. We borrow the VM context from the interrupted LWP. |
99 | * | | 99 | * |
100 | * On entry: | | 100 | * On entry: |
101 | * | | 101 | * |
102 | * %rax intrsource | | 102 | * %rax intrsource |
103 | * %r13 address to return to | | 103 | * %r13 address to return to |
104 | */ | | 104 | */ |
105 | IDTVEC(softintr) | | 105 | IDTVEC(softintr) |
106 | /* set up struct switchframe */ | | 106 | /* set up struct switchframe */ |
107 | pushq $_C_LABEL(softintr_ret) | | 107 | pushq $_C_LABEL(softintr_ret) |
108 | pushq %rbx | | 108 | pushq %rbx |
109 | pushq %r12 | | 109 | pushq %r12 |
110 | pushq %r13 | | 110 | pushq %r13 |
111 | pushq %r14 | | 111 | pushq %r14 |
112 | pushq %r15 | | 112 | pushq %r15 |
113 | | | 113 | |
114 | movl $IPL_HIGH,CPUVAR(ILEVEL) | | 114 | movl $IPL_HIGH,CPUVAR(ILEVEL) |
115 | movq CPUVAR(CURLWP),%r15 | | 115 | movq CPUVAR(CURLWP),%r15 |
116 | movq IS_LWP(%rax),%rdi /* switch to handler LWP */ | | 116 | movq IS_LWP(%rax),%rdi /* switch to handler LWP */ |
117 | movq L_PCB(%rdi),%rdx | | 117 | movq L_PCB(%rdi),%rdx |
118 | movq L_PCB(%r15),%rcx | | 118 | movq L_PCB(%r15),%rcx |
119 | movq %rdi,CPUVAR(CURLWP) | | 119 | movq %rdi,CPUVAR(CURLWP) |
120 | | | 120 | |
121 | #ifdef KASAN | | 121 | #ifdef KASAN |
122 | /* clear the new stack */ | | 122 | /* clear the new stack */ |
123 | pushq %rax | | 123 | pushq %rax |
124 | pushq %rdx | | 124 | pushq %rdx |
125 | pushq %rcx | | 125 | pushq %rcx |
126 | callq _C_LABEL(kasan_softint) | | 126 | callq _C_LABEL(kasan_softint) |
127 | popq %rcx | | 127 | popq %rcx |
128 | popq %rdx | | 128 | popq %rdx |
129 | popq %rax | | 129 | popq %rax |
130 | #endif | | 130 | #endif |
131 | | | 131 | |
132 | #ifdef KMSAN | | 132 | #ifdef KMSAN |
133 | pushq %rax | | 133 | pushq %rax |
134 | pushq %rdx | | 134 | pushq %rdx |
135 | pushq %rcx | | 135 | pushq %rcx |
136 | callq _C_LABEL(kmsan_softint) | | 136 | callq _C_LABEL(kmsan_softint) |
137 | popq %rcx | | 137 | popq %rcx |
138 | popq %rdx | | 138 | popq %rdx |
139 | popq %rax | | 139 | popq %rax |
140 | #endif | | 140 | #endif |
141 | | | 141 | |
142 | /* save old context */ | | 142 | /* save old context */ |
143 | movq %rsp,PCB_RSP(%rcx) | | 143 | movq %rsp,PCB_RSP(%rcx) |
144 | movq %rbp,PCB_RBP(%rcx) | | 144 | movq %rbp,PCB_RBP(%rcx) |
145 | | | 145 | |
146 | /* switch to the new stack */ | | 146 | /* switch to the new stack */ |
147 | movq PCB_RSP0(%rdx),%rsp | | 147 | movq PCB_RSP0(%rdx),%rsp |
148 | | | 148 | |
149 | /* dispatch */ | | 149 | /* dispatch */ |
150 | STI(di) | | 150 | STI(di) |
151 | movq %r15,%rdi /* interrupted LWP */ | | 151 | movq %r15,%rdi /* interrupted LWP */ |
152 | movl IS_MAXLEVEL(%rax),%esi /* ipl to run at */ | | 152 | movl IS_MAXLEVEL(%rax),%esi /* ipl to run at */ |
153 | call _C_LABEL(softint_dispatch)/* run handlers */ | | 153 | call _C_LABEL(softint_dispatch)/* run handlers */ |
154 | CLI(di) | | 154 | CLI(di) |
155 | | | 155 | |
156 | /* restore old context */ | | 156 | /* restore old context */ |
157 | movq L_PCB(%r15),%rcx | | 157 | movq L_PCB(%r15),%rcx |
158 | movq PCB_RSP(%rcx),%rsp | | 158 | movq PCB_RSP(%rcx),%rsp |
159 | | | 159 | |
160 | /* | | 160 | /* |
161 | * for non-interlocked mutex release to work safely the change | | 161 | * for non-interlocked mutex release to work safely the change |
162 | * to ci_curlwp must not languish in the store buffer. therefore | | 162 | * to ci_curlwp must not languish in the store buffer. therefore |
163 | * we use XCHG and not MOV here. see kern_mutex.c. | | 163 | * we use XCHG and not MOV here. see kern_mutex.c. |
164 | */ | | 164 | */ |
165 | xchgq %r15,CPUVAR(CURLWP) /* restore curlwp */ | | 165 | xchgq %r15,CPUVAR(CURLWP) /* restore curlwp */ |
166 | popq %r15 /* unwind switchframe */ | | 166 | popq %r15 /* unwind switchframe */ |
167 | addq $(5 * 8),%rsp | | 167 | addq $(5 * 8),%rsp |
168 | jmp *%r13 /* back to Xspllower/Xdoreti */ | | 168 | jmp *%r13 /* back to Xspllower/Xdoreti */ |
169 | IDTVEC_END(softintr) | | 169 | IDTVEC_END(softintr) |
170 | | | 170 | |
171 | /* | | 171 | /* |
172 | * softintr_ret() | | 172 | * softintr_ret() |
173 | * | | 173 | * |
174 | * Trampoline function that gets returned to by cpu_switchto() when | | 174 | * Trampoline function that gets returned to by cpu_switchto() when |
175 | * an interrupt handler blocks. On entry: | | 175 | * an interrupt handler blocks. On entry: |
176 | * | | 176 | * |
177 | * %rax prevlwp from cpu_switchto() | | 177 | * %rax prevlwp from cpu_switchto() |
178 | */ | | 178 | */ |
179 | ENTRY(softintr_ret) | | 179 | ENTRY(softintr_ret) |
180 | incl CPUVAR(MTX_COUNT) /* re-adjust after mi_switch */ | | 180 | incl CPUVAR(MTX_COUNT) /* re-adjust after mi_switch */ |
181 | CLI(ax) /* %rax not used by Xspllower/Xdoreti */ | | 181 | CLI(ax) /* %rax not used by Xspllower/Xdoreti */ |
182 | jmp *%r13 /* back to Xspllower/Xdoreti */ | | 182 | jmp *%r13 /* back to Xspllower/Xdoreti */ |
183 | END(softintr_ret) | | 183 | END(softintr_ret) |
184 | | | 184 | |
185 | /* | | 185 | /* |
186 | * void softint_trigger(uintptr_t machdep); | | 186 | * void softint_trigger(uintptr_t machdep); |
187 | * | | 187 | * |
188 | * Software interrupt registration. | | 188 | * Software interrupt registration. |
189 | */ | | 189 | */ |
190 | ENTRY(softint_trigger) | | 190 | ENTRY(softint_trigger) |
191 | orl %edi,CPUVAR(IPENDING) /* atomic on local cpu */ | | 191 | orl %edi,CPUVAR(IPENDING) /* atomic on local cpu */ |
192 | ret | | 192 | ret |
193 | END(softint_trigger) | | 193 | END(softint_trigger) |
194 | | | 194 | |
195 | /* | | 195 | /* |
196 | * Xrecurse_preempt() | | 196 | * Xrecurse_preempt() |
197 | * | | 197 | * |
198 | * Handles preemption interrupts via Xspllower(). | | 198 | * Handles preemption interrupts via Xspllower(). |
199 | */ | | 199 | */ |
200 | IDTVEC(recurse_preempt) | | 200 | IDTVEC(recurse_preempt) |
201 | movl $IPL_PREEMPT,CPUVAR(ILEVEL) | | 201 | movl $IPL_PREEMPT,CPUVAR(ILEVEL) |
202 | STI(di) | | 202 | STI(di) |
203 | xorq %rdi,%rdi | | 203 | xorq %rdi,%rdi |
204 | KMSAN_INIT_ARG(8) | | 204 | KMSAN_INIT_ARG(8) |
205 | call _C_LABEL(kpreempt) | | 205 | call _C_LABEL(kpreempt) |
206 | CLI(di) | | 206 | CLI(di) |
207 | jmp *%r13 /* back to Xspllower */ | | 207 | jmp *%r13 /* back to Xspllower */ |
208 | IDTVEC_END(recurse_preempt) | | 208 | IDTVEC_END(recurse_preempt) |
209 | | | 209 | |
210 | /* | | 210 | /* |
211 | * Xresume_preempt() | | 211 | * Xresume_preempt() |
212 | * | | 212 | * |
213 | * Handles preemption interrupts via Xdoreti(). | | 213 | * Handles preemption interrupts via Xdoreti(). |
214 | */ | | 214 | */ |
215 | IDTVEC(resume_preempt) | | 215 | IDTVEC(resume_preempt) |
216 | movl $IPL_PREEMPT,CPUVAR(ILEVEL) | | 216 | movl $IPL_PREEMPT,CPUVAR(ILEVEL) |
217 | STI(ax) | | 217 | STI(ax) |
218 | testq $SEL_RPL,TF_CS(%rsp) | | 218 | testq $SEL_RPL,TF_CS(%rsp) |
219 | jnz 1f | | 219 | jnz 1f |
220 | movq TF_RIP(%rsp),%rdi | | 220 | movq TF_RIP(%rsp),%rdi |
221 | KMSAN_INIT_ARG(8) | | 221 | KMSAN_INIT_ARG(8) |
222 | call _C_LABEL(kpreempt) /* from kernel */ | | 222 | call _C_LABEL(kpreempt) /* from kernel */ |
223 | CLI(ax) | | 223 | CLI(ax) |
224 | jmp *%r13 /* back to Xdoreti */ | | 224 | jmp *%r13 /* back to Xdoreti */ |
225 | 1: | | 225 | 1: |
226 | call _C_LABEL(preempt) /* from user */ | | 226 | call _C_LABEL(preempt) /* from user */ |
227 | CLI(ax) | | 227 | CLI(ax) |
228 | jmp *%r13 /* back to Xdoreti */ | | 228 | jmp *%r13 /* back to Xdoreti */ |
229 | IDTVEC_END(resume_preempt) | | 229 | IDTVEC_END(resume_preempt) |
230 | | | 230 | |
231 | /* | | 231 | /* |
232 | * void spllower(int s); | | 232 | * void spllower(int s); |
233 | * | | 233 | * |
234 | * For cmpxchg8b, edx/ecx are the high words and eax/ebx the low. | | 234 | * For cmpxchg8b, edx/ecx are the high words and eax/ebx the low. |
235 | * | | 235 | * |
236 | * edx : eax = old level / old ipending | | 236 | * edx : eax = old level / old ipending |
237 | * ecx : ebx = new level / old ipending | | 237 | * ecx : ebx = new level / old ipending |
238 | */ | | 238 | */ |
239 | ENTRY(spllower) | | 239 | ENTRY(spllower) |
240 | movl CPUVAR(ILEVEL),%edx | | 240 | movl CPUVAR(ILEVEL),%edx |
241 | movq %rbx,%r8 | | 241 | movq %rbx,%r8 |
242 | cmpl %edx,%edi /* new level is lower? */ | | 242 | cmpl %edx,%edi /* new level is lower? */ |
243 | jae 1f | | 243 | jae 1f |
244 | 0: | | 244 | 0: |
245 | movl CPUVAR(IPENDING),%eax | | 245 | movl CPUVAR(IPENDING),%eax |
246 | movl %edi,%ecx | | 246 | movl %edi,%ecx |
247 | testl %eax,CPUVAR(IUNMASK)(,%rcx,4)/* deferred interrupts? */ | | 247 | testl %eax,CPUVAR(IUNMASK)(,%rcx,4)/* deferred interrupts? */ |
248 | movl %eax,%ebx | | 248 | movl %eax,%ebx |
249 | /* | | 249 | /* |
250 | * On the P4 this jump is cheaper than patching in junk | | 250 | * On the P4 this jump is cheaper than patching in junk |
251 | * using cmov. Is cmpxchg expensive if it fails? | | 251 | * using cmov. Is cmpxchg expensive if it fails? |
252 | */ | | 252 | */ |
253 | jnz 2f | | 253 | jnz 2f |
254 | cmpxchg8b CPUVAR(ISTATE) /* swap in new ilevel */ | | 254 | cmpxchg8b CPUVAR(ISTATE) /* swap in new ilevel */ |
255 | jnz 0b | | 255 | jnz 0b |
256 | 1: | | 256 | 1: |
257 | movq %r8,%rbx | | 257 | movq %r8,%rbx |
258 | ret | | 258 | ret |
259 | 2: | | 259 | 2: |
260 | movq %r8,%rbx | | 260 | movq %r8,%rbx |
261 | jmp _C_LABEL(Xspllower) | | 261 | jmp _C_LABEL(Xspllower) |
262 | END(spllower) | | 262 | END(spllower) |
263 | | | 263 | |
264 | /* | | 264 | /* |
265 | * void Xspllower(int s); | | 265 | * void Xspllower(int s); |
266 | * | | 266 | * |
267 | * Process pending interrupts. | | 267 | * Process pending interrupts. |
268 | * | | 268 | * |
269 | * Important registers: | | 269 | * Important registers: |
270 | * ebx - cpl | | 270 | * ebx - cpl |
271 | * r13 - address to resume loop at | | 271 | * r13 - address to resume loop at |
272 | * | | 272 | * |
273 | * It is important that the bit scan instruction is bsr, it will get | | 273 | * It is important that the bit scan instruction is bsr, it will get |
274 | * the highest 2 bits (currently the IPI and clock handlers) first, | | 274 | * the highest 2 bits (currently the IPI and clock handlers) first, |
275 | * to avoid deadlocks where one CPU sends an IPI, another one is at | | 275 | * to avoid deadlocks where one CPU sends an IPI, another one is at |
276 | * splhigh() and defers it, lands in here via splx(), and handles | | 276 | * splhigh() and defers it, lands in here via splx(), and handles |
277 | * a lower-prio one first, which needs to take the kernel lock --> | | 277 | * a lower-prio one first, which needs to take the kernel lock --> |
278 | * the sending CPU will never see that CPU accept the IPI | | 278 | * the sending CPU will never see that CPU accept the IPI |
279 | * (see pmap_tlb_shootnow). | | 279 | * (see pmap_tlb_shootnow). |
280 | */ | | 280 | */ |
281 | IDTVEC(spllower) | | 281 | IDTVEC(spllower) |
282 | pushq %rbx | | 282 | pushq %rbx |
283 | pushq %r13 | | 283 | pushq %r13 |
284 | pushq %r12 | | 284 | pushq %r12 |
285 | movl %edi,%ebx | | 285 | movl %edi,%ebx |
286 | leaq 1f(%rip),%r13 /* address to resume loop at */ | | 286 | leaq 1f(%rip),%r13 /* address to resume loop at */ |
287 | 1: | | 287 | 1: |
288 | movl %ebx,%eax /* get cpl */ | | 288 | movl %ebx,%eax /* get cpl */ |
289 | movl CPUVAR(IUNMASK)(,%rax,4),%eax | | 289 | movl CPUVAR(IUNMASK)(,%rax,4),%eax |
290 | CLI(si) | | 290 | CLI(si) |
291 | andl CPUVAR(IPENDING),%eax /* any non-masked bits left? */ | | 291 | andl CPUVAR(IPENDING),%eax /* any non-masked bits left? */ |
292 | jz 2f | | 292 | jz 2f |
293 | bsrl %eax,%eax | | 293 | bsrl %eax,%eax |
294 | btrl %eax,CPUVAR(IPENDING) | | 294 | btrl %eax,CPUVAR(IPENDING) |
295 | movq CPUVAR(ISOURCES)(,%rax,8),%rax | | 295 | movq CPUVAR(ISOURCES)(,%rax,8),%rax |
296 | jmp *IS_RECURSE(%rax) | | 296 | jmp *IS_RECURSE(%rax) |
297 | 2: | | 297 | 2: |
298 | movl %ebx,CPUVAR(ILEVEL) | | 298 | movl %ebx,CPUVAR(ILEVEL) |
299 | STI(si) | | 299 | STI(si) |
300 | popq %r12 | | 300 | popq %r12 |
301 | popq %r13 | | 301 | popq %r13 |
302 | popq %rbx | | 302 | popq %rbx |
303 | ret | | 303 | ret |
304 | IDTVEC_END(spllower) | | 304 | IDTVEC_END(spllower) |
305 | | | 305 | |
306 | /* | | 306 | /* |
307 | * void Xdoreti(void); | | 307 | * void Xdoreti(void); |
308 | * | | 308 | * |
309 | * Handle return from interrupt after device handler finishes. | | 309 | * Handle return from interrupt after device handler finishes. |
310 | * | | 310 | * |
311 | * Important registers: | | 311 | * Important registers: |
312 | * ebx - cpl to restore | | 312 | * ebx - cpl to restore |
313 | * r13 - address to resume loop at | | 313 | * r13 - address to resume loop at |
314 | */ | | 314 | */ |
315 | IDTVEC(doreti) | | 315 | IDTVEC(doreti) |
316 | popq %rbx /* get previous priority */ | | 316 | popq %rbx /* get previous priority */ |
317 | decl CPUVAR(IDEPTH) | | 317 | decl CPUVAR(IDEPTH) |
318 | leaq 1f(%rip),%r13 | | 318 | leaq 1f(%rip),%r13 |
319 | 1: | | 319 | 1: |
320 | movl %ebx,%eax | | 320 | movl %ebx,%eax |
321 | movl CPUVAR(IUNMASK)(,%rax,4),%eax | | 321 | movl CPUVAR(IUNMASK)(,%rax,4),%eax |
322 | CLI(si) | | 322 | CLI(si) |
323 | andl CPUVAR(IPENDING),%eax | | 323 | andl CPUVAR(IPENDING),%eax |
324 | jz 2f | | 324 | jz 2f |
325 | bsrl %eax,%eax /* slow, but not worth optimizing */ | | 325 | bsrl %eax,%eax /* slow, but not worth optimizing */ |
326 | btrl %eax,CPUVAR(IPENDING) | | 326 | btrl %eax,CPUVAR(IPENDING) |
327 | movq CPUVAR(ISOURCES)(,%rax,8),%rax | | 327 | movq CPUVAR(ISOURCES)(,%rax,8),%rax |
328 | jmp *IS_RESUME(%rax) | | 328 | jmp *IS_RESUME(%rax) |
329 | 2: /* Check for ASTs on exit to user mode. */ | | 329 | 2: /* Check for ASTs on exit to user mode. */ |
330 | movl %ebx,CPUVAR(ILEVEL) | | 330 | movl %ebx,CPUVAR(ILEVEL) |
331 | 5: | | 331 | 5: |
332 | testb $SEL_RPL,TF_CS(%rsp) | | 332 | testb $SEL_RPL,TF_CS(%rsp) |
333 | jz 6f | | 333 | jz 6f |
334 | | | 334 | .Ldoreti_checkast: |
335 | .type _C_LABEL(doreti_checkast), @function | | | |
336 | LABEL(doreti_checkast) | | | |
337 | movq CPUVAR(CURLWP),%r14 | | 335 | movq CPUVAR(CURLWP),%r14 |
338 | CHECK_ASTPENDING(%r14) | | 336 | CHECK_ASTPENDING(%r14) |
339 | je 3f | | 337 | je 3f |
340 | CLEAR_ASTPENDING(%r14) | | 338 | CLEAR_ASTPENDING(%r14) |
341 | STI(si) | | 339 | STI(si) |
342 | movl $T_ASTFLT,TF_TRAPNO(%rsp) /* XXX undo later.. */ | | 340 | movl $T_ASTFLT,TF_TRAPNO(%rsp) /* XXX undo later.. */ |
343 | /* Pushed T_ASTFLT into tf_trapno on entry. */ | | 341 | /* Pushed T_ASTFLT into tf_trapno on entry. */ |
344 | movq %rsp,%rdi | | 342 | movq %rsp,%rdi |
345 | KMSAN_INIT_ARG(8) | | 343 | KMSAN_INIT_ARG(8) |
346 | call _C_LABEL(trap) | | 344 | call _C_LABEL(trap) |
347 | CLI(si) | | 345 | CLI(si) |
348 | jmp doreti_checkast | | 346 | jmp .Ldoreti_checkast |
349 | 3: | | 347 | 3: |
350 | CHECK_DEFERRED_SWITCH | | 348 | CHECK_DEFERRED_SWITCH |
351 | jnz 9f | | 349 | jnz 9f |
352 | HANDLE_DEFERRED_FPU | | 350 | HANDLE_DEFERRED_FPU |
353 | 6: | | 351 | 6: |
354 | INTRFASTEXIT | | 352 | INTRFASTEXIT |
355 | 9: | | 353 | 9: |
356 | STI(si) | | 354 | STI(si) |
357 | call _C_LABEL(do_pmap_load) | | 355 | call _C_LABEL(do_pmap_load) |
358 | CLI(si) | | 356 | CLI(si) |
359 | jmp doreti_checkast /* recheck ASTs */ | | 357 | jmp .Ldoreti_checkast /* recheck ASTs */ |
360 | END(doreti_checkast) | | | |
361 | IDTVEC_END(doreti) | | 358 | IDTVEC_END(doreti) |