| @@ -1,1000 +1,1000 @@ | | | @@ -1,1000 +1,1000 @@ |
1 | /* $NetBSD: locore.s,v 1.284 2008/07/10 15:38:40 nakayama Exp $ */ | | 1 | /* $NetBSD: locore.s,v 1.285 2008/10/05 02:00:53 nakayama Exp $ */ |
2 | | | 2 | |
3 | /* | | 3 | /* |
4 | * Copyright (c) 1996-2002 Eduardo Horvath | | 4 | * Copyright (c) 1996-2002 Eduardo Horvath |
5 | * Copyright (c) 1996 Paul Kranenburg | | 5 | * Copyright (c) 1996 Paul Kranenburg |
6 | * Copyright (c) 1996 | | 6 | * Copyright (c) 1996 |
7 | * The President and Fellows of Harvard College. | | 7 | * The President and Fellows of Harvard College. |
8 | * All rights reserved. | | 8 | * All rights reserved. |
9 | * Copyright (c) 1992, 1993 | | 9 | * Copyright (c) 1992, 1993 |
10 | * The Regents of the University of California. | | 10 | * The Regents of the University of California. |
11 | * All rights reserved. | | 11 | * All rights reserved. |
12 | * | | 12 | * |
13 | * This software was developed by the Computer Systems Engineering group | | 13 | * This software was developed by the Computer Systems Engineering group |
14 | * at Lawrence Berkeley Laboratory under DARPA contract BG 91-66 and | | 14 | * at Lawrence Berkeley Laboratory under DARPA contract BG 91-66 and |
15 | * contributed to Berkeley. | | 15 | * contributed to Berkeley. |
16 | * | | 16 | * |
17 | * All advertising materials mentioning features or use of this software | | 17 | * All advertising materials mentioning features or use of this software |
18 | * must display the following acknowledgement: | | 18 | * must display the following acknowledgement: |
19 | * This product includes software developed by the University of | | 19 | * This product includes software developed by the University of |
20 | * California, Lawrence Berkeley Laboratory. | | 20 | * California, Lawrence Berkeley Laboratory. |
21 | * This product includes software developed by Harvard University. | | 21 | * This product includes software developed by Harvard University. |
22 | * | | 22 | * |
23 | * Redistribution and use in source and binary forms, with or without | | 23 | * Redistribution and use in source and binary forms, with or without |
24 | * modification, are permitted provided that the following conditions | | 24 | * modification, are permitted provided that the following conditions |
25 | * are met: | | 25 | * are met: |
26 | * 1. Redistributions of source code must retain the above copyright | | 26 | * 1. Redistributions of source code must retain the above copyright |
27 | * notice, this list of conditions and the following disclaimer. | | 27 | * notice, this list of conditions and the following disclaimer. |
28 | * 2. Redistributions in binary form must reproduce the above copyright | | 28 | * 2. Redistributions in binary form must reproduce the above copyright |
29 | * notice, this list of conditions and the following disclaimer in the | | 29 | * notice, this list of conditions and the following disclaimer in the |
30 | * documentation and/or other materials provided with the | | 30 | * documentation and/or other materials provided with the |
31 | * distribution. | | 31 | * distribution. |
32 | * 3. All advertising materials mentioning features or use of this | | 32 | * 3. All advertising materials mentioning features or use of this |
33 | * software must display the following acknowledgement: | | 33 | * software must display the following acknowledgement: |
34 | * This product includes software developed by the University of | | 34 | * This product includes software developed by the University of |
35 | * California, Berkeley and its contributors. | | 35 | * California, Berkeley and its contributors. |
36 | * This product includes software developed by Harvard University. | | 36 | * This product includes software developed by Harvard University. |
37 | * This product includes software developed by Paul Kranenburg. | | 37 | * This product includes software developed by Paul Kranenburg. |
38 | * 4. Neither the name of the University nor the names of its | | 38 | * 4. Neither the name of the University nor the names of its |
39 | * contributors may be used to endorse or promote products derived | | 39 | * contributors may be used to endorse or promote products derived |
40 | * from this software without specific prior written permission. | | 40 | * from this software without specific prior written permission. |
41 | * | | 41 | * |
42 | * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' | | 42 | * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' |
43 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, | | 43 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, |
44 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A | | 44 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A |
45 | * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR | | 45 | * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR |
46 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | | 46 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
47 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | | 47 | * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
48 | * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | | 48 | * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
49 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | | 49 | * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON |
50 | * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR | | 50 | * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR |
51 | * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF | | 51 | * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF |
52 | * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH | | 52 | * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH |
53 | * DAMAGE. | | 53 | * DAMAGE. |
54 | * | | 54 | * |
55 | * @(#)locore.s 8.4 (Berkeley) 12/10/93 | | 55 | * @(#)locore.s 8.4 (Berkeley) 12/10/93 |
56 | */ | | 56 | */ |
57 | | | 57 | |
58 | #define SPITFIRE /* We don't support Cheetah (USIII) yet */ | | 58 | #define SPITFIRE /* We don't support Cheetah (USIII) yet */ |
59 | #undef PARANOID /* Extremely expensive consistency checks */ | | 59 | #undef PARANOID /* Extremely expensive consistency checks */ |
60 | #undef NO_VCACHE /* Map w/D$ disabled */ | | 60 | #undef NO_VCACHE /* Map w/D$ disabled */ |
61 | #undef TRAPSTATS /* Count traps */ | | 61 | #undef TRAPSTATS /* Count traps */ |
62 | #undef TRAPS_USE_IG /* Use Interrupt Globals for all traps */ | | 62 | #undef TRAPS_USE_IG /* Use Interrupt Globals for all traps */ |
63 | #define HWREF /* Track ref/mod bits in trap handlers */ | | 63 | #define HWREF /* Track ref/mod bits in trap handlers */ |
64 | #undef DCACHE_BUG /* Flush D$ around ASI_PHYS accesses */ | | 64 | #undef DCACHE_BUG /* Flush D$ around ASI_PHYS accesses */ |
65 | #undef NO_TSB /* Don't use TSB */ | | 65 | #undef NO_TSB /* Don't use TSB */ |
66 | #define USE_BLOCK_STORE_LOAD /* enable block load/store ops */ | | 66 | #define USE_BLOCK_STORE_LOAD /* enable block load/store ops */ |
67 | #define BB_ERRATA_1 /* writes to TICK_CMPR may fail */ | | 67 | #define BB_ERRATA_1 /* writes to TICK_CMPR may fail */ |
68 | | | 68 | |
69 | #include "opt_ddb.h" | | 69 | #include "opt_ddb.h" |
70 | #include "opt_kgdb.h" | | 70 | #include "opt_kgdb.h" |
71 | #include "opt_multiprocessor.h" | | 71 | #include "opt_multiprocessor.h" |
72 | #include "opt_compat_netbsd.h" | | 72 | #include "opt_compat_netbsd.h" |
73 | #include "opt_compat_netbsd32.h" | | 73 | #include "opt_compat_netbsd32.h" |
74 | #include "opt_lockdebug.h" | | 74 | #include "opt_lockdebug.h" |
75 | | | 75 | |
76 | #include "assym.h" | | 76 | #include "assym.h" |
77 | #include <machine/param.h> | | 77 | #include <machine/param.h> |
78 | #include <sparc64/sparc64/intreg.h> | | 78 | #include <sparc64/sparc64/intreg.h> |
79 | #include <sparc64/sparc64/timerreg.h> | | 79 | #include <sparc64/sparc64/timerreg.h> |
80 | #include <machine/ctlreg.h> | | 80 | #include <machine/ctlreg.h> |
81 | #include <machine/psl.h> | | 81 | #include <machine/psl.h> |
82 | #include <machine/signal.h> | | 82 | #include <machine/signal.h> |
83 | #include <machine/trap.h> | | 83 | #include <machine/trap.h> |
84 | #include <machine/frame.h> | | 84 | #include <machine/frame.h> |
85 | #include <machine/pte.h> | | 85 | #include <machine/pte.h> |
86 | #include <machine/pmap.h> | | 86 | #include <machine/pmap.h> |
87 | #include <machine/intr.h> | | 87 | #include <machine/intr.h> |
88 | #include <machine/asm.h> | | 88 | #include <machine/asm.h> |
89 | #include <sys/syscall.h> | | 89 | #include <sys/syscall.h> |
90 | | | 90 | |
91 | #include "ksyms.h" | | 91 | #include "ksyms.h" |
92 | | | 92 | |
93 | /* A few convenient abbreviations for trapframe fields. */ | | 93 | /* A few convenient abbreviations for trapframe fields. */ |
94 | #define TF_G TF_GLOBAL | | 94 | #define TF_G TF_GLOBAL |
95 | #define TF_O TF_OUT | | 95 | #define TF_O TF_OUT |
96 | #define TF_L TF_LOCAL | | 96 | #define TF_L TF_LOCAL |
97 | #define TF_I TF_IN | | 97 | #define TF_I TF_IN |
98 | | | 98 | |
99 | #undef CURLWP | | 99 | #undef CURLWP |
100 | #undef CPCB | | 100 | #undef CPCB |
101 | #undef FPLWP | | 101 | #undef FPLWP |
102 | | | 102 | |
103 | #define CURLWP (CPUINFO_VA + CI_CURLWP) | | 103 | #define CURLWP (CPUINFO_VA + CI_CURLWP) |
104 | #define CPCB (CPUINFO_VA + CI_CPCB) | | 104 | #define CPCB (CPUINFO_VA + CI_CPCB) |
105 | #define FPLWP (CPUINFO_VA + CI_FPLWP) | | 105 | #define FPLWP (CPUINFO_VA + CI_FPLWP) |
106 | | | 106 | |
107 | /* Let us use same syntax as C code */ | | 107 | /* Let us use same syntax as C code */ |
108 | #define Debugger() ta 1; nop | | 108 | #define Debugger() ta 1; nop |
109 | | | 109 | |
110 | #if 1 | | 110 | #if 1 |
111 | /* | | 111 | /* |
112 | * Try to issue an elf note to ask the Solaris | | 112 | * Try to issue an elf note to ask the Solaris |
113 | * bootloader to align the kernel properly. | | 113 | * bootloader to align the kernel properly. |
114 | */ | | 114 | */ |
115 | .section .note | | 115 | .section .note |
116 | .word 0x0d | | 116 | .word 0x0d |
117 | .word 4 ! Dunno why | | 117 | .word 4 ! Dunno why |
118 | .word 1 | | 118 | .word 1 |
119 | 0: .asciz "SUNW Solaris" | | 119 | 0: .asciz "SUNW Solaris" |
120 | 1: | | 120 | 1: |
121 | .align 4 | | 121 | .align 4 |
122 | .word 0x0400000 | | 122 | .word 0x0400000 |
123 | #endif | | 123 | #endif |
124 | | | 124 | |
125 | .register %g2,#scratch | | 125 | .register %g2,#scratch |
126 | .register %g3,#scratch | | 126 | .register %g3,#scratch |
127 | | | 127 | |
128 | /* | | 128 | /* |
129 | * Here are some defines to try to maintain consistency but still | | 129 | * Here are some defines to try to maintain consistency but still |
130 | * support 32-and 64-bit compilers. | | 130 | * support 32-and 64-bit compilers. |
131 | */ | | 131 | */ |
132 | #ifdef _LP64 | | 132 | #ifdef _LP64 |
133 | /* reg that points to base of data/text segment */ | | 133 | /* reg that points to base of data/text segment */ |
134 | #define BASEREG %g4 | | 134 | #define BASEREG %g4 |
135 | /* first constants for storage allocation */ | | 135 | /* first constants for storage allocation */ |
136 | #define LNGSZ 8 | | 136 | #define LNGSZ 8 |
137 | #define LNGSHFT 3 | | 137 | #define LNGSHFT 3 |
138 | #define PTRSZ 8 | | 138 | #define PTRSZ 8 |
139 | #define PTRSHFT 3 | | 139 | #define PTRSHFT 3 |
140 | #define POINTER .xword | | 140 | #define POINTER .xword |
141 | #define ULONG .xword | | 141 | #define ULONG .xword |
142 | /* Now instructions to load/store pointers & long ints */ | | 142 | /* Now instructions to load/store pointers & long ints */ |
143 | #define LDLNG ldx | | 143 | #define LDLNG ldx |
144 | #define LDULNG ldx | | 144 | #define LDULNG ldx |
145 | #define STLNG stx | | 145 | #define STLNG stx |
146 | #define STULNG stx | | 146 | #define STULNG stx |
147 | #define LDPTR ldx | | 147 | #define LDPTR ldx |
148 | #define LDPTRA ldxa | | 148 | #define LDPTRA ldxa |
149 | #define STPTR stx | | 149 | #define STPTR stx |
150 | #define STPTRA stxa | | 150 | #define STPTRA stxa |
151 | #define CASPTR casxa | | 151 | #define CASPTR casxa |
152 | /* Now something to calculate the stack bias */ | | 152 | /* Now something to calculate the stack bias */ |
153 | #define STKB BIAS | | 153 | #define STKB BIAS |
154 | #define CCCR %xcc | | 154 | #define CCCR %xcc |
155 | #else | | 155 | #else |
156 | #define BASEREG %g0 | | 156 | #define BASEREG %g0 |
157 | #define LNGSZ 4 | | 157 | #define LNGSZ 4 |
158 | #define LNGSHFT 2 | | 158 | #define LNGSHFT 2 |
159 | #define PTRSZ 4 | | 159 | #define PTRSZ 4 |
160 | #define PTRSHFT 2 | | 160 | #define PTRSHFT 2 |
161 | #define POINTER .word | | 161 | #define POINTER .word |
162 | #define ULONG .word | | 162 | #define ULONG .word |
163 | /* Instructions to load/store pointers & long ints */ | | 163 | /* Instructions to load/store pointers & long ints */ |
164 | #define LDLNG ldsw | | 164 | #define LDLNG ldsw |
165 | #define LDULNG lduw | | 165 | #define LDULNG lduw |
166 | #define STLNG stw | | 166 | #define STLNG stw |
167 | #define STULNG stw | | 167 | #define STULNG stw |
168 | #define LDPTR lduw | | 168 | #define LDPTR lduw |
169 | #define LDPTRA lduwa | | 169 | #define LDPTRA lduwa |
170 | #define STPTR stw | | 170 | #define STPTR stw |
171 | #define STPTRA stwa | | 171 | #define STPTRA stwa |
172 | #define CASPTR casa | | 172 | #define CASPTR casa |
173 | #define STKB 0 | | 173 | #define STKB 0 |
174 | #define CCCR %icc | | 174 | #define CCCR %icc |
175 | #endif | | 175 | #endif |
176 | | | 176 | |
177 | /* | | 177 | /* |
178 | * GNU assembler does not understand `.empty' directive; Sun assembler | | 178 | * GNU assembler does not understand `.empty' directive; Sun assembler |
179 | * gripes about labels without it. To allow cross-compilation using | | 179 | * gripes about labels without it. To allow cross-compilation using |
180 | * the Sun assembler, and because .empty directives are useful | | 180 | * the Sun assembler, and because .empty directives are useful |
181 | * documentation, we use this trick. | | 181 | * documentation, we use this trick. |
182 | */ | | 182 | */ |
183 | #ifdef SUN_AS | | 183 | #ifdef SUN_AS |
184 | #define EMPTY .empty | | 184 | #define EMPTY .empty |
185 | #else | | 185 | #else |
186 | #define EMPTY /* .empty */ | | 186 | #define EMPTY /* .empty */ |
187 | #endif | | 187 | #endif |
188 | | | 188 | |
189 | /* use as needed to align things on longword boundaries */ | | 189 | /* use as needed to align things on longword boundaries */ |
190 | #define _ALIGN .align 8 | | 190 | #define _ALIGN .align 8 |
191 | #define ICACHE_ALIGN .align 32 | | 191 | #define ICACHE_ALIGN .align 32 |
192 | | | 192 | |
193 | /* Give this real authority: reset the machine */ | | 193 | /* Give this real authority: reset the machine */ |
194 | #define NOTREACHED sir | | 194 | #define NOTREACHED sir |
195 | | | 195 | |
196 | /* | | 196 | /* |
197 | * This macro will clear out a cache line before an explicit | | 197 | * This macro will clear out a cache line before an explicit |
198 | * access to that location. It's mostly used to make certain | | 198 | * access to that location. It's mostly used to make certain |
199 | * loads bypassing the D$ do not get stale D$ data. | | 199 | * loads bypassing the D$ do not get stale D$ data. |
200 | * | | 200 | * |
201 | * It uses a register with the address to clear and a temporary | | 201 | * It uses a register with the address to clear and a temporary |
202 | * which is destroyed. | | 202 | * which is destroyed. |
203 | */ | | 203 | */ |
204 | #ifdef DCACHE_BUG | | 204 | #ifdef DCACHE_BUG |
205 | #define DLFLUSH(a,t) \ | | 205 | #define DLFLUSH(a,t) \ |
206 | andn a, 0x1f, t; \ | | 206 | andn a, 0x1f, t; \ |
207 | stxa %g0, [ t ] ASI_DCACHE_TAG; \ | | 207 | stxa %g0, [ t ] ASI_DCACHE_TAG; \ |
208 | membar #Sync | | 208 | membar #Sync |
209 | /* The following can be used if the pointer is 16-byte aligned */ | | 209 | /* The following can be used if the pointer is 16-byte aligned */ |
210 | #define DLFLUSH2(t) \ | | 210 | #define DLFLUSH2(t) \ |
211 | stxa %g0, [ t ] ASI_DCACHE_TAG; \ | | 211 | stxa %g0, [ t ] ASI_DCACHE_TAG; \ |
212 | membar #Sync | | 212 | membar #Sync |
213 | #else | | 213 | #else |
214 | #define DLFLUSH(a,t) | | 214 | #define DLFLUSH(a,t) |
215 | #define DLFLUSH2(t) | | 215 | #define DLFLUSH2(t) |
216 | #endif | | 216 | #endif |
217 | | | 217 | |
218 | | | 218 | |
219 | /* | | 219 | /* |
220 | * Combine 2 regs -- used to convert 64-bit ILP32 | | 220 | * Combine 2 regs -- used to convert 64-bit ILP32 |
221 | * values to LP64. | | 221 | * values to LP64. |
222 | */ | | 222 | */ |
223 | #define COMBINE(r1, r2, d) \ | | 223 | #define COMBINE(r1, r2, d) \ |
224 | sllx r1, 32, d; \ | | 224 | sllx r1, 32, d; \ |
225 | or d, r2, d | | 225 | or d, r2, d |
226 | | | 226 | |
227 | /* | | 227 | /* |
228 | * Split 64-bit value in 1 reg into high and low halves. | | 228 | * Split 64-bit value in 1 reg into high and low halves. |
229 | * Used for ILP32 return values. | | 229 | * Used for ILP32 return values. |
230 | */ | | 230 | */ |
231 | #define SPLIT(r0, r1) \ | | 231 | #define SPLIT(r0, r1) \ |
232 | srl r0, 0, r1; \ | | 232 | srl r0, 0, r1; \ |
233 | srlx r0, 32, r0 | | 233 | srlx r0, 32, r0 |
234 | | | 234 | |
235 | | | 235 | |
236 | /* | | 236 | /* |
237 | * A handy macro for maintaining instrumentation counters. | | 237 | * A handy macro for maintaining instrumentation counters. |
238 | * Note that this clobbers %o0, %o1 and %o2. Normal usage is | | 238 | * Note that this clobbers %o0, %o1 and %o2. Normal usage is |
239 | * something like: | | 239 | * something like: |
240 | * foointr: | | 240 | * foointr: |
241 | * TRAP_SETUP(...) ! makes %o registers safe | | 241 | * TRAP_SETUP(...) ! makes %o registers safe |
242 | * INCR(_C_LABEL(cnt)+V_FOO) ! count a foo | | 242 | * INCR(_C_LABEL(cnt)+V_FOO) ! count a foo |
243 | */ | | 243 | */ |
244 | #define INCR(what) \ | | 244 | #define INCR(what) \ |
245 | sethi %hi(what), %o0; \ | | 245 | sethi %hi(what), %o0; \ |
246 | or %o0, %lo(what), %o0; \ | | 246 | or %o0, %lo(what), %o0; \ |
247 | 99: \ | | 247 | 99: \ |
248 | lduw [%o0], %o1; \ | | 248 | lduw [%o0], %o1; \ |
249 | add %o1, 1, %o2; \ | | 249 | add %o1, 1, %o2; \ |
250 | casa [%o0] ASI_P, %o1, %o2; \ | | 250 | casa [%o0] ASI_P, %o1, %o2; \ |
251 | cmp %o1, %o2; \ | | 251 | cmp %o1, %o2; \ |
252 | bne,pn %icc, 99b; \ | | 252 | bne,pn %icc, 99b; \ |
253 | nop | | 253 | nop |
254 | | | 254 | |
255 | /* | | 255 | /* |
256 | * A couple of handy macros to save and restore globals to/from | | 256 | * A couple of handy macros to save and restore globals to/from |
257 | * locals. Since udivrem uses several globals, and it's called | | 257 | * locals. Since udivrem uses several globals, and it's called |
258 | * from vsprintf, we need to do this before and after doing a printf. | | 258 | * from vsprintf, we need to do this before and after doing a printf. |
259 | */ | | 259 | */ |
260 | #define GLOBTOLOC \ | | 260 | #define GLOBTOLOC \ |
261 | mov %g1, %l1; \ | | 261 | mov %g1, %l1; \ |
262 | mov %g2, %l2; \ | | 262 | mov %g2, %l2; \ |
263 | mov %g3, %l3; \ | | 263 | mov %g3, %l3; \ |
264 | mov %g4, %l4; \ | | 264 | mov %g4, %l4; \ |
265 | mov %g5, %l5; \ | | 265 | mov %g5, %l5; \ |
266 | mov %g6, %l6; \ | | 266 | mov %g6, %l6; \ |
267 | mov %g7, %l7 | | 267 | mov %g7, %l7 |
268 | | | 268 | |
269 | #define LOCTOGLOB \ | | 269 | #define LOCTOGLOB \ |
270 | mov %l1, %g1; \ | | 270 | mov %l1, %g1; \ |
271 | mov %l2, %g2; \ | | 271 | mov %l2, %g2; \ |
272 | mov %l3, %g3; \ | | 272 | mov %l3, %g3; \ |
273 | mov %l4, %g4; \ | | 273 | mov %l4, %g4; \ |
274 | mov %l5, %g5; \ | | 274 | mov %l5, %g5; \ |
275 | mov %l6, %g6; \ | | 275 | mov %l6, %g6; \ |
276 | mov %l7, %g7 | | 276 | mov %l7, %g7 |
277 | | | 277 | |
278 | /* Load strings address into register; NOTE: hidden local label 99 */ | | 278 | /* Load strings address into register; NOTE: hidden local label 99 */ |
279 | #define LOAD_ASCIZ(reg, s) \ | | 279 | #define LOAD_ASCIZ(reg, s) \ |
280 | set 99f, reg ; \ | | 280 | set 99f, reg ; \ |
281 | .data ; \ | | 281 | .data ; \ |
282 | 99: .asciz s ; \ | | 282 | 99: .asciz s ; \ |
283 | _ALIGN ; \ | | 283 | _ALIGN ; \ |
284 | .text | | 284 | .text |
285 | | | 285 | |
286 | /* | | 286 | /* |
287 | * Handy stack conversion macros. | | 287 | * Handy stack conversion macros. |
288 | * They correctly switch to requested stack type | | 288 | * They correctly switch to requested stack type |
289 | * regardless of the current stack. | | 289 | * regardless of the current stack. |
290 | */ | | 290 | */ |
291 | | | 291 | |
292 | #define TO_STACK64(size) \ | | 292 | #define TO_STACK64(size) \ |
293 | save %sp, size, %sp; \ | | 293 | save %sp, size, %sp; \ |
294 | add %sp, -BIAS, %o0; /* Convert to 64-bits */ \ | | 294 | add %sp, -BIAS, %o0; /* Convert to 64-bits */ \ |
295 | andcc %sp, 1, %g0; /* 64-bit stack? */ \ | | 295 | andcc %sp, 1, %g0; /* 64-bit stack? */ \ |
296 | movz %icc, %o0, %sp | | 296 | movz %icc, %o0, %sp |
297 | | | 297 | |
298 | #define TO_STACK32(size) \ | | 298 | #define TO_STACK32(size) \ |
299 | save %sp, size, %sp; \ | | 299 | save %sp, size, %sp; \ |
300 | add %sp, +BIAS, %o0; /* Convert to 32-bits */ \ | | 300 | add %sp, +BIAS, %o0; /* Convert to 32-bits */ \ |
301 | andcc %sp, 1, %g0; /* 64-bit stack? */ \ | | 301 | andcc %sp, 1, %g0; /* 64-bit stack? */ \ |
302 | movnz %icc, %o0, %sp | | 302 | movnz %icc, %o0, %sp |
303 | | | 303 | |
304 | #ifdef _LP64 | | 304 | #ifdef _LP64 |
305 | #define STACKFRAME(size) TO_STACK64(size) | | 305 | #define STACKFRAME(size) TO_STACK64(size) |
306 | #else | | 306 | #else |
307 | #define STACKFRAME(size) TO_STACK32(size) | | 307 | #define STACKFRAME(size) TO_STACK32(size) |
308 | #endif | | 308 | #endif |
309 | | | 309 | |
310 | #ifdef USE_BLOCK_STORE_LOAD | | 310 | #ifdef USE_BLOCK_STORE_LOAD |
311 | /* | | 311 | /* |
312 | * The following routines allow fpu use in the kernel. | | 312 | * The following routines allow fpu use in the kernel. |
313 | * | | 313 | * |
314 | * They allocate a stack frame and use all local regs. Extra | | 314 | * They allocate a stack frame and use all local regs. Extra |
315 | * local storage can be requested by setting the siz parameter, | | 315 | * local storage can be requested by setting the siz parameter, |
316 | * and can be accessed at %sp+CC64FSZ. | | 316 | * and can be accessed at %sp+CC64FSZ. |
317 | */ | | 317 | */ |
318 | | | 318 | |
319 | #define ENABLE_FPU(siz) \ | | 319 | #define ENABLE_FPU(siz) \ |
320 | save %sp, -(CC64FSZ), %sp; /* Allocate a stack frame */ \ | | 320 | save %sp, -(CC64FSZ), %sp; /* Allocate a stack frame */ \ |
321 | sethi %hi(FPLWP), %l1; \ | | 321 | sethi %hi(FPLWP), %l1; \ |
322 | add %fp, STKB-FS_SIZE, %l0; /* Allocate a fpstate */ \ | | 322 | add %fp, STKB-FS_SIZE, %l0; /* Allocate a fpstate */ \ |
323 | LDPTR [%l1 + %lo(FPLWP)], %l2; /* Load fplwp */ \ | | 323 | LDPTR [%l1 + %lo(FPLWP)], %l2; /* Load fplwp */ \ |
324 | andn %l0, BLOCK_ALIGN, %l0; /* Align it */ \ | | 324 | andn %l0, BLOCK_ALIGN, %l0; /* Align it */ \ |
325 | clr %l3; /* NULL fpstate */ \ | | 325 | clr %l3; /* NULL fpstate */ \ |
326 | brz,pt %l2, 1f; /* fplwp == NULL? */ \ | | 326 | brz,pt %l2, 1f; /* fplwp == NULL? */ \ |
327 | add %l0, -STKB-CC64FSZ-(siz), %sp; /* Set proper %sp */ \ | | 327 | add %l0, -STKB-CC64FSZ-(siz), %sp; /* Set proper %sp */ \ |
328 | LDPTR [%l2 + L_FPSTATE], %l3; \ | | 328 | LDPTR [%l2 + L_FPSTATE], %l3; \ |
329 | brz,pn %l3, 1f; /* Make sure we have an fpstate */ \ | | 329 | brz,pn %l3, 1f; /* Make sure we have an fpstate */ \ |
330 | mov %l3, %o0; \ | | 330 | mov %l3, %o0; \ |
331 | call _C_LABEL(savefpstate); /* Save the old fpstate */ \ | | 331 | call _C_LABEL(savefpstate); /* Save the old fpstate */ \ |
332 | 1: \ | | 332 | 1: \ |
333 | set EINTSTACK-STKB, %l4; /* Are we on intr stack? */ \ | | 333 | set EINTSTACK-STKB, %l4; /* Are we on intr stack? */ \ |
334 | cmp %sp, %l4; \ | | 334 | cmp %sp, %l4; \ |
335 | bgu,pt %xcc, 1f; \ | | 335 | bgu,pt %xcc, 1f; \ |
336 | set INTSTACK-STKB, %l4; \ | | 336 | set INTSTACK-STKB, %l4; \ |
337 | cmp %sp, %l4; \ | | 337 | cmp %sp, %l4; \ |
338 | blu %xcc, 1f; \ | | 338 | blu %xcc, 1f; \ |
339 | 0: \ | | 339 | 0: \ |
340 | sethi %hi(_C_LABEL(lwp0)), %l4; /* Yes, use lpw0 */ \ | | 340 | sethi %hi(_C_LABEL(lwp0)), %l4; /* Yes, use lpw0 */ \ |
341 | ba,pt %xcc, 2f; /* XXXX needs to change to CPUs idle proc */ \ | | 341 | ba,pt %xcc, 2f; /* XXXX needs to change to CPUs idle proc */ \ |
342 | or %l4, %lo(_C_LABEL(lwp0)), %l5; \ | | 342 | or %l4, %lo(_C_LABEL(lwp0)), %l5; \ |
343 | 1: \ | | 343 | 1: \ |
344 | sethi %hi(CURLWP), %l4; /* Use curlwp */ \ | | 344 | sethi %hi(CURLWP), %l4; /* Use curlwp */ \ |
345 | LDPTR [%l4 + %lo(CURLWP)], %l5; \ | | 345 | LDPTR [%l4 + %lo(CURLWP)], %l5; \ |
346 | brz,pn %l5, 0b; nop; /* If curlwp is NULL need to use lwp0 */ \ | | 346 | brz,pn %l5, 0b; nop; /* If curlwp is NULL need to use lwp0 */ \ |
347 | 2: \ | | 347 | 2: \ |
348 | LDPTR [%l5 + L_FPSTATE], %l6; /* Save old fpstate */ \ | | 348 | LDPTR [%l5 + L_FPSTATE], %l6; /* Save old fpstate */ \ |
349 | STPTR %l0, [%l5 + L_FPSTATE]; /* Insert new fpstate */ \ | | 349 | STPTR %l0, [%l5 + L_FPSTATE]; /* Insert new fpstate */ \ |
350 | STPTR %l5, [%l1 + %lo(FPLWP)]; /* Set new fplwp */ \ | | 350 | STPTR %l5, [%l1 + %lo(FPLWP)]; /* Set new fplwp */ \ |
351 | wr %g0, FPRS_FEF, %fprs /* Enable FPU */ | | 351 | wr %g0, FPRS_FEF, %fprs /* Enable FPU */ |
352 | #endif | | 352 | #endif |
353 | | | 353 | |
354 | /* | | 354 | /* |
355 | * Weve saved our possible fpstate, now disable the fpu | | 355 | * Weve saved our possible fpstate, now disable the fpu |
356 | * and continue with life. | | 356 | * and continue with life. |
357 | */ | | 357 | */ |
358 | #ifdef DEBUG | | 358 | #ifdef DEBUG |
359 | #define __CHECK_FPU \ | | 359 | #define __CHECK_FPU \ |
360 | LDPTR [%l5 + L_FPSTATE], %l7; \ | | 360 | LDPTR [%l5 + L_FPSTATE], %l7; \ |
361 | cmp %l7, %l0; \ | | 361 | cmp %l7, %l0; \ |
362 | tnz 1; | | 362 | tnz 1; |
363 | #else | | 363 | #else |
364 | #define __CHECK_FPU | | 364 | #define __CHECK_FPU |
365 | #endif | | 365 | #endif |
366 | | | 366 | |
367 | #define RESTORE_FPU \ | | 367 | #define RESTORE_FPU \ |
368 | __CHECK_FPU \ | | 368 | __CHECK_FPU \ |
369 | STPTR %l2, [%l1 + %lo(FPLWP)]; /* Restore old fproc */ \ | | 369 | STPTR %l2, [%l1 + %lo(FPLWP)]; /* Restore old fproc */ \ |
370 | wr %g0, 0, %fprs; /* Disable fpu */ \ | | 370 | wr %g0, 0, %fprs; /* Disable fpu */ \ |
371 | brz,pt %l3, 1f; /* Skip if no fpstate */ \ | | 371 | brz,pt %l3, 1f; /* Skip if no fpstate */ \ |
372 | STPTR %l6, [%l5 + L_FPSTATE]; /* Restore old fpstate */ \ | | 372 | STPTR %l6, [%l5 + L_FPSTATE]; /* Restore old fpstate */ \ |
373 | \ | | 373 | \ |
374 | mov %l3, %o0; \ | | 374 | mov %l3, %o0; \ |
375 | call _C_LABEL(loadfpstate); /* Re-load orig fpstate */ \ | | 375 | call _C_LABEL(loadfpstate); /* Re-load orig fpstate */ \ |
376 | 1: \ | | 376 | 1: \ |
377 | membar #Sync; /* Finish all FP ops */ | | 377 | membar #Sync; /* Finish all FP ops */ |
378 | | | 378 | |
379 | | | 379 | |
380 | | | 380 | |
381 | .data | | 381 | .data |
382 | .globl _C_LABEL(data_start) | | 382 | .globl _C_LABEL(data_start) |
383 | _C_LABEL(data_start): ! Start of data segment | | 383 | _C_LABEL(data_start): ! Start of data segment |
384 | #define DATA_START _C_LABEL(data_start) | | 384 | #define DATA_START _C_LABEL(data_start) |
385 | | | 385 | |
386 | #if 1 | | 386 | #if 1 |
387 | /* XXX this shouldn't be needed... but kernel usually hangs without it */ | | 387 | /* XXX this shouldn't be needed... but kernel usually hangs without it */ |
388 | .space USPACE | | 388 | .space USPACE |
389 | #endif | | 389 | #endif |
390 | | | 390 | |
391 | #ifdef KGDB | | 391 | #ifdef KGDB |
392 | /* | | 392 | /* |
393 | * Another item that must be aligned, easiest to put it here. | | 393 | * Another item that must be aligned, easiest to put it here. |
394 | */ | | 394 | */ |
395 | KGDB_STACK_SIZE = 2048 | | 395 | KGDB_STACK_SIZE = 2048 |
396 | .globl _C_LABEL(kgdb_stack) | | 396 | .globl _C_LABEL(kgdb_stack) |
397 | _C_LABEL(kgdb_stack): | | 397 | _C_LABEL(kgdb_stack): |
398 | .space KGDB_STACK_SIZE ! hope this is enough | | 398 | .space KGDB_STACK_SIZE ! hope this is enough |
399 | #endif | | 399 | #endif |
400 | | | 400 | |
401 | #ifdef NOTDEF_DEBUG | | 401 | #ifdef NOTDEF_DEBUG |
402 | /* | | 402 | /* |
403 | * This stack is used when we detect kernel stack corruption. | | 403 | * This stack is used when we detect kernel stack corruption. |
404 | */ | | 404 | */ |
405 | .space USPACE | | 405 | .space USPACE |
406 | .align 16 | | 406 | .align 16 |
407 | panicstack: | | 407 | panicstack: |
408 | #endif | | 408 | #endif |
409 | | | 409 | |
410 | /* | | 410 | /* |
411 | * romp is the prom entry pointer | | 411 | * romp is the prom entry pointer |
412 | * romtba is the prom trap table base address | | 412 | * romtba is the prom trap table base address |
413 | */ | | 413 | */ |
414 | .globl romp | | 414 | .globl romp |
415 | romp: POINTER 0 | | 415 | romp: POINTER 0 |
416 | .globl romtba | | 416 | .globl romtba |
417 | romtba: POINTER 0 | | 417 | romtba: POINTER 0 |
418 | | | 418 | |
419 | _ALIGN | | 419 | _ALIGN |
420 | .text | | 420 | .text |
421 | | | 421 | |
422 | /* | | 422 | /* |
423 | * The v9 trap frame is stored in the special trap registers. The | | 423 | * The v9 trap frame is stored in the special trap registers. The |
424 | * register window is only modified on window overflow, underflow, | | 424 | * register window is only modified on window overflow, underflow, |
425 | * and clean window traps, where it points to the register window | | 425 | * and clean window traps, where it points to the register window |
426 | * needing service. Traps have space for 8 instructions, except for | | 426 | * needing service. Traps have space for 8 instructions, except for |
427 | * the window overflow, underflow, and clean window traps which are | | 427 | * the window overflow, underflow, and clean window traps which are |
428 | * 32 instructions long, large enough to in-line. | | 428 | * 32 instructions long, large enough to in-line. |
429 | * | | 429 | * |
430 | * The spitfire CPU (Ultra I) has 4 different sets of global registers. | | 430 | * The spitfire CPU (Ultra I) has 4 different sets of global registers. |
431 | * (blah blah...) | | 431 | * (blah blah...) |
432 | * | | 432 | * |
433 | * I used to generate these numbers by address arithmetic, but gas's | | 433 | * I used to generate these numbers by address arithmetic, but gas's |
434 | * expression evaluator has about as much sense as your average slug | | 434 | * expression evaluator has about as much sense as your average slug |
435 | * (oddly enough, the code looks about as slimy too). Thus, all the | | 435 | * (oddly enough, the code looks about as slimy too). Thus, all the |
436 | * trap numbers are given as arguments to the trap macros. This means | | 436 | * trap numbers are given as arguments to the trap macros. This means |
437 | * there is one line per trap. Sigh. | | 437 | * there is one line per trap. Sigh. |
438 | * | | 438 | * |
439 | * Hardware interrupt vectors can be `linked'---the linkage is to regular | | 439 | * Hardware interrupt vectors can be `linked'---the linkage is to regular |
440 | * C code---or rewired to fast in-window handlers. The latter are good | | 440 | * C code---or rewired to fast in-window handlers. The latter are good |
441 | * for unbuffered hardware like the Zilog serial chip and the AMD audio | | 441 | * for unbuffered hardware like the Zilog serial chip and the AMD audio |
442 | * chip, where many interrupts can be handled trivially with pseudo-DMA | | 442 | * chip, where many interrupts can be handled trivially with pseudo-DMA |
443 | * or similar. Only one `fast' interrupt can be used per level, however, | | 443 | * or similar. Only one `fast' interrupt can be used per level, however, |
444 | * and direct and `fast' interrupts are incompatible. Routines in intr.c | | 444 | * and direct and `fast' interrupts are incompatible. Routines in intr.c |
445 | * handle setting these, with optional paranoia. | | 445 | * handle setting these, with optional paranoia. |
446 | */ | | 446 | */ |
447 | | | 447 | |
448 | /* | | 448 | /* |
449 | * TA8 -- trap align for 8 instruction traps | | 449 | * TA8 -- trap align for 8 instruction traps |
450 | * TA32 -- trap align for 32 instruction traps | | 450 | * TA32 -- trap align for 32 instruction traps |
451 | */ | | 451 | */ |
452 | #define TA8 .align 32 | | 452 | #define TA8 .align 32 |
453 | #define TA32 .align 128 | | 453 | #define TA32 .align 128 |
454 | | | 454 | |
455 | /* | | 455 | /* |
456 | * v9 trap macros: | | 456 | * v9 trap macros: |
457 | * | | 457 | * |
458 | * We have a problem with v9 traps; we have no registers to put the | | 458 | * We have a problem with v9 traps; we have no registers to put the |
459 | * trap type into. But we do have a %tt register which already has | | 459 | * trap type into. But we do have a %tt register which already has |
 * that information.  Trap types in these macros are all dummies.
461 | */ | | 461 | */ |
/* regular vectored traps */

/*
 * When kernel trap tracing (KTR) is compiled in, these macros
 * detour through ktr_trap_gen with the real destination address
 * in %g1 so the trap can be logged first.  Otherwise the TRACE*
 * macros expand to nothing and VTRAP branches straight to its
 * handler.  The `type' argument is a dummy -- %tt already holds
 * the trap type (see the comment above).
 */
#if KTR_COMPILE & KTR_TRAP
#if 0
#define TRACEWIN	wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate;\
	sethi %hi(9f), %g1; ba,pt %icc,ktr_trap_gen; or %g1, %lo(9f), %g1; 9:
#else
#define TRACEWIN
#endif
#define TRACEFLT	sethi %hi(1f), %g1; ba,pt %icc,ktr_trap_gen;\
	or %g1, %lo(1f), %g1; 1:
#define VTRAP(type, label) \
	sethi %hi(label), %g1; ba,pt %icc,ktr_trap_gen;\
	or %g1, %lo(label), %g1; NOTREACHED; TA8
#else
#define TRACEWIN
#define TRACEFLT
#define VTRAP(type, label) \
	ba,a,pt %icc,label; nop; NOTREACHED; TA8
#endif
482 | | | 482 | |
/* hardware interrupts (can be linked or made `fast') */
#define HARDINT4U(lev) \
	VTRAP(lev, _C_LABEL(sparc_interrupt))

/* software interrupts (may not be made direct, sorry---but you
   should not be using them trivially anyway) */
/* NOTE: currently dispatched exactly like hardware interrupts;
   the `bit' argument is unused. */
#define SOFTINT4U(lev, bit) \
	HARDINT4U(lev)
491 | | | 491 | |
/* traps that just call trap() */
#define TRAP(type)	VTRAP(type, slowtrap)

/*
 * architecturally undefined traps (cause panic)
 *
 * Non-DEBUG kernels execute `sir' (software-initiated reset)
 * first; DEBUG kernels go straight to slowtrap so the trap can
 * be examined instead.
 */
#ifndef DEBUG
#define UTRAP(type)	sir; VTRAP(type, slowtrap)
#else
#define UTRAP(type)	VTRAP(type, slowtrap)
#endif

/* software undefined traps (may be replaced) */
#define STRAP(type)	VTRAP(type, slowtrap)
504 | | | 504 | |
/* breakpoint acts differently under kgdb */
/* With KGDB these vector to the in-kernel debugger entry (bpt);
   otherwise they are handled as ordinary traps via trap(). */
#ifdef KGDB
#define BPT		VTRAP(T_BREAKPOINT, bpt)
#define BPT_KGDB_EXEC	VTRAP(T_KGDB_EXEC, bpt)
#else
#define BPT		TRAP(T_BREAKPOINT)
#define BPT_KGDB_EXEC	TRAP(T_KGDB_EXEC)
#endif
513 | | | 513 | |
/* system call trap (tt 0x100) vectors to syscall_setup */
#define SYSCALL		VTRAP(0x100, syscall_setup)
#ifdef notyet
#define ZS_INTERRUPT	ba,a,pt %icc, zshard; nop; TA8
#else
/* zs (serial) interrupts are currently taken as ordinary level 12
   interrupts (see trap table entry 04c below) */
#define ZS_INTERRUPT4U	HARDINT4U(12)
#endif
520 | | | 520 | |
521 | | | 521 | |
522 | /* | | 522 | /* |
523 | * Macro to clear %tt so we don't get confused with old traps. | | 523 | * Macro to clear %tt so we don't get confused with old traps. |
524 | */ | | 524 | */ |
525 | #ifdef DEBUG | | 525 | #ifdef DEBUG |
526 | #define CLRTT wrpr %g0,0x1ff,%tt | | 526 | #define CLRTT wrpr %g0,0x1ff,%tt |
527 | #else | | 527 | #else |
528 | #define CLRTT | | 528 | #define CLRTT |
529 | #endif | | 529 | #endif |
530 | | | 530 | |
531 | /* | | 531 | /* |
 * Here are some oft-repeated traps as macros.
533 | */ | | 533 | */ |
534 | | | 534 | |
/* spill a 64-bit register window */
/*
 * Store the window's sixteen 64-bit registers (%l0-%l7, %i0-%i7)
 * to the memory stack at %sp+BIAS through ASI `as', mark the
 * window saved (`saved'), clear %tt and retry the trapping
 * instruction.  Runs entirely in-line in a 32-instruction slot.
 */
#define SPILL64(label,as) \
	TRACEWIN; \
label: \
	wr %g0, as, %asi; \
	stxa %l0, [%sp+BIAS+0x00]%asi; \
	stxa %l1, [%sp+BIAS+0x08]%asi; \
	stxa %l2, [%sp+BIAS+0x10]%asi; \
	stxa %l3, [%sp+BIAS+0x18]%asi; \
	stxa %l4, [%sp+BIAS+0x20]%asi; \
	stxa %l5, [%sp+BIAS+0x28]%asi; \
	stxa %l6, [%sp+BIAS+0x30]%asi; \
\
	stxa %l7, [%sp+BIAS+0x38]%asi; \
	stxa %i0, [%sp+BIAS+0x40]%asi; \
	stxa %i1, [%sp+BIAS+0x48]%asi; \
	stxa %i2, [%sp+BIAS+0x50]%asi; \
	stxa %i3, [%sp+BIAS+0x58]%asi; \
	stxa %i4, [%sp+BIAS+0x60]%asi; \
	stxa %i5, [%sp+BIAS+0x68]%asi; \
	stxa %i6, [%sp+BIAS+0x70]%asi; \
\
	stxa %i7, [%sp+BIAS+0x78]%asi; \
	saved; \
	CLRTT; \
	retry; \
	NOTREACHED; \
	TA32
563 | | | 563 | |
/* spill a 32-bit register window */
/*
 * As SPILL64, but for a v8 (32-bit) frame: the stack pointer is
 * first truncated to 32 bits and the registers are stored as
 * 32-bit words at %sp with no BIAS.
 */
#define SPILL32(label,as) \
	TRACEWIN; \
label: \
	wr %g0, as, %asi; \
	srl %sp, 0, %sp; /* fixup 32-bit pointers */ \
	stwa %l0, [%sp+0x00]%asi; \
	stwa %l1, [%sp+0x04]%asi; \
	stwa %l2, [%sp+0x08]%asi; \
	stwa %l3, [%sp+0x0c]%asi; \
	stwa %l4, [%sp+0x10]%asi; \
	stwa %l5, [%sp+0x14]%asi; \
\
	stwa %l6, [%sp+0x18]%asi; \
	stwa %l7, [%sp+0x1c]%asi; \
	stwa %i0, [%sp+0x20]%asi; \
	stwa %i1, [%sp+0x24]%asi; \
	stwa %i2, [%sp+0x28]%asi; \
	stwa %i3, [%sp+0x2c]%asi; \
	stwa %i4, [%sp+0x30]%asi; \
	stwa %i5, [%sp+0x34]%asi; \
\
	stwa %i6, [%sp+0x38]%asi; \
	stwa %i7, [%sp+0x3c]%asi; \
	saved; \
	CLRTT; \
	retry; \
	NOTREACHED; \
	TA32
593 | | | 593 | |
/* Spill either 32-bit or 64-bit register window. */
/*
 * Bit 0 of %sp distinguishes the stack type: a v9 (64-bit) frame
 * carries the stack BIAS, leaving bit 0 set.  Jump into SPILL64
 * at label64+4, skipping its `wr' (issued here in the delay
 * slot), or into SPILL32 at label32+8, skipping its `wr' and
 * `srl' (both issued here).
 */
#define SPILLBOTH(label64,label32,as) \
	TRACEWIN; \
	andcc %sp, 1, %g0; \
	bnz,pt %xcc, label64+4;	/* Is it a v9 or v8 stack? */ \
	 wr %g0, as, %asi; \
	ba,pt %xcc, label32+8; \
	 srl %sp, 0, %sp; /* fixup 32-bit pointers */ \
	NOTREACHED; \
	TA32
604 | | | 604 | |
/* fill a 64-bit register window */
/*
 * Inverse of SPILL64: reload %l0-%l7 and %i0-%i7 from the memory
 * stack at %sp+BIAS through ASI `as', mark the window restored
 * (`restored'), clear %tt and retry.
 */
#define FILL64(label,as) \
	TRACEWIN; \
label: \
	wr %g0, as, %asi; \
	ldxa [%sp+BIAS+0x00]%asi, %l0; \
	ldxa [%sp+BIAS+0x08]%asi, %l1; \
	ldxa [%sp+BIAS+0x10]%asi, %l2; \
	ldxa [%sp+BIAS+0x18]%asi, %l3; \
	ldxa [%sp+BIAS+0x20]%asi, %l4; \
	ldxa [%sp+BIAS+0x28]%asi, %l5; \
	ldxa [%sp+BIAS+0x30]%asi, %l6; \
\
	ldxa [%sp+BIAS+0x38]%asi, %l7; \
	ldxa [%sp+BIAS+0x40]%asi, %i0; \
	ldxa [%sp+BIAS+0x48]%asi, %i1; \
	ldxa [%sp+BIAS+0x50]%asi, %i2; \
	ldxa [%sp+BIAS+0x58]%asi, %i3; \
	ldxa [%sp+BIAS+0x60]%asi, %i4; \
	ldxa [%sp+BIAS+0x68]%asi, %i5; \
	ldxa [%sp+BIAS+0x70]%asi, %i6; \
\
	ldxa [%sp+BIAS+0x78]%asi, %i7; \
	restored; \
	CLRTT; \
	retry; \
	NOTREACHED; \
	TA32
633 | | | 633 | |
/* fill a 32-bit register window */
/*
 * Inverse of SPILL32: truncate %sp to 32 bits, then reload the
 * sixteen registers as 32-bit words from %sp with no BIAS.
 */
#define FILL32(label,as) \
	TRACEWIN; \
label: \
	wr %g0, as, %asi; \
	srl %sp, 0, %sp; /* fixup 32-bit pointers */ \
	lda [%sp+0x00]%asi, %l0; \
	lda [%sp+0x04]%asi, %l1; \
	lda [%sp+0x08]%asi, %l2; \
	lda [%sp+0x0c]%asi, %l3; \
	lda [%sp+0x10]%asi, %l4; \
	lda [%sp+0x14]%asi, %l5; \
\
	lda [%sp+0x18]%asi, %l6; \
	lda [%sp+0x1c]%asi, %l7; \
	lda [%sp+0x20]%asi, %i0; \
	lda [%sp+0x24]%asi, %i1; \
	lda [%sp+0x28]%asi, %i2; \
	lda [%sp+0x2c]%asi, %i3; \
	lda [%sp+0x30]%asi, %i4; \
	lda [%sp+0x34]%asi, %i5; \
\
	lda [%sp+0x38]%asi, %i6; \
	lda [%sp+0x3c]%asi, %i7; \
	restored; \
	CLRTT; \
	retry; \
	NOTREACHED; \
	TA32
663 | | | 663 | |
/* fill either 32-bit or 64-bit register window. */
/*
 * As SPILLBOTH, but for fills.  %i0 is usable as scratch for the
 * stack-type test because the fill reloads it from the stack
 * before returning.
 */
#define FILLBOTH(label64,label32,as) \
	TRACEWIN; \
	andcc %sp, 1, %i0; \
	bnz (label64)+4; /* See if it's a v9 stack or v8 */ \
	 wr %g0, as, %asi; \
	ba (label32)+8; \
	 srl %sp, 0, %sp; /* fixup 32-bit pointers */ \
	NOTREACHED; \
	TA32
674 | | | 674 | |
	.globl	start, _C_LABEL(kernel_text)
	_C_LABEL(kernel_text) = kernel_start		! for kvm_mkdb(8)
kernel_start:
	/* Traps from TL=0 -- traps from user mode */
/* TABLE(name) builds the label user_`name' for the TL=0 trap tables */
#ifdef __STDC__
#define	TABLE(name)	user_ ## name
#else
#define	TABLE(name)	user_/**/name
#endif
	.globl	_C_LABEL(trapbase)
_C_LABEL(trapbase):
	b dostart; nop; TA8	! 000 = reserved -- Use it to boot
	/* We should not get the next 5 traps */
	UTRAP(0x001)		! 001 = POR Reset -- ROM should get this
	UTRAP(0x002)		! 002 = WDR -- ROM should get this
	UTRAP(0x003)		! 003 = XIR -- ROM should get this
	UTRAP(0x004)		! 004 = SIR -- ROM should get this
	UTRAP(0x005)		! 005 = RED state exception
	UTRAP(0x006); UTRAP(0x007)
	VTRAP(T_INST_EXCEPT, textfault)	! 008 = instr. access except
	VTRAP(T_TEXTFAULT, textfault)	! 009 = instr access MMU miss
	VTRAP(T_INST_ERROR, textfault)	! 00a = instr. access err
	UTRAP(0x00b); UTRAP(0x00c); UTRAP(0x00d); UTRAP(0x00e); UTRAP(0x00f)
	TRAP(T_ILLINST)			! 010 = illegal instruction
	TRAP(T_PRIVINST)		! 011 = privileged instruction
	UTRAP(0x012)			! 012 = unimplemented LDD
	UTRAP(0x013)			! 013 = unimplemented STD
	UTRAP(0x014); UTRAP(0x015); UTRAP(0x016); UTRAP(0x017); UTRAP(0x018)
	UTRAP(0x019); UTRAP(0x01a); UTRAP(0x01b); UTRAP(0x01c); UTRAP(0x01d)
	UTRAP(0x01e); UTRAP(0x01f)
	TRAP(T_FPDISABLED)		! 020 = fp instr, but EF bit off in psr
	TRAP(T_FP_IEEE_754)		! 021 = ieee 754 exception
	TRAP(T_FP_OTHER)		! 022 = other fp exception
	TRAP(T_TAGOF)			! 023 = tag overflow
	/*
	 * 024-027 = clean window trap: zero (or, under DEBUG, poison)
	 * the new window's %l and %o registers and bump %cleanwin.
	 * Handled entirely in-line; must not fault.
	 */
	TRACEWIN			! DEBUG -- 4 insns
	rdpr %cleanwin, %o7		! 024-027 = clean window trap
	inc %o7				! This handler is in-lined and cannot fault
#ifdef DEBUG
	set	0xbadcafe, %l0		! DEBUG -- compiler should not rely on zero-ed registers.
#else
	clr	%l0
#endif
	wrpr %g0, %o7, %cleanwin	! Nucleus (trap&IRQ) code does not need clean windows

	mov %l0,%l1; mov %l0,%l2	! Clear out %l0-%l7 and %o0-%o7 and inc %cleanwin and done
	mov %l0,%l3; mov %l0,%l4
#if 0
#ifdef DIAGNOSTIC
	!!
	!! Check the sp redzone
	!!
	!! Since we can't spill the current window, we'll just keep
	!! track of the frame pointer.  Problems occur when the routine
	!! allocates and uses stack storage.
	!!
!	rdpr	%wstate, %l5	! User stack?
!	cmp	%l5, WSTATE_KERN
!	bne,pt	%icc, 7f
	 sethi	%hi(CPCB), %l5
	LDPTR	[%l5 + %lo(CPCB)], %l5	! If pcb < fp < pcb+sizeof(pcb)
	inc	PCB_SIZE, %l5		! then we have a stack overflow
	btst	%fp, 1			! 64-bit stack?
	 sub	%fp, %l5, %l7
	bnz,a,pt	%icc, 1f
	 inc	BIAS, %l7		! Remove BIAS
1:
	cmp	%l7, PCB_SIZE
	blu	%xcc, cleanwin_overflow
#endif
#endif
	mov %l0, %l5
	mov %l0, %l6; mov %l0, %l7; mov %l0, %o0; mov %l0, %o1

	mov %l0, %o2; mov %l0, %o3; mov %l0, %o4; mov %l0, %o5;
	mov %l0, %o6; mov %l0, %o7
	CLRTT
	retry; nop; NOTREACHED; TA32
	TRAP(T_DIV0)			! 028 = divide by zero
	UTRAP(0x029)			! 029 = internal processor error
	UTRAP(0x02a); UTRAP(0x02b); UTRAP(0x02c); UTRAP(0x02d); UTRAP(0x02e); UTRAP(0x02f)
	VTRAP(T_DATAFAULT, winfault)	! 030 = data fetch fault
	UTRAP(0x031)			! 031 = data MMU miss -- no MMU
	VTRAP(T_DATA_ERROR, winfault)	! 032 = data access error
	VTRAP(T_DATA_PROT, winfault)	! 033 = data protection fault
	TRAP(T_ALIGN)			! 034 = address alignment error -- we could fix it inline...
	TRAP(T_LDDF_ALIGN)		! 035 = LDDF address alignment error -- we could fix it inline...
	TRAP(T_STDF_ALIGN)		! 036 = STDF address alignment error -- we could fix it inline...
	TRAP(T_PRIVACT)			! 037 = privileged action
	UTRAP(0x038); UTRAP(0x039); UTRAP(0x03a); UTRAP(0x03b); UTRAP(0x03c);
	UTRAP(0x03d); UTRAP(0x03e); UTRAP(0x03f);
	VTRAP(T_ASYNC_ERROR, winfault)	! 040 = data fetch fault
	/* 041-04f: interrupt levels 1-15; see HARDINT4U/SOFTINT4U above */
	SOFTINT4U(1, IE_L1)		! 041 = level 1 interrupt
	HARDINT4U(2)			! 042 = level 2 interrupt
	HARDINT4U(3)			! 043 = level 3 interrupt
	SOFTINT4U(4, IE_L4)		! 044 = level 4 interrupt
	HARDINT4U(5)			! 045 = level 5 interrupt
	SOFTINT4U(6, IE_L6)		! 046 = level 6 interrupt
	HARDINT4U(7)			! 047 = level 7 interrupt
	HARDINT4U(8)			! 048 = level 8 interrupt
	HARDINT4U(9)			! 049 = level 9 interrupt
	HARDINT4U(10)			! 04a = level 10 interrupt
	HARDINT4U(11)			! 04b = level 11 interrupt
	ZS_INTERRUPT4U			! 04c = level 12 (zs) interrupt
	HARDINT4U(13)			! 04d = level 13 interrupt
	HARDINT4U(14)			! 04e = level 14 interrupt
	HARDINT4U(15)			! 04f = nonmaskable interrupt
	UTRAP(0x050); UTRAP(0x051); UTRAP(0x052); UTRAP(0x053); UTRAP(0x054); UTRAP(0x055)
	UTRAP(0x056); UTRAP(0x057); UTRAP(0x058); UTRAP(0x059); UTRAP(0x05a); UTRAP(0x05b)
	UTRAP(0x05c); UTRAP(0x05d); UTRAP(0x05e); UTRAP(0x05f)
	VTRAP(0x060, interrupt_vector); ! 060 = interrupt vector
	TRAP(T_PA_WATCHPT)		! 061 = physical address data watchpoint
	TRAP(T_VA_WATCHPT)		! 062 = virtual address data watchpoint
	UTRAP(T_ECCERR)			! We'll implement this one later
/*
 * 064 = fast instruction access MMU miss.
 * Look the faulting VA up in the 8K TSB; on a tag match load the
 * TTE data straight into the ITLB and retry, otherwise punt to
 * the slow path (instr_miss).
 */
ufast_IMMU_miss:			! 064 = fast instr access MMU miss
	TRACEFLT			! DEBUG
	ldxa	[%g0] ASI_IMMU_8KPTR, %g2 ! Load IMMU 8K TSB pointer
#ifdef NO_TSB
	ba,a	%icc, instr_miss
#endif
	ldxa	[%g0] ASI_IMMU, %g1	! Load IMMU tag target register
	ldda	[%g2] ASI_NUCLEUS_QUAD_LDD, %g4	! Load TSB tag:data into %g4:%g5
	brgez,pn %g5, instr_miss	! Entry invalid?  Punt
	 cmp	%g1, %g4		! Compare TLB tags
	bne,pn	%xcc, instr_miss	! Got right tag?
	 nop
	CLRTT
	stxa	%g5, [%g0] ASI_IMMU_DATA_IN ! Enter new mapping
	retry				! Try new mapping
	/* NOTE(review): label 1: below appears unreferenced unless
	   TRACEFLT expands; the sir pads the slot -- confirm */
1:
	sir
	TA32
/*
 * 068 = fast data access MMU miss.
 * Same scheme as ufast_IMMU_miss, but for the DTLB: probe the
 * 8K TSB, and on a tag match enter the TTE into the DTLB and
 * retry; otherwise punt to data_miss.
 */
ufast_DMMU_miss:			! 068 = fast data access MMU miss
	TRACEFLT			! DEBUG
	ldxa	[%g0] ASI_DMMU_8KPTR, %g2! Load DMMU 8K TSB pointer

#ifdef NO_TSB
	ba,a	%icc, data_miss
#endif
	ldxa	[%g0] ASI_DMMU, %g1	! Load DMMU tag target register
	ldda	[%g2] ASI_NUCLEUS_QUAD_LDD, %g4	! Load TSB tag and data into %g4 and %g5
	brgez,pn %g5, data_miss		! Entry invalid?  Punt
	 cmp	%g1, %g4		! Compare TLB tags
	bnz,pn	%xcc, data_miss		! Got right tag?
	 nop
	CLRTT
#ifdef TRAPSTATS
	sethi	%hi(_C_LABEL(udhit)), %g1
	lduw	[%g1+%lo(_C_LABEL(udhit))], %g2
	inc	%g2
	stw	%g2, [%g1+%lo(_C_LABEL(udhit))]
#endif
	stxa	%g5, [%g0] ASI_DMMU_DATA_IN ! Enter new mapping
	retry				! Try new mapping
1:
	sir
	TA32
/*
 * 06c = fast data access MMU protection.
 * No in-line fast path here: optionally bump the TRAPSTATS
 * counter, then branch to dmmu_write_fault (HWREF kernels,
 * which track the modified bit in software) or winfault.
 */
ufast_DMMU_protection:			! 06c = fast data access MMU protection
	TRACEFLT			! DEBUG -- we're perilously close to 32 insns
#ifdef TRAPSTATS
	sethi	%hi(_C_LABEL(udprot)), %g1
	lduw	[%g1+%lo(_C_LABEL(udprot))], %g2
	inc	%g2
	stw	%g2, [%g1+%lo(_C_LABEL(udprot))]
#endif
#ifdef HWREF
	ba,a,pt	%xcc, dmmu_write_fault
#else
	ba,a,pt	%xcc, winfault
#endif
	nop
	TA32
	UTRAP(0x070)			! Implementation dependent traps
	UTRAP(0x071); UTRAP(0x072); UTRAP(0x073); UTRAP(0x074); UTRAP(0x075); UTRAP(0x076)
	UTRAP(0x077); UTRAP(0x078); UTRAP(0x079); UTRAP(0x07a); UTRAP(0x07b); UTRAP(0x07c)
	UTRAP(0x07d); UTRAP(0x07e); UTRAP(0x07f)
850 | TABLE(uspill): | | 850 | TABLE(uspill): |
851 | SPILL64(uspill8,ASI_AIUS) ! 0x080 spill_0_normal -- used to save user windows in user mode | | 851 | SPILL64(uspill8,ASI_AIUS) ! 0x080 spill_0_normal -- used to save user windows in user mode |
852 | SPILL32(uspill4,ASI_AIUS) ! 0x084 spill_1_normal | | 852 | SPILL32(uspill4,ASI_AIUS) ! 0x084 spill_1_normal |
853 | SPILLBOTH(uspill8,uspill4,ASI_AIUS) ! 0x088 spill_2_normal | | 853 | SPILLBOTH(uspill8,uspill4,ASI_AIUS) ! 0x088 spill_2_normal |
854 | UTRAP(0x08c); TA32 ! 0x08c spill_3_normal | | 854 | UTRAP(0x08c); TA32 ! 0x08c spill_3_normal |
TABLE(kspill):
	!! Spill vectors 0x090-0x09f: save supervisor (kernel) windows.
	!! Kernel stacks are in the nucleus context, hence ASI_N.
	SPILL64(kspill8,ASI_N)			! 0x090 spill_4_normal -- used to save supervisor windows
	SPILL32(kspill4,ASI_N)			! 0x094 spill_5_normal
	SPILLBOTH(kspill8,kspill4,ASI_N)	! 0x098 spill_6_normal
	UTRAP(0x09c); TA32			! 0x09c spill_7_normal
TABLE(uspillk):
	!! "Other" spill vectors 0x0a0-0x0bf: save user windows while the CPU
	!! is already in supervisor mode (wstate OTHER field); user-space
	!! stores via ASI_AIUS.  Slots 0x0b0-0x0bf are unused.
	SPILL64(uspillk8,ASI_AIUS)		! 0x0a0 spill_0_other -- used to save user windows in supervisor mode
	SPILL32(uspillk4,ASI_AIUS)		! 0x0a4 spill_1_other
	SPILLBOTH(uspillk8,uspillk4,ASI_AIUS)	! 0x0a8 spill_2_other
	UTRAP(0x0ac); TA32			! 0x0ac spill_3_other
	UTRAP(0x0b0); TA32			! 0x0b0 spill_4_other
	UTRAP(0x0b4); TA32			! 0x0b4 spill_5_other
	UTRAP(0x0b8); TA32			! 0x0b8 spill_6_other
	UTRAP(0x0bc); TA32			! 0x0bc spill_7_other
TABLE(ufill):
	!! Fill vectors 0x0c0-0x0cf: restore a user register window on
	!! window underflow while running in user mode (loads via ASI_AIUS).
	FILL64(ufill8,ASI_AIUS)			! 0x0c0 fill_0_normal -- used to fill windows when running user mode
	FILL32(ufill4,ASI_AIUS)			! 0x0c4 fill_1_normal
	FILLBOTH(ufill8,ufill4,ASI_AIUS)	! 0x0c8 fill_2_normal
	UTRAP(0x0cc); TA32			! 0x0cc fill_3_normal
TABLE(kfill):
	!! Fill vectors 0x0d0-0x0df: restore supervisor windows (nucleus
	!! context, ASI_N).
	FILL64(kfill8,ASI_N)			! 0x0d0 fill_4_normal -- used to fill windows when running supervisor mode
	FILL32(kfill4,ASI_N)			! 0x0d4 fill_5_normal
	FILLBOTH(kfill8,kfill4,ASI_N)		! 0x0d8 fill_6_normal
	UTRAP(0x0dc); TA32			! 0x0dc fill_7_normal
TABLE(ufillk):
	!! "Other" fill vectors 0x0e0-0x0ff: restore user windows from
	!! supervisor mode (ASI_AIUS).  Slots 0x0f0-0x0ff are unused.
	FILL64(ufillk8,ASI_AIUS)		! 0x0e0 fill_0_other
	FILL32(ufillk4,ASI_AIUS)		! 0x0e4 fill_1_other
	FILLBOTH(ufillk8,ufillk4,ASI_AIUS)	! 0x0e8 fill_2_other
	UTRAP(0x0ec); TA32			! 0x0ec fill_3_other
	UTRAP(0x0f0); TA32			! 0x0f0 fill_4_other
	UTRAP(0x0f4); TA32			! 0x0f4 fill_5_other
	UTRAP(0x0f8); TA32			! 0x0f8 fill_6_other
	UTRAP(0x0fc); TA32			! 0x0fc fill_7_other
TABLE(syscall):
	!! Software trap vectors 0x100-0x17f (ta instructions).  Only the
	!! handful of ABI entry points below are wired up; every other slot
	!! is an STRAP (software trap) that faults the caller.
	SYSCALL			! 0x100 = sun syscall
	BPT			! 0x101 = pseudo breakpoint instruction
	STRAP(0x102); STRAP(0x103); STRAP(0x104); STRAP(0x105); STRAP(0x106); STRAP(0x107)
	SYSCALL			! 0x108 = svr4 syscall
	SYSCALL			! 0x109 = bsd syscall
	BPT_KGDB_EXEC		! 0x10a = enter kernel gdb on kernel startup
	STRAP(0x10b); STRAP(0x10c); STRAP(0x10d); STRAP(0x10e); STRAP(0x10f);
	STRAP(0x110); STRAP(0x111); STRAP(0x112); STRAP(0x113); STRAP(0x114); STRAP(0x115); STRAP(0x116); STRAP(0x117)
	STRAP(0x118); STRAP(0x119); STRAP(0x11a); STRAP(0x11b); STRAP(0x11c); STRAP(0x11d); STRAP(0x11e); STRAP(0x11f)
	STRAP(0x120); STRAP(0x121); STRAP(0x122); STRAP(0x123); STRAP(0x124); STRAP(0x125); STRAP(0x126); STRAP(0x127)
	STRAP(0x128); STRAP(0x129); STRAP(0x12a); STRAP(0x12b); STRAP(0x12c); STRAP(0x12d); STRAP(0x12e); STRAP(0x12f)
	STRAP(0x130); STRAP(0x131); STRAP(0x132); STRAP(0x133); STRAP(0x134); STRAP(0x135); STRAP(0x136); STRAP(0x137)
	STRAP(0x138); STRAP(0x139); STRAP(0x13a); STRAP(0x13b); STRAP(0x13c); STRAP(0x13d); STRAP(0x13e); STRAP(0x13f)
	SYSCALL			! 0x140 SVID syscall (Solaris 2.7)
	SYSCALL			! 0x141 SPARC International syscall
	SYSCALL			! 0x142 OS Vendor syscall
	SYSCALL			! 0x143 HW OEM syscall
	STRAP(0x144); STRAP(0x145); STRAP(0x146); STRAP(0x147)
	STRAP(0x148); STRAP(0x149); STRAP(0x14a); STRAP(0x14b); STRAP(0x14c); STRAP(0x14d); STRAP(0x14e); STRAP(0x14f)
	STRAP(0x150); STRAP(0x151); STRAP(0x152); STRAP(0x153); STRAP(0x154); STRAP(0x155); STRAP(0x156); STRAP(0x157)
	STRAP(0x158); STRAP(0x159); STRAP(0x15a); STRAP(0x15b); STRAP(0x15c); STRAP(0x15d); STRAP(0x15e); STRAP(0x15f)
	STRAP(0x160); STRAP(0x161); STRAP(0x162); STRAP(0x163); STRAP(0x164); STRAP(0x165); STRAP(0x166); STRAP(0x167)
	STRAP(0x168); STRAP(0x169); STRAP(0x16a); STRAP(0x16b); STRAP(0x16c); STRAP(0x16d); STRAP(0x16e); STRAP(0x16f)
	STRAP(0x170); STRAP(0x171); STRAP(0x172); STRAP(0x173); STRAP(0x174); STRAP(0x175); STRAP(0x176); STRAP(0x177)
	STRAP(0x178); STRAP(0x179); STRAP(0x17a); STRAP(0x17b); STRAP(0x17c); STRAP(0x17d); STRAP(0x17e); STRAP(0x17f)
	! Traps beyond 0x17f are reserved by the architecture; pad the
	! TL=0 table out to 0x1ff with unimplemented-trap vectors.
	UTRAP(0x180); UTRAP(0x181); UTRAP(0x182); UTRAP(0x183); UTRAP(0x184); UTRAP(0x185); UTRAP(0x186); UTRAP(0x187)
	UTRAP(0x188); UTRAP(0x189); UTRAP(0x18a); UTRAP(0x18b); UTRAP(0x18c); UTRAP(0x18d); UTRAP(0x18e); UTRAP(0x18f)
	UTRAP(0x190); UTRAP(0x191); UTRAP(0x192); UTRAP(0x193); UTRAP(0x194); UTRAP(0x195); UTRAP(0x196); UTRAP(0x197)
	UTRAP(0x198); UTRAP(0x199); UTRAP(0x19a); UTRAP(0x19b); UTRAP(0x19c); UTRAP(0x19d); UTRAP(0x19e); UTRAP(0x19f)
	UTRAP(0x1a0); UTRAP(0x1a1); UTRAP(0x1a2); UTRAP(0x1a3); UTRAP(0x1a4); UTRAP(0x1a5); UTRAP(0x1a6); UTRAP(0x1a7)
	UTRAP(0x1a8); UTRAP(0x1a9); UTRAP(0x1aa); UTRAP(0x1ab); UTRAP(0x1ac); UTRAP(0x1ad); UTRAP(0x1ae); UTRAP(0x1af)
	UTRAP(0x1b0); UTRAP(0x1b1); UTRAP(0x1b2); UTRAP(0x1b3); UTRAP(0x1b4); UTRAP(0x1b5); UTRAP(0x1b6); UTRAP(0x1b7)
	UTRAP(0x1b8); UTRAP(0x1b9); UTRAP(0x1ba); UTRAP(0x1bb); UTRAP(0x1bc); UTRAP(0x1bd); UTRAP(0x1be); UTRAP(0x1bf)
	UTRAP(0x1c0); UTRAP(0x1c1); UTRAP(0x1c2); UTRAP(0x1c3); UTRAP(0x1c4); UTRAP(0x1c5); UTRAP(0x1c6); UTRAP(0x1c7)
	UTRAP(0x1c8); UTRAP(0x1c9); UTRAP(0x1ca); UTRAP(0x1cb); UTRAP(0x1cc); UTRAP(0x1cd); UTRAP(0x1ce); UTRAP(0x1cf)
	UTRAP(0x1d0); UTRAP(0x1d1); UTRAP(0x1d2); UTRAP(0x1d3); UTRAP(0x1d4); UTRAP(0x1d5); UTRAP(0x1d6); UTRAP(0x1d7)
	UTRAP(0x1d8); UTRAP(0x1d9); UTRAP(0x1da); UTRAP(0x1db); UTRAP(0x1dc); UTRAP(0x1dd); UTRAP(0x1de); UTRAP(0x1df)
	UTRAP(0x1e0); UTRAP(0x1e1); UTRAP(0x1e2); UTRAP(0x1e3); UTRAP(0x1e4); UTRAP(0x1e5); UTRAP(0x1e6); UTRAP(0x1e7)
	UTRAP(0x1e8); UTRAP(0x1e9); UTRAP(0x1ea); UTRAP(0x1eb); UTRAP(0x1ec); UTRAP(0x1ed); UTRAP(0x1ee); UTRAP(0x1ef)
	UTRAP(0x1f0); UTRAP(0x1f1); UTRAP(0x1f2); UTRAP(0x1f3); UTRAP(0x1f4); UTRAP(0x1f5); UTRAP(0x1f6); UTRAP(0x1f7)
	UTRAP(0x1f8); UTRAP(0x1f9); UTRAP(0x1fa); UTRAP(0x1fb); UTRAP(0x1fc); UTRAP(0x1fd); UTRAP(0x1fe); UTRAP(0x1ff)
931 | | | 931 | |
/* Traps from TL>0 -- traps from supervisor mode */
/*
 * Redefine TABLE() so every label in the TL>0 trap table below is
 * prefixed with "nucleus_", keeping it distinct from the identically
 * named labels in the TL=0 table above.
 */
#undef TABLE
#ifdef __STDC__
#define	TABLE(name)	nucleus_ ## name
#else
#define	TABLE(name)	nucleus_/**/name
#endif
trapbase_priv:
	!! Trap table used for traps taken at TL>0 (nucleus context);
	!! same layout as the TL=0 table.
	UTRAP(0x000)		! 000 = reserved -- Use it to boot
	/* We should not get the next 5 traps */
	UTRAP(0x001)		! 001 = POR Reset -- ROM should get this
	UTRAP(0x002)		! 002 = WDR Watchdog -- ROM should get this
	UTRAP(0x003)		! 003 = XIR -- ROM should get this
	UTRAP(0x004)		! 004 = SIR -- ROM should get this
	UTRAP(0x005)		! 005 = RED state exception
	UTRAP(0x006); UTRAP(0x007)
ktextfault:
	!! Instruction-access faults taken from privileged mode; all three
	!! vector to the common textfault handler.
	VTRAP(T_INST_EXCEPT, textfault)	! 008 = instr. access except
	VTRAP(T_TEXTFAULT, textfault)	! 009 = instr access MMU miss -- no MMU
	VTRAP(T_INST_ERROR, textfault)	! 00a = instr. access err
	UTRAP(0x00b); UTRAP(0x00c); UTRAP(0x00d); UTRAP(0x00e); UTRAP(0x00f)
	TRAP(T_ILLINST)			! 010 = illegal instruction
	TRAP(T_PRIVINST)		! 011 = privileged instruction
	UTRAP(0x012)			! 012 = unimplemented LDD
	UTRAP(0x013)			! 013 = unimplemented STD
	UTRAP(0x014); UTRAP(0x015); UTRAP(0x016); UTRAP(0x017); UTRAP(0x018)
	UTRAP(0x019); UTRAP(0x01a); UTRAP(0x01b); UTRAP(0x01c); UTRAP(0x01d)
	UTRAP(0x01e); UTRAP(0x01f)
	TRAP(T_FPDISABLED)		! 020 = fp instr, but EF bit off in psr
	TRAP(T_FP_IEEE_754)		! 021 = ieee 754 exception
	TRAP(T_FP_OTHER)		! 022 = other fp exception
	TRAP(T_TAGOF)			! 023 = tag overflow
	!! 024-027 = clean window trap, handled entirely inline: zero
	!! %l0-%l7/%o0-%o7 (or fill with 0xbadbeef under DEBUG), bump
	!! %cleanwin, and retry.  Must not fault.
	TRACEWIN			! DEBUG
	clr	%l0
#ifdef DEBUG
	set	0xbadbeef, %l0		! DEBUG
#endif
	mov %l0, %l1; mov %l0, %l2	! 024-027 = clean window trap
	rdpr	%cleanwin, %o7		! This handler is in-lined and cannot fault
	inc	%o7; mov %l0, %l3	! Nucleus (trap&IRQ) code does not need clean windows
	wrpr	%g0, %o7, %cleanwin	! Clear out %l0-%l8 and %o0-%o8 and inc %cleanwin and done
#ifdef NOT_DEBUG
	!!
	!! Check the sp redzone
	!!
	rdpr	%wstate, t1
	cmp	t1, WSTATE_KERN
	bne,pt	icc, 7f
	 sethi	%hi(_C_LABEL(redzone)), t1
	ldx	[t1 + %lo(_C_LABEL(redzone))], t2
	cmp	%sp, t2			! if sp >= t2, not in red zone
	blu	panic_red		! and can continue normally
7:
#endif
	mov %l0, %l4; mov %l0, %l5; mov %l0, %l6; mov %l0, %l7
	mov %l0, %o0; mov %l0, %o1; mov %l0, %o2; mov %l0, %o3

	mov %l0, %o4; mov %l0, %o5; mov %l0, %o6; mov %l0, %o7
	CLRTT
	retry; nop; TA32
	TRAP(T_DIV0)			! 028 = divide by zero
	UTRAP(0x029)			! 029 = internal processor error
	UTRAP(0x02a); UTRAP(0x02b); UTRAP(0x02c); UTRAP(0x02d); UTRAP(0x02e); UTRAP(0x02f)
kdatafault:
	!! Data-access faults taken from privileged mode.
	VTRAP(T_DATAFAULT, winfault)	! 030 = data access exception
	UTRAP(0x031)			! 031 = data MMU miss -- no MMU
	VTRAP(T_DATA_ERROR, winfault)	! 032 = data access error
	VTRAP(T_DATA_PROT, winfault)	! 033 = data access protection
	VTRAP(T_ALIGN, checkalign)	! 034 = address alignment error -- we could fix it inline...
| @@ -2660,4201 +2660,4202 @@ Ldatafault_internal: | | | @@ -2660,4201 +2660,4202 @@ Ldatafault_internal: |
2660 | #endif | | 2660 | #endif |
2661 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT] | | 2661 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT] |
2662 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] ! set tf.tf_psr, tf.tf_pc | | 2662 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] ! set tf.tf_psr, tf.tf_pc |
2663 | stx %g2, [%sp + CC64FSZ + STKB + TF_PC] ! set tf.tf_npc | | 2663 | stx %g2, [%sp + CC64FSZ + STKB + TF_PC] ! set tf.tf_npc |
2664 | stx %g3, [%sp + CC64FSZ + STKB + TF_NPC] | | 2664 | stx %g3, [%sp + CC64FSZ + STKB + TF_NPC] |
2665 | | | 2665 | |
2666 | rdpr %pil, %g4 | | 2666 | rdpr %pil, %g4 |
2667 | stb %g4, [%sp + CC64FSZ + STKB + TF_PIL] | | 2667 | stb %g4, [%sp + CC64FSZ + STKB + TF_PIL] |
2668 | stb %g4, [%sp + CC64FSZ + STKB + TF_OLDPIL] | | 2668 | stb %g4, [%sp + CC64FSZ + STKB + TF_OLDPIL] |
2669 | | | 2669 | |
2670 | #if 1 | | 2670 | #if 1 |
2671 | rdpr %tl, %g7 | | 2671 | rdpr %tl, %g7 |
2672 | dec %g7 | | 2672 | dec %g7 |
2673 | movrlz %g7, %g0, %g7 | | 2673 | movrlz %g7, %g0, %g7 |
2674 | CHKPT(%g1,%g3,0x21) | | 2674 | CHKPT(%g1,%g3,0x21) |
2675 | wrpr %g0, %g7, %tl ! Revert to kernel mode | | 2675 | wrpr %g0, %g7, %tl ! Revert to kernel mode |
2676 | #else | | 2676 | #else |
2677 | CHKPT(%g1,%g3,0x21) | | 2677 | CHKPT(%g1,%g3,0x21) |
2678 | wrpr %g0, 0, %tl ! Revert to kernel mode | | 2678 | wrpr %g0, 0, %tl ! Revert to kernel mode |
2679 | #endif | | 2679 | #endif |
2680 | /* Finish stackframe, call C trap handler */ | | 2680 | /* Finish stackframe, call C trap handler */ |
2681 | flushw ! Get this clean so we won't take any more user faults | | 2681 | flushw ! Get this clean so we won't take any more user faults |
2682 | #ifdef NOTDEF_DEBUG | | 2682 | #ifdef NOTDEF_DEBUG |
2683 | set CPCB, %o7 | | 2683 | set CPCB, %o7 |
2684 | LDPTR [%o7], %o7 | | 2684 | LDPTR [%o7], %o7 |
2685 | ldub [%o7 + PCB_NSAVED], %o7 | | 2685 | ldub [%o7 + PCB_NSAVED], %o7 |
2686 | brz,pt %o7, 2f | | 2686 | brz,pt %o7, 2f |
2687 | nop | | 2687 | nop |
2688 | save %sp, -CC64FSZ, %sp | | 2688 | save %sp, -CC64FSZ, %sp |
2689 | set 1f, %o0 | | 2689 | set 1f, %o0 |
2690 | call printf | | 2690 | call printf |
2691 | mov %i7, %o1 | | 2691 | mov %i7, %o1 |
2692 | ta 1; nop | | 2692 | ta 1; nop |
2693 | restore | | 2693 | restore |
2694 | .data | | 2694 | .data |
2695 | 1: .asciz "datafault: nsaved = %d\n" | | 2695 | 1: .asciz "datafault: nsaved = %d\n" |
2696 | _ALIGN | | 2696 | _ALIGN |
2697 | .text | | 2697 | .text |
2698 | 2: | | 2698 | 2: |
2699 | #endif | | 2699 | #endif |
2700 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. | | 2700 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. |
2701 | !! In our case we need to clear it before calling any C-code | | 2701 | !! In our case we need to clear it before calling any C-code |
2702 | clr %g4 | | 2702 | clr %g4 |
2703 | | | 2703 | |
2704 | /* | | 2704 | /* |
2705 | * Right now the registers have the following values: | | 2705 | * Right now the registers have the following values: |
2706 | * | | 2706 | * |
2707 | * %o0 -- MMU_TAG_ACCESS | | 2707 | * %o0 -- MMU_TAG_ACCESS |
2708 | * %o1 -- TT | | 2708 | * %o1 -- TT |
2709 | * %o2 -- afar | | 2709 | * %o2 -- afar |
2710 | * %o3 -- afsr | | 2710 | * %o3 -- afsr |
2711 | * %o4 -- sfar | | 2711 | * %o4 -- sfar |
2712 | * %o5 -- sfsr | | 2712 | * %o5 -- sfsr |
2713 | */ | | 2713 | */ |
2714 | | | 2714 | |
2715 | cmp %o1, T_DATA_ERROR | | 2715 | cmp %o1, T_DATA_ERROR |
2716 | st %g5, [%sp + CC64FSZ + STKB + TF_Y] | | 2716 | st %g5, [%sp + CC64FSZ + STKB + TF_Y] |
2717 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI | | 2717 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI |
2718 | be,pn %icc, data_error | | 2718 | be,pn %icc, data_error |
2719 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts | | 2719 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts |
2720 | | | 2720 | |
2721 | mov %o0, %o3 ! (argument: trap address) | | 2721 | mov %o0, %o3 ! (argument: trap address) |
2722 | mov %g2, %o2 ! (argument: trap pc) | | 2722 | mov %g2, %o2 ! (argument: trap pc) |
2723 | call _C_LABEL(data_access_fault) ! data_access_fault(&tf, type, | | 2723 | call _C_LABEL(data_access_fault) ! data_access_fault(&tf, type, |
2724 | ! pc, addr, sfva, sfsr) | | 2724 | ! pc, addr, sfva, sfsr) |
2725 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) | | 2725 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) |
2726 | wrpr %g0, PSTATE_KERN, %pstate ! disable interrupts | | 2726 | wrpr %g0, PSTATE_KERN, %pstate ! disable interrupts |
2727 | | | 2727 | |
/*
 * Common exit path after calling data_access_fault() or
 * data_access_error(): reset optional trap statistics and branch to
 * return_from_trap with %g1 preloaded from the trap frame's tstate.
 */
data_recover:
	CHKPT(%o1,%o2,1)
#ifdef TRAPSTATS
	set	_C_LABEL(uintrcnt), %g1
	stw	%g0, [%g1]
	set	_C_LABEL(iveccnt), %g1
	stw	%g0, [%g1]
#endif
	b	return_from_trap	! go return
	 ldx	[%sp + CC64FSZ + STKB + TF_TSTATE], %g1	! Load this for return_from_trap
	NOTREACHED
2739 | | | 2739 | |
/*
 * T_DATA_ERROR path: hand the trap frame to the C handler, then rejoin
 * the common recovery path.  The &tf argument is set up in the call's
 * delay slot.
 */
data_error:
	call	_C_LABEL(data_access_error)	! data_access_error(&tf, type,
						!	afva, afsr, sfva, sfsr)
	 add	%sp, CC64FSZ + STKB, %o0	! (argument: &tf)
	ba	data_recover
	 nop
	NOTREACHED
2747 | | | 2747 | |
2748 | /* | | 2748 | /* |
2749 | * Each memory instruction access fault from a fast access handler comes here. | | 2749 | * Each memory instruction access fault from a fast access handler comes here. |
2750 | * We will quickly check if this is an original prom mapping before going | | 2750 | * We will quickly check if this is an original prom mapping before going |
2751 | * to the generic fault handler | | 2751 | * to the generic fault handler |
2752 | * | | 2752 | * |
2753 | * We will assume that %pil is not lost so we won't bother to save it | | 2753 | * We will assume that %pil is not lost so we won't bother to save it |
2754 | * unless we're in an interrupt handler. | | 2754 | * unless we're in an interrupt handler. |
2755 | * | | 2755 | * |
2756 | * On entry: | | 2756 | * On entry: |
2757 | * We are on one of the alternate set of globals | | 2757 | * We are on one of the alternate set of globals |
2758 | * %g1 = MMU tag target | | 2758 | * %g1 = MMU tag target |
2759 | * %g2 = TSB entry ptr | | 2759 | * %g2 = TSB entry ptr |
2760 | * %g3 = TLB Tag Access | | 2760 | * %g3 = TLB Tag Access |
2761 | * | | 2761 | * |
2762 | * On return: | | 2762 | * On return: |
2763 | * | | 2763 | * |
2764 | */ | | 2764 | */ |
2765 | | | 2765 | |
	ICACHE_ALIGN
instr_miss:
	!! Fast instruction-TLB miss handler: walk the 3-level software
	!! page table for the faulting context, verify the PTE is valid
	!! and executable, set its accessed bit (atomically), update the
	!! TSB entry, and load the translation into the ITLB.  Any
	!! failure along the way bails to the slow textfault path.
#ifdef TRAPSTATS
	set	_C_LABEL(ktmiss), %g3
	set	_C_LABEL(utmiss), %g4
	rdpr	%tl, %g6
	dec	%g6
	movrz	%g6, %g4, %g3		! pick kernel vs. user counter by %tl
	lduw	[%g3], %g4
	inc	%g4
	stw	%g4, [%g3]
#endif
	mov	TLB_TAG_ACCESS, %g3	! Get real fault page
	sethi	%hi(0x1fff), %g7	! 8K context mask
	ldxa	[%g3] ASI_IMMU, %g3	! from tag access register
	sethi	%hi(CPUINFO_VA+CI_CTXBUSY), %g4
	or	%g7, %lo(0x1fff), %g7
	LDPTR	[%g4 + %lo(CPUINFO_VA+CI_CTXBUSY)], %g4
	srax	%g3, HOLESHIFT, %g5	! Check for valid address
	and	%g3, %g7, %g6		! Isolate context
	sllx	%g6, 3, %g6		! Make it into an offset into ctxbusy
	inc	%g5			! (0 or -1) -> (1 or 0)
	ldx	[%g4+%g6], %g4		! Load up our page table.
#ifdef DEBUG
	/* Make sure we don't try to replace a kernel translation */
	/* This should not be necessary */
	brnz,pt	%g6, 1f			! If user context continue miss
	 sethi	%hi(KERNBASE), %g7	! Don't need %lo
	set	0x0800000, %g6		! 8MB
	sub	%g3, %g7, %g7
	cmp	%g7, %g6
	mov	6, %g6			! debug
	sethi	%hi(DATA_START), %g7
	stb	%g6, [%g7+0x30]		! debug
	tlu	%xcc, 1; nop
	blu,pn	%xcc, textfault		! Next insn in delay slot is unimportant
	 mov	7, %g6			! debug
	stb	%g6, [%g7+0x30]		! debug
1:
#endif
	srlx	%g3, STSHIFT, %g6	! segment-table index
	cmp	%g5, 1
	bgu,pn %xcc, textfault		! Error! -- address in the VA hole
	 srlx	%g3, PDSHIFT, %g5	! page-directory index
	and	%g6, STMASK, %g6
	sll	%g6, 3, %g6
	and	%g5, PDMASK, %g5
	nop

	sll	%g5, 3, %g5
	add	%g6, %g4, %g4
	ldxa	[%g4] ASI_PHYS_CACHED, %g4	! load page-directory pointer
	srlx	%g3, PTSHIFT, %g6	! Convert to ptab offset
	and	%g6, PTMASK, %g6
	add	%g5, %g4, %g5
	brz,pn	%g4, textfault		! NULL entry? check somewhere else
	 nop

	ldxa	[%g5] ASI_PHYS_CACHED, %g4	! load page-table pointer
	sll	%g6, 3, %g6
	brz,pn	%g4, textfault		! NULL entry? check somewhere else
	 add	%g6, %g4, %g6		! %g6 = physical address of the PTE
1:
	ldxa	[%g6] ASI_PHYS_CACHED, %g4	! load the PTE itself
	brgez,pn %g4, textfault		! invalid PTE (valid bit is the sign bit)
	 nop

	/* Check if it's an executable mapping. */
	andcc	%g4, TTE_EXEC, %g0
	bz,pn	%xcc, textfault
	 nop

	or	%g4, TTE_ACCESS, %g7	! Update accessed bit
	btst	TTE_ACCESS, %g4		! Need to update access bit?
	bne,pt	%xcc, 1f
	 nop
	casxa	[%g6] ASI_PHYS_CACHED, %g4, %g7	!  and store it atomically
	cmp	%g4, %g7		! lost the race? reload and retry
	bne,pn	%xcc, 1b
	 or	%g4, TTE_ACCESS, %g4	! Update accessed bit
1:
	stx	%g1, [%g2]		! Update TSB entry tag
	stx	%g4, [%g2+8]		! Update TSB entry data
#ifdef DEBUG
	set	DATA_START, %g6	! debug
	stx	%g3, [%g6+8]	! debug
	set	0xaa, %g3	! debug
	stx	%g4, [%g6]	! debug -- what we tried to enter in TLB
	stb	%g3, [%g6+0x20]	! debug
#endif
	stxa	%g4, [%g0] ASI_IMMU_DATA_IN ! Enter new mapping
	membar	#Sync
	CLRTT
	retry
	NOTREACHED
2861 | !! | | 2861 | !! |
2862 | !! Check our prom mappings -- temporary | | 2862 | !! Check our prom mappings -- temporary |
2863 | !! | | 2863 | !! |
2864 | | | 2864 | |
2865 | /* | | 2865 | /* |
2866 | * Each memory text access fault, from user or kernel mode, | | 2866 | * Each memory text access fault, from user or kernel mode, |
2867 | * comes here. | | 2867 | * comes here. |
2868 | * | | 2868 | * |
2869 | * We will assume that %pil is not lost so we won't bother to save it | | 2869 | * We will assume that %pil is not lost so we won't bother to save it |
2870 | * unless we're in an interrupt handler. | | 2870 | * unless we're in an interrupt handler. |
2871 | * | | 2871 | * |
2872 | * On entry: | | 2872 | * On entry: |
2873 | * We are on one of the alternate set of globals | | 2873 | * We are on one of the alternate set of globals |
2874 | * %g1 = MMU tag target | | 2874 | * %g1 = MMU tag target |
2875 | * %g2 = %tl | | 2875 | * %g2 = %tl |
2876 | * %g3 = %tl - 1 | | 2876 | * %g3 = %tl - 1 |
2877 | * | | 2877 | * |
2878 | * On return: | | 2878 | * On return: |
2879 | * | | 2879 | * |
2880 | */ | | 2880 | */ |
2881 | | | 2881 | |
2882 | textfault: | | 2882 | textfault: |
2883 | wrpr %g0, PSTATE_KERN|PSTATE_AG, %pstate ! We need to save volatile stuff to AG regs | | 2883 | wrpr %g0, PSTATE_KERN|PSTATE_AG, %pstate ! We need to save volatile stuff to AG regs |
2884 | #ifdef TRAPS_USE_IG | | 2884 | #ifdef TRAPS_USE_IG |
2885 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! We need to save volatile stuff to AG regs | | 2885 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! We need to save volatile stuff to AG regs |
2886 | #endif | | 2886 | #endif |
2887 | wr %g0, ASI_IMMU, %asi | | 2887 | wr %g0, ASI_IMMU, %asi |
2888 | ldxa [%g0 + TLB_TAG_ACCESS] %asi, %g1 ! Get fault address from tag access register | | 2888 | ldxa [%g0 + TLB_TAG_ACCESS] %asi, %g1 ! Get fault address from tag access register |
2889 | ldxa [SFSR] %asi, %g3 ! get sync fault status register | | 2889 | ldxa [SFSR] %asi, %g3 ! get sync fault status register |
2890 | membar #LoadStore | | 2890 | membar #LoadStore |
2891 | stxa %g0, [SFSR] %asi ! Clear out old info | | 2891 | stxa %g0, [SFSR] %asi ! Clear out old info |
2892 | | | 2892 | |
2893 | TRAP_SETUP(-CC64FSZ-TF_SIZE) | | 2893 | TRAP_SETUP(-CC64FSZ-TF_SIZE) |
2894 | INCR(_C_LABEL(uvmexp)+V_FAULTS) ! cnt.v_faults++ (clobbers %o0,%o1,%o2) | | 2894 | INCR(_C_LABEL(uvmexp)+V_FAULTS) ! cnt.v_faults++ (clobbers %o0,%o1,%o2) |
2895 | | | 2895 | |
2896 | mov %g3, %o3 | | 2896 | mov %g3, %o3 |
2897 | | | 2897 | |
2898 | wrpr %g0, PSTATE_KERN, %pstate ! Switch to normal globals | | 2898 | wrpr %g0, PSTATE_KERN, %pstate ! Switch to normal globals |
2899 | ldxa [%g0] ASI_AFSR, %o4 ! get async fault status | | 2899 | ldxa [%g0] ASI_AFSR, %o4 ! get async fault status |
2900 | ldxa [%g0] ASI_AFAR, %o5 ! get async fault address | | 2900 | ldxa [%g0] ASI_AFAR, %o5 ! get async fault address |
2901 | mov -1, %o0 | | 2901 | mov -1, %o0 |
2902 | stxa %o0, [%g0] ASI_AFSR ! Clear this out | | 2902 | stxa %o0, [%g0] ASI_AFSR ! Clear this out |
2903 | stx %g1, [%sp + CC64FSZ + STKB + TF_G + (1*8)] ! save g1 | | 2903 | stx %g1, [%sp + CC64FSZ + STKB + TF_G + (1*8)] ! save g1 |
2904 | stx %g2, [%sp + CC64FSZ + STKB + TF_G + (2*8)] ! save g2 | | 2904 | stx %g2, [%sp + CC64FSZ + STKB + TF_G + (2*8)] ! save g2 |
2905 | stx %g3, [%sp + CC64FSZ + STKB + TF_G + (3*8)] ! (sneak g3 in here) | | 2905 | stx %g3, [%sp + CC64FSZ + STKB + TF_G + (3*8)] ! (sneak g3 in here) |
2906 | rdpr %tt, %o1 ! Find out what caused this trap | | 2906 | rdpr %tt, %o1 ! Find out what caused this trap |
2907 | stx %g4, [%sp + CC64FSZ + STKB + TF_G + (4*8)] ! sneak in g4 | | 2907 | stx %g4, [%sp + CC64FSZ + STKB + TF_G + (4*8)] ! sneak in g4 |
2908 | rdpr %tstate, %g1 | | 2908 | rdpr %tstate, %g1 |
2909 | stx %g5, [%sp + CC64FSZ + STKB + TF_G + (5*8)] ! sneak in g5 | | 2909 | stx %g5, [%sp + CC64FSZ + STKB + TF_G + (5*8)] ! sneak in g5 |
2910 | rdpr %tpc, %o2 ! sync virt addr; must be read first | | 2910 | rdpr %tpc, %o2 ! sync virt addr; must be read first |
2911 | stx %g6, [%sp + CC64FSZ + STKB + TF_G + (6*8)] ! sneak in g6 | | 2911 | stx %g6, [%sp + CC64FSZ + STKB + TF_G + (6*8)] ! sneak in g6 |
2912 | rdpr %tnpc, %g3 | | 2912 | rdpr %tnpc, %g3 |
2913 | stx %g7, [%sp + CC64FSZ + STKB + TF_G + (7*8)] ! sneak in g7 | | 2913 | stx %g7, [%sp + CC64FSZ + STKB + TF_G + (7*8)] ! sneak in g7 |
2914 | rd %y, %g5 ! save y | | 2914 | rd %y, %g5 ! save y |
2915 | | | 2915 | |
2916 | /* Finish stackframe, call C trap handler */ | | 2916 | /* Finish stackframe, call C trap handler */ |
2917 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] ! set tf.tf_psr, tf.tf_pc | | 2917 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] ! set tf.tf_psr, tf.tf_pc |
2918 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT] ! debug | | 2918 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT] ! debug |
2919 | | | 2919 | |
2920 | stx %o2, [%sp + CC64FSZ + STKB + TF_PC] | | 2920 | stx %o2, [%sp + CC64FSZ + STKB + TF_PC] |
2921 | stx %g3, [%sp + CC64FSZ + STKB + TF_NPC] ! set tf.tf_npc | | 2921 | stx %g3, [%sp + CC64FSZ + STKB + TF_NPC] ! set tf.tf_npc |
2922 | | | 2922 | |
2923 | rdpr %pil, %g4 | | 2923 | rdpr %pil, %g4 |
2924 | stb %g4, [%sp + CC64FSZ + STKB + TF_PIL] | | 2924 | stb %g4, [%sp + CC64FSZ + STKB + TF_PIL] |
2925 | stb %g4, [%sp + CC64FSZ + STKB + TF_OLDPIL] | | 2925 | stb %g4, [%sp + CC64FSZ + STKB + TF_OLDPIL] |
2926 | | | 2926 | |
2927 | rdpr %tl, %g7 | | 2927 | rdpr %tl, %g7 |
2928 | dec %g7 | | 2928 | dec %g7 |
2929 | movrlz %g7, %g0, %g7 | | 2929 | movrlz %g7, %g0, %g7 |
2930 | CHKPT(%g1,%g3,0x22) | | 2930 | CHKPT(%g1,%g3,0x22) |
2931 | wrpr %g0, %g7, %tl ! Revert to kernel mode | | 2931 | wrpr %g0, %g7, %tl ! Revert to kernel mode |
2932 | | | 2932 | |
2933 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI | | 2933 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI |
2934 | flushw ! Get rid of any user windows so we don't deadlock | | 2934 | flushw ! Get rid of any user windows so we don't deadlock |
2935 | | | 2935 | |
2936 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. | | 2936 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. |
2937 | !! In our case we need to clear it before calling any C-code | | 2937 | !! In our case we need to clear it before calling any C-code |
2938 | clr %g4 | | 2938 | clr %g4 |
2939 | | | 2939 | |
2940 | /* Use trap type to see what handler to call */ | | 2940 | /* Use trap type to see what handler to call */ |
2941 | cmp %o1, T_INST_ERROR | | 2941 | cmp %o1, T_INST_ERROR |
2942 | be,pn %xcc, text_error | | 2942 | be,pn %xcc, text_error |
2943 | st %g5, [%sp + CC64FSZ + STKB + TF_Y] ! set tf.tf_y | | 2943 | st %g5, [%sp + CC64FSZ + STKB + TF_Y] ! set tf.tf_y |
2944 | | | 2944 | |
2945 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts | | 2945 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts |
2946 | call _C_LABEL(text_access_fault) ! mem_access_fault(&tf, type, pc, sfsr) | | 2946 | call _C_LABEL(text_access_fault) ! mem_access_fault(&tf, type, pc, sfsr) |
2947 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) | | 2947 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) |
2948 | text_recover: | | 2948 | text_recover: |
2949 | CHKPT(%o1,%o2,2) | | 2949 | CHKPT(%o1,%o2,2) |
2950 | wrpr %g0, PSTATE_KERN, %pstate ! disable interrupts | | 2950 | wrpr %g0, PSTATE_KERN, %pstate ! disable interrupts |
2951 | b return_from_trap ! go return | | 2951 | b return_from_trap ! go return |
2952 | ldx [%sp + CC64FSZ + STKB + TF_TSTATE], %g1 ! Load this for return_from_trap | | 2952 | ldx [%sp + CC64FSZ + STKB + TF_TSTATE], %g1 ! Load this for return_from_trap |
2953 | NOTREACHED | | 2953 | NOTREACHED |
2954 | | | 2954 | |
2955 | text_error: | | 2955 | text_error: |
2956 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts | | 2956 | wrpr %g0, PSTATE_INTR, %pstate ! reenable interrupts |
2957 | call _C_LABEL(text_access_error) ! mem_access_fault(&tfm type, sfva [pc], sfsr, | | 2957 | call _C_LABEL(text_access_error) ! mem_access_fault(&tfm type, sfva [pc], sfsr, |
2958 | ! afva, afsr); | | 2958 | ! afva, afsr); |
2959 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) | | 2959 | add %sp, CC64FSZ + STKB, %o0 ! (argument: &tf) |
2960 | ba text_recover | | 2960 | ba text_recover |
2961 | nop | | 2961 | nop |
2962 | NOTREACHED | | 2962 | NOTREACHED |
2963 | | | 2963 | |
2964 | /* | | 2964 | /* |
2965 | * We're here because we took an alignment fault in NUCLEUS context. | | 2965 | * We're here because we took an alignment fault in NUCLEUS context. |
2966 | * This could be a kernel bug or it could be due to saving a user | | 2966 | * This could be a kernel bug or it could be due to saving a user |
2967 | * window to an invalid stack pointer. | | 2967 | * window to an invalid stack pointer. |
2968 | * | | 2968 | * |
2969 | * If the latter is the case, we could try to emulate unaligned accesses, | | 2969 | * If the latter is the case, we could try to emulate unaligned accesses, |
2970 | * but we really don't know where to store the registers since we can't | | 2970 | * but we really don't know where to store the registers since we can't |
2971 | * determine if there's a stack bias. Or we could store all the regs | | 2971 | * determine if there's a stack bias. Or we could store all the regs |
2972 | * into the PCB and punt, until the user program uses up all the CPU's | | 2972 | * into the PCB and punt, until the user program uses up all the CPU's |
2973 | * register windows and we run out of places to store them. So for | | 2973 | * register windows and we run out of places to store them. So for |
2974 | * simplicity we'll just blow them away and enter the trap code which | | 2974 | * simplicity we'll just blow them away and enter the trap code which |
2975 | * will generate a bus error. Debugging the problem will be a bit | | 2975 | * will generate a bus error. Debugging the problem will be a bit |
2976 | * complicated since lots of register windows will be lost, but what | | 2976 | * complicated since lots of register windows will be lost, but what |
2977 | * can we do? | | 2977 | * can we do? |
2978 | */ | | 2978 | */ |
checkalign:
	!! Alignment fault in NUCLEUS context; see the block comment above.
	!! If the fault happened inside a window-spill handler the user gave
	!! us a bad stack pointer: demote the trap to T_ALIGN and fall into
	!! slowtrap.  Anything else also goes to slowtrap, unmodified.
	rdpr	%tl, %g2
	subcc	%g2, 1, %g1
	bneg,pn	%icc, slowtrap		! Huh?  %tl was 0; shouldn't be possible here
	sethi	%hi(CPCB), %g6		! get current pcb

	wrpr	%g1, 0, %tl		! Pop down to the trap level that faulted
	rdpr	%tt, %g7		! %tt/%tstate of the original (outer) trap
	rdpr	%tstate, %g4
	andn	%g7, 0x3f, %g5
	cmp	%g5, 0x080		! window spill traps are all 0b 0000 10xx xxxx
	bne,a,pn	%icc, slowtrap
	wrpr	%g1, 0, %tl		! Revert TL  XXX wrpr in a delay slot...

#ifdef DEBUG
	cmp	%g7, 0x34		! If we took a datafault just before this trap
	bne,pt	%icc, checkalignspill	! our stack's probably bad so we need to switch somewhere else
	nop

	!!
	!! Double data fault -- bad stack?
	!!
	wrpr	%g2, %tl		! Restore trap level.
	sir				! Just issue a reset and don't try to recover.
	!! NOTE(review): the sir above resets the CPU, so the recovery
	!! attempt below is dead code, kept only as a template.
	mov	%fp, %l6		! Save the frame pointer
	set	EINTSTACK+USPACE+CC64FSZ-STKB, %fp	! Set the frame pointer to the middle of the idle stack
	add	%fp, -CC64FSZ, %sp	! Create a stackframe
	wrpr	%g0, 15, %pil		! Disable interrupts, too
	wrpr	%g0, %g0, %canrestore	! Our stack is hozed and our PCB
	wrpr	%g0, 7, %cansave	! probably is too, so blow away
	ba	slowtrap		! all our register windows.
	wrpr	%g0, 0x101, %tt
#endif
checkalignspill:
	/*
	 * A window-spill handler took the alignment fault.
	 *
	 * %g1 -- current tl
	 * %g2 -- original tl
	 * %g4 -- tstate
	 * %g7 -- tt
	 */

	and	%g4, CWP, %g5
	wrpr	%g5, %cwp		! Go back to the original register win

	/*
	 * Recompute the window bookkeeping.  Remember:
	 *
	 * %otherwin = 0
	 * %cansave = NWINDOWS - 2 - %canrestore
	 */

	rdpr	%otherwin, %g6
	rdpr	%canrestore, %g3
	rdpr	%ver, %g5
	sub	%g3, %g6, %g3		! Calculate %canrestore - %otherwin
	and	%g5, CWP, %g5		! NWINDOWS-1 (from %ver.maxwin)
	movrlz	%g3, %g0, %g3		! Clamp at zero
	wrpr	%g0, 0, %otherwin
	wrpr	%g3, 0, %canrestore	! This is the new canrestore
	dec	%g5			! NWINDOWS-2
	wrpr	%g5, 0, %cleanwin	! Set cleanwin to max, since we're in-kernel
	sub	%g5, %g3, %g5		! NWINDOWS-2-%canrestore
	wrpr	%g5, 0, %cansave

	wrpr	%g0, T_ALIGN, %tt	! This was an alignment fault
	/*
	 * Now we need to determine if this was a userland store or not.
	 * Userland stores occur in anything other than the kernel spill
	 * handlers (trap type 09x).
	 */
	and	%g7, 0xff0, %g5		! mask off low nibble of the trap type
	cmp	%g5, 0x90		! kernel spill handlers are tt 0x090-0x09f
	bz,pn	%icc, slowtrap
	nop
	bclr	TSTATE_PRIV, %g4	! user spill: report the fault in user mode
	wrpr	%g4, 0, %tstate
	ba,a,pt	%icc, slowtrap
	nop
3057 | | | 3057 | |
3058 | /* | | 3058 | /* |
3059 | * slowtrap() builds a trap frame and calls trap(). | | 3059 | * slowtrap() builds a trap frame and calls trap(). |
3060 | * This is called `slowtrap' because it *is*.... | | 3060 | * This is called `slowtrap' because it *is*.... |
3061 | * We have to build a full frame for ptrace(), for instance. | | 3061 | * We have to build a full frame for ptrace(), for instance. |
3062 | * | | 3062 | * |
3063 | * Registers: | | 3063 | * Registers: |
3064 | * | | 3064 | * |
3065 | */ | | 3065 | */ |
slowtrap:
	!! Generic trap entry: build a full trapframe and call trap() in C.
	!! Entered on the alternate globals; see the block comment above.
#ifdef TRAPS_USE_IG
	wrpr	%g0, PSTATE_KERN|PSTATE_IG, %pstate	! DEBUG
#endif
#ifdef DIAGNOSTIC
	/* Make sure kernel stack is aligned */
	btst	0x03, %sp		! 32-bit stack OK?
	and	%sp, 0x07, %g4		! 64-bit stack OK?
	bz,pt	%icc, 1f
	cmp	%g4, 0x1		! Must end in 0b001
	be,pt	%icc, 1f
	rdpr	%wstate, %g7
	cmp	%g7, WSTATE_KERN
	bnz,pt	%icc, 1f		! User stack -- we'll blow it away
	nop
	!! Misaligned kernel stack: switch to the panic stack so we can
	!! still get into trap() and report the problem.
	sethi	%hi(PANICSTACK), %sp
	LDPTR	[%sp + %lo(PANICSTACK)], %sp
	add	%sp, -CC64FSZ-STKB, %sp
1:
#endif
	!! Snapshot the trap state registers before TRAP_SETUP runs.
	rdpr	%tt, %g4
	rdpr	%tstate, %g1
	rdpr	%tpc, %g2
	rdpr	%tnpc, %g3

	TRAP_SETUP(-CC64FSZ-TF_SIZE)
Lslowtrap_reenter:
	!! Re-entry point (used by softtrap) with %g1=tstate, %g2=tpc,
	!! %g3=tnpc, %g4=tt already loaded and a trapframe allocated.
	stx	%g1, [%sp + CC64FSZ + STKB + TF_TSTATE]
	mov	%g4, %o1		! (type)
	stx	%g2, [%sp + CC64FSZ + STKB + TF_PC]
	rd	%y, %g5
	stx	%g3, [%sp + CC64FSZ + STKB + TF_NPC]
	mov	%g1, %o3		! (pstate) -- actually the saved %tstate
	st	%g5, [%sp + CC64FSZ + STKB + TF_Y]
	mov	%g2, %o2		! (pc)
	sth	%o1, [%sp + CC64FSZ + STKB + TF_TT]! debug

	wrpr	%g0, PSTATE_KERN, %pstate	! Get back to normal globals
	!! Save the normal globals into the trapframe.
	stx	%g1, [%sp + CC64FSZ + STKB + TF_G + (1*8)]
	stx	%g2, [%sp + CC64FSZ + STKB + TF_G + (2*8)]
	add	%sp, CC64FSZ + STKB, %o0	! (&tf)
	stx	%g3, [%sp + CC64FSZ + STKB + TF_G + (3*8)]
	stx	%g4, [%sp + CC64FSZ + STKB + TF_G + (4*8)]
	stx	%g5, [%sp + CC64FSZ + STKB + TF_G + (5*8)]
	rdpr	%pil, %g5
	stx	%g6, [%sp + CC64FSZ + STKB + TF_G + (6*8)]
	stx	%g7, [%sp + CC64FSZ + STKB + TF_G + (7*8)]
	stb	%g5, [%sp + CC64FSZ + STKB + TF_PIL]
	stb	%g5, [%sp + CC64FSZ + STKB + TF_OLDPIL]
	/*
	 * Phew, ready to enable traps and call C code.
	 */
	rdpr	%tl, %g1
	dec	%g1			! %tl - 1, clamped at zero below
	movrlz	%g1, %g0, %g1
	CHKPT(%g2,%g3,0x24)
	wrpr	%g0, %g1, %tl		! Revert to kernel mode
	!! In the EMBEDANY memory model %g4 points to the start of the data segment.
	!! In our case we need to clear it before calling any C-code
	clr	%g4

	wr	%g0, ASI_PRIMARY_NOFAULT, %asi	! Restore default ASI
	wrpr	%g0, PSTATE_INTR, %pstate	! traps on again
	call	_C_LABEL(trap)			! trap(tf, type, pc, pstate)
	nop

	CHKPT(%o1,%o2,3)
	ba,a,pt	%icc, return_from_trap
	nop
	NOTREACHED
3136 | #if 1 | | 3136 | #if 1 |
3137 | /* | | 3137 | /* |
3138 | * This code is no longer needed. | | 3138 | * This code is no longer needed. |
3139 | */ | | 3139 | */ |
3140 | /* | | 3140 | /* |
3141 | * Do a `software' trap by re-entering the trap code, possibly first | | 3141 | * Do a `software' trap by re-entering the trap code, possibly first |
3142 | * switching from interrupt stack to kernel stack. This is used for | | 3142 | * switching from interrupt stack to kernel stack. This is used for |
3143 | * scheduling and signal ASTs (which generally occur from softclock or | | 3143 | * scheduling and signal ASTs (which generally occur from softclock or |
3144 | * tty or net interrupts). | | 3144 | * tty or net interrupts). |
3145 | * | | 3145 | * |
3146 | * We enter with the trap type in %g1. All we have to do is jump to | | 3146 | * We enter with the trap type in %g1. All we have to do is jump to |
3147 | * Lslowtrap_reenter above, but maybe after switching stacks.... | | 3147 | * Lslowtrap_reenter above, but maybe after switching stacks.... |
3148 | * | | 3148 | * |
3149 | * We should be running alternate globals. The normal globals and | | 3149 | * We should be running alternate globals. The normal globals and |
3150 | * out registers were just loaded from the old trap frame. | | 3150 | * out registers were just loaded from the old trap frame. |
3151 | * | | 3151 | * |
3152 | * Input Params: | | 3152 | * Input Params: |
3153 | * %g1 = tstate | | 3153 | * %g1 = tstate |
3154 | * %g2 = tpc | | 3154 | * %g2 = tpc |
3155 | * %g3 = tnpc | | 3155 | * %g3 = tnpc |
3156 | * %g4 = tt == T_AST | | 3156 | * %g4 = tt == T_AST |
3157 | */ | | 3157 | */ |
softtrap:
	!! Software trap re-entry (T_AST etc.); see the block comment above.
	!! If %sp currently lies within the interrupt stack, first copy the
	!! trapframe onto the process's kernel stack (via CPCB) before
	!! re-entering the slow trap path at Lslowtrap_reenter.
	sethi	%hi(EINTSTACK-STKB), %g5
	sethi	%hi(EINTSTACK-INTSTACK), %g7
	or	%g5, %lo(EINTSTACK-STKB), %g5
	dec	%g7				! mask = interrupt stack size - 1
	sub	%g5, %sp, %g5			! distance of %sp below the interrupt stack top
	sethi	%hi(CPCB), %g6
	andncc	%g5, %g7, %g0			! zero iff %sp is inside the interrupt stack
	bnz,pt	%xcc, Lslowtrap_reenter		! not on the interrupt stack -- nothing to copy
	LDPTR	[%g6 + %lo(CPCB)], %g7
	set	USPACE-CC64FSZ-TF_SIZE-STKB, %g5
	add	%g7, %g5, %g6			! %g6 = new trapframe area on the kernel stack
	SET_SP_REDZONE(%g7, %g5)
#ifdef DEBUG
	stx	%g1, [%g6 + CC64FSZ + STKB + TF_FAULT]		! Generate a new trapframe
#endif
	!! NOTE(review): our %i registers here appear to hold the trapped
	!! frame's outs, hence the stores into the TF_O slots -- confirm
	!! against the register-window layout before changing.
	stx	%i0, [%g6 + CC64FSZ + STKB + TF_O + (0*8)]	! but don't bother with
	stx	%i1, [%g6 + CC64FSZ + STKB + TF_O + (1*8)]	! locals and ins
	stx	%i2, [%g6 + CC64FSZ + STKB + TF_O + (2*8)]
	stx	%i3, [%g6 + CC64FSZ + STKB + TF_O + (3*8)]
	stx	%i4, [%g6 + CC64FSZ + STKB + TF_O + (4*8)]
	stx	%i5, [%g6 + CC64FSZ + STKB + TF_O + (5*8)]
	stx	%i6, [%g6 + CC64FSZ + STKB + TF_O + (6*8)]
	stx	%i7, [%g6 + CC64FSZ + STKB + TF_O + (7*8)]
#ifdef DEBUG
	!! Under DEBUG also copy the old frame's ins and locals across,
	!! staging them through the (expendable) local registers.
	ldx	[%sp + CC64FSZ + STKB + TF_I + (0*8)], %l0	! Copy over the rest of the regs
	ldx	[%sp + CC64FSZ + STKB + TF_I + (1*8)], %l1	! But just dirty the locals
	ldx	[%sp + CC64FSZ + STKB + TF_I + (2*8)], %l2
	ldx	[%sp + CC64FSZ + STKB + TF_I + (3*8)], %l3
	ldx	[%sp + CC64FSZ + STKB + TF_I + (4*8)], %l4
	ldx	[%sp + CC64FSZ + STKB + TF_I + (5*8)], %l5
	ldx	[%sp + CC64FSZ + STKB + TF_I + (6*8)], %l6
	ldx	[%sp + CC64FSZ + STKB + TF_I + (7*8)], %l7
	stx	%l0, [%g6 + CC64FSZ + STKB + TF_I + (0*8)]
	stx	%l1, [%g6 + CC64FSZ + STKB + TF_I + (1*8)]
	stx	%l2, [%g6 + CC64FSZ + STKB + TF_I + (2*8)]
	stx	%l3, [%g6 + CC64FSZ + STKB + TF_I + (3*8)]
	stx	%l4, [%g6 + CC64FSZ + STKB + TF_I + (4*8)]
	stx	%l5, [%g6 + CC64FSZ + STKB + TF_I + (5*8)]
	stx	%l6, [%g6 + CC64FSZ + STKB + TF_I + (6*8)]
	stx	%l7, [%g6 + CC64FSZ + STKB + TF_I + (7*8)]
	ldx	[%sp + CC64FSZ + STKB + TF_L + (0*8)], %l0
	ldx	[%sp + CC64FSZ + STKB + TF_L + (1*8)], %l1
	ldx	[%sp + CC64FSZ + STKB + TF_L + (2*8)], %l2
	ldx	[%sp + CC64FSZ + STKB + TF_L + (3*8)], %l3
	ldx	[%sp + CC64FSZ + STKB + TF_L + (4*8)], %l4
	ldx	[%sp + CC64FSZ + STKB + TF_L + (5*8)], %l5
	ldx	[%sp + CC64FSZ + STKB + TF_L + (6*8)], %l6
	ldx	[%sp + CC64FSZ + STKB + TF_L + (7*8)], %l7
	stx	%l0, [%g6 + CC64FSZ + STKB + TF_L + (0*8)]
	stx	%l1, [%g6 + CC64FSZ + STKB + TF_L + (1*8)]
	stx	%l2, [%g6 + CC64FSZ + STKB + TF_L + (2*8)]
	stx	%l3, [%g6 + CC64FSZ + STKB + TF_L + (3*8)]
	stx	%l4, [%g6 + CC64FSZ + STKB + TF_L + (4*8)]
	stx	%l5, [%g6 + CC64FSZ + STKB + TF_L + (5*8)]
	stx	%l6, [%g6 + CC64FSZ + STKB + TF_L + (6*8)]
	stx	%l7, [%g6 + CC64FSZ + STKB + TF_L + (7*8)]
#endif
	ba,pt	%xcc, Lslowtrap_reenter		! continue on the kernel stack
	mov	%g6, %sp
3218 | #endif | | 3218 | #endif |
3219 | | | 3219 | |
3220 | #if 0 | | 3220 | #if 0 |
3221 | /* | | 3221 | /* |
3222 | * breakpoint: capture as much info as possible and then call DDB | | 3222 | * breakpoint: capture as much info as possible and then call DDB |
3223 | * or trap, as the case may be. | | 3223 | * or trap, as the case may be. |
3224 | * | | 3224 | * |
3225 | * First, we switch to interrupt globals, and blow away %g7. Then | | 3225 | * First, we switch to interrupt globals, and blow away %g7. Then |
3226 | * switch down one stackframe -- just fiddle w/cwp, don't save or | | 3226 | * switch down one stackframe -- just fiddle w/cwp, don't save or |
3227 | * we'll trap. Then slowly save all the globals into our static | | 3227 | * we'll trap. Then slowly save all the globals into our static |
3228 | * register buffer. etc. etc. | | 3228 | * register buffer. etc. etc. |
3229 | */ | | 3229 | */ |
3230 | | | 3230 | |
3231 | breakpoint: | | 3231 | breakpoint: |
3232 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! Get IG to use | | 3232 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! Get IG to use |
3233 | rdpr %cwp, %g7 | | 3233 | rdpr %cwp, %g7 |
3234 | inc 1, %g7 ! Equivalent of save | | 3234 | inc 1, %g7 ! Equivalent of save |
3235 | wrpr %g7, 0, %cwp ! Now we have some unused locals to fiddle with | | 3235 | wrpr %g7, 0, %cwp ! Now we have some unused locals to fiddle with |
3236 | XXX ddb_regs is now ddb-regp and is a pointer not a symbol. | | 3236 | XXX ddb_regs is now ddb-regp and is a pointer not a symbol. |
3237 | set _C_LABEL(ddb_regs), %l0 | | 3237 | set _C_LABEL(ddb_regs), %l0 |
3238 | stx %g1, [%l0+DBR_IG+(1*8)] ! Save IGs | | 3238 | stx %g1, [%l0+DBR_IG+(1*8)] ! Save IGs |
3239 | stx %g2, [%l0+DBR_IG+(2*8)] | | 3239 | stx %g2, [%l0+DBR_IG+(2*8)] |
3240 | stx %g3, [%l0+DBR_IG+(3*8)] | | 3240 | stx %g3, [%l0+DBR_IG+(3*8)] |
3241 | stx %g4, [%l0+DBR_IG+(4*8)] | | 3241 | stx %g4, [%l0+DBR_IG+(4*8)] |
3242 | stx %g5, [%l0+DBR_IG+(5*8)] | | 3242 | stx %g5, [%l0+DBR_IG+(5*8)] |
3243 | stx %g6, [%l0+DBR_IG+(6*8)] | | 3243 | stx %g6, [%l0+DBR_IG+(6*8)] |
3244 | stx %g7, [%l0+DBR_IG+(7*8)] | | 3244 | stx %g7, [%l0+DBR_IG+(7*8)] |
3245 | wrpr %g0, PSTATE_KERN|PSTATE_MG, %pstate ! Get MG to use | | 3245 | wrpr %g0, PSTATE_KERN|PSTATE_MG, %pstate ! Get MG to use |
3246 | stx %g1, [%l0+DBR_MG+(1*8)] ! Save MGs | | 3246 | stx %g1, [%l0+DBR_MG+(1*8)] ! Save MGs |
3247 | stx %g2, [%l0+DBR_MG+(2*8)] | | 3247 | stx %g2, [%l0+DBR_MG+(2*8)] |
3248 | stx %g3, [%l0+DBR_MG+(3*8)] | | 3248 | stx %g3, [%l0+DBR_MG+(3*8)] |
3249 | stx %g4, [%l0+DBR_MG+(4*8)] | | 3249 | stx %g4, [%l0+DBR_MG+(4*8)] |
3250 | stx %g5, [%l0+DBR_MG+(5*8)] | | 3250 | stx %g5, [%l0+DBR_MG+(5*8)] |
3251 | stx %g6, [%l0+DBR_MG+(6*8)] | | 3251 | stx %g6, [%l0+DBR_MG+(6*8)] |
3252 | stx %g7, [%l0+DBR_MG+(7*8)] | | 3252 | stx %g7, [%l0+DBR_MG+(7*8)] |
3253 | wrpr %g0, PSTATE_KERN|PSTATE_AG, %pstate ! Get AG to use | | 3253 | wrpr %g0, PSTATE_KERN|PSTATE_AG, %pstate ! Get AG to use |
3254 | stx %g1, [%l0+DBR_AG+(1*8)] ! Save AGs | | 3254 | stx %g1, [%l0+DBR_AG+(1*8)] ! Save AGs |
3255 | stx %g2, [%l0+DBR_AG+(2*8)] | | 3255 | stx %g2, [%l0+DBR_AG+(2*8)] |
3256 | stx %g3, [%l0+DBR_AG+(3*8)] | | 3256 | stx %g3, [%l0+DBR_AG+(3*8)] |
3257 | stx %g4, [%l0+DBR_AG+(4*8)] | | 3257 | stx %g4, [%l0+DBR_AG+(4*8)] |
3258 | stx %g5, [%l0+DBR_AG+(5*8)] | | 3258 | stx %g5, [%l0+DBR_AG+(5*8)] |
3259 | stx %g6, [%l0+DBR_AG+(6*8)] | | 3259 | stx %g6, [%l0+DBR_AG+(6*8)] |
3260 | stx %g7, [%l0+DBR_AG+(7*8)] | | 3260 | stx %g7, [%l0+DBR_AG+(7*8)] |
3261 | wrpr %g0, PSTATE_KERN, %pstate ! Get G to use | | 3261 | wrpr %g0, PSTATE_KERN, %pstate ! Get G to use |
3262 | stx %g1, [%l0+DBR_G+(1*8)] ! Save Gs | | 3262 | stx %g1, [%l0+DBR_G+(1*8)] ! Save Gs |
3263 | stx %g2, [%l0+DBR_G+(2*8)] | | 3263 | stx %g2, [%l0+DBR_G+(2*8)] |
3264 | stx %g3, [%l0+DBR_G+(3*8)] | | 3264 | stx %g3, [%l0+DBR_G+(3*8)] |
3265 | stx %g4, [%l0+DBR_G+(4*8)] | | 3265 | stx %g4, [%l0+DBR_G+(4*8)] |
3266 | stx %g5, [%l0+DBR_G+(5*8)] | | 3266 | stx %g5, [%l0+DBR_G+(5*8)] |
3267 | stx %g6, [%l0+DBR_G+(6*8)] | | 3267 | stx %g6, [%l0+DBR_G+(6*8)] |
3268 | stx %g7, [%l0+DBR_G+(7*8)] | | 3268 | stx %g7, [%l0+DBR_G+(7*8)] |
3269 | rdpr %canrestore, %l1 | | 3269 | rdpr %canrestore, %l1 |
3270 | stb %l1, [%l0+DBR_CANRESTORE] | | 3270 | stb %l1, [%l0+DBR_CANRESTORE] |
3271 | rdpr %cansave, %l2 | | 3271 | rdpr %cansave, %l2 |
3272 | stb %l2, [%l0+DBR_CANSAVE] | | 3272 | stb %l2, [%l0+DBR_CANSAVE] |
3273 | rdpr %cleanwin, %l3 | | 3273 | rdpr %cleanwin, %l3 |
3274 | stb %l3, [%l0+DBR_CLEANWIN] | | 3274 | stb %l3, [%l0+DBR_CLEANWIN] |
3275 | rdpr %wstate, %l4 | | 3275 | rdpr %wstate, %l4 |
3276 | stb %l4, [%l0+DBR_WSTATE] | | 3276 | stb %l4, [%l0+DBR_WSTATE] |
3277 | rd %y, %l5 | | 3277 | rd %y, %l5 |
3278 | stw %l5, [%l0+DBR_Y] | | 3278 | stw %l5, [%l0+DBR_Y] |
3279 | rdpr %tl, %l6 | | 3279 | rdpr %tl, %l6 |
3280 | stb %l6, [%l0+DBR_TL] | | 3280 | stb %l6, [%l0+DBR_TL] |
3281 | dec 1, %g7 | | 3281 | dec 1, %g7 |
3282 | #endif | | 3282 | #endif |
3283 | | | 3283 | |
3284 | /* | | 3284 | /* |
3285 | * I will not touch any of the DDB or KGDB stuff until I know what's going | | 3285 | * I will not touch any of the DDB or KGDB stuff until I know what's going |
3286 | * on with the symbol table. This is all still v7/v8 code and needs to be fixed. | | 3286 | * on with the symbol table. This is all still v7/v8 code and needs to be fixed. |
3287 | */ | | 3287 | */ |
3288 | #ifdef KGDB | | 3288 | #ifdef KGDB |
3289 | /* | | 3289 | /* |
3290 | * bpt is entered on all breakpoint traps. | | 3290 | * bpt is entered on all breakpoint traps. |
3291 | * If this is a kernel breakpoint, we do not want to call trap(). | | 3291 | * If this is a kernel breakpoint, we do not want to call trap(). |
3292 | * Among other reasons, this way we can set breakpoints in trap(). | | 3292 | * Among other reasons, this way we can set breakpoints in trap(). |
3293 | */ | | 3293 | */ |
3294 | bpt: | | 3294 | bpt: |
3295 | set TSTATE_PRIV, %l4 | | 3295 | set TSTATE_PRIV, %l4 |
3296 | andcc %l4, %l0, %g0 ! breakpoint from kernel? | | 3296 | andcc %l4, %l0, %g0 ! breakpoint from kernel? |
3297 | bz slowtrap ! no, go do regular trap | | 3297 | bz slowtrap ! no, go do regular trap |
3298 | nop | | 3298 | nop |
3299 | | | 3299 | |
3300 | /* | | 3300 | /* |
3301 | * Build a trap frame for kgdb_trap_glue to copy. | | 3301 | * Build a trap frame for kgdb_trap_glue to copy. |
3302 | * Enable traps but set ipl high so that we will not | | 3302 | * Enable traps but set ipl high so that we will not |
3303 | * see interrupts from within breakpoints. | | 3303 | * see interrupts from within breakpoints. |
3304 | */ | | 3304 | */ |
3305 | save %sp, -CCFSZ-TF_SIZE, %sp ! allocate a trap frame | | 3305 | save %sp, -CCFSZ-TF_SIZE, %sp ! allocate a trap frame |
3306 | TRAP_SETUP(-CCFSZ-TF_SIZE) | | 3306 | TRAP_SETUP(-CCFSZ-TF_SIZE) |
3307 | or %l0, PSR_PIL, %l4 ! splhigh() | | 3307 | or %l0, PSR_PIL, %l4 ! splhigh() |
3308 | wr %l4, 0, %psr ! the manual claims that this | | 3308 | wr %l4, 0, %psr ! the manual claims that this |
3309 | wr %l4, PSR_ET, %psr ! song and dance is necessary | | 3309 | wr %l4, PSR_ET, %psr ! song and dance is necessary |
3310 | std %l0, [%sp + CCFSZ + 0] ! tf.tf_psr, tf.tf_pc | | 3310 | std %l0, [%sp + CCFSZ + 0] ! tf.tf_psr, tf.tf_pc |
3311 | mov %l3, %o0 ! trap type arg for kgdb_trap_glue | | 3311 | mov %l3, %o0 ! trap type arg for kgdb_trap_glue |
3312 | rd %y, %l3 | | 3312 | rd %y, %l3 |
3313 | std %l2, [%sp + CCFSZ + 8] ! tf.tf_npc, tf.tf_y | | 3313 | std %l2, [%sp + CCFSZ + 8] ! tf.tf_npc, tf.tf_y |
3314 | rd %wim, %l3 | | 3314 | rd %wim, %l3 |
3315 | st %l3, [%sp + CCFSZ + 16] ! tf.tf_wim (a kgdb-only r/o field) | | 3315 | st %l3, [%sp + CCFSZ + 16] ! tf.tf_wim (a kgdb-only r/o field) |
3316 | st %g1, [%sp + CCFSZ + 20] ! tf.tf_global[1] | | 3316 | st %g1, [%sp + CCFSZ + 20] ! tf.tf_global[1] |
3317 | std %g2, [%sp + CCFSZ + 24] ! etc | | 3317 | std %g2, [%sp + CCFSZ + 24] ! etc |
3318 | std %g4, [%sp + CCFSZ + 32] | | 3318 | std %g4, [%sp + CCFSZ + 32] |
3319 | std %g6, [%sp + CCFSZ + 40] | | 3319 | std %g6, [%sp + CCFSZ + 40] |
3320 | std %i0, [%sp + CCFSZ + 48] ! tf.tf_in[0..1] | | 3320 | std %i0, [%sp + CCFSZ + 48] ! tf.tf_in[0..1] |
3321 | std %i2, [%sp + CCFSZ + 56] ! etc | | 3321 | std %i2, [%sp + CCFSZ + 56] ! etc |
3322 | std %i4, [%sp + CCFSZ + 64] | | 3322 | std %i4, [%sp + CCFSZ + 64] |
3323 | std %i6, [%sp + CCFSZ + 72] | | 3323 | std %i6, [%sp + CCFSZ + 72] |
3324 | | | 3324 | |
3325 | /* | | 3325 | /* |
3326 | * Now call kgdb_trap_glue(); if it returns, call trap(). | | 3326 | * Now call kgdb_trap_glue(); if it returns, call trap(). |
3327 | */ | | 3327 | */ |
3328 | mov %o0, %l3 ! gotta save trap type | | 3328 | mov %o0, %l3 ! gotta save trap type |
3329 | call _C_LABEL(kgdb_trap_glue) ! kgdb_trap_glue(type, &trapframe) | | 3329 | call _C_LABEL(kgdb_trap_glue) ! kgdb_trap_glue(type, &trapframe) |
3330 | add %sp, CCFSZ, %o1 ! (&trapframe) | | 3330 | add %sp, CCFSZ, %o1 ! (&trapframe) |
3331 | | | 3331 | |
3332 | /* | | 3332 | /* |
3333 | * Use slowtrap to call trap---but first erase our tracks | | 3333 | * Use slowtrap to call trap---but first erase our tracks |
3334 | * (put the registers back the way they were). | | 3334 | * (put the registers back the way they were). |
3335 | */ | | 3335 | */ |
3336 | mov %l3, %o0 ! slowtrap will need trap type | | 3336 | mov %l3, %o0 ! slowtrap will need trap type |
3337 | ld [%sp + CCFSZ + 12], %l3 | | 3337 | ld [%sp + CCFSZ + 12], %l3 |
3338 | wr %l3, 0, %y | | 3338 | wr %l3, 0, %y |
3339 | ld [%sp + CCFSZ + 20], %g1 | | 3339 | ld [%sp + CCFSZ + 20], %g1 |
3340 | ldd [%sp + CCFSZ + 24], %g2 | | 3340 | ldd [%sp + CCFSZ + 24], %g2 |
3341 | ldd [%sp + CCFSZ + 32], %g4 | | 3341 | ldd [%sp + CCFSZ + 32], %g4 |
3342 | b Lslowtrap_reenter | | 3342 | b Lslowtrap_reenter |
3343 | ldd [%sp + CCFSZ + 40], %g6 | | 3343 | ldd [%sp + CCFSZ + 40], %g6 |
3344 | | | 3344 | |
3345 | /* | | 3345 | /* |
3346 | * Enter kernel breakpoint. Write all the windows (not including the | | 3346 | * Enter kernel breakpoint. Write all the windows (not including the |
3347 | * current window) into the stack, so that backtrace works. Copy the | | 3347 | * current window) into the stack, so that backtrace works. Copy the |
3348 | * supplied trap frame to the kgdb stack and switch stacks. | | 3348 | * supplied trap frame to the kgdb stack and switch stacks. |
3349 | * | | 3349 | * |
3350 | * kgdb_trap_glue(type, tf0) | | 3350 | * kgdb_trap_glue(type, tf0) |
3351 | * int type; | | 3351 | * int type; |
3352 | * struct trapframe *tf0; | | 3352 | * struct trapframe *tf0; |
3353 | */ | | 3353 | */ |
3354 | ENTRY_NOPROFILE(kgdb_trap_glue) | | 3354 | ENTRY_NOPROFILE(kgdb_trap_glue) |
3355 | save %sp, -CCFSZ, %sp | | 3355 | save %sp, -CCFSZ, %sp |
3356 | | | 3356 | |
3357 | flushw ! flush all windows | | 3357 | flushw ! flush all windows |
3358 | mov %sp, %l4 ! %l4 = current %sp | | 3358 | mov %sp, %l4 ! %l4 = current %sp |
3359 | | | 3359 | |
3360 | /* copy trapframe to top of kgdb stack */ | | 3360 | /* copy trapframe to top of kgdb stack */ |
3361 | set _C_LABEL(kgdb_stack) + KGDB_STACK_SIZE - 80, %l0 | | 3361 | set _C_LABEL(kgdb_stack) + KGDB_STACK_SIZE - 80, %l0 |
3362 | ! %l0 = tfcopy -> end_of_kgdb_stack | | 3362 | ! %l0 = tfcopy -> end_of_kgdb_stack |
3363 | mov 80, %l1 | | 3363 | mov 80, %l1 |
3364 | 1: ldd [%i1], %l2 | | 3364 | 1: ldd [%i1], %l2 |
3365 | inc 8, %i1 | | 3365 | inc 8, %i1 |
3366 | deccc 8, %l1 | | 3366 | deccc 8, %l1 |
3367 | std %l2, [%l0] | | 3367 | std %l2, [%l0] |
3368 | bg 1b | | 3368 | bg 1b |
3369 | inc 8, %l0 | | 3369 | inc 8, %l0 |
3370 | | | 3370 | |
3371 | #ifdef NOTDEF_DEBUG | | 3371 | #ifdef NOTDEF_DEBUG |
3372 | /* save old red zone and then turn it off */ | | 3372 | /* save old red zone and then turn it off */ |
3373 | sethi %hi(_C_LABEL(redzone)), %l7 | | 3373 | sethi %hi(_C_LABEL(redzone)), %l7 |
3374 | ld [%l7 + %lo(_C_LABEL(redzone))], %l6 | | 3374 | ld [%l7 + %lo(_C_LABEL(redzone))], %l6 |
3375 | st %g0, [%l7 + %lo(_C_LABEL(redzone))] | | 3375 | st %g0, [%l7 + %lo(_C_LABEL(redzone))] |
3376 | #endif | | 3376 | #endif |
3377 | /* switch to kgdb stack */ | | 3377 | /* switch to kgdb stack */ |
3378 | add %l0, -CCFSZ-TF_SIZE, %sp | | 3378 | add %l0, -CCFSZ-TF_SIZE, %sp |
3379 | | | 3379 | |
3380 | /* if (kgdb_trap(type, tfcopy)) kgdb_rett(tfcopy); */ | | 3380 | /* if (kgdb_trap(type, tfcopy)) kgdb_rett(tfcopy); */ |
3381 | mov %i0, %o0 | | 3381 | mov %i0, %o0 |
3382 | call _C_LABEL(kgdb_trap) | | 3382 | call _C_LABEL(kgdb_trap) |
3383 | add %l0, -80, %o1 | | 3383 | add %l0, -80, %o1 |
3384 | tst %o0 | | 3384 | tst %o0 |
3385 | bnz,a kgdb_rett | | 3385 | bnz,a kgdb_rett |
3386 | add %l0, -80, %g1 | | 3386 | add %l0, -80, %g1 |
3387 | | | 3387 | |
3388 | /* | | 3388 | /* |
3389 | * kgdb_trap() did not handle the trap at all so the stack is | | 3389 | * kgdb_trap() did not handle the trap at all so the stack is |
3390 | * still intact. A simple `restore' will put everything back, | | 3390 | * still intact. A simple `restore' will put everything back, |
3391 | * after we reset the stack pointer. | | 3391 | * after we reset the stack pointer. |
3392 | */ | | 3392 | */ |
3393 | mov %l4, %sp | | 3393 | mov %l4, %sp |
3394 | #ifdef NOTDEF_DEBUG | | 3394 | #ifdef NOTDEF_DEBUG |
3395 | st %l6, [%l7 + %lo(_C_LABEL(redzone))] ! restore red zone | | 3395 | st %l6, [%l7 + %lo(_C_LABEL(redzone))] ! restore red zone |
3396 | #endif | | 3396 | #endif |
3397 | ret | | 3397 | ret |
3398 | restore | | 3398 | restore |
3399 | | | 3399 | |
3400 | /* | | 3400 | /* |
3401 | * Return from kgdb trap. This is sort of special. | | 3401 | * Return from kgdb trap. This is sort of special. |
3402 | * | | 3402 | * |
3403 | * We know that kgdb_trap_glue wrote the window above it, so that we will | | 3403 | * We know that kgdb_trap_glue wrote the window above it, so that we will |
3404 | * be able to (and are sure to have to) load it up. We also know that we | | 3404 | * be able to (and are sure to have to) load it up. We also know that we |
3405 | * came from kernel land and can assume that the %fp (%i6) we load here | | 3405 | * came from kernel land and can assume that the %fp (%i6) we load here |
3406 | * is proper. We must also be sure not to lower ipl (it is at splhigh()) | | 3406 | * is proper. We must also be sure not to lower ipl (it is at splhigh()) |
3407 | * until we have traps disabled, due to the SPARC taking traps at the | | 3407 | * until we have traps disabled, due to the SPARC taking traps at the |
3408 | * new ipl before noticing that PSR_ET has been turned off. We are on | | 3408 | * new ipl before noticing that PSR_ET has been turned off. We are on |
3409 | * the kgdb stack, so this could be disastrous. | | 3409 | * the kgdb stack, so this could be disastrous. |
3410 | * | | 3410 | * |
3411 | * Note that the trapframe argument in %g1 points into the current stack | | 3411 | * Note that the trapframe argument in %g1 points into the current stack |
3412 | * frame (current window). We abandon this window when we move %g1->tf_psr | | 3412 | * frame (current window). We abandon this window when we move %g1->tf_psr |
3413 | * into %psr, but we will not have loaded the new %sp yet, so again traps | | 3413 | * into %psr, but we will not have loaded the new %sp yet, so again traps |
3414 | * must be disabled. | | 3414 | * must be disabled. |
3415 | */ | | 3415 | */ |
3416 | kgdb_rett: | | 3416 | kgdb_rett: |
3417 | rd %psr, %g4 ! turn off traps | | 3417 | rd %psr, %g4 ! turn off traps |
3418 | wr %g4, PSR_ET, %psr | | 3418 | wr %g4, PSR_ET, %psr |
3419 | /* use the three-instruction delay to do something useful */ | | 3419 | /* use the three-instruction delay to do something useful */ |
3420 | ld [%g1], %g2 ! pick up new %psr | | 3420 | ld [%g1], %g2 ! pick up new %psr |
3421 | ld [%g1 + 12], %g3 ! set %y | | 3421 | ld [%g1 + 12], %g3 ! set %y |
3422 | wr %g3, 0, %y | | 3422 | wr %g3, 0, %y |
3423 | #ifdef NOTDEF_DEBUG | | 3423 | #ifdef NOTDEF_DEBUG |
3424 | st %l6, [%l7 + %lo(_C_LABEL(redzone))] ! and restore red zone | | 3424 | st %l6, [%l7 + %lo(_C_LABEL(redzone))] ! and restore red zone |
3425 | #endif | | 3425 | #endif |
3426 | wr %g0, 0, %wim ! enable window changes | | 3426 | wr %g0, 0, %wim ! enable window changes |
3427 | nop; nop; nop | | 3427 | nop; nop; nop |
3428 | /* now safe to set the new psr (changes CWP, leaves traps disabled) */ | | 3428 | /* now safe to set the new psr (changes CWP, leaves traps disabled) */ |
3429 | wr %g2, 0, %psr ! set rett psr (including cond codes) | | 3429 | wr %g2, 0, %psr ! set rett psr (including cond codes) |
3430 | /* 3 instruction delay before we can use the new window */ | | 3430 | /* 3 instruction delay before we can use the new window */ |
3431 | /*1*/ ldd [%g1 + 24], %g2 ! set new %g2, %g3 | | 3431 | /*1*/ ldd [%g1 + 24], %g2 ! set new %g2, %g3 |
3432 | /*2*/ ldd [%g1 + 32], %g4 ! set new %g4, %g5 | | 3432 | /*2*/ ldd [%g1 + 32], %g4 ! set new %g4, %g5 |
3433 | /*3*/ ldd [%g1 + 40], %g6 ! set new %g6, %g7 | | 3433 | /*3*/ ldd [%g1 + 40], %g6 ! set new %g6, %g7 |
3434 | | | 3434 | |
3435 | /* now we can use the new window */ | | 3435 | /* now we can use the new window */ |
3436 | mov %g1, %l4 | | 3436 | mov %g1, %l4 |
3437 | ld [%l4 + 4], %l1 ! get new pc | | 3437 | ld [%l4 + 4], %l1 ! get new pc |
3438 | ld [%l4 + 8], %l2 ! get new npc | | 3438 | ld [%l4 + 8], %l2 ! get new npc |
3439 | ld [%l4 + 20], %g1 ! set new %g1 | | 3439 | ld [%l4 + 20], %g1 ! set new %g1 |
3440 | | | 3440 | |
3441 | /* set up returnee's out registers, including its %sp */ | | 3441 | /* set up returnee's out registers, including its %sp */ |
3442 | ldd [%l4 + 48], %i0 | | 3442 | ldd [%l4 + 48], %i0 |
3443 | ldd [%l4 + 56], %i2 | | 3443 | ldd [%l4 + 56], %i2 |
3444 | ldd [%l4 + 64], %i4 | | 3444 | ldd [%l4 + 64], %i4 |
3445 | ldd [%l4 + 72], %i6 | | 3445 | ldd [%l4 + 72], %i6 |
3446 | | | 3446 | |
3447 | /* load returnee's window, making the window above it be invalid */ | | 3447 | /* load returnee's window, making the window above it be invalid */ |
3448 | restore | | 3448 | restore |
3449 | restore %g0, 1, %l1 ! move to inval window and set %l1 = 1 | | 3449 | restore %g0, 1, %l1 ! move to inval window and set %l1 = 1 |
3450 | rd %psr, %l0 | | 3450 | rd %psr, %l0 |
3451 | srl %l1, %l0, %l1 | | 3451 | srl %l1, %l0, %l1 |
3452 | wr %l1, 0, %wim ! %wim = 1 << (%psr & 31) | | 3452 | wr %l1, 0, %wim ! %wim = 1 << (%psr & 31) |
3453 | sethi %hi(CPCB), %l1 | | 3453 | sethi %hi(CPCB), %l1 |
3454 | LDPTR [%l1 + %lo(CPCB)], %l1 | | 3454 | LDPTR [%l1 + %lo(CPCB)], %l1 |
3455 | and %l0, 31, %l0 ! CWP = %psr & 31; | | 3455 | and %l0, 31, %l0 ! CWP = %psr & 31; |
3456 | ! st %l0, [%l1 + PCB_WIM] ! cpcb->pcb_wim = CWP; | | 3456 | ! st %l0, [%l1 + PCB_WIM] ! cpcb->pcb_wim = CWP; |
3457 | save %g0, %g0, %g0 ! back to window to reload | | 3457 | save %g0, %g0, %g0 ! back to window to reload |
3458 | ! LOADWIN(%sp) | | 3458 | ! LOADWIN(%sp) |
3459 | save %g0, %g0, %g0 ! back to trap window | | 3459 | save %g0, %g0, %g0 ! back to trap window |
3460 | /* note, we have not altered condition codes; safe to just rett */ | | 3460 | /* note, we have not altered condition codes; safe to just rett */ |
3461 | RETT | | 3461 | RETT |
3462 | #endif | | 3462 | #endif |
3463 | | | 3463 | |
3464 | /* | | 3464 | /* |
3465 | * syscall_setup() builds a trap frame and calls syscall(). | | 3465 | * syscall_setup() builds a trap frame and calls syscall(). |
3466 | * sun_syscall is same but delivers sun system call number | | 3466 | * sun_syscall is same but delivers sun system call number |
3467 | * XXX should not have to save&reload ALL the registers just for | | 3467 | * XXX should not have to save&reload ALL the registers just for |
3468 | * ptrace... | | 3468 | * ptrace... |
3469 | */ | | 3469 | */ |
3470 | syscall_setup: | | 3470 | syscall_setup: |
3471 | #ifdef TRAPS_USE_IG | | 3471 | #ifdef TRAPS_USE_IG |
3472 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! DEBUG | | 3472 | wrpr %g0, PSTATE_KERN|PSTATE_IG, %pstate ! DEBUG |
3473 | #endif | | 3473 | #endif |
3474 | TRAP_SETUP(-CC64FSZ-TF_SIZE) | | 3474 | TRAP_SETUP(-CC64FSZ-TF_SIZE) |
3475 | | | 3475 | |
3476 | #ifdef DEBUG | | 3476 | #ifdef DEBUG |
3477 | rdpr %tt, %o1 ! debug | | 3477 | rdpr %tt, %o1 ! debug |
3478 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT]! debug | | 3478 | sth %o1, [%sp + CC64FSZ + STKB + TF_TT]! debug |
3479 | #endif | | 3479 | #endif |
3480 | | | 3480 | |
3481 | wrpr %g0, PSTATE_KERN, %pstate ! Get back to normal globals | | 3481 | wrpr %g0, PSTATE_KERN, %pstate ! Get back to normal globals |
3482 | stx %g1, [%sp + CC64FSZ + STKB + TF_G + ( 1*8)] | | 3482 | stx %g1, [%sp + CC64FSZ + STKB + TF_G + ( 1*8)] |
3483 | mov %g1, %o1 ! code | | 3483 | mov %g1, %o1 ! code |
3484 | rdpr %tpc, %o2 ! (pc) | | 3484 | rdpr %tpc, %o2 ! (pc) |
3485 | stx %g2, [%sp + CC64FSZ + STKB + TF_G + ( 2*8)] | | 3485 | stx %g2, [%sp + CC64FSZ + STKB + TF_G + ( 2*8)] |
3486 | rdpr %tstate, %g1 | | 3486 | rdpr %tstate, %g1 |
3487 | stx %g3, [%sp + CC64FSZ + STKB + TF_G + ( 3*8)] | | 3487 | stx %g3, [%sp + CC64FSZ + STKB + TF_G + ( 3*8)] |
3488 | rdpr %tnpc, %o3 | | 3488 | rdpr %tnpc, %o3 |
3489 | stx %g4, [%sp + CC64FSZ + STKB + TF_G + ( 4*8)] | | 3489 | stx %g4, [%sp + CC64FSZ + STKB + TF_G + ( 4*8)] |
3490 | rd %y, %o4 | | 3490 | rd %y, %o4 |
3491 | stx %g5, [%sp + CC64FSZ + STKB + TF_G + ( 5*8)] | | 3491 | stx %g5, [%sp + CC64FSZ + STKB + TF_G + ( 5*8)] |
3492 | stx %g6, [%sp + CC64FSZ + STKB + TF_G + ( 6*8)] | | 3492 | stx %g6, [%sp + CC64FSZ + STKB + TF_G + ( 6*8)] |
3493 | CHKPT(%g5,%g6,0x31) | | 3493 | CHKPT(%g5,%g6,0x31) |
3494 | wrpr %g0, 0, %tl ! return to tl=0 | | 3494 | wrpr %g0, 0, %tl ! return to tl=0 |
3495 | stx %g7, [%sp + CC64FSZ + STKB + TF_G + ( 7*8)] | | 3495 | stx %g7, [%sp + CC64FSZ + STKB + TF_G + ( 7*8)] |
3496 | add %sp, CC64FSZ + STKB, %o0 ! (&tf) | | 3496 | add %sp, CC64FSZ + STKB, %o0 ! (&tf) |
3497 | | | 3497 | |
3498 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] | | 3498 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] |
3499 | stx %o2, [%sp + CC64FSZ + STKB + TF_PC] | | 3499 | stx %o2, [%sp + CC64FSZ + STKB + TF_PC] |
3500 | stx %o3, [%sp + CC64FSZ + STKB + TF_NPC] | | 3500 | stx %o3, [%sp + CC64FSZ + STKB + TF_NPC] |
3501 | st %o4, [%sp + CC64FSZ + STKB + TF_Y] | | 3501 | st %o4, [%sp + CC64FSZ + STKB + TF_Y] |
3502 | | | 3502 | |
3503 | rdpr %pil, %g5 | | 3503 | rdpr %pil, %g5 |
3504 | stb %g5, [%sp + CC64FSZ + STKB + TF_PIL] | | 3504 | stb %g5, [%sp + CC64FSZ + STKB + TF_PIL] |
3505 | stb %g5, [%sp + CC64FSZ + STKB + TF_OLDPIL] | | 3505 | stb %g5, [%sp + CC64FSZ + STKB + TF_OLDPIL] |
3506 | | | 3506 | |
3507 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. | | 3507 | !! In the EMBEDANY memory model %g4 points to the start of the data segment. |
3508 | !! In our case we need to clear it before calling any C-code | | 3508 | !! In our case we need to clear it before calling any C-code |
3509 | clr %g4 | | 3509 | clr %g4 |
3510 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI | | 3510 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI |
3511 | | | 3511 | |
3512 | sethi %hi(CURLWP), %l1 | | 3512 | sethi %hi(CURLWP), %l1 |
3513 | LDPTR [%l1 + %lo(CURLWP)], %l1 | | 3513 | LDPTR [%l1 + %lo(CURLWP)], %l1 |
3514 | LDPTR [%l1 + L_PROC], %l1 ! now %l1 points to p | | 3514 | LDPTR [%l1 + L_PROC], %l1 ! now %l1 points to p |
3515 | LDPTR [%l1 + P_MD_SYSCALL], %l1 | | 3515 | LDPTR [%l1 + P_MD_SYSCALL], %l1 |
3516 | call %l1 | | 3516 | call %l1 |
3517 | wrpr %g0, PSTATE_INTR, %pstate ! turn on interrupts | | 3517 | wrpr %g0, PSTATE_INTR, %pstate ! turn on interrupts |
3518 | | | 3518 | |
3519 | /* see `lwp_trampoline' for the reason for this label */ | | 3519 | /* see `lwp_trampoline' for the reason for this label */ |
3520 | return_from_syscall: | | 3520 | return_from_syscall: |
3521 | wrpr %g0, PSTATE_KERN, %pstate ! Disable interrupts | | 3521 | wrpr %g0, PSTATE_KERN, %pstate ! Disable interrupts 
3522 | CHKPT(%o1,%o2,0x32) | | 3522 | CHKPT(%o1,%o2,0x32) |
3523 | wrpr %g0, 0, %tl ! Return to tl==0 | | 3523 | wrpr %g0, 0, %tl ! Return to tl==0 |
3524 | CHKPT(%o1,%o2,4) | | 3524 | CHKPT(%o1,%o2,4) |
3525 | ba,a,pt %icc, return_from_trap | | 3525 | ba,a,pt %icc, return_from_trap |
3526 | nop | | 3526 | nop |
3527 | NOTREACHED | | 3527 | NOTREACHED |
3528 | | | 3528 | |
3529 | /* | | 3529 | /* |
3530 | * interrupt_vector: | | 3530 | * interrupt_vector: |
3531 | * | | 3531 | * |
3532 | * Spitfire chips never get level interrupts directly from H/W. | | 3532 | * Spitfire chips never get level interrupts directly from H/W. |
3533 | * Instead, all interrupts come in as interrupt_vector traps. | | 3533 | * Instead, all interrupts come in as interrupt_vector traps. |
3534 | * The interrupt number or handler address is an 11 bit number | | 3534 | * The interrupt number or handler address is an 11 bit number |
3535 | * encoded in the first interrupt data word. Additional words | | 3535 | * encoded in the first interrupt data word. Additional words |
3536 | * are application specific and used primarily for cross-calls. | | 3536 | * are application specific and used primarily for cross-calls. |
3537 | * | | 3537 | * |
3538 | * The interrupt vector handler then needs to identify the | | 3538 | * The interrupt vector handler then needs to identify the |
3539 | * interrupt source from the interrupt number and arrange to | | 3539 | * interrupt source from the interrupt number and arrange to |
3540 | * invoke the interrupt handler. This can either be done directly | | 3540 | * invoke the interrupt handler. This can either be done directly |
3541 | * from here, or a softint at a particular level can be issued. | | 3541 | * from here, or a softint at a particular level can be issued. |
3542 | * | | 3542 | * |
3543 | * To call an interrupt directly and not overflow the trap stack, | | 3543 | * To call an interrupt directly and not overflow the trap stack, |
3544 | * the trap registers should be saved on the stack, registers | | 3544 | * the trap registers should be saved on the stack, registers |
3545 | * cleaned, trap-level decremented, the handler called, and then | | 3545 | * cleaned, trap-level decremented, the handler called, and then |
3546 | * the process must be reversed. | | 3546 | * the process must be reversed. |
3547 | * | | 3547 | * |
3548 | * To simplify life all we do here is issue an appropriate softint. | | 3548 | * To simplify life all we do here is issue an appropriate softint. |
3549 | * | | 3549 | * |
3550 | * Note: It is impossible to identify or change a device's | | 3550 | * Note: It is impossible to identify or change a device's |
3551 | * interrupt number until it is probed. That's the | | 3551 | * interrupt number until it is probed. That's the |
3552 | * purpose for all the funny interrupt acknowledge | | 3552 | * purpose for all the funny interrupt acknowledge |
3553 | * code. | | 3553 | * code. |
3554 | * | | 3554 | * |
3555 | */ | | 3555 | */ |
3556 | | | 3556 | |
3557 | /* | | 3557 | /* |
3558 | * Vectored interrupts: | | 3558 | * Vectored interrupts: |
3559 | * | | 3559 | * |
3560 | * When an interrupt comes in, interrupt_vector uses the interrupt | | 3560 | * When an interrupt comes in, interrupt_vector uses the interrupt |
3561 | * vector number to lookup the appropriate intrhand from the intrlev | | 3561 | * vector number to lookup the appropriate intrhand from the intrlev |
3562 | * array. It then looks up the interrupt level from the intrhand | | 3562 | * array. It then looks up the interrupt level from the intrhand |
3563 | * structure. It uses the level to index the intrpending array, | | 3563 | * structure. It uses the level to index the intrpending array, |
3564 | * which is 8 slots for each possible interrupt level (so we can | | 3564 | * which is 8 slots for each possible interrupt level (so we can |
3565 | * shift instead of multiply for address calculation). It hunts for | | 3565 | * shift instead of multiply for address calculation). It hunts for |
3566 | * any available slot at that level. Available slots are NULL. | | 3566 | * any available slot at that level. Available slots are NULL. |
3567 | * | | 3567 | * |
3568 | * Then interrupt_vector uses the interrupt level in the intrhand | | 3568 | * Then interrupt_vector uses the interrupt level in the intrhand |
3569 | * to issue a softint of the appropriate level. The softint handler | | 3569 | * to issue a softint of the appropriate level. The softint handler |
3570 | * figures out what level interrupt it's handling and pulls the first | | 3570 | * figures out what level interrupt it's handling and pulls the first |
3571 | * intrhand pointer out of the intrpending array for that interrupt | | 3571 | * intrhand pointer out of the intrpending array for that interrupt |
3572 | * level, puts a NULL in its place, clears the interrupt generator, | | 3572 | * level, puts a NULL in its place, clears the interrupt generator, |
3573 | * and invokes the interrupt handler. | | 3573 | * and invokes the interrupt handler. |
3574 | */ | | 3574 | */ |
3575 | | | 3575 | |
3576 | /* intrpending array is now in per-CPU structure. */ | | 3576 | /* intrpending array is now in per-CPU structure. */ |
3577 | | | 3577 | |
3578 | #ifdef DEBUG | | 3578 | #ifdef DEBUG |
3579 | #define INTRDEBUG_VECTOR 0x1 | | 3579 | #define INTRDEBUG_VECTOR 0x1 |
3580 | #define INTRDEBUG_LEVEL 0x2 | | 3580 | #define INTRDEBUG_LEVEL 0x2 |
3581 | #define INTRDEBUG_FUNC 0x4 | | 3581 | #define INTRDEBUG_FUNC 0x4 |
3582 | #define INTRDEBUG_SPUR 0x8 | | 3582 | #define INTRDEBUG_SPUR 0x8 |
3583 | .data | | 3583 | .data |
3584 | .globl _C_LABEL(intrdebug) | | 3584 | .globl _C_LABEL(intrdebug) |
3585 | _C_LABEL(intrdebug): .word 0x0 | | 3585 | _C_LABEL(intrdebug): .word 0x0 |
3586 | /* | | 3586 | /* |
3587 | * Note: we use the local label `97' to branch forward to, to skip | | 3587 | * Note: we use the local label `97' to branch forward to, to skip |
3588 | * actual debugging code following a `intrdebug' bit test. | | 3588 | * actual debugging code following a `intrdebug' bit test. |
3589 | */ | | 3589 | */ |
3590 | #endif | | 3590 | #endif |
3591 | .text | | 3591 | .text |
3592 | interrupt_vector: | | 3592 | interrupt_vector: |
3593 | #ifdef TRAPSTATS | | 3593 | #ifdef TRAPSTATS |
3594 | set _C_LABEL(kiveccnt), %g1 | | 3594 | set _C_LABEL(kiveccnt), %g1 |
3595 | set _C_LABEL(iveccnt), %g2 | | 3595 | set _C_LABEL(iveccnt), %g2 |
3596 | rdpr %tl, %g3 | | 3596 | rdpr %tl, %g3 |
3597 | dec %g3 | | 3597 | dec %g3 |
3598 | movrz %g3, %g2, %g1 | | 3598 | movrz %g3, %g2, %g1 |
3599 | lduw [%g1], %g2 | | 3599 | lduw [%g1], %g2 |
3600 | inc %g2 | | 3600 | inc %g2 |
3601 | stw %g2, [%g1] | | 3601 | stw %g2, [%g1] |
3602 | #endif | | 3602 | #endif |
3603 | ldxa [%g0] ASI_IRSR, %g1 | | 3603 | ldxa [%g0] ASI_IRSR, %g1 |
3604 | mov IRDR_0H, %g7 | | 3604 | mov IRDR_0H, %g7 |
3605 | ldxa [%g7] ASI_IRDR, %g7 ! Get interrupt number | | 3605 | ldxa [%g7] ASI_IRDR, %g7 ! Get interrupt number |
3606 | membar #Sync | | 3606 | membar #Sync |
3607 | | | 3607 | |
3608 | #if KTR_COMPILE & KTR_INTR | | 3608 | #if KTR_COMPILE & KTR_INTR |
3609 | CATR(KTR_TRAP, "interrupt_vector: tl %d ASI_IRSR %p ASI_IRDR %p", | | 3609 | CATR(KTR_TRAP, "interrupt_vector: tl %d ASI_IRSR %p ASI_IRDR %p", |
3610 | %g3, %g5, %g6, 10, 11, 12) | | 3610 | %g3, %g5, %g6, 10, 11, 12) |
3611 | rdpr %tl, %g5 | | 3611 | rdpr %tl, %g5 |
3612 | stx %g5, [%g3 + KTR_PARM1] | | 3612 | stx %g5, [%g3 + KTR_PARM1] |
3613 | stx %g1, [%g3 + KTR_PARM2] | | 3613 | stx %g1, [%g3 + KTR_PARM2] |
3614 | stx %g7, [%g3 + KTR_PARM3] | | 3614 | stx %g7, [%g3 + KTR_PARM3] |
3615 | 12: | | 3615 | 12: |
3616 | #endif | | 3616 | #endif |
3617 | | | 3617 | |
3618 | btst IRSR_BUSY, %g1 | | 3618 | btst IRSR_BUSY, %g1 |
3619 | bz,pn %icc, 3f ! spurious interrupt | | 3619 | bz,pn %icc, 3f ! spurious interrupt |
3620 | #ifdef MULTIPROCESSOR | | 3620 | #ifdef MULTIPROCESSOR |
3621 | sethi %hi(KERNBASE), %g1 | | 3621 | sethi %hi(KERNBASE), %g1 |
3622 | | | 3622 | |
3623 | cmp %g7, %g1 | | 3623 | cmp %g7, %g1 |
3624 | bl,pt %xcc, Lsoftint_regular ! >= KERNBASE is a fast cross-call | | 3624 | bl,pt %xcc, Lsoftint_regular ! >= KERNBASE is a fast cross-call |
3625 | cmp %g7, MAXINTNUM | | 3625 | cmp %g7, MAXINTNUM |
3626 | | | 3626 | |
3627 | mov IRDR_1H, %g2 | | 3627 | mov IRDR_1H, %g2 |
3628 | ldxa [%g2] ASI_IRDR, %g2 ! Get IPI handler argument 1 | | 3628 | ldxa [%g2] ASI_IRDR, %g2 ! Get IPI handler argument 1 |
3629 | mov IRDR_2H, %g3 | | 3629 | mov IRDR_2H, %g3 |
3630 | ldxa [%g3] ASI_IRDR, %g3 ! Get IPI handler argument 2 | | 3630 | ldxa [%g3] ASI_IRDR, %g3 ! Get IPI handler argument 2 |
3631 | | | 3631 | |
3632 | stxa %g0, [%g0] ASI_IRSR ! Ack IRQ | | 3632 | stxa %g0, [%g0] ASI_IRSR ! Ack IRQ |
3633 | membar #Sync ! Should not be needed due to retry | | 3633 | membar #Sync ! Should not be needed due to retry |
3634 | | | 3634 | |
3635 | jmpl %g7, %g0 | | 3635 | jmpl %g7, %g0 |
3636 | nop | | 3636 | nop |
3637 | #else | | 3637 | #else |
3638 | cmp %g7, MAXINTNUM | | 3638 | cmp %g7, MAXINTNUM |
3639 | #endif | | 3639 | #endif |
3640 | | | 3640 | |
/*
 * Regular (non-IPI) vector dispatch: look the vector number in %g7 up
 * in the intrlev[] table.  NOTE(review): the "bgeu" below tests the
 * condition codes from the "cmp %g7, MAXINTNUM" issued just before
 * this label; out-of-range or unregistered vectors go to the spurious
 * handler at 3: below.
 */
Lsoftint_regular:
	stxa	%g0, [%g0] ASI_IRSR	! Ack IRQ
	membar	#Sync			! Should not be needed due to retry
	sllx	%g7, PTRSHFT, %g5	! Calculate entry number
	sethi	%hi(_C_LABEL(intrlev)), %g3
	bgeu,pn	%xcc, 3f		! Out of range -> spurious handler
	or	%g3, %lo(_C_LABEL(intrlev)), %g3
	LDPTR	[%g3 + %g5], %g5	! We have a pointer to the handler
	brz,pn	%g5, 3f		! NULL means it isn't registered yet.	Skip it.
	nop

/*
 * Schedule the struct intrhand in %g5 for execution at its PIL:
 * push it onto this CPU's ci_intrpending[] list for that level with a
 * lock-free CAS loop (the IH_PEND field doubles as the link), then
 * post the matching soft interrupt.  A handler whose IH_PEND is
 * already non-zero is queued/running and is left alone.
 */
setup_sparcintr:
	LDPTR	[%g5+IH_PEND], %g6	! Read pending flag
	brnz,pn	%g6, ret_from_intr_vector	! Skip it if it's running
	ldub	[%g5+IH_PIL], %g6	! Read interrupt mask
	sethi	%hi(CPUINFO_VA+CI_INTRPENDING), %g1
	sll	%g6, PTRSHFT, %g3	! Find start of table for this IPL
	or	%g1, %lo(CPUINFO_VA+CI_INTRPENDING), %g1
	add	%g1, %g3, %g1
1:
	LDPTR	[%g1], %g3	! Load list head
	STPTR	%g3, [%g5+IH_PEND]	! Link our intrhand node in
	mov	%g5, %g7
	CASPTR	[%g1] ASI_N, %g3, %g7	! Atomically swap in the new head
	cmp	%g7, %g3	! Did it work?
	bne,pn	CCCR, 1b	! No, try again
	nop
2:
	mov	1, %g7
	sll	%g7, %g6, %g6	! 1 << PIL
	wr	%g6, 0, SET_SOFTINT	! Invoke a softint
3672 | | | 3672 | |
/*
 * Common exit path for the interrupt vector: optionally log a KTR
 * trace record, then "retry" to resume the interrupted instruction.
 */
ret_from_intr_vector:
#if KTR_COMPILE & KTR_INTR
	CATR(KTR_TRAP, "ret_from_intr_vector: tl %d, tstate %p, tpc %p",
	    %g3, %g4, %g5, 10, 11, 12)
	rdpr	%tl, %g5
	stx	%g5, [%g3 + KTR_PARM1]
	rdpr	%tstate, %g5
	stx	%g5, [%g3 + KTR_PARM2]
	rdpr	%tpc, %g5
	stx	%g5, [%g3 + KTR_PARM3]
12:
#endif
	retry
	NOTREACHED
3687 | | | 3687 | |
3688 | 3: | | 3688 | 3: |
3689 | #ifdef NOT_DEBUG | | 3689 | #ifdef NOT_DEBUG |
3690 | set _C_LABEL(intrdebug), %g6 | | 3690 | set _C_LABEL(intrdebug), %g6 |
3691 | ld [%g6], %g6 | | 3691 | ld [%g6], %g6 |
3692 | btst INTRDEBUG_SPUR, %g6 | | 3692 | btst INTRDEBUG_SPUR, %g6 |
3693 | bz,pt %icc, 97f | | 3693 | bz,pt %icc, 97f |
3694 | nop | | 3694 | nop |
3695 | #endif | | 3695 | #endif |
3696 | #if 1 | | 3696 | #if 1 |
3697 | STACKFRAME(-CC64FSZ) ! Get a clean register window | | 3697 | STACKFRAME(-CC64FSZ) ! Get a clean register window |
3698 | LOAD_ASCIZ(%o0, "interrupt_vector: spurious vector %lx at pil %d\r\n") | | 3698 | LOAD_ASCIZ(%o0, "interrupt_vector: spurious vector %lx at pil %d\r\n") |
3699 | mov %g7, %o1 | | 3699 | mov %g7, %o1 |
3700 | GLOBTOLOC | | 3700 | GLOBTOLOC |
3701 | clr %g4 | | 3701 | clr %g4 |
3702 | call prom_printf | | 3702 | call prom_printf |
3703 | rdpr %pil, %o2 | | 3703 | rdpr %pil, %o2 |
3704 | LOCTOGLOB | | 3704 | LOCTOGLOB |
3705 | restore | | 3705 | restore |
3706 | 97: | | 3706 | 97: |
3707 | #endif | | 3707 | #endif |
3708 | ba,a ret_from_intr_vector | | 3708 | ba,a ret_from_intr_vector |
3709 | nop ! XXX spitfire bug? | | 3709 | nop ! XXX spitfire bug? |
3710 | | | 3710 | |
#if defined(MULTIPROCESSOR)
/*
 * IPI handler that does no work itself; merely taking the interrupt
 * causes rescheduling on return.
 * void sparc64_ipi_nop(void *);
 */
ENTRY(sparc64_ipi_nop)
	ba,a	ret_from_intr_vector
	nop
3719 | | | 3719 | |
3720 | /* | | 3720 | /* |
3721 | * IPI handler to halt the CPU. Just calls the C vector. | | 3721 | * IPI handler to halt the CPU. Just calls the C vector. |
3722 | * void sparc64_ipi_halt(void *); | | 3722 | * void sparc64_ipi_halt(void *); |
3723 | */ | | 3723 | */ |
3724 | ENTRY(sparc64_ipi_halt) | | 3724 | ENTRY(sparc64_ipi_halt) |
3725 | call _C_LABEL(sparc64_ipi_halt_thiscpu) | | 3725 | call _C_LABEL(sparc64_ipi_halt_thiscpu) |
3726 | clr %g4 | | 3726 | clr %g4 |
3727 | sir | | 3727 | sir |
3728 | | | 3728 | |
3729 | /* | | 3729 | /* |
3730 | * IPI handler to pause the CPU. We just trap to the debugger if it | | 3730 | * IPI handler to pause the CPU. We just trap to the debugger if it |
3731 | * is configured, otherwise just return. | | 3731 | * is configured, otherwise just return. |
3732 | */ | | 3732 | */ |
3733 | ENTRY(sparc64_ipi_pause) | | 3733 | ENTRY(sparc64_ipi_pause) |
3734 | #if defined(DDB) | | 3734 | #if defined(DDB) |
3735 | sparc64_ipi_pause_trap_point: | | 3735 | sparc64_ipi_pause_trap_point: |
3736 | ta 1 | | 3736 | ta 1 |
3737 | nop | | 3737 | nop |
3738 | #endif | | 3738 | #endif |
3739 | ba,a ret_from_intr_vector | | 3739 | ba,a ret_from_intr_vector |
3740 | nop | | 3740 | nop |
3741 | | | 3741 | |
3742 | /* | | 3742 | /* |
3743 | * Increment IPI event counter, defined in machine/{cpu,intr}.h. | | 3743 | * Increment IPI event counter, defined in machine/{cpu,intr}.h. |
3744 | */ | | 3744 | */ |
3745 | #define IPIEVC_INC(n,r1,r2) \ | | 3745 | #define IPIEVC_INC(n,r1,r2) \ |
3746 | sethi %hi(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n), r2; \ | | 3746 | sethi %hi(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n), r2; \ |
3747 | ldx [r2 + %lo(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n)], r1; \ | | 3747 | ldx [r2 + %lo(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n)], r1; \ |
3748 | inc r1; \ | | 3748 | inc r1; \ |
3749 | stx r1, [r2 + %lo(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n)] | | 3749 | stx r1, [r2 + %lo(CPUINFO_VA+CI_IPIEVC+EVC_SIZE*n)] |
3750 | | | 3750 | |
3751 | /* | | 3751 | /* |
3752 | * IPI handler to flush single pte. | | 3752 | * IPI handler to flush single pte. |
3753 | * void sparc64_ipi_flush_pte(void *); | | 3753 | * void sparc64_ipi_flush_pte(void *); |
3754 | * | | 3754 | * |
3755 | * On entry: | | 3755 | * On entry: |
3756 | * %g2 = vaddr_t va | | 3756 | * %g2 = vaddr_t va |
3757 | * %g3 = int ctx | | 3757 | * %g3 = int ctx |
3758 | */ | | 3758 | */ |
3759 | ENTRY(sparc64_ipi_flush_pte) | | 3759 | ENTRY(sparc64_ipi_flush_pte) |
3760 | #if KTR_COMPILE & KTR_PMAP | | 3760 | #if KTR_COMPILE & KTR_PMAP |
3761 | CATR(KTR_TRAP, "sparc64_ipi_flush_pte:", | | 3761 | CATR(KTR_TRAP, "sparc64_ipi_flush_pte:", |
3762 | %g1, %g3, %g4, 10, 11, 12) | | 3762 | %g1, %g3, %g4, 10, 11, 12) |
3763 | 12: | | 3763 | 12: |
3764 | #endif | | 3764 | #endif |
3765 | #ifdef SPITFIRE | | 3765 | #ifdef SPITFIRE |
3766 | srlx %g2, PG_SHIFT4U, %g2 ! drop unused va bits | | 3766 | srlx %g2, PG_SHIFT4U, %g2 ! drop unused va bits |
3767 | mov CTX_SECONDARY, %g5 | | 3767 | mov CTX_SECONDARY, %g5 |
3768 | sllx %g2, PG_SHIFT4U, %g2 | | 3768 | sllx %g2, PG_SHIFT4U, %g2 |
3769 | ldxa [%g5] ASI_DMMU, %g6 ! Save secondary context | | 3769 | ldxa [%g5] ASI_DMMU, %g6 ! Save secondary context |
3770 | sethi %hi(KERNBASE), %g7 | | 3770 | sethi %hi(KERNBASE), %g7 |
3771 | membar #LoadStore | | 3771 | membar #LoadStore |
3772 | stxa %g3, [%g5] ASI_DMMU ! Insert context to demap | | 3772 | stxa %g3, [%g5] ASI_DMMU ! Insert context to demap |
3773 | membar #Sync | | 3773 | membar #Sync |
3774 | or %g2, DEMAP_PAGE_SECONDARY, %g2 ! Demap page from secondary context only | | 3774 | or %g2, DEMAP_PAGE_SECONDARY, %g2 ! Demap page from secondary context only |
3775 | stxa %g2, [%g2] ASI_DMMU_DEMAP ! Do the demap | | 3775 | stxa %g2, [%g2] ASI_DMMU_DEMAP ! Do the demap |
3776 | stxa %g2, [%g2] ASI_IMMU_DEMAP ! to both TLBs | | 3776 | stxa %g2, [%g2] ASI_IMMU_DEMAP ! to both TLBs |
3777 | #ifdef _LP64 | | 3777 | #ifdef _LP64 |
3778 | srl %g2, 0, %g2 ! and make sure it's both 32- and 64-bit entries | | 3778 | srl %g2, 0, %g2 ! and make sure it's both 32- and 64-bit entries |
3779 | stxa %g2, [%g2] ASI_DMMU_DEMAP ! Do the demap | | 3779 | stxa %g2, [%g2] ASI_DMMU_DEMAP ! Do the demap |
3780 | stxa %g2, [%g2] ASI_IMMU_DEMAP ! Do the demap | | 3780 | stxa %g2, [%g2] ASI_IMMU_DEMAP ! Do the demap |
3781 | #endif | | 3781 | #endif |
3782 | flush %g7 | | 3782 | flush %g7 |
3783 | stxa %g6, [%g5] ASI_DMMU ! Restore secondary context | | 3783 | stxa %g6, [%g5] ASI_DMMU ! Restore secondary context |
3784 | membar #Sync | | 3784 | membar #Sync |
3785 | IPIEVC_INC(IPI_EVCNT_TLB_PTE,%g2,%g3) | | 3785 | IPIEVC_INC(IPI_EVCNT_TLB_PTE,%g2,%g3) |
3786 | #else | | 3786 | #else |
3787 | ! Not yet | | 3787 | ! Not yet |
3788 | #endif | | 3788 | #endif |
3789 | | | 3789 | |
3790 | ba,a ret_from_intr_vector | | 3790 | ba,a ret_from_intr_vector |
3791 | nop | | 3791 | nop |
3792 | | | 3792 | |
3793 | /* | | 3793 | /* |
3794 | * IPI handler to flush single context. | | 3794 | * IPI handler to flush single context. |
3795 | * void sparc64_ipi_flush_ctx(void *); | | 3795 | * void sparc64_ipi_flush_ctx(void *); |
3796 | * | | 3796 | * |
3797 | * On entry: | | 3797 | * On entry: |
3798 | * %g2 = int ctx | | 3798 | * %g2 = int ctx |
3799 | */ | | 3799 | */ |
3800 | ENTRY(sparc64_ipi_flush_ctx) | | 3800 | ENTRY(sparc64_ipi_flush_ctx) |
3801 | #if KTR_COMPILE & KTR_PMAP | | 3801 | #if KTR_COMPILE & KTR_PMAP |
3802 | CATR(KTR_TRAP, "sparc64_ipi_flush_ctx:", | | 3802 | CATR(KTR_TRAP, "sparc64_ipi_flush_ctx:", |
3803 | %g1, %g3, %g4, 10, 11, 12) | | 3803 | %g1, %g3, %g4, 10, 11, 12) |
3804 | 12: | | 3804 | 12: |
3805 | #endif | | 3805 | #endif |
3806 | #ifdef SPITFIRE | | 3806 | #ifdef SPITFIRE |
3807 | mov CTX_SECONDARY, %g5 | | 3807 | mov CTX_SECONDARY, %g5 |
3808 | ldxa [%g5] ASI_DMMU, %g6 ! Save secondary context | | 3808 | ldxa [%g5] ASI_DMMU, %g6 ! Save secondary context |
3809 | sethi %hi(KERNBASE), %g7 | | 3809 | sethi %hi(KERNBASE), %g7 |
3810 | membar #LoadStore | | 3810 | membar #LoadStore |
3811 | stxa %g2, [%g5] ASI_DMMU ! Insert context to demap | | 3811 | stxa %g2, [%g5] ASI_DMMU ! Insert context to demap |
3812 | set DEMAP_CTX_SECONDARY, %g3 | | 3812 | set DEMAP_CTX_SECONDARY, %g3 |
3813 | membar #Sync | | 3813 | membar #Sync |
3814 | stxa %g3, [%g3] ASI_DMMU_DEMAP ! Do the demap | | 3814 | stxa %g3, [%g3] ASI_DMMU_DEMAP ! Do the demap |
3815 | stxa %g3, [%g3] ASI_IMMU_DEMAP ! Do the demap | | 3815 | stxa %g3, [%g3] ASI_IMMU_DEMAP ! Do the demap |
3816 | flush %g7 | | 3816 | flush %g7 |
3817 | stxa %g6, [%g5] ASI_DMMU ! Restore secondary context | | 3817 | stxa %g6, [%g5] ASI_DMMU ! Restore secondary context |
3818 | membar #Sync | | 3818 | membar #Sync |
3819 | IPIEVC_INC(IPI_EVCNT_TLB_CTX,%g2,%g3) | | 3819 | IPIEVC_INC(IPI_EVCNT_TLB_CTX,%g2,%g3) |
3820 | #else | | 3820 | #else |
3821 | ! Not yet | | 3821 | ! Not yet |
3822 | #endif | | 3822 | #endif |
3823 | | | 3823 | |
3824 | ba,a ret_from_intr_vector | | 3824 | ba,a ret_from_intr_vector |
3825 | nop | | 3825 | nop |
3826 | | | 3826 | |
3827 | /* | | 3827 | /* |
3828 | * IPI handler to flush the whole TLB. | | 3828 | * IPI handler to flush the whole TLB. |
3829 | * void sparc64_ipi_flush_all(void *); | | 3829 | * void sparc64_ipi_flush_all(void *); |
3830 | */ | | 3830 | */ |
3831 | ENTRY(sparc64_ipi_flush_all) | | 3831 | ENTRY(sparc64_ipi_flush_all) |
3832 | #if KTR_COMPILE & KTR_PMAP | | 3832 | #if KTR_COMPILE & KTR_PMAP |
3833 | CATR(KTR_TRAP, "sparc64_ipi_flush_all: %p %p", | | 3833 | CATR(KTR_TRAP, "sparc64_ipi_flush_all: %p %p", |
3834 | %g1, %g4, %g5, 10, 11, 12) | | 3834 | %g1, %g4, %g5, 10, 11, 12) |
3835 | stx %g3, [%g1 + KTR_PARM1] | | 3835 | stx %g3, [%g1 + KTR_PARM1] |
3836 | stx %g2, [%g1 + KTR_PARM2] | | 3836 | stx %g2, [%g1 + KTR_PARM2] |
3837 | 12: | | 3837 | 12: |
3838 | #endif | | 3838 | #endif |
3839 | | | 3839 | |
3840 | set (63 * 8), %g1 ! last TLB entry | | 3840 | set (63 * 8), %g1 ! last TLB entry |
3841 | membar #Sync | | 3841 | membar #Sync |
3842 | | | 3842 | |
3843 | ! %g1 = loop counter | | 3843 | ! %g1 = loop counter |
3844 | ! %g2 = TLB data value | | 3844 | ! %g2 = TLB data value |
3845 | | | 3845 | |
3846 | 0: | | 3846 | 0: |
3847 | ldxa [%g1] ASI_DMMU_TLB_DATA, %g2 ! fetch the TLB data | | 3847 | ldxa [%g1] ASI_DMMU_TLB_DATA, %g2 ! fetch the TLB data |
3848 | btst TTE_L, %g2 ! locked entry? | | 3848 | btst TTE_L, %g2 ! locked entry? |
3849 | bnz,pt %icc, 1f ! if so, skip | | 3849 | bnz,pt %icc, 1f ! if so, skip |
3850 | nop | | 3850 | nop |
3851 | | | 3851 | |
3852 | stxa %g0, [%g1] ASI_DMMU_TLB_DATA ! zap it | | 3852 | stxa %g0, [%g1] ASI_DMMU_TLB_DATA ! zap it |
3853 | membar #Sync | | 3853 | membar #Sync |
3854 | | | 3854 | |
3855 | 1: | | 3855 | 1: |
3856 | dec 8, %g1 | | 3856 | dec 8, %g1 |
3857 | brgz,pt %g1, 0b ! loop over all entries | | 3857 | brgz,pt %g1, 0b ! loop over all entries |
3858 | nop | | 3858 | nop |
3859 | | | 3859 | |
3860 | set (63 * 8), %g1 ! last TLB entry | | 3860 | set (63 * 8), %g1 ! last TLB entry |
3861 | | | 3861 | |
3862 | 0: | | 3862 | 0: |
3863 | ldxa [%g1] ASI_IMMU_TLB_DATA, %g2 ! fetch the TLB data | | 3863 | ldxa [%g1] ASI_IMMU_TLB_DATA, %g2 ! fetch the TLB data |
3864 | btst TTE_L, %g2 ! locked entry? | | 3864 | btst TTE_L, %g2 ! locked entry? |
3865 | bnz,pt %icc, 1f ! if so, skip | | 3865 | bnz,pt %icc, 1f ! if so, skip |
3866 | nop | | 3866 | nop |
3867 | | | 3867 | |
3868 | stxa %g0, [%g1] ASI_IMMU_TLB_DATA ! zap it | | 3868 | stxa %g0, [%g1] ASI_IMMU_TLB_DATA ! zap it |
3869 | membar #Sync | | 3869 | membar #Sync |
3870 | | | 3870 | |
3871 | 1: | | 3871 | 1: |
3872 | dec 8, %g1 | | 3872 | dec 8, %g1 |
3873 | brgz,pt %g1, 0b ! loop over all entries | | 3873 | brgz,pt %g1, 0b ! loop over all entries |
3874 | nop | | 3874 | nop |
3875 | | | 3875 | |
3876 | sethi %hi(KERNBASE), %g4 | | 3876 | sethi %hi(KERNBASE), %g4 |
3877 | membar #Sync | | 3877 | membar #Sync |
3878 | flush %g4 | | 3878 | flush %g4 |
3879 | | | 3879 | |
3880 | ba,a ret_from_intr_vector | | 3880 | ba,a ret_from_intr_vector |
3881 | nop | | 3881 | nop |
3882 | | | 3882 | |
3883 | /* | | 3883 | /* |
3884 | * Secondary CPU bootstrap code. | | 3884 | * Secondary CPU bootstrap code. |
3885 | */ | | 3885 | */ |
3886 | .text | | 3886 | .text |
3887 | .align 32 | | 3887 | .align 32 |
3888 | 1: rd %pc, %l0 | | 3888 | 1: rd %pc, %l0 |
3889 | LDULNG [%l0 + (4f-1b)], %l1 | | 3889 | LDULNG [%l0 + (4f-1b)], %l1 |
3890 | add %l0, (6f-1b), %l2 | | 3890 | add %l0, (6f-1b), %l2 |
3891 | clr %l3 | | 3891 | clr %l3 |
3892 | 2: cmp %l3, %l1 | | 3892 | 2: cmp %l3, %l1 |
3893 | be CCCR, 3f | | 3893 | be CCCR, 3f |
3894 | nop | | 3894 | nop |
3895 | ldx [%l2 + TTE_VPN], %l4 | | 3895 | ldx [%l2 + TTE_VPN], %l4 |
3896 | ldx [%l2 + TTE_DATA], %l5 | | 3896 | ldx [%l2 + TTE_DATA], %l5 |
3897 | wr %g0, ASI_DMMU, %asi | | 3897 | wr %g0, ASI_DMMU, %asi |
3898 | stxa %l4, [%g0 + TLB_TAG_ACCESS] %asi | | 3898 | stxa %l4, [%g0 + TLB_TAG_ACCESS] %asi |
3899 | stxa %l5, [%g0] ASI_DMMU_DATA_IN | | 3899 | stxa %l5, [%g0] ASI_DMMU_DATA_IN |
3900 | wr %g0, ASI_IMMU, %asi | | 3900 | wr %g0, ASI_IMMU, %asi |
3901 | stxa %l4, [%g0 + TLB_TAG_ACCESS] %asi | | 3901 | stxa %l4, [%g0 + TLB_TAG_ACCESS] %asi |
3902 | stxa %l5, [%g0] ASI_IMMU_DATA_IN | | 3902 | stxa %l5, [%g0] ASI_IMMU_DATA_IN |
3903 | membar #Sync | | 3903 | membar #Sync |
3904 | flush %l4 | | 3904 | flush %l4 |
3905 | add %l2, PTE_SIZE, %l2 | | 3905 | add %l2, PTE_SIZE, %l2 |
3906 | add %l3, 1, %l3 | | 3906 | add %l3, 1, %l3 |
3907 | ba %xcc, 2b | | 3907 | ba %xcc, 2b |
3908 | nop | | 3908 | nop |
3909 | 3: LDULNG [%l0 + (5f-1b)], %l1 | | 3909 | 3: LDULNG [%l0 + (5f-1b)], %l1 |
3910 | LDULNG [%l0 + (7f-1b)], %g2 ! Load cpu_info address. | | 3910 | LDULNG [%l0 + (7f-1b)], %g2 ! Load cpu_info address. |
3911 | jmpl %l1, %g0 | | 3911 | jmpl %l1, %g0 |
3912 | nop | | 3912 | nop |
3913 | | | 3913 | |
3914 | .align PTRSZ | | 3914 | .align PTRSZ |
3915 | 4: ULONG 0x0 | | 3915 | 4: ULONG 0x0 |
3916 | 5: ULONG 0x0 | | 3916 | 5: ULONG 0x0 |
3917 | 7: ULONG 0x0 | | 3917 | 7: ULONG 0x0 |
3918 | _ALIGN | | 3918 | _ALIGN |
3919 | 6: | | 3919 | 6: |
3920 | | | 3920 | |
/*
 * Emit a pointer-aligned, globally visible data symbol.
 */
#define DATA(name) \
        .data ; \
        .align PTRSZ ; \
        .globl name ; \
name:

! Descriptors used by the MP startup code to copy and patch the
! trampoline above: its start address, its length, and the offsets
! of the three patchable slots within it.
DATA(mp_tramp_code)
	POINTER	1b
DATA(mp_tramp_code_len)
	ULONG	6b-1b
DATA(mp_tramp_tlb_slots)
	ULONG	4b-1b
DATA(mp_tramp_func)
	ULONG	5b-1b
DATA(mp_tramp_ci)
	ULONG	7b-1b

	.text
	.align 32
#endif	/* MULTIPROCESSOR */
3941 | | | 3941 | |
3942 | /* | | 3942 | /* |
 * Ultra1 and Ultra2 CPUs use soft interrupts for everything.  On a soft
 * interrupt we check which bits in ASR_SOFTINT(0x16) are set, handle
 * those interrupts, then clear them by setting the appropriate bits in
 * ASR_CLEAR_SOFTINT(0x15).
3947 | * | | 3947 | * |
3948 | * We have an array of 8 interrupt vector slots for each of 15 interrupt | | 3948 | * We have an array of 8 interrupt vector slots for each of 15 interrupt |
3949 | * levels. If a vectored interrupt can be dispatched, the dispatch | | 3949 | * levels. If a vectored interrupt can be dispatched, the dispatch |
3950 | * routine will place a pointer to an intrhand structure in one of | | 3950 | * routine will place a pointer to an intrhand structure in one of |
3951 | * the slots. The interrupt handler will go through the list to look | | 3951 | * the slots. The interrupt handler will go through the list to look |
3952 | * for an interrupt to dispatch. If it finds one it will pull it off | | 3952 | * for an interrupt to dispatch. If it finds one it will pull it off |
3953 | * the list, free the entry, and call the handler. The code is like | | 3953 | * the list, free the entry, and call the handler. The code is like |
3954 | * this: | | 3954 | * this: |
3955 | * | | 3955 | * |
3956 | * for (i=0; i<8; i++) | | 3956 | * for (i=0; i<8; i++) |
3957 | * if (ih = intrpending[intlev][i]) { | | 3957 | * if (ih = intrpending[intlev][i]) { |
3958 | * intrpending[intlev][i] = NULL; | | 3958 | * intrpending[intlev][i] = NULL; |
3959 | * if ((*ih->ih_fun)(ih->ih_arg ? ih->ih_arg : &frame)) | | 3959 | * if ((*ih->ih_fun)(ih->ih_arg ? ih->ih_arg : &frame)) |
3960 | * return; | | 3960 | * return; |
3961 | * strayintr(&frame); | | 3961 | * strayintr(&frame); |
3962 | * return; | | 3962 | * return; |
3963 | * } | | 3963 | * } |
3964 | * | | 3964 | * |
3965 | * Otherwise we go back to the old style of polled interrupts. | | 3965 | * Otherwise we go back to the old style of polled interrupts. |
3966 | * | | 3966 | * |
3967 | * After preliminary setup work, the interrupt is passed to each | | 3967 | * After preliminary setup work, the interrupt is passed to each |
3968 | * registered handler in turn. These are expected to return nonzero if | | 3968 | * registered handler in turn. These are expected to return nonzero if |
3969 | * they took care of the interrupt. If a handler claims the interrupt, | | 3969 | * they took care of the interrupt. If a handler claims the interrupt, |
3970 | * we exit (hardware interrupts are latched in the requestor so we'll | | 3970 | * we exit (hardware interrupts are latched in the requestor so we'll |
3971 | * just take another interrupt in the unlikely event of simultaneous | | 3971 | * just take another interrupt in the unlikely event of simultaneous |
3972 | * interrupts from two different devices at the same level). If we go | | 3972 | * interrupts from two different devices at the same level). If we go |
3973 | * through all the registered handlers and no one claims it, we report a | | 3973 | * through all the registered handlers and no one claims it, we report a |
3974 | * stray interrupt. This is more or less done as: | | 3974 | * stray interrupt. This is more or less done as: |
3975 | * | | 3975 | * |
3976 | * for (ih = intrhand[intlev]; ih; ih = ih->ih_next) | | 3976 | * for (ih = intrhand[intlev]; ih; ih = ih->ih_next) |
3977 | * if ((*ih->ih_fun)(ih->ih_arg ? ih->ih_arg : &frame)) | | 3977 | * if ((*ih->ih_fun)(ih->ih_arg ? ih->ih_arg : &frame)) |
3978 | * return; | | 3978 | * return; |
3979 | * strayintr(&frame); | | 3979 | * strayintr(&frame); |
3980 | * | | 3980 | * |
3981 | * Inputs: | | 3981 | * Inputs: |
3982 | * %l0 = %tstate | | 3982 | * %l0 = %tstate |
3983 | * %l1 = return pc | | 3983 | * %l1 = return pc |
3984 | * %l2 = return npc | | 3984 | * %l2 = return npc |
3985 | * %l3 = interrupt level | | 3985 | * %l3 = interrupt level |
3986 | * (software interrupt only) %l4 = bits to clear in interrupt register | | 3986 | * (software interrupt only) %l4 = bits to clear in interrupt register |
3987 | * | | 3987 | * |
3988 | * Internal: | | 3988 | * Internal: |
3989 | * %l4, %l5: local variables | | 3989 | * %l4, %l5: local variables |
3990 | * %l6 = %y | | 3990 | * %l6 = %y |
3991 | * %l7 = %g1 | | 3991 | * %l7 = %g1 |
3992 | * %g2..%g7 go to stack | | 3992 | * %g2..%g7 go to stack |
3993 | * | | 3993 | * |
3994 | * An interrupt frame is built in the space for a full trapframe; | | 3994 | * An interrupt frame is built in the space for a full trapframe; |
3995 | * this contains the psr, pc, npc, and interrupt level. | | 3995 | * this contains the psr, pc, npc, and interrupt level. |
3996 | * | | 3996 | * |
3997 | * The level of this interrupt is determined by: | | 3997 | * The level of this interrupt is determined by: |
3998 | * | | 3998 | * |
3999 | * IRQ# = %tt - 0x40 | | 3999 | * IRQ# = %tt - 0x40 |
4000 | */ | | 4000 | */ |
4001 | | | 4001 | |
ENTRY_NOPROFILE(sparc_interrupt)
#ifdef TRAPS_USE_IG
	! This is for interrupt debugging
	wrpr	%g0, PSTATE_KERN|PSTATE_IG, %pstate	! DEBUG
#endif
	/*
	 * If this is a %tick softint, clear it then call interrupt_vector.
	 */
	rd	SOFTINT, %g1
	btst	1, %g1			! %tick softint (bit 0) pending?
	bz,pt	%icc, 0f
	sethi	%hi(CPUINFO_VA+CI_TICK_IH), %g3
	wr	%g0, 1, CLEAR_SOFTINT
	ba,pt	%icc, setup_sparcintr	! Dispatch ci_tick_ih like a vectored intr
	LDPTR	[%g3 + %lo(CPUINFO_VA+CI_TICK_IH)], %g5
0:

	! Increment the per-cpu interrupt level
	sethi	%hi(CPUINFO_VA+CI_IDEPTH), %g1
	ld	[%g1 + %lo(CPUINFO_VA+CI_IDEPTH)], %g2
	inc	%g2
	st	%g2, [%g1 + %lo(CPUINFO_VA+CI_IDEPTH)]
4024 | | | 4024 | |
#ifdef TRAPSTATS
	/*
	 * Debug-only statistics: count interrupts taken at inner vs.
	 * outermost trap level (kintrcnt/uintrcnt), and interrupts
	 * taken while already on the interrupt stack (intristk).
	 *
	 * Bug fix: the second "or" used %g1 (which holds
	 * %hi|%lo of kintrcnt) as its source, so %g2 never received
	 * the low bits of uintrcnt and the movrz below would select a
	 * bogus address.  Build %g2 from its own sethi instead.
	 */
	sethi	%hi(_C_LABEL(kintrcnt)), %g1
	sethi	%hi(_C_LABEL(uintrcnt)), %g2
	or	%g1, %lo(_C_LABEL(kintrcnt)), %g1
	or	%g2, %lo(_C_LABEL(uintrcnt)), %g2
	rdpr	%tl, %g3
	dec	%g3
	movrz	%g3, %g2, %g1		! %tl was 1 -> count via uintrcnt
	lduw	[%g1], %g2
	inc	%g2
	stw	%g2, [%g1]
	/* See if we're on the interrupt stack already. */
	set	EINTSTACK, %g2
	set	(EINTSTACK-INTSTACK), %g1
	btst	1, %sp			! 64-bit (biased) stack pointer?
	add	%sp, BIAS, %g3
	movz	%icc, %sp, %g3		! %g3 = unbiased stack pointer
	srl	%g3, 0, %g3
	sub	%g2, %g3, %g3
	cmp	%g3, %g1		! Within [INTSTACK, EINTSTACK)?
	bgu	1f
	set	_C_LABEL(intristk), %g1
	lduw	[%g1], %g2
	inc	%g2
	stw	%g2, [%g1]
1:
#endif
	/*
	 * Build a trapframe on the stack: save the normal globals, %y
	 * and the trap state registers, then drop %tl by one so the
	 * handlers run at the outer trap level.
	 */
	INTR_SETUP(-CC64FSZ-TF_SIZE)
	! Switch to normal globals so we can save them
	wrpr	%g0, PSTATE_KERN, %pstate
	stx	%g1, [%sp + CC64FSZ + STKB + TF_G + ( 1*8)]
	stx	%g2, [%sp + CC64FSZ + STKB + TF_G + ( 2*8)]
	stx	%g3, [%sp + CC64FSZ + STKB + TF_G + ( 3*8)]
	stx	%g4, [%sp + CC64FSZ + STKB + TF_G + ( 4*8)]
	stx	%g5, [%sp + CC64FSZ + STKB + TF_G + ( 5*8)]
	stx	%g6, [%sp + CC64FSZ + STKB + TF_G + ( 6*8)]
	stx	%g7, [%sp + CC64FSZ + STKB + TF_G + ( 7*8)]

	/*
	 * In the EMBEDANY memory model %g4 points to the start of the
	 * data segment.  In our case we need to clear it before calling
	 * any C-code.
	 */
	clr	%g4

	flushw			! Do not remove this insn -- causes interrupt loss
	rd	%y, %l6
	INCR(_C_LABEL(uvmexp)+V_INTR)	! cnt.v_intr++; (clobbers %o0,%o1,%o2)
	rdpr	%tt, %l5	! Find out our current IPL
	rdpr	%tstate, %l0
	rdpr	%tpc, %l1
	rdpr	%tnpc, %l2
	rdpr	%tl, %l3	! Dump our trap frame now we have taken the IRQ
	stw	%l6, [%sp + CC64FSZ + STKB + TF_Y]	! Silly, but we need to save this for rft
	dec	%l3
	CHKPT(%l4,%l7,0x26)
	wrpr	%g0, %l3, %tl
	sth	%l5, [%sp + CC64FSZ + STKB + TF_TT]! debug
	stx	%l0, [%sp + CC64FSZ + STKB + TF_TSTATE]	! set up intrframe/clockframe
	stx	%l1, [%sp + CC64FSZ + STKB + TF_PC]
	btst	TSTATE_PRIV, %l0	! User mode?
	stx	%l2, [%sp + CC64FSZ + STKB + TF_NPC]

	! Compute the interrupt level (trap type - 0x40) and bump the
	! matching intr_evcnts[] event counter (atomically on MP).
	sub	%l5, 0x40, %l6	! Convert to interrupt level
	sethi	%hi(_C_LABEL(intr_evcnts)), %l4
	stb	%l6, [%sp + CC64FSZ + STKB + TF_PIL]	! set up intrframe/clockframe
	rdpr	%pil, %o1
	mulx	%l6, EVC_SIZE, %l3
	or	%l4, %lo(_C_LABEL(intr_evcnts)), %l4	! intrcnt[intlev]++;
	stb	%o1, [%sp + CC64FSZ + STKB + TF_OLDPIL]	! old %pil
	ldx	[%l4 + %l3], %o0
	add	%l4, %l3, %l4
	clr	%l5			! Zero handled count
#ifdef MULTIPROCESSOR
	mov	1, %l3			! Ack softint
1:	add	%o0, 1, %l7		! CAS loop: increment event counter
	casxa	[%l4] ASI_N, %o0, %l7
	cmp	%o0, %l7
	bne,a,pn %xcc, 1b		! retry if changed
	mov	%l7, %o0
#else
	inc	%o0
	mov	1, %l3			! Ack softint
	stx	%o0, [%l4]
#endif
	sll	%l3, %l6, %l3		! Generate IRQ mask

	wrpr	%l6, %pil		! Raise %pil to this interrupt's level
4113 | | | 4113 | |
/*
 * Drain the pending-handler list for this IPL.  The ci_intrpending[]
 * list head is detached wholesale with a CAS (an empty list is -1);
 * each intrhand on the detached chain (linked through IH_PEND) is
 * then unlinked, its pending flag cleared, and its function called
 * with interrupts re-enabled.  %l5 accumulates the number of
 * handlers that claimed the interrupt.
 */
sparc_intr_retry:
	wr	%l3, 0, CLEAR_SOFTINT	! (don't clear possible %tick IRQ)
	sethi	%hi(CPUINFO_VA+CI_INTRPENDING), %l4
	sll	%l6, PTRSHFT, %l2	! Index by interrupt level
	or	%l4, %lo(CPUINFO_VA+CI_INTRPENDING), %l4
	add	%l2, %l4, %l4

1:
	membar	#StoreLoad		! Make sure any failed casxa insns complete
	LDPTR	[%l4], %l2		! Check a slot
	cmp	%l2, -1
	beq,pn	CCCR, intrcmplt		! Empty list?
	mov	-1, %l7
	membar	#LoadStore
	CASPTR	[%l4] ASI_N, %l2, %l7	! Grab the entire list
	cmp	%l7, %l2
	bne,pn	CCCR, 1b		! Head changed under us -- retry
	nop
2:
	add	%sp, CC64FSZ+STKB, %o2	! tf = %sp + CC64FSZ + STKB
	LDPTR	[%l2 + IH_PEND], %l7	! save ih->ih_pending (next in chain)
	membar	#LoadStore
	STPTR	%g0, [%l2 + IH_PEND]	! Clear pending flag
	membar	#Sync
	LDPTR	[%l2 + IH_FUN], %o4	! ih->ih_fun
	LDPTR	[%l2 + IH_ARG], %o0	! ih->ih_arg

	wrpr	%g0, PSTATE_INTR, %pstate	! Reenable interrupts
	jmpl	%o4, %o7		! handled = (*ih->ih_fun)(...)
	movrz	%o0, %o2, %o0		! arg = (arg == 0) ? arg : tf
	wrpr	%g0, PSTATE_KERN, %pstate	! Disable interrupts
	LDPTR	[%l2 + IH_CLR], %l1
	membar	#Sync

	brz,pn	%l1, 0f			! No hardware clear register?
	add	%l5, %o0, %l5		! (delay) accumulate handled count
	stx	%g0, [%l1]		! Clear intr source
	membar	#Sync			! Should not be needed
0:
	cmp	%l7, -1			! End of detached chain?
	bne,pn	CCCR, 2b		! 'Nother?
	mov	%l7, %l2
4156 | | | 4156 | |
4157 | intrcmplt: | | 4157 | intrcmplt: |
4158 | /* | | 4158 | /* |
4159 | * Re-read SOFTINT to see if any new pending interrupts | | 4159 | * Re-read SOFTINT to see if any new pending interrupts |
4160 | * at this level. | | 4160 | * at this level. |
4161 | */ | | 4161 | */ |
4162 | mov 1, %l3 ! Ack softint | | 4162 | mov 1, %l3 ! Ack softint |
4163 | rd SOFTINT, %l7 ! %l5 contains #intr handled. | | 4163 | rd SOFTINT, %l7 ! %l5 contains #intr handled. |
4164 | sll %l3, %l6, %l3 ! Generate IRQ mask | | 4164 | sll %l3, %l6, %l3 ! Generate IRQ mask |
4165 | btst %l3, %l7 ! leave mask in %l3 for retry code | | 4165 | btst %l3, %l7 ! leave mask in %l3 for retry code |
4166 | bnz,pn %icc, sparc_intr_retry | | 4166 | bnz,pn %icc, sparc_intr_retry |
4167 | mov 1, %l5 ! initialize intr count for next run | | 4167 | mov 1, %l5 ! initialize intr count for next run |
4168 | | | 4168 | |
4169 | ! Decrement this cpu's interrupt depth | | 4169 | ! Decrement this cpu's interrupt depth |
4170 | sethi %hi(CPUINFO_VA+CI_IDEPTH), %l4 | | 4170 | sethi %hi(CPUINFO_VA+CI_IDEPTH), %l4 |
4171 | ld [%l4 + %lo(CPUINFO_VA+CI_IDEPTH)], %l5 | | 4171 | ld [%l4 + %lo(CPUINFO_VA+CI_IDEPTH)], %l5 |
4172 | dec %l5 | | 4172 | dec %l5 |
4173 | st %l5, [%l4 + %lo(CPUINFO_VA+CI_IDEPTH)] | | 4173 | st %l5, [%l4 + %lo(CPUINFO_VA+CI_IDEPTH)] |
4174 | | | 4174 | |
4175 | #ifdef DEBUG | | 4175 | #ifdef DEBUG |
4176 | set _C_LABEL(intrdebug), %o2 | | 4176 | set _C_LABEL(intrdebug), %o2 |
4177 | ld [%o2], %o2 | | 4177 | ld [%o2], %o2 |
4178 | btst INTRDEBUG_FUNC, %o2 | | 4178 | btst INTRDEBUG_FUNC, %o2 |
4179 | bz,a,pt %icc, 97f | | 4179 | bz,a,pt %icc, 97f |
4180 | nop | | 4180 | nop |
4181 | | | 4181 | |
4182 | STACKFRAME(-CC64FSZ) ! Get a clean register window | | 4182 | STACKFRAME(-CC64FSZ) ! Get a clean register window |
4183 | LOAD_ASCIZ(%o0, "sparc_interrupt: done\r\n") | | 4183 | LOAD_ASCIZ(%o0, "sparc_interrupt: done\r\n") |
4184 | GLOBTOLOC | | 4184 | GLOBTOLOC |
4185 | call prom_printf | | 4185 | call prom_printf |
4186 | nop | | 4186 | nop |
4187 | LOCTOGLOB | | 4187 | LOCTOGLOB |
4188 | restore | | 4188 | restore |
4189 | 97: | | 4189 | 97: |
4190 | #endif | | 4190 | #endif |
4191 | | | 4191 | |
4192 | ldub [%sp + CC64FSZ + STKB + TF_OLDPIL], %l3 ! restore old %pil | | 4192 | ldub [%sp + CC64FSZ + STKB + TF_OLDPIL], %l3 ! restore old %pil |
4193 | wrpr %l3, 0, %pil | | 4193 | wrpr %l3, 0, %pil |
4194 | | | 4194 | |
4195 | CHKPT(%o1,%o2,5) | | 4195 | CHKPT(%o1,%o2,5) |
4196 | ba,a,pt %icc, return_from_trap | | 4196 | ba,a,pt %icc, return_from_trap |
4197 | nop | | 4197 | nop |
4198 | | | 4198 | |
4199 | #ifdef notyet | | 4199 | #ifdef notyet |
4200 | /* | | 4200 | /* |
4201 | * Level 12 (ZS serial) interrupt. Handle it quickly, schedule a | | 4201 | * Level 12 (ZS serial) interrupt. Handle it quickly, schedule a |
4202 | * software interrupt, and get out. Do the software interrupt directly | | 4202 | * software interrupt, and get out. Do the software interrupt directly |
4203 | * if we would just take it on the way out. | | 4203 | * if we would just take it on the way out. |
4204 | * | | 4204 | * |
4205 | * Input: | | 4205 | * Input: |
4206 | * %l0 = %psr | | 4206 | * %l0 = %psr |
4207 | * %l1 = return pc | | 4207 | * %l1 = return pc |
4208 | * %l2 = return npc | | 4208 | * %l2 = return npc |
4209 | * Internal: | | 4209 | * Internal: |
4210 | * %l3 = zs device | | 4210 | * %l3 = zs device |
4211 | * %l4, %l5 = temporary | | 4211 | * %l4, %l5 = temporary |
4212 | * %l6 = rr3 (or temporary data) + 0x100 => need soft int | | 4212 | * %l6 = rr3 (or temporary data) + 0x100 => need soft int |
4213 | * %l7 = zs soft status | | 4213 | * %l7 = zs soft status |
4214 | */ | | 4214 | */ |
4215 | zshard: | | 4215 | zshard: |
4216 | #endif /* notyet */ | | 4216 | #endif /* notyet */ |
4217 | | | 4217 | |
4218 | .globl return_from_trap, rft_kernel, rft_user | | 4218 | .globl return_from_trap, rft_kernel, rft_user |
4219 | .globl softtrap, slowtrap | | 4219 | .globl softtrap, slowtrap |
4220 | | | 4220 | |
4221 | /* | | 4221 | /* |
4222 | * Various return-from-trap routines (see return_from_trap). | | 4222 | * Various return-from-trap routines (see return_from_trap). |
4223 | */ | | 4223 | */ |
4224 | | | 4224 | |
4225 | /* | | 4225 | /* |
4226 | * Return from trap. | | 4226 | * Return from trap. |
4227 | * registers are: | | 4227 | * registers are: |
4228 | * | | 4228 | * |
4229 | * [%sp + CC64FSZ + STKB] => trap frame | | 4229 | * [%sp + CC64FSZ + STKB] => trap frame |
4230 | * | | 4230 | * |
4231 | * We must load all global, out, and trap registers from the trap frame. | | 4231 | * We must load all global, out, and trap registers from the trap frame. |
4232 | * | | 4232 | * |
4233 | * If returning to kernel, we should be at the proper trap level because | | 4233 | * If returning to kernel, we should be at the proper trap level because |
4234 | * we don't touch %tl. | | 4234 | * we don't touch %tl. |
4235 | * | | 4235 | * |
4236 | * When returning to user mode, the trap level does not matter, as it | | 4236 | * When returning to user mode, the trap level does not matter, as it |
4237 | * will be set explicitly. | | 4237 | * will be set explicitly. |
4238 | * | | 4238 | * |
4239 | * If we are returning to user code, we must: | | 4239 | * If we are returning to user code, we must: |
4240 | * 1. Check for register windows in the pcb that belong on the stack. | | 4240 | * 1. Check for register windows in the pcb that belong on the stack. |
4241 | * If there are any, reload them | | 4241 | * If there are any, reload them |
4242 | */ | | 4242 | */ |
return_from_trap:
#ifdef DEBUG
	!! Make sure we don't have pc == npc == 0 or we suck.
	ldx	[%sp + CC64FSZ + STKB + TF_PC], %g2
	ldx	[%sp + CC64FSZ + STKB + TF_NPC], %g3
	orcc	%g2, %g3, %g0
	tz	%icc, 1			! trap into the debugger if both are zero
#endif

#if KTR_COMPILE & KTR_TRAP
	CATR(KTR_TRAP, "rft: sp=%p pc=%p npc=%p tstate=%p",
	    %g2, %g3, %g4, 10, 11, 12)
	stx	%i6, [%g2 + KTR_PARM1]
	ldx	[%sp + CC64FSZ + STKB + TF_PC], %g3
	stx	%g3, [%g2 + KTR_PARM2]
	ldx	[%sp + CC64FSZ + STKB + TF_NPC], %g3
	stx	%g3, [%g2 + KTR_PARM3]
	ldx	[%sp + CC64FSZ + STKB + TF_TSTATE], %g3
	stx	%g3, [%g2 + KTR_PARM4]
12:
#endif

	!!
	!! We'll make sure we flush our pcb here, rather than later.
	!!
	ldx	[%sp + CC64FSZ + STKB + TF_TSTATE], %g1
	btst	TSTATE_PRIV, %g1		! returning to userland?

	!!
	!! Let all pending interrupts drain before returning to userland
	!!
	bnz,pn	%icc, 1f			! Returning to userland?
	 nop
	wrpr	%g0, PSTATE_INTR, %pstate	! briefly open the IRQ window
	wrpr	%g0, %g0, %pil			! Lower IPL
1:
	wrpr	%g0, PSTATE_KERN, %pstate	! Make sure we have normal globals & no IRQs

	/* Restore normal globals */
	ldx	[%sp + CC64FSZ + STKB + TF_G + (1*8)], %g1
	ldx	[%sp + CC64FSZ + STKB + TF_G + (2*8)], %g2
	ldx	[%sp + CC64FSZ + STKB + TF_G + (3*8)], %g3
	ldx	[%sp + CC64FSZ + STKB + TF_G + (4*8)], %g4
	ldx	[%sp + CC64FSZ + STKB + TF_G + (5*8)], %g5
	ldx	[%sp + CC64FSZ + STKB + TF_G + (6*8)], %g6
	ldx	[%sp + CC64FSZ + STKB + TF_G + (7*8)], %g7
	/* Switch to alternate globals and load outs */
	wrpr	%g0, PSTATE_KERN|PSTATE_AG, %pstate
#ifdef TRAPS_USE_IG
	wrpr	%g0, PSTATE_KERN|PSTATE_IG, %pstate	! DEBUG
#endif
	!! Note: outs of the trap frame land in our %i registers; the
	!! final `restore`/`retry` below makes them the caller's %o regs.
	ldx	[%sp + CC64FSZ + STKB + TF_O + (0*8)], %i0
	ldx	[%sp + CC64FSZ + STKB + TF_O + (1*8)], %i1
	ldx	[%sp + CC64FSZ + STKB + TF_O + (2*8)], %i2
	ldx	[%sp + CC64FSZ + STKB + TF_O + (3*8)], %i3
	ldx	[%sp + CC64FSZ + STKB + TF_O + (4*8)], %i4
	ldx	[%sp + CC64FSZ + STKB + TF_O + (5*8)], %i5
	ldx	[%sp + CC64FSZ + STKB + TF_O + (6*8)], %i6
	ldx	[%sp + CC64FSZ + STKB + TF_O + (7*8)], %i7
	/* Now load trap registers into alternate globals */
	ld	[%sp + CC64FSZ + STKB + TF_Y], %g4
	ldx	[%sp + CC64FSZ + STKB + TF_TSTATE], %g1		! load new values
	wr	%g4, 0, %y
	ldx	[%sp + CC64FSZ + STKB + TF_PC], %g2
	ldx	[%sp + CC64FSZ + STKB + TF_NPC], %g3

#ifdef NOTDEF_DEBUG
	ldub	[%sp + CC64FSZ + STKB + TF_PIL], %g5		! restore %pil
	wrpr	%g5, %pil				! DEBUG
#endif

	/* Returning to user mode or kernel mode? */
	btst	TSTATE_PRIV, %g1		! returning to userland?
	CHKPT(%g4, %g7, 6)
	bz,pt	%icc, rft_user
	 sethi	%hi(CPUINFO_VA+CI_WANT_AST), %g7	! first instr of rft_user
4319 | | | 4319 | |
4320 | /* | | 4320 | /* |
4321 | * Return from trap, to kernel. | | 4321 | * Return from trap, to kernel. |
4322 | * | | 4322 | * |
4323 | * We will assume, for the moment, that all kernel traps are properly stacked | | 4323 | * We will assume, for the moment, that all kernel traps are properly stacked |
4324 | * in the trap registers, so all we have to do is insert the (possibly modified) | | 4324 | * in the trap registers, so all we have to do is insert the (possibly modified) |
4325 | * register values into the trap registers then do a retry. | | 4325 | * register values into the trap registers then do a retry. |
4326 | * | | 4326 | * |
4327 | */ | | 4327 | */ |
rft_kernel:
	!! Push a fresh trap level and stuff the saved %tpc/%tnpc/%tstate
	!! (in %g2/%g3/%g1 from return_from_trap) back into the trap regs.
	rdpr	%tl, %g4			! Grab a set of trap registers
	inc	%g4
	wrpr	%g4, %g0, %tl
	wrpr	%g3, 0, %tnpc
	wrpr	%g2, 0, %tpc
	wrpr	%g1, 0, %tstate
	CHKPT(%g1,%g2,7)
	restore					! back to the trapped frame's window
	CHKPT(%g1,%g2,0)			! Clear this out
	!! The restore may itself have trapped (window fill), so re-sync
	!! the CWP field of %tstate with the current %cwp before retry.
	rdpr	%tstate, %g1			! Since we may have trapped our regs may be toast
	rdpr	%cwp, %g2
	andn	%g1, CWP, %g1
	wrpr	%g1, %g2, %tstate		! Put %cwp in %tstate
	CLRTT
#ifdef TRAPSTATS
	set	_C_LABEL(rftkcnt), %g1
	rdpr	%tl, %g2
	sllx	%g2, 2, %g2
	add	%g1, %g2, %g1
	lduw	[%g1], %g2
	inc	%g2
	stw	%g2, [%g1]
#endif
#if 0
	wrpr	%g0, 0, %cleanwin	! DEBUG
#endif
#if defined(DDB) && defined(MULTIPROCESSOR)
	!! If we are returning to the pause-trap point, finish the trap
	!! with "done" (skip the trapping instruction) instead of retry.
	set	sparc64_ipi_pause_trap_point, %g1
	rdpr	%tpc, %g2
	cmp	%g1, %g2
	bne,pt	%icc, 0f
	 nop
	done
0:
#endif
	retry					! resume at %tpc/%tnpc
	NOTREACHED
4366 | /* | | 4366 | /* |
4367 | * Return from trap, to user. Checks for scheduling trap (`ast') first; | | 4367 | * Return from trap, to user. Checks for scheduling trap (`ast') first; |
4368 | * will re-enter trap() if set. Note that we may have to switch from | | 4368 | * will re-enter trap() if set. Note that we may have to switch from |
4369 | * the interrupt stack to the kernel stack in this case. | | 4369 | * the interrupt stack to the kernel stack in this case. |
4370 | * %g1 = %tstate | | 4370 | * %g1 = %tstate |
4371 | * %g2 = return %pc | | 4371 | * %g2 = return %pc |
4372 | * %g3 = return %npc | | 4372 | * %g3 = return %npc |
4373 | * If returning to a valid window, just set psr and return. | | 4373 | * If returning to a valid window, just set psr and return. |
4374 | */ | | 4374 | */ |
4375 | .data | | 4375 | .data |
4376 | rft_wcnt: .word 0 | | 4376 | rft_wcnt: .word 0 |
4377 | .text | | 4377 | .text |
4378 | | | 4378 | |
rft_user:
!	sethi	%hi(CPUINFO_VA+CI_WANT_AST), %g7	! (done above)
	!! If an AST is pending, re-enter trap() with type T_AST instead
	!! of returning to userland.
	lduw	[%g7 + %lo(CPUINFO_VA+CI_WANT_AST)], %g7! want AST trap?
	brnz,pn	%g7, softtrap			! yes, re-enter trap with type T_AST
	 mov	T_AST, %g4

	CHKPT(%g4,%g7,8)
#ifdef NOTDEF_DEBUG
	!! Debug-only (not normally compiled): print pcb state when
	!! saved register windows remain in the pcb.
	sethi	%hi(CPCB), %g4
	LDPTR	[%g4 + %lo(CPCB)], %g4
	ldub	[%g4 + PCB_NSAVED], %g4		! nsaved
	brz,pt	%g4, 2f		! Only print if nsaved <> 0
	 nop

	set	1f, %o0
	mov	%g4, %o1
	mov	%g2, %o2			! pc
	wr	%g0, ASI_DMMU, %asi		! restore the user context
	ldxa	[CTX_SECONDARY] %asi, %o3	! ctx
	GLOBTOLOC
	mov	%g3, %o5
	call	printf
	 mov	%i6, %o4			! sp
!	wrpr	%g0, PSTATE_INTR, %pstate	! Allow IRQ service
!	wrpr	%g0, PSTATE_KERN, %pstate	! DenyIRQ service
	LOCTOGLOB
1:
	.data
	.asciz	"rft_user: nsaved=%x pc=%d ctx=%x sp=%x npc=%p\n"
	_ALIGN
	.text
#endif
4411 | | | 4411 | |
	/*
	 * NB: only need to do this after a cache miss
	 */
#ifdef TRAPSTATS
	set	_C_LABEL(rftucnt), %g6
	lduw	[%g6], %g7
	inc	%g7
	stw	%g7, [%g6]
#endif
	/*
	 * Now check to see if any regs are saved in the pcb and restore them.
	 *
	 * Here we need to undo the damage caused by switching to a kernel
	 * stack.
	 *
	 * We will use alternate globals %g4..%g7 because %g1..%g3 are used
	 * by the data fault trap handlers and we don't want possible conflict.
	 */

	sethi	%hi(CPCB), %g6
	rdpr	%otherwin, %g7			! restore register window controls
#ifdef DEBUG
	rdpr	%canrestore, %g5		! DEBUG
	tst	%g5				! DEBUG
	tnz	%icc, 1; nop			! DEBUG
!	mov	%g0, %g5			! There should be *NO* %canrestore
	add	%g7, %g5, %g7			! DEBUG
#endif
	!! User windows counted in %otherwin become restorable again.
	wrpr	%g0, %g7, %canrestore
	LDPTR	[%g6 + %lo(CPCB)], %g6		! %g6 = current pcb
	wrpr	%g0, 0, %otherwin

	CHKPT(%g4,%g7,9)
	ldub	[%g6 + PCB_NSAVED], %g7		! Any saved reg windows?
	wrpr	%g0, WSTATE_USER, %wstate	! Need to know where our sp points

#ifdef DEBUG
	set	rft_wcnt, %g4	! Keep track of all the windows we restored
	stw	%g7, [%g4]
#endif

	brz,pt	%g7, 5f				! No saved reg wins
	 nop
	dec	%g7				! We can do this now or later.  Move to last entry

#ifdef DEBUG
	rdpr	%canrestore, %g4		! DEBUG Make sure we've restored everything
	brnz,a,pn	%g4, 0f			! DEBUG
	 sir					! DEBUG we should NOT have any usable windows here
0:						! DEBUG
	wrpr	%g0, 5, %tl
#endif
	rdpr	%otherwin, %g4
	sll	%g7, 7, %g5			! calculate ptr into rw64 array 8*16 == 128 or 7 bits
	brz,pt	%g4, 6f				! We should not have any user windows left
	 add	%g5, %g6, %g5

	!! Having %otherwin != 0 here is a fatal inconsistency: report it
	!! and panic.
	set	1f, %o0
	mov	%g7, %o1
	mov	%g4, %o2
	call	printf
	 wrpr	%g0, PSTATE_KERN, %pstate
	set	2f, %o0
	call	panic
	 nop
	NOTREACHED
	.data
1:	.asciz	"pcb_nsaved=%x and otherwin=%x\n"
2:	.asciz	"rft_user\n"
	_ALIGN
	.text
6:
3:
	!! Walk the pcb_rw array from the last saved window back to the
	!! first, reloading each saved window (16 regs * 8 bytes = 128).
	restored					! Load in the window
	restore						! This should not trap!
	ldx	[%g5 + PCB_RW + ( 0*8)], %l0		! Load the window from the pcb
	ldx	[%g5 + PCB_RW + ( 1*8)], %l1
	ldx	[%g5 + PCB_RW + ( 2*8)], %l2
	ldx	[%g5 + PCB_RW + ( 3*8)], %l3
	ldx	[%g5 + PCB_RW + ( 4*8)], %l4
	ldx	[%g5 + PCB_RW + ( 5*8)], %l5
	ldx	[%g5 + PCB_RW + ( 6*8)], %l6
	ldx	[%g5 + PCB_RW + ( 7*8)], %l7

	ldx	[%g5 + PCB_RW + ( 8*8)], %i0
	ldx	[%g5 + PCB_RW + ( 9*8)], %i1
	ldx	[%g5 + PCB_RW + (10*8)], %i2
	ldx	[%g5 + PCB_RW + (11*8)], %i3
	ldx	[%g5 + PCB_RW + (12*8)], %i4
	ldx	[%g5 + PCB_RW + (13*8)], %i5
	ldx	[%g5 + PCB_RW + (14*8)], %i6
	ldx	[%g5 + PCB_RW + (15*8)], %i7

#ifdef DEBUG
	stx	%g0, [%g5 + PCB_RW + (14*8)]	! DEBUG mark that we've saved this one
#endif

	cmp	%g5, %g6
	bgu,pt	%xcc, 3b			! Next one?
	 dec	8*16, %g5			! step back one saved window
4512 | | | 4512 | |
	!! All pcb windows reloaded; recompute the window-control registers.
	!! %ver.CWP holds NWINDOWS-1 (SPARC V9 %ver maxwin field).
	rdpr	%ver, %g5
	stb	%g0, [%g6 + PCB_NSAVED]		! Clear them out so we won't do this again
	and	%g5, CWP, %g5
	add	%g5, %g7, %g4
	dec	1, %g5				! NWINDOWS-1-1
	wrpr	%g5, 0, %cansave
	wrpr	%g0, 0, %canrestore		! Make sure we have no freeloaders XXX
	wrpr	%g0, WSTATE_USER, %wstate	! Save things to user space
	mov	%g7, %g5			! We already did one restore
4:
	!! save back up to the window we started in, bumping %canrestore
	!! and %cleanwin as we go.
	rdpr	%canrestore, %g4
	inc	%g4
	deccc	%g5
	wrpr	%g4, 0, %cleanwin		! Make *sure* we don't trap to cleanwin
	bge,a,pt	%xcc, 4b		! return to starting regwin
	 save	%g0, %g0, %g0			! This may force a datafault

#ifdef DEBUG
	wrpr	%g0, 0, %tl
#endif
#ifdef TRAPSTATS
	set	_C_LABEL(rftuld), %g5
	lduw	[%g5], %g4
	inc	%g4
	stw	%g4, [%g5]
#endif
	!!
	!! We can't take any save faults in here 'cause they will never be serviced
	!!

#ifdef DEBUG
	sethi	%hi(CPCB), %g5
	LDPTR	[%g5 + %lo(CPCB)], %g5
	ldub	[%g5 + PCB_NSAVED], %g5		! Any saved reg windows?
	tst	%g5
	tnz	%icc, 1; nop			! Debugger if we still have saved windows
	bne,a	rft_user			! Try starting over again
	 sethi	%hi(CPUINFO_VA+CI_WANT_AST), %g7
#endif
	/*
	 * Set up our return trapframe so we can recover if we trap from here
	 * on in.
	 */
	wrpr	%g0, 1, %tl			! Set up the trap state
	wrpr	%g2, 0, %tpc
	wrpr	%g3, 0, %tnpc
	ba,pt	%icc, 6f
	 wrpr	%g1, %g0, %tstate

5:
	/*
	 * Set up our return trapframe so we can recover if we trap from here
	 * on in.
	 */
	!! Path taken when there were no saved windows in the pcb: just
	!! load the trap regs and restore into the user's window.
	wrpr	%g0, 1, %tl			! Set up the trap state
	wrpr	%g2, 0, %tpc
	wrpr	%g3, 0, %tnpc
	wrpr	%g1, %g0, %tstate
	restore
6:
	CHKPT(%g4,%g7,0xa)
	rdpr	%canrestore, %g5
	wrpr	%g5, 0, %cleanwin		! Force cleanup of kernel windows
4576 | | | 4576 | |
#ifdef NOTDEF_DEBUG
	!! Debug-only (not normally compiled): compare the restored %l0
	!! against the trap frame and dump all locals/ins on mismatch.
	ldx	[%g6 + CC64FSZ + STKB + TF_L + (0*8)], %g5	! DEBUG -- get proper value for %l0
	cmp	%l0, %g5
	be,a,pt	%icc, 1f
	 nop
!	sir			! WATCHDOG
	set	badregs, %g1	! Save the suspect regs
	stw	%l0, [%g1+(4*0)]
	stw	%l1, [%g1+(4*1)]
	stw	%l2, [%g1+(4*2)]
	stw	%l3, [%g1+(4*3)]
	stw	%l4, [%g1+(4*4)]
	stw	%l5, [%g1+(4*5)]
	stw	%l6, [%g1+(4*6)]
	stw	%l7, [%g1+(4*7)]
	stw	%i0, [%g1+(4*8)+(4*0)]
	stw	%i1, [%g1+(4*8)+(4*1)]
	stw	%i2, [%g1+(4*8)+(4*2)]
	stw	%i3, [%g1+(4*8)+(4*3)]
	stw	%i4, [%g1+(4*8)+(4*4)]
	stw	%i5, [%g1+(4*8)+(4*5)]
	stw	%i6, [%g1+(4*8)+(4*6)]
	stw	%i7, [%g1+(4*8)+(4*7)]
	save
	inc	%g7
	wrpr	%g7, 0, %otherwin
	wrpr	%g0, 0, %canrestore
	wrpr	%g0, WSTATE_KERN, %wstate	! Need to know where our sp points
	set	rft_wcnt, %g4	! Restore nsaved before trapping
	sethi	%hi(CPCB), %g6
	LDPTR	[%g6 + %lo(CPCB)], %g6
	lduw	[%g4], %g4
	stb	%g4, [%g6 + PCB_NSAVED]
	ta	1
	sir
	.data
badregs:
	.space	16*4
	.text
1:
#endif

	!! Sync the CWP field of the saved %tstate with the current window
	!! before the eventual retry.
	rdpr	%tstate, %g1
	rdpr	%cwp, %g7			! Find our cur window
	andn	%g1, CWP, %g1			! Clear it from %tstate
	wrpr	%g1, %g7, %tstate		! Set %tstate with %cwp
	CHKPT(%g4,%g7,0xb)

	!! Copy the user's secondary context into the primary context
	!! register so userland resumes in its own address space.
	wr	%g0, ASI_DMMU, %asi		! restore the user context
	ldxa	[CTX_SECONDARY] %asi, %g4
	sethi	%hi(KERNBASE), %g7		! Should not be needed due to retry
	stxa	%g4, [CTX_PRIMARY] %asi
	membar	#Sync				! Should not be needed due to retry
4630 | flush %g7 ! Should not be needed due to retry | | 4630 | flush %g7 ! Should not be needed due to retry |
4631 | CLRTT | | 4631 | CLRTT |
4632 | CHKPT(%g4,%g7,0xd) | | 4632 | CHKPT(%g4,%g7,0xd) |
4633 | #ifdef TRAPSTATS | | 4633 | #ifdef TRAPSTATS |
4634 | set _C_LABEL(rftudone), %g1 | | 4634 | set _C_LABEL(rftudone), %g1 |
4635 | lduw [%g1], %g2 | | 4635 | lduw [%g1], %g2 |
4636 | inc %g2 | | 4636 | inc %g2 |
4637 | stw %g2, [%g1] | | 4637 | stw %g2, [%g1] |
4638 | #endif | | 4638 | #endif |
4639 | #ifdef DEBUG | | 4639 | #ifdef DEBUG |
4640 | sethi %hi(CPCB), %g5 | | 4640 | sethi %hi(CPCB), %g5 |
4641 | LDPTR [%g5 + %lo(CPCB)], %g5 | | 4641 | LDPTR [%g5 + %lo(CPCB)], %g5 |
4642 | ldub [%g5 + PCB_NSAVED], %g5 ! Any saved reg windows? | | 4642 | ldub [%g5 + PCB_NSAVED], %g5 ! Any saved reg windows? |
4643 | tst %g5 | | 4643 | tst %g5 |
4644 | tnz %icc, 1; nop ! Debugger if we still have saved windows! | | 4644 | tnz %icc, 1; nop ! Debugger if we still have saved windows! |
4645 | #endif | | 4645 | #endif |
4646 | wrpr %g0, 0, %pil ! Enable all interrupts | | 4646 | wrpr %g0, 0, %pil ! Enable all interrupts |
4647 | retry | | 4647 | retry |
4648 | | | 4648 | |
4649 | ! exported end marker for kernel gdb | | 4649 | ! exported end marker for kernel gdb |
4650 | .globl _C_LABEL(endtrapcode) | | 4650 | .globl _C_LABEL(endtrapcode) |
4651 | _C_LABEL(endtrapcode): | | 4651 | _C_LABEL(endtrapcode): |
4652 | | | 4652 | |
4653 | #ifdef DDB | | 4653 | #ifdef DDB |
4654 | !!! | | 4654 | !!! |
4655 | !!! Dump the DTLB to phys address in %o0 and print it | | 4655 | !!! Dump the DTLB to phys address in %o0 and print it |
4656 | !!! | | 4656 | !!! |
4657 | !!! Only toast a few %o registers | | 4657 | !!! Only toast a few %o registers |
4658 | !!! | | 4658 | !!! |
4659 | | | 4659 | |
ENTRY_NOPROFILE(dump_dtlb)
	! Dump all 64 DTLB entries to the buffer at %o0 as consecutive
	! (tag, data) pairs of 8-byte words: 64 * 16 bytes total.
	! Clobbers %o1-%o4.  DDB diagnostic aid only; no stack frame needed.
	clr	%o1				! %o1 = TLB entry offset (entry# * 8)
	add	%o1, (64 * 8), %o3		! %o3 = loop limit (one past last entry)
1:
	ldxa	[%o1] ASI_DMMU_TLB_TAG, %o2	! read entry's tag via diagnostic ASI
	membar	#Sync
	stx	%o2, [%o0]			! store tag word to buffer
	membar	#Sync
	inc	8, %o0
	ldxa	[%o1] ASI_DMMU_TLB_DATA, %o4	! read entry's data word
	membar	#Sync
	inc	8, %o1				! advance to next TLB entry
	stx	%o4, [%o0]			! store data word to buffer
	cmp	%o1, %o3			! done all 64 entries?
	membar	#Sync
	bl	1b
	 inc	8, %o0				! (delay) advance output pointer

	retl
	 nop
4680 | | | 4680 | |
ENTRY_NOPROFILE(dump_itlb)
	! Dump all 64 ITLB entries to the buffer at %o0 as consecutive
	! (tag, data) pairs of 8-byte words -- identical in structure to
	! dump_dtlb but reading the instruction MMU's diagnostic ASIs.
	! Clobbers %o1-%o4.  DDB diagnostic aid only.
	clr	%o1				! %o1 = TLB entry offset (entry# * 8)
	add	%o1, (64 * 8), %o3		! %o3 = loop limit (one past last entry)
1:
	ldxa	[%o1] ASI_IMMU_TLB_TAG, %o2	! read entry's tag via diagnostic ASI
	membar	#Sync
	stx	%o2, [%o0]			! store tag word to buffer
	membar	#Sync
	inc	8, %o0
	ldxa	[%o1] ASI_IMMU_TLB_DATA, %o4	! read entry's data word
	membar	#Sync
	inc	8, %o1				! advance to next TLB entry
	stx	%o4, [%o0]			! store data word to buffer
	cmp	%o1, %o3			! done all 64 entries?
	membar	#Sync
	bl	1b
	 inc	8, %o0				! (delay) advance output pointer

	retl
	 nop
4701 | | | 4701 | |
4702 | #ifdef _LP64 | | 4702 | #ifdef _LP64 |
ENTRY_NOPROFILE(print_dtlb)
	! Print all 64 DTLB entries through db_printf(), two entries per
	! iteration (formats 2f and 3f below; 3f ends the output line).
	! LP64 variant: tag and data are passed as full 64-bit arguments.
	! %l1 = entry offset (entry# * 8), %l2 = decimal entry index for
	! the printout, %l3 = loop limit.  Uses a register window so the
	! db_printf calls may clobber %o registers freely.
	save	%sp, -CC64FSZ, %sp
	clr	%l1
	add	%l1, (64 * 8), %l3
	clr	%l2
1:
	ldxa	[%l1] ASI_DMMU_TLB_TAG, %o2	! arg2: tag of even entry
	membar	#Sync
	mov	%l2, %o1			! arg1: entry index
	ldxa	[%l1] ASI_DMMU_TLB_DATA, %o3	! arg3: data of even entry
	membar	#Sync
	inc	%l2
	set	2f, %o0				! arg0: format (no newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	ldxa	[%l1] ASI_DMMU_TLB_TAG, %o2	! arg2: tag of odd entry
	membar	#Sync
	mov	%l2, %o1			! arg1: entry index
	ldxa	[%l1] ASI_DMMU_TLB_DATA, %o3	! arg3: data of odd entry
	membar	#Sync
	inc	%l2
	set	3f, %o0				! arg0: format (with newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	cmp	%l1, %l3			! processed all 64 entries?
	bl	1b
	 inc	8, %l0				! NOTE(review): %l0 is not otherwise
						! used here -- looks vestigial; confirm

	ret
	 restore
4735 | | | 4735 | |
4736 | | | 4736 | |
ENTRY_NOPROFILE(print_itlb)
	! Print all 64 ITLB entries through db_printf(), two per iteration.
	! Identical in structure to the LP64 print_dtlb above, but reads the
	! instruction MMU's diagnostic ASIs.  %l1 = entry offset, %l2 =
	! entry index, %l3 = loop limit.
	save	%sp, -CC64FSZ, %sp
	clr	%l1
	add	%l1, (64 * 8), %l3
	clr	%l2
1:
	ldxa	[%l1] ASI_IMMU_TLB_TAG, %o2	! arg2: tag of even entry
	membar	#Sync
	mov	%l2, %o1			! arg1: entry index
	ldxa	[%l1] ASI_IMMU_TLB_DATA, %o3	! arg3: data of even entry
	membar	#Sync
	inc	%l2
	set	2f, %o0				! arg0: format (no newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	ldxa	[%l1] ASI_IMMU_TLB_TAG, %o2	! arg2: tag of odd entry
	membar	#Sync
	mov	%l2, %o1			! arg1: entry index
	ldxa	[%l1] ASI_IMMU_TLB_DATA, %o3	! arg3: data of odd entry
	membar	#Sync
	inc	%l2
	set	3f, %o0				! arg0: format (with newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	cmp	%l1, %l3			! processed all 64 entries?
	bl	1b
	 inc	8, %l0				! NOTE(review): %l0 is not otherwise
						! used here -- looks vestigial; confirm

	ret
	 restore

	! Format strings for the LP64 print routines above:
	! "index: tag data" -- 2f mid-line, 3f ends the line.
	.data
2:
	.asciz	"%2d:%016lx %016lx "
3:
	.asciz	"%2d:%016lx %016lx\r\n"
	.text
4776 | #else | | 4776 | #else |
ENTRY_NOPROFILE(print_dtlb)
	! 32-bit (!_LP64) variant: print all 64 DTLB entries through
	! db_printf(), two per iteration.  Each 64-bit tag/data value is
	! split into high (srax ...,32) and low (srl ...,0 zero-extends the
	! low word) 32-bit halves so it can be passed as two printf
	! arguments.  %l1 = entry offset, %l2 = entry index, %l3 = limit.
	save	%sp, -CC64FSZ, %sp
	clr	%l1
	add	%l1, (64 * 8), %l3
	clr	%l2
1:
	ldxa	[%l1] ASI_DMMU_TLB_TAG, %o2	! read even entry's tag
	membar	#Sync
	srl	%o2, 0, %o3			! arg3: low 32 bits of tag
	mov	%l2, %o1			! arg1: entry index
	srax	%o2, 32, %o2			! arg2: high 32 bits of tag
	ldxa	[%l1] ASI_DMMU_TLB_DATA, %o4	! read even entry's data
	membar	#Sync
	srl	%o4, 0, %o5			! arg5: low 32 bits of data
	inc	%l2
	srax	%o4, 32, %o4			! arg4: high 32 bits of data
	set	2f, %o0				! arg0: format (no newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	ldxa	[%l1] ASI_DMMU_TLB_TAG, %o2	! read odd entry's tag
	membar	#Sync
	srl	%o2, 0, %o3			! arg3: low 32 bits of tag
	mov	%l2, %o1			! arg1: entry index
	srax	%o2, 32, %o2			! arg2: high 32 bits of tag
	ldxa	[%l1] ASI_DMMU_TLB_DATA, %o4	! read odd entry's data
	membar	#Sync
	srl	%o4, 0, %o5			! arg5: low 32 bits of data
	inc	%l2
	srax	%o4, 32, %o4			! arg4: high 32 bits of data
	set	3f, %o0				! arg0: format (with newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	cmp	%l1, %l3			! processed all 64 entries?
	bl	1b
	 inc	8, %l0				! NOTE(review): %l0 is not otherwise
						! used here -- looks vestigial; confirm

	ret
	 restore
4817 | | | 4817 | |
ENTRY_NOPROFILE(print_itlb)
	! 32-bit (!_LP64) variant: print all 64 ITLB entries through
	! db_printf(), two per iteration, splitting each 64-bit tag/data
	! value into high/low 32-bit halves for the printf arguments.
	! Identical in structure to the 32-bit print_dtlb but reads the
	! instruction MMU's diagnostic ASIs.
	save	%sp, -CC64FSZ, %sp
	clr	%l1
	add	%l1, (64 * 8), %l3
	clr	%l2
1:
	ldxa	[%l1] ASI_IMMU_TLB_TAG, %o2	! read even entry's tag
	membar	#Sync
	srl	%o2, 0, %o3			! arg3: low 32 bits of tag
	mov	%l2, %o1			! arg1: entry index
	srax	%o2, 32, %o2			! arg2: high 32 bits of tag
	ldxa	[%l1] ASI_IMMU_TLB_DATA, %o4	! read even entry's data
	membar	#Sync
	srl	%o4, 0, %o5			! arg5: low 32 bits of data
	inc	%l2
	srax	%o4, 32, %o4			! arg4: high 32 bits of data
	set	2f, %o0				! arg0: format (no newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	ldxa	[%l1] ASI_IMMU_TLB_TAG, %o2	! read odd entry's tag
	membar	#Sync
	srl	%o2, 0, %o3			! arg3: low 32 bits of tag
	mov	%l2, %o1			! arg1: entry index
	srax	%o2, 32, %o2			! arg2: high 32 bits of tag
	ldxa	[%l1] ASI_IMMU_TLB_DATA, %o4	! read odd entry's data
	membar	#Sync
	srl	%o4, 0, %o5			! arg5: low 32 bits of data
	inc	%l2
	srax	%o4, 32, %o4			! arg4: high 32 bits of data
	set	3f, %o0				! arg0: format (with newline)
	call	_C_LABEL(db_printf)
	 inc	8, %l1				! (delay) next entry

	cmp	%l1, %l3			! processed all 64 entries?
	bl	1b
	 inc	8, %l0				! NOTE(review): %l0 is not otherwise
						! used here -- looks vestigial; confirm

	ret
	 restore

	! Format strings for the 32-bit print routines above:
	! "index: taghi:taglo datahi:datalo" -- 2f mid-line, 3f ends the line.
	.data
2:
	.asciz	"%2d:%08x:%08x %08x:%08x "
3:
	.asciz	"%2d:%08x:%08x %08x:%08x\r\n"
	.text
4865 | #endif | | 4865 | #endif |
4866 | #endif | | 4866 | #endif |
4867 | | | 4867 | |
/*
 * Kernel entry point.
 *
 * The contract between bootloader and kernel is:
 *
 * %o0		OpenFirmware entry point, to keep Sun's updaters happy
 * %o1		Address of boot information vector (see bootinfo.h)
 * %o2		Length of the vector, in bytes
 * %o3		OpenFirmware entry point, to mimic Sun bootloader behavior
 * %o4		OpenFirmware, to meet earlier NetBSD kernels expectations
 */
	.align	8
start:
dostart:
	! First order of business: neutralize the tick timer so no stray
	! tick-compare interrupt fires before the kernel is ready.
	wrpr	%g0, 0, %tick	! XXXXXXX clear %tick register for now
	mov	1, %g1
	sllx	%g1, 63, %g1	! bit 63 = INT_DIS: disable tick compare
	wr	%g1, TICK_CMPR	! XXXXXXX clear and disable %tick_cmpr as well
	/*
	 * Startup.
	 *
	 * The Sun FCODE bootloader is nice and loads us where we want
	 * to be.  We have a full set of mappings already set up for us.
	 *
	 * I think we end up having an entire 16M allocated to us.
	 *
	 * We enter with the prom entry vector in %o0, dvec in %o1,
	 * and the bootops vector in %o2.
	 *
	 * All we need to do is:
	 *
	 *	1:	Save the prom vector
	 *
	 *	2:	Create a decent stack for ourselves
	 *
	 *	3:	Install the permanent 4MB kernel mapping
	 *
	 *	4:	Call the C language initialization code
	 *
	 */

	/*
	 * Set the psr into a known state:
	 * Set supervisor mode, interrupt level >= 13, traps enabled
	 */
	wrpr	%g0, 13, %pil
	wrpr	%g0, PSTATE_INTR|PSTATE_PEF, %pstate
	wr	%g0, FPRS_FEF, %fprs		! Turn on FPU

	/*
	 * Step 2: Set up a v8-like stack if we need to
	 *
	 * A biased (64-bit) stack pointer is odd (low bit set); an
	 * unbiased (v8-style) one is even.  Normalize %sp to whichever
	 * convention this kernel build uses.
	 */

#ifdef _LP64
	btst	1, %sp				! already biased (odd)?
	bnz,pt	%icc, 0f
	 nop
	add	%sp, -BIAS, %sp			! no: apply the 64-bit stack bias
#else
	btst	1, %sp				! biased (odd)?
	bz,pt	%icc, 0f
	 nop
	add	%sp, BIAS, %sp			! yes: remove the bias for 32-bit use
#endif
0:

	! Hand off to C; bootstrap() does the bulk of early initialization.
	call	_C_LABEL(bootstrap)
	 clr	%g4				! Clear data segment pointer

/*
 * Initialize the boot CPU.  Basically:
 *
 *	Locate the cpu_info structure for this CPU.
 *	Establish a locked mapping for interrupt stack.
 *	Switch to the initial stack.
 *	Call the routine passed in in cpu_info->ci_spinup
 */

	! TTE low-word attribute bits used for the locked interrupt-stack
	! mapping: locked, cacheable, privileged, writable; the virtual
	! cacheability bit (TTE_CV) is omitted when NO_VCACHE is set.
#ifdef NO_VCACHE
#define	TTE_DATABITS	TTE_L|TTE_CP|TTE_P|TTE_W
#else
#define	TTE_DATABITS	TTE_L|TTE_CP|TTE_CV|TTE_P|TTE_W
#endif
4952 | | | 4952 | |
ENTRY_NOPROFILE(cpu_initialize)	/* for cosmetic reasons - nicer backtrace */
	! Per-CPU bring-up for the boot processor:
	!   - find our cpu_info by matching the UPA module ID,
	!   - lock a DTLB entry mapping the interrupt stack / cpu_info,
	!   - switch onto lwp0's kernel stack,
	!   - install the TSB pointers and trap table,
	!   - jump to the routine in cpu_info->ci_spinup (never returns).
	/*
	 * Step 5: is no more.
	 */

	/*
	 * Step 6: hunt through cpus list and find the one that
	 * matches our UPAID.
	 */
	sethi	%hi(_C_LABEL(cpus)), %l1
	ldxa	[%g0] ASI_MID_REG, %l2		! read this CPU's module-ID register
	LDPTR	[%l1 + %lo(_C_LABEL(cpus))], %l1
	srax	%l2, 17, %l2			! Isolate UPAID from CPU reg
	and	%l2, 0x1f, %l2
0:
	ld	[%l1 + CI_UPAID], %l3		! Load UPAID
	cmp	%l3, %l2			! Does it match?
	bne,a,pt	%icc, 0b		! no
	 LDPTR	[%l1 + CI_NEXT], %l1		! Load next cpu_info pointer


	/*
	 * Get pointer to our cpu_info struct
	 */

	ldx	[%l1 + CI_PADDR], %l1		! Load the interrupt stack's PA

	! Build a locked 64KB TTE for it by hand:
	sethi	%hi(0xa0000000), %l2		! V=1|SZ=01|NFO=0|IE=0
	sllx	%l2, 32, %l2			! Shift it into place

	mov	-1, %l3				! Create a nice mask
	sllx	%l3, 41, %l4			! Mask off high bits
	or	%l4, 0xfff, %l4			! We can just load this in 12 (of 13) bits

	andn	%l1, %l4, %l1			! Mask the phys page number

	or	%l2, %l1, %l1			! Now take care of the high bits
	or	%l1, TTE_DATABITS, %l2		! And low bits: L=1|CP=1|CV=?|E=0|P=1|W=1|G=0

	!!
	!!  Now, map in the interrupt stack as context==0
	!!
	set	TLB_TAG_ACCESS, %l5
	set	INTSTACK, %l0
	stxa	%l0, [%l5] ASI_DMMU		! Make DMMU point to it
	stxa	%l2, [%g0] ASI_DMMU_DATA_IN	! Store it
	membar	#Sync

	!! Setup kernel stack (we rely on curlwp on this cpu
	!! being lwp0 here and it's uarea is mapped special
	!! and already accessible here)
	flushw
	sethi	%hi(CPUINFO_VA+CI_CURLWP), %l0
	LDPTR	[%l0 + %lo(CPUINFO_VA+CI_CURLWP)], %l0
	set	USPACE - TF_SIZE - CC64FSZ, %l1	! stack top below trapframe + C frame
	LDPTR	[%l0 + L_ADDR], %l0		! lwp's uarea base
	add	%l1, %l0, %l0
#ifdef _LP64
	andn	%l0, 0x0f, %l0			! Needs to be 16-byte aligned
	sub	%l0, BIAS, %l0			! and biased
#endif
	mov	%l0, %sp
	flushw

#ifdef DEBUG
	! Optional early progress message, gated on a pmapdebug flag bit.
	set	_C_LABEL(pmapdebug), %o1
	ld	[%o1], %o1
	sethi	%hi(0x40000), %o2
	btst	%o2, %o1
	bz	0f

	set	1f, %o0		! Debug printf
	call	_C_LABEL(prom_printf)
	.data
1:
	.asciz	"Setting trap base...\r\n"
	_ALIGN
	.text
0:
#endif
	/*
	 * Step 7: change the trap base register, and install our TSB pointers
	 */

	/*
	 * install our TSB pointers
	 */
	sethi	%hi(CPUINFO_VA+CI_TSB_DMMU), %l0
	sethi	%hi(CPUINFO_VA+CI_TSB_IMMU), %l1
	sethi	%hi(_C_LABEL(tsbsize)), %l2
	sethi	%hi(0x1fff), %l3		! TSB-register size/split field mask
	sethi	%hi(TSB), %l4
	LDPTR	[%l0 + %lo(CPUINFO_VA+CI_TSB_DMMU)], %l0
	LDPTR	[%l1 + %lo(CPUINFO_VA+CI_TSB_IMMU)], %l1
	ld	[%l2 + %lo(_C_LABEL(tsbsize))], %l2
	or	%l3, %lo(0x1fff), %l3
	or	%l4, %lo(TSB), %l4

	andn	%l0, %l3, %l0			! Mask off size and split bits
	or	%l0, %l2, %l0			! Make a TSB pointer
	stxa	%l0, [%l4] ASI_DMMU		! Install data TSB pointer

	andn	%l1, %l3, %l1			! Mask off size and split bits
	or	%l1, %l2, %l1			! Make a TSB pointer
	stxa	%l1, [%l4] ASI_IMMU		! Install instruction TSB pointer
	membar	#Sync
	set	1f, %l1
	flush	%l1				! synchronize the I-side with the stores
1:

	/* set trap table */
	set	_C_LABEL(trapbase), %l1
	call	_C_LABEL(prom_set_trap_table)	! Now we should be running 100% from our handlers
	 mov	%l1, %o0
	wrpr	%l1, 0, %tba			! Make sure the PROM didn't foul up.

	/*
	 * Switch to the kernel mode and run away.
	 */
	wrpr	%g0, WSTATE_KERN, %wstate

#ifdef DEBUG
	wrpr	%g0, 1, %tl			! Debug -- start at tl==3 so we'll watchdog
	wrpr	%g0, 0x1ff, %tt			! Debug -- clear out unused trap regs
	wrpr	%g0, 0, %tpc
	wrpr	%g0, 0, %tnpc
	wrpr	%g0, 0, %tstate
	wrpr	%g0, 0, %tl
#endif

#ifdef DEBUG
	! Optional progress message before entering the spinup routine.
	set	_C_LABEL(pmapdebug), %o1
	ld	[%o1], %o1
	sethi	%hi(0x40000), %o2
	btst	%o2, %o1
	bz	0f

	set	1f, %o0		! Debug printf
	call	_C_LABEL(prom_printf)
	.data
1:
	.asciz	"Calling startup routine...\r\n"
	_ALIGN
	.text
0:
#endif
	/*
	 * Call our startup routine.
	 */

	sethi	%hi(CPUINFO_VA+CI_SPINUP), %l0
	LDPTR	[%l0 + %lo(CPUINFO_VA+CI_SPINUP)], %o1

	call	%o1			! Call routine
	 clr	%o0			! our frame arg is ignored

	! The spinup routine must not return; panic if it does.
	set	1f, %o0			! Main should never come back here
	call	_C_LABEL(panic)
	 nop
	.data
1:
	.asciz	"main() returned\r\n"
	_ALIGN
	.text
5117 | | | 5117 | |
5118 | #if defined(MULTIPROCESSOR) | | 5118 | #if defined(MULTIPROCESSOR) |
5119 | /* | | 5119 | /* |
5120 | * cpu_mp_startup is called with: | | 5120 | * cpu_mp_startup is called with: |
5121 | * | | 5121 | * |
5122 | * %g2 = cpu_args | | 5122 | * %g2 = cpu_args |
5123 | */ | | 5123 | */ |
5124 | ENTRY(cpu_mp_startup) | | 5124 | ENTRY(cpu_mp_startup) |
5125 | mov 1, %o0 | | 5125 | mov 1, %o0 |
5126 | sllx %o0, 63, %o0 | | 5126 | sllx %o0, 63, %o0 |
5127 | wr %o0, TICK_CMPR ! XXXXXXX clear and disable %tick_cmpr for now | | 5127 | wr %o0, TICK_CMPR ! XXXXXXX clear and disable %tick_cmpr for now |
5128 | wrpr %g0, 0, %tick ! XXXXXXX clear %tick register as well | | 5128 | wrpr %g0, 0, %tick ! XXXXXXX clear %tick register as well |
5129 | wrpr %g0, 0, %cleanwin | | 5129 | wrpr %g0, 0, %cleanwin |
5130 | wrpr %g0, 0, %tl ! Make sure we're not in NUCLEUS mode | | 5130 | wrpr %g0, 0, %tl ! Make sure we're not in NUCLEUS mode |
5131 | wrpr %g0, WSTATE_KERN, %wstate | | 5131 | wrpr %g0, WSTATE_KERN, %wstate |
5132 | wrpr %g0, PSTATE_KERN, %pstate | | 5132 | wrpr %g0, PSTATE_KERN, %pstate |
5133 | flushw | | 5133 | flushw |
5134 | | | 5134 | |
5135 | /* | | 5135 | /* |
5136 | * Get pointer to our cpu_info struct | | 5136 | * Get pointer to our cpu_info struct |
5137 | */ | | 5137 | */ |
5138 | ldx [%g2 + CBA_CPUINFO], %l1 ! Load the interrupt stack's PA | | 5138 | ldx [%g2 + CBA_CPUINFO], %l1 ! Load the interrupt stack's PA |
5139 | sethi %hi(0xa0000000), %l2 ! V=1|SZ=01|NFO=0|IE=0 | | 5139 | sethi %hi(0xa0000000), %l2 ! V=1|SZ=01|NFO=0|IE=0 |
5140 | sllx %l2, 32, %l2 ! Shift it into place | | 5140 | sllx %l2, 32, %l2 ! Shift it into place |
5141 | mov -1, %l3 ! Create a nice mask | | 5141 | mov -1, %l3 ! Create a nice mask |
5142 | sllx %l3, 41, %l4 ! Mask off high bits | | 5142 | sllx %l3, 41, %l4 ! Mask off high bits |
5143 | or %l4, 0xfff, %l4 ! We can just load this in 12 (of 13) bits | | 5143 | or %l4, 0xfff, %l4 ! We can just load this in 12 (of 13) bits |
5144 | andn %l1, %l4, %l1 ! Mask the phys page number | | 5144 | andn %l1, %l4, %l1 ! Mask the phys page number |
5145 | or %l2, %l1, %l1 ! Now take care of the high bits | | 5145 | or %l2, %l1, %l1 ! Now take care of the high bits |
5146 | or %l1, TTE_DATABITS, %l2 ! And low bits: L=1|CP=1|CV=?|E=0|P=1|W=1|G=0 | | 5146 | or %l1, TTE_DATABITS, %l2 ! And low bits: L=1|CP=1|CV=?|E=0|P=1|W=1|G=0 |
5147 | | | 5147 | |
5148 | /* | | 5148 | /* |
5149 | * Now, map in the interrupt stack & cpu_info as context==0 | | 5149 | * Now, map in the interrupt stack & cpu_info as context==0 |
5150 | */ | | 5150 | */ |
5151 | set TLB_TAG_ACCESS, %l5 | | 5151 | set TLB_TAG_ACCESS, %l5 |
5152 | set INTSTACK, %l0 | | 5152 | set INTSTACK, %l0 |
5153 | stxa %l0, [%l5] ASI_DMMU ! Make DMMU point to it | | 5153 | stxa %l0, [%l5] ASI_DMMU ! Make DMMU point to it |
5154 | stxa %l2, [%g0] ASI_DMMU_DATA_IN ! Store it | | 5154 | stxa %l2, [%g0] ASI_DMMU_DATA_IN ! Store it |
5155 | | | 5155 | |
5156 | /* | | 5156 | /* |
5157 | * Set 0 as primary context XXX | | 5157 | * Set 0 as primary context XXX |
5158 | */ | | 5158 | */ |
5159 | mov CTX_PRIMARY, %o0 | | 5159 | mov CTX_PRIMARY, %o0 |
5160 | stxa %g0, [%o0] ASI_DMMU | | 5160 | stxa %g0, [%o0] ASI_DMMU |
5161 | membar #Sync | | 5161 | membar #Sync |
5162 | | | 5162 | |
5163 | /* | | 5163 | /* |
5164 | * Temporarily use the interrupt stack | | 5164 | * Temporarily use the interrupt stack |
5165 | */ | | 5165 | */ |
5166 | #ifdef _LP64 | | 5166 | #ifdef _LP64 |
5167 | set ((EINTSTACK - CC64FSZ - TF_SIZE)) & ~0x0f - BIAS, %sp | | 5167 | set ((EINTSTACK - CC64FSZ - TF_SIZE)) & ~0x0f - BIAS, %sp |
5168 | #else | | 5168 | #else |
5169 | set EINTSTACK - CC64FSZ - TF_SIZE, %sp | | 5169 | set EINTSTACK - CC64FSZ - TF_SIZE, %sp |
5170 | #endif | | 5170 | #endif |
5171 | set 1, %fp | | 5171 | set 1, %fp |
5172 | clr %i7 | | 5172 | clr %i7 |
5173 | | | 5173 | |
5174 | /* | | 5174 | /* |
5175 | * install our TSB pointers | | 5175 | * install our TSB pointers |
5176 | */ | | 5176 | */ |
5177 | sethi %hi(CPUINFO_VA+CI_TSB_DMMU), %l0 | | 5177 | sethi %hi(CPUINFO_VA+CI_TSB_DMMU), %l0 |
5178 | sethi %hi(CPUINFO_VA+CI_TSB_IMMU), %l1 | | 5178 | sethi %hi(CPUINFO_VA+CI_TSB_IMMU), %l1 |
5179 | sethi %hi(_C_LABEL(tsbsize)), %l2 | | 5179 | sethi %hi(_C_LABEL(tsbsize)), %l2 |
5180 | sethi %hi(0x1fff), %l3 | | 5180 | sethi %hi(0x1fff), %l3 |
5181 | sethi %hi(TSB), %l4 | | 5181 | sethi %hi(TSB), %l4 |
5182 | LDPTR [%l0 + %lo(CPUINFO_VA+CI_TSB_DMMU)], %l0 | | 5182 | LDPTR [%l0 + %lo(CPUINFO_VA+CI_TSB_DMMU)], %l0 |
5183 | LDPTR [%l1 + %lo(CPUINFO_VA+CI_TSB_IMMU)], %l1 | | 5183 | LDPTR [%l1 + %lo(CPUINFO_VA+CI_TSB_IMMU)], %l1 |
5184 | ld [%l2 + %lo(_C_LABEL(tsbsize))], %l2 | | 5184 | ld [%l2 + %lo(_C_LABEL(tsbsize))], %l2 |
5185 | or %l3, %lo(0x1fff), %l3 | | 5185 | or %l3, %lo(0x1fff), %l3 |
5186 | or %l4, %lo(TSB), %l4 | | 5186 | or %l4, %lo(TSB), %l4 |
5187 | | | 5187 | |
5188 | andn %l0, %l3, %l0 ! Mask off size and split bits | | 5188 | andn %l0, %l3, %l0 ! Mask off size and split bits |
5189 | or %l0, %l2, %l0 ! Make a TSB pointer | | 5189 | or %l0, %l2, %l0 ! Make a TSB pointer |
5190 | stxa %l0, [%l4] ASI_DMMU ! Install data TSB pointer | | 5190 | stxa %l0, [%l4] ASI_DMMU ! Install data TSB pointer |
5191 | membar #Sync | | 5191 | membar #Sync |
5192 | | | 5192 | |
5193 | andn %l1, %l3, %l1 ! Mask off size and split bits | | 5193 | andn %l1, %l3, %l1 ! Mask off size and split bits |
5194 | or %l1, %l2, %l1 ! Make a TSB pointer | | 5194 | or %l1, %l2, %l1 ! Make a TSB pointer |
5195 | stxa %l1, [%l4] ASI_IMMU ! Install instruction TSB pointer | | 5195 | stxa %l1, [%l4] ASI_IMMU ! Install instruction TSB pointer |
5196 | membar #Sync | | 5196 | membar #Sync |
5197 | set 1f, %o0 | | 5197 | set 1f, %o0 |
5198 | flush %o0 | | 5198 | flush %o0 |
5199 | 1: | | 5199 | 1: |
5200 | | | 5200 | |
5201 | /* set trap table */ | | 5201 | /* set trap table */ |
5202 | set _C_LABEL(trapbase), %l1 | | 5202 | set _C_LABEL(trapbase), %l1 |
5203 | call _C_LABEL(prom_set_trap_table) | | 5203 | call _C_LABEL(prom_set_trap_table) |
5204 | mov %l1, %o0 | | 5204 | mov %l1, %o0 |
5205 | wrpr %l1, 0, %tba ! Make sure the PROM didn't | | 5205 | wrpr %l1, 0, %tba ! Make sure the PROM didn't |
5206 | ! foul up. | | 5206 | ! foul up. |
5207 | /* | | 5207 | /* |
5208 | * Use this CPUs idlelewp's uarea stack | | 5208 | * Use this CPUs idlelewp's uarea stack |
5209 | */ | | 5209 | */ |
5210 | sethi %hi(CPUINFO_VA+CI_IDLELWP), %l0 | | 5210 | sethi %hi(CPUINFO_VA+CI_IDLELWP), %l0 |
5211 | LDPTR [%l0 + %lo(CPUINFO_VA+CI_IDLELWP)], %l0 | | 5211 | LDPTR [%l0 + %lo(CPUINFO_VA+CI_IDLELWP)], %l0 |
5212 | set USPACE - TF_SIZE - CC64FSZ, %l1 | | 5212 | set USPACE - TF_SIZE - CC64FSZ, %l1 |
5213 | LDPTR [%l0 + L_ADDR], %l0 | | 5213 | LDPTR [%l0 + L_ADDR], %l0 |
5214 | add %l0, %l1, %l0 | | 5214 | add %l0, %l1, %l0 |
5215 | #ifdef _LP64 | | 5215 | #ifdef _LP64 |
5216 | andn %l0, 0x0f, %l0 ! Needs to be 16-byte aligned | | 5216 | andn %l0, 0x0f, %l0 ! Needs to be 16-byte aligned |
5217 | sub %l0, BIAS, %l0 ! and biased | | 5217 | sub %l0, BIAS, %l0 ! and biased |
5218 | #endif | | 5218 | #endif |
5219 | mov %l0, %sp | | 5219 | mov %l0, %sp |
5220 | flushw | | 5220 | flushw |
5221 | | | 5221 | |
5222 | /* | | 5222 | /* |
5223 | * Switch to the kernel mode and run away. | | 5223 | * Switch to the kernel mode and run away. |
5224 | */ | | 5224 | */ |
5225 | wrpr %g0, 13, %pil | | 5225 | wrpr %g0, 13, %pil |
5226 | wrpr %g0, PSTATE_INTR|PSTATE_PEF, %pstate | | 5226 | wrpr %g0, PSTATE_INTR|PSTATE_PEF, %pstate |
5227 | wr %g0, FPRS_FEF, %fprs ! Turn on FPU | | 5227 | wr %g0, FPRS_FEF, %fprs ! Turn on FPU |
5228 | | | 5228 | |
5229 | call _C_LABEL(cpu_hatch) | | 5229 | call _C_LABEL(cpu_hatch) |
5230 | clr %g4 | | 5230 | clr %g4 |
5231 | | | 5231 | |
5232 | b _C_LABEL(idle_loop) | | 5232 | b _C_LABEL(idle_loop) |
5233 | clr %o0 | | 5233 | clr %o0 |
5234 | | | 5234 | |
5235 | NOTREACHED | | 5235 | NOTREACHED |
5236 | | | 5236 | |
5237 | .globl cpu_mp_startup_end | | 5237 | .globl cpu_mp_startup_end |
5238 | cpu_mp_startup_end: | | 5238 | cpu_mp_startup_end: |
5239 | #endif /* MULTIPROCESSOR */ | | 5239 | #endif /* MULTIPROCESSOR */ |
5240 | | | 5240 | |
5241 | .align 8 | | 5241 | .align 8 |
5242 | ENTRY(get_romtba) | | 5242 | ENTRY(get_romtba) |
5243 | retl | | 5243 | retl |
5244 | rdpr %tba, %o0 | | 5244 | rdpr %tba, %o0 |
/*
 * int get_maxctx(void)
 *
 * Get number of available contexts: write all-ones to the secondary
 * context register, read back the bits the MMU actually retained
 * (only the implemented low-order bits stick), restore context 0,
 * and return mask + 1.
 */
	.align 8
ENTRY(get_maxctx)
	set	CTX_SECONDARY, %o1	! secondary context register
	mov	-1, %o2			! write all ones...
	stxa	%o2, [%o1] ASI_DMMU
	membar	#Sync
	ldxa	[%o1] ASI_DMMU, %o0	! ...then read back the implemented bits
	membar	#Sync
	stxa	%g0, [%o1] ASI_DMMU	! restore context 0
	membar	#Sync
	retl
	 inc	%o0			! mask + 1 == number of contexts
5263 | | | 5263 | |
/*
 * openfirmware(cell* param);
 *
 * OpenFirmware entry point.
 *
 * If we're running on a 32-bit (v8) stack we need to convert to a
 * 64-bit stack and 64-bit cells before calling the PROM; otherwise we
 * can call it directly.  Either way we raise %pil, save the globals
 * (the PROM may clobber them), and drop into PSTATE_PROM around the
 * call.  The stack-parity test (low bit of %sp set means a biased,
 * i.e. 64-bit, frame) selects the path.
 */
	.align 8
ENTRY(openfirmware)
	sethi	%hi(romp), %o4
	andcc	%sp, 1, %g0		! biased (v9) stack?
	bz,pt	%icc, 1f
	 LDPTR	[%o4+%lo(romp)], %o4	! v9 stack: just load the addr and call it

	! ---- v9 path: 64-bit stack already in place ----
	save	%sp, -CC64FSZ, %sp
	rdpr	%pil, %i2		! remember current %pil,
	mov	PIL_HIGH, %i3		! raise to max(PIL_HIGH, current)
	cmp	%i3, %i2
	movle	%icc, %i2, %i3
	wrpr	%g0, %i3, %pil
	mov	%i0, %o0
	mov	%g1, %l1		! stash the globals across the PROM call
	mov	%g2, %l2
	mov	%g3, %l3
	mov	%g4, %l4
	mov	%g5, %l5
	mov	%g6, %l6
	mov	%g7, %l7
	rdpr	%pstate, %l0
	jmpl	%i4, %o7		! call the PROM entry point
#if !defined(_LP64)
	 wrpr	%g0, PSTATE_PROM, %pstate
#else
	 wrpr	%g0, PSTATE_PROM|PSTATE_IE, %pstate
#endif
	wrpr	%l0, %g0, %pstate	! back to our previous %pstate
	mov	%l1, %g1		! recover the globals
	mov	%l2, %g2
	mov	%l3, %g3
	mov	%l4, %g4
	mov	%l5, %g5
	mov	%l6, %g6
	mov	%l7, %g7
	wrpr	%i2, 0, %pil		! restore the saved %pil
	ret
	 restore %o0, %g0, %o0

1:	! ---- v8 path: need to screw with stack & params ----
#ifdef NOTDEF_DEBUG
	mov	%o7, %o5
	call	globreg_check
	 nop
	mov	%o5, %o7
#endif
	save	%sp, -CC64FSZ, %sp	! Get a new 64-bit stack frame
	add	%sp, -BIAS, %sp		! and bias it
	rdpr	%pstate, %l0
	srl	%sp, 0, %sp		! clip %sp to 32 bits
	rdpr	%pil, %i2		! s = splx(level)
	mov	%i0, %o0
	mov	PIL_HIGH, %i3
	mov	%g1, %l1		! stash the globals across the PROM call
	mov	%g2, %l2
	cmp	%i3, %i2
	mov	%g3, %l3
	mov	%g4, %l4
	mov	%g5, %l5
	movle	%icc, %i2, %i3		! raise to max(PIL_HIGH, current)
	mov	%g6, %l6
	mov	%g7, %l7
	wrpr	%i3, %g0, %pil
	jmpl	%i4, %o7		! call the PROM entry point,
	! enabling 64-bit addresses for the prom from the delay slot
#if defined(_LP64)
	 wrpr	%g0, PSTATE_PROM, %pstate
#else
	 wrpr	%g0, PSTATE_PROM|PSTATE_IE, %pstate
#endif
	wrpr	%l0, 0, %pstate		! back to our previous %pstate
	wrpr	%i2, 0, %pil		! restore the saved %pil
	mov	%l1, %g1		! recover the globals
	mov	%l2, %g2
	mov	%l3, %g3
	mov	%l4, %g4
	mov	%l5, %g5
	mov	%l6, %g6
	mov	%l7, %g7
	ret
	 restore %o0, %g0, %o0
5353 | | | 5353 | |
/*
 * void ofw_exit(cell_t args[])
 *
 * Hand control back to OpenFirmware for good: restore the PROM trap
 * table, move onto the locked kernel (interrupt) stack, switch to
 * context 0, and jump into the PROM.  Never returns.
 */
ENTRY(openfirmware_exit)
	STACKFRAME(-CC64FSZ)
	flushw				! Flush register windows

	wrpr	%g0, PIL_HIGH, %pil	! Disable interrupts
	sethi	%hi(romtba), %l5
	LDPTR	[%l5 + %lo(romtba)], %l5
	wrpr	%l5, 0, %tba		! restore the ofw trap table

	/* Arrange locked kernel stack as PROM stack */
	set	EINTSTACK - CC64FSZ, %l5
	andn	%l5, 0x0f, %l5		! Needs to be 16-byte aligned
	sub	%l5, BIAS, %l5		! and biased
	mov	%l5, %sp
	flushw

	sethi	%hi(romp), %l6
	LDPTR	[%l6 + %lo(romp)], %l6

	mov	CTX_PRIMARY, %l3	! set context 0
	stxa	%g0, [%l3] ASI_DMMU
	membar	#Sync

	wrpr	%g0, PSTATE_PROM, %pstate	! Disable interrupts
						! and enable 64-bit addresses
	wrpr	%g0, 0, %tl		! force trap level 0
	call	%l6
	 mov	%i0, %o0
	NOTREACHED
5387 | | | 5387 | |
/*
 * sp_tlb_flush_pte(vaddr_t va, int ctx)
 *
 * Flush tte from both IMMU and DMMU on this CPU.
 *
 * On Spitfire the demap is issued through the secondary context; on
 * Cheetah the IMMU cannot be demapped from secondary context, so the
 * primary context is borrowed instead (at %tl > 0, so kernel text
 * keeps running out of the NUCLEUS context meanwhile).
 *
 * This uses %o0-%o5
 */
	.align 8
ENTRY(sp_tlb_flush_pte)
#ifdef DEBUG
	set	DATA_START, %o4			! Forget any recent TLB misses
	stx	%g0, [%o4]
	stx	%g0, [%o4+16]
#endif
#ifdef DEBUG
	set	pmapdebug, %o3
	lduw	[%o3], %o3
!	movrz	%o1, -1, %o3	! Print on either pmapdebug & PDB_DEMAP or ctx == 0
	btst	0x0020, %o3
	bz,pt	%icc, 2f
	 nop
	save	%sp, -CC64FSZ, %sp
	set	1f, %o0
	mov	%i1, %o1
	andn	%i0, 0xfff, %o3
	or	%o3, 0x010, %o3
	call	_C_LABEL(printf)
	 mov	%i0, %o2
	restore
	.data
1:
	.asciz	"sp_tlb_flush_pte: demap ctx=%x va=%08x res=%x\r\n"
	_ALIGN
	.text
2:
#endif
#ifdef SPITFIRE
#ifdef MULTIPROCESSOR
	rdpr	%pstate, %o3
	andn	%o3, PSTATE_IE, %o4		! disable interrupts
	wrpr	%o4, 0, %pstate
#endif
	srlx	%o0, PG_SHIFT4U, %o0		! drop unused va bits
	mov	CTX_SECONDARY, %o2
	sllx	%o0, PG_SHIFT4U, %o0
	ldxa	[%o2] ASI_DMMU, %o5		! Save secondary context
	sethi	%hi(KERNBASE), %o4
	membar	#LoadStore
	stxa	%o1, [%o2] ASI_DMMU		! Insert context to demap
	membar	#Sync
	or	%o0, DEMAP_PAGE_SECONDARY, %o0	! Demap page from secondary context only
	stxa	%o0, [%o0] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o0, [%o0] ASI_IMMU_DEMAP	! to both TLBs
#ifdef _LP64
	srl	%o0, 0, %o0		! and make sure it's both 32- and 64-bit entries
	stxa	%o0, [%o0] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o0, [%o0] ASI_IMMU_DEMAP	! Do the demap
#endif
	flush	%o4
	stxa	%o5, [%o2] ASI_DMMU		! Restore secondary context
	membar	#Sync
	retl
#ifdef MULTIPROCESSOR
	 wrpr	%o3, %pstate			! restore interrupts
#else
	 nop
#endif
#else
	!!
	!! Cheetahs do not support flushing the IMMU from secondary context
	!!
	rdpr	%tl, %o3
	mov	CTX_PRIMARY, %o2
	brnz,pt	%o3, 1f				! already above trap level 0?
	 andn	%o0, 0xfff, %o0			! drop unused va bits
	wrpr	%g0, 1, %tl			! Make sure we're NUCLEUS
1:
	ldxa	[%o2] ASI_DMMU, %o5		! Save primary context
	sethi	%hi(KERNBASE), %o4
	membar	#LoadStore
	stxa	%o1, [%o2] ASI_DMMU		! Insert context to demap
	membar	#Sync
	or	%o0, DEMAP_PAGE_PRIMARY, %o0
	stxa	%o0, [%o0] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o0, [%o0] ASI_IMMU_DEMAP	! to both TLBs
	srl	%o0, 0, %o0		! and make sure it's both 32- and 64-bit entries
	stxa	%o0, [%o0] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o0, [%o0] ASI_IMMU_DEMAP	! Do the demap
	flush	%o4
	stxa	%o5, [%o2] ASI_DMMU		! Restore primary context
	brz,pt	%o3, 1f				! entered at %tl == 0?
	 flush	%o4
	retl
	 nop
1:
	retl
	 wrpr	%g0, 0, %tl			! Return to kernel mode.
#endif
5486 | | | 5486 | |
/*
 * sp_tlb_flush_ctx(int ctx)
 *
 * Flush entire context from both IMMU and DMMU on this CPU.
 * Demapping context 0 (NUCLEUS) is a bug in the caller and panics
 * under DIAGNOSTIC.
 *
 * This uses %o0-%o5
 */
	.align 8
ENTRY(sp_tlb_flush_ctx)
#ifdef DEBUG
	set	DATA_START, %o4			! Forget any recent TLB misses
	stx	%g0, [%o4]
#endif
#ifdef NOTDEF_DEBUG
	save	%sp, -CC64FSZ, %sp
	set	1f, %o0
	call	printf
	 mov	%i0, %o1
	restore
	.data
1:
	.asciz	"sp_tlb_flush_ctx: context flush of %d attempted\r\n"
	_ALIGN
	.text
#endif
#ifdef DIAGNOSTIC
	brnz,pt	%o0, 2f				! ctx 0 is never a valid target
	 nop
	set	1f, %o0
	call	panic
	 nop
	.data
1:
	.asciz	"sp_tlb_flush_ctx: attempted demap of NUCLEUS context\r\n"
	_ALIGN
	.text
2:
#endif
#ifdef SPITFIRE
#ifdef MULTIPROCESSOR
	rdpr	%pstate, %o3
	andn	%o3, PSTATE_IE, %o4		! disable interrupts
	wrpr	%o4, 0, %pstate
#endif
	mov	CTX_SECONDARY, %o2
	ldxa	[%o2] ASI_DMMU, %o1		! Save secondary context
	sethi	%hi(KERNBASE), %o4
	membar	#LoadStore
	stxa	%o0, [%o2] ASI_DMMU		! Insert context to demap
	set	DEMAP_CTX_SECONDARY, %o5
	membar	#Sync
	stxa	%o5, [%o5] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o5, [%o5] ASI_IMMU_DEMAP	! Do the demap
	flush	%o4
	stxa	%o1, [%o2] ASI_DMMU		! Restore secondary context
	membar	#Sync
	retl
#ifdef MULTIPROCESSOR
	 wrpr	%o3, %pstate			! restore interrupts
#else
	 nop
#endif
#else
	!! Cheetah: demap via the primary context at %tl > 0
	rdpr	%tl, %o3
	mov	CTX_PRIMARY, %o2
	brnz	%o3, 1f				! already above trap level 0?
	 sethi	%hi(KERNBASE), %o4
	wrpr	%g0, 1, %tl			! Make sure we're NUCLEUS
1:
	ldxa	[%o2] ASI_DMMU, %o1		! Save primary context
	membar	#LoadStore
	stxa	%o0, [%o2] ASI_DMMU		! Insert context to demap
	membar	#Sync
	set	DEMAP_CTX_PRIMARY, %o5
	stxa	%o5, [%o5] ASI_DMMU_DEMAP	! Do the demap
	stxa	%o5, [%o5] ASI_IMMU_DEMAP	! Do the demap
	membar	#Sync
	stxa	%o1, [%o2] ASI_DMMU		! Restore primary context
	membar	#Sync
	brz,pt	%o3, 1f				! entered at %tl == 0?
	 flush	%o4
	retl
	 nop
1:
	retl
	 wrpr	%g0, 0, %tl			! Return to kernel mode.
#endif
5574 | | | 5574 | |
/*
 * sp_tlb_flush_all(void)
 *
 * Flush all user TLB entries from both IMMU and DMMU: walk every
 * TLB entry (64 entries, 8 bytes of tag index apiece), and for each
 * one whose tag carries a non-zero context, demap that whole context
 * through the secondary context register.  Spitfire only; the
 * Cheetah variant remains to be written (WRITEME).
 */
	.align 8
ENTRY(sp_tlb_flush_all)
#ifdef SPITFIRE
	rdpr	%pstate, %o3
	andn	%o3, PSTATE_IE, %o4		! disable interrupts
	wrpr	%o4, 0, %pstate
	set	(63 * 8), %o0			! last TLB entry
	set	CTX_SECONDARY, %o4
	ldxa	[%o4] ASI_DMMU, %o4		! save secondary context
	set	CTX_MASK, %o5
	membar	#Sync

	! %o0 = loop counter
	! %o1 = ctx value
	! %o2 = TLB tag value
	! %o3 = saved %pstate
	! %o4 = saved secondary ctx
	! %o5 = CTX_MASK

0:
	ldxa	[%o0] ASI_DMMU_TLB_TAG, %o2	! fetch the TLB tag
	andcc	%o2, %o5, %o1			! context 0?
	bz,pt	%xcc, 1f			! if so, skip
	 mov	CTX_SECONDARY, %o2

	stxa	%o1, [%o2] ASI_DMMU		! set the context
	set	DEMAP_CTX_SECONDARY, %o2
	membar	#Sync
	stxa	%o2, [%o2] ASI_DMMU_DEMAP	! do the demap
	membar	#Sync

1:
	dec	8, %o0
	brgz,pt	%o0, 0b				! loop over all entries
	 nop

/*
 * now do the IMMU
 */

	set	(63 * 8), %o0			! last TLB entry

0:
	ldxa	[%o0] ASI_IMMU_TLB_TAG, %o2	! fetch the TLB tag
	andcc	%o2, %o5, %o1			! context 0?
	bz,pt	%xcc, 1f			! if so, skip
	 mov	CTX_SECONDARY, %o2

	stxa	%o1, [%o2] ASI_DMMU		! set the context
	set	DEMAP_CTX_SECONDARY, %o2
	membar	#Sync
	stxa	%o2, [%o2] ASI_IMMU_DEMAP	! do the demap
	membar	#Sync

1:
	dec	8, %o0
	brgz,pt	%o0, 0b				! loop over all entries
	 nop

	set	CTX_SECONDARY, %o2
	stxa	%o4, [%o2] ASI_DMMU		! restore secondary ctx
	sethi	%hi(KERNBASE), %o4
	membar	#Sync
	flush	%o4
	retl
	 wrpr	%o3, %pstate
#else
	WRITEME
#endif
5649 | | | 5649 | |
/*
 * blast_dcache()
 *
 * Clear out all of D$ regardless of contents.
 * Does not modify %o0.
 *
 * The D$ tag array covers 2 pages of tag space and is indexed per
 * 32-byte cache line, so the invalidation loop steps by 32 (the old
 * 8-byte stride issued 4x redundant diagnostic stores per line).
 * Interrupts are disabled for the duration to prevent RED exceptions,
 * and a membar #Sync orders the diagnostic stores before return.
 */
	.align 8
ENTRY(blast_dcache)
/*
 * We turn off interrupts for the duration to prevent RED exceptions.
 */
#ifdef PROF
	save	%sp, -CC64FSZ, %sp
#endif

	rdpr	%pstate, %o3
	set	(2 * NBPG) - 32, %o1		! last tag; one tag per 32-byte line
	andn	%o3, PSTATE_IE, %o4		! Turn off PSTATE_IE bit
	wrpr	%o4, 0, %pstate
1:
	stxa	%g0, [%o1] ASI_DCACHE_TAG	! invalidate this line's tag
	brnz,pt	%o1, 1b
	 dec	32, %o1				! step to the previous line
	sethi	%hi(KERNBASE), %o2
	flush	%o2
	membar	#Sync				! ensure the tag stores completed
#ifdef PROF
	wrpr	%o3, %pstate
	ret
	 restore
#else
	retl
	 wrpr	%o3, %pstate			! restore interrupts
#endif
5684 | | | 5685 | |
/*
 * blast_icache()
 *
 * Clear out all of I$ regardless of contents.
 * Does not modify %o0.
 *
 * Like blast_dcache: the I$ tag array is indexed per 32-byte cache
 * line, so step by 32 rather than 8 (the old stride issued 4x
 * redundant diagnostic stores per line), and finish with a
 * membar #Sync so the tag stores are globally visible before return.
 */
	.align 8
ENTRY(blast_icache)
/*
 * We turn off interrupts for the duration to prevent RED exceptions.
 */
	rdpr	%pstate, %o3
	set	(2 * NBPG) - 32, %o1		! last tag; one tag per 32-byte line
	andn	%o3, PSTATE_IE, %o4		! Turn off PSTATE_IE bit
	wrpr	%o4, 0, %pstate
1:
	stxa	%g0, [%o1] ASI_ICACHE_TAG	! invalidate this line's tag
	brnz,pt	%o1, 1b
	 dec	32, %o1				! step to the previous line
	sethi	%hi(KERNBASE), %o2
	flush	%o2
	membar	#Sync				! ensure the tag stores completed
	retl
	 wrpr	%o3, %pstate			! restore interrupts
5709 | | | 5711 | |
5710 | /* | | 5712 | /* |
5711 | * dcache_flush_page(paddr_t pa) | | 5713 | * dcache_flush_page(paddr_t pa) |
5712 | * | | 5714 | * |
5713 | * Clear one page from D$. | | 5715 | * Clear one page from D$. |
5714 | * | | 5716 | * |
5715 | */ | | 5717 | */ |
5716 | .align 8 | | 5718 | .align 8 |
5717 | ENTRY(dcache_flush_page) | | 5719 | ENTRY(dcache_flush_page) |
5718 | #ifndef _LP64 | | 5720 | #ifndef _LP64 |
5719 | COMBINE(%o0, %o1, %o0) | | 5721 | COMBINE(%o0, %o1, %o0) |
5720 | #endif | | 5722 | #endif |
5721 | mov -1, %o1 ! Generate mask for tag: bits [29..2] | | 5723 | mov -1, %o1 ! Generate mask for tag: bits [29..2] |
5722 | srlx %o0, 13-2, %o2 ! Tag is PA bits <40:13> in bits <29:2> | | 5724 | srlx %o0, 13-2, %o2 ! Tag is PA bits <40:13> in bits <29:2> |
5723 | clr %o4 | | 5725 | clr %o4 |
5724 | srl %o1, 2, %o1 ! Now we have bits <29:0> set | | 5726 | srl %o1, 2, %o1 ! Now we have bits <29:0> set |
5725 | set (2*NBPG), %o5 | | 5727 | set (2*NBPG), %o5 |
5726 | ba,pt %icc, 1f | | 5728 | ba,pt %icc, 1f |
5727 | andn %o1, 3, %o1 ! Now we have bits <29:2> set | | 5729 | andn %o1, 3, %o1 ! Now we have bits <29:2> set |
5728 | | | 5730 | |
5729 | .align 8 | | 5731 | .align 8 |
5730 | 1: | | 5732 | 1: |
5731 | ldxa [%o4] ASI_DCACHE_TAG, %o3 | | 5733 | ldxa [%o4] ASI_DCACHE_TAG, %o3 |
5732 | mov %o4, %o0 | | 5734 | mov %o4, %o0 |
5733 | deccc 32, %o5 | | 5735 | deccc 32, %o5 |
5734 | bl,pn %icc, 2f | | 5736 | bl,pn %icc, 2f |
5735 | inc 32, %o4 | | 5737 | inc 32, %o4 |
5736 | | | 5738 | |
5737 | xor %o3, %o2, %o3 | | 5739 | xor %o3, %o2, %o3 |
5738 | andcc %o3, %o1, %g0 | | 5740 | andcc %o3, %o1, %g0 |
5739 | bne,pt %xcc, 1b | | 5741 | bne,pt %xcc, 1b |
5740 | membar #LoadStore | | 5742 | membar #LoadStore |
5741 | | | 5743 | |
5742 | stxa %g0, [%o0] ASI_DCACHE_TAG | | 5744 | stxa %g0, [%o0] ASI_DCACHE_TAG |
5743 | ba,pt %icc, 1b | | 5745 | ba,pt %icc, 1b |
5744 | membar #StoreLoad | | 5746 | membar #StoreLoad |
5745 | 2: | | 5747 | 2: |
5746 | | | 5748 | |
5747 | wr %g0, ASI_PRIMARY_NOFAULT, %asi | | | |
5748 | sethi %hi(KERNBASE), %o5 | | 5749 | sethi %hi(KERNBASE), %o5 |
5749 | flush %o5 | | 5750 | flush %o5 |
5750 | retl | | 5751 | retl |
5751 | membar #Sync | | 5752 | membar #Sync |
5752 | | | 5753 | |
5753 | /* | | 5754 | /* |
5754 | * icache_flush_page(paddr_t pa) | | 5755 | * icache_flush_page(paddr_t pa) |
5755 | * | | 5756 | * |
5756 | * Clear one page from I$. | | 5757 | * Clear one page from I$. |
5757 | * | | 5758 | * |
5758 | */ | | 5759 | */ |
5759 | .align 8 | | 5760 | .align 8 |
5760 | ENTRY(icache_flush_page) | | 5761 | ENTRY(icache_flush_page) |
5761 | #ifndef _LP64 | | 5762 | #ifndef _LP64 |
5762 | COMBINE(%o0, %o1, %o0) | | 5763 | COMBINE(%o0, %o1, %o0) |
5763 | #endif | | 5764 | #endif |
5764 | | | 5765 | |
5765 | #ifdef SPITFIRE | | 5766 | #ifdef SPITFIRE |
5766 | !! | | 5767 | !! |
5767 | !! Linux sez that I$ flushes are not needed for cheetah. | | 5768 | !! Linux sez that I$ flushes are not needed for cheetah. |
5768 | !! | | 5769 | !! |
5769 | | | 5770 | |
5770 | !! Now do the I$ | | 5771 | !! Now do the I$ |
5771 | srlx %o0, 13-8, %o2 | | 5772 | srlx %o0, 13-8, %o2 |
5772 | mov -1, %o1 ! Generate mask for tag: bits [35..8] | | 5773 | mov -1, %o1 ! Generate mask for tag: bits [35..8] |
5773 | srl %o1, 32-35+7, %o1 | | 5774 | srl %o1, 32-35+7, %o1 |
5774 | clr %o4 | | 5775 | clr %o4 |
5775 | sll %o1, 7, %o1 ! Mask | | 5776 | sll %o1, 7, %o1 ! Mask |
5776 | set (2*NBPG), %o5 | | 5777 | set (2*NBPG), %o5 |
5777 | | | 5778 | |
5778 | 1: | | 5779 | 1: |
5779 | ldda [%o4] ASI_ICACHE_TAG, %g0 ! Tag goes in %g1 | | 5780 | ldda [%o4] ASI_ICACHE_TAG, %g0 ! Tag goes in %g1 |
5780 | dec 16, %o5 | | 5781 | dec 32, %o5 |
5781 | xor %g1, %o2, %g1 | | 5782 | xor %g1, %o2, %g1 |
5782 | andcc %g1, %o1, %g0 | | 5783 | andcc %g1, %o1, %g0 |
5783 | bne,pt %xcc, 2f | | 5784 | bne,pt %xcc, 2f |
5784 | membar #LoadStore | | 5785 | membar #LoadStore |
5785 | stxa %g0, [%o4] ASI_ICACHE_TAG | | 5786 | stxa %g0, [%o4] ASI_ICACHE_TAG |
5786 | membar #StoreLoad | | 5787 | membar #StoreLoad |
5787 | 2: | | 5788 | 2: |
5788 | brnz,pt %o5, 1b | | 5789 | brnz,pt %o5, 1b |
5789 | inc 16, %o4 | | 5790 | inc 32, %o4 |
5790 | #endif | | 5791 | #endif |
5791 | sethi %hi(KERNBASE), %o5 | | 5792 | sethi %hi(KERNBASE), %o5 |
5792 | flush %o5 | | 5793 | flush %o5 |
5793 | membar #Sync | | 5794 | membar #Sync |
5794 | retl | | 5795 | retl |
5795 | nop | | 5796 | nop |
5796 | | | 5797 | |
5797 | /* | | 5798 | /* |
5798 | * cache_flush_phys(paddr_t, psize_t, int); | | 5799 | * cache_flush_phys(paddr_t, psize_t, int); |
5799 | * | | 5800 | * |
5800 | * Clear a set of paddrs from the D$, I$ and if param3 is | | 5801 | * Clear a set of paddrs from the D$, I$ and if param3 is |
5801 | * non-zero, E$. (E$ is not supported yet). | | 5802 | * non-zero, E$. (E$ is not supported yet). |
5802 | */ | | 5803 | */ |
5803 | | | 5804 | |
5804 | .align 8 | | 5805 | .align 8 |
5805 | ENTRY(cache_flush_phys) | | 5806 | ENTRY(cache_flush_phys) |
5806 | #ifndef _LP64 | | 5807 | #ifndef _LP64 |
5807 | COMBINE(%o0, %o1, %o0) | | 5808 | COMBINE(%o0, %o1, %o0) |
5808 | COMBINE(%o2, %o3, %o1) | | 5809 | COMBINE(%o2, %o3, %o1) |
5809 | mov %o4, %o2 | | 5810 | mov %o4, %o2 |
5810 | #endif | | 5811 | #endif |
5811 | #ifdef DEBUG | | 5812 | #ifdef DEBUG |
5812 | tst %o2 ! Want to clear E$? | | 5813 | tst %o2 ! Want to clear E$? |
5813 | tnz 1 ! Error! | | 5814 | tnz 1 ! Error! |
5814 | #endif | | 5815 | #endif |
5815 | add %o0, %o1, %o1 ! End PA | | 5816 | add %o0, %o1, %o1 ! End PA |
5816 | | | 5817 | |
5817 | !! | | 5818 | !! |
5818 | !! Both D$ and I$ tags match pa bits 40-13, but | | 5819 | !! Both D$ and I$ tags match pa bits 40-13, but |
5819 | !! they are shifted different amounts. So we'll | | 5820 | !! they are shifted different amounts. So we'll |
5820 | !! generate a mask for bits 40-13. | | 5821 | !! generate a mask for bits 40-13. |
5821 | !! | | 5822 | !! |
5822 | | | 5823 | |
5823 | mov -1, %o2 ! Generate mask for tag: bits [40..13] | | 5824 | mov -1, %o2 ! Generate mask for tag: bits [40..13] |
5824 | srl %o2, 5, %o2 ! 32-5 = [27..0] | | 5825 | srl %o2, 5, %o2 ! 32-5 = [27..0] |
5825 | sllx %o2, 13, %o2 ! 27+13 = [40..13] | | 5826 | sllx %o2, 13, %o2 ! 27+13 = [40..13] |
5826 | | | 5827 | |
5827 | and %o2, %o0, %o0 ! Mask away uninteresting bits | | 5828 | and %o2, %o0, %o0 ! Mask away uninteresting bits |
5828 | and %o2, %o1, %o1 ! (probably not necessary) | | 5829 | and %o2, %o1, %o1 ! (probably not necessary) |
5829 | | | 5830 | |
5830 | set (2*NBPG), %o5 | | 5831 | set (2*NBPG), %o5 |
5831 | clr %o4 | | 5832 | clr %o4 |
5832 | 1: | | 5833 | 1: |
5833 | ldxa [%o4] ASI_DCACHE_TAG, %o3 | | 5834 | ldxa [%o4] ASI_DCACHE_TAG, %o3 |
5834 | #ifdef SPITFIRE | | 5835 | #ifdef SPITFIRE |
5835 | ldda [%o4] ASI_ICACHE_TAG, %g0 ! Tag goes in %g1 -- not on cheetah | | 5836 | ldda [%o4] ASI_ICACHE_TAG, %g0 ! Tag goes in %g1 -- not on cheetah |
5836 | #endif | | 5837 | #endif |
5837 | sllx %o3, 40-29, %o3 ! Shift D$ tag into place | | 5838 | sllx %o3, 40-29, %o3 ! Shift D$ tag into place |
5838 | and %o3, %o2, %o3 ! Mask out trash | | 5839 | and %o3, %o2, %o3 ! Mask out trash |
5839 | cmp %o0, %o3 | | 5840 | cmp %o0, %o3 |
5840 | blt,pt %xcc, 2f ! Too low | | 5841 | blt,pt %xcc, 2f ! Too low |
5841 | sllx %g1, 40-35, %g1 ! Shift I$ tag into place | | 5842 | sllx %g1, 40-35, %g1 ! Shift I$ tag into place |
5842 | cmp %o1, %o3 | | 5843 | cmp %o1, %o3 |
5843 | bgt,pt %xcc, 2f ! Too high | | 5844 | bgt,pt %xcc, 2f ! Too high |
5844 | nop | | 5845 | nop |
5845 | | | 5846 | |
5846 | membar #LoadStore | | 5847 | membar #LoadStore |
5847 | stxa %g0, [%o4] ASI_DCACHE_TAG ! Just right | | 5848 | stxa %g0, [%o4] ASI_DCACHE_TAG ! Just right |
5848 | 2: | | 5849 | 2: |
5849 | #ifndef SPITFIRE | | 5850 | #ifndef SPITFIRE |
5850 | cmp %o0, %g1 | | 5851 | cmp %o0, %g1 |
5851 | blt,pt %xcc, 3f | | 5852 | blt,pt %xcc, 3f |
5852 | cmp %o1, %g1 | | 5853 | cmp %o1, %g1 |
5853 | bgt,pt %icc, 3f | | 5854 | bgt,pt %icc, 3f |
5854 | nop | | 5855 | nop |
5855 | stxa %g0, [%o4] ASI_ICACHE_TAG | | 5856 | stxa %g0, [%o4] ASI_ICACHE_TAG |
5856 | 3: | | 5857 | 3: |
5857 | #endif | | 5858 | #endif |
5858 | membar #StoreLoad | | 5859 | membar #StoreLoad |
5859 | dec 16, %o5 | | 5860 | dec 32, %o5 |
5860 | brgz,pt %o5, 1b | | 5861 | brgz,pt %o5, 1b |
5861 | inc 16, %o4 | | 5862 | inc 32, %o4 |
5862 | | | 5863 | |
5863 | sethi %hi(KERNBASE), %o5 | | 5864 | sethi %hi(KERNBASE), %o5 |
5864 | flush %o5 | | 5865 | flush %o5 |
5865 | membar #Sync | | 5866 | membar #Sync |
5866 | retl | | 5867 | retl |
5867 | nop | | 5868 | nop |
5868 | | | 5869 | |
5869 | #ifdef COMPAT_16 | | 5870 | #ifdef COMPAT_16 |
5870 | #ifdef _LP64 | | 5871 | #ifdef _LP64 |
5871 | /* | | 5872 | /* |
5872 | * XXXXX Still needs lotsa cleanup after sendsig is complete and offsets are known | | 5873 | * XXXXX Still needs lotsa cleanup after sendsig is complete and offsets are known |
5873 | * | | 5874 | * |
5874 | * The following code is copied to the top of the user stack when each | | 5875 | * The following code is copied to the top of the user stack when each |
5875 | * process is exec'ed, and signals are `trampolined' off it. | | 5876 | * process is exec'ed, and signals are `trampolined' off it. |
5876 | * | | 5877 | * |
5877 | * When this code is run, the stack looks like: | | 5878 | * When this code is run, the stack looks like: |
5878 | * [%sp] 128 bytes to which registers can be dumped | | 5879 | * [%sp] 128 bytes to which registers can be dumped |
5879 | * [%sp + 128] signal number (goes in %o0) | | 5880 | * [%sp + 128] signal number (goes in %o0) |
5880 | * [%sp + 128 + 4] signal code (goes in %o1) | | 5881 | * [%sp + 128 + 4] signal code (goes in %o1) |
5881 | * [%sp + 128 + 8] first word of saved state (sigcontext) | | 5882 | * [%sp + 128 + 8] first word of saved state (sigcontext) |
5882 | * . | | 5883 | * . |
5883 | * . | | 5884 | * . |
5884 | * . | | 5885 | * . |
5885 | * [%sp + NNN] last word of saved state | | 5886 | * [%sp + NNN] last word of saved state |
5886 | * (followed by previous stack contents or top of signal stack). | | 5887 | * (followed by previous stack contents or top of signal stack). |
5887 | * The address of the function to call is in %g1; the old %g1 and %o0 | | 5888 | * The address of the function to call is in %g1; the old %g1 and %o0 |
5888 | * have already been saved in the sigcontext. We are running in a clean | | 5889 | * have already been saved in the sigcontext. We are running in a clean |
5889 | * window, all previous windows now being saved to the stack. | | 5890 | * window, all previous windows now being saved to the stack. |
5890 | * | | 5891 | * |
5891 | * Note that [%sp + 128 + 8] == %sp + 128 + 16. The copy at %sp+128+8 | | 5892 | * Note that [%sp + 128 + 8] == %sp + 128 + 16. The copy at %sp+128+8 |
5892 | * will eventually be removed, with a hole left in its place, if things | | 5893 | * will eventually be removed, with a hole left in its place, if things |
5893 | * work out. | | 5894 | * work out. |
5894 | */ | | 5895 | */ |
5895 | ENTRY_NOPROFILE(sigcode) | | 5896 | ENTRY_NOPROFILE(sigcode) |
5896 | /* | | 5897 | /* |
5897 | * XXX the `save' and `restore' below are unnecessary: should | | 5898 | * XXX the `save' and `restore' below are unnecessary: should |
5898 | * replace with simple arithmetic on %sp | | 5899 | * replace with simple arithmetic on %sp |
5899 | * | | 5900 | * |
5900 | * Make room on the stack for 64 %f registers + %fsr. This comes | | 5901 | * Make room on the stack for 64 %f registers + %fsr. This comes |
5901 | * out to 64*4+8 or 264 bytes, but this must be aligned to a multiple | | 5902 | * out to 64*4+8 or 264 bytes, but this must be aligned to a multiple |
5902 | * of 64, or 320 bytes. | | 5903 | * of 64, or 320 bytes. |
5903 | */ | | 5904 | */ |
5904 | save %sp, -CC64FSZ - 320, %sp | | 5905 | save %sp, -CC64FSZ - 320, %sp |
5905 | mov %g2, %l2 ! save globals in %l registers | | 5906 | mov %g2, %l2 ! save globals in %l registers |
5906 | mov %g3, %l3 | | 5907 | mov %g3, %l3 |
5907 | mov %g4, %l4 | | 5908 | mov %g4, %l4 |
5908 | mov %g5, %l5 | | 5909 | mov %g5, %l5 |
5909 | mov %g6, %l6 | | 5910 | mov %g6, %l6 |
5910 | mov %g7, %l7 | | 5911 | mov %g7, %l7 |
5911 | /* | | 5912 | /* |
5912 | * Saving the fpu registers is expensive, so do it iff it is | | 5913 | * Saving the fpu registers is expensive, so do it iff it is |
5913 | * enabled and dirty. | | 5914 | * enabled and dirty. |
5914 | */ | | 5915 | */ |
5915 | rd %fprs, %l0 | | 5916 | rd %fprs, %l0 |
5916 | btst FPRS_DL|FPRS_DU, %l0 ! All clean? | | 5917 | btst FPRS_DL|FPRS_DU, %l0 ! All clean? |
5917 | bz,pt %icc, 2f | | 5918 | bz,pt %icc, 2f |
5918 | btst FPRS_DL, %l0 ! test dl | | 5919 | btst FPRS_DL, %l0 ! test dl |
5919 | bz,pt %icc, 1f | | 5920 | bz,pt %icc, 1f |
5920 | btst FPRS_DU, %l0 ! test du | | 5921 | btst FPRS_DU, %l0 ! test du |
5921 | | | 5922 | |
5922 | ! fpu is enabled, oh well | | 5923 | ! fpu is enabled, oh well |
5923 | stx %fsr, [%sp + CC64FSZ + BIAS + 0] | | 5924 | stx %fsr, [%sp + CC64FSZ + BIAS + 0] |
5924 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can | | 5925 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can |
5925 | andn %l0, BLOCK_ALIGN, %l0 ! do a block store | | 5926 | andn %l0, BLOCK_ALIGN, %l0 ! do a block store |
5926 | stda %f0, [%l0] ASI_BLK_P | | 5927 | stda %f0, [%l0] ASI_BLK_P |
5927 | inc BLOCK_SIZE, %l0 | | 5928 | inc BLOCK_SIZE, %l0 |
5928 | stda %f16, [%l0] ASI_BLK_P | | 5929 | stda %f16, [%l0] ASI_BLK_P |
5929 | 1: | | 5930 | 1: |
5930 | bz,pt %icc, 2f | | 5931 | bz,pt %icc, 2f |
5931 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can | | 5932 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can |
5932 | andn %l0, BLOCK_ALIGN, %l0 ! do a block store | | 5933 | andn %l0, BLOCK_ALIGN, %l0 ! do a block store |
5933 | add %l0, 2*BLOCK_SIZE, %l0 ! and skip what we already stored | | 5934 | add %l0, 2*BLOCK_SIZE, %l0 ! and skip what we already stored |
5934 | stda %f32, [%l0] ASI_BLK_P | | 5935 | stda %f32, [%l0] ASI_BLK_P |
5935 | inc BLOCK_SIZE, %l0 | | 5936 | inc BLOCK_SIZE, %l0 |
5936 | stda %f48, [%l0] ASI_BLK_P | | 5937 | stda %f48, [%l0] ASI_BLK_P |
5937 | 2: | | 5938 | 2: |
5938 | membar #Sync | | 5939 | membar #Sync |
5939 | rd %fprs, %l0 ! reload fprs copy, for checking after | | 5940 | rd %fprs, %l0 ! reload fprs copy, for checking after |
5940 | rd %y, %l1 ! in any case, save %y | | 5941 | rd %y, %l1 ! in any case, save %y |
5941 | lduw [%fp + BIAS + 128], %o0 ! sig | | 5942 | lduw [%fp + BIAS + 128], %o0 ! sig |
5942 | lduw [%fp + BIAS + 128 + 4], %o1 ! code | | 5943 | lduw [%fp + BIAS + 128 + 4], %o1 ! code |
5943 | call %g1 ! (*sa->sa_handler)(sig,code,scp) | | 5944 | call %g1 ! (*sa->sa_handler)(sig,code,scp) |
5944 | add %fp, BIAS + 128 + 8, %o2 ! scp | | 5945 | add %fp, BIAS + 128 + 8, %o2 ! scp |
5945 | wr %l1, %g0, %y ! in any case, restore %y | | 5946 | wr %l1, %g0, %y ! in any case, restore %y |
5946 | | | 5947 | |
5947 | /* | | 5948 | /* |
5948 | * Now that the handler has returned, re-establish all the state | | 5949 | * Now that the handler has returned, re-establish all the state |
5949 | * we just saved above, then do a sigreturn. | | 5950 | * we just saved above, then do a sigreturn. |
5950 | */ | | 5951 | */ |
5951 | btst FPRS_DL|FPRS_DU, %l0 ! All clean? | | 5952 | btst FPRS_DL|FPRS_DU, %l0 ! All clean? |
5952 | bz,pt %icc, 2f | | 5953 | bz,pt %icc, 2f |
5953 | btst FPRS_DL, %l0 ! test dl | | 5954 | btst FPRS_DL, %l0 ! test dl |
5954 | bz,pt %icc, 1f | | 5955 | bz,pt %icc, 1f |
5955 | btst FPRS_DU, %l0 ! test du | | 5956 | btst FPRS_DU, %l0 ! test du |
5956 | | | 5957 | |
5957 | ldx [%sp + CC64FSZ + BIAS + 0], %fsr | | 5958 | ldx [%sp + CC64FSZ + BIAS + 0], %fsr |
5958 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can | | 5959 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can |
5959 | andn %l0, BLOCK_ALIGN, %l0 ! do a block load | | 5960 | andn %l0, BLOCK_ALIGN, %l0 ! do a block load |
5960 | ldda [%l0] ASI_BLK_P, %f0 | | 5961 | ldda [%l0] ASI_BLK_P, %f0 |
5961 | inc BLOCK_SIZE, %l0 | | 5962 | inc BLOCK_SIZE, %l0 |
5962 | ldda [%l0] ASI_BLK_P, %f16 | | 5963 | ldda [%l0] ASI_BLK_P, %f16 |
5963 | 1: | | 5964 | 1: |
5964 | bz,pt %icc, 2f | | 5965 | bz,pt %icc, 2f |
5965 | nop | | 5966 | nop |
5966 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can | | 5967 | add %sp, BIAS+CC64FSZ+BLOCK_SIZE, %l0 ! Generate a pointer so we can |
5967 | andn %l0, BLOCK_ALIGN, %l0 ! do a block load | | 5968 | andn %l0, BLOCK_ALIGN, %l0 ! do a block load |
5968 | inc 2*BLOCK_SIZE, %l0 ! and skip what we already loaded | | 5969 | inc 2*BLOCK_SIZE, %l0 ! and skip what we already loaded |
5969 | ldda [%l0] ASI_BLK_P, %f32 | | 5970 | ldda [%l0] ASI_BLK_P, %f32 |
5970 | inc BLOCK_SIZE, %l0 | | 5971 | inc BLOCK_SIZE, %l0 |
5971 | ldda [%l0] ASI_BLK_P, %f48 | | 5972 | ldda [%l0] ASI_BLK_P, %f48 |
5972 | 2: | | 5973 | 2: |
5973 | mov %l2, %g2 | | 5974 | mov %l2, %g2 |
5974 | mov %l3, %g3 | | 5975 | mov %l3, %g3 |
5975 | mov %l4, %g4 | | 5976 | mov %l4, %g4 |
5976 | mov %l5, %g5 | | 5977 | mov %l5, %g5 |
5977 | mov %l6, %g6 | | 5978 | mov %l6, %g6 |
5978 | mov %l7, %g7 | | 5979 | mov %l7, %g7 |
5979 | membar #Sync | | 5980 | membar #Sync |
5980 | | | 5981 | |
5981 | restore %g0, SYS_compat_16___sigreturn14, %g1 ! get registers back & set syscall # | | 5982 | restore %g0, SYS_compat_16___sigreturn14, %g1 ! get registers back & set syscall # |
5982 | add %sp, BIAS + 128 + 8, %o0! compute scp | | 5983 | add %sp, BIAS + 128 + 8, %o0! compute scp |
5983 | ! andn %o0, 0x0f, %o0 | | 5984 | ! andn %o0, 0x0f, %o0 |
5984 | t ST_SYSCALL ! sigreturn(scp) | | 5985 | t ST_SYSCALL ! sigreturn(scp) |
5985 | ! sigreturn does not return unless it fails | | 5986 | ! sigreturn does not return unless it fails |
5986 | mov SYS_exit, %g1 ! exit(errno) | | 5987 | mov SYS_exit, %g1 ! exit(errno) |
5987 | t ST_SYSCALL | | 5988 | t ST_SYSCALL |
5988 | /* NOTREACHED */ | | 5989 | /* NOTREACHED */ |
5989 | | | 5990 | |
5990 | .globl _C_LABEL(esigcode) | | 5991 | .globl _C_LABEL(esigcode) |
5991 | _C_LABEL(esigcode): | | 5992 | _C_LABEL(esigcode): |
5992 | #endif | | 5993 | #endif |
5993 | | | 5994 | |
5994 | #if !defined(_LP64) | | 5995 | #if !defined(_LP64) |
5995 | | | 5996 | |
5996 | #define SIGCODE_NAME sigcode | | 5997 | #define SIGCODE_NAME sigcode |
5997 | #define ESIGCODE_NAME esigcode | | 5998 | #define ESIGCODE_NAME esigcode |
5998 | #define SIGRETURN_NAME SYS_compat_16___sigreturn14 | | 5999 | #define SIGRETURN_NAME SYS_compat_16___sigreturn14 |
5999 | #define EXIT_NAME SYS_exit | | 6000 | #define EXIT_NAME SYS_exit |
6000 | | | 6001 | |
6001 | #include "sigcode32.s" | | 6002 | #include "sigcode32.s" |
6002 | | | 6003 | |
6003 | #endif | | 6004 | #endif |
6004 | #endif | | 6005 | #endif |
6005 | | | 6006 | |
6006 | /* | | 6007 | /* |
6007 | * Primitives | | 6008 | * Primitives |
6008 | */ | | 6009 | */ |
6009 | #ifdef ENTRY | | 6010 | #ifdef ENTRY |
6010 | #undef ENTRY | | 6011 | #undef ENTRY |
6011 | #endif | | 6012 | #endif |
6012 | | | 6013 | |
6013 | #ifdef GPROF | | 6014 | #ifdef GPROF |
6014 | .globl _mcount | | 6015 | .globl _mcount |
6015 | #define ENTRY(x) \ | | 6016 | #define ENTRY(x) \ |
6016 | .globl _C_LABEL(x); .proc 1; .type _C_LABEL(x),@function; \ | | 6017 | .globl _C_LABEL(x); .proc 1; .type _C_LABEL(x),@function; \ |
6017 | _C_LABEL(x): ; \ | | 6018 | _C_LABEL(x): ; \ |
6018 | .data; \ | | 6019 | .data; \ |
6019 | .align 8; \ | | 6020 | .align 8; \ |
6020 | 0: .uaword 0; .uaword 0; \ | | 6021 | 0: .uaword 0; .uaword 0; \ |
6021 | .text; \ | | 6022 | .text; \ |
6022 | save %sp, -CC64FSZ, %sp; \ | | 6023 | save %sp, -CC64FSZ, %sp; \ |
6023 | sethi %hi(0b), %o0; \ | | 6024 | sethi %hi(0b), %o0; \ |
6024 | call _mcount; \ | | 6025 | call _mcount; \ |
6025 | or %o0, %lo(0b), %o0; \ | | 6026 | or %o0, %lo(0b), %o0; \ |
6026 | restore | | 6027 | restore |
6027 | #else | | 6028 | #else |
6028 | #define ENTRY(x) .globl _C_LABEL(x); .proc 1; \ | | 6029 | #define ENTRY(x) .globl _C_LABEL(x); .proc 1; \ |
6029 | .type _C_LABEL(x),@function; _C_LABEL(x): | | 6030 | .type _C_LABEL(x),@function; _C_LABEL(x): |
6030 | #endif | | 6031 | #endif |
6031 | #define ALTENTRY(x) .globl _C_LABEL(x); _C_LABEL(x): | | 6032 | #define ALTENTRY(x) .globl _C_LABEL(x); _C_LABEL(x): |
6032 | | | 6033 | |
6033 | /* | | 6034 | /* |
6034 | * getfp() - get stack frame pointer | | 6035 | * getfp() - get stack frame pointer |
6035 | */ | | 6036 | */ |
6036 | ENTRY(getfp) | | 6037 | ENTRY(getfp) |
6037 | retl | | 6038 | retl |
6038 | mov %fp, %o0 | | 6039 | mov %fp, %o0 |
6039 | | | 6040 | |
6040 | /* | | 6041 | /* |
6041 | * copyinstr(fromaddr, toaddr, maxlength, &lencopied) | | 6042 | * copyinstr(fromaddr, toaddr, maxlength, &lencopied) |
6042 | * | | 6043 | * |
6043 | * Copy a null terminated string from the user address space into | | 6044 | * Copy a null terminated string from the user address space into |
6044 | * the kernel address space. | | 6045 | * the kernel address space. |
6045 | */ | | 6046 | */ |
6046 | ENTRY(copyinstr) | | 6047 | ENTRY(copyinstr) |
6047 | ! %o0 = fromaddr, %o1 = toaddr, %o2 = maxlen, %o3 = &lencopied | | 6048 | ! %o0 = fromaddr, %o1 = toaddr, %o2 = maxlen, %o3 = &lencopied |
6048 | #ifdef NOTDEF_DEBUG | | 6049 | #ifdef NOTDEF_DEBUG |
6049 | save %sp, -CC64FSZ, %sp | | 6050 | save %sp, -CC64FSZ, %sp |
6050 | set 8f, %o0 | | 6051 | set 8f, %o0 |
6051 | mov %i0, %o1 | | 6052 | mov %i0, %o1 |
6052 | mov %i1, %o2 | | 6053 | mov %i1, %o2 |
6053 | mov %i2, %o3 | | 6054 | mov %i2, %o3 |
6054 | call printf | | 6055 | call printf |
6055 | mov %i3, %o4 | | 6056 | mov %i3, %o4 |
6056 | restore | | 6057 | restore |
6057 | .data | | 6058 | .data |
6058 | 8: .asciz "copyinstr: from=%x to=%x max=%x &len=%x\n" | | 6059 | 8: .asciz "copyinstr: from=%x to=%x max=%x &len=%x\n" |
6059 | _ALIGN | | 6060 | _ALIGN |
6060 | .text | | 6061 | .text |
6061 | #endif | | 6062 | #endif |
6062 | brgz,pt %o2, 1f ! Make sure len is valid | | 6063 | brgz,pt %o2, 1f ! Make sure len is valid |
6063 | sethi %hi(CPCB), %o4 ! (first instr of copy) | | 6064 | sethi %hi(CPCB), %o4 ! (first instr of copy) |
6064 | retl | | 6065 | retl |
6065 | mov ENAMETOOLONG, %o0 | | 6066 | mov ENAMETOOLONG, %o0 |
6066 | 1: | | 6067 | 1: |
6067 | LDPTR [%o4 + %lo(CPCB)], %o4 ! catch faults | | 6068 | LDPTR [%o4 + %lo(CPCB)], %o4 ! catch faults |
6068 | set Lcsfault, %o5 | | 6069 | set Lcsfault, %o5 |
6069 | membar #Sync | | 6070 | membar #Sync |
6070 | STPTR %o5, [%o4 + PCB_ONFAULT] | | 6071 | STPTR %o5, [%o4 + PCB_ONFAULT] |
6071 | | | 6072 | |
6072 | mov %o1, %o5 ! save = toaddr; | | 6073 | mov %o1, %o5 ! save = toaddr; |
6073 | ! XXX should do this in bigger chunks when possible | | 6074 | ! XXX should do this in bigger chunks when possible |
6074 | 0: ! loop: | | 6075 | 0: ! loop: |
6075 | ldsba [%o0] ASI_AIUS, %g1 ! c = *fromaddr; | | 6076 | ldsba [%o0] ASI_AIUS, %g1 ! c = *fromaddr; |
6076 | stb %g1, [%o1] ! *toaddr++ = c; | | 6077 | stb %g1, [%o1] ! *toaddr++ = c; |
6077 | inc %o1 | | 6078 | inc %o1 |
6078 | brz,a,pn %g1, Lcsdone ! if (c == NULL) | | 6079 | brz,a,pn %g1, Lcsdone ! if (c == NULL) |
6079 | clr %o0 ! { error = 0; done; } | | 6080 | clr %o0 ! { error = 0; done; } |
6080 | deccc %o2 ! if (--len > 0) { | | 6081 | deccc %o2 ! if (--len > 0) { |
6081 | bg,pt %icc, 0b ! fromaddr++; | | 6082 | bg,pt %icc, 0b ! fromaddr++; |
6082 | inc %o0 ! goto loop; | | 6083 | inc %o0 ! goto loop; |
6083 | ba,pt %xcc, Lcsdone ! } | | 6084 | ba,pt %xcc, Lcsdone ! } |
6084 | mov ENAMETOOLONG, %o0 ! error = ENAMETOOLONG; | | 6085 | mov ENAMETOOLONG, %o0 ! error = ENAMETOOLONG; |
6085 | NOTREACHED | | 6086 | NOTREACHED |
6086 | | | 6087 | |
6087 | /* | | 6088 | /* |
6088 | * copyoutstr(fromaddr, toaddr, maxlength, &lencopied) | | 6089 | * copyoutstr(fromaddr, toaddr, maxlength, &lencopied) |
6089 | * | | 6090 | * |
6090 | * Copy a null terminated string from the kernel | | 6091 | * Copy a null terminated string from the kernel |
6091 | * address space to the user address space. | | 6092 | * address space to the user address space. |
6092 | */ | | 6093 | */ |
6093 | ENTRY(copyoutstr) | | 6094 | ENTRY(copyoutstr) |
6094 | ! %o0 = fromaddr, %o1 = toaddr, %o2 = maxlen, %o3 = &lencopied | | 6095 | ! %o0 = fromaddr, %o1 = toaddr, %o2 = maxlen, %o3 = &lencopied |
6095 | #ifdef NOTDEF_DEBUG | | 6096 | #ifdef NOTDEF_DEBUG |
6096 | save %sp, -CC64FSZ, %sp | | 6097 | save %sp, -CC64FSZ, %sp |
6097 | set 8f, %o0 | | 6098 | set 8f, %o0 |
6098 | mov %i0, %o1 | | 6099 | mov %i0, %o1 |
6099 | mov %i1, %o2 | | 6100 | mov %i1, %o2 |
6100 | mov %i2, %o3 | | 6101 | mov %i2, %o3 |
6101 | call printf | | 6102 | call printf |
6102 | mov %i3, %o4 | | 6103 | mov %i3, %o4 |
6103 | restore | | 6104 | restore |
6104 | .data | | 6105 | .data |
6105 | 8: .asciz "copyoutstr: from=%x to=%x max=%x &len=%x\n" | | 6106 | 8: .asciz "copyoutstr: from=%x to=%x max=%x &len=%x\n" |
6106 | _ALIGN | | 6107 | _ALIGN |
6107 | .text | | 6108 | .text |
6108 | #endif | | 6109 | #endif |
6109 | brgz,pt %o2, 1f ! Make sure len is valid | | 6110 | brgz,pt %o2, 1f ! Make sure len is valid |
6110 | sethi %hi(CPCB), %o4 ! (first instr of copy) | | 6111 | sethi %hi(CPCB), %o4 ! (first instr of copy) |
6111 | retl | | 6112 | retl |
6112 | mov ENAMETOOLONG, %o0 | | 6113 | mov ENAMETOOLONG, %o0 |
6113 | 1: | | 6114 | 1: |
6114 | LDPTR [%o4 + %lo(CPCB)], %o4 ! catch faults | | 6115 | LDPTR [%o4 + %lo(CPCB)], %o4 ! catch faults |
6115 | set Lcsfault, %o5 | | 6116 | set Lcsfault, %o5 |
6116 | membar #Sync | | 6117 | membar #Sync |
6117 | STPTR %o5, [%o4 + PCB_ONFAULT] | | 6118 | STPTR %o5, [%o4 + PCB_ONFAULT] |
6118 | | | 6119 | |
6119 | mov %o1, %o5 ! save = toaddr; | | 6120 | mov %o1, %o5 ! save = toaddr; |
6120 | ! XXX should do this in bigger chunks when possible | | 6121 | ! XXX should do this in bigger chunks when possible |
6121 | 0: ! loop: | | 6122 | 0: ! loop: |
6122 | ldsb [%o0], %g1 ! c = *fromaddr; | | 6123 | ldsb [%o0], %g1 ! c = *fromaddr; |
6123 | stba %g1, [%o1] ASI_AIUS ! *toaddr++ = c; | | 6124 | stba %g1, [%o1] ASI_AIUS ! *toaddr++ = c; |
6124 | inc %o1 | | 6125 | inc %o1 |
6125 | brz,a,pn %g1, Lcsdone ! if (c == NULL) | | 6126 | brz,a,pn %g1, Lcsdone ! if (c == NULL) |
6126 | clr %o0 ! { error = 0; done; } | | 6127 | clr %o0 ! { error = 0; done; } |
6127 | deccc %o2 ! if (--len > 0) { | | 6128 | deccc %o2 ! if (--len > 0) { |
6128 | bg,pt %icc, 0b ! fromaddr++; | | 6129 | bg,pt %icc, 0b ! fromaddr++; |
6129 | inc %o0 ! goto loop; | | 6130 | inc %o0 ! goto loop; |
6130 | ! } | | 6131 | ! } |
6131 | mov ENAMETOOLONG, %o0 ! error = ENAMETOOLONG; | | 6132 | mov ENAMETOOLONG, %o0 ! error = ENAMETOOLONG; |
6132 | Lcsdone: ! done: | | 6133 | Lcsdone: ! done: |
6133 | sub %o1, %o5, %o1 ! len = to - save; | | 6134 | sub %o1, %o5, %o1 ! len = to - save; |
6134 | brnz,a %o3, 1f ! if (lencopied) | | 6135 | brnz,a %o3, 1f ! if (lencopied) |
6135 | STPTR %o1, [%o3] ! *lencopied = len; | | 6136 | STPTR %o1, [%o3] ! *lencopied = len; |
6136 | 1: | | 6137 | 1: |
6137 | retl ! cpcb->pcb_onfault = 0; | | 6138 | retl ! cpcb->pcb_onfault = 0; |
6138 | STPTR %g0, [%o4 + PCB_ONFAULT]! return (error); | | 6139 | STPTR %g0, [%o4 + PCB_ONFAULT]! return (error); |
6139 | | | 6140 | |
6140 | Lcsfault: | | 6141 | Lcsfault: |
6141 | #ifdef NOTDEF_DEBUG | | 6142 | #ifdef NOTDEF_DEBUG |
6142 | save %sp, -CC64FSZ, %sp | | 6143 | save %sp, -CC64FSZ, %sp |
6143 | set 5f, %o0 | | 6144 | set 5f, %o0 |
6144 | call printf | | 6145 | call printf |
6145 | nop | | 6146 | nop |
6146 | restore | | 6147 | restore |
6147 | .data | | 6148 | .data |
6148 | 5: .asciz "Lcsfault: recovering\n" | | 6149 | 5: .asciz "Lcsfault: recovering\n" |
6149 | _ALIGN | | 6150 | _ALIGN |
6150 | .text | | 6151 | .text |
6151 | #endif | | 6152 | #endif |
6152 | b Lcsdone ! error = EFAULT; | | 6153 | b Lcsdone ! error = EFAULT; |
6153 | mov EFAULT, %o0 ! goto ret; | | 6154 | mov EFAULT, %o0 ! goto ret; |
6154 | | | 6155 | |
6155 | /* | | 6156 | /* |
6156 | * copystr(fromaddr, toaddr, maxlength, &lencopied) | | 6157 | * copystr(fromaddr, toaddr, maxlength, &lencopied) |
6157 | * | | 6158 | * |
6158 | * Copy a null terminated string from one point to another in | | 6159 | * Copy a null terminated string from one point to another in |
6159 | * the kernel address space. (This is a leaf procedure, but | | 6160 | * the kernel address space. (This is a leaf procedure, but |
6160 | * it does not seem that way to the C compiler.) | | 6161 | * it does not seem that way to the C compiler.) |
6161 | */ | | 6162 | */ |
6162 | ENTRY(copystr) | | 6163 | ENTRY(copystr) |
6163 | brgz,pt %o2, 0f ! Make sure len is valid | | 6164 | brgz,pt %o2, 0f ! Make sure len is valid |
6164 | mov %o1, %o5 ! to0 = to; | | 6165 | mov %o1, %o5 ! to0 = to; |
6165 | retl | | 6166 | retl |
6166 | mov ENAMETOOLONG, %o0 | | 6167 | mov ENAMETOOLONG, %o0 |
6167 | 0: ! loop: | | 6168 | 0: ! loop: |
6168 | ldsb [%o0], %o4 ! c = *from; | | 6169 | ldsb [%o0], %o4 ! c = *from; |
6169 | tst %o4 | | 6170 | tst %o4 |
6170 | stb %o4, [%o1] ! *to++ = c; | | 6171 | stb %o4, [%o1] ! *to++ = c; |
6171 | be 1f ! if (c == 0) | | 6172 | be 1f ! if (c == 0) |
6172 | inc %o1 ! goto ok; | | 6173 | inc %o1 ! goto ok; |
6173 | deccc %o2 ! if (--len > 0) { | | 6174 | deccc %o2 ! if (--len > 0) { |
6174 | bg,a 0b ! from++; | | 6175 | bg,a 0b ! from++; |
6175 | inc %o0 ! goto loop; | | 6176 | inc %o0 ! goto loop; |
6176 | b 2f ! } | | 6177 | b 2f ! } |
6177 | mov ENAMETOOLONG, %o0 ! ret = ENAMETOOLONG; goto done; | | 6178 | mov ENAMETOOLONG, %o0 ! ret = ENAMETOOLONG; goto done; |
6178 | 1: ! ok: | | 6179 | 1: ! ok: |
6179 | clr %o0 ! ret = 0; | | 6180 | clr %o0 ! ret = 0; |
6180 | 2: | | 6181 | 2: |
6181 | sub %o1, %o5, %o1 ! len = to - to0; | | 6182 | sub %o1, %o5, %o1 ! len = to - to0; |
6182 | tst %o3 ! if (lencopied) | | 6183 | tst %o3 ! if (lencopied) |
6183 | bnz,a 3f | | 6184 | bnz,a 3f |
6184 | STPTR %o1, [%o3] ! *lencopied = len; | | 6185 | STPTR %o1, [%o3] ! *lencopied = len; |
6185 | 3: | | 6186 | 3: |
6186 | retl | | 6187 | retl |
6187 | nop | | 6188 | nop |
6188 | #ifdef DIAGNOSTIC | | 6189 | #ifdef DIAGNOSTIC |
6189 | 4: | | 6190 | 4: |
6190 | sethi %hi(5f), %o0 | | 6191 | sethi %hi(5f), %o0 |
6191 | call _C_LABEL(panic) | | 6192 | call _C_LABEL(panic) |
6192 | or %lo(5f), %o0, %o0 | | 6193 | or %lo(5f), %o0, %o0 |
6193 | .data | | 6194 | .data |
6194 | 5: | | 6195 | 5: |
6195 | .asciz "copystr" | | 6196 | .asciz "copystr" |
6196 | _ALIGN | | 6197 | _ALIGN |
6197 | .text | | 6198 | .text |
6198 | #endif | | 6199 | #endif |
6199 | | | 6200 | |
6200 | /* | | 6201 | /* |
6201 | * copyin(src, dst, len) | | 6202 | * copyin(src, dst, len) |
6202 | * | | 6203 | * |
6203 | * Copy specified amount of data from user space into the kernel. | | 6204 | * Copy specified amount of data from user space into the kernel. |
6204 | * | | 6205 | * |
6205 | * This is a modified version of memcpy that uses ASI_AIUS. When | | 6206 | * This is a modified version of memcpy that uses ASI_AIUS. When |
6206 | * memcpy is optimized to use block copy ASIs, this should be also. | | 6207 | * memcpy is optimized to use block copy ASIs, this should be also. |
6207 | */ | | 6208 | */ |
6208 | | | 6209 | |
6209 | #define BCOPY_SMALL 32 /* if < 32, copy by bytes */ | | 6210 | #define BCOPY_SMALL 32 /* if < 32, copy by bytes */ |
6210 | | | 6211 | |
/*
 * Register usage throughout copyin:
 *	%o0 = user-space source (all loads go through %asi = ASI_AIUS)
 *	%o1 = kernel destination (plain stores)
 *	%o2 = remaining byte count
 *	%o3 = scratch (alignment xor / cpcb pointer)
 *	%o4 = data byte/half/word; %g1 = doubleword data
 * pcb_onfault is armed with Lcopyfault so a user-space access fault
 * returns EFAULT instead of panicking; it is disarmed at Lcopyin_done.
 */
ENTRY(copyin)
!	flushw			! Make sure we don't have stack probs & lose hibits of %o
#ifdef NOTDEF_DEBUG
	save	%sp, -CC64FSZ, %sp
	set	1f, %o0
	mov	%i0, %o1
	mov	%i1, %o2
	call	printf
	mov	%i2, %o3
	restore
	.data
1:	.asciz	"copyin: src=%x dest=%x len=%x\n"
	_ALIGN
	.text
#endif
	sethi	%hi(CPCB), %o3
	wr	%g0, ASI_AIUS, %asi	! user secondary ASI for source loads
	LDPTR	[%o3 + %lo(CPCB)], %o3
	set	Lcopyfault, %o4
!	mov	%o7, %g7	! save return address
	membar	#Sync
	STPTR	%o4, [%o3 + PCB_ONFAULT]	! arm fault recovery
	cmp	%o2, BCOPY_SMALL
Lcopyin_start:
	bge,a	Lcopyin_fancy	! if >= this many, go be fancy.
	btst	7, %o0		! (part of being fancy)

	/*
	 * Not much to copy, just do it a byte at a time.
	 */
	deccc	%o2		! while (--len >= 0)
	bl	1f
0:
	inc	%o0
	ldsba	[%o0 - 1] %asi, %o4	! *dst++ = (++src)[-1];
	stb	%o4, [%o1]
	deccc	%o2
	bge	0b
	inc	%o1
1:
	ba	Lcopyin_done
	clr	%o0
	NOTREACHED

	/*
	 * Plenty of data to copy, so try to do it optimally.
	 */
Lcopyin_fancy:
	! check for common case first: everything lines up.
!	btst	7, %o0		! done already
	bne	1f
	EMPTY
	btst	7, %o1
	be,a	Lcopyin_doubles
	dec	8, %o2		! if all lined up, len -= 8, goto copyin_doubles

	! If the low bits match, we can make these line up.
1:
	xor	%o0, %o1, %o3	! t = src ^ dst;
	btst	1, %o3		! if (t & 1) {
	be,a	1f
	btst	1, %o0		! [delay slot: if (src & 1)]

	! low bits do not match, must copy by bytes.
0:
	ldsba	[%o0] %asi, %o4	! do {
	inc	%o0		!	(++dst)[-1] = *src++;
	inc	%o1
	deccc	%o2
	bnz	0b		! } while (--len != 0);
	stb	%o4, [%o1 - 1]
	ba	Lcopyin_done
	clr	%o0
	NOTREACHED

	! lowest bit matches, so we can copy by words, if nothing else
1:
	be,a	1f		! if (src & 1) {
	btst	2, %o3		! [delay slot: if (t & 2)]

	! although low bits match, both are 1: must copy 1 byte to align
	ldsba	[%o0] %asi, %o4	! *dst++ = *src++;
	stb	%o4, [%o1]
	inc	%o0
	inc	%o1
	dec	%o2		! len--;
	btst	2, %o3		! } [if (t & 2)]
1:
	be,a	1f		! if (t & 2) {
	btst	2, %o0		! [delay slot: if (src & 2)]
	dec	2, %o2		! len -= 2;
0:
	ldsha	[%o0] %asi, %o4	! do {
	sth	%o4, [%o1]	!	*(short *)dst = *(short *)src;
	inc	2, %o0		!	src += 2, dst += 2;
	deccc	2, %o2		! } while ((len -= 2) >= 0);
	bge	0b
	inc	2, %o1
	b	Lcopyin_mopb	! goto mop_up_byte;
	btst	1, %o2		! } [delay slot: if (len & 1)]
	NOTREACHED

	! low two bits match, so we can copy by longwords
1:
	be,a	1f		! if (src & 2) {
	btst	4, %o3		! [delay slot: if (t & 4)]

	! although low 2 bits match, they are 10: must copy one short to align
	ldsha	[%o0] %asi, %o4	! *(short *)dst = *(short *)src;
	sth	%o4, [%o1]
	inc	2, %o0		! src += 2;
	inc	2, %o1		! dst += 2;
	dec	2, %o2		! len -= 2;
	btst	4, %o3		! } [if (t & 4)]
1:
	be,a	1f		! if (t & 4) {
	btst	4, %o0		! [delay slot: if (src & 4)]
	dec	4, %o2		! len -= 4;
0:
	lduwa	[%o0] %asi, %o4	! do {
	st	%o4, [%o1]	!	*(int *)dst = *(int *)src;
	inc	4, %o0		!	src += 4, dst += 4;
	deccc	4, %o2		! } while ((len -= 4) >= 0);
	bge	0b
	inc	4, %o1
	b	Lcopyin_mopw	! goto mop_up_word_and_byte;
	btst	2, %o2		! } [delay slot: if (len & 2)]
	NOTREACHED

	! low three bits match, so we can copy by doublewords
1:
	be	1f		! if (src & 4) {
	dec	8, %o2		! [delay slot: len -= 8]
	lduwa	[%o0] %asi, %o4	! *(int *)dst = *(int *)src;
	st	%o4, [%o1]
	inc	4, %o0		! src += 4, dst += 4, len -= 4;
	inc	4, %o1
	dec	4, %o2		! }
1:
Lcopyin_doubles:
	ldxa	[%o0] %asi, %g1	! do {
	stx	%g1, [%o1]	!	*(double *)dst = *(double *)src;
	inc	8, %o0		!	src += 8, dst += 8;
	deccc	8, %o2		! } while ((len -= 8) >= 0);
	bge	Lcopyin_doubles
	inc	8, %o1

	! check for a usual case again (save work)
	btst	7, %o2		! if ((len & 7) == 0)
	be	Lcopyin_done	!	goto copyin_done;

	btst	4, %o2		! if ((len & 4) == 0)
	be,a	Lcopyin_mopw	!	goto mop_up_word_and_byte;
	btst	2, %o2		! [delay slot: if (len & 2)]
	lduwa	[%o0] %asi, %o4	! *(int *)dst = *(int *)src;
	st	%o4, [%o1]
	inc	4, %o0		! src += 4;
	inc	4, %o1		! dst += 4;
	btst	2, %o2		! } [if (len & 2)]

1:
	! mop up trailing word (if present) and byte (if present).
Lcopyin_mopw:
	be	Lcopyin_mopb	! no word, go mop up byte
	btst	1, %o2		! [delay slot: if (len & 1)]
	ldsha	[%o0] %asi, %o4	! *(short *)dst = *(short *)src;
	be	Lcopyin_done	! if ((len & 1) == 0) goto done;
	sth	%o4, [%o1]
	ldsba	[%o0 + 2] %asi, %o4	! dst[2] = src[2];
	stb	%o4, [%o1 + 2]
	ba	Lcopyin_done
	clr	%o0
	NOTREACHED

	! mop up trailing byte (if present).
Lcopyin_mopb:
	be,a	Lcopyin_done
	nop
	ldsba	[%o0] %asi, %o4
	stb	%o4, [%o1]

Lcopyin_done:
	sethi	%hi(CPCB), %o3
!	stb	%o4,[%o1]	! Store last byte -- should not be needed
	LDPTR	[%o3 + %lo(CPCB)], %o3
	membar	#Sync
	STPTR	%g0, [%o3 + PCB_ONFAULT]	! disarm fault recovery
	wr	%g0, ASI_PRIMARY_NOFAULT, %asi	! Restore ASI
	retl
	clr	%o0		! return 0
6401 | | | 6402 | |
6402 | /* | | 6403 | /* |
6403 | * copyout(src, dst, len) | | 6404 | * copyout(src, dst, len) |
6404 | * | | 6405 | * |
6405 | * Copy specified amount of data from kernel to user space. | | 6406 | * Copy specified amount of data from kernel to user space. |
6406 | * Just like copyin, except that the `dst' addresses are user space | | 6407 | * Just like copyin, except that the `dst' addresses are user space |
6407 | * rather than the `src' addresses. | | 6408 | * rather than the `src' addresses. |
6408 | * | | 6409 | * |
6409 | * This is a modified version of memcpy that uses ASI_AIUS. When | | 6410 | * This is a modified version of memcpy that uses ASI_AIUS. When |
6410 | * memcpy is optimized to use block copy ASIs, this should be also. | | 6411 | * memcpy is optimized to use block copy ASIs, this should be also. |
6411 | */ | | 6412 | */ |
6412 | /* | | 6413 | /* |
6413 | * This needs to be reimplemented to really do the copy. | | 6414 | * This needs to be reimplemented to really do the copy. |
6414 | */ | | 6415 | */ |
/*
 * Register usage throughout copyout:
 *	%o0 = kernel source (plain loads)
 *	%o1 = user-space destination (all stores go through %asi = ASI_AIUS)
 *	%o2 = remaining byte count
 *	%o3 = scratch (alignment xor / cpcb pointer)
 *	%o4 = data byte/half/word; %g1 = doubleword data
 * Mirror image of copyin: same alignment strategy, but the alternate
 * ASI is applied to the stores instead of the loads.  pcb_onfault is
 * armed with Lcopyfault so a faulting user store returns EFAULT.
 */
ENTRY(copyout)
	/*
	 * ******NOTE****** this depends on memcpy() not using %g7
	 */
#ifdef NOTDEF_DEBUG
	save	%sp, -CC64FSZ, %sp
	set	1f, %o0
	mov	%i0, %o1
	set	CTX_SECONDARY, %o4
	mov	%i1, %o2
	ldxa	[%o4] ASI_DMMU, %o4
	call	printf
	mov	%i2, %o3
	restore
	.data
1:	.asciz	"copyout: src=%x dest=%x len=%x ctx=%d\n"
	_ALIGN
	.text
#endif
Ldocopy:
	sethi	%hi(CPCB), %o3
	wr	%g0, ASI_AIUS, %asi	! user secondary ASI for destination stores
	LDPTR	[%o3 + %lo(CPCB)], %o3
	set	Lcopyfault, %o4
!	mov	%o7, %g7	! save return address
	membar	#Sync
	STPTR	%o4, [%o3 + PCB_ONFAULT]	! arm fault recovery
	cmp	%o2, BCOPY_SMALL
Lcopyout_start:
	membar	#StoreStore
	bge,a	Lcopyout_fancy	! if >= this many, go be fancy.
	btst	7, %o0		! (part of being fancy)

	/*
	 * Not much to copy, just do it a byte at a time.
	 */
	deccc	%o2		! while (--len >= 0)
	bl	1f
	EMPTY
0:
	inc	%o0
	ldsb	[%o0 - 1], %o4	! (++dst)[-1] = *src++;
	stba	%o4, [%o1] %asi
	deccc	%o2
	bge	0b
	inc	%o1
1:
	ba	Lcopyout_done
	clr	%o0
	NOTREACHED

	/*
	 * Plenty of data to copy, so try to do it optimally.
	 */
Lcopyout_fancy:
	! check for common case first: everything lines up.
!	btst	7, %o0		! done already
	bne	1f
	EMPTY
	btst	7, %o1
	be,a	Lcopyout_doubles
	dec	8, %o2		! if all lined up, len -= 8, goto copyout_doubles

	! If the low bits match, we can make these line up.
1:
	xor	%o0, %o1, %o3	! t = src ^ dst;
	btst	1, %o3		! if (t & 1) {
	be,a	1f
	btst	1, %o0		! [delay slot: if (src & 1)]

	! low bits do not match, must copy by bytes.
0:
	ldsb	[%o0], %o4	! do {
	inc	%o0		!	(++dst)[-1] = *src++;
	inc	%o1
	deccc	%o2
	bnz	0b		! } while (--len != 0);
	stba	%o4, [%o1 - 1] %asi
	ba	Lcopyout_done
	clr	%o0
	NOTREACHED

	! lowest bit matches, so we can copy by words, if nothing else
1:
	be,a	1f		! if (src & 1) {
	btst	2, %o3		! [delay slot: if (t & 2)]

	! although low bits match, both are 1: must copy 1 byte to align
	ldsb	[%o0], %o4	! *dst++ = *src++;
	stba	%o4, [%o1] %asi
	inc	%o0
	inc	%o1
	dec	%o2		! len--;
	btst	2, %o3		! } [if (t & 2)]
1:
	be,a	1f		! if (t & 2) {
	btst	2, %o0		! [delay slot: if (src & 2)]
	dec	2, %o2		! len -= 2;
0:
	ldsh	[%o0], %o4	! do {
	stha	%o4, [%o1] %asi	!	*(short *)dst = *(short *)src;
	inc	2, %o0		!	src += 2, dst += 2;
	deccc	2, %o2		! } while ((len -= 2) >= 0);
	bge	0b
	inc	2, %o1
	b	Lcopyout_mopb	! goto mop_up_byte;
	btst	1, %o2		! } [delay slot: if (len & 1)]
	NOTREACHED

	! low two bits match, so we can copy by longwords
1:
	be,a	1f		! if (src & 2) {
	btst	4, %o3		! [delay slot: if (t & 4)]

	! although low 2 bits match, they are 10: must copy one short to align
	ldsh	[%o0], %o4	! *(short *)dst = *(short *)src;
	stha	%o4, [%o1] %asi
	inc	2, %o0		! src += 2;
	inc	2, %o1		! dst += 2;
	dec	2, %o2		! len -= 2;
	btst	4, %o3		! } [if (t & 4)]
1:
	be,a	1f		! if (t & 4) {
	btst	4, %o0		! [delay slot: if (src & 4)]
	dec	4, %o2		! len -= 4;
0:
	lduw	[%o0], %o4	! do {
	sta	%o4, [%o1] %asi	!	*(int *)dst = *(int *)src;
	inc	4, %o0		!	src += 4, dst += 4;
	deccc	4, %o2		! } while ((len -= 4) >= 0);
	bge	0b
	inc	4, %o1
	b	Lcopyout_mopw	! goto mop_up_word_and_byte;
	btst	2, %o2		! } [delay slot: if (len & 2)]
	NOTREACHED

	! low three bits match, so we can copy by doublewords
1:
	be	1f		! if (src & 4) {
	dec	8, %o2		! [delay slot: len -= 8]
	lduw	[%o0], %o4	! *(int *)dst = *(int *)src;
	sta	%o4, [%o1] %asi
	inc	4, %o0		! src += 4, dst += 4, len -= 4;
	inc	4, %o1
	dec	4, %o2		! }
1:
Lcopyout_doubles:
	ldx	[%o0], %g1	! do {
	stxa	%g1, [%o1] %asi	!	*(double *)dst = *(double *)src;
	inc	8, %o0		!	src += 8, dst += 8;
	deccc	8, %o2		! } while ((len -= 8) >= 0);
	bge	Lcopyout_doubles
	inc	8, %o1

	! check for a usual case again (save work)
	btst	7, %o2		! if ((len & 7) == 0)
	be	Lcopyout_done	!	goto copyout_done;

	btst	4, %o2		! if ((len & 4) == 0)
	be,a	Lcopyout_mopw	!	goto mop_up_word_and_byte;
	btst	2, %o2		! [delay slot: if (len & 2)]
	lduw	[%o0], %o4	! *(int *)dst = *(int *)src;
	sta	%o4, [%o1] %asi
	inc	4, %o0		! src += 4;
	inc	4, %o1		! dst += 4;
	btst	2, %o2		! } [if (len & 2)]

1:
	! mop up trailing word (if present) and byte (if present).
Lcopyout_mopw:
	be	Lcopyout_mopb	! no word, go mop up byte
	btst	1, %o2		! [delay slot: if (len & 1)]
	ldsh	[%o0], %o4	! *(short *)dst = *(short *)src;
	be	Lcopyout_done	! if ((len & 1) == 0) goto done;
	stha	%o4, [%o1] %asi
	ldsb	[%o0 + 2], %o4	! dst[2] = src[2];
	stba	%o4, [%o1 + 2] %asi
	ba	Lcopyout_done
	clr	%o0
	NOTREACHED

	! mop up trailing byte (if present).
Lcopyout_mopb:
	be,a	Lcopyout_done
	nop
	ldsb	[%o0], %o4
	stba	%o4, [%o1] %asi

Lcopyout_done:
	sethi	%hi(CPCB), %o3
	LDPTR	[%o3 + %lo(CPCB)], %o3
	membar	#Sync
	STPTR	%g0, [%o3 + PCB_ONFAULT]	! disarm fault recovery
!	jmp	%g7 + 8		! Original instr
	wr	%g0, ASI_PRIMARY_NOFAULT, %asi	! Restore ASI
	membar	#StoreStore|#StoreLoad
	retl			! New instr
	clr	%o0		! return 0
6613 | | | 6614 | |
! Copyin or copyout fault.  Clear cpcb->pcb_onfault and return EFAULT.
! Note that although we were in memcpy, there is no state to clean up;
! the only special thing is that we have to return to [g7 + 8] rather than
! [o7 + 8].
! NOTE(review): the %g7 remark above looks stale -- this code returns with
! retl (i.e. via %o7), and the jmp %g7+8 path is commented out in
! Lcopyout_done; confirm against the trap-entry code that sets pcb_onfault.
Lcopyfault:
	sethi	%hi(CPCB), %o3
	LDPTR	[%o3 + %lo(CPCB)], %o3
	STPTR	%g0, [%o3 + PCB_ONFAULT]	! disarm fault recovery
	membar	#StoreStore|#StoreLoad
#ifdef NOTDEF_DEBUG
	save	%sp, -CC64FSZ, %sp
	set	1f, %o0
	call	printf
	nop
	restore
	.data
1:	.asciz	"copyfault: fault occurred\n"
	_ALIGN
	.text
#endif
	wr	%g0, ASI_PRIMARY_NOFAULT, %asi	! Restore ASI
	retl
	mov	EFAULT, %o0	! return EFAULT to the copyin/copyout caller
6637 | | | 6638 | |
/*
 * cpu_idle(): machine-dependent idle hook.  This implementation does
 * nothing and simply returns to the MI idle loop.
 */
ENTRY(cpu_idle)
	retl
	nop
6641 | | | 6642 | |
6642 | | | 6643 | |
6643 | /* | | 6644 | /* |
6644 | * cpu_switchto() switches to an lwp to run and runs it, saving the | | 6645 | * cpu_switchto() switches to an lwp to run and runs it, saving the |
6645 | * current one away. | | 6646 | * current one away. |
6646 | * | | 6647 | * |
 * struct lwp * cpu_switchto(struct lwp *current, struct lwp *next)
6648 | * Switch to the specified next LWP | | 6649 | * Switch to the specified next LWP |
6649 | * Arguments: | | 6650 | * Arguments: |
6650 | * i0 'struct lwp *' of the current LWP | | 6651 | * i0 'struct lwp *' of the current LWP |
6651 | * i1 'struct lwp *' of the LWP to switch to | | 6652 | * i1 'struct lwp *' of the LWP to switch to |
6652 | * Returns: | | 6653 | * Returns: |
6653 | * the old lwp switched away from | | 6654 | * the old lwp switched away from |
6654 | */ | | 6655 | */ |
6655 | ENTRY(cpu_switchto) | | 6656 | ENTRY(cpu_switchto) |
/*
 * NOTE(review): cpu_switchto(oldlwp %i0, newlwp %i1).
 * If oldlwp != NULL, saves its return PC (%i7), SP (%i6), %pstate and
 * %cwp into its pcb; then installs newlwp as curlwp/cpcb, reloads SP/PC
 * from the new pcb, resets the register-window control registers
 * (%otherwin/%canrestore/%cleanwin/%cansave), re-enables traps, and —
 * if the new lwp's trapframe PC lies inside a restartable atomic
 * sequence (ras_lookup) — rewinds TF_PC/TF_NPC.  Returns oldlwp in %o0.
 */
6656 | save %sp, -CC64FSZ, %sp | | 6657 | save %sp, -CC64FSZ, %sp |
6657 | /* | | 6658 | /* |
6658 | * REGISTER USAGE AT THIS POINT: | | 6659 | * REGISTER USAGE AT THIS POINT: |
6659 | * %l1 = newpcb | | 6660 | * %l1 = newpcb |
6660 | * %l3 = new trapframe | | 6661 | * %l3 = new trapframe |
6661 | * %l4 = new l->l_proc | | 6662 | * %l4 = new l->l_proc |
6662 | * %l5 = pcb of oldlwp | | 6663 | * %l5 = pcb of oldlwp |
6663 | * %l6 = %hi(CPCB) | | 6664 | * %l6 = %hi(CPCB) |
6664 | * %l7 = %hi(CURLWP) | | 6665 | * %l7 = %hi(CURLWP) |
6665 | * %i0 = oldlwp | | 6666 | * %i0 = oldlwp |
6666 | * %i1 = lwp | | 6667 | * %i1 = lwp |
6667 | * %o0 = tmp 1 | | 6668 | * %o0 = tmp 1 |
6668 | * %o1 = tmp 2 | | 6669 | * %o1 = tmp 2 |
6669 | * %o2 = tmp 3 | | 6670 | * %o2 = tmp 3 |
6670 | * %o3 = tmp 4 | | 6671 | * %o3 = tmp 4 |
6671 | */ | | 6672 | */ |
6672 | | | 6673 | |
6673 | flushw ! save all register windows except this one | | 6674 | flushw ! save all register windows except this one |
6674 | wrpr %g0, PSTATE_KERN, %pstate ! make sure we're on normal globals | | 6675 | wrpr %g0, PSTATE_KERN, %pstate ! make sure we're on normal globals |
6675 | ! with traps turned off | | 6676 | ! with traps turned off |
6676 | | | 6677 | |
/* NOTE(review): oldlwp may be NULL (e.g. exiting lwp) — skip the save phase. */
6677 | brz,pn %i0, 1f | | 6678 | brz,pn %i0, 1f |
6678 | sethi %hi(CPCB), %l6 | | 6679 | sethi %hi(CPCB), %l6 |
6679 | | | 6680 | |
6680 | rdpr %pstate, %o1 ! oldpstate = %pstate; | | 6681 | rdpr %pstate, %o1 ! oldpstate = %pstate; |
6681 | LDPTR [%i0 + L_ADDR], %l5 | | 6682 | LDPTR [%i0 + L_ADDR], %l5 |
6682 | | | 6683 | |
6683 | stx %i7, [%l5 + PCB_PC] | | 6684 | stx %i7, [%l5 + PCB_PC] |
6684 | stx %i6, [%l5 + PCB_SP] | | 6685 | stx %i6, [%l5 + PCB_SP] |
6685 | sth %o1, [%l5 + PCB_PSTATE] | | 6686 | sth %o1, [%l5 + PCB_PSTATE] |
6686 | | | 6687 | |
6687 | rdpr %cwp, %o2 ! Useless | | 6688 | rdpr %cwp, %o2 ! Useless |
6688 | stb %o2, [%l5 + PCB_CWP] | | 6689 | stb %o2, [%l5 + PCB_CWP] |
6689 | | | 6690 | |
6690 | 1: | | 6691 | 1: |
6691 | sethi %hi(CURLWP), %l7 | | 6692 | sethi %hi(CURLWP), %l7 |
6692 | | | 6693 | |
6693 | LDPTR [%i1 + L_ADDR], %l1 ! newpcb = l->l_addr; | | 6694 | LDPTR [%i1 + L_ADDR], %l1 ! newpcb = l->l_addr; |
6694 | | | 6695 | |
6695 | /* | | 6696 | /* |
6696 | * Load the new lwp. To load, we must change stacks and | | 6697 | * Load the new lwp. To load, we must change stacks and |
6697 | * alter cpcb and the window control registers, hence we must | | 6698 | * alter cpcb and the window control registers, hence we must |
6698 | * keep interrupts disabled. | | 6699 | * keep interrupts disabled. |
6699 | */ | | 6700 | */ |
6700 | | | 6701 | |
6701 | STPTR %i1, [%l7 + %lo(CURLWP)] ! curlwp = l; | | 6702 | STPTR %i1, [%l7 + %lo(CURLWP)] ! curlwp = l; |
6702 | STPTR %l1, [%l6 + %lo(CPCB)] ! cpcb = newpcb; | | 6703 | STPTR %l1, [%l6 + %lo(CPCB)] ! cpcb = newpcb; |
6703 | | | 6704 | |
6704 | ldx [%l1 + PCB_SP], %i6 | | 6705 | ldx [%l1 + PCB_SP], %i6 |
6705 | ldx [%l1 + PCB_PC], %i7 | | 6706 | ldx [%l1 + PCB_PC], %i7 |
6706 | | | 6707 | |
6707 | wrpr %g0, 0, %otherwin ! These two insns should be redundant | | 6708 | wrpr %g0, 0, %otherwin ! These two insns should be redundant |
6708 | wrpr %g0, 0, %canrestore | | 6709 | wrpr %g0, 0, %canrestore |
/* NOTE(review): NWINDOWS is read from the CWP field of %ver; cansave is set
 * to NWINDOWS-2 (one window in use, one reserved), per the dec below. */
6709 | rdpr %ver, %o3 | | 6710 | rdpr %ver, %o3 |
6710 | and %o3, CWP, %o3 | | 6711 | and %o3, CWP, %o3 |
6711 | wrpr %g0, %o3, %cleanwin | | 6712 | wrpr %g0, %o3, %cleanwin |
6712 | dec 1, %o3 ! NWINDOWS-1-1 | | 6713 | dec 1, %o3 ! NWINDOWS-1-1 |
6713 | wrpr %o3, %cansave | | 6714 | wrpr %o3, %cansave |
6714 | | | 6715 | |
6715 | /* finally, enable traps */ | | 6716 | /* finally, enable traps */ |
6716 | wrpr %g0, PSTATE_INTR, %pstate | | 6717 | wrpr %g0, PSTATE_INTR, %pstate |
6717 | | | 6718 | |
6718 | !flushw | | 6719 | !flushw |
6719 | !membar #Sync | | 6720 | !membar #Sync |
6720 | | | 6721 | |
6721 | /* | | 6722 | /* |
6722 | * Check for restartable atomic sequences (RAS) | | 6723 | * Check for restartable atomic sequences (RAS) |
6723 | */ | | 6724 | */ |
6724 | LDPTR [%i1 + L_PROC], %l4 ! now %l4 points to p | | 6725 | LDPTR [%i1 + L_PROC], %l4 ! now %l4 points to p |
6725 | mov %l4, %o0 ! p is first arg to ras_lookup | | 6726 | mov %l4, %o0 ! p is first arg to ras_lookup |
6726 | LDPTR [%o0 + P_RASLIST], %o1 ! any RAS in p? | | 6727 | LDPTR [%o0 + P_RASLIST], %o1 ! any RAS in p? |
6727 | brz,pt %o1, Lsw_noras ! no, skip RAS check | | 6728 | brz,pt %o1, Lsw_noras ! no, skip RAS check |
6728 | LDPTR [%i1 + L_TF], %l3 ! pointer to trap frame | | 6729 | LDPTR [%i1 + L_TF], %l3 ! pointer to trap frame |
/* NOTE(review): ras_lookup(p, pc) returns -1 when pc is not inside a RAS;
 * otherwise the RAS start address, which is written back as pc/npc below. */
6729 | call _C_LABEL(ras_lookup) | | 6730 | call _C_LABEL(ras_lookup) |
6730 | LDPTR [%l3 + TF_PC], %o1 | | 6731 | LDPTR [%l3 + TF_PC], %o1 |
6731 | cmp %o0, -1 | | 6732 | cmp %o0, -1 |
6732 | be,pt %xcc, Lsw_noras | | 6733 | be,pt %xcc, Lsw_noras |
6733 | add %o0, 4, %o1 | | 6734 | add %o0, 4, %o1 |
6734 | STPTR %o0, [%l3 + TF_PC] ! store rewound %pc | | 6735 | STPTR %o0, [%l3 + TF_PC] ! store rewound %pc |
6735 | STPTR %o1, [%l3 + TF_NPC] ! and %npc | | 6736 | STPTR %o1, [%l3 + TF_NPC] ! and %npc |
6736 | | | 6737 | |
6737 | Lsw_noras: | | 6738 | Lsw_noras: |
6738 | | | 6739 | |
6739 | /* | | 6740 | /* |
6740 | * We are resuming the process that was running at the | | 6741 | * We are resuming the process that was running at the |
6741 | * call to switch(). Just set psr ipl and return. | | 6742 | * call to switch(). Just set psr ipl and return. |
6742 | */ | | 6743 | */ |
6743 | ! wrpr %g0, 0, %cleanwin ! DEBUG | | 6744 | ! wrpr %g0, 0, %cleanwin ! DEBUG |
6744 | clr %g4 ! This needs to point to the base of the data segment | | 6745 | clr %g4 ! This needs to point to the base of the data segment |
6745 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI | | 6746 | wr %g0, ASI_PRIMARY_NOFAULT, %asi ! Restore default ASI |
6746 | !wrpr %g0, PSTATE_INTR, %pstate | | 6747 | !wrpr %g0, PSTATE_INTR, %pstate |
6747 | ret | | 6748 | ret |
6748 | restore %i0, %g0, %o0 ! return old curlwp | | 6749 | restore %i0, %g0, %o0 ! return old curlwp |
6749 | | | 6750 | |
6750 | /* | | 6751 | /* |
6751 | * Snapshot the current process so that stack frames are up to date. | | 6752 | * Snapshot the current process so that stack frames are up to date. |
6752 | * Only used just before a crash dump. | | 6753 | * Only used just before a crash dump. |
6753 | */ | | 6754 | */ |
6754 | ENTRY(snapshot) | | 6755 | ENTRY(snapshot) |
/*
 * NOTE(review): snapshot(pcb %o0) — records the caller's return PC (%o7),
 * SP (%o6), %pstate, %pil and %cwp into the pcb, then uses
 * flushw / save / flushw to force every register window (including the
 * caller's) out to the stack so a crash dump sees current frames.
 */
6755 | rdpr %pstate, %o1 ! save psr | | 6756 | rdpr %pstate, %o1 ! save psr |
6756 | stx %o7, [%o0 + PCB_PC] ! save pc | | 6757 | stx %o7, [%o0 + PCB_PC] ! save pc |
6757 | stx %o6, [%o0 + PCB_SP] ! save sp | | 6758 | stx %o6, [%o0 + PCB_SP] ! save sp |
6758 | rdpr %pil, %o2 | | 6759 | rdpr %pil, %o2 |
6759 | sth %o1, [%o0 + PCB_PSTATE] | | 6760 | sth %o1, [%o0 + PCB_PSTATE] |
6760 | rdpr %cwp, %o3 | | 6761 | rdpr %cwp, %o3 |
6761 | stb %o2, [%o0 + PCB_PIL] | | 6762 | stb %o2, [%o0 + PCB_PIL] |
6762 | stb %o3, [%o0 + PCB_CWP] | | 6763 | stb %o3, [%o0 + PCB_CWP] |
6763 | | | 6764 | |
6764 | flushw | | 6765 | flushw |
/* NOTE(review): the save+flushw pair pushes the current window too;
 * the restore in the delay slot of ret unwinds the temporary frame. */
6765 | save %sp, -CC64FSZ, %sp | | 6766 | save %sp, -CC64FSZ, %sp |
6766 | flushw | | 6767 | flushw |
6767 | ret | | 6768 | ret |
6768 | restore | | 6769 | restore |
6769 | | | 6770 | |
6770 | /* | | 6771 | /* |
6771 | * cpu_lwp_fork() arranges for lwp_trampoline() to run when the | | 6772 | * cpu_lwp_fork() arranges for lwp_trampoline() to run when the |
6772 | * nascent lwp is selected by switch(). | | 6773 | * nascent lwp is selected by switch(). |
6773 | * | | 6774 | * |
6774 | * The switch frame will contain pointer to struct lwp of this lwp in | | 6775 | * The switch frame will contain pointer to struct lwp of this lwp in |
6775 | * %l2, a pointer to the function to call in %l0, and an argument to | | 6776 | * %l2, a pointer to the function to call in %l0, and an argument to |
6776 | * pass to it in %l1 (we abuse the callee-saved registers). | | 6777 | * pass to it in %l1 (we abuse the callee-saved registers). |
6777 | * | | 6778 | * |
6778 | * We enter lwp_trampoline as if we are "returning" from | | 6779 | * We enter lwp_trampoline as if we are "returning" from |
6779 | * cpu_switchto(), so %o0 contains previous lwp (the one we are | | 6780 | * cpu_switchto(), so %o0 contains previous lwp (the one we are |
6780 | * switching from) that we pass to lwp_startup(). | | 6781 | * switching from) that we pass to lwp_startup(). |
6781 | * | | 6782 | * |
6782 | * If the function *(%l0) returns, we arrange for an immediate return | | 6783 | * If the function *(%l0) returns, we arrange for an immediate return |
6783 | * to user mode. This happens in two known cases: after execve(2) of | | 6784 | * to user mode. This happens in two known cases: after execve(2) of |
6784 | * init, and when returning a child to user mode after a fork(2). | | 6785 | * init, and when returning a child to user mode after a fork(2). |
6785 | * | | 6786 | * |
6786 | * If were setting up a kernel thread, the function *(%l0) will not | | 6787 | * If were setting up a kernel thread, the function *(%l0) will not |
6787 | * return. | | 6788 | * return. |
6788 | */ | | 6789 | */ |
6789 | ENTRY(lwp_trampoline) | | 6790 | ENTRY(lwp_trampoline) |
6790 | /* | | 6791 | /* |
6791 | * Note: cpu_lwp_fork() has set up a stack frame for us to run | | 6792 | * Note: cpu_lwp_fork() has set up a stack frame for us to run |
6792 | * in, so we can call other functions from here without using | | 6793 | * in, so we can call other functions from here without using |
6793 | * `save ... restore'. | | 6794 | * `save ... restore'. |
6794 | */ | | 6795 | */ |
6795 | | | 6796 | |
/* NOTE(review): first arg %o0 is the previous lwp, left there by
 * cpu_switchto's return; the new lwp (%l2) goes in %o1. */
6796 | ! newlwp in %l2, oldlwp in %o0 | | 6797 | ! newlwp in %l2, oldlwp in %o0 |
6797 | call lwp_startup | | 6798 | call lwp_startup |
6798 | mov %l2, %o1 | | 6799 | mov %l2, %o1 |
6799 | | | 6800 | |
/* NOTE(review): invoke the function cpu_lwp_fork stashed in %l0 with
 * argument %l1; for kernel threads this call never returns. */
6800 | call %l0 ! re-use current frame | | 6801 | call %l0 ! re-use current frame |
6801 | mov %l1, %o0 | | 6802 | mov %l1, %o0 |
6802 | | | 6803 | |
6803 | /* | | 6804 | /* |
6804 | * Going to userland - set proper tstate in trap frame | | 6805 | * Going to userland - set proper tstate in trap frame |
6805 | */ | | 6806 | */ |
6806 | set (ASI_PRIMARY_NO_FAULT<<TSTATE_ASI_SHIFT)|((PSTATE_USER)<<TSTATE_PSTATE_SHIFT), %g1 | | 6807 | set (ASI_PRIMARY_NO_FAULT<<TSTATE_ASI_SHIFT)|((PSTATE_USER)<<TSTATE_PSTATE_SHIFT), %g1 |
6807 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] | | 6808 | stx %g1, [%sp + CC64FSZ + STKB + TF_TSTATE] |
6808 | | | 6809 | |
6809 | /* | | 6810 | /* |
6810 | * Here we finish up as in syscall, but simplified. | | 6811 | * Here we finish up as in syscall, but simplified. |
6811 | */ | | 6812 | */ |
6812 | CHKPT(%o3,%o4,0x35) | | 6813 | CHKPT(%o3,%o4,0x35) |
6813 | ba,a,pt %icc, return_from_trap | | 6814 | ba,a,pt %icc, return_from_trap |
6814 | nop | | 6815 | nop |
6815 | | | 6816 | |
6816 | /* | | 6817 | /* |
6817 | * {fu,su}{,i}{byte,word} | | 6818 | * {fu,su}{,i}{byte,word} |
6818 | */ | | 6819 | */ |
6819 | ALTENTRY(fuiword) | | 6820 | ALTENTRY(fuiword) |
6820 | ENTRY(fuword) | | 6821 | ENTRY(fuword) |
/*
 * NOTE(review): fuword/fuiword(addr %o0) — fetch one word from user space
 * via ASI_AIUS.  Rejects addresses that are not word-aligned.  Faults are
 * caught through cpcb->pcb_onfault = Lfserr; on any failure the routine
 * returns -1 (in both %o0 and %o1 for the !_LP64 case at Lfsbadaddr).
 * The membars bracket the onfault-window so the fault handler sees a
 * consistent pcb_onfault value.
 */
6821 | btst 3, %o0 ! has low bits set... | | 6822 | btst 3, %o0 ! has low bits set... |
6822 | bnz Lfsbadaddr ! go return -1 | | 6823 | bnz Lfsbadaddr ! go return -1 |
6823 | EMPTY | | 6824 | EMPTY |
6824 | sethi %hi(CPCB), %o2 ! cpcb->pcb_onfault = Lfserr; | | 6825 | sethi %hi(CPCB), %o2 ! cpcb->pcb_onfault = Lfserr; |
6825 | set Lfserr, %o3 | | 6826 | set Lfserr, %o3 |
6826 | LDPTR [%o2 + %lo(CPCB)], %o2 | | 6827 | LDPTR [%o2 + %lo(CPCB)], %o2 |
6827 | membar #LoadStore | | 6828 | membar #LoadStore |
6828 | STPTR %o3, [%o2 + PCB_ONFAULT] | | 6829 | STPTR %o3, [%o2 + PCB_ONFAULT] |
6829 | membar #Sync | | 6830 | membar #Sync |
6830 | LDPTRA [%o0] ASI_AIUS, %o0 ! fetch the word | | 6831 | LDPTRA [%o0] ASI_AIUS, %o0 ! fetch the word |
6831 | membar #Sync | | 6832 | membar #Sync |
6832 | STPTR %g0, [%o2 + PCB_ONFAULT]! but first clear onfault | | 6833 | STPTR %g0, [%o2 + PCB_ONFAULT]! but first clear onfault |
6833 | retl ! phew, made it, return the word | | 6834 | retl ! phew, made it, return the word |
6834 | membar #StoreStore|#StoreLoad | | 6835 | membar #StoreStore|#StoreLoad |
6835 | | | 6836 | |
/* NOTE(review): fault path — clear pcb_onfault, then fall into
 * Lfsbadaddr to return the -1 error indicator. */
6836 | Lfserr: | | 6837 | Lfserr: |
6837 | STPTR %g0, [%o2 + PCB_ONFAULT]! error in r/w, clear pcb_onfault | | 6838 | STPTR %g0, [%o2 + PCB_ONFAULT]! error in r/w, clear pcb_onfault |
6838 | membar #StoreStore|#StoreLoad | | 6839 | membar #StoreStore|#StoreLoad |
6839 | Lfsbadaddr: | | 6840 | Lfsbadaddr: |
6840 | #ifndef _LP64 | | 6841 | #ifndef _LP64 |
6841 | mov -1, %o1 | | 6842 | mov -1, %o1 |
6842 | #endif | | 6843 | #endif |
6843 | retl ! and return error indicator | | 6844 | retl ! and return error indicator |
6844 | mov -1, %o0 | | 6845 | mov -1, %o0 |
6845 | | | 6846 | |
6846 | /* | | 6847 | /* |
6847 | * This is just like Lfserr, but it's a global label that allows | | 6848 | * This is just like Lfserr, but it's a global label that allows |
6848 | * mem_access_fault() to check to see that we don't want to try to | | 6849 | * mem_access_fault() to check to see that we don't want to try to |
6849 | * page in the fault. It's used by fuswintr() etc. | | 6850 | * page in the fault. It's used by fuswintr() etc. |
6850 | */ | | 6851 | */ |
6851 | .globl _C_LABEL(Lfsbail) | | 6852 | .globl _C_LABEL(Lfsbail) |
/*
 * NOTE(review): global twin of Lfserr — same cleanup (clear pcb_onfault,
 * return -1), but exported so mem_access_fault() can compare against it
 * and refuse to page in the faulting address (used by fuswintr() etc.,
 * per the comment above).  Expects %o2 = cpcb, as set up by the callers.
 */
6852 | _C_LABEL(Lfsbail): | | 6853 | _C_LABEL(Lfsbail): |
6853 | STPTR %g0, [%o2 + PCB_ONFAULT]! error in r/w, clear pcb_onfault | | 6854 | STPTR %g0, [%o2 + PCB_ONFAULT]! error in r/w, clear pcb_onfault |
6854 | membar #StoreStore|#StoreLoad | | 6855 | membar #StoreStore|#StoreLoad |
6855 | retl ! and return error indicator | | 6856 | retl ! and return error indicator |
6856 | mov -1, %o0 | | 6857 | mov -1, %o0 |
6857 | | | 6858 | |
6858 | /* | | 6859 | /* |
6859 | * Like fusword but callable from interrupt context. | | 6860 | * Like fusword but callable from interrupt context. |
6860 | * Fails if data isn't resident. | | 6861 | * Fails if data isn't resident. |
| @@ -8752,1389 +8753,1389 @@ L105: | | | @@ -8752,1389 +8753,1389 @@ L105: |
8752 | brlez,pn %o2, Lmemcpy_blockdone | | 8753 | brlez,pn %o2, Lmemcpy_blockdone |
8753 | faligndata %f6, %f8, %f46 | | 8754 | faligndata %f6, %f8, %f46 |
8754 | | | 8755 | |
8755 | stda %f32, [%o1] ASI_STORE | | 8756 | stda %f32, [%o1] ASI_STORE |
8756 | ba 3b | | 8757 | ba 3b |
8757 | inc BLOCK_SIZE, %o1 | | 8758 | inc BLOCK_SIZE, %o1 |
8758 | | | 8759 | |
8759 | | | 8760 | |
8760 | !! | | 8761 | !! |
8761 | !! Source at BLOCK_ALIGN+48 | | 8762 | !! Source at BLOCK_ALIGN+48 |
8762 | !! | | 8763 | !! |
8763 | !! We need to load 2 doubles by hand. | | 8764 | !! We need to load 2 doubles by hand. |
8764 | !! | | 8765 | !! |
8765 | L106: | | 8766 | L106: |
8766 | #ifdef RETURN_NAME | | 8767 | #ifdef RETURN_NAME |
8767 | sethi %hi(1f), %g1 | | 8768 | sethi %hi(1f), %g1 |
8768 | ba,pt %icc, 2f | | 8769 | ba,pt %icc, 2f |
8769 | or %g1, %lo(1f), %g1 | | 8770 | or %g1, %lo(1f), %g1 |
8770 | 1: | | 8771 | 1: |
8771 | .asciz "L106" | | 8772 | .asciz "L106" |
8772 | .align 8 | | 8773 | .align 8 |
8773 | 2: | | 8774 | 2: |
8774 | #endif | | 8775 | #endif |
8775 | fmovd %f0, %f10 | | 8776 | fmovd %f0, %f10 |
8776 | ldd [%o0], %f12 | | 8777 | ldd [%o0], %f12 |
8777 | inc 8, %o0 | | 8778 | inc 8, %o0 |
8778 | ldd [%o0], %f14 | | 8779 | ldd [%o0], %f14 |
8779 | inc 8, %o0 | | 8780 | inc 8, %o0 |
8780 | | | 8781 | |
8781 | cmp %o0, %o5 | | 8782 | cmp %o0, %o5 |
8782 | bleu,a,pn %icc, 2f | | 8783 | bleu,a,pn %icc, 2f |
8783 | ldda [%o0] ASI_BLK_P, %f16 | | 8784 | ldda [%o0] ASI_BLK_P, %f16 |
8784 | membar #Sync | | 8785 | membar #Sync |
8785 | 2: | | 8786 | 2: |
8786 | inc BLOCK_SIZE, %o0 | | 8787 | inc BLOCK_SIZE, %o0 |
8787 | 3: | | 8788 | 3: |
8788 | faligndata %f10, %f12, %f32 | | 8789 | faligndata %f10, %f12, %f32 |
8789 | cmp %o0, %o5 | | 8790 | cmp %o0, %o5 |
8790 | faligndata %f12, %f14, %f34 | | 8791 | faligndata %f12, %f14, %f34 |
8791 | bleu,a,pn %icc, 2f | | 8792 | bleu,a,pn %icc, 2f |
8792 | ldda [%o0] ASI_BLK_P, %f48 | | 8793 | ldda [%o0] ASI_BLK_P, %f48 |
8793 | membar #Sync | | 8794 | membar #Sync |
8794 | 2: | | 8795 | 2: |
8795 | faligndata %f14, %f16, %f36 | | 8796 | faligndata %f14, %f16, %f36 |
8796 | dec BLOCK_SIZE, %o2 | | 8797 | dec BLOCK_SIZE, %o2 |
8797 | faligndata %f16, %f18, %f38 | | 8798 | faligndata %f16, %f18, %f38 |
8798 | inc BLOCK_SIZE, %o0 | | 8799 | inc BLOCK_SIZE, %o0 |
8799 | faligndata %f18, %f20, %f40 | | 8800 | faligndata %f18, %f20, %f40 |
8800 | faligndata %f20, %f22, %f42 | | 8801 | faligndata %f20, %f22, %f42 |
8801 | faligndata %f22, %f24, %f44 | | 8802 | faligndata %f22, %f24, %f44 |
8802 | brlez,pn %o2, Lmemcpy_blockdone | | 8803 | brlez,pn %o2, Lmemcpy_blockdone |
8803 | faligndata %f24, %f26, %f46 | | 8804 | faligndata %f24, %f26, %f46 |
8804 | | | 8805 | |
8805 | stda %f32, [%o1] ASI_STORE | | 8806 | stda %f32, [%o1] ASI_STORE |
8806 | | | 8807 | |
8807 | faligndata %f26, %f28, %f32 | | 8808 | faligndata %f26, %f28, %f32 |
8808 | cmp %o0, %o5 | | 8809 | cmp %o0, %o5 |
8809 | faligndata %f28, %f30, %f34 | | 8810 | faligndata %f28, %f30, %f34 |
8810 | bleu,a,pn %icc, 2f | | 8811 | bleu,a,pn %icc, 2f |
8811 | ldda [%o0] ASI_BLK_P, %f0 | | 8812 | ldda [%o0] ASI_BLK_P, %f0 |
8812 | membar #Sync | | 8813 | membar #Sync |
8813 | 2: | | 8814 | 2: |
8814 | faligndata %f30, %f48, %f36 | | 8815 | faligndata %f30, %f48, %f36 |
8815 | dec BLOCK_SIZE, %o2 | | 8816 | dec BLOCK_SIZE, %o2 |
8816 | faligndata %f48, %f50, %f38 | | 8817 | faligndata %f48, %f50, %f38 |
8817 | inc BLOCK_SIZE, %o1 | | 8818 | inc BLOCK_SIZE, %o1 |
8818 | faligndata %f50, %f52, %f40 | | 8819 | faligndata %f50, %f52, %f40 |
8819 | faligndata %f52, %f54, %f42 | | 8820 | faligndata %f52, %f54, %f42 |
8820 | inc BLOCK_SIZE, %o0 | | 8821 | inc BLOCK_SIZE, %o0 |
8821 | faligndata %f54, %f56, %f44 | | 8822 | faligndata %f54, %f56, %f44 |
8822 | brlez,pn %o2, Lmemcpy_blockdone | | 8823 | brlez,pn %o2, Lmemcpy_blockdone |
8823 | faligndata %f56, %f58, %f46 | | 8824 | faligndata %f56, %f58, %f46 |
8824 | | | 8825 | |
8825 | stda %f32, [%o1] ASI_STORE | | 8826 | stda %f32, [%o1] ASI_STORE |
8826 | | | 8827 | |
8827 | faligndata %f58, %f60, %f32 | | 8828 | faligndata %f58, %f60, %f32 |
8828 | cmp %o0, %o5 | | 8829 | cmp %o0, %o5 |
8829 | faligndata %f60, %f62, %f34 | | 8830 | faligndata %f60, %f62, %f34 |
8830 | bleu,a,pn %icc, 2f | | 8831 | bleu,a,pn %icc, 2f |
8831 | ldda [%o0] ASI_BLK_P, %f16 | | 8832 | ldda [%o0] ASI_BLK_P, %f16 |
8832 | membar #Sync | | 8833 | membar #Sync |
8833 | 2: | | 8834 | 2: |
8834 | faligndata %f62, %f0, %f36 | | 8835 | faligndata %f62, %f0, %f36 |
8835 | dec BLOCK_SIZE, %o2 | | 8836 | dec BLOCK_SIZE, %o2 |
8836 | faligndata %f0, %f2, %f38 | | 8837 | faligndata %f0, %f2, %f38 |
8837 | inc BLOCK_SIZE, %o1 | | 8838 | inc BLOCK_SIZE, %o1 |
8838 | faligndata %f2, %f4, %f40 | | 8839 | faligndata %f2, %f4, %f40 |
8839 | faligndata %f4, %f6, %f42 | | 8840 | faligndata %f4, %f6, %f42 |
8840 | inc BLOCK_SIZE, %o0 | | 8841 | inc BLOCK_SIZE, %o0 |
8841 | faligndata %f6, %f8, %f44 | | 8842 | faligndata %f6, %f8, %f44 |
8842 | brlez,pn %o2, Lmemcpy_blockdone | | 8843 | brlez,pn %o2, Lmemcpy_blockdone |
8843 | faligndata %f8, %f10, %f46 | | 8844 | faligndata %f8, %f10, %f46 |
8844 | | | 8845 | |
8845 | stda %f32, [%o1] ASI_STORE | | 8846 | stda %f32, [%o1] ASI_STORE |
8846 | ba 3b | | 8847 | ba 3b |
8847 | inc BLOCK_SIZE, %o1 | | 8848 | inc BLOCK_SIZE, %o1 |
8848 | | | 8849 | |
8849 | | | 8850 | |
8850 | !! | | 8851 | !! |
8851 | !! Source at BLOCK_ALIGN+56 | | 8852 | !! Source at BLOCK_ALIGN+56 |
8852 | !! | | 8853 | !! |
8853 | !! We need to load 1 double by hand. | | 8854 | !! We need to load 1 double by hand. |
8854 | !! | | 8855 | !! |
8855 | L107: | | 8856 | L107: |
8856 | #ifdef RETURN_NAME | | 8857 | #ifdef RETURN_NAME |
8857 | sethi %hi(1f), %g1 | | 8858 | sethi %hi(1f), %g1 |
8858 | ba,pt %icc, 2f | | 8859 | ba,pt %icc, 2f |
8859 | or %g1, %lo(1f), %g1 | | 8860 | or %g1, %lo(1f), %g1 |
8860 | 1: | | 8861 | 1: |
8861 | .asciz "L107" | | 8862 | .asciz "L107" |
8862 | .align 8 | | 8863 | .align 8 |
8863 | 2: | | 8864 | 2: |
8864 | #endif | | 8865 | #endif |
8865 | fmovd %f0, %f12 | | 8866 | fmovd %f0, %f12 |
8866 | ldd [%o0], %f14 | | 8867 | ldd [%o0], %f14 |
8867 | inc 8, %o0 | | 8868 | inc 8, %o0 |
8868 | | | 8869 | |
8869 | cmp %o0, %o5 | | 8870 | cmp %o0, %o5 |
8870 | bleu,a,pn %icc, 2f | | 8871 | bleu,a,pn %icc, 2f |
8871 | ldda [%o0] ASI_BLK_P, %f16 | | 8872 | ldda [%o0] ASI_BLK_P, %f16 |
8872 | membar #Sync | | 8873 | membar #Sync |
8873 | 2: | | 8874 | 2: |
8874 | inc BLOCK_SIZE, %o0 | | 8875 | inc BLOCK_SIZE, %o0 |
8875 | 3: | | 8876 | 3: |
8876 | faligndata %f12, %f14, %f32 | | 8877 | faligndata %f12, %f14, %f32 |
8877 | cmp %o0, %o5 | | 8878 | cmp %o0, %o5 |
8878 | bleu,a,pn %icc, 2f | | 8879 | bleu,a,pn %icc, 2f |
8879 | ldda [%o0] ASI_BLK_P, %f48 | | 8880 | ldda [%o0] ASI_BLK_P, %f48 |
8880 | membar #Sync | | 8881 | membar #Sync |
8881 | 2: | | 8882 | 2: |
8882 | faligndata %f14, %f16, %f34 | | 8883 | faligndata %f14, %f16, %f34 |
8883 | dec BLOCK_SIZE, %o2 | | 8884 | dec BLOCK_SIZE, %o2 |
8884 | faligndata %f16, %f18, %f36 | | 8885 | faligndata %f16, %f18, %f36 |
8885 | inc BLOCK_SIZE, %o0 | | 8886 | inc BLOCK_SIZE, %o0 |
8886 | faligndata %f18, %f20, %f38 | | 8887 | faligndata %f18, %f20, %f38 |
8887 | faligndata %f20, %f22, %f40 | | 8888 | faligndata %f20, %f22, %f40 |
8888 | faligndata %f22, %f24, %f42 | | 8889 | faligndata %f22, %f24, %f42 |
8889 | faligndata %f24, %f26, %f44 | | 8890 | faligndata %f24, %f26, %f44 |
8890 | brlez,pn %o2, Lmemcpy_blockdone | | 8891 | brlez,pn %o2, Lmemcpy_blockdone |
8891 | faligndata %f26, %f28, %f46 | | 8892 | faligndata %f26, %f28, %f46 |
8892 | | | 8893 | |
8893 | stda %f32, [%o1] ASI_STORE | | 8894 | stda %f32, [%o1] ASI_STORE |
8894 | | | 8895 | |
8895 | faligndata %f28, %f30, %f32 | | 8896 | faligndata %f28, %f30, %f32 |
8896 | cmp %o0, %o5 | | 8897 | cmp %o0, %o5 |
8897 | bleu,a,pn %icc, 2f | | 8898 | bleu,a,pn %icc, 2f |
8898 | ldda [%o0] ASI_BLK_P, %f0 | | 8899 | ldda [%o0] ASI_BLK_P, %f0 |
8899 | membar #Sync | | 8900 | membar #Sync |
8900 | 2: | | 8901 | 2: |
8901 | faligndata %f30, %f48, %f34 | | 8902 | faligndata %f30, %f48, %f34 |
8902 | dec BLOCK_SIZE, %o2 | | 8903 | dec BLOCK_SIZE, %o2 |
8903 | faligndata %f48, %f50, %f36 | | 8904 | faligndata %f48, %f50, %f36 |
8904 | inc BLOCK_SIZE, %o1 | | 8905 | inc BLOCK_SIZE, %o1 |
8905 | faligndata %f50, %f52, %f38 | | 8906 | faligndata %f50, %f52, %f38 |
8906 | faligndata %f52, %f54, %f40 | | 8907 | faligndata %f52, %f54, %f40 |
8907 | inc BLOCK_SIZE, %o0 | | 8908 | inc BLOCK_SIZE, %o0 |
8908 | faligndata %f54, %f56, %f42 | | 8909 | faligndata %f54, %f56, %f42 |
8909 | faligndata %f56, %f58, %f44 | | 8910 | faligndata %f56, %f58, %f44 |
8910 | brlez,pn %o2, Lmemcpy_blockdone | | 8911 | brlez,pn %o2, Lmemcpy_blockdone |
8911 | faligndata %f58, %f60, %f46 | | 8912 | faligndata %f58, %f60, %f46 |
8912 | | | 8913 | |
8913 | stda %f32, [%o1] ASI_STORE | | 8914 | stda %f32, [%o1] ASI_STORE |
8914 | | | 8915 | |
8915 | faligndata %f60, %f62, %f32 | | 8916 | faligndata %f60, %f62, %f32 |
8916 | cmp %o0, %o5 | | 8917 | cmp %o0, %o5 |
8917 | bleu,a,pn %icc, 2f | | 8918 | bleu,a,pn %icc, 2f |
8918 | ldda [%o0] ASI_BLK_P, %f16 | | 8919 | ldda [%o0] ASI_BLK_P, %f16 |
8919 | membar #Sync | | 8920 | membar #Sync |
8920 | 2: | | 8921 | 2: |
8921 | faligndata %f62, %f0, %f34 | | 8922 | faligndata %f62, %f0, %f34 |
8922 | dec BLOCK_SIZE, %o2 | | 8923 | dec BLOCK_SIZE, %o2 |
8923 | faligndata %f0, %f2, %f36 | | 8924 | faligndata %f0, %f2, %f36 |
8924 | inc BLOCK_SIZE, %o1 | | 8925 | inc BLOCK_SIZE, %o1 |
8925 | faligndata %f2, %f4, %f38 | | 8926 | faligndata %f2, %f4, %f38 |
8926 | faligndata %f4, %f6, %f40 | | 8927 | faligndata %f4, %f6, %f40 |
8927 | inc BLOCK_SIZE, %o0 | | 8928 | inc BLOCK_SIZE, %o0 |
8928 | faligndata %f6, %f8, %f42 | | 8929 | faligndata %f6, %f8, %f42 |
8929 | faligndata %f8, %f10, %f44 | | 8930 | faligndata %f8, %f10, %f44 |
8930 | | | 8931 | |
8931 | brlez,pn %o2, Lmemcpy_blockdone | | 8932 | brlez,pn %o2, Lmemcpy_blockdone |
8932 | faligndata %f10, %f12, %f46 | | 8933 | faligndata %f10, %f12, %f46 |
8933 | | | 8934 | |
8934 | stda %f32, [%o1] ASI_STORE | | 8935 | stda %f32, [%o1] ASI_STORE |
8935 | ba 3b | | 8936 | ba 3b |
8936 | inc BLOCK_SIZE, %o1 | | 8937 | inc BLOCK_SIZE, %o1 |
8937 | | | 8938 | |
8938 | Lmemcpy_blockdone: | | 8939 | Lmemcpy_blockdone: |
8939 | inc BLOCK_SIZE, %o2 ! Fixup our overcommit | | 8940 | inc BLOCK_SIZE, %o2 ! Fixup our overcommit |
8940 | membar #Sync ! Finish any pending loads | | 8941 | membar #Sync ! Finish any pending loads |
8941 | #define FINISH_REG(f) \ | | 8942 | #define FINISH_REG(f) \ |
8942 | deccc 8, %o2; \ | | 8943 | deccc 8, %o2; \ |
8943 | bl,a Lmemcpy_blockfinish; \ | | 8944 | bl,a Lmemcpy_blockfinish; \ |
8944 | fmovd f, %f48; \ | | 8945 | fmovd f, %f48; \ |
8945 | std f, [%o1]; \ | | 8946 | std f, [%o1]; \ |
8946 | inc 8, %o1 | | 8947 | inc 8, %o1 |
8947 | | | 8948 | |
8948 | FINISH_REG(%f32) | | 8949 | FINISH_REG(%f32) |
8949 | FINISH_REG(%f34) | | 8950 | FINISH_REG(%f34) |
8950 | FINISH_REG(%f36) | | 8951 | FINISH_REG(%f36) |
8951 | FINISH_REG(%f38) | | 8952 | FINISH_REG(%f38) |
8952 | FINISH_REG(%f40) | | 8953 | FINISH_REG(%f40) |
8953 | FINISH_REG(%f42) | | 8954 | FINISH_REG(%f42) |
8954 | FINISH_REG(%f44) | | 8955 | FINISH_REG(%f44) |
8955 | FINISH_REG(%f46) | | 8956 | FINISH_REG(%f46) |
8956 | FINISH_REG(%f48) | | 8957 | FINISH_REG(%f48) |
8957 | #undef FINISH_REG | | 8958 | #undef FINISH_REG |
8958 | !! | | 8959 | !! |
8959 | !! The low 3 bits have the sub-word bits needed to be | | 8960 | !! The low 3 bits have the sub-word bits needed to be |
8960 | !! stored [because (x-8)&0x7 == x]. | | 8961 | !! stored [because (x-8)&0x7 == x]. |
8961 | !! | | 8962 | !! |
8962 | Lmemcpy_blockfinish: | | 8963 | Lmemcpy_blockfinish: |
8963 | brz,pn %o2, 2f ! 100% complete? | | 8964 | brz,pn %o2, 2f ! 100% complete? |
8964 | fmovd %f48, %f4 | | 8965 | fmovd %f48, %f4 |
8965 | cmp %o2, 8 ! Exactly 8 bytes? | | 8966 | cmp %o2, 8 ! Exactly 8 bytes? |
8966 | bz,a,pn CCCR, 2f | | 8967 | bz,a,pn CCCR, 2f |
8967 | std %f4, [%o1] | | 8968 | std %f4, [%o1] |
8968 | | | 8969 | |
8969 | btst 4, %o2 ! Word store? | | 8970 | btst 4, %o2 ! Word store? |
8970 | bz CCCR, 1f | | 8971 | bz CCCR, 1f |
8971 | nop | | 8972 | nop |
8972 | st %f4, [%o1] | | 8973 | st %f4, [%o1] |
8973 | inc 4, %o1 | | 8974 | inc 4, %o1 |
8974 | 1: | | 8975 | 1: |
8975 | btst 2, %o2 | | 8976 | btst 2, %o2 |
8976 | fzero %f0 | | 8977 | fzero %f0 |
8977 | bz 1f | | 8978 | bz 1f |
8978 | | | 8979 | |
8979 | mov -6, %o4 | | 8980 | mov -6, %o4 |
8980 | alignaddr %o1, %o4, %g0 | | 8981 | alignaddr %o1, %o4, %g0 |
8981 | | | 8982 | |
8982 | faligndata %f0, %f4, %f8 | | 8983 | faligndata %f0, %f4, %f8 |
8983 | | | 8984 | |
8984 | stda %f8, [%o1] ASI_FL16_P ! Store short | | 8985 | stda %f8, [%o1] ASI_FL16_P ! Store short |
8985 | inc 2, %o1 | | 8986 | inc 2, %o1 |
8986 | 1: | | 8987 | 1: |
8987 | btst 1, %o2 ! Byte aligned? | | 8988 | btst 1, %o2 ! Byte aligned? |
8988 | bz 2f | | 8989 | bz 2f |
8989 | | | 8990 | |
8990 | mov -7, %o0 ! Calculate dest - 7 | | 8991 | mov -7, %o0 ! Calculate dest - 7 |
8991 | alignaddr %o1, %o0, %g0 ! Calculate shift mask and dest. | | 8992 | alignaddr %o1, %o0, %g0 ! Calculate shift mask and dest. |
8992 | | | 8993 | |
8993 | faligndata %f0, %f4, %f8 ! Move 1st byte to low part of f8 | | 8994 | faligndata %f0, %f4, %f8 ! Move 1st byte to low part of f8 |
8994 | | | 8995 | |
8995 | stda %f8, [%o1] ASI_FL8_P ! Store 1st byte | | 8996 | stda %f8, [%o1] ASI_FL8_P ! Store 1st byte |
8996 | inc 1, %o1 ! Update address | | 8997 | inc 1, %o1 ! Update address |
8997 | 2: | | 8998 | 2: |
8998 | membar #Sync | | 8999 | membar #Sync |
8999 | #if 0 | | 9000 | #if 0 |
9000 | !! | | 9001 | !! |
9001 | !! verify copy success. | | 9002 | !! verify copy success. |
9002 | !! | | 9003 | !! |
9003 | | | 9004 | |
9004 | mov %i0, %o2 | | 9005 | mov %i0, %o2 |
9005 | mov %i1, %o4 | | 9006 | mov %i1, %o4 |
9006 | mov %i2, %l4 | | 9007 | mov %i2, %l4 |
9007 | 0: | | 9008 | 0: |
9008 | ldub [%o2], %o1 | | 9009 | ldub [%o2], %o1 |
9009 | inc %o2 | | 9010 | inc %o2 |
9010 | ldub [%o4], %o3 | | 9011 | ldub [%o4], %o3 |
9011 | inc %o4 | | 9012 | inc %o4 |
9012 | cmp %o3, %o1 | | 9013 | cmp %o3, %o1 |
9013 | bnz 1f | | 9014 | bnz 1f |
9014 | dec %l4 | | 9015 | dec %l4 |
9015 | brnz %l4, 0b | | 9016 | brnz %l4, 0b |
9016 | nop | | 9017 | nop |
9017 | ba 2f | | 9018 | ba 2f |
9018 | nop | | 9019 | nop |
9019 | | | 9020 | |
9020 | 1: | | 9021 | 1: |
9021 | set block_disable, %o0 | | 9022 | set block_disable, %o0 |
9022 | stx %o0, [%o0] | | 9023 | stx %o0, [%o0] |
9023 | | | 9024 | |
9024 | set 0f, %o0 | | 9025 | set 0f, %o0 |
9025 | call prom_printf | | 9026 | call prom_printf |
9026 | sub %i2, %l4, %o5 | | 9027 | sub %i2, %l4, %o5 |
9027 | set 1f, %o0 | | 9028 | set 1f, %o0 |
9028 | mov %i0, %o2 | | 9029 | mov %i0, %o2 |
9029 | mov %i1, %o1 | | 9030 | mov %i1, %o1 |
9030 | call prom_printf | | 9031 | call prom_printf |
9031 | mov %i2, %o3 | | 9032 | mov %i2, %o3 |
9032 | ta 1 | | 9033 | ta 1 |
9033 | .data | | 9034 | .data |
9034 | _ALIGN | | 9035 | _ALIGN |
9035 | 0: .asciz "block memcpy failed: %x@%p != %x@%p byte %d\r\n" | | 9036 | 0: .asciz "block memcpy failed: %x@%p != %x@%p byte %d\r\n" |
9036 | 1: .asciz "memcpy(%p, %p, %lx)\r\n" | | 9037 | 1: .asciz "memcpy(%p, %p, %lx)\r\n" |
9037 | _ALIGN | | 9038 | _ALIGN |
9038 | .text | | 9039 | .text |
9039 | 2: | | 9040 | 2: |
9040 | #endif | | 9041 | #endif |
9041 | #ifdef _KERNEL | | 9042 | #ifdef _KERNEL |
9042 | | | 9043 | |
9043 | /* | | 9044 | /* |
9044 | * Weve saved our possible fpstate, now disable the fpu | | 9045 | * Weve saved our possible fpstate, now disable the fpu |
9045 | * and continue with life. | | 9046 | * and continue with life. |
9046 | */ | | 9047 | */ |
9047 | RESTORE_FPU | | 9048 | RESTORE_FPU |
9048 | ret | | 9049 | ret |
9049 | restore %g1, 0, %o0 ! Return DEST for memcpy | | 9050 | restore %g1, 0, %o0 ! Return DEST for memcpy |
9050 | #endif | | 9051 | #endif |
9051 | retl | | 9052 | retl |
9052 | mov %g1, %o0 | | 9053 | mov %g1, %o0 |
9053 | #endif /* USE_BLOCK_STORE_LOAD */ | | 9054 | #endif /* USE_BLOCK_STORE_LOAD */ |
9054 | | | 9055 | |
9055 | | | 9056 | |
9056 | #if 1 | | 9057 | #if 1 |
9057 | /* | | 9058 | /* |
9058 | * XXXXXXXXXXXXXXXXXXXX | | 9059 | * XXXXXXXXXXXXXXXXXXXX |
9059 | * We need to make sure that this doesn't use floating point | | 9060 | * We need to make sure that this doesn't use floating point |
9060 | * before our trap handlers are installed or we could panic | | 9061 | * before our trap handlers are installed or we could panic |
9061 | * XXXXXXXXXXXXXXXXXXXX | | 9062 | * XXXXXXXXXXXXXXXXXXXX |
9062 | */ | | 9063 | */ |
9063 | /* | | 9064 | /* |
9064 | * memset(addr, c, len) | | 9065 | * memset(addr, c, len) |
9065 | * | | 9066 | * |
9066 | * We want to use VIS instructions if we're clearing out more than | | 9067 | * We want to use VIS instructions if we're clearing out more than |
9067 | * 256 bytes, but to do that we need to properly save and restore the | | 9068 | * 256 bytes, but to do that we need to properly save and restore the |
9068 | * FP registers. Unfortunately the code to do that in the kernel needs | | 9069 | * FP registers. Unfortunately the code to do that in the kernel needs |
9069 | * to keep track of the current owner of the FPU, hence the different | | 9070 | * to keep track of the current owner of the FPU, hence the different |
9070 | * code. | | 9071 | * code. |
9071 | * | | 9072 | * |
9072 | * XXXXX To produce more efficient code, we do not allow lengths | | 9073 | * XXXXX To produce more efficient code, we do not allow lengths |
9073 | * greater than 0x80000000000000000, which are negative numbers. | | 9074 | * greater than 0x80000000000000000, which are negative numbers. |
9074 | * This should not really be an issue since the VA hole should | | 9075 | * This should not really be an issue since the VA hole should |
9075 | * cause any such ranges to fail anyway. | | 9076 | * cause any such ranges to fail anyway. |
9076 | */ | | 9077 | */ |
9077 | ENTRY(memset) | | 9078 | ENTRY(memset)
9078 | ! %o0 = addr, %o1 = pattern, %o2 = len | | 9079 | ! %o0 = addr, %o1 = pattern, %o2 = len
9079 | mov %o0, %o4 ! Save original pointer | | 9080 | mov %o0, %o4 ! Save original pointer
9080 | | | 9081 |
9081 | Lmemset_internal: | | 9082 | Lmemset_internal:
9082 | btst 7, %o0 ! Word aligned? | | 9083 | btst 7, %o0 ! Word aligned?
9083 | bz,pn %xcc, 0f | | 9084 | bz,pn %xcc, 0f
9084 | nop | | 9085 | nop
9085 | inc %o0 | | 9086 | inc %o0
9086 | deccc %o2 ! Store up to 7 bytes | | 9087 | deccc %o2 ! Store up to 7 bytes
9087 | bge,a,pt CCCR, Lmemset_internal | | 9088 | bge,a,pt CCCR, Lmemset_internal
9088 | stb %o1, [%o0 - 1] | | 9089 | stb %o1, [%o0 - 1]
9089 | | | 9090 |
9090 | retl ! Duplicate Lmemset_done | | 9091 | retl ! Duplicate Lmemset_done
9091 | mov %o4, %o0 | | 9092 | mov %o4, %o0
9092 | 0: | | 9093 | 0:
9093 | /* | | 9094 | /*
9094 | * Duplicate the pattern so it fills 64-bits. | | 9095 | * Duplicate the pattern so it fills 64-bits. |
9095 | */ | | 9096 | */
9096 | andcc %o1, 0x0ff, %o1 ! No need to extend zero | | 9097 | andcc %o1, 0x0ff, %o1 ! No need to extend zero
9097 | bz,pt %icc, 1f | | 9098 | bz,pt %icc, 1f
9098 | sllx %o1, 8, %o3 ! sigh. all dependent insns. | | 9099 | sllx %o1, 8, %o3 ! sigh. all dependent insns.
9099 | or %o1, %o3, %o1 | | 9100 | or %o1, %o3, %o1
9100 | sllx %o1, 16, %o3 | | 9101 | sllx %o1, 16, %o3
9101 | or %o1, %o3, %o1 | | 9102 | or %o1, %o3, %o1
9102 | sllx %o1, 32, %o3 | | 9103 | sllx %o1, 32, %o3
9103 | or %o1, %o3, %o1 | | 9104 | or %o1, %o3, %o1
9104 | 1: | | 9105 | 1:
9105 | #ifdef USE_BLOCK_STORE_LOAD | | 9106 | #ifdef USE_BLOCK_STORE_LOAD
9106 | !! Now we are 64-bit aligned | | 9107 | !! Now we are 64-bit aligned
9107 | cmp %o2, 256 ! Use block clear if len >= 256 | | 9108 | cmp %o2, 256 ! Use block clear if len >= 256
9108 | bge,pt CCCR, Lmemset_block ! use block store insns | | 9109 | bge,pt CCCR, Lmemset_block ! use block store insns
9109 | #endif /* USE_BLOCK_STORE_LOAD */ | | 9110 | #endif /* USE_BLOCK_STORE_LOAD */
9110 | deccc 8, %o2 | | 9111 | deccc 8, %o2
9111 | Lmemset_longs: | | 9112 | Lmemset_longs:
9112 | bl,pn CCCR, Lmemset_cleanup ! Less than 8 bytes left | | 9113 | bl,pn CCCR, Lmemset_cleanup ! Less than 8 bytes left
9113 | nop | | 9114 | nop
9114 | 3: | | 9115 | 3:
9115 | inc 8, %o0 | | 9116 | inc 8, %o0
9116 | deccc 8, %o2 | | 9117 | deccc 8, %o2
9117 | bge,pt CCCR, 3b | | 9118 | bge,pt CCCR, 3b
9118 | stx %o1, [%o0 - 8] ! Do 1 longword at a time | | 9119 | stx %o1, [%o0 - 8] ! Do 1 longword at a time
9119 | | | 9120 |
9120 | /* | | 9121 | /*
9121 | * Len is in [-8..-1] where -8 => done, -7 => 1 byte to zero, | | 9122 | * Len is in [-8..-1] where -8 => done, -7 => 1 byte to zero,
9122 | * -6 => two bytes, etc. Mop up this remainder, if any. | | 9123 | * -6 => two bytes, etc. Mop up this remainder, if any.
9123 | */ | | 9124 | */
9124 | Lmemset_cleanup: | | 9125 | Lmemset_cleanup:
9125 | btst 4, %o2 | | 9126 | btst 4, %o2
9126 | bz,pt CCCR, 5f ! if (len & 4) { | | 9127 | bz,pt CCCR, 5f ! if (len & 4) {
9127 | nop | | 9128 | nop
9128 | stw %o1, [%o0] ! *(int *)addr = pattern; | | 9129 | stw %o1, [%o0] ! *(int *)addr = pattern;
9129 | inc 4, %o0 ! addr += 4; | | 9130 | inc 4, %o0 ! addr += 4;
9130 | 5: | | 9131 | 5:
9131 | btst 2, %o2 | | 9132 | btst 2, %o2
9132 | bz,pt CCCR, 7f ! if (len & 2) { | | 9133 | bz,pt CCCR, 7f ! if (len & 2) {
9133 | nop | | 9134 | nop
9134 | sth %o1, [%o0] ! *(short *)addr = pattern; | | 9135 | sth %o1, [%o0] ! *(short *)addr = pattern;
9135 | inc 2, %o0 ! addr += 2; | | 9136 | inc 2, %o0 ! addr += 2;
9136 | 7: | | 9137 | 7:
9137 | btst 1, %o2 | | 9138 | btst 1, %o2
9138 | bnz,a %icc, Lmemset_done ! if (len & 1) | | 9139 | bnz,a %icc, Lmemset_done ! if (len & 1)
9139 | stb %o1, [%o0] ! *addr = pattern; | | 9140 | stb %o1, [%o0] ! *addr = pattern;
9140 | Lmemset_done: | | 9141 | Lmemset_done:
9141 | retl | | 9142 | retl
9142 | mov %o4, %o0 ! Restore pointer for memset (ugh) | | 9143 | mov %o4, %o0 ! Restore pointer for memset (ugh)
9143 | | | 9144 |
9144 | #ifdef USE_BLOCK_STORE_LOAD | | 9145 | #ifdef USE_BLOCK_STORE_LOAD
9145 | Lmemset_block: | | 9146 | Lmemset_block:
9146 | sethi %hi(block_disable), %o3 | | 9147 | sethi %hi(block_disable), %o3
9147 | ldx [ %o3 + %lo(block_disable) ], %o3 | | 9148 | ldx [ %o3 + %lo(block_disable) ], %o3
9148 | brnz,pn %o3, Lmemset_longs | | 9149 | brnz,pn %o3, Lmemset_longs
9149 | !! Make sure our trap table is installed | | 9150 | !! Make sure our trap table is installed
9150 | set _C_LABEL(trapbase), %o5 | | 9151 | set _C_LABEL(trapbase), %o5
9151 | rdpr %tba, %o3 | | 9152 | rdpr %tba, %o3
9152 | sub %o3, %o5, %o3 | | 9153 | sub %o3, %o5, %o3
9153 | brnz,pn %o3, Lmemset_longs ! No, then don't use block load/store | | 9154 | brnz,pn %o3, Lmemset_longs ! No, then don't use block load/store
9154 | nop | | 9155 | nop
9155 | /* | | 9156 | /*
9156 | * Kernel: | | 9157 | * Kernel:
9157 | * | | 9158 | *
9158 | * Here we use VIS instructions to do a block clear of a page. | | 9159 | * Here we use VIS instructions to do a block clear of a page.
9159 | * But before we can do that we need to save and enable the FPU. | | 9160 | * But before we can do that we need to save and enable the FPU.
9160 | * The last owner of the FPU registers is fplwp, and | | 9161 | * The last owner of the FPU registers is fplwp, and
9161 | * fplwp->l_md.md_fpstate is the current fpstate. If that's not | | 9162 | * fplwp->l_md.md_fpstate is the current fpstate. If that's not
9162 | * null, call savefpstate() with it to store our current fp state. | | 9163 | * null, call savefpstate() with it to store our current fp state.
9163 | * | | 9164 | *
9164 | * Next, allocate an aligned fpstate on the stack. We will properly | | 9165 | * Next, allocate an aligned fpstate on the stack. We will properly
9165 | * nest calls on a particular stack so this should not be a problem. | | 9166 | * nest calls on a particular stack so this should not be a problem.
9166 | * | | 9167 | *
9167 | * Now we grab either curlwp (or if we're on the interrupt stack | | 9168 | * Now we grab either curlwp (or if we're on the interrupt stack
9168 | * lwp0). We stash its existing fpstate in a local register and | | 9169 | * lwp0). We stash its existing fpstate in a local register and
9169 | * put our new fpstate in curlwp->p_md.md_fpstate. We point | | 9170 | * put our new fpstate in curlwp->p_md.md_fpstate. We point
9170 | * fplwp at curlwp (or lwp0) and enable the FPU. | | 9171 | * fplwp at curlwp (or lwp0) and enable the FPU.
9171 | * | | 9172 | *
9172 | * If we are ever preempted, our FPU state will be saved in our | | 9173 | * If we are ever preempted, our FPU state will be saved in our
9173 | * fpstate. Then, when we're resumed and we take an FPDISABLED | | 9174 | * fpstate. Then, when we're resumed and we take an FPDISABLED
9174 | * trap, the trap handler will be able to fish our FPU state out | | 9175 | * trap, the trap handler will be able to fish our FPU state out
9175 | * of curlwp (or lwp0). | | 9176 | * of curlwp (or lwp0).
9176 | * | | 9177 | *
9177 | * On exiting this routine we undo the damage: restore the original | | 9178 | * On exiting this routine we undo the damage: restore the original
9178 | * pointer to curlwp->p_md.md_fpstate, clear our fplwp, and disable | | 9179 | * pointer to curlwp->p_md.md_fpstate, clear our fplwp, and disable
9179 | * the FPU. | | 9180 | * the FPU.
9180 | * | | 9181 | *
9181 | */ | | 9182 | */
9182 | | | 9183 |
9183 | ENABLE_FPU(0) | | 9184 | ENABLE_FPU(0)
9184 | | | 9185 |
9185 | !! We are now 8-byte aligned. We need to become 64-byte aligned. | | 9186 | !! We are now 8-byte aligned. We need to become 64-byte aligned.
9186 | btst 63, %i0 | | 9187 | btst 63, %i0
9187 | bz,pt CCCR, 2f | | 9188 | bz,pt CCCR, 2f
9188 | nop | | 9189 | nop
9189 | 1: | | 9190 | 1:
9190 | stx %i1, [%i0] | | 9191 | stx %i1, [%i0]
9191 | inc 8, %i0 | | 9192 | inc 8, %i0
9192 | btst 63, %i0 | | 9193 | btst 63, %i0
9193 | bnz,pt %xcc, 1b | | 9194 | bnz,pt %xcc, 1b
9194 | dec 8, %i2 | | 9195 | dec 8, %i2
9195 | | | 9196 |
9196 | 2: | | 9197 | 2:
9197 | brz %i1, 3f ! Skip the memory op | | 9198 | brz %i1, 3f ! Skip the memory op
9198 | fzero %f0 ! if pattern is 0 | | 9199 | fzero %f0 ! if pattern is 0
9199 | | | 9200 |
9200 | #ifdef _LP64 | | 9201 | #ifdef _LP64
9201 | stx %i1, [%i0] ! Flush this puppy to RAM | | 9202 | stx %i1, [%i0] ! Flush this puppy to RAM
9202 | membar #StoreLoad | | 9203 | membar #StoreLoad
9203 | ldd [%i0], %f0 | | 9204 | ldd [%i0], %f0
9204 | #else | | 9205 | #else
9205 | stw %i1, [%i0] ! Flush this puppy to RAM | | 9206 | stw %i1, [%i0] ! Flush this puppy to RAM
9206 | membar #StoreLoad | | 9207 | membar #StoreLoad
9207 | ld [%i0], %f0 | | 9208 | ld [%i0], %f0
9208 | fmovsa %icc, %f0, %f1 | | 9209 | fmovsa %icc, %f0, %f1
9209 | #endif | | 9210 | #endif
9210 | | | 9211 |
9211 | 3: | | 9212 | 3:
9212 | fmovd %f0, %f2 ! Duplicate the pattern | | 9213 | fmovd %f0, %f2 ! Duplicate the pattern
9213 | fmovd %f0, %f4 | | 9214 | fmovd %f0, %f4
9214 | fmovd %f0, %f6 | | 9215 | fmovd %f0, %f6
9215 | fmovd %f0, %f8 | | 9216 | fmovd %f0, %f8
9216 | fmovd %f0, %f10 | | 9217 | fmovd %f0, %f10
9217 | fmovd %f0, %f12 | | 9218 | fmovd %f0, %f12
9218 | fmovd %f0, %f14 | | 9219 | fmovd %f0, %f14
9219 | | | 9220 |
9220 | !! Remember: we were 8 bytes too far | | 9221 | !! Remember: we were 8 bytes too far
9221 | dec 56, %i2 ! Go one iteration too far | | 9222 | dec 56, %i2 ! Go one iteration too far
9222 | 5: | | 9223 | 5:
9223 | stda %f0, [%i0] ASI_STORE ! Store 64 bytes | | 9224 | stda %f0, [%i0] ASI_STORE ! Store 64 bytes
9224 | deccc BLOCK_SIZE, %i2 | | 9225 | deccc BLOCK_SIZE, %i2
9225 | bg,pt %icc, 5b | | 9226 | bg,pt %icc, 5b
9226 | inc BLOCK_SIZE, %i0 | | 9227 | inc BLOCK_SIZE, %i0
9227 | | | 9228 |
9228 | membar #Sync | | 9229 | membar #Sync
9229 | /* | | 9230 | /*
9230 | * We've saved our possible fpstate, now disable the fpu | | 9231 | * We've saved our possible fpstate, now disable the fpu
9231 | * and continue with life. | | 9232 | * and continue with life.
9232 | */ | | 9233 | */
9233 | RESTORE_FPU | | 9234 | RESTORE_FPU
9234 | addcc %i2, 56, %i2 ! Restore the count | | 9235 | addcc %i2, 56, %i2 ! Restore the count
9235 | ba,pt %xcc, Lmemset_longs ! Finish up the remainder | | 9236 | ba,pt %xcc, Lmemset_longs ! Finish up the remainder
9236 | restore | | 9237 | restore
9237 | #endif /* USE_BLOCK_STORE_LOAD */ | | 9238 | #endif /* USE_BLOCK_STORE_LOAD */
9238 | #endif | | 9239 | #endif |
9239 | | | 9240 | |
9240 | /* | | 9241 | /*
9241 | * kcopy() is exactly like bcopy except that it sets pcb_onfault such that | | 9242 | * kcopy() is exactly like bcopy except that it sets pcb_onfault such that
9242 | * when a fault occurs, it is able to return -1 to indicate this to the | | 9243 | * when a fault occurs, it is able to return -1 to indicate this to the
9243 | * caller. | | 9244 | * caller.
9244 | */ | | 9245 | */
9245 | ENTRY(kcopy) | | 9246 | ENTRY(kcopy)
9246 | #ifdef DEBUG | | 9247 | #ifdef DEBUG
9247 | set pmapdebug, %o4 | | 9248 | set pmapdebug, %o4
9248 | ld [%o4], %o4 | | 9249 | ld [%o4], %o4
9249 | btst 0x80, %o4 ! PDB_COPY | | 9250 | btst 0x80, %o4 ! PDB_COPY
9250 | bz,pt %icc, 3f | | 9251 | bz,pt %icc, 3f
9251 | nop | | 9252 | nop
9252 | save %sp, -CC64FSZ, %sp | | 9253 | save %sp, -CC64FSZ, %sp
9253 | mov %i0, %o1 | | 9254 | mov %i0, %o1
9254 | set 2f, %o0 | | 9255 | set 2f, %o0
9255 | mov %i1, %o2 | | 9256 | mov %i1, %o2
9256 | call printf | | 9257 | call printf
9257 | mov %i2, %o3 | | 9258 | mov %i2, %o3
9258 | ! ta 1; nop | | 9259 | ! ta 1; nop
9259 | restore | | 9260 | restore
9260 | .data | | 9261 | .data
9261 | 2: .asciz "kcopy(%p->%p,%x)\n" | | 9262 | 2: .asciz "kcopy(%p->%p,%x)\n"
9262 | _ALIGN | | 9263 | _ALIGN
9263 | .text | | 9264 | .text
9264 | 3: | | 9265 | 3:
9265 | #endif | | 9266 | #endif
9266 | sethi %hi(CPCB), %o5 ! cpcb->pcb_onfault = Lkcerr; | | 9267 | sethi %hi(CPCB), %o5 ! cpcb->pcb_onfault = Lkcerr;
9267 | LDPTR [%o5 + %lo(CPCB)], %o5 | | 9268 | LDPTR [%o5 + %lo(CPCB)], %o5
9268 | set Lkcerr, %o3 | | 9269 | set Lkcerr, %o3
9269 | LDPTR [%o5 + PCB_ONFAULT], %g1! save current onfault handler | | 9270 | LDPTR [%o5 + PCB_ONFAULT], %g1! save current onfault handler
9270 | membar #LoadStore | | 9271 | membar #LoadStore
9271 | STPTR %o3, [%o5 + PCB_ONFAULT] | | 9272 | STPTR %o3, [%o5 + PCB_ONFAULT]
9272 | membar #StoreStore|#StoreLoad | | 9273 | membar #StoreStore|#StoreLoad
9273 | | | 9274 |
9274 | cmp %o2, BCOPY_SMALL | | 9275 | cmp %o2, BCOPY_SMALL
9275 | Lkcopy_start: | | 9276 | Lkcopy_start:
9276 | bge,a Lkcopy_fancy ! if >= this many, go be fancy. | | 9277 | bge,a Lkcopy_fancy ! if >= this many, go be fancy.
9277 | btst 7, %o0 ! (part of being fancy) | | 9278 | btst 7, %o0 ! (part of being fancy)
9278 | | | 9279 |
9279 | /* | | 9280 | /*
9280 | * Not much to copy, just do it a byte at a time. | | 9281 | * Not much to copy, just do it a byte at a time.
9281 | */ | | 9282 | */
9282 | deccc %o2 ! while (--len >= 0) | | 9283 | deccc %o2 ! while (--len >= 0)
9283 | bl 1f | | 9284 | bl 1f
9284 | EMPTY | | 9285 | EMPTY
9285 | 0: | | 9286 | 0:
9286 | ldsb [%o0], %o4 ! *dst++ = *src++; | | 9287 | ldsb [%o0], %o4 ! *dst++ = *src++;
9287 | inc %o0 | | 9288 | inc %o0
9288 | stb %o4, [%o1] | | 9289 | stb %o4, [%o1]
9289 | deccc %o2 | | 9290 | deccc %o2
9290 | bge 0b | | 9291 | bge 0b
9291 | inc %o1 | | 9292 | inc %o1
9292 | 1: | | 9293 | 1:
9293 | membar #Sync ! Make sure all faults are processed | | 9294 | membar #Sync ! Make sure all faults are processed
9294 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9295 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9295 | membar #StoreStore|#StoreLoad | | 9296 | membar #StoreStore|#StoreLoad
9296 | retl | | 9297 | retl
9297 | clr %o0 | | 9298 | clr %o0
9298 | NOTREACHED | | 9299 | NOTREACHED
9299 | | | 9300 |
9300 | /* | | 9301 | /*
9301 | * Plenty of data to copy, so try to do it optimally. | | 9302 | * Plenty of data to copy, so try to do it optimally.
9302 | */ | | 9303 | */
9303 | Lkcopy_fancy: | | 9304 | Lkcopy_fancy:
9304 | ! check for common case first: everything lines up. | | 9305 | ! check for common case first: everything lines up.
9305 | ! btst 7, %o0 ! done already | | 9306 | ! btst 7, %o0 ! done already
9306 | bne 1f | | 9307 | bne 1f
9307 | EMPTY | | 9308 | EMPTY
9308 | btst 7, %o1 | | 9309 | btst 7, %o1
9309 | be,a Lkcopy_doubles | | 9310 | be,a Lkcopy_doubles
9310 | dec 8, %o2 ! if all lined up, len -= 8, goto kcopy_doubles | | 9311 | dec 8, %o2 ! if all lined up, len -= 8, goto kcopy_doubles
9311 | | | 9312 |
9312 | ! If the low bits match, we can make these line up. | | 9313 | ! If the low bits match, we can make these line up.
9313 | 1: | | 9314 | 1:
9314 | xor %o0, %o1, %o3 ! t = src ^ dst; | | 9315 | xor %o0, %o1, %o3 ! t = src ^ dst;
9315 | btst 1, %o3 ! if (t & 1) { | | 9316 | btst 1, %o3 ! if (t & 1) {
9316 | be,a 1f | | 9317 | be,a 1f
9317 | btst 1, %o0 ! [delay slot: if (src & 1)] | | 9318 | btst 1, %o0 ! [delay slot: if (src & 1)]
9318 | | | 9319 |
9319 | ! low bits do not match, must copy by bytes. | | 9320 | ! low bits do not match, must copy by bytes.
9320 | 0: | | 9321 | 0:
9321 | ldsb [%o0], %o4 ! do { | | 9322 | ldsb [%o0], %o4 ! do {
9322 | inc %o0 ! *dst++ = *src++; | | 9323 | inc %o0 ! *dst++ = *src++;
9323 | stb %o4, [%o1] | | 9324 | stb %o4, [%o1]
9324 | deccc %o2 | | 9325 | deccc %o2
9325 | bnz 0b ! } while (--len != 0); | | 9326 | bnz 0b ! } while (--len != 0);
9326 | inc %o1 | | 9327 | inc %o1
9327 | membar #Sync ! Make sure all traps are taken | | 9328 | membar #Sync ! Make sure all traps are taken
9328 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9329 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9329 | membar #StoreStore|#StoreLoad | | 9330 | membar #StoreStore|#StoreLoad
9330 | retl | | 9331 | retl
9331 | clr %o0 | | 9332 | clr %o0
9332 | NOTREACHED | | 9333 | NOTREACHED
9333 | | | 9334 |
9334 | ! lowest bit matches, so we can copy by words, if nothing else | | 9335 | ! lowest bit matches, so we can copy by words, if nothing else
9335 | 1: | | 9336 | 1:
9336 | be,a 1f ! if (src & 1) { | | 9337 | be,a 1f ! if (src & 1) {
9337 | btst 2, %o3 ! [delay slot: if (t & 2)] | | 9338 | btst 2, %o3 ! [delay slot: if (t & 2)]
9338 | | | 9339 |
9339 | ! although low bits match, both are 1: must copy 1 byte to align | | 9340 | ! although low bits match, both are 1: must copy 1 byte to align
9340 | ldsb [%o0], %o4 ! *dst++ = *src++; | | 9341 | ldsb [%o0], %o4 ! *dst++ = *src++;
9341 | inc %o0 | | 9342 | inc %o0
9342 | stb %o4, [%o1] | | 9343 | stb %o4, [%o1]
9343 | dec %o2 ! len--; | | 9344 | dec %o2 ! len--;
9344 | inc %o1 | | 9345 | inc %o1
9345 | btst 2, %o3 ! } [if (t & 2)] | | 9346 | btst 2, %o3 ! } [if (t & 2)]
9346 | 1: | | 9347 | 1:
9347 | be,a 1f ! if (t & 2) { | | 9348 | be,a 1f ! if (t & 2) {
9348 | btst 2, %o0 ! [delay slot: if (src & 2)] | | 9349 | btst 2, %o0 ! [delay slot: if (src & 2)]
9349 | dec 2, %o2 ! len -= 2; | | 9350 | dec 2, %o2 ! len -= 2;
9350 | 0: | | 9351 | 0:
9351 | ldsh [%o0], %o4 ! do { | | 9352 | ldsh [%o0], %o4 ! do {
9352 | inc 2, %o0 ! dst += 2, src += 2; | | 9353 | inc 2, %o0 ! dst += 2, src += 2;
9353 | sth %o4, [%o1] ! *(short *)dst = *(short *)src; | | 9354 | sth %o4, [%o1] ! *(short *)dst = *(short *)src;
9354 | deccc 2, %o2 ! } while ((len -= 2) >= 0); | | 9355 | deccc 2, %o2 ! } while ((len -= 2) >= 0);
9355 | bge 0b | | 9356 | bge 0b
9356 | inc 2, %o1 | | 9357 | inc 2, %o1
9357 | b Lkcopy_mopb ! goto mop_up_byte; | | 9358 | b Lkcopy_mopb ! goto mop_up_byte;
9358 | btst 1, %o2 ! } [delay slot: if (len & 1)] | | 9359 | btst 1, %o2 ! } [delay slot: if (len & 1)]
9359 | NOTREACHED | | 9360 | NOTREACHED
9360 | | | 9361 |
9361 | ! low two bits match, so we can copy by longwords | | 9362 | ! low two bits match, so we can copy by longwords
9362 | 1: | | 9363 | 1:
9363 | be,a 1f ! if (src & 2) { | | 9364 | be,a 1f ! if (src & 2) {
9364 | btst 4, %o3 ! [delay slot: if (t & 4)] | | 9365 | btst 4, %o3 ! [delay slot: if (t & 4)]
9365 | | | 9366 |
9366 | ! although low 2 bits match, they are 10: must copy one short to align | | 9367 | ! although low 2 bits match, they are 10: must copy one short to align
9367 | ldsh [%o0], %o4 ! *(short *)dst = *(short *)src; | | 9368 | ldsh [%o0], %o4 ! *(short *)dst = *(short *)src;
9368 | inc 2, %o0 ! dst += 2; | | 9369 | inc 2, %o0 ! dst += 2;
9369 | sth %o4, [%o1] | | 9370 | sth %o4, [%o1]
9370 | dec 2, %o2 ! len -= 2; | | 9371 | dec 2, %o2 ! len -= 2;
9371 | inc 2, %o1 ! src += 2; | | 9372 | inc 2, %o1 ! src += 2;
9372 | btst 4, %o3 ! } [if (t & 4)] | | 9373 | btst 4, %o3 ! } [if (t & 4)]
9373 | 1: | | 9374 | 1:
9374 | be,a 1f ! if (t & 4) { | | 9375 | be,a 1f ! if (t & 4) {
9375 | btst 4, %o0 ! [delay slot: if (src & 4)] | | 9376 | btst 4, %o0 ! [delay slot: if (src & 4)]
9376 | dec 4, %o2 ! len -= 4; | | 9377 | dec 4, %o2 ! len -= 4;
9377 | 0: | | 9378 | 0:
9378 | ld [%o0], %o4 ! do { | | 9379 | ld [%o0], %o4 ! do {
9379 | inc 4, %o0 ! dst += 4, src += 4; | | 9380 | inc 4, %o0 ! dst += 4, src += 4;
9380 | st %o4, [%o1] ! *(int *)dst = *(int *)src; | | 9381 | st %o4, [%o1] ! *(int *)dst = *(int *)src;
9381 | deccc 4, %o2 ! } while ((len -= 4) >= 0); | | 9382 | deccc 4, %o2 ! } while ((len -= 4) >= 0);
9382 | bge 0b | | 9383 | bge 0b
9383 | inc 4, %o1 | | 9384 | inc 4, %o1
9384 | b Lkcopy_mopw ! goto mop_up_word_and_byte; | | 9385 | b Lkcopy_mopw ! goto mop_up_word_and_byte;
9385 | btst 2, %o2 ! } [delay slot: if (len & 2)] | | 9386 | btst 2, %o2 ! } [delay slot: if (len & 2)]
9386 | NOTREACHED | | 9387 | NOTREACHED
9387 | | | 9388 |
9388 | ! low three bits match, so we can copy by doublewords | | 9389 | ! low three bits match, so we can copy by doublewords
9389 | 1: | | 9390 | 1:
9390 | be 1f ! if (src & 4) { | | 9391 | be 1f ! if (src & 4) {
9391 | dec 8, %o2 ! [delay slot: len -= 8] | | 9392 | dec 8, %o2 ! [delay slot: len -= 8]
9392 | ld [%o0], %o4 ! *(int *)dst = *(int *)src; | | 9393 | ld [%o0], %o4 ! *(int *)dst = *(int *)src;
9393 | inc 4, %o0 ! dst += 4, src += 4, len -= 4; | | 9394 | inc 4, %o0 ! dst += 4, src += 4, len -= 4;
9394 | st %o4, [%o1] | | 9395 | st %o4, [%o1]
9395 | dec 4, %o2 ! } | | 9396 | dec 4, %o2 ! }
9396 | inc 4, %o1 | | 9397 | inc 4, %o1
9397 | 1: | | 9398 | 1:
9398 | Lkcopy_doubles: | | 9399 | Lkcopy_doubles:
9399 | ldx [%o0], %g5 ! do { | | 9400 | ldx [%o0], %g5 ! do {
9400 | inc 8, %o0 ! dst += 8, src += 8; | | 9401 | inc 8, %o0 ! dst += 8, src += 8;
9401 | stx %g5, [%o1] ! *(double *)dst = *(double *)src; | | 9402 | stx %g5, [%o1] ! *(double *)dst = *(double *)src;
9402 | deccc 8, %o2 ! } while ((len -= 8) >= 0); | | 9403 | deccc 8, %o2 ! } while ((len -= 8) >= 0);
9403 | bge Lkcopy_doubles | | 9404 | bge Lkcopy_doubles
9404 | inc 8, %o1 | | 9405 | inc 8, %o1
9405 | | | 9406 |
9406 | ! check for a usual case again (save work) | | 9407 | ! check for a usual case again (save work)
9407 | btst 7, %o2 ! if ((len & 7) == 0) | | 9408 | btst 7, %o2 ! if ((len & 7) == 0)
9408 | be Lkcopy_done ! goto kcopy_done; | | 9409 | be Lkcopy_done ! goto kcopy_done;
9409 | | | 9410 |
9410 | btst 4, %o2 ! if ((len & 4) == 0) | | 9411 | btst 4, %o2 ! if ((len & 4) == 0)
9411 | be,a Lkcopy_mopw ! goto mop_up_word_and_byte; | | 9412 | be,a Lkcopy_mopw ! goto mop_up_word_and_byte;
9412 | btst 2, %o2 ! [delay slot: if (len & 2)] | | 9413 | btst 2, %o2 ! [delay slot: if (len & 2)]
9413 | ld [%o0], %o4 ! *(int *)dst = *(int *)src; | | 9414 | ld [%o0], %o4 ! *(int *)dst = *(int *)src;
9414 | inc 4, %o0 ! dst += 4; | | 9415 | inc 4, %o0 ! dst += 4;
9415 | st %o4, [%o1] | | 9416 | st %o4, [%o1]
9416 | inc 4, %o1 ! src += 4; | | 9417 | inc 4, %o1 ! src += 4;
9417 | btst 2, %o2 ! } [if (len & 2)] | | 9418 | btst 2, %o2 ! } [if (len & 2)]
9418 | | | 9419 |
9419 | 1: | | 9420 | 1:
9420 | ! mop up trailing word (if present) and byte (if present). | | 9421 | ! mop up trailing word (if present) and byte (if present).
9421 | Lkcopy_mopw: | | 9422 | Lkcopy_mopw:
9422 | be Lkcopy_mopb ! no word, go mop up byte | | 9423 | be Lkcopy_mopb ! no word, go mop up byte
9423 | btst 1, %o2 ! [delay slot: if (len & 1)] | | 9424 | btst 1, %o2 ! [delay slot: if (len & 1)]
9424 | ldsh [%o0], %o4 ! *(short *)dst = *(short *)src; | | 9425 | ldsh [%o0], %o4 ! *(short *)dst = *(short *)src;
9425 | be Lkcopy_done ! if ((len & 1) == 0) goto done; | | 9426 | be Lkcopy_done ! if ((len & 1) == 0) goto done;
9426 | sth %o4, [%o1] | | 9427 | sth %o4, [%o1]
9427 | ldsb [%o0 + 2], %o4 ! dst[2] = src[2]; | | 9428 | ldsb [%o0 + 2], %o4 ! dst[2] = src[2];
9428 | stb %o4, [%o1 + 2] | | 9429 | stb %o4, [%o1 + 2]
9429 | membar #Sync ! Make sure all traps are taken | | 9430 | membar #Sync ! Make sure all traps are taken
9430 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9431 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9431 | membar #StoreStore|#StoreLoad | | 9432 | membar #StoreStore|#StoreLoad
9432 | retl | | 9433 | retl
9433 | clr %o0 | | 9434 | clr %o0
9434 | NOTREACHED | | 9435 | NOTREACHED
9435 | | | 9436 |
9436 | ! mop up trailing byte (if present). | | 9437 | ! mop up trailing byte (if present).
9437 | Lkcopy_mopb: | | 9438 | Lkcopy_mopb:
9438 | bne,a 1f | | 9439 | bne,a 1f
9439 | ldsb [%o0], %o4 | | 9440 | ldsb [%o0], %o4
9440 | | | 9441 |
9441 | Lkcopy_done: | | 9442 | Lkcopy_done:
9442 | membar #Sync ! Make sure all traps are taken | | 9443 | membar #Sync ! Make sure all traps are taken
9443 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9444 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9444 | membar #StoreStore|#StoreLoad | | 9445 | membar #StoreStore|#StoreLoad
9445 | retl | | 9446 | retl
9446 | clr %o0 | | 9447 | clr %o0
9447 | NOTREACHED | | 9448 | NOTREACHED
9448 | | | 9449 |
9449 | 1: | | 9450 | 1:
9450 | stb %o4, [%o1] | | 9451 | stb %o4, [%o1]
9451 | membar #Sync ! Make sure all traps are taken | | 9452 | membar #Sync ! Make sure all traps are taken
9452 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9453 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9453 | membar #StoreStore|#StoreLoad | | 9454 | membar #StoreStore|#StoreLoad
9454 | retl | | 9455 | retl
9455 | clr %o0 | | 9456 | clr %o0
9456 | NOTREACHED | | 9457 | NOTREACHED
9457 | | | 9458 |
9458 | Lkcerr: | | 9459 | Lkcerr:
9459 | #ifdef DEBUG | | 9460 | #ifdef DEBUG
9460 | set pmapdebug, %o4 | | 9461 | set pmapdebug, %o4
9461 | ld [%o4], %o4 | | 9462 | ld [%o4], %o4
9462 | btst 0x80, %o4 ! PDB_COPY | | 9463 | btst 0x80, %o4 ! PDB_COPY
9463 | bz,pt %icc, 3f | | 9464 | bz,pt %icc, 3f
9464 | nop | | 9465 | nop
9465 | save %sp, -CC64FSZ, %sp | | 9466 | save %sp, -CC64FSZ, %sp
9466 | set 2f, %o0 | | 9467 | set 2f, %o0
9467 | call printf | | 9468 | call printf
9468 | nop | | 9469 | nop
9469 | ! ta 1; nop | | 9470 | ! ta 1; nop
9470 | restore | | 9471 | restore
9471 | .data | | 9472 | .data
9472 | 2: .asciz "kcopy error\n" | | 9473 | 2: .asciz "kcopy error\n"
9473 | _ALIGN | | 9474 | _ALIGN
9474 | .text | | 9475 | .text
9475 | 3: | | 9476 | 3:
9476 | #endif | | 9477 | #endif
9477 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler | | 9478 | STPTR %g1, [%o5 + PCB_ONFAULT]! restore fault handler
9478 | membar #StoreStore|#StoreLoad | | 9479 | membar #StoreStore|#StoreLoad
9479 | retl ! and return error indicator | | 9480 | retl ! and return error indicator
9480 | mov EFAULT, %o0 | | 9481 | mov EFAULT, %o0
9481 | NOTREACHED | | 9482 | NOTREACHED
9482 | | | 9483 | |
9483 | #ifdef MULTIPROCESSOR | | 9484 | #ifdef MULTIPROCESSOR |
9484 | /* | | 9485 | /*
9485 | * IPI handler to store the current FPU state. | | 9486 | * IPI handler to store the current FPU state.
9486 | * void sparc64_ipi_save_fpstate(void *); | | 9487 | * void sparc64_ipi_save_fpstate(void *);
9487 | * | | 9488 | *
9488 | * On entry: | | 9489 | * On entry:
9489 | * %g2 = lwp | | 9490 | * %g2 = lwp
9490 | */ | | 9491 | */
9491 | ENTRY(sparc64_ipi_save_fpstate) | | 9492 | ENTRY(sparc64_ipi_save_fpstate)
9492 | sethi %hi(FPLWP), %g1 | | 9493 | sethi %hi(FPLWP), %g1
9493 | LDPTR [%g1 + %lo(FPLWP)], %g3 | | 9494 | LDPTR [%g1 + %lo(FPLWP)], %g3
9494 | cmp %g3, %g2 | | 9495 | cmp %g3, %g2
9495 | bne,pn CCCR, 7f ! skip if fplwp has changed | | 9496 | bne,pn CCCR, 7f ! skip if fplwp has changed
9496 | | | 9497 |
9497 | rdpr %pstate, %g2 ! enable FP before we begin | | 9498 | rdpr %pstate, %g2 ! enable FP before we begin
9498 | rd %fprs, %g5 | | 9499 | rd %fprs, %g5
9499 | wr %g0, FPRS_FEF, %fprs | | 9500 | wr %g0, FPRS_FEF, %fprs
9500 | or %g2, PSTATE_PEF, %g2 | | 9501 | or %g2, PSTATE_PEF, %g2
9501 | wrpr %g2, 0, %pstate | | 9502 | wrpr %g2, 0, %pstate
9502 | | | 9503 |
9503 | LDPTR [%g3 + L_FPSTATE], %g3 | | 9504 | LDPTR [%g3 + L_FPSTATE], %g3
9504 | stx %fsr, [%g3 + FS_FSR] ! f->fs_fsr = getfsr(); | | 9505 | stx %fsr, [%g3 + FS_FSR] ! f->fs_fsr = getfsr();
9505 | rd %gsr, %g2 ! Save %gsr | | 9506 | rd %gsr, %g2 ! Save %gsr
9506 | st %g2, [%g3 + FS_GSR] | | 9507 | st %g2, [%g3 + FS_GSR]
9507 | #if FS_REGS > 0 | | 9508 | #if FS_REGS > 0
9508 | add %g3, FS_REGS, %g3 | | 9509 | add %g3, FS_REGS, %g3
9509 | #endif | | 9510 | #endif
9510 | #ifdef DIAGNOSTIC | | 9511 | #ifdef DIAGNOSTIC
9511 | btst BLOCK_ALIGN, %g3 ! Needs to be re-executed | | 9512 | btst BLOCK_ALIGN, %g3 ! Needs to be re-executed
9512 | bnz,pn %icc, 6f ! Check alignment | | 9513 | bnz,pn %icc, 6f ! Check alignment
9513 | #endif | | 9514 | #endif
9514 | st %g0, [%g3 + FS_QSIZE - FS_REGS] ! f->fs_qsize = 0; | | 9515 | st %g0, [%g3 + FS_QSIZE - FS_REGS] ! f->fs_qsize = 0;
9515 | btst FPRS_DL|FPRS_DU, %g5 ! Both FPU halves clean? | | 9516 | btst FPRS_DL|FPRS_DU, %g5 ! Both FPU halves clean?
9516 | bz,pt %icc, 5f ! Then skip it | | 9517 | bz,pt %icc, 5f ! Then skip it
9517 | | | 9518 |
9518 | mov CTX_PRIMARY, %g2 | | 9519 | mov CTX_PRIMARY, %g2
9519 | ldxa [%g2] ASI_DMMU, %g6 | | 9520 | ldxa [%g2] ASI_DMMU, %g6
9520 | membar #LoadStore | | 9521 | membar #LoadStore
9521 | stxa %g0, [%g2] ASI_DMMU ! Switch MMU to kernel primary context | | 9522 | stxa %g0, [%g2] ASI_DMMU ! Switch MMU to kernel primary context
9522 | membar #Sync | | 9523 | membar #Sync
9523 | | | 9524 |
9524 | btst FPRS_DL, %g5 ! Lower FPU clean? | | 9525 | btst FPRS_DL, %g5 ! Lower FPU clean?
9525 | bz,a,pt %icc, 1f ! Then skip it, but upper FPU not clean | | 9526 | bz,a,pt %icc, 1f ! Then skip it, but upper FPU not clean
9526 | add %g3, 2*BLOCK_SIZE, %g3 ! Skip a block | | 9527 | add %g3, 2*BLOCK_SIZE, %g3 ! Skip a block
9527 | | | 9528 |
9528 | stda %f0, [%g3] ASI_BLK_P ! f->fs_f0 = etc; | | 9529 | stda %f0, [%g3] ASI_BLK_P ! f->fs_f0 = etc;
9529 | inc BLOCK_SIZE, %g3 | | 9530 | inc BLOCK_SIZE, %g3
9530 | stda %f16, [%g3] ASI_BLK_P | | 9531 | stda %f16, [%g3] ASI_BLK_P
9531 | | | 9532 |
9532 | btst FPRS_DU, %g5 ! Upper FPU clean? | | 9533 | btst FPRS_DU, %g5 ! Upper FPU clean?
9533 | bz,pt %icc, 2f ! Then skip it | | 9534 | bz,pt %icc, 2f ! Then skip it
9534 | inc BLOCK_SIZE, %g3 | | 9535 | inc BLOCK_SIZE, %g3
9535 | 1: | | 9536 | 1:
9536 | stda %f32, [%g3] ASI_BLK_P | | 9537 | stda %f32, [%g3] ASI_BLK_P
9537 | inc BLOCK_SIZE, %g3 | | 9538 | inc BLOCK_SIZE, %g3
9538 | stda %f48, [%g3] ASI_BLK_P | | 9539 | stda %f48, [%g3] ASI_BLK_P
9539 | 2: | | 9540 | 2:
9540 | membar #Sync ! Wait for block stores to finish | | 9541 | membar #Sync ! Wait for block stores to finish
9541 | brz,pn %g6, 5f ! Skip if context 0 | | 9542 | brz,pn %g6, 5f ! Skip if context 0
9542 | nop | | 9543 | nop
9543 | stxa %g6, [%g2] ASI_DMMU ! Restore primary context | | 9544 | stxa %g6, [%g2] ASI_DMMU ! Restore primary context
9544 | membar #Sync | | 9545 | membar #Sync
9545 | 5: | | 9546 | 5:
9546 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean | | 9547 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean
9547 | STPTR %g0, [%g1 + %lo(FPLWP)] ! fplwp = NULL | | 9548 | STPTR %g0, [%g1 + %lo(FPLWP)] ! fplwp = NULL
9548 | 7: | | 9549 | 7:
9549 | IPIEVC_INC(IPI_EVCNT_FPU_SYNCH,%g2,%g3) | | 9550 | IPIEVC_INC(IPI_EVCNT_FPU_SYNCH,%g2,%g3)
9550 | ba,a ret_from_intr_vector | | 9551 | ba,a ret_from_intr_vector
9551 | nop | | 9552 | nop
9552 | | | 9553 |
9553 | #ifdef DIAGNOSTIC | | 9554 | #ifdef DIAGNOSTIC
9554 | !! | | 9555 | !!
9555 | !! Damn thing is *NOT* aligned on a 64-byte boundary | | 9556 | !! Damn thing is *NOT* aligned on a 64-byte boundary
9556 | !! | | 9557 | !!
9557 | 6: | | 9558 | 6:
9558 | wr %g0, FPRS_FEF, %fprs | | 9559 | wr %g0, FPRS_FEF, %fprs
9559 | ! XXX -- we should panic instead of silently entering debugger | | 9560 | ! XXX -- we should panic instead of silently entering debugger
9560 | ta 1 | | 9561 | ta 1
9561 | nop | | 9562 | nop
9562 | ba,a ret_from_intr_vector | | 9563 | ba,a ret_from_intr_vector
9563 | nop | | 9564 | nop
9564 | #endif | | 9565 | #endif
9565 | | | 9566 | |
9566 | /* | | 9567 | /*
9567 | * IPI handler to drop the current FPU state. | | 9568 | * IPI handler to drop the current FPU state.
9568 | * void sparc64_ipi_drop_fpstate(void *); | | 9569 | * void sparc64_ipi_drop_fpstate(void *);
9569 | * | | 9570 | *
9570 | * On entry: | | 9571 | * On entry:
9571 | * %g2 = lwp | | 9572 | * %g2 = lwp
9572 | */ | | 9573 | */
9573 | ENTRY(sparc64_ipi_drop_fpstate) | | 9574 | ENTRY(sparc64_ipi_drop_fpstate)
9574 | rdpr %pstate, %g1 ! enable FPU | | 9575 | rdpr %pstate, %g1 ! enable FPU
9575 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean | | 9576 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean
9576 | or %g1, PSTATE_PEF, %g1 | | 9577 | or %g1, PSTATE_PEF, %g1
9577 | wrpr %g1, 0, %pstate | | 9578 | wrpr %g1, 0, %pstate
9578 | set FPLWP, %g1 | | 9579 | set FPLWP, %g1
9579 | CASPTR [%g1] ASI_N, %g2, %g0 ! fplwp = NULL if fplwp == %g2 | | 9580 | CASPTR [%g1] ASI_N, %g2, %g0 ! fplwp = NULL if fplwp == %g2
9580 | IPIEVC_INC(IPI_EVCNT_FPU_FLUSH,%g2,%g3) | | 9581 | IPIEVC_INC(IPI_EVCNT_FPU_FLUSH,%g2,%g3)
9581 | ba,a ret_from_intr_vector | | 9582 | ba,a ret_from_intr_vector
9582 | nop | | 9583 | nop
9583 | #endif | | 9584 | #endif |
9584 | | | 9585 | |
9585 | /* | | 9586 | /*
9586 | * clearfpstate() | | 9587 | * clearfpstate()
9587 | * | | 9588 | *
9588 | * Drops the current fpu state, without saving it. | | 9589 | * Drops the current fpu state, without saving it.
9589 | */ | | 9590 | */
9590 | ENTRY(clearfpstate) | | 9591 | ENTRY(clearfpstate)
9591 | rdpr %pstate, %o1 ! enable FPU | | 9592 | rdpr %pstate, %o1 ! enable FPU
9592 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean | | 9593 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean
9593 | or %o1, PSTATE_PEF, %o1 | | 9594 | or %o1, PSTATE_PEF, %o1
9594 | retl | | 9595 | retl
9595 | wrpr %o1, 0, %pstate | | 9596 | wrpr %o1, 0, %pstate
9596 | | | 9597 | |
9597 | /* | | 9598 | /* |
9598 | * savefpstate(f) struct fpstate *f; | | 9599 | * savefpstate(f) struct fpstate *f; |
9599 | * | | 9600 | * |
9600 | * Store the current FPU state. | | 9601 | * Store the current FPU state. |
9601 | * | | 9602 | * |
9602 | * Since the kernel may need to use the FPU and we have problems atomically | | 9603 | * Since the kernel may need to use the FPU and we have problems atomically |
9603 | * testing and enabling the FPU, we leave here with the FPRS_FEF bit set. | | 9604 | * testing and enabling the FPU, we leave here with the FPRS_FEF bit set. |
9604 | * Normally this should be turned on in loadfpstate(). | | 9605 | * Normally this should be turned on in loadfpstate(). |
9605 | */ | | 9606 | */ |
9606 | /* XXXXXXXXXX Assume caller created a proper stack frame */ | | 9607 | /* XXXXXXXXXX Assume caller created a proper stack frame */ |
9607 | ENTRY(savefpstate) | | 9608 | ENTRY(savefpstate) |
9608 | ! flushw ! Make sure we don't have stack probs & lose hibits of %o | | 9609 | ! flushw ! Make sure we don't have stack probs & lose hibits of %o |
9609 | rdpr %pstate, %o1 ! enable FP before we begin | | 9610 | rdpr %pstate, %o1 ! enable FP before we begin |
9610 | rd %fprs, %o5 | | 9611 | rd %fprs, %o5 |
9611 | wr %g0, FPRS_FEF, %fprs | | 9612 | wr %g0, FPRS_FEF, %fprs |
9612 | or %o1, PSTATE_PEF, %o1 | | 9613 | or %o1, PSTATE_PEF, %o1 |
9613 | wrpr %o1, 0, %pstate | | 9614 | wrpr %o1, 0, %pstate |
9614 | | | 9615 | |
9615 | stx %fsr, [%o0 + FS_FSR] ! f->fs_fsr = getfsr(); | | 9616 | stx %fsr, [%o0 + FS_FSR] ! f->fs_fsr = getfsr(); |
9616 | rd %gsr, %o4 ! Save %gsr | | 9617 | rd %gsr, %o4 ! Save %gsr |
9617 | st %o4, [%o0 + FS_GSR] | | 9618 | st %o4, [%o0 + FS_GSR] |
9618 | | | 9619 | |
9619 | add %o0, FS_REGS, %o2 | | 9620 | add %o0, FS_REGS, %o2 |
9620 | #ifdef DIAGNOSTIC | | 9621 | #ifdef DIAGNOSTIC |
9621 | btst BLOCK_ALIGN, %o2 ! Needs to be re-executed | | 9622 | btst BLOCK_ALIGN, %o2 ! Needs to be re-executed |
9622 | bnz,pn %icc, 6f ! Check alignment | | 9623 | bnz,pn %icc, 6f ! Check alignment |
9623 | #endif | | 9624 | #endif |
9624 | st %g0, [%o0 + FS_QSIZE] ! f->fs_qsize = 0; | | 9625 | st %g0, [%o0 + FS_QSIZE] ! f->fs_qsize = 0; |
9625 | btst FPRS_DL|FPRS_DU, %o5 ! Both FPU halves clean? | | 9626 | btst FPRS_DL|FPRS_DU, %o5 ! Both FPU halves clean? |
9626 | bz,pt %icc, 5f ! Then skip it | | 9627 | bz,pt %icc, 5f ! Then skip it |
9627 | | | 9628 | |
9628 | btst FPRS_DL, %o5 ! Lower FPU clean? | | 9629 | btst FPRS_DL, %o5 ! Lower FPU clean? |
9629 | membar #Sync | | 9630 | membar #Sync |
9630 | bz,a,pt %icc, 1f ! Then skip it, but upper FPU not clean | | 9631 | bz,a,pt %icc, 1f ! Then skip it, but upper FPU not clean |
9631 | add %o2, 2*BLOCK_SIZE, %o2 ! Skip a block | | 9632 | add %o2, 2*BLOCK_SIZE, %o2 ! Skip a block |
9632 | | | 9633 | |
9633 | stda %f0, [%o2] ASI_BLK_P ! f->fs_f0 = etc; | | 9634 | stda %f0, [%o2] ASI_BLK_P ! f->fs_f0 = etc; |
9634 | inc BLOCK_SIZE, %o2 | | 9635 | inc BLOCK_SIZE, %o2 |
9635 | stda %f16, [%o2] ASI_BLK_P | | 9636 | stda %f16, [%o2] ASI_BLK_P |
9636 | | | 9637 | |
9637 | btst FPRS_DU, %o5 ! Upper FPU clean? | | 9638 | btst FPRS_DU, %o5 ! Upper FPU clean? |
9638 | bz,pt %icc, 2f ! Then skip it | | 9639 | bz,pt %icc, 2f ! Then skip it |
9639 | inc BLOCK_SIZE, %o2 | | 9640 | inc BLOCK_SIZE, %o2 |
9640 | 1: | | 9641 | 1: |
9641 | stda %f32, [%o2] ASI_BLK_P | | 9642 | stda %f32, [%o2] ASI_BLK_P |
9642 | inc BLOCK_SIZE, %o2 | | 9643 | inc BLOCK_SIZE, %o2 |
9643 | stda %f48, [%o2] ASI_BLK_P | | 9644 | stda %f48, [%o2] ASI_BLK_P |
9644 | 2: | | 9645 | 2: |
9645 | membar #Sync ! Finish operation so we can | | 9646 | membar #Sync ! Finish operation so we can |
9646 | 5: | | 9647 | 5: |
9647 | retl | | 9648 | retl |
9648 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean | | 9649 | wr %g0, FPRS_FEF, %fprs ! Mark FPU clean |
9649 | | | 9650 | |
9650 | #ifdef DIAGNOSTIC | | 9651 | #ifdef DIAGNOSTIC |
9651 | !! | | 9652 | !! |
9652 | !! Damn thing is *NOT* aligned on a 64-byte boundary | | 9653 | !! Damn thing is *NOT* aligned on a 64-byte boundary |
9653 | !! | | 9654 | !! |
9654 | 6: | | 9655 | 6: |
9655 | wr %g0, FPRS_FEF, %fprs | | 9656 | wr %g0, FPRS_FEF, %fprs |
9656 | ! XXX -- we should panic instead of silently entering debugger | | 9657 | ! XXX -- we should panic instead of silently entering debugger |
9657 | ta 1 | | 9658 | ta 1 |
9658 | retl | | 9659 | retl |
9659 | nop | | 9660 | nop |
9660 | #endif | | 9661 | #endif |
9661 | | | 9662 | |
9662 | /* | | 9663 | /* |
9663 | * Load FPU state. | | 9664 | * Load FPU state. |
9664 | */ | | 9665 | */ |
9665 | /* XXXXXXXXXX Should test to see if we only need to do a partial restore */ | | 9666 | /* XXXXXXXXXX Should test to see if we only need to do a partial restore */ |
9666 | ENTRY(loadfpstate) | | 9667 | ENTRY(loadfpstate) |
9667 | flushw ! Make sure we don't have stack probs & lose hibits of %o | | 9668 | flushw ! Make sure we don't have stack probs & lose hibits of %o |
9668 | rdpr %pstate, %o1 ! enable FP before we begin | | 9669 | rdpr %pstate, %o1 ! enable FP before we begin |
9669 | ld [%o0 + FS_GSR], %o4 ! Restore %gsr | | 9670 | ld [%o0 + FS_GSR], %o4 ! Restore %gsr |
9670 | set PSTATE_PEF, %o2 | | 9671 | set PSTATE_PEF, %o2 |
9671 | wr %g0, FPRS_FEF, %fprs | | 9672 | wr %g0, FPRS_FEF, %fprs |
9672 | or %o1, %o2, %o1 | | 9673 | or %o1, %o2, %o1 |
9673 | wrpr %o1, 0, %pstate | | 9674 | wrpr %o1, 0, %pstate |
9674 | ldx [%o0 + FS_FSR], %fsr ! setfsr(f->fs_fsr); | | 9675 | ldx [%o0 + FS_FSR], %fsr ! setfsr(f->fs_fsr); |
9675 | add %o0, FS_REGS, %o3 ! This is zero... | | 9676 | add %o0, FS_REGS, %o3 ! This is zero... |
9676 | #ifdef DIAGNOSTIC | | 9677 | #ifdef DIAGNOSTIC |
9677 | btst BLOCK_ALIGN, %o3 | | 9678 | btst BLOCK_ALIGN, %o3 |
9678 | bne,pn %icc, 1f ! Only use block loads on aligned blocks | | 9679 | bne,pn %icc, 1f ! Only use block loads on aligned blocks |
9679 | #endif | | 9680 | #endif |
9680 | wr %o4, %g0, %gsr | | 9681 | wr %o4, %g0, %gsr |
9681 | membar #Sync | | 9682 | membar #Sync |
9682 | ldda [%o3] ASI_BLK_P, %f0 | | 9683 | ldda [%o3] ASI_BLK_P, %f0 |
9683 | inc BLOCK_SIZE, %o3 | | 9684 | inc BLOCK_SIZE, %o3 |
9684 | ldda [%o3] ASI_BLK_P, %f16 | | 9685 | ldda [%o3] ASI_BLK_P, %f16 |
9685 | inc BLOCK_SIZE, %o3 | | 9686 | inc BLOCK_SIZE, %o3 |
9686 | ldda [%o3] ASI_BLK_P, %f32 | | 9687 | ldda [%o3] ASI_BLK_P, %f32 |
9687 | inc BLOCK_SIZE, %o3 | | 9688 | inc BLOCK_SIZE, %o3 |
9688 | ldda [%o3] ASI_BLK_P, %f48 | | 9689 | ldda [%o3] ASI_BLK_P, %f48 |
9689 | membar #Sync ! Make sure loads are complete | | 9690 | membar #Sync ! Make sure loads are complete |
9690 | retl | | 9691 | retl |
9691 | wr %g0, FPRS_FEF, %fprs ! Clear dirty bits | | 9692 | wr %g0, FPRS_FEF, %fprs ! Clear dirty bits |
9692 | | | 9693 | |
9693 | #ifdef DIAGNOSTIC | | 9694 | #ifdef DIAGNOSTIC |
9694 | !! | | 9695 | !! |
9695 | !! Damn thing is *NOT* aligned on a 64-byte boundary | | 9696 | !! Damn thing is *NOT* aligned on a 64-byte boundary |
9696 | !! | | 9697 | !! |
9697 | 1: | | 9698 | 1: |
9698 | wr %g0, FPRS_FEF, %fprs ! Clear dirty bits | | 9699 | wr %g0, FPRS_FEF, %fprs ! Clear dirty bits |
9699 | ! XXX -- we should panic instead of silently entering debugger | | 9700 | ! XXX -- we should panic instead of silently entering debugger |
9700 | ta 1 | | 9701 | ta 1 |
9701 | retl | | 9702 | retl |
9702 | nop | | 9703 | nop |
9703 | #endif | | 9704 | #endif |
9704 | | | 9705 | |
9705 | /* | | 9706 | /* |
9706 | * ienab_bis(bis) int bis; | | 9707 | * ienab_bis(bis) int bis; |
9707 | * ienab_bic(bic) int bic; | | 9708 | * ienab_bic(bic) int bic; |
9708 | * | | 9709 | * |
9709 | * Set and clear bits in the interrupt register. | | 9710 | * Set and clear bits in the interrupt register. |
9710 | */ | | 9711 | */ |
9711 | | | 9712 | |
9712 | /* | | 9713 | /* |
9713 | * sun4u has separate asr's for clearing/setting the interrupt mask. | | 9714 | * sun4u has separate asr's for clearing/setting the interrupt mask. |
9714 | */ | | 9715 | */ |
9715 | ENTRY(ienab_bis) | | 9716 | ENTRY(ienab_bis) |
9716 | retl | | 9717 | retl |
9717 | wr %o0, 0, SET_SOFTINT ! SET_SOFTINT | | 9718 | wr %o0, 0, SET_SOFTINT ! SET_SOFTINT |
9718 | | | 9719 | |
9719 | ENTRY(ienab_bic) | | 9720 | ENTRY(ienab_bic) |
9720 | retl | | 9721 | retl |
9721 | wr %o0, 0, CLEAR_SOFTINT ! CLEAR_SOFTINT | | 9722 | wr %o0, 0, CLEAR_SOFTINT ! CLEAR_SOFTINT |
9722 | | | 9723 | |
9723 | /* | | 9724 | /* |
9724 | * send_softint(cpu, level, intrhand) | | 9725 | * send_softint(cpu, level, intrhand) |
9725 | * | | 9726 | * |
9726 | * Send a softint with an intrhand pointer so we can cause a vectored | | 9727 | * Send a softint with an intrhand pointer so we can cause a vectored |
9727 | * interrupt instead of a polled interrupt. This does pretty much the same | | 9728 | * interrupt instead of a polled interrupt. This does pretty much the same |
9728 | * as interrupt_vector. If cpu is -1 then send it to this CPU, if it's -2 | | 9729 | * as interrupt_vector. If cpu is -1 then send it to this CPU, if it's -2 |
9729 | * send it to any CPU, otherwise send it to a particular CPU. | | 9730 | * send it to any CPU, otherwise send it to a particular CPU. |
9730 | * | | 9731 | * |
9731 | * XXXX Dispatching to different CPUs is not implemented yet. | | 9732 | * XXXX Dispatching to different CPUs is not implemented yet. |
9732 | */ | | 9733 | */ |
9733 | ENTRY(send_softint) | | 9734 | ENTRY(send_softint) |
9734 | rdpr %pstate, %g1 | | 9735 | rdpr %pstate, %g1 |
9735 | andn %g1, PSTATE_IE, %g2 ! clear PSTATE.IE | | 9736 | andn %g1, PSTATE_IE, %g2 ! clear PSTATE.IE |
9736 | wrpr %g2, 0, %pstate | | 9737 | wrpr %g2, 0, %pstate |
9737 | | | 9738 | |
9738 | sethi %hi(CPUINFO_VA+CI_INTRPENDING), %o3 | | 9739 | sethi %hi(CPUINFO_VA+CI_INTRPENDING), %o3 |
9739 | LDPTR [%o2 + IH_PEND], %o5 | | 9740 | LDPTR [%o2 + IH_PEND], %o5 |
9740 | or %o3, %lo(CPUINFO_VA+CI_INTRPENDING), %o3 | | 9741 | or %o3, %lo(CPUINFO_VA+CI_INTRPENDING), %o3 |
9741 | brnz %o5, 1f | | 9742 | brnz %o5, 1f |
9742 | sll %o1, PTRSHFT, %o5 ! Find start of table for this IPL | | 9743 | sll %o1, PTRSHFT, %o5 ! Find start of table for this IPL |
9743 | add %o3, %o5, %o3 | | 9744 | add %o3, %o5, %o3 |
9744 | 2: | | 9745 | 2: |
9745 | LDPTR [%o3], %o5 ! Load list head | | 9746 | LDPTR [%o3], %o5 ! Load list head |
9746 | STPTR %o5, [%o2+IH_PEND] ! Link our intrhand node in | | 9747 | STPTR %o5, [%o2+IH_PEND] ! Link our intrhand node in |
9747 | mov %o2, %o4 | | 9748 | mov %o2, %o4 |
9748 | CASPTR [%o3] ASI_N, %o5, %o4 | | 9749 | CASPTR [%o3] ASI_N, %o5, %o4 |
9749 | cmp %o4, %o5 ! Did it work? | | 9750 | cmp %o4, %o5 ! Did it work? |
9750 | bne,pn CCCR, 2b ! No, try again | | 9751 | bne,pn CCCR, 2b ! No, try again |
9751 | nop | | 9752 | EMPTY |
9752 | | | 9753 | |
9753 | mov 1, %o3 ! Change from level to bitmask | | 9754 | mov 1, %o4 ! Change from level to bitmask |
9754 | sllx %o3, %o1, %o3 | | 9755 | sllx %o4, %o1, %o4 |
9755 | wr %o3, 0, SET_SOFTINT ! SET_SOFTINT | | 9756 | wr %o4, 0, SET_SOFTINT ! SET_SOFTINT |
9756 | 1: | | 9757 | 1: |
9757 | retl | | 9758 | retl |
9758 | wrpr %g1, 0, %pstate ! restore PSTATE.IE | | 9759 | wrpr %g1, 0, %pstate ! restore PSTATE.IE |
9759 | | | 9760 | |
9760 | /* | | 9761 | /* |
9761 | * Here is a very good random number generator. This implementation is | | 9762 | * Here is a very good random number generator. This implementation is |
9762 | * based on _Two Fast Implementations of the `Minimal Standard' Random | | 9763 | * based on _Two Fast Implementations of the `Minimal Standard' Random |
9763 | * Number Generator_, David G. Carta, Communications of the ACM, Jan 1990, | | 9764 | * Number Generator_, David G. Carta, Communications of the ACM, Jan 1990, |
9764 | * Vol 33 No 1. | | 9765 | * Vol 33 No 1. |
9765 | */ | | 9766 | */ |
9766 | /* | | 9767 | /* |
9767 | * This should be rewritten using the mulx instr. if I ever understand what it | | 9768 | * This should be rewritten using the mulx instr. if I ever understand what it |
9768 | * does. | | 9769 | * does. |
9769 | */ | | 9770 | */ |
9770 | .data | | 9771 | .data |
9771 | randseed: | | 9772 | randseed: |
9772 | .word 1 | | 9773 | .word 1 |
9773 | .text | | 9774 | .text |
9774 | ENTRY(random) | | 9775 | ENTRY(random) |
9775 | sethi %hi(16807), %o1 | | 9776 | sethi %hi(16807), %o1 |
9776 | wr %o1, %lo(16807), %y | | 9777 | wr %o1, %lo(16807), %y |
9777 | sethi %hi(randseed), %o5 | | 9778 | sethi %hi(randseed), %o5 |
9778 | ld [%o5 + %lo(randseed)], %o0 | | 9779 | ld [%o5 + %lo(randseed)], %o0 |
9779 | andcc %g0, 0, %o2 | | 9780 | andcc %g0, 0, %o2 |
9780 | mulscc %o2, %o0, %o2 | | 9781 | mulscc %o2, %o0, %o2 |
9781 | mulscc %o2, %o0, %o2 | | 9782 | mulscc %o2, %o0, %o2 |
9782 | mulscc %o2, %o0, %o2 | | 9783 | mulscc %o2, %o0, %o2 |
9783 | mulscc %o2, %o0, %o2 | | 9784 | mulscc %o2, %o0, %o2 |
9784 | mulscc %o2, %o0, %o2 | | 9785 | mulscc %o2, %o0, %o2 |
9785 | mulscc %o2, %o0, %o2 | | 9786 | mulscc %o2, %o0, %o2 |
9786 | mulscc %o2, %o0, %o2 | | 9787 | mulscc %o2, %o0, %o2 |
9787 | mulscc %o2, %o0, %o2 | | 9788 | mulscc %o2, %o0, %o2 |
9788 | mulscc %o2, %o0, %o2 | | 9789 | mulscc %o2, %o0, %o2 |
9789 | mulscc %o2, %o0, %o2 | | 9790 | mulscc %o2, %o0, %o2 |
9790 | mulscc %o2, %o0, %o2 | | 9791 | mulscc %o2, %o0, %o2 |
9791 | mulscc %o2, %o0, %o2 | | 9792 | mulscc %o2, %o0, %o2 |
9792 | mulscc %o2, %o0, %o2 | | 9793 | mulscc %o2, %o0, %o2 |
9793 | mulscc %o2, %o0, %o2 | | 9794 | mulscc %o2, %o0, %o2 |
9794 | mulscc %o2, %o0, %o2 | | 9795 | mulscc %o2, %o0, %o2 |
9795 | mulscc %o2, %g0, %o2 | | 9796 | mulscc %o2, %g0, %o2 |
9796 | rd %y, %o3 | | 9797 | rd %y, %o3 |
9797 | srl %o2, 16, %o1 | | 9798 | srl %o2, 16, %o1 |
9798 | set 0xffff, %o4 | | 9799 | set 0xffff, %o4 |
9799 | and %o4, %o2, %o0 | | 9800 | and %o4, %o2, %o0 |
9800 | sll %o0, 15, %o0 | | 9801 | sll %o0, 15, %o0 |
9801 | srl %o3, 17, %o3 | | 9802 | srl %o3, 17, %o3 |
9802 | or %o3, %o0, %o0 | | 9803 | or %o3, %o0, %o0 |
9803 | addcc %o0, %o1, %o0 | | 9804 | addcc %o0, %o1, %o0 |
9804 | bneg 1f | | 9805 | bneg 1f |
9805 | sethi %hi(0x7fffffff), %o1 | | 9806 | sethi %hi(0x7fffffff), %o1 |
9806 | retl | | 9807 | retl |
9807 | st %o0, [%o5 + %lo(randseed)] | | 9808 | st %o0, [%o5 + %lo(randseed)] |
9808 | 1: | | 9809 | 1: |
9809 | or %o1, %lo(0x7fffffff), %o1 | | 9810 | or %o1, %lo(0x7fffffff), %o1 |
9810 | add %o0, 1, %o0 | | 9811 | add %o0, 1, %o0 |
9811 | and %o1, %o0, %o0 | | 9812 | and %o1, %o0, %o0 |
9812 | retl | | 9813 | retl |
9813 | st %o0, [%o5 + %lo(randseed)] | | 9814 | st %o0, [%o5 + %lo(randseed)] |
9814 | | | 9815 | |
9815 | | | 9816 | |
9816 | #define MICROPERSEC (1000000) | | 9817 | #define MICROPERSEC (1000000) |
9817 | | | 9818 | |
9818 | /* | | 9819 | /* |
9819 | * delay function | | 9820 | * delay function |
9820 | * | | 9821 | * |
9821 | * void delay(N) -- delay N microseconds | | 9822 | * void delay(N) -- delay N microseconds |
9822 | * | | 9823 | * |
9823 | * Register usage: %o0 = "N" number of usecs to go (counts down to zero) | | 9824 | * Register usage: %o0 = "N" number of usecs to go (counts down to zero) |
9824 | * %o1 = "timerblurb" (stays constant) | | 9825 | * %o1 = "timerblurb" (stays constant) |
9825 | * %o2 = counter for 1 usec (counts down from %o1 to zero) | | 9826 | * %o2 = counter for 1 usec (counts down from %o1 to zero) |
9826 | * | | 9827 | * |
9827 | * | | 9828 | * |
9828 | * ci_cpu_clockrate should be tuned during CPU probe to the CPU | | 9829 | * ci_cpu_clockrate should be tuned during CPU probe to the CPU |
9829 | * clockrate in Hz | | 9830 | * clockrate in Hz |
9830 | * | | 9831 | * |
9831 | */ | | 9832 | */ |
9832 | ENTRY(delay) ! %o0 = n | | 9833 | ENTRY(delay) ! %o0 = n |
9833 | #if 1 | | 9834 | #if 1 |
9834 | rdpr %tick, %o1 ! Take timer snapshot | | 9835 | rdpr %tick, %o1 ! Take timer snapshot |
9835 | sethi %hi(CPUINFO_VA + CI_CLOCKRATE), %o2 | | 9836 | sethi %hi(CPUINFO_VA + CI_CLOCKRATE), %o2 |
9836 | sethi %hi(MICROPERSEC), %o3 | | 9837 | sethi %hi(MICROPERSEC), %o3 |
9837 | ldx [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE + 8)], %o4 ! Get scale factor | | 9838 | ldx [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE + 8)], %o4 ! Get scale factor |
9838 | brnz,pt %o4, 0f | | 9839 | brnz,pt %o4, 0f |
9839 | or %o3, %lo(MICROPERSEC), %o3 | | 9840 | or %o3, %lo(MICROPERSEC), %o3 |
9840 | | | 9841 | |
9841 | !! Calculate ticks/usec | | 9842 | !! Calculate ticks/usec |
9842 | ldx [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE)], %o4 ! No, we need to calculate it | | 9843 | ldx [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE)], %o4 ! No, we need to calculate it |
9843 | udivx %o4, %o3, %o4 | | 9844 | udivx %o4, %o3, %o4 |
9844 | stx %o4, [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE + 8)] ! Save it so we don't need to divide again | | 9845 | stx %o4, [%o2 + %lo(CPUINFO_VA + CI_CLOCKRATE + 8)] ! Save it so we don't need to divide again |
9845 | 0: | | 9846 | 0: |
9846 | | | 9847 | |
9847 | mulx %o0, %o4, %o0 ! Convert usec -> ticks | | 9848 | mulx %o0, %o4, %o0 ! Convert usec -> ticks |
9848 | rdpr %tick, %o2 ! Top of next itr | | 9849 | rdpr %tick, %o2 ! Top of next itr |
9849 | 1: | | 9850 | 1: |
9850 | sub %o2, %o1, %o3 ! How many ticks have gone by? | | 9851 | sub %o2, %o1, %o3 ! How many ticks have gone by? |
9851 | sub %o0, %o3, %o4 ! Decrement count by that much | | 9852 | sub %o0, %o3, %o4 ! Decrement count by that much |
9852 | movrgz %o3, %o4, %o0 ! But only if we're decrementing | | 9853 | movrgz %o3, %o4, %o0 ! But only if we're decrementing |
9853 | mov %o2, %o1 ! Remember last tick | | 9854 | mov %o2, %o1 ! Remember last tick |
9854 | brgz,pt %o0, 1b ! Done? | | 9855 | brgz,pt %o0, 1b ! Done? |
9855 | rdpr %tick, %o2 ! Get new tick | | 9856 | rdpr %tick, %o2 ! Get new tick |
9856 | | | 9857 | |
9857 | retl | | 9858 | retl |
9858 | nop | | 9859 | nop |
9859 | #else | | 9860 | #else |
9860 | /* This code only works if %tick does not wrap */ | | 9861 | /* This code only works if %tick does not wrap */ |
9861 | rdpr %tick, %g1 ! Take timer snapshot | | 9862 | rdpr %tick, %g1 ! Take timer snapshot |
9862 | sethi %hi(CPUINFO_VA + CI_CLOCKRATE), %g2 | | 9863 | sethi %hi(CPUINFO_VA + CI_CLOCKRATE), %g2 |
9863 | sethi %hi(MICROPERSEC), %o2 | | 9864 | sethi %hi(MICROPERSEC), %o2 |
9864 | ldx [%g2 + %lo(CPUINFO_VA + CI_CLOCKRATE)], %g2 ! Get scale factor | | 9865 | ldx [%g2 + %lo(CPUINFO_VA + CI_CLOCKRATE)], %g2 ! Get scale factor |
9865 | or %o2, %lo(MICROPERSEC), %o2 | | 9866 | or %o2, %lo(MICROPERSEC), %o2 |
9866 | ! sethi %hi(_C_LABEL(timerblurb), %o5 ! This is if we plan to tune the clock | | 9867 | ! sethi %hi(_C_LABEL(timerblurb), %o5 ! This is if we plan to tune the clock |
9867 | ! ld [%o5 + %lo(_C_LABEL(timerblurb))], %o5 ! with respect to the counter/timer | | 9868 | ! ld [%o5 + %lo(_C_LABEL(timerblurb))], %o5 ! with respect to the counter/timer |
9868 | mulx %o0, %g2, %g2 ! Scale it: (usec * Hz) / 1 x 10^6 = ticks | | 9869 | mulx %o0, %g2, %g2 ! Scale it: (usec * Hz) / 1 x 10^6 = ticks |
9869 | udivx %g2, %o2, %g2 | | 9870 | udivx %g2, %o2, %g2 |
9870 | add %g1, %g2, %g2 | | 9871 | add %g1, %g2, %g2 |
9871 | ! add %o5, %g2, %g2 5, %g2, %g2 ! But this gets complicated | | 9872 | ! add %o5, %g2, %g2 5, %g2, %g2 ! But this gets complicated |
9872 | rdpr %tick, %g1 ! Top of next itr | | 9873 | rdpr %tick, %g1 ! Top of next itr |
9873 | mov %g1, %g1 ! Erratum 50 | | 9874 | mov %g1, %g1 ! Erratum 50 |
9874 | 1: | | 9875 | 1: |
9875 | cmp %g1, %g2 | | 9876 | cmp %g1, %g2 |
9876 | bl,a,pn %xcc, 1b ! Done? | | 9877 | bl,a,pn %xcc, 1b ! Done? |
9877 | rdpr %tick, %g1 | | 9878 | rdpr %tick, %g1 |
9878 | | | 9879 | |
9879 | retl | | 9880 | retl |
9880 | nop | | 9881 | nop |
9881 | #endif | | 9882 | #endif |
9882 | /* | | 9883 | /* |
9883 | * If something's wrong with the standard setup do this stupid loop | | 9884 | * If something's wrong with the standard setup do this stupid loop |
9884 | * calibrated for a 143MHz processor. | | 9885 | * calibrated for a 143MHz processor. |
9885 | */ | | 9886 | */ |
9886 | Lstupid_delay: | | 9887 | Lstupid_delay: |
9887 | set 142857143/MICROPERSEC, %o1 | | 9888 | set 142857143/MICROPERSEC, %o1 |
9888 | Lstupid_loop: | | 9889 | Lstupid_loop: |
9889 | brnz,pt %o1, Lstupid_loop | | 9890 | brnz,pt %o1, Lstupid_loop |
9890 | dec %o1 | | 9891 | dec %o1 |
9891 | brnz,pt %o0, Lstupid_delay | | 9892 | brnz,pt %o0, Lstupid_delay |
9892 | dec %o0 | | 9893 | dec %o0 |
9893 | retl | | 9894 | retl |
9894 | nop | | 9895 | nop |
9895 | | | 9896 | |
9896 | /* | | 9897 | /* |
9897 | * next_tick(long increment) | | 9898 | * next_tick(long increment) |
9898 | * | | 9899 | * |
9899 | * Sets the %tick_cmpr register to fire off in `increment' machine | | 9900 | * Sets the %tick_cmpr register to fire off in `increment' machine |
9900 | * cycles in the future. Also handles %tick wraparound. In 32-bit | | 9901 | * cycles in the future. Also handles %tick wraparound. In 32-bit |
9901 | * mode we're limited to a 32-bit increment. | | 9902 | * mode we're limited to a 32-bit increment. |
9902 | */ | | 9903 | */ |
9903 | ENTRY(next_tick) | | 9904 | ENTRY(next_tick) |
9904 | rd TICK_CMPR, %o2 | | 9905 | rd TICK_CMPR, %o2 |
9905 | rdpr %tick, %o1 | | 9906 | rdpr %tick, %o1 |
9906 | | | 9907 | |
9907 | mov 1, %o3 ! Mask off high bits of these registers | | 9908 | mov 1, %o3 ! Mask off high bits of these registers |
9908 | sllx %o3, 63, %o3 | | 9909 | sllx %o3, 63, %o3 |
9909 | andn %o1, %o3, %o1 | | 9910 | andn %o1, %o3, %o1 |
9910 | andn %o2, %o3, %o2 | | 9911 | andn %o2, %o3, %o2 |
9911 | cmp %o1, %o2 ! Did we wrap? (tick < tick_cmpr) | | 9912 | cmp %o1, %o2 ! Did we wrap? (tick < tick_cmpr) |
9912 | bgt,pt %icc, 1f | | 9913 | bgt,pt %icc, 1f |
9913 | add %o1, 1000, %o1 ! Need some slack so we don't lose intrs. | | 9914 | add %o1, 1000, %o1 ! Need some slack so we don't lose intrs. |
9914 | | | 9915 | |
9915 | /* | | 9916 | /* |
9916 | * Handle the unlikely case of %tick wrapping. | | 9917 | * Handle the unlikely case of %tick wrapping. |
9917 | * | | 9918 | * |
9918 | * This should only happen every 10 years or more. | | 9919 | * This should only happen every 10 years or more. |
9919 | * | | 9920 | * |
9920 | * We need to increment the time base by the size of %tick in | | 9921 | * We need to increment the time base by the size of %tick in |
9921 | * microseconds. This will require some divides and multiplies | | 9922 | * microseconds. This will require some divides and multiplies |
9922 | * which can take time. So we re-read %tick. | | 9923 | * which can take time. So we re-read %tick. |
9923 | * | | 9924 | * |
9924 | */ | | 9925 | */ |
9925 | | | 9926 | |
9926 | /* XXXXX NOT IMPLEMENTED */ | | 9927 | /* XXXXX NOT IMPLEMENTED */ |
9927 | | | 9928 | |
9928 | | | 9929 | |
9929 | | | 9930 | |
9930 | 1: | | 9931 | 1: |
9931 | add %o2, %o0, %o2 | | 9932 | add %o2, %o0, %o2 |
9932 | andn %o2, %o3, %o4 | | 9933 | andn %o2, %o3, %o4 |
9933 | brlz,pn %o4, Ltick_ovflw | | 9934 | brlz,pn %o4, Ltick_ovflw |
9934 | cmp %o2, %o1 ! Has this tick passed? | | 9935 | cmp %o2, %o1 ! Has this tick passed? |
9935 | blt,pn %xcc, 1b ! Yes | | 9936 | blt,pn %xcc, 1b ! Yes |
9936 | nop | | 9937 | nop |
9937 | | | 9938 | |
9938 | #ifdef BB_ERRATA_1 | | 9939 | #ifdef BB_ERRATA_1 |
9939 | ba,a 2f | | 9940 | ba,a 2f |
9940 | nop | | 9941 | nop |
9941 | #else | | 9942 | #else |
9942 | retl | | 9943 | retl |
9943 | wr %o2, TICK_CMPR | | 9944 | wr %o2, TICK_CMPR |
9944 | #endif | | 9945 | #endif |
9945 | | | 9946 | |
9946 | Ltick_ovflw: | | 9947 | Ltick_ovflw: |
9947 | /* | | 9948 | /* |
9948 | * When we get here tick_cmpr has wrapped, but we don't know if %tick | | 9949 | * When we get here tick_cmpr has wrapped, but we don't know if %tick |
9949 | * has wrapped. If bit 62 is set then we have not wrapped and we can | | 9950 | * has wrapped. If bit 62 is set then we have not wrapped and we can |
9950 | * use the current value of %o4 as %tick. Otherwise we need to return | | 9951 | * use the current value of %o4 as %tick. Otherwise we need to return |
9951 | * to our loop with %o4 as %tick_cmpr (%o2). | | 9952 | * to our loop with %o4 as %tick_cmpr (%o2). |
9952 | */ | | 9953 | */ |
9953 | srlx %o3, 1, %o5 | | 9954 | srlx %o3, 1, %o5 |
9954 | btst %o5, %o1 | | 9955 | btst %o5, %o1 |
9955 | bz,pn %xcc, 1b | | 9956 | bz,pn %xcc, 1b |
9956 | mov %o4, %o2 | | 9957 | mov %o4, %o2 |
9957 | #ifdef BB_ERRATA_1 | | 9958 | #ifdef BB_ERRATA_1 |
9958 | ba,a 2f | | 9959 | ba,a 2f |
9959 | nop | | 9960 | nop |
9960 | .align 64 | | 9961 | .align 64 |
9961 | 2: wr %o2, TICK_CMPR | | 9962 | 2: wr %o2, TICK_CMPR |
9962 | rd TICK_CMPR, %g0 | | 9963 | rd TICK_CMPR, %g0 |
9963 | retl | | 9964 | retl |
9964 | nop | | 9965 | nop |
9965 | #else | | 9966 | #else |
9966 | retl | | 9967 | retl |
9967 | wr %o2, TICK_CMPR | | 9968 | wr %o2, TICK_CMPR |
9968 | #endif | | 9969 | #endif |
9969 | | | 9970 | |
9970 | | | 9971 | |
9971 | ENTRY(setjmp) | | 9972 | ENTRY(setjmp) |
9972 | save %sp, -CC64FSZ, %sp ! Need a frame to return to. | | 9973 | save %sp, -CC64FSZ, %sp ! Need a frame to return to. |
9973 | flushw | | 9974 | flushw |
9974 | stx %fp, [%i0+0] ! 64-bit stack pointer | | 9975 | stx %fp, [%i0+0] ! 64-bit stack pointer |
9975 | stx %i7, [%i0+8] ! 64-bit return pc | | 9976 | stx %i7, [%i0+8] ! 64-bit return pc |
9976 | ret | | 9977 | ret |
9977 | restore %g0, 0, %o0 | | 9978 | restore %g0, 0, %o0 |
9978 | | | 9979 | |
9979 | .data | | 9980 | .data |
9980 | Lpanic_ljmp: | | 9981 | Lpanic_ljmp: |
9981 | .asciz "longjmp botch" | | 9982 | .asciz "longjmp botch" |
9982 | _ALIGN | | 9983 | _ALIGN |
9983 | .text | | 9984 | .text |
9984 | | | 9985 | |
9985 | ENTRY(longjmp) | | 9986 | ENTRY(longjmp) |
9986 | save %sp, -CC64FSZ, %sp ! prepare to restore to (old) frame | | 9987 | save %sp, -CC64FSZ, %sp ! prepare to restore to (old) frame |
9987 | flushw | | 9988 | flushw |
9988 | mov 1, %i2 | | 9989 | mov 1, %i2 |
9989 | ldx [%i0+0], %fp ! get return stack | | 9990 | ldx [%i0+0], %fp ! get return stack |
9990 | movrz %i1, %i1, %i2 ! compute v ? v : 1 | | 9991 | movrz %i1, %i1, %i2 ! compute v ? v : 1 |
9991 | ldx [%i0+8], %i7 ! get rpc | | 9992 | ldx [%i0+8], %i7 ! get rpc |
9992 | ret | | 9993 | ret |
9993 | restore %i2, 0, %o0 | | 9994 | restore %i2, 0, %o0 |
9994 | | | 9995 | |
9995 | #if defined(DDB) || defined(KGDB) | | 9996 | #if defined(DDB) || defined(KGDB) |
9996 | /* | | 9997 | /* |
9997 | * Debug stuff. Dump the trap registers into buffer & set tl=0. | | 9998 | * Debug stuff. Dump the trap registers into buffer & set tl=0. |
9998 | * | | 9999 | * |
9999 | * %o0 = *ts | | 10000 | * %o0 = *ts |
10000 | */ | | 10001 | */ |
10001 | ENTRY(savetstate) | | 10002 | ENTRY(savetstate) |
10002 | mov %o0, %o1 | | 10003 | mov %o0, %o1 |
10003 | CHKPT(%o4,%o3,0x28) | | 10004 | CHKPT(%o4,%o3,0x28) |
10004 | rdpr %tl, %o0 | | 10005 | rdpr %tl, %o0 |
10005 | brz %o0, 2f | | 10006 | brz %o0, 2f |
10006 | mov %o0, %o2 | | 10007 | mov %o0, %o2 |
10007 | 1: | | 10008 | 1: |
10008 | rdpr %tstate, %o3 | | 10009 | rdpr %tstate, %o3 |
10009 | stx %o3, [%o1] | | 10010 | stx %o3, [%o1] |
10010 | deccc %o2 | | 10011 | deccc %o2 |
10011 | inc 8, %o1 | | 10012 | inc 8, %o1 |
10012 | rdpr %tpc, %o4 | | 10013 | rdpr %tpc, %o4 |
10013 | stx %o4, [%o1] | | 10014 | stx %o4, [%o1] |
10014 | inc 8, %o1 | | 10015 | inc 8, %o1 |
10015 | rdpr %tnpc, %o5 | | 10016 | rdpr %tnpc, %o5 |
10016 | stx %o5, [%o1] | | 10017 | stx %o5, [%o1] |
10017 | inc 8, %o1 | | 10018 | inc 8, %o1 |
10018 | rdpr %tt, %o4 | | 10019 | rdpr %tt, %o4 |
10019 | stx %o4, [%o1] | | 10020 | stx %o4, [%o1] |
10020 | inc 8, %o1 | | 10021 | inc 8, %o1 |
10021 | bnz 1b | | 10022 | bnz 1b |
10022 | wrpr %o2, 0, %tl | | 10023 | wrpr %o2, 0, %tl |
10023 | 2: | | 10024 | 2: |
10024 | retl | | 10025 | retl |
10025 | nop | | 10026 | nop |
10026 | | | 10027 | |
10027 | /* | | 10028 | /* |
10028 | * Debug stuff. Resore trap registers from buffer. | | 10029 | * Debug stuff. Resore trap registers from buffer. |
10029 | * | | 10030 | * |
10030 | * %o0 = %tl | | 10031 | * %o0 = %tl |
10031 | * %o1 = *ts | | 10032 | * %o1 = *ts |
10032 | * | | 10033 | * |
10033 | * Maybe this should be re-written to increment tl instead of decrementing. | | 10034 | * Maybe this should be re-written to increment tl instead of decrementing. |
10034 | */ | | 10035 | */ |
10035 | ENTRY(restoretstate) | | 10036 | ENTRY(restoretstate) |
10036 | CHKPT(%o4,%o3,0x36) | | 10037 | CHKPT(%o4,%o3,0x36) |
10037 | flushw ! Make sure we don't have stack probs & lose hibits of %o | | 10038 | flushw ! Make sure we don't have stack probs & lose hibits of %o |
10038 | brz,pn %o0, 2f | | 10039 | brz,pn %o0, 2f |
10039 | mov %o0, %o2 | | 10040 | mov %o0, %o2 |
10040 | CHKPT(%o4,%o3,0x29) | | 10041 | CHKPT(%o4,%o3,0x29) |
10041 | wrpr %o0, 0, %tl | | 10042 | wrpr %o0, 0, %tl |
10042 | 1: | | 10043 | 1: |
10043 | ldx [%o1], %o3 | | 10044 | ldx [%o1], %o3 |
10044 | deccc %o2 | | 10045 | deccc %o2 |
10045 | inc 8, %o1 | | 10046 | inc 8, %o1 |
10046 | wrpr %o3, 0, %tstate | | 10047 | wrpr %o3, 0, %tstate |
10047 | ldx [%o1], %o4 | | 10048 | ldx [%o1], %o4 |
10048 | inc 8, %o1 | | 10049 | inc 8, %o1 |
10049 | wrpr %o4, 0, %tpc | | 10050 | wrpr %o4, 0, %tpc |
10050 | ldx [%o1], %o5 | | 10051 | ldx [%o1], %o5 |
10051 | inc 8, %o1 | | 10052 | inc 8, %o1 |
10052 | wrpr %o5, 0, %tnpc | | 10053 | wrpr %o5, 0, %tnpc |
10053 | ldx [%o1], %o4 | | 10054 | ldx [%o1], %o4 |
10054 | inc 8, %o1 | | 10055 | inc 8, %o1 |
10055 | wrpr %o4, 0, %tt | | 10056 | wrpr %o4, 0, %tt |
10056 | bnz 1b | | 10057 | bnz 1b |
10057 | wrpr %o2, 0, %tl | | 10058 | wrpr %o2, 0, %tl |
10058 | 2: | | 10059 | 2: |
10059 | CHKPT(%o4,%o3,0x30) | | 10060 | CHKPT(%o4,%o3,0x30) |
10060 | retl | | 10061 | retl |
10061 | wrpr %o0, 0, %tl | | 10062 | wrpr %o0, 0, %tl |
10062 | | | 10063 | |
10063 | /* | | 10064 | /* |
10064 | * Switch to context in abs(%o0) | | 10065 | * Switch to context in abs(%o0) |
10065 | */ | | 10066 | */ |
10066 | ENTRY(switchtoctx) | | 10067 | ENTRY(switchtoctx) |
10067 | #ifdef SPITFIRE | | 10068 | #ifdef SPITFIRE |
10068 | set DEMAP_CTX_SECONDARY, %o3 | | 10069 | set DEMAP_CTX_SECONDARY, %o3 |
10069 | stxa %o3, [%o3] ASI_DMMU_DEMAP | | 10070 | stxa %o3, [%o3] ASI_DMMU_DEMAP |
10070 | mov CTX_SECONDARY, %o4 | | 10071 | mov CTX_SECONDARY, %o4 |
10071 | stxa %o3, [%o3] ASI_IMMU_DEMAP | | 10072 | stxa %o3, [%o3] ASI_IMMU_DEMAP |
10072 | membar #Sync | | 10073 | membar #Sync |
10073 | stxa %o0, [%o4] ASI_DMMU ! Maybe we should invali | | 10074 | stxa %o0, [%o4] ASI_DMMU ! Maybe we should invali |
10074 | sethi %hi(KERNBASE), %o2 | | 10075 | sethi %hi(KERNBASE), %o2 |
10075 | membar #Sync | | 10076 | membar #Sync |
10076 | flush %o2 | | 10077 | flush %o2 |
10077 | retl | | 10078 | retl |
10078 | nop | | 10079 | nop |
10079 | #else | | 10080 | #else |
10080 | /* UNIMPLEMENTED */ | | 10081 | /* UNIMPLEMENTED */ |
10081 | retl | | 10082 | retl |
10082 | nop | | 10083 | nop |
10083 | #endif | | 10084 | #endif |
10084 | | | 10085 | |
10085 | #ifndef _LP64 | | 10086 | #ifndef _LP64 |
10086 | /* | | 10087 | /* |
10087 | * Convert to 32-bit stack then call OF_sym2val() | | 10088 | * Convert to 32-bit stack then call OF_sym2val() |
10088 | */ | | 10089 | */ |
10089 | ENTRY(OF_sym2val32) | | 10090 | ENTRY(OF_sym2val32) |
10090 | save %sp, -CC64FSZ, %sp | | 10091 | save %sp, -CC64FSZ, %sp |
10091 | btst 7, %i0 | | 10092 | btst 7, %i0 |
10092 | bnz,pn %icc, 1f | | 10093 | bnz,pn %icc, 1f |
10093 | add %sp, BIAS, %o1 | | 10094 | add %sp, BIAS, %o1 |
10094 | btst 1, %sp | | 10095 | btst 1, %sp |
10095 | movnz %icc, %o1, %sp | | 10096 | movnz %icc, %o1, %sp |
10096 | call _C_LABEL(OF_sym2val) | | 10097 | call _C_LABEL(OF_sym2val) |
10097 | mov %i0, %o0 | | 10098 | mov %i0, %o0 |
10098 | 1: | | 10099 | 1: |
10099 | ret | | 10100 | ret |
10100 | restore %o0, 0, %o0 | | 10101 | restore %o0, 0, %o0 |
10101 | | | 10102 | |
10102 | /* | | 10103 | /* |
10103 | * Convert to 32-bit stack then call OF_val2sym() | | 10104 | * Convert to 32-bit stack then call OF_val2sym() |
10104 | */ | | 10105 | */ |
10105 | ENTRY(OF_val2sym32) | | 10106 | ENTRY(OF_val2sym32) |
10106 | save %sp, -CC64FSZ, %sp | | 10107 | save %sp, -CC64FSZ, %sp |
10107 | btst 7, %i0 | | 10108 | btst 7, %i0 |
10108 | bnz,pn %icc, 1f | | 10109 | bnz,pn %icc, 1f |
10109 | add %sp, BIAS, %o1 | | 10110 | add %sp, BIAS, %o1 |
10110 | btst 1, %sp | | 10111 | btst 1, %sp |
10111 | movnz %icc, %o1, %sp | | 10112 | movnz %icc, %o1, %sp |
10112 | call _C_LABEL(OF_val2sym) | | 10113 | call _C_LABEL(OF_val2sym) |
10113 | mov %i0, %o0 | | 10114 | mov %i0, %o0 |
10114 | 1: | | 10115 | 1: |
10115 | ret | | 10116 | ret |
10116 | restore %o0, 0, %o0 | | 10117 | restore %o0, 0, %o0 |
10117 | #endif /* _LP64 */ | | 10118 | #endif /* _LP64 */ |
10118 | #endif /* DDB */ | | 10119 | #endif /* DDB */ |
10119 | | | 10120 | |
10120 | .data | | 10121 | .data |
10121 | _ALIGN | | 10122 | _ALIGN |
10122 | #if NKSYMS || defined(DDB) || defined(LKM) | | 10123 | #if NKSYMS || defined(DDB) || defined(LKM) |
10123 | .globl _C_LABEL(esym) | | 10124 | .globl _C_LABEL(esym) |
10124 | _C_LABEL(esym): | | 10125 | _C_LABEL(esym): |
10125 | POINTER 0 | | 10126 | POINTER 0 |
10126 | .globl _C_LABEL(ssym) | | 10127 | .globl _C_LABEL(ssym) |
10127 | _C_LABEL(ssym): | | 10128 | _C_LABEL(ssym): |
10128 | POINTER 0 | | 10129 | POINTER 0 |
10129 | #endif | | 10130 | #endif |
10130 | ! XXX should it called lwp0paddr | | 10131 | ! XXX should it called lwp0paddr |
10131 | .globl _C_LABEL(proc0paddr) | | 10132 | .globl _C_LABEL(proc0paddr) |
10132 | _C_LABEL(proc0paddr): | | 10133 | _C_LABEL(proc0paddr): |
10133 | POINTER 0 | | 10134 | POINTER 0 |
10134 | | | 10135 | |
10135 | .comm _C_LABEL(promvec), PTRSZ | | 10136 | .comm _C_LABEL(promvec), PTRSZ |
10136 | | | 10137 | |
10137 | #ifdef DEBUG | | 10138 | #ifdef DEBUG |
10138 | .comm _C_LABEL(trapdebug), 4 | | 10139 | .comm _C_LABEL(trapdebug), 4 |
10139 | .comm _C_LABEL(pmapdebug), 4 | | 10140 | .comm _C_LABEL(pmapdebug), 4 |
10140 | #endif | | 10141 | #endif |