| @@ -1,967 +1,974 @@ | | | @@ -1,967 +1,974 @@ |
1 | /* $NetBSD: trap_subr.S,v 1.12 2015/01/26 04:47:53 nonaka Exp $ */ | | 1 | /* $NetBSD: trap_subr.S,v 1.13 2020/07/06 10:16:12 rin Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects | | 7 | * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects |
8 | * Agency and which was developed by Matt Thomas of 3am Software Foundry. | | 8 | * Agency and which was developed by Matt Thomas of 3am Software Foundry. |
9 | * | | 9 | * |
10 | * This material is based upon work supported by the Defense Advanced Research | | 10 | * This material is based upon work supported by the Defense Advanced Research |
11 | * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under | | 11 | * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under |
12 | * Contract No. N66001-09-C-2073. | | 12 | * Contract No. N66001-09-C-2073. |
13 | * Approved for Public Release, Distribution Unlimited | | 13 | * Approved for Public Release, Distribution Unlimited |
14 | * | | 14 | * |
15 | * Redistribution and use in source and binary forms, with or without | | 15 | * Redistribution and use in source and binary forms, with or without |
16 | * modification, are permitted provided that the following conditions | | 16 | * modification, are permitted provided that the following conditions |
17 | * are met: | | 17 | * are met: |
18 | * 1. Redistributions of source code must retain the above copyright | | 18 | * 1. Redistributions of source code must retain the above copyright |
19 | * notice, this list of conditions and the following disclaimer. | | 19 | * notice, this list of conditions and the following disclaimer. |
20 | * 2. Redistributions in binary form must reproduce the above copyright | | 20 | * 2. Redistributions in binary form must reproduce the above copyright |
21 | * notice, this list of conditions and the following disclaimer in the | | 21 | * notice, this list of conditions and the following disclaimer in the |
22 | * documentation and/or other materials provided with the distribution. | | 22 | * documentation and/or other materials provided with the distribution. |
23 | * | | 23 | * |
24 | * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS | | 24 | * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS |
25 | * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED | | 25 | * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED |
26 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | | 26 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
27 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | | 27 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS |
28 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | | 28 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
29 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 29 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
30 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 30 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
31 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 31 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
32 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 32 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
33 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 33 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
34 | * POSSIBILITY OF SUCH DAMAGE. | | 34 | * POSSIBILITY OF SUCH DAMAGE. |
35 | */ | | 35 | */ |
36 | | | 36 | |
37 | RCSID("$NetBSD: trap_subr.S,v 1.12 2015/01/26 04:47:53 nonaka Exp $") | | 37 | RCSID("$NetBSD: trap_subr.S,v 1.13 2020/07/06 10:16:12 rin Exp $") |
| | | 38 | |
| | | 39 | #ifdef _KERNEL_OPT |
| | | 40 | #include "opt_altivec.h" |
| | | 41 | #include "opt_ddb.h" |
| | | 42 | #include "opt_mpc85xx.h" |
| | | 43 | #include "opt_multiprocessor.h" |
| | | 44 | #endif |
38 | | | 45 | |
39 | .globl _C_LABEL(sctrapexit), _C_LABEL(trapexit), _C_LABEL(intrcall) | | 46 | .globl _C_LABEL(sctrapexit), _C_LABEL(trapexit), _C_LABEL(intrcall) |
40 | | | 47 | |
41 | /* | | 48 | /* |
42 | * We have a problem with critical (MSR[CE] or machine check (MSR[ME]) | | 49 | * We have a problem with critical (MSR[CE] or machine check (MSR[ME]) |
43 | * or debug (MSR[DE]) interrupts/exception in that they could happen | | 50 | * or debug (MSR[DE]) interrupts/exception in that they could happen |
44 | * in between the mtsprg1 %r2 and mfsprg1 %r2. If that happens, %r2 | | 51 | * in between the mtsprg1 %r2 and mfsprg1 %r2. If that happens, %r2 |
45 | * will be lost. Even if we moved to a different sprg, subsequent | | 52 | * will be lost. Even if we moved to a different sprg, subsequent |
46 | * exceptions would use SPRG1 and its value would be lost. The only | | 53 | * exceptions would use SPRG1 and its value would be lost. The only |
47 | * way to be safe for CE/ME/DE faults is to save and restore SPRG1. | | 54 | * way to be safe for CE/ME/DE faults is to save and restore SPRG1. |
48 | * | | 55 | * |
49 | * Since CE/ME/DE faults may happen anytime, we need r1 to always | | 56 | * Since CE/ME/DE faults may happen anytime, we need r1 to always |
50 | * contain a valid kernel stack pointer. Therefore we use r2 as | | 57 | * contain a valid kernel stack pointer. Therefore we use r2 as |
51 | * our temporary register. | | 58 | * our temporary register. |
52 | * | | 59 | * |
53 | * To prevent %r2 being overwritten, each "level" (normal, critical, | | 60 | * To prevent %r2 being overwritten, each "level" (normal, critical, |
54 | * mchk) uses a unique sprg to save %r2 (sprg1, sprg4, sprg5). | | 61 | * mchk) uses a unique sprg to save %r2 (sprg1, sprg4, sprg5). |
55 | * | | 62 | * |
56 | * Since we can't control how many nested exceptions we might get, | | 63 | * Since we can't control how many nested exceptions we might get, |
57 | * we don't use a dedicated save area. Instead we have an upwards | | 64 | * we don't use a dedicated save area. Instead we have an upwards |
58 | * growing "stack" of them; the pointer to which is kept in sprg3. | | 65 | * growing "stack" of them; the pointer to which is kept in sprg3. |
59 | * | | 66 | * |
60 | * To allocate from the stack, one fetches sprg3, adds the amount | | 67 | * To allocate from the stack, one fetches sprg3, adds the amount |
61 | * needed, saves sprg3, and then refers to the save using a | | 68 | * needed, saves sprg3, and then refers to the save using a |
62 | * displacement of -amount. | | 69 | * displacement of -amount. |
63 | */ | | 70 | */ |
64 | #define FRAME_EXC_PROLOGUE(start, sprg, srr) \ | | 71 | #define FRAME_EXC_PROLOGUE(start, sprg, srr) \ |
65 | mt##sprg %r2; /* save r2 */ \ | | 72 | mt##sprg %r2; /* save r2 */ \ |
66 | mfsprg3 %r2; /* get save_area pointer */ \ | | 73 | mfsprg3 %r2; /* get save_area pointer */ \ |
67 | addi %r2,%r2,4*(32-start); \ | | 74 | addi %r2,%r2,4*(32-start); \ |
68 | /* allocate save area */ \ | | 75 | /* allocate save area */ \ |
69 | mtsprg3 %r2; /* save updated pointer */ \ | | 76 | mtsprg3 %r2; /* save updated pointer */ \ |
70 | stmw %r##start,-4*(32-start)(%r2); \ | | 77 | stmw %r##start,-4*(32-start)(%r2); \ |
71 | /* free r24-r31 for use */ \ | | 78 | /* free r24-r31 for use */ \ |
72 | mf##sprg %r26; /* get saved r2 */ \ | | 79 | mf##sprg %r26; /* get saved r2 */ \ |
73 | mfcr %r27; /* get Condition Register */ \ | | 80 | mfcr %r27; /* get Condition Register */ \ |
74 | mfxer %r28; /* get XER */ \ | | 81 | mfxer %r28; /* get XER */ \ |
75 | mfspr %r30, SPR_##srr##0; /* get SRR0 */ \ | | 82 | mfspr %r30, SPR_##srr##0; /* get SRR0 */ \ |
76 | mfspr %r31, SPR_##srr##1 /* get SRR1 */ | | 83 | mfspr %r31, SPR_##srr##1 /* get SRR1 */ |
77 | | | 84 | |
78 | #define PROLOGUE_GET_DEAR mfspr %r24, SPR_DEAR | | 85 | #define PROLOGUE_GET_DEAR mfspr %r24, SPR_DEAR |
79 | #define PROLOGUE_GET_ESR mfspr %r25, SPR_ESR | | 86 | #define PROLOGUE_GET_ESR mfspr %r25, SPR_ESR |
80 | #define PROLOGUE_GET_SRRS mfsrr0 %r24; \ | | 87 | #define PROLOGUE_GET_SRRS mfsrr0 %r24; \ |
81 | mfsrr1 %r25 | | 88 | mfsrr1 %r25 |
82 | #define PROLOGUE_GET_SPRG1 mfsprg1 %r29 | | 89 | #define PROLOGUE_GET_SPRG1 mfsprg1 %r29 |
83 | #define PROLOGUE_GET_DBSR mfspr %r25, SPR_DBSR | | 90 | #define PROLOGUE_GET_DBSR mfspr %r25, SPR_DBSR |
84 | #define SAVE_ESR stw %r25, FRAME_ESR(%r1) | | 91 | #define SAVE_ESR stw %r25, FRAME_ESR(%r1) |
85 | #define SAVE_DEAR stw %r24, FRAME_DEAR(%r1) | | 92 | #define SAVE_DEAR stw %r24, FRAME_DEAR(%r1) |
86 | #define SAVE_DEAR_ESR SAVE_ESR; SAVE_DEAR | | 93 | #define SAVE_DEAR_ESR SAVE_ESR; SAVE_DEAR |
87 | #define SAVE_SRRS SAVE_DEAR_ESR | | 94 | #define SAVE_SRRS SAVE_DEAR_ESR |
88 | #define SAVE_SPRG1 stw %r29, FRAME_SPRG1(%r1) | | 95 | #define SAVE_SPRG1 stw %r29, FRAME_SPRG1(%r1) |
89 | #define SAVE_DBSR stw %r25, FRAME_DBSR(%r1) | | 96 | #define SAVE_DBSR stw %r25, FRAME_DBSR(%r1) |
90 | #define SAVE_NOTHING /* nothing */ | | 97 | #define SAVE_NOTHING /* nothing */ |
91 | #define RESTORE_SPRG1(r) lwz r, FRAME_SPRG1(%r1); \ | | 98 | #define RESTORE_SPRG1(r) lwz r, FRAME_SPRG1(%r1); \ |
92 | mtsprg1 r | | 99 | mtsprg1 r |
93 | #define RESTORE_SRR0(r) lwz r, FRAME_DEAR(%r1); \ | | 100 | #define RESTORE_SRR0(r) lwz r, FRAME_DEAR(%r1); \ |
94 | mtsrr0 r | | 101 | mtsrr0 r |
95 | #define RESTORE_SRR1(r) lwz r, FRAME_ESR(%r1); \ | | 102 | #define RESTORE_SRR1(r) lwz r, FRAME_ESR(%r1); \ |
96 | mtsrr1 r | | 103 | mtsrr1 r |
97 | | | 104 | |
98 | #define FRAME_PROLOGUE \ | | 105 | #define FRAME_PROLOGUE \ |
99 | FRAME_EXC_PROLOGUE(26, sprg1, SRR) | | 106 | FRAME_EXC_PROLOGUE(26, sprg1, SRR) |
100 | | | 107 | |
101 | #define FRAME_PROLOGUE_DEAR_ESR \ | | 108 | #define FRAME_PROLOGUE_DEAR_ESR \ |
102 | FRAME_EXC_PROLOGUE(24, sprg1, SRR); \ | | 109 | FRAME_EXC_PROLOGUE(24, sprg1, SRR); \ |
103 | PROLOGUE_GET_ESR; \ | | 110 | PROLOGUE_GET_ESR; \ |
104 | PROLOGUE_GET_DEAR | | 111 | PROLOGUE_GET_DEAR |
105 | | | 112 | |
106 | #define FRAME_PROLOGUE_ESR \ | | 113 | #define FRAME_PROLOGUE_ESR \ |
107 | FRAME_EXC_PROLOGUE(25, sprg1, SRR); \ | | 114 | FRAME_EXC_PROLOGUE(25, sprg1, SRR); \ |
108 | PROLOGUE_GET_ESR | | 115 | PROLOGUE_GET_ESR |
109 | | | 116 | |
110 | #define FRAME_TLBPROLOGUE \ | | 117 | #define FRAME_TLBPROLOGUE \ |
111 | FRAME_EXC_PROLOGUE(20, sprg1, SRR); \ | | 118 | FRAME_EXC_PROLOGUE(20, sprg1, SRR); \ |
112 | PROLOGUE_GET_ESR; \ | | 119 | PROLOGUE_GET_ESR; \ |
113 | PROLOGUE_GET_DEAR | | 120 | PROLOGUE_GET_DEAR |
114 | | | 121 | |
115 | #define FRAME_INTR_PROLOGUE \ | | 122 | #define FRAME_INTR_PROLOGUE \ |
116 | FRAME_EXC_PROLOGUE(26, sprg1, SRR) | | 123 | FRAME_EXC_PROLOGUE(26, sprg1, SRR) |
117 | | | 124 | |
118 | /* | | 125 | /* |
119 | * These need to save SRR0/SRR1 as well as their SRR0/SRR1 in case normal | | 126 | * These need to save SRR0/SRR1 as well as their SRR0/SRR1 in case normal |
120 | * exceptions happened during their execution. | | 127 | * exceptions happened during their execution. |
121 | */ | | 128 | */ |
122 | #define FRAME_CRIT_PROLOGUE \ | | 129 | #define FRAME_CRIT_PROLOGUE \ |
123 | FRAME_EXC_PROLOGUE(24, sprg4, CSRR); \ | | 130 | FRAME_EXC_PROLOGUE(24, sprg4, CSRR); \ |
124 | PROLOGUE_GET_SPRG1; \ | | 131 | PROLOGUE_GET_SPRG1; \ |
125 | PROLOGUE_GET_SRRS | | 132 | PROLOGUE_GET_SRRS |
126 | | | 133 | |
127 | #define FRAME_MCHK_PROLOGUE \ | | 134 | #define FRAME_MCHK_PROLOGUE \ |
128 | FRAME_EXC_PROLOGUE(24, sprg5, MCSRR); \ | | 135 | FRAME_EXC_PROLOGUE(24, sprg5, MCSRR); \ |
129 | PROLOGUE_GET_SPRG1; \ | | 136 | PROLOGUE_GET_SPRG1; \ |
130 | PROLOGUE_GET_SRRS | | 137 | PROLOGUE_GET_SRRS |
131 | | | 138 | |
132 | #define FRAME_DEBUG_PROLOGUE \ | | 139 | #define FRAME_DEBUG_PROLOGUE \ |
133 | FRAME_EXC_PROLOGUE(24, sprg4, CSRR); \ | | 140 | FRAME_EXC_PROLOGUE(24, sprg4, CSRR); \ |
134 | PROLOGUE_GET_SPRG1; \ | | 141 | PROLOGUE_GET_SPRG1; \ |
135 | PROLOGUE_GET_SRRS | | 142 | PROLOGUE_GET_SRRS |
136 | | | 143 | |
137 | /* | | 144 | /* |
138 | * DDB expects to fetch the LR from the previous frame. But it also | | 145 | * DDB expects to fetch the LR from the previous frame. But it also |
139 | * expects to be pointing at the instruction after the branch link. Since | | 146 | * expects to be pointing at the instruction after the branch link. Since |
140 | * we didn't branch, we need to advance it to fake out DDB. But there's a | | 147 | * we didn't branch, we need to advance it to fake out DDB. But there's a |
141 | * problem. If the routine is in either its first or last two instructions | | 148 | * problem. If the routine is in either its first or last two instructions |
142 | * (before or after its adjusted its stack pointer), we could possibly | | 149 | * (before or after its adjusted its stack pointer), we could possibly |
143 | * overwrite stored return address. So that stored return address needs to | | 150 | * overwrite stored return address. So that stored return address needs to |
144 | * be saved and restored. | | 151 | * be saved and restored. |
145 | */ | | 152 | */ |
146 | #if defined(DDB) | | 153 | #if defined(DDB) |
147 | #define FRAME_SAVE_SRR0_FOR_DDB \ | | 154 | #define FRAME_SAVE_SRR0_FOR_DDB \ |
148 | lwz %r29, FRAMELEN+CFRAME_LR(%r1); /* fetch old return address */\ | | 155 | lwz %r29, FRAMELEN+CFRAME_LR(%r1); /* fetch old return address */\ |
149 | stw %r29, FRAME_CFRAME_LR(%r1); /* save it */ \ | | 156 | stw %r29, FRAME_CFRAME_LR(%r1); /* save it */ \ |
150 | addi %r30, %r30, 4; /* point to the next insn */ \ | | 157 | addi %r30, %r30, 4; /* point to the next insn */ \ |
151 | stw %r30, FRAMELEN+CFRAME_LR(%r1) /* appease ddb stacktrace */ | | 158 | stw %r30, FRAMELEN+CFRAME_LR(%r1) /* appease ddb stacktrace */ |
152 | #define FRAME_RESTORE_RETURN_ADDRESS \ | | 159 | #define FRAME_RESTORE_RETURN_ADDRESS \ |
153 | lwz %r3, FRAME_CFRAME_LR(%r1); /* fetch old return address */ \ | | 160 | lwz %r3, FRAME_CFRAME_LR(%r1); /* fetch old return address */ \ |
154 | stw %r3, FRAMELEN+CFRAME_LR(%r1) /* restore it */ | | 161 | stw %r3, FRAMELEN+CFRAME_LR(%r1) /* restore it */ |
155 | #else | | 162 | #else |
156 | #define FRAME_SAVE_SRR0_FOR_DDB | | 163 | #define FRAME_SAVE_SRR0_FOR_DDB |
157 | #define FRAME_RESTORE_RETURN_ADDRESS | | 164 | #define FRAME_RESTORE_RETURN_ADDRESS |
158 | #endif | | 165 | #endif |
159 | | | 166 | |
160 | #ifdef PPC_HAVE_SPE | | 167 | #ifdef PPC_HAVE_SPE |
161 | #define FRAME_SAVE_SPEFSCR \ | | 168 | #define FRAME_SAVE_SPEFSCR \ |
162 | mfspefscr %r0; /* get spefscr */ \ | | 169 | mfspefscr %r0; /* get spefscr */ \ |
163 | stw %r0, FRAME_SPEFSCR(%r1) /* save into trapframe */ | | 170 | stw %r0, FRAME_SPEFSCR(%r1) /* save into trapframe */ |
164 | #define FRAME_RESTORE_SPEFSCR \ | | 171 | #define FRAME_RESTORE_SPEFSCR \ |
165 | lwz %r0, FRAME_SPEFSCR(%r1); /* fetch from trapframe */ \ | | 172 | lwz %r0, FRAME_SPEFSCR(%r1); /* fetch from trapframe */ \ |
166 | mtspefscr %r0 /* save spefscr */ | | 173 | mtspefscr %r0 /* save spefscr */ |
167 | #else | | 174 | #else |
168 | #define FRAME_SAVE_SPEFSCR | | 175 | #define FRAME_SAVE_SPEFSCR |
169 | #define FRAME_RESTORE_SPEFSCR | | 176 | #define FRAME_RESTORE_SPEFSCR |
170 | #endif | | 177 | #endif |
171 | /* | | 178 | /* |
172 | * Before the first memory reference, we must have our state inside registers | | 179 | * Before the first memory reference, we must have our state inside registers |
173 | * since the first memory access might cause an exception which would cause | | 180 | * since the first memory access might cause an exception which would cause |
174 | * SRR0/SRR1 and DEAR/ESR to become unrecoverable. CR and XER also need to be | | 181 | * SRR0/SRR1 and DEAR/ESR to become unrecoverable. CR and XER also need to be |
175 | * saved early since they will be modified by instruction flow. The saved stack | | 182 | * saved early since they will be modified by instruction flow. The saved stack |
176 | * pointer is also critical but LR and CTR can be deferred being saved until | | 183 | * pointer is also critical but LR and CTR can be deferred being saved until |
177 | * we are actually filling a trapframe. | | 184 | * we are actually filling a trapframe. |
178 | */ | | 185 | */ |
179 | #define FRAME_EXC_ENTER(exc, tf, start, save_prologue) \ | | 186 | #define FRAME_EXC_ENTER(exc, tf, start, save_prologue) \ |
180 | mtcr %r31; /* user mode exception? */ \ | | 187 | mtcr %r31; /* user mode exception? */ \ |
181 | mr %r31, %r1; /* save SP (SRR1 is safe in CR) */ \ | | 188 | mr %r31, %r1; /* save SP (SRR1 is safe in CR) */ \ |
182 | bf MSR_PR, 1f; /* nope, sp is good */ \ | | 189 | bf MSR_PR, 1f; /* nope, sp is good */ \ |
183 | mfsprg2 %r2; /* get curlwp */ \ | | 190 | mfsprg2 %r2; /* get curlwp */ \ |
184 | lwz %r2, L_PCB(%r2); /* get uarea of curlwp */ \ | | 191 | lwz %r2, L_PCB(%r2); /* get uarea of curlwp */ \ |
185 | addi %r1, %r2, USPACE-CALLFRAMELEN; \ | | 192 | addi %r1, %r2, USPACE-CALLFRAMELEN; \ |
186 | /* start stack at top of it */ \ | | 193 | /* start stack at top of it */ \ |
187 | 1: \ | | 194 | 1: \ |
188 | stwu %r31, -FRAMELEN(%r1); /* get space for trapframe */ \ | | 195 | stwu %r31, -FRAMELEN(%r1); /* get space for trapframe */ \ |
189 | stw %r0, FRAME_R0(%r1); /* save r0 */ \ | | 196 | stw %r0, FRAME_R0(%r1); /* save r0 */ \ |
190 | stw %r31, FRAME_R1(%r1); /* save (saved) r1 */ \ | | 197 | stw %r31, FRAME_R1(%r1); /* save (saved) r1 */ \ |
191 | stw %r26, FRAME_R2(%r1); /* save (saved) r2 */ \ | | 198 | stw %r26, FRAME_R2(%r1); /* save (saved) r2 */ \ |
192 | save_prologue; /* save SPRG1/ESR/DEAR */ \ | | 199 | save_prologue; /* save SPRG1/ESR/DEAR */ \ |
193 | /* At this point, r26, r29, and r31 have been saved so we */ \ | | 200 | /* At this point, r26, r29, and r31 have been saved so we */ \ |
194 | /* can use them for LR, CTR, and SRR1. */ \ | | 201 | /* can use them for LR, CTR, and SRR1. */ \ |
195 | mflr %r26; /* get Link Register */ \ | | 202 | mflr %r26; /* get Link Register */ \ |
196 | mfctr %r29; /* get CTR */ \ | | 203 | mfctr %r29; /* get CTR */ \ |
197 | mfcr %r31; /* get SRR1 */ \ | | 204 | mfcr %r31; /* get SRR1 */ \ |
198 | stmw %r26, FRAME_LR(%r1); /* save LR CR XER CTR SRR0/1 */ \ | | 205 | stmw %r26, FRAME_LR(%r1); /* save LR CR XER CTR SRR0/1 */ \ |
199 | FRAME_SAVE_SRR0_FOR_DDB; \ | | 206 | FRAME_SAVE_SRR0_FOR_DDB; \ |
200 | mr %r0, %r31; /* save SRR1 for a bit */ \ | | 207 | mr %r0, %r31; /* save SRR1 for a bit */ \ |
201 | mfsprg3 %r2; /* get save_area pointer */ \ | | 208 | mfsprg3 %r2; /* get save_area pointer */ \ |
202 | addi %r2,%r2,-4*(32-start); /* find our save area */ \ | | 209 | addi %r2,%r2,-4*(32-start); /* find our save area */ \ |
203 | lmw %r##start,0(%r2); /* get start-r31 */ \ | | 210 | lmw %r##start,0(%r2); /* get start-r31 */ \ |
204 | mtsprg3 %r2; /* save updated pointer */ \ | | 211 | mtsprg3 %r2; /* save updated pointer */ \ |
205 | stmw %r3, FRAME_R3(%r1); /* save r2-r31 */ \ | | 212 | stmw %r3, FRAME_R3(%r1); /* save r2-r31 */ \ |
206 | /* Now everything has been saved */ \ | | 213 | /* Now everything has been saved */ \ |
207 | mr %r31, %r0; /* move SRR1 back to r31 */ \ | | 214 | mr %r31, %r0; /* move SRR1 back to r31 */ \ |
208 | mfsprg2 %r13; /* put curlwp in r13 */ \ | | 215 | mfsprg2 %r13; /* put curlwp in r13 */ \ |
209 | FRAME_SAVE_SPEFSCR; \ | | 216 | FRAME_SAVE_SPEFSCR; \ |
210 | li %r7, exc; /* load EXC_* */ \ | | 217 | li %r7, exc; /* load EXC_* */ \ |
211 | stw %r7, FRAME_EXC(%r1); /* save into trapframe */ \ | | 218 | stw %r7, FRAME_EXC(%r1); /* save into trapframe */ \ |
212 | addi tf, %r1, FRAME_TF /* get address of trap frame */ | | 219 | addi tf, %r1, FRAME_TF /* get address of trap frame */ |
213 | | | 220 | |
214 | #define FRAME_EXC_EXIT(rfi, srr) \ | | 221 | #define FRAME_EXC_EXIT(rfi, srr) \ |
215 | FRAME_RESTORE_RETURN_ADDRESS; /* restore return address */ \ | | 222 | FRAME_RESTORE_RETURN_ADDRESS; /* restore return address */ \ |
216 | lmw %r26, FRAME_LR(%r1); /* get LR CR XER CTR SRR0/1 */ \ | | 223 | lmw %r26, FRAME_LR(%r1); /* get LR CR XER CTR SRR0/1 */ \ |
217 | oris %r31,%r31,PSL_CE@h; \ | | 224 | oris %r31,%r31,PSL_CE@h; \ |
218 | mtspr SPR_##srr##1, %r31; /* restore SRR1 */ \ | | 225 | mtspr SPR_##srr##1, %r31; /* restore SRR1 */ \ |
219 | mtspr SPR_##srr##0, %r30; /* restore SRR0 */ \ | | 226 | mtspr SPR_##srr##0, %r30; /* restore SRR0 */ \ |
220 | FRAME_RESTORE_SPEFSCR; \ | | 227 | FRAME_RESTORE_SPEFSCR; \ |
221 | mtctr %r29; /* restore CTR */ \ | | 228 | mtctr %r29; /* restore CTR */ \ |
222 | mtxer %r28; /* restore XER */ \ | | 229 | mtxer %r28; /* restore XER */ \ |
223 | mtcr %r27; /* restore CR */ \ | | 230 | mtcr %r27; /* restore CR */ \ |
224 | mtlr %r26; /* restore LR */ \ | | 231 | mtlr %r26; /* restore LR */ \ |
225 | lmw %r2, FRAME_R2(%r1); /* restore r2-r31 */ \ | | 232 | lmw %r2, FRAME_R2(%r1); /* restore r2-r31 */ \ |
226 | lwz %r0, FRAME_R0(%r1); /* restore r0 */ \ | | 233 | lwz %r0, FRAME_R0(%r1); /* restore r0 */ \ |
227 | lwz %r1, FRAME_R1(%r1); /* restore r1 */ \ | | 234 | lwz %r1, FRAME_R1(%r1); /* restore r1 */ \ |
228 | rfi /* return from interrupt */ | | 235 | rfi /* return from interrupt */ |
229 | | | 236 | |
230 | | | 237 | |
231 | #define FRAME_ENTER(exc, tf) \ | | 238 | #define FRAME_ENTER(exc, tf) \ |
232 | FRAME_EXC_ENTER(exc, tf, 26, SAVE_NOTHING) | | 239 | FRAME_EXC_ENTER(exc, tf, 26, SAVE_NOTHING) |
233 | | | 240 | |
234 | #define FRAME_ENTER_ESR(exc, tf) \ | | 241 | #define FRAME_ENTER_ESR(exc, tf) \ |
235 | FRAME_EXC_ENTER(exc, tf, 25, SAVE_ESR) | | 242 | FRAME_EXC_ENTER(exc, tf, 25, SAVE_ESR) |
236 | | | 243 | |
237 | #define FRAME_ENTER_DEAR_ESR(exc, tf) \ | | 244 | #define FRAME_ENTER_DEAR_ESR(exc, tf) \ |
238 | FRAME_EXC_ENTER(exc, tf, 24, SAVE_DEAR_ESR) | | 245 | FRAME_EXC_ENTER(exc, tf, 24, SAVE_DEAR_ESR) |
239 | | | 246 | |
240 | #define FRAME_EXIT FRAME_EXC_EXIT(rfi, SRR) | | 247 | #define FRAME_EXIT FRAME_EXC_EXIT(rfi, SRR) |
241 | | | 248 | |
242 | #define FRAME_TLBENTER(exc) \ | | 249 | #define FRAME_TLBENTER(exc) \ |
243 | FRAME_EXC_ENTER(exc, %r4, 20, SAVE_DEAR_ESR) | | 250 | FRAME_EXC_ENTER(exc, %r4, 20, SAVE_DEAR_ESR) |
244 | #define FRAME_TLBEXIT FRAME_EXC_EXIT(rfi, SRR) | | 251 | #define FRAME_TLBEXIT FRAME_EXC_EXIT(rfi, SRR) |
245 | | | 252 | |
246 | #define FRAME_MCHK_ENTER(exc) \ | | 253 | #define FRAME_MCHK_ENTER(exc) \ |
247 | FRAME_EXC_ENTER(exc, %r3, 26, SAVE_SPRG1; SAVE_SRRS) | | 254 | FRAME_EXC_ENTER(exc, %r3, 26, SAVE_SPRG1; SAVE_SRRS) |
248 | #define FRAME_MCHK_EXIT \ | | 255 | #define FRAME_MCHK_EXIT \ |
249 | RESTORE_SRR0(%r28); \ | | 256 | RESTORE_SRR0(%r28); \ |
250 | RESTORE_SRR1(%r27); \ | | 257 | RESTORE_SRR1(%r27); \ |
251 | RESTORE_SPRG1(%r26); \ | | 258 | RESTORE_SPRG1(%r26); \ |
252 | FRAME_EXC_EXIT(rfmci, MCSRR) | | 259 | FRAME_EXC_EXIT(rfmci, MCSRR) |
253 | | | 260 | |
254 | #define FRAME_DEBUG_ENTER(exc) \ | | 261 | #define FRAME_DEBUG_ENTER(exc) \ |
255 | FRAME_EXC_ENTER(exc, %r4, 26, SAVE_SPRG1; SAVE_SRRS) | | 262 | FRAME_EXC_ENTER(exc, %r4, 26, SAVE_SPRG1; SAVE_SRRS) |
256 | #define FRAME_DEBUG_EXIT \ | | 263 | #define FRAME_DEBUG_EXIT \ |
257 | RESTORE_SPRG1(%r26); FRAME_EXC_EXIT(rfci, CSRR) | | 264 | RESTORE_SPRG1(%r26); FRAME_EXC_EXIT(rfci, CSRR) |
258 | | | 265 | |
259 | #define FRAME_INTR_SP \ | | 266 | #define FRAME_INTR_SP \ |
260 | bf MSR_PR, 1f; /* nope, sp is good */ \ | | 267 | bf MSR_PR, 1f; /* nope, sp is good */ \ |
261 | mfsprg2 %r2; /* get curlwp */ \ | | 268 | mfsprg2 %r2; /* get curlwp */ \ |
262 | lwz %r2, L_PCB(%r2); /* get uarea of curlwp */ \ | | 269 | lwz %r2, L_PCB(%r2); /* get uarea of curlwp */ \ |
263 | addi %r1, %r2, USPACE-CALLFRAMELEN; \ | | 270 | addi %r1, %r2, USPACE-CALLFRAMELEN; \ |
264 | /* start stack at top of it */ \ | | 271 | /* start stack at top of it */ \ |
265 | 1: | | 272 | 1: |
266 | | | 273 | |
267 | #define FRAME_INTR_SP_NEW(sym) \ | | 274 | #define FRAME_INTR_SP_NEW(sym) \ |
268 | lis %r2,(sym)@ha; \ | | 275 | lis %r2,(sym)@ha; \ |
269 | addi %r1,%r2,(sym)@l | | 276 | addi %r1,%r2,(sym)@l |
270 | | | 277 | |
271 | #define FRAME_INTR_XENTER(exc, start, get_intr_sp, save_prologue) \ | | 278 | #define FRAME_INTR_XENTER(exc, start, get_intr_sp, save_prologue) \ |
272 | mtcr %r31; /* user mode exception? */ \ | | 279 | mtcr %r31; /* user mode exception? */ \ |
273 | mr %r31, %r1; /* save SP (SRR1 is safe in CR) */ \ | | 280 | mr %r31, %r1; /* save SP (SRR1 is safe in CR) */ \ |
274 | get_intr_sp; /* get kernel stack pointer */ \ | | 281 | get_intr_sp; /* get kernel stack pointer */ \ |
275 | stwu %r31, -FRAMELEN(%r1); /* get space for trapframe */ \ | | 282 | stwu %r31, -FRAMELEN(%r1); /* get space for trapframe */ \ |
276 | stw %r0, FRAME_R0(%r1); /* save r0 */ \ | | 283 | stw %r0, FRAME_R0(%r1); /* save r0 */ \ |
277 | stw %r31, FRAME_R1(%r1); /* save (saved) r1 */ \ | | 284 | stw %r31, FRAME_R1(%r1); /* save (saved) r1 */ \ |
278 | stw %r26, FRAME_R2(%r1); /* save (saved) r2 */ \ | | 285 | stw %r26, FRAME_R2(%r1); /* save (saved) r2 */ \ |
279 | save_prologue; /* save SPRG1 (maybe) */ \ | | 286 | save_prologue; /* save SPRG1 (maybe) */ \ |
280 | mflr %r26; /* get LR */ \ | | 287 | mflr %r26; /* get LR */ \ |
281 | mfctr %r29; /* get CTR */ \ | | 288 | mfctr %r29; /* get CTR */ \ |
282 | mfcr %r31; /* get SRR1 */ \ | | 289 | mfcr %r31; /* get SRR1 */ \ |
283 | stmw %r26, FRAME_LR(%r1); /* save LR CR XER CTR SRR0/1 */ \ | | 290 | stmw %r26, FRAME_LR(%r1); /* save LR CR XER CTR SRR0/1 */ \ |
284 | FRAME_SAVE_SRR0_FOR_DDB; \ | | 291 | FRAME_SAVE_SRR0_FOR_DDB; \ |
285 | stw %r3, FRAME_R3(%r1); /* save r3 */ \ | | 292 | stw %r3, FRAME_R3(%r1); /* save r3 */ \ |
286 | stw %r4, FRAME_R4(%r1); /* save r4 */ \ | | 293 | stw %r4, FRAME_R4(%r1); /* save r4 */ \ |
287 | stw %r5, FRAME_R5(%r1); /* save r5 */ \ | | 294 | stw %r5, FRAME_R5(%r1); /* save r5 */ \ |
288 | stw %r6, FRAME_R6(%r1); /* save r6 */ \ | | 295 | stw %r6, FRAME_R6(%r1); /* save r6 */ \ |
289 | stw %r7, FRAME_R7(%r1); /* save r7 */ \ | | 296 | stw %r7, FRAME_R7(%r1); /* save r7 */ \ |
290 | stw %r8, FRAME_R8(%r1); /* save r8 */ \ | | 297 | stw %r8, FRAME_R8(%r1); /* save r8 */ \ |
291 | stw %r9, FRAME_R9(%r1); /* save r9 */ \ | | 298 | stw %r9, FRAME_R9(%r1); /* save r9 */ \ |
292 | stw %r10, FRAME_R10(%r1); /* save r10 */ \ | | 299 | stw %r10, FRAME_R10(%r1); /* save r10 */ \ |
293 | stw %r11, FRAME_R11(%r1); /* save r11 */ \ | | 300 | stw %r11, FRAME_R11(%r1); /* save r11 */ \ |
294 | stw %r12, FRAME_R12(%r1); /* save r12 */ \ | | 301 | stw %r12, FRAME_R12(%r1); /* save r12 */ \ |
295 | stw %r13, FRAME_R13(%r1); /* save r13 */ \ | | 302 | stw %r13, FRAME_R13(%r1); /* save r13 */ \ |
296 | mfsprg3 %r2; /* get save_area pointer */ \ | | 303 | mfsprg3 %r2; /* get save_area pointer */ \ |
297 | addi %r2,%r2,-4*(32-start); /* find our save area */ \ | | 304 | addi %r2,%r2,-4*(32-start); /* find our save area */ \ |
298 | lmw %r##start,0(%r2); /* get start-r31 */ \ | | 305 | lmw %r##start,0(%r2); /* get start-r31 */ \ |
299 | mtsprg3 %r2; /* save updated pointer */ \ | | 306 | mtsprg3 %r2; /* save updated pointer */ \ |
300 | mfsprg2 %r13; /* put curlwp into r13 */ \ | | 307 | mfsprg2 %r13; /* put curlwp into r13 */ \ |
301 | li %r7, exc; /* load EXC_* */ \ | | 308 | li %r7, exc; /* load EXC_* */ \ |
302 | stw %r7, FRAME_EXC(%r1); /* save into trapframe */ \ | | 309 | stw %r7, FRAME_EXC(%r1); /* save into trapframe */ \ |
303 | addi %r3, %r1, FRAME_TF /* only argument is trapframe */ | | 310 | addi %r3, %r1, FRAME_TF /* only argument is trapframe */ |
304 | | | 311 | |
305 | #define FRAME_INTR_XEXIT(rfi, srr) \ | | 312 | #define FRAME_INTR_XEXIT(rfi, srr) \ |
306 | FRAME_RESTORE_RETURN_ADDRESS; /* restore return address */ \ | | 313 | FRAME_RESTORE_RETURN_ADDRESS; /* restore return address */ \ |
307 | lwz %r8, FRAME_LR(%r1); /* get LR */ \ | | 314 | lwz %r8, FRAME_LR(%r1); /* get LR */ \ |
308 | lwz %r9, FRAME_CR(%r1); /* get CR */ \ | | 315 | lwz %r9, FRAME_CR(%r1); /* get CR */ \ |
309 | lwz %r10, FRAME_XER(%r1); /* get XER */ \ | | 316 | lwz %r10, FRAME_XER(%r1); /* get XER */ \ |
310 | lwz %r11, FRAME_CTR(%r1); /* get CTR */ \ | | 317 | lwz %r11, FRAME_CTR(%r1); /* get CTR */ \ |
311 | lwz %r12, FRAME_SRR0(%r1); /* get SRR0 */ \ | | 318 | lwz %r12, FRAME_SRR0(%r1); /* get SRR0 */ \ |
312 | lwz %r13, FRAME_SRR1(%r1); /* get SRR1 */ \ | | 319 | lwz %r13, FRAME_SRR1(%r1); /* get SRR1 */ \ |
313 | mtspr SPR_##srr##1, %r13; /* restore SRR1 */ \ | | 320 | mtspr SPR_##srr##1, %r13; /* restore SRR1 */ \ |
314 | mtspr SPR_##srr##0, %r12; /* restore SRR0 */ \ | | 321 | mtspr SPR_##srr##0, %r12; /* restore SRR0 */ \ |
315 | mtctr %r11; /* restore CTR */ \ | | 322 | mtctr %r11; /* restore CTR */ \ |
316 | mtxer %r10; /* restore XER */ \ | | 323 | mtxer %r10; /* restore XER */ \ |
317 | mtcr %r9; /* restore CR */ \ | | 324 | mtcr %r9; /* restore CR */ \ |
318 | mtlr %r8; /* restore LR */ \ | | 325 | mtlr %r8; /* restore LR */ \ |
319 | lwz %r13, FRAME_R13(%r1); /* restore r13 */ \ | | 326 | lwz %r13, FRAME_R13(%r1); /* restore r13 */ \ |
320 | lwz %r12, FRAME_R12(%r1); /* restore r12 */ \ | | 327 | lwz %r12, FRAME_R12(%r1); /* restore r12 */ \ |
321 | lwz %r11, FRAME_R11(%r1); /* restore r11 */ \ | | 328 | lwz %r11, FRAME_R11(%r1); /* restore r11 */ \ |
322 | lwz %r10, FRAME_R10(%r1); /* restore r10 */ \ | | 329 | lwz %r10, FRAME_R10(%r1); /* restore r10 */ \ |
323 | lwz %r9, FRAME_R9(%r1); /* restore r9 */ \ | | 330 | lwz %r9, FRAME_R9(%r1); /* restore r9 */ \ |
324 | lwz %r8, FRAME_R8(%r1); /* restore r8 */ \ | | 331 | lwz %r8, FRAME_R8(%r1); /* restore r8 */ \ |
325 | lwz %r7, FRAME_R7(%r1); /* restore r7 */ \ | | 332 | lwz %r7, FRAME_R7(%r1); /* restore r7 */ \ |
326 | lwz %r6, FRAME_R6(%r1); /* restore r6 */ \ | | 333 | lwz %r6, FRAME_R6(%r1); /* restore r6 */ \ |
327 | lwz %r5, FRAME_R5(%r1); /* restore r5 */ \ | | 334 | lwz %r5, FRAME_R5(%r1); /* restore r5 */ \ |
328 | lwz %r4, FRAME_R4(%r1); /* restore r4 */ \ | | 335 | lwz %r4, FRAME_R4(%r1); /* restore r4 */ \ |
329 | lwz %r3, FRAME_R3(%r1); /* restore r3 */ \ | | 336 | lwz %r3, FRAME_R3(%r1); /* restore r3 */ \ |
330 | lwz %r2, FRAME_R2(%r1); /* restore r2 */ \ | | 337 | lwz %r2, FRAME_R2(%r1); /* restore r2 */ \ |
331 | lwz %r0, FRAME_R0(%r1); /* restore r0 */ \ | | 338 | lwz %r0, FRAME_R0(%r1); /* restore r0 */ \ |
332 | lwz %r1, FRAME_R1(%r1); /* restore r1 */ \ | | 339 | lwz %r1, FRAME_R1(%r1); /* restore r1 */ \ |
333 | rfi /* return from interrupt */ | | 340 | rfi /* return from interrupt */ |
334 | | | 341 | |
/*
 * Frame enter/exit helpers for the various interrupt classes, built on
 * the generic FRAME_INTR_XENTER/FRAME_INTR_XEXIT macros defined earlier
 * in this file.  The numeric argument (26 vs 24) selects how far down
 * the register save extends -- confirm against the XENTER definition.
 */
#define FRAME_INTR_ENTER(exc) \
	FRAME_INTR_XENTER(exc, 26, FRAME_INTR_SP, SAVE_NOTHING)
#define FRAME_INTR_EXIT \
	FRAME_INTR_XEXIT(rfi, SRR)
/* Critical-class interrupts additionally preserve SPRG1. */
#define FRAME_CRIT_ENTER(exc) \
	FRAME_INTR_XENTER(exc, 24, FRAME_INTR_SP, SAVE_SPRG1)
/* Watchdog variant: switch to a dedicated stack named by "sym". */
#define FRAME_WDOG_ENTER(exc, sym) \
	FRAME_INTR_XENTER(exc, 24, FRAME_INTR_SP_NEW(sym), SAVE_SPRG1)
/*
 * Critical exit: restore CSRR0/CSRR1 and SPRG1 from the frame, then
 * return with rfci (critical-class return, uses the CSRR pair).
 */
#define FRAME_CRIT_EXIT \
	RESTORE_SRR0(%r4); \
	RESTORE_SRR1(%r5); \
	RESTORE_SPRG1(%r6); \
	FRAME_INTR_XEXIT(rfci, CSRR)
348 | | | 355 | |
#if defined(MULTIPROCESSOR)
/*
 * Acquire the global pmap_tlb_miss_lock so only one CPU at a time walks
 * the shared page tables from the TLB miss handlers.  Only r20-r23 are
 * available as scratch here.  The ci_mtx_count bookkeeping mirrors the
 * spin-mutex convention: the count is decremented, and when the new
 * value is zero the current IPL is recorded in ci_mtx_oldspl.
 */
#define FRAME_TLBMISSLOCK \
	GET_CPUINFO(%r23); \
	/* spin-mutex accounting: ci_mtx_count-- */ \
	ldint %r22, CI_MTX_COUNT(%r23); \
	subi %r22, %r22, 1; \
	stint %r22, CI_MTX_COUNT(%r23); \
	isync; \
	cmpwi %r22, 0; \
	bne 1f; \
	/* count reached zero: remember current IPL in ci_mtx_oldspl */ \
	ldint %r22, CI_CPL(%r23); \
	stint %r22, CI_MTX_OLDSPL(%r23); \
1:	lis %r23, _C_LABEL(pmap_tlb_miss_lock)@h; \
	ori %r23, %r23, _C_LABEL(pmap_tlb_miss_lock)@l; \
	li %r20, MTX_LOCK; \
	/* attempt to take the lock with a lwarx/stwcx. sequence */ \
2:	lwarx %r22, %r20, %r23; \
	cmpwi %r22, __SIMPLELOCK_UNLOCKED; \
	beq+ 4f; \
	/* lock busy: spin on plain loads until it looks free again */ \
3:	lwzx %r22, %r20, %r23; \
	cmpwi %r22, __SIMPLELOCK_UNLOCKED; \
	beq+ 2b; \
	b 3b; \
4:	li %r21, __SIMPLELOCK_LOCKED; \
	stwcx. %r21, %r20, %r23; \
	bne- 2b; /* reservation lost: retry from lwarx */ \
	isync; \
	msync;
/*
 * Release pmap_tlb_miss_lock (plain store after a sync barrier) and
 * undo the ci_mtx_count accounting done by FRAME_TLBMISSLOCK.
 */
#define FRAME_TLBMISSUNLOCK \
	sync; \
	lis %r23, _C_LABEL(pmap_tlb_miss_lock)@h; \
	ori %r23, %r23, _C_LABEL(pmap_tlb_miss_lock)@l; \
	li %r22, __SIMPLELOCK_UNLOCKED; \
	stw %r22, MTX_LOCK(%r23); \
	isync; \
	msync; \
	GET_CPUINFO(%r23); \
	ldint %r22, CI_MTX_COUNT(%r23); \
	addi %r22, %r22, 1; \
	stint %r22, CI_MTX_COUNT(%r23); \
	isync;
#else /* !MULTIPROCESSOR */
/* Uniprocessor: no lock needed, these expand to nothing. */
#define FRAME_TLBMISSLOCK
#define FRAME_TLBMISSUNLOCK
#endif /* MULTIPROCESSOR */
392 | | | 399 | |
	.text
	.p2align 4
/*
 * Critical input interrupt: save a critical frame, call the C handler
 * intr_critintr(tf), and return via rfci through FRAME_CRIT_EXIT.
 */
_C_LABEL(critical_input_vector):
	/* MSR[ME] is unchanged, all others cleared */
	FRAME_CRIT_PROLOGUE		/* save SP r26-31 CR LR XER */
	FRAME_CRIT_ENTER(EXC_CII)
	bl	_C_LABEL(intr_critintr)	/* critintr(tf) */
	FRAME_CRIT_EXIT
401 | | | 408 | |
	.p2align 4
/*
 * Machine check: save a machine-check frame, capture MCAR/MCSR into the
 * trapframe, then hand off to trap(T_MACHINE_CHECK, tf).
 */
_C_LABEL(machine_check_vector):
	/* all MSR bits are cleared */
	FRAME_MCHK_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_MCHK_ENTER(EXC_MCHK)
	/*
	 * MCAR/MCSR don't need to be saved early since MSR[ME] is cleared
	 * on entry (no nested machine check can overwrite them first).
	 */
	mfspr	%r7, SPR_MCAR
	mfspr	%r6, SPR_MCSR
	stw	%r6, FRAME_MCSR(%r1)
	stw	%r7, FRAME_MCAR(%r1)
	li	%r3, T_MACHINE_CHECK
	bl	_C_LABEL(trap)		/* trap(T_MACHINE_CHECK, tf) */
	FRAME_MCHK_EXIT
418 | | | 425 | |
	.p2align 4
/*
 * Data storage interrupt (DSI).  Also the home of the common trap
 * entry/exit path (trapenter/trapexit/trapdone) shared by all the
 * trap-class vectors below: expects trap code in r3, trapframe in r4,
 * and the faulting SRR1 in r31 (left there by FRAME_ENTER).
 */
_C_LABEL(data_storage_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_DEAR_ESR		/* save r2 DEAR ESR r24-31 CR XER SRR */
	FRAME_ENTER_DEAR_ESR(EXC_DSI, %r4)
	li	%r3, T_DSI
	/* FRAME_ENTER leaves SRR1 in %r31 */
trapenter:
trapagain:
	wrtee	%r31			/* restore MSR[EE] from faulting SRR1 */

	bl	_C_LABEL(trap)		/* trap(trapcode, tf) */
_C_LABEL(trapexit):
	wrteei	0			/* disable interrupts */
	/* disabled sanity check that MSR[CE] was set at trap time: */
# andis. %r0, %r31, PSL_CE@h
# tweqi %r0, 0
	andi.	%r4, %r31, PSL_PR	/* let's look at PSL_PR */
	beq	trapdone		/* from kernel: skip AST check, exit */
	lwz	%r4, L_MD_ASTPENDING(%r13)	/* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	trapdone		/* nope, proceed to exit */
	li	%r6, EXC_AST		/* yes. */
	stw	%r6, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapagain		/* and deal with it */
trapdone:
	FRAME_EXIT
447 | | | 454 | |
	.p2align 4
/*
 * Instruction storage interrupt (ISI): build a frame with ESR saved and
 * dispatch through the common trapenter path as T_ISI.
 */
_C_LABEL(instruction_storage_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save ESR r2 r25-31 CR XER SRR0/1 */
	FRAME_ENTER_ESR(EXC_ISI, %r4)
	li	%r3, T_ISI
	b	trapenter
455 | | | 462 | |
	.p2align 4
/*
 * External input interrupt.  Calls intr_extintr(tf); on the way out,
 * if we interrupted user mode (PSL_PR set in the saved SRR1) and an AST
 * is pending, finish saving a full trapframe and reroute through the
 * common trap path as an AST instead of returning directly.
 */
_ENTRY(external_input_vector)
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_INTR_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_INTR_ENTER(EXC_EXI)

	bl	_C_LABEL(intr_extintr)
_C_LABEL(intrcall):
	GET_CPUINFO(%r6)		/* get curcpu() */
	lwz	%r5, FRAME_SRR1(%r1)	/* get saved SRR1 */
	/* disabled sanity check that MSR[CE] was set at interrupt time: */
# andis. %r0, %r5, PSL_CE@h
# tweqi %r0, 0
	andi.	%r4, %r5, PSL_PR	/* let's look at PSL_PR */
	beq	intrexit		/* from kernel: skip to exit */
	lwz	%r4, L_MD_ASTPENDING(%r13)	/* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	intrexit		/* nope, proceed to exit */
	stmw	%r14, FRAME_R14(%r1)	/* save rest of registers */
	FRAME_SAVE_SPEFSCR
	mr	%r31, %r5		/* trapagain expects SRR1 in r31 */
	li	%r4, EXC_AST		/* yes. */
	stw	%r4, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapagain		/* and deal with it */
intrexit:
	FRAME_INTR_EXIT
483 | | | 490 | |
	.p2align 4
/*
 * Alignment interrupt: frame with DEAR/ESR saved, dispatched through
 * the common trap path as T_ALIGNMENT.
 */
_C_LABEL(alignment_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_DEAR_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_DEAR_ESR(EXC_ALI, %r4)
	li	%r3, T_ALIGNMENT
	b	trapenter
491 | | | 498 | |
	.p2align 4
/*
 * Program interrupt (illegal/privileged insn, trap insn, etc. --
 * the distinction lives in the saved ESR): dispatched as T_PROGRAM.
 */
_C_LABEL(program_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_PGM, %r4)
	li	%r3, T_PROGRAM
	b	trapenter
499 | | | 506 | |
#ifdef SPR_IVOR7
	.p2align 4
/*
 * FP unavailable interrupt (only on parts that define SPR_IVOR7):
 * dispatched through the common trap path as T_FP_UNAVAILABLE.
 */
_C_LABEL(fp_unavailable_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPU, %r4)
	li	%r3, T_FP_UNAVAILABLE
	b	trapenter
#endif
509 | | | 516 | |
	.p2align 4
/*
 * System call.  The handler address is loaded from the process'
 * p_md.md_syscall and called indirectly (presumably so emulations can
 * install their own entry point -- confirm against md_syscall setup).
 * On return, check for a pending AST and reroute via trapenter if so.
 */
_C_LABEL(system_call_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE			/* save SP r26-31 CR LR XER */
	FRAME_ENTER(EXC_SC, %r3)

	wrteei	1			/* enable interrupts */
	lwz	%r7, L_PROC(%r13)	/* get proc for lwp */
	lwz	%r8, P_MD_SYSCALL(%r7)	/* get syscall */
	mtlr	%r8			/* need to call indirect */
	blrl				/* syscall(tf) */
_C_LABEL(sctrapexit):
	wrteei	0			/* disable interrupts */
	lwz	%r4, L_MD_ASTPENDING(%r13)	/* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	trapdone		/* nope, proceed to exit */
	li	%r0, EXC_AST		/* yes. */
	stw	%r0, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapenter		/* and deal with it */
531 | | | 538 | |
#ifdef SPR_IVOR9
	.p2align 4
/*
 * Auxiliary processor unavailable (only on parts with SPR_IVOR9).
 * NOTE(review): the frame is tagged EXC_PGM while the trap code is
 * T_AP_UNAVAILABLE -- presumably there is no dedicated EXC_AP value;
 * confirm against the EXC_* definitions.
 */
_C_LABEL(ap_unavailable_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE			/* save SP r25-31 CR LR XER */
	FRAME_ENTER(EXC_PGM, %r4)
	li	%r3, T_AP_UNAVAILABLE
	b	trapenter
#endif
541 | | | 548 | |
	.p2align 4
/*
 * Decrementer interrupt: call intr_decrintr(tf), then share the AST
 * check and frame teardown with the external-input exit (intrexit).
 */
_C_LABEL(decrementer_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_INTR_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_INTR_ENTER(EXC_DECR)

	bl	_C_LABEL(intr_decrintr)
	b	intrexit
550 | | | 557 | |
551 | .p2align 4 | | 558 | .p2align 4 |
552 | _C_LABEL(fixed_interval_timer_vector): | | 559 | _C_LABEL(fixed_interval_timer_vector): |
553 | /* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */ | | 560 | /* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */ |
554 | FRAME_PROLOGUE /* save SP r25-31 CR LR XER */ | | 561 | FRAME_PROLOGUE /* save SP r25-31 CR LR XER */ |
555 | FRAME_INTR_ENTER(EXC_FIT) | | 562 | FRAME_INTR_ENTER(EXC_FIT) |
556 | | | 563 | |
557 | bl _C_LABEL(intr_fitintr) | | 564 | bl _C_LABEL(intr_fitintr) |
558 | b intrexit | | 565 | b intrexit |
559 | | | 566 | |
#ifdef E500_WDOG_STACK
	.data
	/* dedicated 4 KiB stack for the watchdog handler */
	.lcomm wdogstk,4096
#endif
	.text
	.p2align 4
/*
 * Watchdog timer interrupt (critical class).  Optionally runs on its
 * own stack (top of wdogstk, minus one call frame) so it can fire even
 * when the interrupted stack is suspect.
 */
_C_LABEL(watchdog_timer_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_CRIT_PROLOGUE		/* save SP r25-31 CR LR XER */
#ifdef E500_WDOG_STACK
	FRAME_WDOG_ENTER(EXC_WDOG, wdogstk+4096-CALLFRAMELEN)
#else
	FRAME_CRIT_ENTER(EXC_WDOG);
#endif

	bl	_C_LABEL(intr_wdogintr)
	FRAME_CRIT_EXIT
577 | | | 584 | |
	.p2align 4
/*
 * Data TLB error (miss).  Fast path: take the (MP) TLB miss lock and
 * try to refill the TLB straight from the page tables via pte_load.
 * pte_load only returns on failure, in which case we fall back to a
 * full trapframe and the C trap() handler.
 */
_C_LABEL(data_tlb_error_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_TLBPROLOGUE
	FRAME_TLBMISSLOCK
	/*
	 * Registers at this point:
	 *
	 *	r2 = cpu_info
	 *	r20 = scratch
	 *	r21 = scratch
	 *	r22 = scratch
	 *	r23 = scratch
	 *	r24 = DEAR
	 *	r25 = ESR
	 *	r26 = saved r2
	 *	r27 = CR
	 *	r28 = XER
	 *	r29 = scratch
	 *	r30 = SRR0
	 *	r31 = SRR1
	 *
	 * Except for r29, these values must be retained.  However we must
	 * be cognizant of nesting.  There are two cases here, both related.
	 *
	 * We get a critical input or machine check exception and the kernel
	 * stack doesn't have a TLB entry so we take an exception.  The other
	 * nesting path is some page used by the exception handler will cause
	 * a TLB data error.
	 *
	 * The second case (more probable) is that the PTE loading will fail
	 * so we will have to do a hard trap to resolve it.  But in doing so
	 * we need to save a trapframe which could result in another DTLB
	 * fault.
	 *
	 * In all cases, the save area stack shall protect us.
	 */
	/*
	 * Attempt to update the TLB from the page table.
	 */
	mflr	%r29			/* save LR (pte_load clobbers it) */
	mr	%r23, %r24		/* address of exception (DEAR) */
	rlwinm	%r22, %r31,		/* index into ci_pmap_segtab */\
		MSR_DS+PTR_SCALESHIFT+1, \
		31-PTR_SCALESHIFT, \
		31-PTR_SCALESHIFT	/* move PSL_DS[27] to bit 29 */
	bl	pte_load
	FRAME_TLBMISSUNLOCK
	mtlr	%r29			/* restore LR */
	/*
	 * If we returned, pte load failed so let trap deal with it but
	 * has kept the contents of r24-r31 (except r29) intact.
	 */
	FRAME_TLBENTER(EXC_DSI)
	li	%r3, T_DATA_TLB_ERROR
	b	trapenter
634 | | | 641 | |
	.p2align 4
/*
 * Instruction TLB error (miss).  Same fast path as the data TLB error
 * vector above, except the faulting address is the PC (SRR0) and the
 * address space comes from MSR[IS] rather than MSR[DS].
 */
_C_LABEL(instruction_tlb_error_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_TLBPROLOGUE
	FRAME_TLBMISSLOCK
	/*
	 * Attempt to update the TLB from the page table.
	 */
	mflr	%r29			/* save LR (pte_load clobbers it) */
	mr	%r23, %r30		/* PC of exception (SRR0) */
	rlwinm	%r22, %r31,		/* index into ci_pmap_segtab */\
		MSR_IS+PTR_SCALESHIFT+1, \
		31-PTR_SCALESHIFT, \
		31-PTR_SCALESHIFT	/* move PSL_IS[26] to bit 29 */
	bl	pte_load
	FRAME_TLBMISSUNLOCK
	mtlr	%r29			/* restore LR */
	/*
	 * If we returned, pte load failed so let trap deal with it but
	 * has kept the contents of r24-r31 (except r29) intact.
	 */
	FRAME_TLBENTER(EXC_ISI)
	li	%r3, T_INSTRUCTION_TLB_ERROR
	b	trapenter
659 | | | 666 | |
	.p2align 4
/*
 * Debug interrupt (critical class).  DBSR is stashed in the frame's
 * ESR slot before calling trap(T_DEBUG, tf).
 */
_C_LABEL(debug_vector):
	FRAME_CRIT_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_CRIT_ENTER(EXC_DEBUG)
	mfspr	%r6, SPR_DBSR
	stw	%r6, FRAME_ESR(%r1)	/* reuse the ESR slot for DBSR */
	li	%r3, T_DEBUG
	bl	_C_LABEL(trap)
	FRAME_CRIT_EXIT
669 | | | 676 | |
	.p2align 4
/*
 * SPE/embedded vector unavailable: dispatched as T_SPE_UNAVAILABLE
 * through the common trap path.
 */
_C_LABEL(spv_unavailable_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_VEC, %r4)
	li	%r3, T_SPE_UNAVAILABLE
	b	trapenter
676 | | | 683 | |
	.p2align 4
/*
 * Embedded FP data interrupt: dispatched as T_EMBEDDED_FP_DATA.
 */
_C_LABEL(fpdata_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPA, %r4)
	li	%r3, T_EMBEDDED_FP_DATA
	b	trapenter
683 | | | 690 | |
	.p2align 4
/*
 * Embedded FP round interrupt: dispatched as T_EMBEDDED_FP_ROUND
 * (shares the EXC_FPA frame tag with fpdata_vector).
 */
_C_LABEL(fpround_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPA, %r4)
	li	%r3, T_EMBEDDED_FP_ROUND
	b	trapenter
690 | | | 697 | |
	.p2align 4
/*
 * Performance monitor interrupt: dispatched as T_EMBEDDED_PERF_MONITOR.
 */
_C_LABEL(perfmon_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_PERF, %r4)
	li	%r3, T_EMBEDDED_PERF_MONITOR
	b	trapenter
697 | | | 704 | |
698 | .p2align 4 | | 705 | .p2align 4 |
699 | pte_load: | | 706 | pte_load: |
700 | /* | | 707 | /* |
701 | * r2 = scratch | | 708 | * r2 = scratch |
702 | * r20 = scratch | | 709 | * r20 = scratch |
703 | * r21 = scratch | | 710 | * r21 = scratch |
704 | * r22 = index into ci_pmap_{kern,user}_segtab | | 711 | * r22 = index into ci_pmap_{kern,user}_segtab |
705 | * r23 = faulting address | | 712 | * r23 = faulting address |
706 | * The rest are for reference and aren't modifiable. If the load | | 713 | * The rest are for reference and aren't modifiable. If the load |
707 | * fails, they will be used by FRAME_TLBENTER to create the trapframe. | | 714 | * fails, they will be used by FRAME_TLBENTER to create the trapframe. |
708 | * r24 = DEAR | | 715 | * r24 = DEAR |
709 | * r25 = ESR | | 716 | * r25 = ESR |
710 | * r26 = saved r2 | | 717 | * r26 = saved r2 |
711 | * r27 = CR | | 718 | * r27 = CR |
712 | * r28 = XER | | 719 | * r28 = XER |
713 | * r29 = LR | | 720 | * r29 = LR |
714 | * r30 = SRR0 | | 721 | * r30 = SRR0 |
715 | * r31 = SRR1 | | 722 | * r31 = SRR1 |
716 | */ | | 723 | */ |
717 | cmplwi %cr2, %r22, 0 /* remember address space */ | | 724 | cmplwi %cr2, %r22, 0 /* remember address space */ |
718 | GET_CPUINFO(%r2) | | 725 | GET_CPUINFO(%r2) |
719 | addi %r22, %r22, CI_PMAP_SEGTAB /* index into segtab(s) */ | | 726 | addi %r22, %r22, CI_PMAP_SEGTAB /* index into segtab(s) */ |
720 | lwzx %r20, %r22, %r2 /* load kern/user L1 PT addr */ | | 727 | lwzx %r20, %r22, %r2 /* load kern/user L1 PT addr */ |
721 | cmplwi %r20, 0 /* is segtab null? */ | | 728 | cmplwi %r20, 0 /* is segtab null? */ |
722 | beqlr %cr0 /* yes, return to fallback to trap */ | | 729 | beqlr %cr0 /* yes, return to fallback to trap */ |
723 | | | 730 | |
724 | rlwinm %r22, %r23, NSEGPG_SCALESHIFT + PTR_SCALESHIFT, \ | | 731 | rlwinm %r22, %r23, NSEGPG_SCALESHIFT + PTR_SCALESHIFT, \ |
725 | 31-(NSEGPG_SCALESHIFT + PTR_SCALESHIFT - 1), \ | | 732 | 31-(NSEGPG_SCALESHIFT + PTR_SCALESHIFT - 1), \ |
726 | 31-PTR_SCALESHIFT /* extract addr bits [0:9] to [20:29] */ | | 733 | 31-PTR_SCALESHIFT /* extract addr bits [0:9] to [20:29] */ |
727 | lwzx %r20, %r22, %r20 /* load address of page table page */ | | 734 | lwzx %r20, %r22, %r20 /* load address of page table page */ |
728 | cmplwi %r20, 0 /* is page null? */ | | 735 | cmplwi %r20, 0 /* is page null? */ |
729 | beqlr %cr0 /* yes, return to fallback to trap */ | | 736 | beqlr %cr0 /* yes, return to fallback to trap */ |
730 | | | 737 | |
731 | rlwinm %r22, %r23, \ | | 738 | rlwinm %r22, %r23, \ |
732 | NSEGPG_SCALESHIFT + NPTEPG_SCALESHIFT + PTE_SCALESHIFT, \ | | 739 | NSEGPG_SCALESHIFT + NPTEPG_SCALESHIFT + PTE_SCALESHIFT, \ |
733 | 31-(NPTEPG_SCALESHIFT + PTE_SCALESHIFT - 1), \ | | 740 | 31-(NPTEPG_SCALESHIFT + PTE_SCALESHIFT - 1), \ |
734 | 31-PTE_SCALESHIFT /* extract addr bits [10:19] to [20:29] */ | | 741 | 31-PTE_SCALESHIFT /* extract addr bits [10:19] to [20:29] */ |
735 | lwzx %r20, %r22, %r20 /* load PTE from page table page */ | | 742 | lwzx %r20, %r22, %r20 /* load PTE from page table page */ |
736 | cmplwi %r20, 0 /* is there a valid PTE? */ | | 743 | cmplwi %r20, 0 /* is there a valid PTE? */ |
737 | beqlr %cr0 /* no, return to fallback to trap */ | | 744 | beqlr %cr0 /* no, return to fallback to trap */ |
738 | | | 745 | |
739 | #if (PTE_UNSYNCED << 1) != PTE_xX | | 746 | #if (PTE_UNSYNCED << 1) != PTE_xX |
740 | #error PTE_UNSYNCED definition error | | 747 | #error PTE_UNSYNCED definition error |
741 | #endif | | 748 | #endif |
742 | #if (PTE_UNMODIFIED << 1) != PTE_xW | | 749 | #if (PTE_UNMODIFIED << 1) != PTE_xW |
743 | #error PTE_UNMODIFIED definition error | | 750 | #error PTE_UNMODIFIED definition error |
744 | #endif | | 751 | #endif |
745 | andi. %r22, %r20, (PTE_UNSYNCED|PTE_UNMODIFIED) | | 752 | andi. %r22, %r20, (PTE_UNSYNCED|PTE_UNMODIFIED) |
746 | /* Does the PTE need to be changed? */ | | 753 | /* Does the PTE need to be changed? */ |
747 | rotlwi %r22, %r22, 1 /* if so, clear the right PTE bits */ | | 754 | rotlwi %r22, %r22, 1 /* if so, clear the right PTE bits */ |
748 | andc %r20, %r20, %r22 /* pte &= ~((pte & (PTE_UNSYNCED|PTE_UNMODIFIED)) << 1)*/ | | 755 | andc %r20, %r20, %r22 /* pte &= ~((pte & (PTE_UNSYNCED|PTE_UNMODIFIED)) << 1)*/ |
749 | | | 756 | |
750 | /* | | 757 | /* |
	 * r24-r31 = (no touch)
752 | * r23 = scratch (was fault addr) | | 759 | * r23 = scratch (was fault addr) |
753 | * r22 = scratch | | 760 | * r22 = scratch |
754 | * r21 = scratch | | 761 | * r21 = scratch |
755 | * r20 = pte | | 762 | * r20 = pte |
756 | * cr2 = AS 0=eq/!0=ne | | 763 | * cr2 = AS 0=eq/!0=ne |
757 | */ | | 764 | */ |
758 | | | 765 | |
759 | /* | | 766 | /* |
760 | * This is all E500 specific. We should have a patchable branch | | 767 | * This is all E500 specific. We should have a patchable branch |
761 | * to support other BookE (440) implementations. | | 768 | * to support other BookE (440) implementations. |
762 | */ | | 769 | */ |
/*
 * e500_pte_load:
 *	Common tail of the soft TLB miss handlers: fold a PTE into the
 *	MAS1/MAS2/MAS3 registers, write the TLB entry with tlbwe, bump
 *	the soft TLB miss event counter, restore the miss-frame state,
 *	and return from the exception with rfi.
 *
 * On entry (inherited from the tlbmiss frame set up by the caller):
 *	r20 = PTE to load
 *	r23 = faulting address (scratch from here on)
 *	cr2 = ne for a user access, eq for a kernel access (set by caller)
 *	r24-r29 = saved DEAR/ESR/r2/CR/XER/LR (see exit comment below)
 *	r2  = cpu_info pointer -- NOTE(review): presumed from the
 *	      CI_EV_TLBMISS_SOFT(%r2) accesses; confirm against callers.
 */
e500_pte_load:
	bne+	%cr2, 1f		/* user access? MAS1 is ok. */
	/*
	 * Kernel access: clear MAS1[TID] so the entry is entered with
	 * TID 0 (the kernel/global translation id).
	 */
	mfspr	%r22, SPR_MAS1		/* get MAS1 */
	lis	%r21, MAS1_TID@h	/* get TID mask */
	andc	%r22, %r22, %r21	/* clear TID */
	mtspr	SPR_MAS1, %r22		/* save MAS1 */
1:
	andi.	%r21, %r20, PTE_WIMGE_MASK /* extract WIMGE from PTE */
	cmplwi	%r21, PTE_M		/* if just PTE_M is set, */
	beq+	%cr0, 2f		/* skip munging mas2 */
	mfspr	%r22, SPR_MAS2		/* get MAS2 (updated by error) */
	clrrwi	%r22, %r22, PTE_RWX_SHIFT /* clear WIMGE bits */
	or	%r22, %r22, %r21	/* combine with MAS2 contents */
	mtspr	SPR_MAS2, %r22		/* put back into MAS2 */
2:
	/*
	 * r23 = fault addr
	 * r22 = scratch
	 * r21 = scratch
	 * r20 = pte
	 */

	/*
	 * In MAS3, the protection bits are in the low 6 bits:
	 *	UX SX UW SW UR SR
	 * The User bits are 1 bit left of their Supervisor counterparts.
	 * Rotate the PTE protection bits left until they wrap around to become
	 * the least significant bits, where the Supervisor protection bits
	 * are located.  Increase the rotate amount by 1 to place them where
	 * the User protection bits are located.  We get that 1 by extracting
	 * the MAS1[TS] (set for User access) and moving it to bit 31 (LSB).
	 */
	mfspr	%r21, SPR_MAS1		/* get MAS1 which has TS bit */
	extrwi	%r21, %r21, 1, 31-MAS1_TS_SHIFT
					/* extract MAS1_TS to LSB */
	clrrwi	%r23, %r20, PAGE_SHIFT	/* clear non-RPN bits from PTE */
	andi.	%r20, %r20, PTE_RWX_MASK /* isolate protection bits */
	rotrwi	%r20, %r20, PTE_RWX_SHIFT
	andi.	%r22, %r20, (MAS3_SW|MAS3_SR) /* user pages need to be R/W by kernel */
	rotlw	%r20, %r20, %r21	/* rotate protection to correct loc */
	or	%r20, %r20, %r22	/* combine system protection bits */
	or	%r23, %r23, %r20	/* combine RPN and protection bits */
	mtspr	SPR_MAS3, %r23		/* put into MAS3 */
	isync				/* because ECORE500RM tells us to */
	tlbwe				/* write the TLB entry */
	/*
	 * Increment a counter to show how many tlb misses we've handled here.
	 * The 64-bit counter sits at CI_EV_TLBMISS_SOFT(%r2); lmw loads the
	 * big-endian high word into r30 and the low word into r31, and
	 * addic/addze ripple the carry from low to high.
	 */
	lmw	%r30, CI_EV_TLBMISS_SOFT(%r2)
	addic	%r31, %r31, 1
	addze	%r30, %r30
	stmw	%r30, CI_EV_TLBMISS_SOFT(%r2)

	FRAME_TLBMISSUNLOCK

	/*
	 * Cleanup and leave.  We know any higher priority exception will
	 * save and restore SPRG1 and %r2 thereby preserving their values.
	 *
	 * r24 = DEAR (don't care)
	 * r25 = ESR (don't care)
	 * r26 = saved r2
	 * r27 = CR
	 * r28 = XER
	 * r29 = LR
	 * r30 = MSW of counter (addze target)
	 * r31 = LSW of counter (addic target)
	 */
	mtlr	%r29			/* restore Link Register */
	mtxer	%r28			/* restore XER */
	mtcr	%r27			/* restore Condition Register */
	mtsprg1	%r26			/* save saved r2 across load multiple */
	mfsprg3	%r2			/* get end of save area */
	addi	%r2,%r2,-4*(32-20)	/* adjust save area down */
	lmw	%r20,0(%r2)		/* restore r20-r31 */
	mtsprg3	%r2			/* save new end of save area */
	mfsprg1	%r2			/* restore r2 */
	rfi
841 | | | 848 | |
	.p2align 4
	.globl _C_LABEL(exception_init)
/*
 * exception_init(intrsw):
 *	Program IVPR with the base of the exception vector area and fill
 *	in the per-exception IVORn offset registers.  On the primary CPU
 *	(PIR == 0) additionally record the pointer passed in %r3 into
 *	powerpc_intrsw; secondary CPUs return before that store.
 *
 *	NOTE(review): %r3 is presumed to be the first C argument (the
 *	interrupt switch table) -- confirm against the C prototype.
 *
 *	IVPR supplies the high-order address bits of every vector, so all
 *	the vectors named below must reside in the same region as
 *	critical_input_vector, whose @h half is what gets loaded here.
 */
_C_LABEL(exception_init):
	lis	%r6,_C_LABEL(critical_input_vector)@h
	mtspr	SPR_IVPR, %r6

	/* IVOR0: critical input */
	ori	%r5,%r6,_C_LABEL(critical_input_vector)@l
	mtspr	SPR_IVOR0, %r5

	/* IVOR1: machine check */
	ori	%r5,%r6,_C_LABEL(machine_check_vector)@l
	mtspr	SPR_IVOR1, %r5

	/* IVOR2: data storage (DSI) */
	ori	%r5,%r6,_C_LABEL(data_storage_vector)@l
	mtspr	SPR_IVOR2, %r5

	/* IVOR3: instruction storage (ISI) */
	ori	%r5,%r6,_C_LABEL(instruction_storage_vector)@l
	mtspr	SPR_IVOR3, %r5

	/* IVOR4: external input */
	ori	%r5,%r6,_C_LABEL(external_input_vector)@l
	mtspr	SPR_IVOR4, %r5

	/* IVOR5: alignment */
	ori	%r5,%r6,_C_LABEL(alignment_vector)@l
	mtspr	SPR_IVOR5, %r5

	/* IVOR6: program check */
	ori	%r5,%r6,_C_LABEL(program_vector)@l
	mtspr	SPR_IVOR6, %r5

	/* IVOR7: FP unavailable (only on cores that implement it) */
#ifdef SPR_IVOR7
	ori	%r5,%r6,_C_LABEL(fp_unavailable_vector)@l
	mtspr	SPR_IVOR7, %r5
#endif

	/* IVOR8: system call */
	ori	%r5,%r6,_C_LABEL(system_call_vector)@l
	mtspr	SPR_IVOR8, %r5

	/* IVOR9: auxiliary processor unavailable (optional) */
#ifdef SPR_IVOR9
	ori	%r5,%r6,_C_LABEL(ap_unavailable_vector)@l
	mtspr	SPR_IVOR9, %r5
#endif

	/* IVOR10: decrementer */
	ori	%r5,%r6,_C_LABEL(decrementer_vector)@l
	mtspr	SPR_IVOR10, %r5

	/* IVOR11: fixed-interval timer */
	ori	%r5,%r6,_C_LABEL(fixed_interval_timer_vector)@l
	mtspr	SPR_IVOR11, %r5

	/* IVOR12: watchdog timer */
	ori	%r5,%r6,_C_LABEL(watchdog_timer_vector)@l
	mtspr	SPR_IVOR12, %r5

	/* IVOR13: data TLB error (miss) */
	ori	%r5,%r6,_C_LABEL(data_tlb_error_vector)@l
	mtspr	SPR_IVOR13, %r5

	/* IVOR14: instruction TLB error (miss) */
	ori	%r5,%r6,_C_LABEL(instruction_tlb_error_vector)@l
	mtspr	SPR_IVOR14, %r5

	/* IVOR15: debug */
	ori	%r5,%r6,_C_LABEL(debug_vector)@l
	mtspr	SPR_IVOR15, %r5

	/* IVOR32: SPE/vector unavailable */
	ori	%r5,%r6,_C_LABEL(spv_unavailable_vector)@l
	mtspr	SPR_IVOR32, %r5

	/* IVOR33: SPE FP data exception */
	ori	%r5,%r6,_C_LABEL(fpdata_vector)@l
	mtspr	SPR_IVOR33, %r5

	/* IVOR34: SPE FP round exception */
	ori	%r5,%r6,_C_LABEL(fpround_vector)@l
	mtspr	SPR_IVOR34, %r5

	/* IVOR35: performance monitor */
	ori	%r5,%r6,_C_LABEL(perfmon_vector)@l
	mtspr	SPR_IVOR35, %r5

	mfspr	%r5, SPR_PIR		/* get Processor ID register */
	cmplwi	%r5,0
	bnelr				/* return if non-0 (non-primary) */

	/* Primary CPU only: publish the interrupt switch table pointer. */
	lis	%r5,_C_LABEL(powerpc_intrsw)@ha
	stw	%r3,_C_LABEL(powerpc_intrsw)@l(%r5)

	blr
920 | | | 927 | |
#ifdef notyet
	.data
	.lcomm	ddbstk,4096		/* private 4KB stack for the debugger */
	.text

/*
 * cpu_Debugger:
 *	(Disabled -- never compiled in.)  Enter the kernel debugger by
 *	building a fake program-check trapframe and calling
 *	trap(T_PROGRAM, tf), switching to the private ddbstk stack if we
 *	are not already running on it.  Interrupts are disabled across
 *	the call and the caller's MSR[EE] state is restored on return.
 *
 *	NOTE(review): the %r1/%r10 handling around the trap() call looks
 *	inconsistent (flagged inline below); audit before ever enabling
 *	this under "notyet".
 */
_ENTRY(cpu_Debugger)
	mflr	%r0
	stw	%r0, CFRAME_LR(%r1)	/* save LR in the caller's frame */

	mfmsr	%r3			/* remember MSR (for EE state) ... */
	wrteei	0			/* ... then disable interrupts */
	mr	%r4,%r1			/* r4 = incoming stack pointer */
	/* Switch to ddbstk unless sp already lies within its 4KB span. */
	lis	%r10,ddbstk@ha
	addi	%r10,%r10,ddbstk@l
	sub	%r5,%r1,%r10
	cmplwi	%r5,4096
	blt	%cr0, 1f
	addi	%r1,%r10,4096-CALLFRAMELEN
1:
	stwu	%r4,-FRAMELEN(%r1)	/* build trapframe-sized frame */
	stw	%r4,FRAME_R1(%r1)	/* record original sp in the frame */
	stmw	%r13,FRAME_R13(%r1)	/* save r13-r31 */
	mr	%r26,%r0		/* LR */
	mfcr	%r27			/* CR */
	mfxer	%r28			/* XER */
	mfctr	%r29			/* CTR */
	mr	%r30,%r0		/* LR again -- presumably the PC slot; confirm layout */
	mr	%r31,%r3		/* saved MSR */
	stmw	%r26,FRAME_LR(%r1)	/* store LR/CR/XER/CTR/PC?/MSR block */
	mr	%r31,%r1		/* NOTE(review): overwrites live r31; value appears unused */
	mr	%r1,%r10		/* NOTE(review): sp = ddbstk BASE -- frame built above
					 * is not at %r10, so the FRAME_* accesses below read
					 * the wrong memory; was `mr %r10,%r1` intended? */
	addi	%r4,%r1,FRAME_TF	/* r4 = &trapframe */
	li	%r3,EXC_PGM
	stw	%r3,FRAME_EXC(%r1)
	li	%r3,T_PROGRAM
	bl	_C_LABEL(trap)		/* trap(T_PROGRAM, tf) */
	lmw	%r26,FRAME_LR(%r1)	/* reload LR/CR/XER/CTR/../MSR */
	mtlr	%r26
	mtcr	%r27
	mtxer	%r28
	mtctr	%r29
	mr	%r0,%r31		/* r0 = saved MSR */
	lmw	%r13,FRAME_R13(%r1)	/* restore original r13-r31 */
	lwz	%r1,FRAME_R1(%r1)	/* back to the original stack */
	wrtee	%r0			/* restore caller's MSR[EE] */
	blr
#endif /* notyet */