/*
 * arm exception handlers
 */
#include "arm.s"

#undef B			/* B is for 'botch' */

/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
TEXT vectors(SB), 1, $-4
	MOVW	0x18(R15), R15		/* reset */
	MOVW	0x18(R15), R15		/* undefined instr. */
	MOVW	0x18(R15), R15		/* SWI & SMC */
	MOVW	0x18(R15), R15		/* prefetch abort */
	MOVW	0x18(R15), R15		/* data abort */
	MOVW	0x18(R15), R15		/* hypervisor call */
	MOVW	0x18(R15), R15		/* IRQ */
	MOVW	0x18(R15), R15		/* FIQ */
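/*
 * note: in ARM state R15 reads as the current instruction's address
 * plus 8, so each MOVW above loads the word 0x20 bytes past its own
 * vector slot.  vectors is exactly 8 words (0x20 bytes) long, so that
 * word is the matching vtable entry below -- assuming trapinit()
 * copies vtable immediately after the vectors, which the fixed 0x18
 * offset requires.
 */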
TEXT vtable(SB), 1, $-4
	WORD	$_vrst-KZERO(SB)	/* reset, in svc mode already */
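	/*
	 * note: presumably -KZERO because reset runs with the MMU off
	 * (see "running in the zero segment" in _vrst below), so this
	 * entry must hold _vrst's physical address, not its KZERO-based
	 * link address.
	 */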
	WORD	$_vund(SB)		/* undefined, switch to svc mode */
	WORD	$_vsvc(SB)		/* swi, in svc mode already */
	WORD	$_vpabt(SB)		/* prefetch abort, switch to svc mode */
	WORD	$_vdabt(SB)		/* data abort, switch to svc mode */
	WORD	$_vhype(SB)		/* hypervisor call */
	WORD	$_virq(SB)		/* IRQ, switch to svc mode */
	WORD	$_vfiq(SB)		/* FIQ, switch to svc mode */

/*
 * reset - start additional cpus
 */
TEXT _vrst(SB), 1, $-4
	/* running in the zero segment (pc is lower 256MB) */
	CPSMODE(PsrMsvc)		/* should be redundant */
	CPSID
	CPSAE
	SETEND(0)			/* force little-endian */
	BARRIERS
	SETZSB
	MOVW	$PsrMsvc, SPSR
	MOVW	$0, R14

	/* invalidate i-cache and branch-target cache */
	MTCP	CpSC, 0, PC, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	BARRIERS

	BL	cpureset(SB)
spin:
	B	spin
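/*
 * note: cpureset() is not expected to return;
 * the spin loop merely parks this cpu if it ever does.
 */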
|
|
|
/*
 * system call
 */
TEXT _vsvc(SB), 1, $-4			/* SWI */
	CLREX
	BARRIERS
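	/*
	 * note: CLREX clears this cpu's local exclusive monitor so that
	 * a LDREX/STREX sequence in the interrupted code cannot falsely
	 * succeed once we return; _vswitch below does the same.
	 */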
	/* stack is m->stack */
	MOVW.W	R14, -4(R13)		/* ureg->pc = interrupted PC */
	MOVW	SPSR, R14		/* ureg->psr = SPSR */
	MOVW.W	R14, -4(R13)		/* ... */
	MOVW	$PsrMsvc, R14		/* ureg->type = PsrMsvc */
	MOVW.W	R14, -4(R13)		/* ... */
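	/*
	 * note: the three pushes above plus the MOVM below build a Ureg.
	 * judging by the offsets used in this file, its layout from low
	 * to high is r0-r12, r13 (usp), r14 (ulink), type, psr, pc; so
	 * once R13 points at the ureg, ((NREGS+1)*4)(R13) is ureg->psr.
	 */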
	/* avoid the ambiguity described in notes/movm.w. */
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(NREGS*4), R13		/* r13 now points to ureg */
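	/*
	 * note: the .S suffix above makes MOVM store the user-mode
	 * banked r13/r14 instead of svc's, which is how usp and the user
	 * link register reach the ureg.  the "ambiguity" is that MOVM
	 * with writeback (.W) is unpredictable when the base register is
	 * also in the transfer list, hence the separate SUB.
	 */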

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */

	/*
	 * set up m and up registers since user registers could contain anything
	 */
	CPUID(R1)
	SLL	$2, R1			/* convert to word index */
	MOVW	$machaddr(SB), R2
	ADD	R1, R2
	MOVW	(R2), R(MACH)		/* m = machaddr[cpuid] */
	CMP	$0, R(MACH)
	MOVW.EQ	$MACHADDR, R(MACH)	/* paranoia: use MACHADDR if 0 */
	MOVW	8(R(MACH)), R(USER)	/* up = m->proc */

	MOVW	((NREGS+1)*4)(R13), R2	/* saved SPSR (user mode) */

	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$8, R13			/* space for argument+link */

	BL	syscall(SB)
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */

	MOVW	$setR12(SB), R12	/* reload kernel's SB */

	ADD	$(8+4*NREGS), R13	/* make r13 point to ureg->type */

	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */
	/*
	 * return from user-mode exception.
	 * expects new SPSR in R0.  R13 must point to ureg->type.
	 */
_rfue:
TEXT rfue(SB), 1, $-4
	MOVW	R0, SPSR		/* ... */

	/*
	 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
	 * step on type and previous word to hold temporary values.
	 * we could instead change the order in which psr & pc are pushed.
	 */
	MOVW	4(R13), R1		/* psr */
	MOVW	8(R13), R2		/* pc */
	MOVW	R2, 4(R13)		/* pc */
	MOVW	R1, 8(R13)		/* psr */

	MOVM.DB.S (R13), [R0-R14]	/* restore user registers */
	ADD	$4, R13			/* pop type, sp -> pc */
	RFEV7W(13)
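/*
 * note: RFEV7W(13) is assumed to expand to a v7 RFEIA with writeback
 * from R13, which loads PC from (R13) and CPSR from 4(R13) -- the
 * reason psr and pc were swapped into pc, psr order above.
 */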
|
|
|
TEXT _vund(SB), 1, $-4			/* undefined */
	/* sp is m->sund */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMund, R0
	B	_vswitch

TEXT _vpabt(SB), 1, $-4			/* prefetch abort */
	/* sp is m->sabt */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMabt, R0		/* r0 = type */
	B	_vswitch

TEXT _vdabt(SB), 1, $-4			/* data abort */
	/* sp is m->sabt */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$(PsrMabt+1), R0	/* r0 = type */
	B	_vswitch

TEXT _virq(SB), 1, $-4			/* IRQ */
	/* sp is m->sirq */
	MOVM.IA	[R0-R4], (R13)		/* free some working space */
	MOVW	$PsrMirq, R0		/* r0 = type */
	B	_vswitch
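/*
 * note: und, abt and irq modes each enter on a small per-mode stack
 * (m->sund, m->sabt, m->sirq above); its only job is to park R0-R4
 * until _vswitch copies them onto the svc stack.
 */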
|
|
|
/*
 * come here with type in R0 and R13 pointing at the saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
 */
_vswitch:
// TEXT _vswtch(SB), 1, $-4		/* make symbol visible to debuggers */
	CLREX
	BARRIERS
	MOVW	SPSR, R1		/* save SPSR for ureg */
	/*
	 * R12 needs to be set before using PsrMbz, so BIGENDCHECK code has
	 * been moved below.
	 */
	MOVW	R14, R2			/* save interrupted pc for ureg */
	MOVW	R13, R3			/* save pointer to where the original [R0-R4] are */

	/*
	 * switch processor to svc mode.  this switches the banked registers
	 * (r13 [sp] and r14 [link]) to those of svc mode (so we must be sure
	 * to never get here already in svc mode).
	 */
	CPSMODE(PsrMsvc)		/* switch! */
	CPSID

	AND.S	$0xf, R1, R4		/* interrupted code kernel or user? */
	BEQ	_userexcep
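	/*
	 * note: the PSR mode field for user mode is 0x10, the only mode
	 * with a zero low nibble, so AND.S $0xf sets EQ exactly when the
	 * interrupted code was running in user mode.
	 */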
|
|
|
/*
 * here for trap from SVC mode
 */

	/*
	 * push ureg->{type, psr, pc} onto Msvc stack.
	 * r13 points to ureg->type after.
	 */
	MOVM.DB.W [R0-R2], (R13)
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */

	/*
	 * avoid the ambiguity described in notes/movm.w.
	 * In order to get a predictable value in R13 after the stores,
	 * separate the store-multiple from the stack-pointer adjustment.
	 * We'll assume that the old value of R13 should be stored on the stack.
	 */
	/* save kernel level registers, at end r13 points to ureg */
	MOVM.DB	[R0-R14], (R13)
	SUB	$(NREGS*4), R13		/* SP now points to saved R0 */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */
	/* previous mode was svc, so the saved spsr should be sane. */
	MOVW	((NREGS+1)*4)(R13), R1

	MOVM.IA	(R13), [R0-R8]		/* restore a few of the saved registers */

	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */
	MOVW	$0xdeaddead, R11	/* marker */

	BL	trap(SB)		/* trap(ureg) */
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */

	MOVW	$setR12(SB), R12	/* reload kernel's SB */

	ADD	$(4*2+4*NREGS), R13	/* make r13 point to ureg->type */

	/*
	 * if we interrupted a previous trap's handler and are now
	 * returning to it, we need to propagate the current R(MACH) (R10)
	 * by overriding the saved one on the stack, since we may have
	 * been rescheduled and be on a different processor now than
	 * at entry.
	 */
	MOVW	R(MACH), (-(NREGS-MACH)*4)(R13) /* restore current cpu's MACH */

	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */

	/* return from kernel-mode exception */
	MOVW	R0, SPSR		/* ... */

	/*
	 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
	 * step on type and previous word to hold temporary values.
	 * we could instead change the order in which psr & pc are pushed.
	 */
	MOVW	4(R13), R1		/* psr */
	MOVW	8(R13), R2		/* pc */
	MOVW	R2, 4(R13)		/* pc */
	MOVW	R1, 8(R13)		/* psr */

	/* restore kernel regs other than SP; we're using it */
	SUB	$(NREGS*4), R13
	MOVM.IA.W (R13), [R0-R12]
	ADD	$4, R13			/* skip saved kernel SP */
	MOVM.IA.W (R13), [R14]
	ADD	$4, R13			/* pop type, sp -> pc */
	BARRIERS
	RFEV7W(13)
|
|
|
/*
 * here for trap from USER mode
 */
_userexcep:
	MOVM.DB.W [R0-R2], (R13)	/* set ureg->{type, psr, pc}; r13 points to ureg->type */
	MOVM.IA	(R3), [R0-R4]		/* restore [R0-R4] from previous mode's stack */

	/* avoid the ambiguity described in notes/movm.w. */
	MOVM.DB.S [R0-R14], (R13)	/* save user level registers */
	SUB	$(NREGS*4), R13		/* r13 now points to ureg */

	MOVW	$setR12(SB), R12	/* Make sure we've got the kernel's SB loaded */

	/*
	 * set up m and up registers since user registers could contain anything
	 */
	CPUID(R1)
	SLL	$2, R1			/* convert to word index */
	MOVW	$machaddr(SB), R2
	ADD	R1, R2
	MOVW	(R2), R(MACH)		/* m = machaddr[cpuid] */
	CMP	$0, R(MACH)
	MOVW.EQ	$MACHADDR, R(MACH)	/* paranoia: use MACHADDR if 0 */
	MOVW	8(R(MACH)), R(USER)	/* up = m->proc */

	MOVW	((NREGS+1)*4)(R13), R2	/* saved SPSR */

	MOVW	R13, R0			/* first arg is pointer to ureg */
	SUB	$(4*2), R13		/* space for argument+link (for debugger) */

	BL	trap(SB)		/* trap(ureg) */
	/*
	 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
	 * may have been trashed when we get here.
	 */

	ADD	$(4*2+4*NREGS), R13	/* make r13 point to ureg->type */

	MOVW	8(R13), R14		/* restore link */
	MOVW	4(R13), R0		/* restore SPSR */
	B	_rfue

TEXT _vfiq(SB), 1, $-4			/* FIQ */
	PUTC('?')
	PUTC('f')
	PUTC('i')
	PUTC('q')
	RFE				/* FIQ is special, ignore it for now */

TEXT _vhype(SB), 1, $-4
	PUTC('?')
	PUTC('h')
	PUTC('y')
	PUTC('p')
	RFE

/*
 * set the stack value for the mode passed in R0
 */
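/*
 * note: r13 is banked per mode, so the only way to set another mode's
 * stack pointer is to switch briefly into that mode with interrupts
 * masked, as below.  the mode is taken from R0 and the new sp from
 * 4(FP), implying the first argument arrives in a register and the
 * second on the stack.
 */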
TEXT setr13(SB), 1, $-4
	MOVW	4(FP), R1		/* new stack pointer */

	MOVW	CPSR, R2
	BIC	$(PsrMask|PsrMbz), R2, R3
	ORR	$(PsrDirq|PsrDfiq), R3
	ORR	R0, R3

	MOVW	R3, CPSR		/* switch to new mode */

	MOVW	R13, R0			/* return old sp */
	MOVW	R1, R13			/* install new one */

	MOVW	R2, CPSR		/* switch back to old mode */
	RET