/*
 * arm exception handlers
 */
#include "arm.s"

#undef B	/* B is for 'botch' */

/*
 * exception vectors, copied by trapinit() to somewhere useful
 */
|
|
|
11 |
TEXT vectors(SB), 1, $-4
|
|
|
12 |
MOVW 0x18(R15), R15 /* reset */
|
|
|
13 |
MOVW 0x18(R15), R15 /* undefined instr. */
|
|
|
14 |
MOVW 0x18(R15), R15 /* SWI & SMC */
|
|
|
15 |
MOVW 0x18(R15), R15 /* prefetch abort */
|
|
|
16 |
MOVW 0x18(R15), R15 /* data abort */
|
|
|
17 |
MOVW 0x18(R15), R15 /* reserved */
|
|
|
18 |
MOVW 0x18(R15), R15 /* IRQ */
|
|
|
19 |
MOVW 0x18(R15), R15 /* FIQ */
|
|
|
20 |
|
|
|
21 |
TEXT vtable(SB), 1, $-4
|
|
|
22 |
WORD $_vsvc(SB) /* reset, in svc mode already */
|
|
|
23 |
WORD $_vund(SB) /* undefined, switch to svc mode */
|
|
|
24 |
WORD $_vsvc(SB) /* swi, in svc mode already */
|
|
|
25 |
WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
|
|
|
26 |
WORD $_vdabt(SB) /* data abort, switch to svc mode */
|
|
|
27 |
WORD $_vsvc(SB) /* reserved */
|
|
|
28 |
WORD $_virq(SB) /* IRQ, switch to svc mode */
|
|
|
29 |
// WORD $_vfiq(SB) /* FIQ, switch to svc mode */
|
|
|
30 |
WORD $_virq(SB) /* FIQ, switch to svc mode */
|
|
|
31 |
|
|
|
32 |
TEXT _vrst(SB), 1, $-4
|
|
|
33 |
BL _reset(SB)
|
|
|
34 |
|
|
|
35 |
TEXT _vsvc(SB), 1, $-4 /* SWI */
|
|
|
36 |
MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
|
|
|
37 |
MOVW SPSR, R14 /* ureg->psr = SPSR */
|
|
|
38 |
MOVW.W R14, -4(R13) /* ... */
|
|
|
39 |
MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
|
|
|
40 |
MOVW.W R14, -4(R13) /* ... */
|
|
|
41 |
|
|
|
42 |
/* avoid the ambiguity described in notes/movm.w. */
|
|
|
43 |
// MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
|
|
|
44 |
MOVM.DB.S [R0-R14], (R13) /* save user level registers */
|
|
|
45 |
SUB $(15*4), R13 /* r13 now points to ureg */
|
|
|
46 |
|
|
|
47 |
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
|
|
|
48 |
|
|
|
49 |
// MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
|
|
|
50 |
MOVW $(L1-MACHSIZE), R10 /* m */
|
|
|
51 |
MOVW 8(R10), R9 /* up */
|
|
|
52 |
|
|
|
53 |
MOVW R13, R0 /* first arg is pointer to ureg */
|
|
|
54 |
SUB $8, R13 /* space for argument+link */
|
|
|
55 |
|
|
|
56 |
BL syscall(SB)
|
|
|
57 |
|
|
|
58 |
ADD $(8+4*15), R13 /* make r13 point to ureg->type */
|
|
|
59 |
MOVW 8(R13), R14 /* restore link */
|
|
|
60 |
MOVW 4(R13), R0 /* restore SPSR */
|
|
|
61 |
MOVW R0, SPSR /* ... */
|
|
|
62 |
MOVM.DB.S (R13), [R0-R14] /* restore registers */
|
|
|
63 |
ADD $8, R13 /* pop past ureg->{type+psr} */
|
|
|
64 |
RFE /* MOVM.IA.S.W (R13), [R15] */
|
|
|
65 |
|
|
|
66 |
TEXT _vund(SB), 1, $-4 /* undefined */
|
|
|
67 |
MOVM.IA [R0-R4], (R13) /* free some working space */
|
|
|
68 |
MOVW $PsrMund, R0
|
|
|
69 |
B _vswitch
|
|
|
70 |
|
|
|
71 |
TEXT _vpabt(SB), 1, $-4 /* prefetch abort */
|
|
|
72 |
MOVM.IA [R0-R4], (R13) /* free some working space */
|
|
|
73 |
MOVW $PsrMabt, R0 /* r0 = type */
|
|
|
74 |
B _vswitch
|
|
|
75 |
|
|
|
76 |
TEXT _vdabt(SB), 1, $-4 /* data abort */
|
|
|
77 |
MOVM.IA [R0-R4], (R13) /* free some working space */
|
|
|
78 |
MOVW $(PsrMabt+1), R0 /* r0 = type */
|
|
|
79 |
B _vswitch
|
|
|
80 |
|
|
|
81 |
TEXT _virq(SB), 1, $-4 /* IRQ */
|
|
|
82 |
MOVM.IA [R0-R4], (R13) /* free some working space */
|
|
|
83 |
MOVW $PsrMirq, R0 /* r0 = type */
|
|
|
84 |
B _vswitch
|
|
|
85 |
|
|
|
86 |
/*
|
|
|
87 |
* come here with type in R0 and R13 pointing above saved [r0-r4].
|
|
|
88 |
* we'll switch to SVC mode and then call trap.
|
|
|
89 |
*/
|
|
|
90 |
_vswitch:
|
|
|
91 |
MOVW SPSR, R1 /* save SPSR for ureg */
|
|
|
92 |
MOVW R14, R2 /* save interrupted pc for ureg */
|
|
|
93 |
MOVW R13, R3 /* save pointer to where the original [R0-R4] are */
|
|
|
94 |
|
|
|
95 |
/*
|
|
|
96 |
* switch processor to svc mode. this switches the banked registers
|
|
|
97 |
* (r13 [sp] and r14 [link]) to those of svc mode.
|
|
|
98 |
*/
|
|
|
99 |
MOVW CPSR, R14
|
|
|
100 |
BIC $PsrMask, R14
|
|
|
101 |
ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
|
|
|
102 |
MOVW R14, CPSR /* switch! */
|
|
|
103 |
|
|
|
104 |
AND.S $0xf, R1, R4 /* interrupted code kernel or user? */
|
|
|
105 |
BEQ _userexcep
|
|
|
106 |
|
|
|
107 |
/* here for trap from SVC mode */
|
|
|
108 |
MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
|
|
|
109 |
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
|
|
|
110 |
|
|
|
111 |
/*
|
|
|
112 |
* avoid the ambiguity described in notes/movm.w.
|
|
|
113 |
* In order to get a predictable value in R13 after the stores,
|
|
|
114 |
* separate the store-multiple from the stack-pointer adjustment.
|
|
|
115 |
* We'll assume that the old value of R13 should be stored on the stack.
|
|
|
116 |
*/
|
|
|
117 |
/* save kernel level registers, at end r13 points to ureg */
|
|
|
118 |
// MOVM.DB.W [R0-R14], (R13)
|
|
|
119 |
MOVM.DB [R0-R14], (R13)
|
|
|
120 |
SUB $(15*4), R13 /* SP now points to saved R0 */
|
|
|
121 |
|
|
|
122 |
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
|
|
|
123 |
|
|
|
124 |
MOVW R13, R0 /* first arg is pointer to ureg */
|
|
|
125 |
SUB $(4*2), R13 /* space for argument+link (for debugger) */
|
|
|
126 |
MOVW $0xdeaddead, R11 /* marker */
|
|
|
127 |
|
|
|
128 |
BL trap(SB)
|
|
|
129 |
|
|
|
130 |
ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
|
|
|
131 |
MOVW 8(R13), R14 /* restore link */
|
|
|
132 |
MOVW 4(R13), R0 /* restore SPSR */
|
|
|
133 |
MOVW R0, SPSR /* ... */
|
|
|
134 |
|
|
|
135 |
MOVM.DB (R13), [R0-R14] /* restore registers */
|
|
|
136 |
|
|
|
137 |
ADD $(4*2), R13 /* pop past ureg->{type+psr} to pc */
|
|
|
138 |
RFE /* MOVM.IA.S.W (R13), [R15] */
|
|
|
139 |
|
|
|
140 |
/* here for trap from USER mode */
|
|
|
141 |
_userexcep:
|
|
|
142 |
MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
|
|
|
143 |
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
|
|
|
144 |
|
|
|
145 |
/* avoid the ambiguity described in notes/movm.w. */
|
|
|
146 |
// MOVM.DB.W.S [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
|
|
|
147 |
MOVM.DB.S [R0-R14], (R13) /* save kernel level registers */
|
|
|
148 |
SUB $(15*4), R13 /* r13 now points to ureg */
|
|
|
149 |
|
|
|
150 |
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
|
|
|
151 |
|
|
|
152 |
// MOVW $(KSEG0+16*KiB-MACHSIZE), R10 /* m */
|
|
|
153 |
MOVW $(L1-MACHSIZE), R10 /* m */
|
|
|
154 |
MOVW 8(R10), R9 /* up */
|
|
|
155 |
|
|
|
156 |
MOVW R13, R0 /* first arg is pointer to ureg */
|
|
|
157 |
SUB $(4*2), R13 /* space for argument+link (for debugger) */
|
|
|
158 |
|
|
|
159 |
BL trap(SB)
|
|
|
160 |
|
|
|
161 |
ADD $(4*2+4*15), R13 /* make r13 point to ureg->type */
|
|
|
162 |
MOVW 8(R13), R14 /* restore link */
|
|
|
163 |
MOVW 4(R13), R0 /* restore SPSR */
|
|
|
164 |
MOVW R0, SPSR /* ... */
|
|
|
165 |
MOVM.DB.S (R13), [R0-R14] /* restore registers */
|
|
|
166 |
ADD $(4*2), R13 /* pop past ureg->{type+psr} */
|
|
|
167 |
RFE /* MOVM.IA.S.W (R13), [R15] */
|
|
|
168 |
|
|
|
169 |
TEXT _vfiq(SB), 1, $-4 /* FIQ */
|
|
|
170 |
RFE /* FIQ is special, ignore it for now */
|
|
|
171 |
|
|
|
172 |
/*
|
|
|
173 |
* set the stack value for the mode passed in R0
|
|
|
174 |
*/
|
|
|
175 |
TEXT setr13(SB), 1, $-4
|
|
|
176 |
MOVW 4(FP), R1
|
|
|
177 |
|
|
|
178 |
MOVW CPSR, R2
|
|
|
179 |
BIC $PsrMask, R2, R3
|
|
|
180 |
ORR R0, R3
|
|
|
181 |
MOVW R3, CPSR /* switch to new mode */
|
|
|
182 |
|
|
|
183 |
MOVW R13, R0 /* return old sp */
|
|
|
184 |
MOVW R1, R13 /* install new one */
|
|
|
185 |
|
|
|
186 |
MOVW R2, CPSR /* switch back to old mode */
|
|
|
187 |
RET
|