Warning: Attempt to read property "date" on null in /usr/local/www/websvn.planix.org/blame.php on line 247

Warning: Attempt to read property "msg" on null in /usr/local/www/websvn.planix.org/blame.php on line 247
WebSVN – planix.SVN – Blame – /os/branches/feature_unix/sys/src/9/omap/cache.v7.s – Rev 2

Subversion Repositories planix.SVN

Rev

Go to most recent revision | Details | Last modification | View Log | RSS feed

Rev Author Line No. Line
2 - 1
/*
 * cortex arm arch v7 cache flushing and invalidation
 * shared by l.s and rebootcode.s
 */
5
 
6
/*
 * cacheiinv - invalidate the entire instruction cache.
 * Clobbers R0.
 */
TEXT cacheiinv(SB), $-4				/* I invalidate */
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall /* ok on cortex */
	ISB				/* discard any prefetched instructions */
	RET
11
 
12
/*
 * set/way operators, passed a suitable set/way value in R0.
 */
15
/*
 * cachedwb_sw - write back one D-cache line; R0 holds the set/way value.
 * Called from the inner loop of wholecache via BL (R1).
 */
TEXT cachedwb_sw(SB), $-4
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEsi
	RET
18
 
19
/*
 * cachedwbinv_sw - write back and invalidate one D-cache line;
 * R0 holds the set/way value.  Called from wholecache's inner loop.
 */
TEXT cachedwbinv_sw(SB), $-4
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEsi
	RET
22
 
23
/*
 * cachedinv_sw - invalidate (without write back) one D-cache line;
 * R0 holds the set/way value.  Called from wholecache's inner loop.
 */
TEXT cachedinv_sw(SB), $-4
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEsi
	RET
26
 
27
	/* set cache size select */
/*
 * setcachelvl - write R0 into the cache size selection register
 * (CSSELR), choosing which cache level later CpIDcsize reads report on.
 */
TEXT setcachelvl(SB), $-4
	MCR	CpSC, CpIDcssel, R0, C(CpID), C(CpIDidct), 0
	ISB				/* ensure the selection takes effect before any read */
	RET
32
 
33
	/* return cache sizes */
/*
 * getwayssets - return (in R0) the cache size id register (CCSIDR)
 * for the cache level previously chosen with setcachelvl.
 */
TEXT getwayssets(SB), $-4
	MRC	CpSC, CpIDcsize, R0, C(CpID), C(CpIDidct), 0
	RET
37
 
38
/*
 * l1 cache operations.
 * l1 and l2 ops are intended to be called from C, thus need save no
 * caller's regs, only those we need to preserve across calls.
 */
43
 
44
/*
 * cachedwb - write back the entire l1 D-cache.
 * Clobbers R0 and R8 (wholecache's arguments) plus whatever
 * wholecache itself uses.
 */
TEXT cachedwb(SB), $-4
	MOVW.W	R14, -8(R13)		/* push lr */
	MOVW	$cachedwb_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$1, R8			/* cache level 1 (one-origin) */
	BL	wholecache(SB)
	MOVW.P	8(R13), R15		/* pop saved lr into pc: return */
50
 
51
/*
 * cachedwbinv - write back and invalidate the entire l1 D-cache.
 */
TEXT cachedwbinv(SB), $-4
	MOVW.W	R14, -8(R13)		/* push lr */
	MOVW	$cachedwbinv_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$1, R8			/* cache level 1 (one-origin) */
	BL	wholecache(SB)
	MOVW.P	8(R13), R15		/* pop saved lr into pc: return */
57
 
58
/*
 * cachedinv - invalidate (without write back) the entire l1 D-cache.
 */
TEXT cachedinv(SB), $-4
	MOVW.W	R14, -8(R13)		/* push lr */
	MOVW	$cachedinv_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$1, R8			/* cache level 1 (one-origin) */
	BL	wholecache(SB)
	MOVW.P	8(R13), R15		/* pop saved lr into pc: return */
64
 
65
/*
 * cacheuwbinv - write back and invalidate the l1 D-cache, then
 * invalidate the I-cache.  Interrupts are masked (splhi) around the
 * two calls so the pair is not interleaved with other cache traffic
 * from interrupt handlers; the caller's CPSR is restored on exit.
 */
TEXT cacheuwbinv(SB), $-4
	MOVM.DB.W [R14], (R13)	/* save lr on stack */
	MOVW	CPSR, R1
	CPSID			/* splhi */

	MOVM.DB.W [R1], (R13)	/* save R1 on stack */

	BL	cachedwbinv(SB)
	BL	cacheiinv(SB)

	MOVM.IA.W (R13), [R1]	/* restore R1 (saved CPSR) */
	MOVW	R1, CPSR	/* splx */
	MOVM.IA.W (R13), [R14]	/* restore lr */
	RET
79
 
80
/*
 * l2 cache operations
 */
83
 
84
/*
 * l2cacheuwb - write back the entire l2 cache.
 */
TEXT l2cacheuwb(SB), $-4
	MOVW.W	R14, -8(R13)		/* push lr */
	MOVW	$cachedwb_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$2, R8			/* cache level 2 (one-origin) */
	BL	wholecache(SB)
	MOVW.P	8(R13), R15		/* pop saved lr into pc: return */
90
 
91
/*
 * l2cacheuwbinv - write back and invalidate the entire l2 cache.
 * Interrupts are masked (splhi) so the write-back+invalidate pass and
 * the following invalidate pass are not interleaved with other cache
 * traffic; the caller's CPSR is restored on exit.
 */
TEXT l2cacheuwbinv(SB), $-4
	MOVW.W	R14, -8(R13)	/* push lr */
	MOVW	CPSR, R1
	CPSID			/* splhi */

	MOVM.DB.W [R1], (R13)	/* save R1 on stack */

	MOVW	$cachedwbinv_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$2, R8			/* cache level 2 (one-origin) */
	BL	wholecache(SB)
	BL	l2cacheuinv(SB)

	MOVM.IA.W (R13), [R1]	/* restore R1 (saved CPSR) */
	MOVW	R1, CPSR	/* splx */
	MOVW.P	8(R13), R15	/* pop saved lr into pc: return */
106
 
107
/*
 * l2cacheuinv - invalidate (without write back) the entire l2 cache.
 */
TEXT l2cacheuinv(SB), $-4
	MOVW.W	R14, -8(R13)		/* push lr */
	MOVW	$cachedinv_sw(SB), R0	/* per-line op for wholecache */
	MOVW	$2, R8			/* cache level 2 (one-origin) */
	BL	wholecache(SB)
	MOVW.P	8(R13), R15		/* pop saved lr into pc: return */
113
 
114
/*
 * these shift values are for the Cortex-A8 L1 cache (A=2, L=6) and
 * the Cortex-A8 L2 cache (A=3, L=6).
 * A = log2(# of ways), L = log2(bytes per cache line).
 * way goes in bits [31:32-A], set in bits [L+S-1:L] of the set/way
 * register operand; see armv7 arch ref p. 1403.
 */
#define L1WAYSH 30
#define L1SETSH 6
#define L2WAYSH 29
#define L2SETSH 6
124
 
125
/*
 * callers are assumed to be the above l1 and l2 ops.
 * R0 is the function to call in the innermost loop.
 * R8 is the cache level (one-origin: 1 or 2).
 *
 * initial translation by 5c, then massaged by hand.
 */
132
/*
 * wholecache - apply a set/way cache operation to every line of one
 * cache level.
 *   R0: address of the per-line set/way op (cachedwb_sw and friends)
 *   R8: cache level, one-origin (1 or 2)
 * Reads the cache geometry (ways, sets) from the ID registers for the
 * selected level, then iterates way-by-way, set-by-set, calling the op
 * with the composed set/way value in R0.  Runs splhi so the whole walk
 * is atomic; the caller's CPSR is restored before returning.
 * Clobbers R0-R8.
 */
TEXT wholecache+0(SB), $-4
	MOVW	R0, R1		/* save argument for inner loop in R1 */
	SUB	$1, R8		/* convert cache level to zero origin */

	/* we may not have the MMU on yet, so map R1 to PC's space */
	BIC	$KSEGM,	R1	/* strip segment from address */
	MOVW	PC, R2		/* get PC's segment ... */
	AND	$KSEGM, R2
	CMP	$0, R2		/* PC segment should be non-zero on omap */
	BEQ	buggery
	ORR	R2, R1		/* combine them */

	/* drain write buffers */
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEwait
	ISB

	MOVW	CPSR, R2
	MOVM.DB.W [R2,R14], (SP) /* save regs on stack */
	CPSID			/* splhi to make entire op atomic */

	/* get cache sizes */
	SLL	$1, R8, R0	/* R0 = (cache - 1) << 1 */
	MCR	CpSC, CpIDcssel, R0, C(CpID), C(CpIDidct), 0 /* set cache size select */
	ISB
	MRC	CpSC, CpIDcsize, R0, C(CpID), C(CpIDidct), 0 /* get cache sizes */

	/* compute # of ways and sets for this cache level */
	SRA	$3, R0, R5	/* R5 (ways) = R0 >> 3 */
	AND	$1023, R5	/* R5 = (R0 >> 3) & MASK(10) */
	ADD	$1, R5		/* R5 (ways) = ((R0 >> 3) & MASK(10)) + 1 */

	SRA	$13, R0, R2	/* R2 = R0 >> 13 */
	AND	$32767, R2	/* R2 = (R0 >> 13) & MASK(15) */
	ADD	$1, R2		/* R2 (sets) = ((R0 >> 13) & MASK(15)) + 1 */

	/* precompute set/way shifts for inner loop */
	CMP	$0, R8		/* cache == 1? */
	MOVW.EQ	$L1WAYSH, R3 	/* yes */
	MOVW.EQ	$L1SETSH, R4
	MOVW.NE	$L2WAYSH, R3	/* no */
	MOVW.NE	$L2SETSH, R4

	/* iterate over ways */
	MOVW	$0, R7		/* R7: way */
outer:
	/* iterate over sets */
	MOVW	$0, R6		/* R6: set */
inner:
	/* compute set/way register contents */
	SLL	R3, R7, R0 	/* R0 = way << R3 (L?WAYSH) */
	ORR	R8<<1, R0	/* R0 = way << L?WAYSH | (cache - 1) << 1 */
	ORR	R6<<R4, R0 	/* R0 = way<<L?WAYSH | (cache-1)<<1 |set<<R4 */

	BL	(R1)		/* call set/way operation with R0 */

	ADD	$1, R6		/* set++ */
	CMP	R2, R6		/* set >= sets? */
	BLT	inner		/* no, do next set */

	ADD	$1, R7		/* way++ */
	CMP	R5, R7		/* way >= ways? */
	BLT	outer		/* no, do next way */

	MOVM.IA.W (SP), [R2,R14] /* restore regs */
	MOVW	R2, CPSR	/* splx */

	/* drain write buffers */
	/* NOTE(review): R0 here holds the last set/way value, not 0;
	 * the wait op's operand looks ignored — confirm against the TRM. */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEwait
	ISB
	RET

buggery:
	/* PC had no segment bits: we can't relocate R1; complain and give up */
PUTC('?')
	MOVW	PC, R0
//	B	pczeroseg(SB)
	RET