Print this page
cpuid for ARMv7
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/uts/armv7/ml/cache.s
+++ new/usr/src/uts/armv7/ml/cache.s
1 1 /*
2 2 * This file and its contents are supplied under the terms of the
3 3 * Common Development and Distribution License ("CDDL"), version 1.0.
4 4 * You may only use this file in accordance with the terms of version
5 5 * 1.0 of the CDDL.
6 6 *
7 7 * A full copy of the text of the CDDL should have accompanied this
8 8 * source. A copy of the CDDL is also available via the Internet at
9 9 * http://www.illumos.org/license/CDDL.
10 10 */
11 11
12 12 /*
13 13 * Copyright 2013 Joyent, Inc. All rights reserved.
14 14 */
15 15
16 16 .file "cache.s"
17 17
18 18 /* XXXARM: rework cache/tlb maintenance functions to handle ARMv7 */
19 19
20 20 /*
21 21 * Cache and memory barrier operations
22 22 */
23 23
24 24 #include <sys/asm_linkage.h>
25 25
26 26 #if defined(lint) || defined(__lint)
27 27
28 28 void
29 29 membar_sync(void)
30 30 {}	/* lint stub; real implementation is the assembly below */
31 31
32 32 void
33 33 membar_enter(void)
34 34 {}	/* lint stub */
35 35
36 36 void
37 37 membar_exit(void)
38 38 {}	/* lint stub */
39 39
40 40 void
41 41 membar_producer(void)
42 42 {}	/* lint stub */
43 43
44 44 void
45 45 membar_consumer(void)
46 46 {}	/* lint stub */
47 47
48 48 void
49 49 instr_sbarrier(void)
50 50 {}	/* lint stub */
51 51
52 52 void
53 53 data_sbarrier(void)
54 54 {}	/* lint stub */
55 55
56 56 #else /* __lint */
57 57
58 58 /*
59 59 * NOTE: membar_enter, membar_exit, membar_producer, and
60 60 * membar_consumer are identical routines. We define them
61 61 * separately, instead of using ALTENTRY definitions to alias
62 62 * them together, so that DTrace and debuggers will see a unique
63 63 * address for them, allowing more accurate tracing.
64 64 */
65 65 ENTRY(membar_enter)
66 66 ALTENTRY(membar_sync)
67 67 dmb	@ full data memory barrier: orders all prior loads/stores
68 68 bx lr
69 69 SET_SIZE(membar_sync)
70 70 SET_SIZE(membar_enter)
71 71
72 72 ENTRY(membar_exit)
73 73 dmb	@ full data memory barrier
74 74 bx lr
75 75 SET_SIZE(membar_exit)
76 76
77 77 ENTRY(membar_producer)
78 78 dmb	@ full DMB (conservatively stronger than a store-only barrier)
79 79 bx lr
80 80 SET_SIZE(membar_producer)
81 81
82 82 ENTRY(membar_consumer)
83 83 dmb	@ full DMB (conservatively stronger than a load-only barrier)
84 84 bx lr
85 85 SET_SIZE(membar_consumer)
86 86
87 87 ENTRY(instr_sbarrier)
88 88 isb	@ instruction synchronization barrier: flush the pipeline
89 89 bx lr
90 90 SET_SIZE(instr_sbarrier)	@ fixed: was SET_SIZE(membar_consumer), a copy-paste bug that sized the wrong symbol
↓ open down ↓ |
90 lines elided |
↑ open up ↑ |
91 91
92 92 ENTRY(data_sbarrier)
93 93 dsb	@ fixed: was "isb" (copy-paste from instr_sbarrier); a data synchronization barrier must be DSB
94 94 bx lr
95 95 SET_SIZE(data_sbarrier)
96 96
97 97 #endif /* __lint */
98 98
99 99 #if defined(lint) || defined(__lint)
100 100
101 -/* The ARM architecture uses a modified Harvard Architecture which means that we
101 +/*
102 + * The ARM architecture uses a modified Harvard Architecture which means that we
102 103 * get the joys of fixing up this mess. Primarily this means that when we update
103 104 * data, it gets written to the data cache. That needs to be flushed to main
104 105 * memory and then the instruction cache needs to be invalidated. This is
105 106 * particularly important for things like krtld and DTrace. While the data cache
106 107 * does write itself out over time, we cannot rely on it having written itself
107 108 * out to the state that we care about by the time that we'd like it to. As
108 109 * such, we need to ensure that it's been flushed out ourselves. This also means
109 110 * that we could accidentally flush a region of the icache that's already
110 111 * updated itself, but that's just what we have to do to keep Von Neumann's
111 112 * spirit and great gift alive.
112 113 *
113 114 * The controllers for the caches have a few different options for invalidation.
114 115 * One may:
115 116 *
116 117 * o Invalidate or flush the entire cache
117 118 * o Invalidate or flush a cache line
118 119 * o Invalidate or flush a cache range
119 120 *
120 121 * We opt to take the third option here for the general case of making sure that
121 122 * text has been synchronized. While the data cache allows us to both invalidate
122 123 * and flush the cache line, we don't currently have a need to do the
123 124 * invalidation.
124 125 *
125 126 * Note that all of these operations should be aligned on an 8-byte boundary.
126 127 * The instructions actually only end up using bits [31:5] of an address.
127 128 * Callers are required to ensure that this is the case.
128 129 */
129 130
130 131 void
131 132 armv7_icache_disable(void)
132 133 {}	/* lint stub; real implementation is the assembly below */
133 134
134 135 void
135 136 armv7_icache_enable(void)
136 137 {}	/* lint stub */
137 138
138 139 void
139 140 armv7_dcache_disable(void)
140 141 {}	/* lint stub */
141 142
142 143 void
143 144 armv7_dcache_enable(void)
144 145 {}	/* lint stub */
145 146
146 147 void
147 148 armv7_icache_inval(void)
148 149 {}	/* lint stub */
149 150
150 151 void
151 152 armv7_dcache_inval(void)
152 153 {}	/* lint stub */
153 154
154 155 void
155 156 armv7_dcache_flush(void)
156 157 {}	/* lint stub */
157 158
158 159 void
159 160 armv7_text_flush_range(caddr_t start, size_t len)
160 161 {}	/* lint stub */
161 162
162 163 void
163 164 armv7_text_flush(void)
164 165 {}	/* lint stub */
165 166
166 167 #else /* __lint */
167 168
168 169 ENTRY(armv7_icache_enable)
169 170 mrc p15, 0, r0, c1, c0, 0	@ read SCTLR
170 171 orr r0, #0x1000			@ set SCTLR.I (i-cache enable)
171 172 mcr p15, 0, r0, c1, c0, 0	@ write SCTLR
bx lr	@ added: return was missing; control fell through into armv7_dcache_enable
172 173 SET_SIZE(armv7_icache_enable)
173 174
174 175 ENTRY(armv7_dcache_enable)
175 176 mrc p15, 0, r0, c1, c0, 0	@ read SCTLR
176 177 orr r0, #0x4			@ set SCTLR.C (d-cache enable)
177 178 mcr p15, 0, r0, c1, c0, 0	@ write SCTLR
bx lr	@ added: return was missing; control fell through into armv7_icache_disable
178 179 SET_SIZE(armv7_dcache_enable)
179 180
180 181 ENTRY(armv7_icache_disable)
181 182 mrc p15, 0, r0, c1, c0, 0	@ read SCTLR
182 183 bic r0, #0x1000			@ clear SCTLR.I (i-cache disable)
183 184 mcr p15, 0, r0, c1, c0, 0	@ write SCTLR
bx lr	@ added: return was missing; control fell through into armv7_dcache_disable
184 185 SET_SIZE(armv7_icache_disable)
185 186
186 187 ENTRY(armv7_dcache_disable)
187 188 mrc p15, 0, r0, c1, c0, 0	@ read SCTLR
188 189 bic r0, #0x4			@ clear SCTLR.C (d-cache disable)
189 190 mcr p15, 0, r0, c1, c0, 0	@ write SCTLR
bx lr	@ added: return was missing; control fell through into armv7_icache_inval
190 191 SET_SIZE(armv7_dcache_disable)
191 192
192 193 ENTRY(armv7_icache_inval)
193 194 mov r0, #0
194 195 mcr p15, 0, r0, c7, c5, 0 @ Invalidate i-cache (NOTE(review): no trailing ISB here — presumably callers synchronize; confirm)
195 196 bx lr
196 197 SET_SIZE(armv7_icache_inval)
197 198
198 199 ENTRY(armv7_dcache_inval)
199 200 mov r0, #0
200 201 mcr p15, 0, r0, c7, c6, 0 @ Invalidate d-cache (NOTE(review): whole-cache op by this encoding is pre-ARMv7; ARMv7 expects set/way loops — see the XXXARM rework note at the top of the file)
201 202 dsb	@ wait for the invalidate to complete
202 203 bx lr
203 204 SET_SIZE(armv7_dcache_inval)
204 205
205 206 ENTRY(armv7_dcache_flush)
206 207 mov r0, #0
207 208 mcr p15, 0, r0, c7, c10, 4 @ Flush d-cache (NOTE(review): on ARMv7 this encoding is the legacy drain-write-buffer/DSB op, not a full clean — see XXXARM note at top of file)
208 209 dsb
209 210 bx lr
210 211 SET_SIZE(armv7_dcache_flush)
211 212
212 213 ENTRY(armv7_text_flush_range)
213 214 add r1, r1, r0	@ r1 = start + len (one past the end)
214 215 sub r1, r1, #1	@ fixed: was "sub r1, r1, r0", which undid the add and left r1 = len; MCRR range ops take the inclusive end address
215 216 mcrr p15, 0, r1, r0, c5 @ Invalidate i-cache range
216 217 mcrr p15, 0, r1, r0, c12 @ Flush d-cache range
217 218 dsb	@ ensure d-cache clean completes before any further fetch
218 219 isb	@ discard any stale prefetched instructions
219 220 bx lr
220 221 SET_SIZE(armv7_text_flush_range)
221 222
222 223 ENTRY(armv7_text_flush)
223 224 mov r0, #0
224 225 mcr p15, 0, r0, c7, c5, 0 @ Invalidate i-cache
225 226 mcr p15, 0, r0, c7, c10, 4 @ Flush d-cache
226 227 dsb	@ wait for the d-cache flush to reach memory
227 228 isb	@ resynchronize the instruction stream
228 229 bx lr
229 230 SET_SIZE(armv7_text_flush)
230 231
231 232 #endif
232 233
233 234 #ifdef __lint
234 235
235 236 /*
236 237 * Perform all of the operations necessary for tlb maintenance after an update
237 238 * to the page tables.
238 239 */
239 240 void
240 241 armv7_tlb_sync(void)
241 242 {}	/* lint stub; real implementation is the assembly below */
242 243
243 244 #else /* __lint */
244 245
245 246 ENTRY(armv7_tlb_sync)
246 247 mov r0, #0
247 248 mcr p15, 0, r0, c7, c10, 4 @ Flush d-cache
248 249 dsb	@ ensure page-table writes are visible before TLB invalidate
249 250 mcr p15, 0, r0, c8, c7, 0 @ invalidate tlb
250 251 mcr p15, 0, r0, c8, c5, 0 @ Invalidate I-cache + btc
251 252 dsb	@ wait for the invalidations to complete
252 253 isb	@ resynchronize fetch with the new translations
253 254 bx lr
254 255 SET_SIZE(armv7_tlb_sync)
255 256
256 257 #endif /* __lint */
↓ open down ↓ |
145 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX