|
// Copyright (c) 1998-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\include\nkern\nk_cpu.h
//
// WARNING: This file contains some APIs which are internal and are subject
// to change without notice. Such APIs should therefore not be used
// outside the Kernel and Hardware Services package.
//

/**
@file
@publishedPartner
@released
*/

#ifndef __NK_CPU_H__
#define __NK_CPU_H__

#include <cpudefs.h>

#ifdef __CPU_ARM
#if defined(__CPU_GENERIC_ARM4__)
// no cache no MMU
#define __CPU_ARM_ABORT_MODEL_RESTORED
#endif

#if defined(__CPU_ARM710T__) || defined(__CPU_ARM720T__)
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_ARM_ABORT_MODEL_UPDATED
#define __CPU_WRITE_BUFFER
#endif

#ifdef __CPU_SA1__
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_ARM_ABORT_MODEL_RESTORED
#define __CPU_SPLIT_CACHE
#define __CPU_SPLIT_TLB
#define __CPU_WRITE_BUFFER
#define __CPU_HAS_ALT_D_CACHE
#define __CPU_WRITE_BACK_CACHE
#define __CPU_CACHE_FLUSH_BY_DATA_READ
#define __CPU_HAS_SINGLE_ENTRY_DCACHE_FLUSH
#endif

#if defined(__CPU_ARM920T__) || defined(__CPU_ARM925T__) || defined(__CPU_ARM926J__)
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_ARM_ABORT_MODEL_RESTORED
#define __CPU_SPLIT_CACHE
#define __CPU_SPLIT_TLB
#define __CPU_WRITE_BUFFER
#define __CPU_WRITE_BACK_CACHE
#define __CPU_CACHE_FLUSH_BY_WAY_SET_INDEX
#define __CPU_CACHE_POLICY_IN_PTE
#define __CPU_HAS_CACHE_TYPE_REGISTER
#define __CPU_HAS_SINGLE_ENTRY_ITLB_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_ICACHE_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_DCACHE_FLUSH
#endif

#ifdef __CPU_XSCALE__
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_ARM_ABORT_MODEL_RESTORED
#define __CPU_SPLIT_CACHE
#define __CPU_SPLIT_TLB
#define __CPU_WRITE_BUFFER
#ifndef __CPU_XSCALE_MANZANO__
#define __CPU_HAS_ALT_D_CACHE
#endif
#define __CPU_WRITE_BACK_CACHE
#define __CPU_CACHE_WRITE_ALLOCATE
#ifdef __CPU_XSCALE_MANZANO__
#define __CPU_CACHE_FLUSH_BY_WAY_SET_INDEX
#else
#define __CPU_CACHE_FLUSH_BY_LINE_ALLOC
#endif
#define __CPU_CACHE_POLICY_IN_PTE
#define __CPU_HAS_CACHE_TYPE_REGISTER
#define __CPU_HAS_SINGLE_ENTRY_ITLB_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_ICACHE_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_DCACHE_FLUSH
#define __CPU_HAS_BTB
#define __CPU_USE_MMU_TEX_FIELD
#define __CPU_HAS_COPROCESSOR_ACCESS_REG
#define __CPU_HAS_ACTLR
#endif

#if defined(__CPU_ARM1136__) || defined(__CPU_ARM11MP__) || defined(__CPU_ARM1176__) || defined(__CPU_CORTEX_A8__) || defined(__CPU_CORTEX_A9__)
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_CACHE_PHYSICAL_TAG
#define __CPU_SUPPORTS_FAST_PROCESS_SWITCH
#define __CPU_ARM_ABORT_MODEL_RESTORED
#define __CPU_SPLIT_CACHE

#if defined(__CPU_CORTEX_A9__) || defined(__CPU_CORTEX_A8__) || defined(__CPU_ARM1136__)
#define __CPU_SPLIT_TLB
#endif

#if defined(__CPU_CORTEX_A8__)
/* Internal cache controller maintains both inner & outer caches.
 * @internalComponent
 */
#define __CPU_OUTER_CACHE_IS_INTERNAL_CACHE
#endif

#if defined(__CPU_CORTEX_A9__) || defined(__CPU_ARM11MP__)
#define __CPU_SUPPORTS_TLBIMVAA
#endif

#if defined(__CPU_CORTEX_A9__)
#ifdef __SMP__
// #define __CPU_SUPPORTS_PAGE_TABLE_WALK_TO_L1_CACHE
#endif
#endif

#if (defined(__CPU_ARM1136__) && defined(__CPU_ARM1136_ERRATUM_399234_FIXED)) || (defined(__CPU_ARM11MP__) && defined(__SMP__))
// Page tables on these platforms are either uncached or write-through cached.
#else
// Page/directory tables are fully cached (write-back) on these platforms.
#define __CPU_PAGE_TABLES_FULLY_CACHED
#endif

#define __CPU_WRITE_BUFFER
#define __CPU_WRITE_BACK_CACHE
#define __CPU_CACHE_WRITE_ALLOCATE
#define __CPU_CACHE_FLUSH_BY_WAY_SET_INDEX
#define __CPU_CACHE_POLICY_IN_PTE
#define __CPU_HAS_CACHE_TYPE_REGISTER
#define __CPU_HAS_SINGLE_ENTRY_ITLB_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_ICACHE_FLUSH
#define __CPU_HAS_SINGLE_ENTRY_DCACHE_FLUSH
#define __CPU_HAS_BTB
#define __CPU_HAS_COPROCESSOR_ACCESS_REG
#define __CPU_HAS_PREFETCH_BUFFER
#define __CPU_HAS_ACTLR
#define __CPU_HAS_TTBR1

#if !defined(__CPU_ARM1136__)
#define __CPU_MEMORY_TYPE_REMAPPING
#endif

#if defined(__CPU_ARM11MP__) && defined(__SMP__)
#define __BROADCAST_CACHE_MAINTENANCE__
#endif

#if defined(__CPU_ARM11MP__) || defined(__CPU_ARM1176__)
#define __CPU_NEEDS_BTAC_FLUSH_AFTER_ASID_CHANGE
#endif

#define __CPU_CACHE_HAS_COLOUR
#define __CPU_I_CACHE_HAS_COLOUR

#if defined(__CPU_ARM1136__) || defined(__CPU_ARM1176__)
#define __CPU_D_CACHE_HAS_COLOUR
#elif defined(__CPU_ARM11MP__)
// MPCore has a physically indexed D cache, so no colour problems
#else
// Assume other ARM cores have a virtually indexed D cache with broken alias avoidance hardware...
#define __CPU_D_CACHE_HAS_COLOUR
#endif


#endif

#ifdef __FIQ_RESERVED_FOR_SECURE_STATE__
#define __FIQ_IS_UNCONTROLLED__
#endif

#if defined(__CPU_MEMORY_TYPE_REMAPPING) || defined(__MEMMODEL_FLEXIBLE__)
#define __MMU_USE_SYMMETRIC_ACCESS_PERMISSIONS
#endif

#if defined(__CPU_ARM1136__) && !defined(__CPU_ARM1136_ERRATUM_353494_FIXED) && defined(__MMU_USE_SYMMETRIC_ACCESS_PERMISSIONS)
#define ERRATUM_353494_MODE_CHANGE(cc,r) FLUSH_BTB(cc,r)
#else
#define ERRATUM_353494_MODE_CHANGE(cc,r)
#endif

#ifdef __CPU_HAS_MMU
#define __CPU_ARM_USE_DOMAINS
#endif

#if defined(__ARM_L210_CACHE__) || defined(__ARM_L220_CACHE__) || defined(__ARM_PL310_CACHE__)
/**
Indicates the presence of an external cache controller.
@internalTechnology
*/
#define __HAS_EXTERNAL_CACHE__
#endif

#ifndef __CPU_HAS_MMU
#define CPWAIT(cc,r) /**< @internalTechnology */
#endif

#include <arm_vfp.h>

// CP15 definitions
#if defined(__CPU_ARM710T__) || defined(__CPU_ARM720T__)
#define FLUSH_DCACHE(cc,r)          asm("mcr"#cc" p15, 0, "#r", c7, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_ICACHE(cc,r)          asm("mcr"#cc" p15, 0, "#r", c7, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_IDCACHE(cc,r)         asm("mcr"#cc" p15, 0, "#r", c7, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_DTLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_ITLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_IDTLB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");    /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c7, 1 "); /**< @internalTechnology */
#define FLUSH_ITLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c7, 1 "); /**< @internalTechnology */
#define FLUSH_IDTLB_ENTRY(cc,addr)  asm("mcr"#cc" p15, 0, "#addr", c8, c7, 1 "); /**< @internalTechnology */
#define DRAIN_WRITE_BUFFER(cc,r,rd) // this seems dodgy on Windermere and it works without it
#define CPWAIT(cc,r) /**< @internalTechnology */

#elif defined(__CPU_SA1__)
#define FLUSH_ICACHE(cc,r)          asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 ");  /**< @internalTechnology */
#define PURGE_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 ");  /**< @internalTechnology */
#define CLEAN_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); /**< @internalTechnology */
#define FLUSH_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 "); /**< @internalTechnology */
#define FLUSH_DTLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c6, 0 ");  /**< @internalTechnology */
#define FLUSH_ITLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c5, 0 ");  /**< @internalTechnology */
#define FLUSH_IDTLB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");  /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c6, 1 "); /**< @internalTechnology */
#define DRAIN_WRITE_BUFFER(cc,r,rd) asm("mcr"#cc" p15, 0, "#r", c7, c10, 4 ");
#define CPWAIT(cc,r) /**< @internalTechnology */

#elif defined(__CPU_ARM920T__) || defined(__CPU_ARM925T__) || defined(__CPU_ARM926J__)
#define FLUSH_ICACHE(cc,r)          asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 ");  /**< @internalTechnology */
#define FLUSH_ICACHE_LINE(cc,r,tmp) asm("mcr"#cc" p15, 0, "#r", c7, c5, 1 ");  /**< @internalTechnology */
#define PURGE_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 ");  /**< @internalTechnology */
#define CLEAN_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); /**< @internalTechnology */
#define CLEAN_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c10, 2 "); /**< @internalTechnology */
#define FLUSH_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c14, 1 "); /**< @internalTechnology */
#define FLUSH_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c14, 2 "); /**< @internalTechnology */
#define FLUSH_DTLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c6, 0 ");  /**< @internalTechnology */
#define FLUSH_ITLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c5, 0 ");  /**< @internalTechnology */
#define FLUSH_IDTLB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");  /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c6, 1 "); /**< @internalTechnology */
#define FLUSH_ITLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c5, 1 "); /**< @internalTechnology */
#define DRAIN_WRITE_BUFFER(cc,r,rd) asm("mcr"#cc" p15, 0, "#r", c7, c10, 4 ");
#define CPWAIT(cc,r) /**< @internalTechnology */
#define CACHE_MAINTENANCE_PDE_PTE_UPDATED(r) DRAIN_WRITE_BUFFER(,r,r);

#elif defined(__CPU_XSCALE__)
//#define FLUSH_ICACHE(cc,r)        asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 ");
#define FLUSH_ICACHE(cc,r)          asm("mcr"#cc" p15, 0, "#r", c7, c5, 0; sub"#cc" pc, pc, #4 "); /**< @internalTechnology */ // A step hack
#define FLUSH_ICACHE_LINE(cc,r,tmp) asm("mcr"#cc" p15, 0, "#r", c7, c5, 1 ");  /**< @internalTechnology */
#ifdef __CPU_XSCALE_MANZANO__
#define PURGE_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 ");  /**< @internalTechnology */
#define CLEAN_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); /**< @internalTechnology */
#define CLEAN_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c10, 2 "); /**< @internalTechnology */
#define FLUSH_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c14, 1 "); /**< @internalTechnology */
#define FLUSH_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c14, 2 "); /**< @internalTechnology */
#else
#define PURGE_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 "); asm("nop ");  /**< @internalTechnology */ // PXA250 ERRATUM 96
#define CLEAN_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); asm("nop "); /**< @internalTechnology */ // PXA250 ERRATUM 96
#define FLUSH_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 "); asm("nop "); /**< @internalTechnology */ // PXA250 ERRATUM 96
#define ALLOC_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c2, 5 ");  /**< @internalTechnology */
#endif
#define FLUSH_DTLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c6, 0 ");  /**< @internalTechnology */
#define FLUSH_ITLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c5, 0 ");  /**< @internalTechnology */
#define FLUSH_IDTLB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");  /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c6, 1 "); asm("nop "); asm("nop "); /**< @internalTechnology */ // PXA250 ERRATUM 21
#define FLUSH_ITLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c5, 1 "); /**< @internalTechnology */

#ifdef __CPU_XSCALE_MANZANO__
#define DRAIN_WRITE_BUFFER(cc,r,rd) asm("mcr"#cc" p15, 0, "#r", c7, c10, 4 ");
#else //__CPU_XSCALE_MANZANO__
// PXA250 ERRATUM 14
#define DRAIN_WRITE_BUFFER(cc,r,rd) asm("mcr"#cc" p15, 0, "#r", c7, c10, 4 "); \
                                    asm("ldr"#cc" "#rd", [pc] ");              \
                                    asm("add pc, pc, #0 ");                    \
                                    asm(".word %a0" : : "i" ((TInt)&SuperPageAddress)); \
                                    asm("ldr"#cc" "#rd", ["#rd"] ");           \
                                    asm("ldr"#cc" "#rd", ["#rd", #%a0]" : : "i" _FOFF(TSuperPage,iUncachedAddress)); \
                                    asm("ldr"#cc" "#rd", ["#rd"] ");
#endif //else __CPU_XSCALE_MANZANO__
//#define FLUSH_BTB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c7, c5, 6 ");
#define FLUSH_BTB(cc,r)             asm("mcr"#cc" p15, 0, "#r", c7, c5, 6; sub"#cc" pc, pc, #4 "); /**< @internalTechnology */ // A step hack
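// The CPWAIT sequence below (CP15 read, register dependency, branch) follows the
// Intel-documented XScale idiom for stalling until a preceding CP15 update has
// taken effect; the "sub pc, pc, #4" branch to the next instruction forces a
// pipeline flush.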
|
#define CPWAIT(cc,r)    asm("mrc"#cc" p15, 0, "#r", c2, c0, 0; mov"#cc" "#r","#r"; sub"#cc" pc, pc, #4 "); /**< @internalTechnology */
#define GET_CAR(cc,r)   asm("mrc"#cc" p15, 0, "#r", c15, c1, 0 "); /**< @internalTechnology */
#define SET_CAR(cc,r)   asm("mcr"#cc" p15, 0, "#r", c15, c1, 0 "); /**< @internalTechnology */

#elif defined(__CPU_ARMV6) // end of elif __CPU_XSCALE

#if !defined(__CPU_ARM1136_ERRATUM_411920_FIXED) && (defined(__CPU_ARM1136__) || defined(__CPU_ARM1176__))
/** @internalTechnology */
#define FLUSH_ICACHE(cc,r,rt)   asm("mrs "#rt", cpsr"); \
                                CPSIDAIF; \
                                asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 "); \
                                asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 "); \
                                asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 "); \
                                asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 "); \
                                asm("msr cpsr_c, "#rt); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop"); \
                                asm("nop");

#else
#define FLUSH_ICACHE(cc,r)      asm("mcr"#cc" p15, 0, "#r", c7, c5, 0 "); /**< @internalTechnology */
#endif // else !(__CPU_ARM1136_ERRATUM_411920_FIXED) && (__CPU_ARM1136__ || __CPU_ARM1176__)
#if defined(__CPU_ARM1136_ERRATUM_371025_FIXED) || !defined(__CPU_ARM1136__)
#define FLUSH_ICACHE_LINE(cc,r,tmp) asm("mcr"#cc" p15, 0, "#r", c7, c5, 1 "); /**< @internalTechnology */
#else // workaround for erratum 371025...
/** @internalTechnology */
#define FLUSH_ICACHE_LINE(cc,r,tmp) asm("orr"#cc" "#tmp", "#r", #0xC0000000 ");    \
                                    asm("bic"#cc" "#tmp", "#tmp", #1 ");           \
                                    asm("mcr"#cc" p15, 0, "#tmp", c7, c5, 2 ");    \
                                    asm("sub"#cc" "#tmp", "#tmp", #0x40000000 ");  \
                                    asm("mcr"#cc" p15, 0, "#tmp", c7, c5, 2 ");    \
                                    asm("sub"#cc" "#tmp", "#tmp", #0x40000000 ");  \
                                    asm("mcr"#cc" p15, 0, "#tmp", c7, c5, 2 ");    \
                                    asm("sub"#cc" "#tmp", "#tmp", #0x40000000 ");  \
                                    asm("mcr"#cc" p15, 0, "#tmp", c7, c5, 2 ");
#endif // else (__CPU_ARM1136_ERRATUM_371025_FIXED) || !(__CPU_ARM1136__)
#define FLUSH_ICACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c5, 2 ");  /**< @internalTechnology */
#define PURGE_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c6, 1 ");  /**< @internalTechnology */
#define PURGE_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c6, 2 ");  /**< @internalTechnology */
#define CLEAN_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c10, 1 "); /**< @internalTechnology */
#define CLEAN_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c10, 2 "); /**< @internalTechnology */
#define FLUSH_DCACHE_LINE(cc,r)     asm("mcr"#cc" p15, 0, "#r", c7, c14, 1 "); /**< @internalTechnology */
#define FLUSH_DCACHE_INDEX(cc,r)    asm("mcr"#cc" p15, 0, "#r", c7, c14, 2 "); /**< @internalTechnology */
#define FLUSH_ITLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c5, 0 ");  /**< @internalTechnology */
#define FLUSH_DTLB(cc,r)            asm("mcr"#cc" p15, 0, "#r", c8, c6, 0 ");  /**< @internalTechnology */
#define FLUSH_IDTLB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c8, c7, 0 ");  /**< @internalTechnology */

// addr must include ASID
#if defined(__CPU_ARM11MP__)
#define FLUSH_ITLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c5, 3 "); /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c6, 3 "); /**< @internalTechnology */
#else //(__CPU_ARM11MP__)
#define FLUSH_ITLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c5, 1 "); /**< @internalTechnology */
#define FLUSH_DTLB_ENTRY(cc,addr)   asm("mcr"#cc" p15, 0, "#addr", c8, c6, 1 "); /**< @internalTechnology */
#endif // else (__CPU_ARM11MP__)
#define FLUSH_ITLB_ASID(cc,asid)    asm("mcr"#cc" p15, 0, "#asid", c8, c5, 2 "); /**< @internalTechnology */
#define FLUSH_DTLB_ASID(cc,asid)    asm("mcr"#cc" p15, 0, "#asid", c8, c6, 2 "); /**< @internalTechnology */
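// Illustrative sketch (not part of the original source): on the non-MP encodings
// above, the entry flushes expect the page's virtual address combined with the
// ASID in bits 7:0 of the operand. Assuming r0 holds a page-aligned address (so
// its low bits are clear) and r1 holds the ASID, a caller might write:
//
//     asm("orr r0, r0, r1 ");   // r0 = MVA | ASID
//     FLUSH_ITLB_ENTRY(,r0);    // invalidate the I-TLB entry for that page/ASID
//     FLUSH_DTLB_ENTRY(,r0);    // invalidate the D-TLB entry likewise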
|

#define DRAIN_WRITE_BUFFER(cc,r,rd) asm("mcr"#cc" p15, 0, "#r", c7, c10, 4 ");
#define DATA_MEMORY_BARRIER(cc,r)   asm("mcr"#cc" p15, 0, "#r", c7, c10, 5 ");
#define FLUSH_PREFETCH_BUFFER(cc,r) asm("mcr"#cc" p15, 0, "#r", c7, c5, 4 ");  /**< @internalTechnology */
#define FLUSH_BTB(cc,r)             asm("mcr"#cc" p15, 0, "#r", c7, c5, 6 ");  /**< @internalTechnology */
#define CPWAIT(cc,r) /**< @internalTechnology */ // not sure about this
#define GET_CAR(cc,r)               asm("mrc"#cc" p15, 0, "#r", c1, c0, 2 ");  /**< @internalTechnology */
#define SET_CAR(cc,r)               asm("mcr"#cc" p15, 0, "#r", c1, c0, 2 ");  /**< @internalTechnology */

#if defined(__CPU_PAGE_TABLES_FULLY_CACHED)
#define CACHE_MAINTENANCE_PDE_PTE_UPDATED(r)    CLEAN_DCACHE_LINE(,r); \
                                                DRAIN_WRITE_BUFFER(,r,r);
#else
#define CACHE_MAINTENANCE_PDE_PTE_UPDATED(r)    DRAIN_WRITE_BUFFER(,r,r);
#endif // end of __CPU_PAGE_TABLES_FULLY_CACHED

#elif defined(__CPU_ARMV7) // end of elif (__CPU_ARMV6)

// Define new-style cache/TLB maintenance instructions
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define ICIALLU     asm("mcr p15, 0, r0, c7, c10, 5 "); \
                    asm("mcr p15, 0, r0, c7, c5, 0 ");  /**< @internalTechnology */
#else
#define ICIALLU     asm("mcr p15, 0, r0, c7, c5, 0 ");  /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)
#define ICIMVAU(r)  asm("mcr p15, 0, "#r", c7, c5, 1 ");  /**< @internalTechnology */
#define BPIALL      asm("mcr p15, 0, r0, c7, c5, 6 ");    /**< @internalTechnology */
#define BPIMVA(r)   asm("mcr p15, 0, "#r", c7, c5, 7 ");  /**< @internalTechnology */
#define DCIMVAC(r)  asm("mcr p15, 0, "#r", c7, c6, 1 ");  /**< @internalTechnology */
#define DCISW(r)    asm("mcr p15, 0, "#r", c7, c6, 2 ");  /**< @internalTechnology */
#define DCCMVAC(r)  asm("mcr p15, 0, "#r", c7, c10, 1 "); /**< @internalTechnology */
#define DCCSW(r)    asm("mcr p15, 0, "#r", c7, c10, 2 "); /**< @internalTechnology */
#define DCCMVAU(r)  asm("mcr p15, 0, "#r", c7, c11, 1 "); /**< @internalTechnology */
#define DCCIMVAC(r) asm("mcr p15, 0, "#r", c7, c14, 1 "); /**< @internalTechnology */
#define DCCISW(r)   asm("mcr p15, 0, "#r", c7, c14, 2 "); /**< @internalTechnology */

#ifdef __SMP__
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define ICIALLUIS   asm("mcr p15, 0, r0, c7, c10, 5 "); \
                    asm("mcr p15, 0, r0, c7, c1, 0 ");  /**< @internalTechnology */
#else
#define ICIALLUIS   asm("mcr p15, 0, r0, c7, c1, 0 ");  /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)
#define BPIALLIS    asm("mcr p15, 0, r0, c7, c1, 6 ");  /**< @internalTechnology */
#endif // end of __SMP__

#ifdef __CPU_SPLIT_TLB
#define ITLBIALL        asm("mcr p15, 0, r0, c8, c5, 0 ");   /**< @internalTechnology */
#define ITLBIMVA(r)     asm("mcr p15, 0, "#r", c8, c5, 1 "); /**< @internalTechnology */
#define ITLBIASID(r)    asm("mcr p15, 0, "#r", c8, c5, 2 "); /**< @internalTechnology */
#define DTLBIALL        asm("mcr p15, 0, r0, c8, c6, 0 ");   /**< @internalTechnology */
#define DTLBIMVA(r)     asm("mcr p15, 0, "#r", c8, c6, 1 "); /**< @internalTechnology */
#define DTLBIASID(r)    asm("mcr p15, 0, "#r", c8, c6, 2 "); /**< @internalTechnology */
#endif
#define UTLBIALL        asm("mcr p15, 0, r0, c8, c7, 0 ");   /**< @internalTechnology */
#define UTLBIMVA(r)     asm("mcr p15, 0, "#r", c8, c7, 1 "); /**< @internalTechnology */
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define UTLBIASID(r)    asm("mcr p15, 0, r0, c7, c10, 5 "); \
                        asm("mcr p15, 0, "#r", c8, c7, 2 "); /**< @internalTechnology */
#else
#define UTLBIASID(r)    asm("mcr p15, 0, "#r", c8, c7, 2 "); /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)

#ifdef __CPU_SUPPORTS_TLBIMVAA
#ifdef __CPU_SPLIT_TLB
#define ITLBIMVAA(r)    asm("mcr p15, 0, "#r", c8, c5, 3 "); /**< @internalTechnology */
#define DTLBIMVAA(r)    asm("mcr p15, 0, "#r", c8, c6, 3 "); /**< @internalTechnology */
#endif // end of __CPU_SPLIT_TLB
#define UTLBIMVAA(r)    asm("mcr p15, 0, "#r", c8, c7, 3 "); /**< @internalTechnology */
#endif // end of __CPU_SUPPORTS_TLBIMVAA

#ifdef __SMP__
#ifdef __CPU_SPLIT_TLB
#define ITLBIALLIS      asm("mcr p15, 0, r0, c8, c3, 0 ");   /**< @internalTechnology */
#define ITLBIMVAIS(r)   asm("mcr p15, 0, "#r", c8, c3, 1 "); /**< @internalTechnology */
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define ITLBIASIDIS(r)  asm("mcr p15, 0, r0, c7, c10, 5 "); \
                        asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#else
#define ITLBIASIDIS(r)  asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)
#define DTLBIALLIS      asm("mcr p15, 0, r0, c8, c3, 0 ");   /**< @internalTechnology */
#define DTLBIMVAIS(r)   asm("mcr p15, 0, "#r", c8, c3, 1 "); /**< @internalTechnology */
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define DTLBIASIDIS(r)  asm("mcr p15, 0, r0, c7, c10, 5 "); \
                        asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#else
#define DTLBIASIDIS(r)  asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)
#endif // end of __CPU_SPLIT_TLB
#define UTLBIALLIS      asm("mcr p15, 0, r0, c8, c3, 0 ");   /**< @internalTechnology */
#define UTLBIMVAIS(r)   asm("mcr p15, 0, "#r", c8, c3, 1 "); /**< @internalTechnology */
#if defined(__CPU_CORTEX_A9__) && !defined(__CPU_ARM_A9_ERRATUM_571618_FIXED)
// ARM Cortex-A9 MPCore erratum 571618 workaround
// Execute memory barrier before interruptible CP15 operations
#define UTLBIASIDIS(r)  asm("mcr p15, 0, r0, c7, c10, 5 "); \
                        asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#else
#define UTLBIASIDIS(r)  asm("mcr p15, 0, "#r", c8, c3, 2 "); /**< @internalTechnology */
#endif // end of else (__CPU_CORTEX_A9__) && !(__CPU_ARM_A9_ERRATUM_571618_FIXED)

#ifdef __CPU_SUPPORTS_TLBIMVAA
#ifdef __CPU_SPLIT_TLB
#define ITLBIMVAAIS(r)  asm("mcr p15, 0, "#r", c8, c3, 3 "); /**< @internalTechnology */
#define DTLBIMVAAIS(r)  asm("mcr p15, 0, "#r", c8, c3, 3 "); /**< @internalTechnology */
#endif // end of __CPU_SPLIT_TLB
#define UTLBIMVAAIS(r)  asm("mcr p15, 0, "#r", c8, c3, 3 "); /**< @internalTechnology */
#endif // end of __CPU_SUPPORTS_TLBIMVAA
#endif // end of __SMP__

#define DRAIN_WRITE_BUFFER(cc,r,rd) __DATA_SYNC_BARRIER__(r)
#define DATA_MEMORY_BARRIER(cc,r)   __DATA_MEMORY_BARRIER__(r)
#define FLUSH_PREFETCH_BUFFER(cc,r) __INST_SYNC_BARRIER__(r) /**< @internalTechnology */
//#define FLUSH_BTB(cc,r)           asm("mcr"#cc" p15, 0, "#r", c7, c5, 6 "); /**< @internalTechnology */

#define CPWAIT(cc,r) /**< @internalTechnology */ // not sure about this
#define GET_CAR(cc,r)   asm("mrc"#cc" p15, 0, "#r", c1, c0, 2 "); /**< @internalTechnology */
#define SET_CAR(cc,r)   asm("mcr"#cc" p15, 0, "#r", c1, c0, 2 "); \
                        __INST_SYNC_BARRIER__(r) /**< @internalTechnology */

#if !defined(__CPU_SUPPORTS_PAGE_TABLE_WALK_TO_L1_CACHE) && defined(__CPU_PAGE_TABLES_FULLY_CACHED)
#define CACHE_MAINTENANCE_PDE_PTE_UPDATED(r)    DCCMVAU(r); \
                                                __DATA_SYNC_BARRIER__(r);
#else
#define CACHE_MAINTENANCE_PDE_PTE_UPDATED(r)    __DATA_SYNC_BARRIER__(r);
#endif // end of !(__CPU_SUPPORTS_PAGE_TABLE_WALK_TO_L1_CACHE) && (__CPU_PAGE_TABLES_FULLY_CACHED)

#endif // end of elif (__CPU_ARMV7)

/**
CPU_ARM1136_ERRATUM_317041: Bits [4:3] of Translation Table Base address registers (TTBR0, TTBR1)
do not read back correctly, but instead always return 0.
@internalComponent
@released
*/
#if defined(__CPU_ARM1136__) && defined(__HAS_EXTERNAL_CACHE__) && !defined(__CPU_ARM1136_ERRATUM_317041_FIXED)
#define UPDATE_PW_CACHING_ATTRIBUTES(cc,r)  asm("orr"#cc" "#r", "#r", #0x18")
#else
#define UPDATE_PW_CACHING_ATTRIBUTES(cc,r)
#endif

// Instruction macros

#if defined(__CPU_ARMV6) || defined(__CPU_ARMV7)
#define SRSgen(P,U,W,mode)  asm(".word %a0" : : "i" ((TInt)(0xf84d0500|(P<<24)|(U<<23)|(W<<21)|(mode))));
#define SRSIA(mode)     SRSgen(0,1,0,mode)
#define SRSIAW(mode)    SRSgen(0,1,1,mode)
#define SRSDB(mode)     SRSgen(1,0,0,mode)
#define SRSDBW(mode)    SRSgen(1,0,1,mode)
#define SRSIB(mode)     SRSgen(1,1,0,mode)
#define SRSIBW(mode)    SRSgen(1,1,1,mode)
#define SRSDA(mode)     SRSgen(0,0,0,mode)
#define SRSDAW(mode)    SRSgen(0,0,1,mode)
#define RFEgen(P,U,W,base)  asm(".word %a0" : : "i" ((TInt)(0xf8100a00|(P<<24)|(U<<23)|(W<<21)|(base<<16))));
#define RFEIA(base)     RFEgen(0,1,0,base)
#define RFEIAW(base)    RFEgen(0,1,1,base)
#define RFEDB(base)     RFEgen(1,0,0,base)
#define RFEDBW(base)    RFEgen(1,0,1,base)
#define RFEIB(base)     RFEgen(1,1,0,base)
#define RFEIBW(base)    RFEgen(1,1,1,base)
#define RFEDA(base)     RFEgen(0,0,0,base)
#define RFEDAW(base)    RFEgen(0,0,1,base)
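// Worked example (illustrative, not from the original source): SRSDBW(MODE_SVC)
// expands to SRSgen(1,0,1,0x13), emitting the literal word
//     0xf84d0500 | (1<<24) | (0<<23) | (1<<21) | 0x13 = 0xf96d0513
// which encodes "srsdb sp!, #0x13": store R14 and SPSR to the SVC-mode stack
// with base write-back. The RFE macros build "rfe" words the same way, with
// the base register number placed in bits 19:16.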
|
#elif defined(__CPU_XSCALE__) // end of (__CPU_ARMV6) || (__CPU_ARMV7)
#define MAR(acc,RdLo,RdHi)      MCRR(0,0,RdLo,RdHi,acc)
#define MARcc(cc,acc,RdLo,RdHi) MCRR(cc,0,0,RdLo,RdHi,acc)
#define MRA(acc,RdLo,RdHi)      MRRC(0,0,RdLo,RdHi,acc)
#define MRAcc(cc,acc,RdLo,RdHi) MRRC(cc,0,0,RdLo,RdHi,acc)
#define MIAgen(cc,acc,Rm,Rs,opc3)   asm(".word %a0" : : "i" ((TInt)0x0e200010|((cc)<<28)|((opc3)<<16)|((Rs)<<12)|((acc)<<5)|(Rm)));
#define MIA(acc,Rm,Rs)          MIAgen(CC_AL,acc,Rm,Rs,0)
#define MIAPH(acc,Rm,Rs)        MIAgen(CC_AL,acc,Rm,Rs,8)
#define MIABB(acc,Rm,Rs)        MIAgen(CC_AL,acc,Rm,Rs,12)
#define MIATB(acc,Rm,Rs)        MIAgen(CC_AL,acc,Rm,Rs,13)
#define MIABT(acc,Rm,Rs)        MIAgen(CC_AL,acc,Rm,Rs,14)
#define MIATT(acc,Rm,Rs)        MIAgen(CC_AL,acc,Rm,Rs,15)
#define MIAcc(cc,acc,Rm,Rs)     MIAgen(cc,acc,Rm,Rs,0)
#define MIAPHcc(cc,acc,Rm,Rs)   MIAgen(cc,acc,Rm,Rs,8)
#define MIABBcc(cc,acc,Rm,Rs)   MIAgen(cc,acc,Rm,Rs,12)
#define MIATBcc(cc,acc,Rm,Rs)   MIAgen(cc,acc,Rm,Rs,13)
#define MIABTcc(cc,acc,Rm,Rs)   MIAgen(cc,acc,Rm,Rs,14)
#define MIATTcc(cc,acc,Rm,Rs)   MIAgen(cc,acc,Rm,Rs,15)
#endif // end of elif (__CPU_XSCALE__)

#ifdef __CPU_ARM_HAS_CPS
#define CPSgen(im,mm,f,mode)    asm(".word %a0" : : "i" ((TInt)(0xf1000000|((im)<<18)|((mm)<<17)|((f)<<6)|(mode))))
#if __ARM_ASSEMBLER_ISA__ >= 6
#define CPSIDAIF    asm("cpsidaif ")
#define CPSIDAI     asm("cpsidai ")
#define CPSIDIF     asm("cpsidif ")
#define CPSIDI      asm("cpsidi ")
#define CPSIDF      asm("cpsidf ")
#define CPSIEAIF    asm("cpsieaif ")
#define CPSIEI      asm("cpsiei ")
#define CPSIEF      asm("cpsief ")
#define CPSIEIF     asm("cpsieif ")
#else
#define CPSIDAIF    CPSgen(3,0,7,0)     // disable all interrupts, leave mode alone
#define CPSIDAI     CPSgen(3,0,6,0)     // disable aborts and IRQs, leave mode alone
#define CPSIDIF     CPSgen(3,0,3,0)     // disable IRQs and FIQs, leave mode alone
#define CPSIDI      CPSgen(3,0,2,0)     // disable IRQs, leave mode alone
#define CPSIDF      CPSgen(3,0,1,0)     // disable FIQs, leave mode alone
#define CPSIEAIF    CPSgen(2,0,7,0)     // enable all interrupts, leave mode alone
#define CPSIEI      CPSgen(2,0,2,0)     // enable IRQs, leave mode alone
#define CPSIEF      CPSgen(2,0,1,0)     // enable FIQs, leave mode alone
#define CPSIEIF     CPSgen(2,0,3,0)     // enable IRQs and FIQs, leave mode alone
#endif // end of __ARM_ASSEMBLER_ISA__ >= 6
#define CPSIDAIFM(mode)  CPSgen(3,1,7,mode)  // disable all interrupts and change mode
#define CPSIDIFM(mode)   CPSgen(3,1,3,mode)  // disable IRQs and FIQs, and change mode
#define CPSIDAIM(mode)   CPSgen(3,1,6,mode)  // disable aborts and IRQs, and change mode
#define CPSIDIM(mode)    CPSgen(3,1,2,mode)  // disable IRQs and change mode
#define CPSIDFM(mode)    CPSgen(3,1,1,mode)  // disable FIQs and change mode
#define CPSIEAIFM(mode)  CPSgen(2,1,7,mode)  // enable all interrupts and change mode
#define CPSIEIM(mode)    CPSgen(2,1,2,mode)  // enable IRQs and change mode
#define CPSIEFM(mode)    CPSgen(2,1,1,mode)  // enable FIQs and change mode
#define CPSIEIFM(mode)   CPSgen(2,1,3,mode)  // enable IRQs and FIQs, and change mode
#define CPSCHM(mode)     CPSgen(0,1,0,mode)  // change mode, leave interrupt masks alone
#endif // end of __CPU_ARM_HAS_CPS
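// Worked example (illustrative, not from the original source): with a pre-v6
// assembler, CPSIDAIFM(MODE_SVC) expands to CPSgen(3,1,7,0x13), emitting
//     0xf1000000 | (3<<18) | (1<<17) | (7<<6) | 0x13 = 0xf10e01d3
// i.e. "cpsid aif, #0x13": mask aborts, IRQs and FIQs and enter supervisor
// mode in a single instruction.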
|
// Processor modes
#define MODE_USR 0x10
#define MODE_FIQ 0x11
#define MODE_IRQ 0x12
#define MODE_SVC 0x13
#define MODE_ABT 0x17
#define MODE_UND 0x1b
#define MODE_SYS 0x1f

// Macros for changing processor mode and interrupt status
//
// Two instructions are necessary prior to ARMv6, and these may be interleaved.
//
// SET_MODE - sets mode and interrupt status
// SET_INTS - sets interrupt status (requires knowing the current mode at compile time)
// INTS_ON  - enables interrupts (requires the cpsr value be available at run time)
// INTS_OFF - disables interrupts (requires the cpsr value be available at run time)
//
// An illustrative usage sketch follows the macro definitions below.

#ifdef __CPU_ARM_HAS_CPS

#define INTS_ALL_OFF    IDIF
#define INTS_IRQ_OFF    IDI
#define INTS_FIQ_ON     IEF
#define INTS_ALL_ON     IEIF

#define CONCAT2(a,b)    a##b
#define CONCAT3(a,b,c)  a##b##c

#define SET_MODE_1(rd, newMode, newInts)
#define SET_MODE_2(rd, newMode, newInts)        CONCAT3(CPS, newInts, M)(newMode)

#define SET_INTS_1(rd, currentMode, newInts)
#define SET_INTS_2(rd, currentMode, newInts)    CONCAT2(CPS, newInts)

#define INTS_ON_1(rd, rCpsr, newInts)
#define INTS_ON_2(rd, rCpsr, newInts)           CONCAT2(CPS, newInts)

#define INTS_OFF_1(rd, rCpsr, newInts)
#define INTS_OFF_2(rd, rCpsr, newInts)          CONCAT2(CPS, newInts)

#else // __CPU_ARM_HAS_CPS

#define INTS_ALL_OFF    0xc0
#define INTS_IRQ_OFF    0x80
#define INTS_FIQ_ON     0x80
#define INTS_ALL_ON     0x00

#define SET_MODE_1(rd, newMode, newInts)        asm("mov "#rd", #%a0" : : "i" (newMode | newInts))
#define SET_MODE_2(rd, newMode, newInts)        asm("msr cpsr_c, "#rd)

#define SET_INTS_1(rd, currentMode, newInts)    SET_MODE_1(rd, currentMode, newInts)
#define SET_INTS_2(rd, currentMode, newInts)    SET_MODE_2(rd, currentMode, newInts)

#define INTS_ON_1(rd, rCpsr, newInts)           asm("bic "#rd", "#rCpsr", #%a0" : : "i" (newInts ^ 0xc0))
#define INTS_ON_2(rd, rCpsr, newInts)           asm("msr cpsr_c, "#rd)

#define INTS_OFF_1(rd, rCpsr, newInts)          asm("orr "#rd", "#rCpsr", #%a0" : : "i" (newInts))
#define INTS_OFF_2(rd, rCpsr, newInts)          asm("msr cpsr_c, "#rd)

#endif // end of __CPU_ARM_HAS_CPS

#define SET_MODE(rd, newMode, newInts)          SET_MODE_1(rd, newMode, newInts); SET_MODE_2(rd, newMode, newInts)
#define SET_INTS(rd, currentMode, newInts)      SET_INTS_1(rd, currentMode, newInts); SET_INTS_2(rd, currentMode, newInts)
#define INTS_ON(rd, rCpsr, newInts)             INTS_ON_1(rd, rCpsr, newInts); INTS_ON_2(rd, rCpsr, newInts)
#define INTS_OFF(rd, rCpsr, newInts)            INTS_OFF_1(rd, rCpsr, newInts); INTS_OFF_2(rd, rCpsr, newInts)
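// Illustrative usage sketch (not from the original source), for a naked
// assembler routine on a pre-ARMv6 core with r0 free:
//
//     SET_MODE(r0, MODE_SVC, INTS_ALL_OFF);  // enter SVC mode with IRQs/FIQs masked
//     // ...critical section...
//     asm("mrs r0, cpsr ");                  // cpsr value needed at run time...
//     INTS_ON(r0, r0, INTS_ALL_ON);          // ...to re-enable IRQs and FIQs
//
// With __CPU_ARM_HAS_CPS the same macros expand to single CPS instructions and
// the register arguments are ignored; the shared signature keeps callers portable.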
|
#define __chill()

#if defined(__SMP__) && !defined(__CPU_ARM_HAS_LDREX_STREX_V6K)
#error SMP not allowed without v6K
#endif
#if defined(__SMP__) && !defined(__CPU_HAS_CP15_THREAD_ID_REG)
#error SMP not allowed without thread ID registers
#endif

#endif // end of __CPU_ARM

#if defined(__CPU_X86) && defined(__EPOC32__)
#define __CPU_HAS_MMU
#define __CPU_HAS_CACHE
#define __CPU_SUPPORTS_FAST_PROCESS_SWITCH

// Page/directory tables are cached on X86.
#define __CPU_PAGE_TABLES_FULLY_CACHED

#if defined(__VC32__)
#define X86_PAUSE   _asm rep nop
#define __chill()   do { _asm rep nop } while(0)
#elif defined(__GCC32__)
#define X86_PAUSE   __asm__ __volatile__("pause ");
#define __chill()   __asm__ __volatile__("pause ")
#else
#error Unknown x86 compiler
#endif

#if defined(__cplusplus)
extern "C" {
#endif
#if defined(__VC32__)
extern int _inp(unsigned short);                                // input byte (compiler intrinsic)
extern unsigned short _inpw(unsigned short);                    // input word (compiler intrinsic)
extern unsigned long _inpd(unsigned short);                     // input dword (compiler intrinsic)
extern int _outp(unsigned short, int);                          // output byte (compiler intrinsic)
extern unsigned short _outpw(unsigned short, unsigned short);   // output word (compiler intrinsic)
extern unsigned long _outpd(unsigned short, unsigned long);     // output dword (compiler intrinsic)

#pragma intrinsic(_inp, _inpw, _inpd, _outp, _outpw, _outpd)

#define x86_in8(port)           ((TUint8)_inp(port))
#define x86_in16(port)          ((TUint16)_inpw(port))
#define x86_in32(port)          ((TUint32)_inpd(port))
#define x86_out8(port,data)     ((void)_outp((port),(TUint8)(data)))
#define x86_out16(port,data)    ((void)_outpw((port),(TUint16)(data)))
#define x86_out32(port,data)    ((void)_outpd((port),(TUint32)(data)))

#elif defined(__GCC32__) // end of (__VC32__)
inline TUint8 _inpb(TUint16 port)
	{
	TUint8 ret;
	__asm__ __volatile__("in al, dx" : "=a" (ret) : "d" (port));
	return ret;
	}

inline TUint16 _inpw(TUint16 port)
	{
	TUint16 ret;
	__asm__ __volatile__("in ax, dx" : "=a" (ret) : "d" (port));
	return ret;
	}

inline TUint32 _inpd(TUint16 port)
	{
	TUint32 ret;
	__asm__ __volatile__("in eax, dx" : "=a" (ret) : "d" (port));
	return ret;
	}

inline void _outpb(TUint16 port, TUint8 data)
	{
	__asm__ __volatile__("out dx, al" : : "d" (port), "a" (data));
	}

inline void _outpw(TUint16 port, TUint16 data)
	{
	__asm__ __volatile__("out dx, ax" : : "d" (port), "a" (data));
	}

inline void _outpd(TUint16 port, TUint32 data)
	{
	__asm__ __volatile__("out dx, eax" : : "d" (port), "a" (data));
	}

#define x86_in8(port)           (_inpb(port))
#define x86_in16(port)          (_inpw(port))
#define x86_in32(port)          (_inpd(port))
#define x86_out8(port,data)     (_outpb((port),(TUint8)(data)))
#define x86_out16(port,data)    (_outpw((port),(TUint16)(data)))
#define x86_out32(port,data)    (_outpd((port),(TUint32)(data)))

#else // end of elif (__GCC32__)
#error Unknown x86 compiler
#endif // end of (__VC32__) elif (__GCC32__) else
#if defined(__cplusplus)
}
#endif // end of __cplusplus
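// Illustrative usage sketch (not from the original source): the macros above
// give both compilers the same port I/O interface. For example, writing to and
// reading back from the conventional POST-code debug port 0x80:
//
//     x86_out8(0x80, 0x55);        // write one byte to I/O port 0x80
//     TUint8 v = x86_in8(0x80);    // read one byte back from the same port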
|
#endif //__CPU_X86 && __EPOC32__


#undef __USER_MEMORY_GUARDS_ENABLED__
#if defined(_DEBUG) && !defined(__KERNEL_APIS_DISABLE_USER_MEMORY_GUARDS__)
#if defined(__MEMMODEL_MULTIPLE__) || defined(__MEMMODEL_FLEXIBLE__)
#if defined(__CPU_ARM)
#define __USER_MEMORY_GUARDS_ENABLED__
#endif
#endif
#endif // end of (_DEBUG) && !(__KERNEL_APIS_DISABLE_USER_MEMORY_GUARDS__)

#ifdef __USER_MEMORY_GUARDS_ENABLED__

#define USER_MEMORY_GUARD_ON(cc,save,temp)              \
	asm("mrc"#cc" p15, 0, "#save", c3, c0, 0 ");        \
	asm("bic"#cc" "#temp", "#save", #0xc0000000 ");     \
	asm("mcr"#cc" p15, 0, "#temp", c3, c0, 0 ");        \
	__INST_SYNC_BARRIER__(save)                         // Set DACR so there is no access to domain 15

#define USER_MEMORY_GUARD_OFF(cc,save,temp)             \
	asm("mrc"#cc" p15, 0, "#save", c3, c0, 0 ");        \
	asm("orr"#cc" "#temp", "#save", #0x40000000 ");     \
	asm("mcr"#cc" p15, 0, "#temp", c3, c0, 0 ");        \
	__INST_SYNC_BARRIER__(save)                         // Set DACR so domain 15 has client access

#define USER_MEMORY_GUARD_RESTORE(save,temp)            \
	asm("mrc p15, 0, "#temp", c3, c0, 0 ");             \
	asm("and "#save", "#save", #0xc0000000 ");          \
	asm("bic "#temp", "#temp", #0xc0000000 ");          \
	asm("orr "#temp", "#temp", "#save );                \
	asm("mcr p15, 0, "#temp", c3, c0, 0 ");             \
	__INST_SYNC_BARRIER__(save)                         // Restore domain 15 in DACR from the value in 'save'

#define USER_MEMORY_GUARD_ON_IF_MODE_USR(rd)            \
	asm("mrs "#rd", spsr");                             \
	asm("tst "#rd", #0x0f ");                           \
	USER_MEMORY_GUARD_ON(eq,rd,rd)                      // If spsr is mode_usr then set DACR so there is no access to domain 15

#define USER_MEMORY_GUARD_OFF_IF_MODE_USR(rd)           \
	asm("mrs "#rd", spsr");                             \
	asm("tst "#rd", #0x0f ");                           \
	USER_MEMORY_GUARD_OFF(eq,rd,rd)                     // If spsr is mode_usr then set DACR so domain 15 has client access

#else // !__USER_MEMORY_GUARDS_ENABLED__

#define USER_MEMORY_GUARD_ON(cc,save,temp)
#define USER_MEMORY_GUARD_OFF(cc,save,temp)
#define USER_MEMORY_GUARD_RESTORE(save,temp)
#define USER_MEMORY_GUARD_ON_IF_MODE_USR(rd)
#define USER_MEMORY_GUARD_OFF_IF_MODE_USR(rd)

#endif // end of else __USER_MEMORY_GUARDS_ENABLED__
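// Illustrative usage sketch (not from the original source): code that must
// touch user memory while the guards are compiled in brackets the access,
// here using r10 (assumed free) to hold the saved DACR bits:
//
//     USER_MEMORY_GUARD_OFF(,r10,r12);     // give domain 15 client access
//     // ...access user memory...
//     USER_MEMORY_GUARD_RESTORE(r10,r12);  // put the saved DACR bits back
//
// In builds where __USER_MEMORY_GUARDS_ENABLED__ is undefined the macros
// expand to nothing, so the bracketing has no cost.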
|
#endif // End of file