1 /***************************************************************************
3 * Open \______ \ ____ ____ | | _\_ |__ _______ ___
4 * Source | _// _ \_/ ___\| |/ /| __ \ / _ \ \/ /
5 * Jukebox | | ( <_> ) \___| < | \_\ ( <_> > < <
6 * Firmware |____|_ /\____/ \___ >__|_ \|___ /\____/__/\_ \
10 * Copyright (C) 2006,2007 by Greg White
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation; either version 2
15 * of the License, or (at your option) any later version.
17 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
18 * KIND, either express or implied.
20 ****************************************************************************/
24 /* Used by ARMv4 & ARMv5 CPUs with cp15 register and MMU */
25 /* WARNING: assumes the data cache line size is 32 bytes */
30 * void ttb_init(void);
32 .section .text, "ax", %progbits
35 .type ttb_init, %function
@ NOTE(review): the .global directive and the "ttb_init:" label itself are
@ not visible in this chunk of the file; they presumably precede the ldr.
37 ldr r0, =TTB_BASE_ADDR @ r0 = base address of first-level translation table
39 mcr p15, 0, r0, c2, c0, 0 @ Set the TTB base address
@ NOTE(review): r1 is not initialized in the visible lines -- the original
@ presumably loads the all-ones domain-manager mask into r1 first; confirm
@ against the full file before relying on this.
40 mcr p15, 0, r1, c3, c0, 0 @ Set all domains to manager status
42 .size ttb_init, .-ttb_init
45 * void map_section(unsigned int pa, unsigned int va, int mb, int flags);
47 .section .text, "ax", %progbits
50 .type map_section, %function
@ NOTE(review): the "map_section:" label and the instructions that merge the
@ section flags into pa (described by the comments below) are not visible in
@ this chunk; only their explanatory comments remain.
57 @ pa |= (flags | 0x412);
59 @ 10: superuser - r/w, user - no access
61 @ 3,2: Cache flags (flags (r3))
62 @ 1: Section signature
67 @ unsigned int* ttbPtr = TTB_BASE + (va >> 20);
68 @ sections are 1MB size
@ NOTE(review): a "va >> 20" shift presumably precedes the add below but is
@ not visible here -- confirm against the full file.
70 ldr r3, =TTB_BASE_ADDR @ r3 = base of first-level translation table
71 add r1, r3, r1, lsl #0x2 @ r1 = &TTB[va >> 20] (4 bytes per entry)
73 @ Add MB to pa, flags are already present in pa, but addition
74 @ should not affect them
76 @ for( ; mb>0; mb--, pa += (1 << 20))
@ NOTE(review): the store/decrement loop body and the return are missing
@ from this chunk of the file.
90 .size map_section, .-map_section
93 * void enable_mmu(void);
95 .section .text, "ax", %progbits
98 .type enable_mmu, %function
@ NOTE(review): the .global directive and the "enable_mmu:" label are not
@ visible in this chunk. The value of r0 is ignored by the two invalidate
@ operations below (operand is SBZ for these cp15 ops).
101 mcr p15, 0, r0, c8, c7, 0 @ invalidate TLB
102 mcr p15, 0, r0, c7, c7,0 @ invalidate both i and dcache
103 mrc p15, 0, r0, c1, c0, 0 @ read cp15 control register
104 orr r0, r0, #1 @ enable mmu (M, bit 0)
105 orr r0, r0, #1<<2 @ enable dcache (C, bit 2)
106 orr r0, r0, #1<<12 @ enable icache (I, bit 12)
107 mcr p15, 0, r0, c1, c0, 0 @ write back control register
@ NOTE(review): the return instruction is not visible in this chunk.
113 .size enable_mmu, .-enable_mmu
116 /** Cache coherency **/
119 * Invalidate DCache for this range
121 * void invalidate_dcache_range(const void *base, unsigned int size);
123 .section .text, "ax", %progbits
125 .global invalidate_dcache_range
126 .type invalidate_dcache_range, %function
127 @ MVA format: 31:5 = Modified virtual address, 4:0 = SBZ
128 invalidate_dcache_range:
@ In: r0 = base address, r1 = size in bytes. Clobbers r0, r1, flags.
129 add r1, r0, r1 @ size -> end
130 cmp r1, r0 @ end <= start?
@ NOTE(review): an early-exit branch presumably follows the cmp above but is
@ not visible in this chunk.
132 bic r0, r0, #31 @ Align start to cache line (down)
@ Unrolled-by-8 loop: each mcr cleans+invalidates one 32-byte line.
@ NOTE(review): the loop label and the "advance r0 / compare against r1"
@ instructions interleaved between the mcr lines, plus the closing branch
@ and return, are missing from this chunk.
134 mcr p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
137 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
140 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
143 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
146 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
149 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
152 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
155 mcrhi p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
160 mcr p15, 0, r0, c7, c10, 4 @ Drain write buffer
162 .size invalidate_dcache_range, .-invalidate_dcache_range
165 * clean DCache for this range
166 * forces DCache writeback for the specified range
167 * void clean_dcache_range(const void *base, unsigned int size);
169 .section .text, "ax", %progbits
171 .global clean_dcache_range
172 .type clean_dcache_range, %function
173 @ MVA format: 31:5 = Modified virtual address, 4:0 = SBZ
@ NOTE(review): the "clean_dcache_range:" label is not visible in this chunk.
@ In: r0 = base address, r1 = size in bytes. Clobbers r0, r1, flags.
175 add r1, r0, r1 @ size -> end
176 cmp r1, r0 @ end <= start?
@ NOTE(review): an early-exit branch presumably follows the cmp above but is
@ not visible in this chunk.
178 bic r0, r0, #31 @ Align start to cache line (down)
@ Unrolled-by-8 loop: each mcr writes back (cleans) one 32-byte line
@ without invalidating it.
@ NOTE(review): the "1:" loop label and the "advance r0 / compare against
@ r1" instructions interleaved between the mcr lines are missing from this
@ chunk; only the closing "bhi 1b" survives.
180 mcr p15, 0, r0, c7, c10, 1 @ Clean line by MVA
183 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
186 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
189 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
192 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
195 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
198 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
201 mcrhi p15, 0, r0, c7, c10, 1 @ Clean line by MVA
204 bhi 1b @clean_start @ more lines remaining -> loop
206 mcr p15, 0, r0, c7, c10, 4 @ Drain write buffer
@ NOTE(review): the return instruction is not visible in this chunk.
208 .size clean_dcache_range, .-clean_dcache_range
211 * Dump DCache for this range
212 * will *NOT* do write back except for buffer edges not on a line boundary
213 * void dump_dcache_range(const void *base, unsigned int size);
215 .section .text, "ax", %progbits
217 .global dump_dcache_range
218 .type dump_dcache_range, %function
219 @ MVA format: 31:5 = Modified virtual address, 4:0 = SBZ
@ NOTE(review): the "dump_dcache_range:" label is not visible in this chunk.
@ In: r0 = base address, r1 = size in bytes. Clobbers r0, r1, flags.
@ Partial lines at either edge of the buffer are cleaned+invalidated (to
@ preserve neighbouring data); fully-covered interior lines are discarded.
221 add r1, r0, r1 @ size -> end
222 cmp r1, r0 @ end <= start?
@ NOTE(review): an early-exit branch presumably follows the cmp above but is
@ not visible in this chunk.
224 tst r0, #31 @ Check first line for bits set
225 bicne r0, r0, #31 @ Clear low five bits (down)
226 mcrne p15, 0, r0, c7, c14, 1 @ Clean and invalidate line by MVA
227 @ if not cache aligned
228 addne r0, r0, #32 @ Move to the next cache line
230 tst r1, #31 @ Check last line for bits set
231 bicne r1, r1, #31 @ Clear low five bits (down)
232 mcrne p15, 0, r1, c7, c14, 1 @ Clean and invalidate line by MVA
233 @ if not cache aligned
234 cmp r1, r0 @ end <= start now?
@ Unrolled-by-8 loop: each mcr invalidates (discards, no writeback) one
@ 32-byte interior line.
@ NOTE(review): the "1:" loop label and the "advance r0 / compare against
@ r1" instructions interleaved between the mcr lines are missing from this
@ chunk; only the closing "bhi 1b" survives.
236 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
239 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
242 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
245 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
248 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
251 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
254 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
257 mcrhi p15, 0, r0, c7, c6, 1 @ Invalidate line by MVA
260 bhi 1b @ dump_start @ more lines remaining -> loop
262 mcr p15, 0, r0, c7, c10, 4 @ Drain write buffer
@ NOTE(review): the return instruction is not visible in this chunk.
264 .size dump_dcache_range, .-dump_dcache_range
267 * Cleans entire DCache
268 * void clean_dcache(void);
270 .section .text, "ax", %progbits
273 .type clean_dcache, %function
274 .global cpucache_flush @ Alias
@ NOTE(review): the .global clean_dcache directive and the function label(s)
@ are not visible in this chunk.
@ Walks the whole cache by set/way index: r1 steps the index field (31:26),
@ r0 derives the 8 segment entries (7:5, 32 apart) for each index.
277 @ Index format: 31:26 = index, 7:5 = segment, remainder = SBZ
278 mov r1, #0x00000000 @ r1 = index counter, starts at 0
@ NOTE(review): the "1:" loop label and the instruction copying r1 into r0
@ for the first segment entry are missing from this chunk.
280 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
281 add r0, r1, #0x00000020 @ segment 1 of current index
282 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
283 add r0, r0, #0x00000020 @ segment 2
284 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
285 add r0, r0, #0x00000020 @ segment 3
286 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
287 add r0, r0, #0x00000020 @ segment 4
288 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
289 add r0, r0, #0x00000020 @ segment 5
290 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
291 add r0, r0, #0x00000020 @ segment 6
292 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
293 add r0, r0, #0x00000020 @ segment 7
294 mcr p15, 0, r0, c7, c10, 2 @ Clean entry by index
295 adds r1, r1, #0x04000000 @ will wrap to zero at loop end
296 bne 1b @ clean_start @ next index until r1 wraps to 0
297 mcr p15, 0, r1, c7, c10, 4 @ Drain write buffer
@ NOTE(review): the return instruction is not visible in this chunk.
299 .size clean_dcache, .-clean_dcache
302 * Invalidate entire DCache
304 * void invalidate_dcache(void);
306 .section .text, "ax", %progbits
308 .global invalidate_dcache
309 .type invalidate_dcache, %function
@ NOTE(review): the "invalidate_dcache:" label is not visible in this chunk.
@ Same set/way walk as clean_dcache above, but cleans AND invalidates each
@ entry. On exit r1 = 0 (it wraps to zero when the index loop finishes).
311 @ Index format: 31:26 = index, 7:5 = segment, remainder = SBZ
312 mov r1, #0x00000000 @ r1 = index counter, starts at 0
@ NOTE(review): the "1:" loop label and the instruction copying r1 into r0
@ for the first segment entry are missing from this chunk.
314 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
315 add r0, r1, #0x00000020 @ segment 1 of current index
316 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
317 add r0, r0, #0x00000020 @ segment 2
318 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
319 add r0, r0, #0x00000020 @ segment 3
320 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
321 add r0, r0, #0x00000020 @ segment 4
322 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
323 add r0, r0, #0x00000020 @ segment 5
324 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
325 add r0, r0, #0x00000020 @ segment 6
326 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
327 add r0, r0, #0x00000020 @ segment 7
328 mcr p15, 0, r0, c7, c14, 2 @ Clean and invalidate entry by index
329 adds r1, r1, #0x04000000 @ will wrap to zero at loop end
@ NOTE(review): the "bne 1b" closing the index loop is missing from this
@ chunk (cf. clean_dcache above).
331 mcr p15, 0, r1, c7, c10, 4 @ Drain write buffer
@ NOTE(review): the return instruction is not visible in this chunk.
333 .size invalidate_dcache, .-invalidate_dcache
336 * Invalidate entire ICache and DCache
338 * void invalidate_idcache(void);
340 .section .text, "ax", %progbits
342 .global invalidate_idcache
343 .type invalidate_idcache, %function
344 .global cpucache_invalidate @ Alias
@ NOTE(review): the "invalidate_idcache:" label is not visible in this chunk.
347 mov r2, lr @ save lr in r2; the call below clobbers r0/r1 but not r2
348 bl invalidate_dcache @ Clean and invalidate entire DCache
349 mcr p15, 0, r1, c7, c5, 0 @ Invalidate ICache (r1=0 from call)
@ NOTE(review): the return (presumably "bx r2") is missing from this chunk.
351 .size invalidate_idcache, .-invalidate_idcache