Fix the ARM assembly to ensure that the stack is not used.
firmware/target/arm/mmu-arm.c (kugel-rb.git)
/***************************************************************************
 *             __________               __   ___.
 *   Open      \______   \ ____   ____ |  | _\_ |__   _______  ___
 *   Source     |       _//  _ \_/ ___\|  |/ /| __ \ /  _ \  \/  /
 *   Jukebox    |    |   (  <_> )  \___|    < | \_\ (  <_> > <  <
 *   Firmware   |____|_  /\____/ \___  >__|_ \|___  /\____/__/\_ \
 *                     \/            \/     \/    \/            \/
 * $Id$
 *
 * Copyright (C) 2006,2007 by Greg White
 *
 * All files in this archive are subject to the GNU General Public License.
 * See the file COPYING in the source tree root for full license agreement.
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
 * KIND, either express or implied.
 *
 ****************************************************************************/
#include "cpu.h"
#include "mmu-arm.h"
#include "panic.h"

void __attribute__((naked)) ttb_init(void) {
    asm volatile
    (
        "mcr p15, 0, %[ttbB], c2, c0, 0 \n" /* Set the TTB base address */
        "mcr p15, 0, %[ffff], c3, c0, 0 \n" /* Set all domains to manager status */
        "bx  lr                         \n"
        :
        : [ttbB] "r" (TTB_BASE),
          [ffff] "r" (0xFFFFFFFF)
    );
}
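
/* Illustrative note (not part of the original file): with 1MB sections, the
 * first-level table pointed at through c2 above holds 4096 word-sized
 * entries (one per MB of the 4GB address space), i.e. 16KB at TTB_BASE.
 * A section lookup in C terms, assuming TTB_BASE from mmu-arm.h:
 *
 *     descriptor = ((unsigned int *)TTB_BASE)[va >> 20];
 */
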
void __attribute__((naked)) map_section(unsigned int pa, unsigned int va, int mb, int flags) {
    asm volatile
    (
        /* pa &= (-1 << 20);  // align to 1MB */
        "mov r0, r0, lsr #20 \n"
        "mov r0, r0, lsl #20 \n"

        /* pa |= (flags | 0x412);
         * bit breakdown:
         *  10:     superuser - r/w, user - no access
         *  4:      should be "1"
         *  3,2:    Cache flags (flags (r3))
         *  1:      Section signature
         */
        "orr r0, r0, r3      \n"
        "orr r0, r0, #0x410  \n"
        "orr r0, r0, #0x2    \n"
    );

    register int *ttb_base asm ("r3") = TTB_BASE;   /* force in r3 */

    asm volatile
    (
        /* unsigned int* ttbPtr = TTB_BASE + (va >> 20);
         * sections are 1MB size
         */
        "mov r1, r1, lsr #20            \n"
        "add r1, %[ttbB], r1, lsl #0x2  \n"

        /* Add MB to pa; flags are already present in pa, but the addition
         * should not affect them
         *
         * #define MB (1 << 20)
         * for( ; mb > 0; mb--, pa += MB)
         * {
         *     *(ttbPtr++) = pa;
         * }
         * #undef MB
         */
        "cmp  r2, #0             \n"
        "bxle lr                 \n"
        "mov  r3, #0x0           \n"
        "loop:                   \n"
        "str  r0, [r1], #4       \n"
        "add  r0, r0, #0x100000  \n"
        "add  r3, r3, #0x1       \n"
        "cmp  r2, r3             \n"
        "bne  loop               \n"
        "bx   lr                 \n"
        :
        : [ttbB] "r" (ttb_base) /* This /HAS/ to be in r3 */
    );
    (void) pa;
    (void) va;
    (void) mb;
    (void) flags;
}
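
/* Illustrative sketch (not part of the original file): the C loop the asm
 * above implements, kept out of the build because plain C cannot guarantee
 * that the stack stays untouched. Assumes TTB_BASE from mmu-arm.h. */
#if 0
static void map_section_c(unsigned int pa, unsigned int va, int mb, int flags)
{
    unsigned int *ttbPtr = (unsigned int *)TTB_BASE + (va >> 20);

    pa &= (-1 << 20);    /* align to 1MB */
    pa |= flags | 0x412; /* AP=01 (superuser r/w), bit 4, section signature */

    for ( ; mb > 0; mb--, pa += (1 << 20))
        *(ttbPtr++) = pa;
}
#endif
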
void __attribute__((naked)) enable_mmu(void) {
    asm volatile(
        "mov r0, #0                 \n"
        "mcr p15, 0, r0, c8, c7, 0  \n" /* invalidate TLB */
        "mcr p15, 0, r0, c7, c7, 0  \n" /* invalidate both icache and dcache */
        "mrc p15, 0, r0, c1, c0, 0  \n"
        "orr r0, r0, #1             \n" /* enable mmu */
        "orr r0, r0, #1<<2          \n" /* enable dcache */
        "orr r0, r0, #1<<12         \n" /* enable icache */
        "mcr p15, 0, r0, c1, c0, 0  \n"
        "nop                        \n"
        "nop                        \n"
        "nop                        \n"
        "nop                        \n"
        "bx  lr                     \n"
        :
        :
        : "r0"
    );
}
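
/* Illustrative sketch (not part of the original file): the expected call
 * order on a target. The addresses, section count and the CACHE_ALL flag
 * below are placeholders assumed for the example, not taken from any
 * particular port. */
#if 0
static void mmu_setup_example(void)
{
    ttb_init();                             /* set TTB base, open all domains */
    map_section(0x30000000, 0x30000000, 64, CACHE_ALL); /* identity-map DRAM */
    enable_mmu();                           /* turn MMU and caches on */
}
#endif
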
#if CONFIG_CPU == IMX31L
void __attribute__((naked)) invalidate_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add  r1, r1, r0              \n"
        "mov  r2, #0                  \n"
        "mcrr p15, 0, r1, r0, c14     \n" /* Clean and invalidate dcache range */
        "mcr  p15, 0, r2, c7, c10, 4  \n" /* Data synchronization barrier */
        "bx   lr                      \n"
    );
    (void)base; (void)size;
}
#else
/* Invalidate DCache for this range */
/* Will do write back */
void invalidate_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (((int) base) & ~31);  /* Align start to cache line */
    unsigned int end = ((addr+size) & ~31)+64; /* Align end to cache line, pad */
    asm volatile(
        "inv_start:                       \n"
        "mcr   p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "add   %0, %0, #32                \n"
        "cmp   %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "mcrne p15, 0, %0, c7, c14, 1     \n" /* Clean and invalidate this line */
        "addne %0, %0, #32                \n"
        "cmpne %0, %1                     \n"
        "bne   inv_start                  \n"
        "mov   %0, #0                     \n"
        "mcr   p15, 0, %0, c7, c10, 4     \n" /* Drain write buffer */
        : "+r" (addr) /* modified by the asm, so an in-out operand */
        : "r" (end)
        : "memory"
    );
}
#endif
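
/* Illustrative sketch (not part of the original file): a typical caller
 * invalidates a buffer before a DMA engine fills it, so no dirty line can be
 * evicted over the incoming data and no stale line masks it afterwards.
 * dma_receive() is a hypothetical placeholder. */
#if 0
static unsigned char dma_buf[512];

static void dma_read_example(void)
{
    invalidate_dcache_range(dma_buf, sizeof(dma_buf));
    dma_receive(dma_buf, sizeof(dma_buf)); /* hypothetical DMA fill */
    /* CPU reads of dma_buf now come from memory, not stale cache lines */
}
#endif
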
#if CONFIG_CPU == IMX31L
void __attribute__((naked)) clean_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add  r1, r1, r0              \n"
        "mov  r2, #0                  \n"
        "mcrr p15, 0, r1, r0, c12     \n" /* Clean dcache range */
        "mcr  p15, 0, r2, c7, c10, 4  \n" /* Data synchronization barrier */
        "bx   lr                      \n"
    );
    (void)base; (void)size;
}
#else
/* clean DCache for this range */
/* forces DCache writeback for the specified range */
void clean_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size+32;
    asm volatile(
        "bic   %0, %0, #31                \n"
        "clean_start:                     \n"
        "mcr   p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "add   %0, %0, #32                \n"
        "cmp   %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "mcrlo p15, 0, %0, c7, c10, 1     \n" /* Clean this line */
        "addlo %0, %0, #32                \n"
        "cmplo %0, %1                     \n"
        "blo   clean_start                \n"
        "mov   %0, #0                     \n"
        "mcr   p15, 0, %0, c7, c10, 4     \n" /* Drain write buffer */
        : "+r" (addr) /* modified by the asm, so an in-out operand */
        : "r" (end)
        : "memory"
    );
}
#endif
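
/* Illustrative sketch (not part of the original file): the mirror case of
 * the example above; clean (write back) the buffer before a device reads it
 * from memory. dma_send() is a hypothetical placeholder. */
#if 0
static void dma_write_example(const void *buf, unsigned int len)
{
    clean_dcache_range(buf, len); /* push dirty lines out to memory first */
    dma_send(buf, len);           /* hypothetical DMA drain of the buffer */
}
#endif
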
#if CONFIG_CPU == IMX31L
void __attribute__((naked)) dump_dcache_range(const void *base, unsigned int size)
{
    asm volatile(
        "add  r1, r1, r0          \n"
        "mcrr p15, 0, r1, r0, c6  \n" /* Invalidate dcache range */
        "bx   lr                  \n"
    );
    (void)base; (void)size;
}
#else
/* Dump DCache for this range */
/* Will *NOT* do write back */
void dump_dcache_range(const void *base, unsigned int size) {
    unsigned int addr = (int) base;
    unsigned int end = addr+size;
    asm volatile(
        "tst   %0, #31                 \n" /* Check to see if low five bits are set */
        "bic   %0, %0, #31             \n" /* Clear them */
        "mcrne p15, 0, %0, c7, c14, 1  \n" /* Clean and invalidate this line, if those bits were set */
        "add   %0, %0, #32             \n" /* Move to the next cache line */
        "tst   %1, #31                 \n" /* Check last line for bits set */
        "bic   %1, %1, #31             \n" /* Clear those bits */
        "mcrne p15, 0, %1, c7, c14, 1  \n" /* Clean and invalidate this line, if not cache aligned */
        "dump_start:                   \n"
        "mcr   p15, 0, %0, c7, c6, 1   \n" /* Invalidate this line */
        "add   %0, %0, #32             \n" /* Next cache line */
        "cmp   %0, %1                  \n"
        "bne   dump_start              \n"
        "dump_end:                     \n"
        "mcr   p15, 0, %0, c7, c10, 4  \n" /* Drain write buffer */
        : "+r" (addr), "+r" (end) /* both modified by the asm, so in-out operands */
        :
        : "memory"
    );
}
#endif
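
/* Illustrative sketch (not part of the original file): "dump" discards the
 * cached interior of the range without writing it back, so it only suits
 * buffers whose cached contents are already worthless, e.g. a buffer a DMA
 * engine is about to overwrite completely. Partial lines at either edge are
 * cleaned first (above) so unrelated data sharing those lines survives.
 * dma_receive() is a hypothetical placeholder. */
#if 0
static void dma_reuse_example(void *buf, unsigned int len)
{
    dump_dcache_range(buf, len); /* drop stale cached copies, no write back */
    dma_receive(buf, len);       /* hypothetical DMA overwrite of the range */
}
#endif
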
#if CONFIG_CPU == IMX31L
void __attribute__((naked)) clean_dcache(void)
{
    asm volatile (
        /* Clean entire data cache */
        "mov r0, #0                  \n"
        "mcr p15, 0, r0, c7, c10, 0  \n"
        "bx  lr                      \n"
    );
}
#else
/* Cleans entire DCache */
void clean_dcache(void)
{
    unsigned int index, addr, low;

    for(index = 0; index <= 63; index++)
    {
        for(low = 0; low <= 7; low++)
        {
            addr = (index << 26) | (low << 5);
            asm volatile
            (
                "mcr p15, 0, %[addr], c7, c10, 2 \n" /* Clean this entry by index */
                :
                : [addr] "r" (addr)
            );
        }
    }
}
#endif
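
/* Illustrative sketch (not part of the original file): a typical use is to
 * force every dirty line out before a shutdown or reboot. The set/way value
 * written above has the form (index << 26) | (segment << 5); the exact field
 * widths depend on the core's cache geometry, so the 64x8 bounds are specific
 * to this target family. */
#if 0
static void shutdown_example(void)
{
    clean_dcache(); /* write every dirty line back to memory */
    /* ... power-off or reboot sequence would follow here ... */
}
#endif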