3 #include <linux/linkage.h>
4 #include <linux/init.h>
5 #include <asm/assembler.h>
7 #include <asm/pgtable-hwdef.h>
8 #include <asm/pgtable.h>
10 #include <asm/ptrace.h>
11 #include "proc-macros.S"
14 * The size of one data cache line.
16 #define CACHE_DLINESIZE 16
19 * The number of data cache segments.
21 #define CACHE_DSEGMENTS 2
24 * The number of lines in a cache segment.
26 #define CACHE_DENTRIES 256
29 * This is the size at which it becomes more efficient to
30 * clean the whole cache, rather than using the individual
 * cache line maintenance instructions.
33 #define CACHE_DLIMIT 8192
 * cpu_arm925_proc_init()
@ No ARM925-specific setup is performed at proc-init time.
@ NOTE(review): the body is not visible in this chunk; presumably it
@ just returns to the caller — confirm against the full file.
ENTRY(cpu_arm925_proc_init)
 * cpu_arm925_proc_fin()
@ Quiesce the CPU before reboot/kexec: clear the I-cache enable bit and
@ the write-buffer/D-cache/alignment enable bits in the CP15 control
@ register so the next kernel starts with caches off.
ENTRY(cpu_arm925_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............ (clear I-cache enable)
	bic	r0, r0, #0x000e			@ ............wca. (clear WB, cache, align)
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
 * cpu_arm925_reset(loc)
 * Perform a soft reset of the system.  Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 * loc: location to jump to for soft reset
ENTRY(cpu_arm925_reset)
	/* Send software reset to MPU and DSP */
@ NOTE(review): ip is OR-ed into a reset-control value here, but its
@ initial load (and the store that actually triggers the MPU/DSP reset)
@ is not visible in this chunk — confirm against the full file.
	orr	ip, ip, #0x00fe0000
	orr	ip, ip, #0x0000ce00
@ Flush all caches/TLBs, then turn the MMU and caches off so the target
@ of the jump sees a freshly-reset CPU state.
	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x000f			@ ............wcam (WB, cache, align, MMU off)
	bic	ip, ip, #0x1100			@ ...i...s........ (I-cache, system bit off)
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
 * cpu_arm925_do_idle()
 * Called with IRQs disabled
@ Enter the low-power wait-for-interrupt state.  The I-cache is disabled
@ around the WFI and the previous control-register value restored after.
ENTRY(cpu_arm925_do_idle)
	mrc	p15, 0, r1, c1, c0, 0		@ Read control register (saved for restore)
	mcr	p15, 0, r0, c7, c10, 4		@ Drain write buffer
@ NOTE(review): the instruction computing r2 (presumably r1 with the
@ I-cache bit cleared) is not visible in this chunk — confirm.
	mcr	p15, 0, r2, c1, c0, 0		@ Disable I cache
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	mcr	p15, 0, r1, c1, c0, 0		@ Restore ICache enable
 * flush_user_cache_all()
 * Clean and invalidate all cache entries in a particular
@ address space.  On ARM925 the whole cache is flushed; this entry is
@ expected to fall through / branch into the whole-cache flush path
@ (not visible in this chunk — confirm against the full file).
ENTRY(arm925_flush_user_cache_all)
 * flush_kern_cache_all()
 * Clean and invalidate the entire cache.
ENTRY(arm925_flush_kern_cache_all)
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ invalidate D cache
	/* Flush entries in both segments at once, see NOTE1 above */
@ Index-based clean+invalidate: walk the set/index space from the top
@ entry down to 0.  NOTE(review): the decrement of r3 that sets the
@ carry flag for "bcs" is not visible in this chunk — confirm.
	mov	r3, #(CACHE_DENTRIES - 1) << 4	@ 256 entries in segment
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean+invalidate D index
	bcs	2b				@ entries 255 to 0
@ Conditionally (flags set by a hidden test) invalidate I-cache and
@ drain the write buffer.
	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
 * flush_user_cache_range(start, end, flags)
 * Clean and invalidate a range of cache entries in the
 * specified address range.
 * - start - start address (inclusive)
 * - end - end address (exclusive)
 * - flags - vm_flags describing address space
ENTRY(arm925_flush_user_cache_range)
@ For large ranges it is cheaper to flush the whole cache than to walk
@ it line by line (see CACHE_DLIMIT above).
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT
	bgt	__flush_whole_cache
@ Line-by-line loop, unrolled four lines per iteration.  The "ne"
@ condition on the I-entry invalidates presumably comes from a hidden
@ "tst r2, #VM_EXEC" — confirm against the full file.  The loop label
@ and the end-of-range compare/branch are also not visible here.
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
 * coherent_kern_range(start, end)
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end. If you have non-snooping
 * Harvard caches, you need to implement this function.
 * - start - virtual start address
 * - end - virtual end address
@ Kernel variant: expected to fall through into (or share code with)
@ arm925_coherent_user_range below — body not visible in this chunk.
ENTRY(arm925_coherent_kern_range)
 * coherent_user_range(start, end)
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end. If you have non-snooping
 * Harvard caches, you need to implement this function.
 * - start - virtual start address
 * - end - virtual end address
ENTRY(arm925_coherent_user_range)
@ Align start down to a cache line, then clean each D line and
@ invalidate the matching I line.  NOTE(review): the end-of-range
@ compare and back-branch to 1: are not visible in this chunk.
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
 * flush_kern_dcache_area(void *addr, size_t size)
 * Ensure no D cache aliasing occurs, either with itself or
@ with the I cache.
 * - addr - kernel address
 * - size - region size
ENTRY(arm925_flush_kern_dcache_area)
@ NOTE(review): the computation of the end address (addr + size) and
@ the loop compare/branch are not visible in this chunk — confirm.
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
 * dma_inv_range(start, end)
 * Invalidate (discard) the specified virtual address range.
 * May not write back any entries. If 'start' or 'end'
 * are not cache line aligned, those lines must be written
@ back first, to avoid discarding unrelated data sharing the line.
 * - start - virtual start address
 * - end - virtual end address
arm925_dma_inv_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
@ Clean (write back) any partially-covered line at either end of the
@ range before invalidating, so neighbouring data is not lost.
	tst	r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
@ NOTE(review): the loop's end compare/branch is not visible here.
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
 * dma_clean_range(start, end)
 * Clean the specified virtual address range.
 * - start - virtual start address
 * - end - virtual end address
@ In write-through mode there is nothing to clean (hence the #ifndef);
@ only the final write-buffer drain is needed.
arm925_dma_clean_range:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
@ NOTE(review): the loop's end compare/branch is not visible here.
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
 * dma_flush_range(start, end)
 * Clean and invalidate the specified virtual address range.
 * - start - virtual start address
 * - end - virtual end address
ENTRY(arm925_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
@ Write-back caches need clean+invalidate; write-through caches only
@ need invalidate (nothing dirty can be in the cache).
@ NOTE(review): the loop label and end compare/branch are not visible.
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
 * dma_map_area(start, size, dir)
 * - start - kernel virtual start address
 * - size - size of region
 * - dir - DMA direction
@ Dispatch on direction: DMA_TO_DEVICE -> clean only; greater values
@ (DMA_FROM_DEVICE) -> invalidate; smaller (DMA_BIDIRECTIONAL) ->
@ clean+invalidate.  NOTE(review): the conversion of size to an end
@ address (add r1, r1, r0) is presumably on a hidden line — confirm.
ENTRY(arm925_dma_map_area)
	cmp	r2, #DMA_TO_DEVICE
	beq	arm925_dma_clean_range		@ dir == DMA_TO_DEVICE
	bcs	arm925_dma_inv_range		@ dir >  DMA_TO_DEVICE (unsigned)
	b	arm925_dma_flush_range		@ dir <  DMA_TO_DEVICE
ENDPROC(arm925_dma_map_area)
 * dma_unmap_area(start, size, dir)
 * - start - kernel virtual start address
 * - size - size of region
 * - dir - DMA direction
@ No maintenance is required at unmap time on this CPU.
@ NOTE(review): the return instruction is not visible in this chunk.
ENTRY(arm925_dma_unmap_area)
ENDPROC(arm925_dma_unmap_area)
@ Table of cache-maintenance entry points for this CPU.  The order of
@ these words must match the struct cpu_cache_fns layout expected by
@ the generic ARM cache glue — do not reorder.
ENTRY(arm925_cache_fns)
	.long	arm925_flush_kern_cache_all
	.long	arm925_flush_user_cache_all
	.long	arm925_flush_user_cache_range
	.long	arm925_coherent_kern_range
	.long	arm925_coherent_user_range
	.long	arm925_flush_kern_dcache_area
	.long	arm925_dma_map_area
	.long	arm925_dma_unmap_area
	.long	arm925_dma_flush_range
@ cpu_arm925_dcache_clean_area(addr, size)
@ Clean (write back) the D-cache over [r0, r0+r1).  A no-op in
@ write-through mode apart from the write-buffer drain.
@ NOTE(review): the branch back to 1: on "subs" not yet negative is
@ not visible in this chunk — confirm.
ENTRY(cpu_arm925_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE	@ size -= one line
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
/* =============================== PageTable ============================== */
 * cpu_arm925_switch_mm(pgd)
 * Set the translation base pointer to be as described by pgd.
 * pgd: new page tables
ENTRY(cpu_arm925_switch_mm)
@ Flush the caches before switching the page tables: the ARM925 has
@ virtually-indexed caches, so stale entries must not survive an
@ address-space switch.
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ invalidate D cache
	/* Flush entries in both segments at once, see NOTE1 above */
@ NOTE(review): the decrement of r3 that sets carry for "bcs" is on a
@ hidden line — confirm against the full file.
	mov	r3, #(CACHE_DENTRIES - 1) << 4	@ 256 entries in segment
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean & invalidate D index
	bcs	2b				@ entries 255 to 0
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mcr	p15, 0, r0, c2, c0, 0		@ load page table pointer
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
 * cpu_arm925_set_pte_ext(ptep, pte, ext)
 * Set a PTE and flush it out
@ The PTE store itself (armv3_set_pte_ext macro in the full file,
@ presumably) is on hidden lines; here the written entry is cleaned
@ out of the D-cache and the write buffer drained so the table walk
@ hardware sees it.
ENTRY(cpu_arm925_set_pte_ext)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
#endif /* CONFIG_MMU */
@ __arm925_setup: one-time CPU initialisation run from the head code
@ before the MMU is enabled.  Returns the control-register value to be
@ set (computed via the crval masks below).
	.type	__arm925_setup, #function
#if defined(CONFIG_CPU_ICACHE_STREAMING_DISABLE)
	/* Transparent on, D-cache clean & flush mode. See NOTE2 above */
	orr	r0,r0,#1 << 1			@ transparent mode on
	mcr	p15, 0, r0, c15, c1, 0		@ write TI config register
	mcr	p15, 0, r0, c7, c7		@ invalidate I,D caches on v4
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer on v4
	mcr	p15, 0, r0, c8, c7		@ invalidate I,D TLBs on v4
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mov	r0, #4				@ disable write-back on caches explicitly
	mcr	p15, 7, r0, c15, c0, 0		@ TI-specific cache control (opcode_1 = 7)
	mrc	p15, 0, r0, c1, c0		@ get control register v4
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x4000			@ .1.. .... .... .... (round-robin replacement)
	.size	__arm925_setup, . - __arm925_setup
 * .RVI ZFRS BLDP WCAM
 * .011 0001 ..11 1101
@ Control-register value masks consumed by __arm925_setup / head code:
@ "clear" = bits to clear, "mmuset" = bits to set when the MMU is used,
@ "ucset" = bits to set for uClinux (no MMU).
	.type	arm925_crval, #object
	crval	clear=0x00007f3f, mmuset=0x0000313d, ucset=0x00001130
 * Purpose : Function pointers used to access above functions - all calls
@ come through these (struct processor layout — do not reorder).
	.type	arm925_processor_functions, #object
arm925_processor_functions:
	.word	v4t_early_abort
	.word	cpu_arm925_proc_init
	.word	cpu_arm925_proc_fin
	.word	cpu_arm925_reset
	.word	cpu_arm925_do_idle
	.word	cpu_arm925_dcache_clean_area
	.word	cpu_arm925_switch_mm
	.word	cpu_arm925_set_pte_ext
	.size	arm925_processor_functions, . - arm925_processor_functions
@ Human-readable identification strings referenced from the proc_info
@ records below.  The .asciz string contents are on hidden lines.
	.type	cpu_arch_name, #object
	.size	cpu_arch_name, . - cpu_arch_name
	.type	cpu_elf_name, #object
	.size	cpu_elf_name, . - cpu_elf_name
	.type	cpu_arm925_name, #object
	.size	cpu_arm925_name, . - cpu_arm925_name
@ proc_info record: matched against the CPU ID at boot to select this
@ CPU's functions and section-mapping flags (struct proc_info_list).
	.section ".proc.info.init", #alloc, #execinstr

	.type	__arm925_proc_info,#object
@ Section-mapping PMD flags for the identity map (continuation lines
@ carrying the cache/buffer bits are hidden in this chunk).
	.long	PMD_TYPE_SECT | \
		PMD_SECT_AP_WRITE | \
	.long	PMD_TYPE_SECT | \
		PMD_SECT_AP_WRITE | \
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm925_name
	.long	arm925_processor_functions
	.long	arm925_cache_fns
	.size	__arm925_proc_info, . - __arm925_proc_info
@ proc_info record for the ARM915T, which reuses all of the arm925
@ routines and tables above; only the CPU-ID match value (on hidden
@ lines) differs from __arm925_proc_info.
	.type	__arm915_proc_info,#object
	.long	PMD_TYPE_SECT | \
		PMD_SECT_AP_WRITE | \
	.long	PMD_TYPE_SECT | \
		PMD_SECT_AP_WRITE | \
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm925_name
	.long	arm925_processor_functions
	.long	arm925_cache_fns
	@ Fixed: .size must name the object declared by the .type above
	@ (__arm915_proc_info), not the already-closed __arm925_proc_info.
	.size	__arm915_proc_info, . - __arm915_proc_info