/***************************************************************************
 *   Copyright (C) 2009 by David Brownell                                  *
 *                                                                         *
 *   Copyright (C) ST-Ericsson SA 2011 michel.jaouen@stericsson.com        *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 *   This program is distributed in the hope that it will be useful,       *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of        *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *
 *   GNU General Public License for more details.                          *
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program.  If not, see <http://www.gnu.org/licenses/>. *
 ***************************************************************************/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <helper/replacements.h>

#include "armv7a.h"
#include "arm_disassembler.h"

#include <helper/binarybuffer.h>
#include <helper/command.h>

#include <stdlib.h>

#include "arm_opcodes.h"
#include "target.h"
#include "target_type.h"
static void armv7a_show_fault_registers(struct target *target)
{
	uint32_t dfsr, ifsr, dfar, ifar;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	int retval;

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		return;

	/* ARMV4_5_MRC(cpnum, op1, r0, CRn, CRm, op2) */

	/* c5/c0 - {data, instruction} fault status registers */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 5, 0, 0),
			&dfsr);
	if (retval != ERROR_OK)
		goto done;

	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 5, 0, 1),
			&ifsr);
	if (retval != ERROR_OK)
		goto done;

	/* c6/c0 - {data, instruction} fault address registers */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 6, 0, 0),
			&dfar);
	if (retval != ERROR_OK)
		goto done;

	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 6, 0, 2),
			&ifar);
	if (retval != ERROR_OK)
		goto done;

	LOG_USER("Data fault registers        DFSR: %8.8" PRIx32
		", DFAR: %8.8" PRIx32, dfsr, dfar);
	LOG_USER("Instruction fault registers IFSR: %8.8" PRIx32
		", IFAR: %8.8" PRIx32, ifsr, ifar);

done:
	/* (void) */ dpm->finish(dpm);
}
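
/* MIDR (MRC p15, 0, <Rt>, c0, c0, 0) field layout, per ARM DDI 0406C:
 * [3:0] revision, [15:4] primary part number, [19:16] architecture,
 * [23:20] variant, [31:24] implementer. The decode below follows that layout. */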
/* retrieve main id register */
static int armv7a_read_midr(struct target *target)
{
	int retval = ERROR_FAIL;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t midr;

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		goto done;

	/* MRC p15, 0, <Rd>, c0, c0, 0; read main id register */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 0, 0, 0),
			&midr);
	if (retval != ERROR_OK)
		goto done;

	armv7a->rev = (midr & 0xf);
	armv7a->partnum = (midr >> 4) & 0xfff;
	armv7a->arch = (midr >> 16) & 0xf;
	armv7a->variant = (midr >> 20) & 0xf;
	armv7a->implementor = (midr >> 24) & 0xff;
	LOG_INFO("%s rev %" PRIx32 ", partnum %" PRIx32 ", arch %" PRIx32
		", variant %" PRIx32 ", implementor %" PRIx32,
		target_name(target),
		armv7a->rev,
		armv7a->partnum,
		armv7a->arch,
		armv7a->variant,
		armv7a->implementor);

done:
	dpm->finish(dpm);
	return retval;
}
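
/* TTBCR.N (bits [2:0]) splits the VA space between the two table base
 * registers: VAs below 2^(32-N) are translated through TTBR0, the rest
 * through TTBR1 (with N == 0, TTBR1 is not used). N also sets the alignment
 * of the TTBR0 table base, hence the ttbr_range/ttbr_mask values computed
 * below. */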
static int armv7a_read_ttbcr(struct target *target)
{
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t ttbcr, ttbcr_n;
	int retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		goto done;

	/* MRC p15, 0, <Rt>, c2, c0, 2 ; read CP15 Translation Table Base Control Register */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 2, 0, 2),
			&ttbcr);
	if (retval != ERROR_OK)
		goto done;

	LOG_DEBUG("ttbcr %" PRIx32, ttbcr);

	ttbcr_n = ttbcr & 0x7;
	armv7a->armv7a_mmu.ttbcr = ttbcr;
	armv7a->armv7a_mmu.cached = 1;

	/*
	 * ARM Architecture Reference Manual (ARMv7-A and ARMv7-R edition),
	 * document # ARM DDI 0406C
	 */
	armv7a->armv7a_mmu.ttbr_range[0] = 0xffffffff >> ttbcr_n;
	armv7a->armv7a_mmu.ttbr_range[1] = 0xffffffff;
	armv7a->armv7a_mmu.ttbr_mask[0] = 0xffffffff << (14 - ttbcr_n);
	armv7a->armv7a_mmu.ttbr_mask[1] = 0xffffffff << 14;

	retval = armv7a_read_midr(target);
	if (retval != ERROR_OK)
		goto done;

	/* FIXME: why this special case based on part number? */
	if ((armv7a->partnum & 0xf) == 0) {
		/* ARM DDI 0344H, ARM DDI 0407F */
		armv7a->armv7a_mmu.ttbr_mask[0] = 7 << (32 - ttbcr_n);
	}

	LOG_DEBUG("ttbr1 %s, ttbr0_mask %" PRIx32 " ttbr1_mask %" PRIx32,
		(ttbcr_n != 0) ? "used" : "not used",
		armv7a->armv7a_mmu.ttbr_mask[0],
		armv7a->armv7a_mmu.ttbr_mask[1]);

done:
	dpm->finish(dpm);
	return retval;
}
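
/* Short-descriptor first-level entries are distinguished by bits [1:0]
 * (0 = fault, 1 = page table, 2 = section) and by bit [18] (supersection
 * when set on a section entry); second-level entries encode large pages
 * (bits [1:0] == 1) or small pages. The software table walk below checks
 * exactly those bits. */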
/* method adapted to Cortex-A: reuses the ARMv4/v5 translation code */
int armv7a_mmu_translate_va(struct target *target, uint32_t va, uint32_t *val)
{
	uint32_t first_lvl_descriptor = 0x0;
	uint32_t second_lvl_descriptor = 0x0;
	int retval;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t ttbidx = 0;	/* default to ttbr0 */
	uint32_t ttb_mask;
	uint32_t va_mask;
	uint32_t ttbcr;
	uint32_t ttb;

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		return retval;

	/* MRC p15, 0, <Rt>, c2, c0, 2 ; read CP15 Translation Table Base Control Register */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 2, 0, 2),
			&ttbcr);
	if (retval != ERROR_OK)
		return retval;

	/* if ttbcr has changed or was not read before, re-read the information */
	if ((armv7a->armv7a_mmu.cached == 0) ||
		(armv7a->armv7a_mmu.ttbcr != ttbcr)) {
		armv7a_read_ttbcr(target);
	}

	/* if va is above the range handled by ttbr0, select ttbr1 */
	if (va > armv7a->armv7a_mmu.ttbr_range[0]) {
		/* select ttbr1 */
		ttbidx = 1;
	}

	/* MRC p15, 0, <Rt>, c2, c0, ttbidx */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 2, 0, ttbidx),
			&ttb);
	if (retval != ERROR_OK)
		return retval;

	ttb_mask = armv7a->armv7a_mmu.ttbr_mask[ttbidx];
	va_mask = 0xfff00000 & armv7a->armv7a_mmu.ttbr_range[ttbidx];

	LOG_DEBUG("ttb_mask %" PRIx32 " va_mask %" PRIx32 " ttbidx %i",
		ttb_mask, va_mask, ttbidx);
	retval = armv7a->armv7a_mmu.read_physical_memory(target,
			(ttb & ttb_mask) | ((va & va_mask) >> 18),
			4, 1, (uint8_t *)&first_lvl_descriptor);
	if (retval != ERROR_OK)
		return retval;
	first_lvl_descriptor = target_buffer_get_u32(target, (uint8_t *)
			&first_lvl_descriptor);
	/* reuse armv4_5 piece of code, specific armv7a changes may come later */
	LOG_DEBUG("1st lvl desc: %8.8" PRIx32 "", first_lvl_descriptor);

	if ((first_lvl_descriptor & 0x3) == 0) {
		LOG_ERROR("Address translation failure");
		return ERROR_TARGET_TRANSLATION_FAULT;
	}

	if ((first_lvl_descriptor & 0x40002) == 2) {
		/* section descriptor */
		*val = (first_lvl_descriptor & 0xfff00000) | (va & 0x000fffff);
		return ERROR_OK;
	} else if ((first_lvl_descriptor & 0x40002) == 0x40002) {
		/* supersection descriptor */
		if (first_lvl_descriptor & 0x00f001e0) {
			LOG_ERROR("Physical address does not fit into 32 bits");
			return ERROR_TARGET_TRANSLATION_FAULT;
		}
		*val = (first_lvl_descriptor & 0xff000000) | (va & 0x00ffffff);
		return ERROR_OK;
	}

	/* page table descriptor: walk the second-level table */
	retval = armv7a->armv7a_mmu.read_physical_memory(target,
			(first_lvl_descriptor & 0xfffffc00) | ((va & 0x000ff000) >> 10),
			4, 1, (uint8_t *)&second_lvl_descriptor);
	if (retval != ERROR_OK)
		return retval;

	second_lvl_descriptor = target_buffer_get_u32(target, (uint8_t *)
			&second_lvl_descriptor);

	LOG_DEBUG("2nd lvl desc: %8.8" PRIx32 "", second_lvl_descriptor);

	if ((second_lvl_descriptor & 0x3) == 0) {
		LOG_ERROR("Address translation failure");
		return ERROR_TARGET_TRANSLATION_FAULT;
	}

	if ((second_lvl_descriptor & 0x3) == 1) {
		/* large page descriptor */
		*val = (second_lvl_descriptor & 0xffff0000) | (va & 0x0000ffff);
	} else {
		/* small page descriptor */
		*val = (second_lvl_descriptor & 0xfffff000) | (va & 0x00000fff);
	}

	return ERROR_OK;
}
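
/* Unlike the software walk above, the next routine asks the MMU itself to
 * translate: it writes the VA to the ATS1CPR operation (MCR p15, 0, <Rt>,
 * c7, c8, 0) and reads the result back from the PAR (MRC p15, 0, <Rt>,
 * c7, c4, 0), which also carries the shareability/cacheability attributes
 * decoded below. */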
/* ARMv7 VA-to-PA conversion using the hardware address translation operation */
int armv7a_mmu_translate_va_pa(struct target *target, uint32_t va,
	uint32_t *val, int meminfo)
{
	int retval = ERROR_FAIL;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t virt = va & ~0xfff;
	uint32_t NOS, NS, INNER, OUTER;

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		goto done;

	/* the MMU must be enabled to get a correct translation;
	 * use the VA-to-PA CP15 operation for the conversion */
	retval = dpm->instr_write_data_r0(dpm,
			ARMV4_5_MCR(15, 0, 0, 7, 8, 0),
			virt);
	if (retval != ERROR_OK)
		goto done;
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 7, 4, 0),
			val);

	/* decode memory attributes */
	NOS = (*val >> 10) & 1;	/* Not Outer Shareable */
	NS = (*val >> 9) & 1;	/* Non-Secure */
	INNER = (*val >> 4) & 0x7;
	OUTER = (*val >> 2) & 0x3;

	if (retval != ERROR_OK)
		goto done;
	*val = (*val & ~0xfff) + (va & 0xfff);
	if (*val == va)
		LOG_WARNING("virt = phys: MMU is disabled");
	if (meminfo) {
		LOG_INFO("%" PRIx32 " : %" PRIx32 " %s outer shareable %s secured",
			va, *val,
			NOS == 1 ? "not" : " ",
			NS == 1 ? "not" : "");
		switch (OUTER) {
			case 0:
				LOG_INFO("outer: Non-Cacheable");
				break;
			case 1:
				LOG_INFO("outer: Write-Back, Write-Allocate");
				break;
			case 2:
				LOG_INFO("outer: Write-Through, No Write-Allocate");
				break;
			case 3:
				LOG_INFO("outer: Write-Back, no Write-Allocate");
				break;
		}
		switch (INNER) {
			case 0:
				LOG_INFO("inner: Non-Cacheable");
				break;
			case 1:
				LOG_INFO("inner: Strongly-ordered");
				break;
			case 3:
				LOG_INFO("inner: Device");
				break;
			case 5:
				LOG_INFO("inner: Write-Back, Write-Allocate");
				break;
			case 6:
				LOG_INFO("inner: Write-Through");
				break;
			case 7:
				LOG_INFO("inner: Write-Back, no Write-Allocate");
				break;
			default:
				LOG_INFO("inner: %" PRIx32 " ???", INNER);
		}
	}

done:
	dpm->finish(dpm);
	return retval;
}
/* FIXME: remove it */
static int armv7a_l2x_cache_init(struct target *target, uint32_t base, uint32_t way)
{
	struct armv7a_l2x_cache *l2x_cache;
	struct target_list *head = target->head;
	struct target *curr;

	struct armv7a_common *armv7a = target_to_armv7a(target);
	l2x_cache = calloc(1, sizeof(struct armv7a_l2x_cache));
	l2x_cache->base = base;
	l2x_cache->way = way;
	/*LOG_INFO("cache l2 initialized base %x  way %d",
	l2x_cache->base,l2x_cache->way);*/
	if (armv7a->armv7a_mmu.armv7a_cache.outer_cache)
		LOG_INFO("outer cache already initialized\n");
	armv7a->armv7a_mmu.armv7a_cache.outer_cache = l2x_cache;
	/* initialize all targets in this cluster (SMP target);
	 * the L2 cache must be configured after the SMP declaration */
	while (head != (struct target_list *)NULL) {
		curr = head->target;
		if (curr != target) {
			armv7a = target_to_armv7a(curr);
			if (armv7a->armv7a_mmu.armv7a_cache.outer_cache)
				LOG_ERROR("smp target: outer cache already initialized\n");
			armv7a->armv7a_mmu.armv7a_cache.outer_cache = l2x_cache;
		}
		head = head->next;
	}
	return ERROR_OK;
}
/* FIXME: remove it */
COMMAND_HANDLER(handle_cache_l2x)
{
	struct target *target = get_current_target(CMD_CTX);
	uint32_t base, way;

	if (CMD_ARGC != 2)
		return ERROR_COMMAND_SYNTAX_ERROR;

	/* command_print(CMD_CTX, "%s %s", CMD_ARGV[0], CMD_ARGV[1]); */
	COMMAND_PARSE_NUMBER(u32, CMD_ARGV[0], base);
	COMMAND_PARSE_NUMBER(u32, CMD_ARGV[1], way);

	armv7a_l2x_cache_init(target, base, way);

	return ERROR_OK;
}
int armv7a_handle_cache_info_command(struct command_context *cmd_ctx,
	struct armv7a_cache_common *armv7a_cache)
{
	struct armv7a_l2x_cache *l2x_cache = (struct armv7a_l2x_cache *)
		(armv7a_cache->outer_cache);
	int cl;

	if (armv7a_cache->info == -1) {
		command_print(cmd_ctx, "cache not yet identified");
		return ERROR_OK;
	}

	for (cl = 0; cl < armv7a_cache->loc; cl++) {
		struct armv7a_arch_cache *arch = &(armv7a_cache->arch[cl]);

		if (arch->ctype & 1) {
			command_print(cmd_ctx,
				"L%d I-Cache: linelen %" PRIi32
				", associativity %" PRIi32
				", nsets %" PRIi32
				", cachesize %" PRId32 " KBytes",
				cl + 1,
				arch->i_size.linelen,
				arch->i_size.associativity,
				arch->i_size.nsets,
				arch->i_size.cachesize);
		}

		if (arch->ctype >= 2) {
			command_print(cmd_ctx,
				"L%d D-Cache: linelen %" PRIi32
				", associativity %" PRIi32
				", nsets %" PRIi32
				", cachesize %" PRId32 " KBytes",
				cl + 1,
				arch->d_u_size.linelen,
				arch->d_u_size.associativity,
				arch->d_u_size.nsets,
				arch->d_u_size.cachesize);
		}
	}

	if (l2x_cache != NULL)
		command_print(cmd_ctx, "Outer unified cache Base Address 0x%" PRIx32
			", %" PRId32 " ways",
			l2x_cache->base, l2x_cache->way);

	return ERROR_OK;
}
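
/* MPIDR (MRC p15, 0, <Rt>, c0, c0, 5) in the multiprocessing-extensions
 * format has bit [31] set; bit [30] is the U (uniprocessor) bit, Aff1
 * ([15:8]) identifies the cluster and Aff0 ([7:0]) the core within it. */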
/* retrieve core id and cluster id */
static int armv7a_read_mpidr(struct target *target)
{
	int retval = ERROR_FAIL;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t mpidr;

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		goto done;

	/* MRC p15, 0, <Rd>, c0, c0, 5; read Multiprocessor Affinity Register */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 0, 0, 5),
			&mpidr);
	if (retval != ERROR_OK)
		goto done;

	/* ARMv7-R uses a different format for MPIDR.
	 * When configured uniprocessor (most R cores) it reads as 0.
	 * This will need to be implemented for multiprocessor ARMv7-R cores. */
	if (armv7a->is_armv7r) {
		if (mpidr)
			LOG_ERROR("MPIDR nonzero in ARMv7-R target");
		goto done;
	}

	if (mpidr & 1u << 31) {
		armv7a->multi_processor_system = (mpidr >> 30) & 1;
		armv7a->cluster_id = (mpidr >> 8) & 0xf;
		armv7a->cpu_id = mpidr & 0x3;
		LOG_INFO("%s cluster %x core %x %s", target_name(target),
			armv7a->cluster_id,
			armv7a->cpu_id,
			armv7a->multi_processor_system == 0 ? "multi core" : "mono core");
	} else
		LOG_ERROR("MPIDR not in multiprocessor format");

done:
	dpm->finish(dpm);
	return retval;
}
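
/* Cache geometry is read through the CSSELR/CCSIDR pair: write the level
 * and the I/D selector to CSSELR (MCR p15, 2, <Rt>, c0, c0, 0), then read
 * the corresponding CCSIDR (MRC p15, 1, <Rt>, c0, c0, 0). */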
static int get_cache_info(struct arm_dpm *dpm, int cl, int ct, uint32_t *cache_reg)
{
	int retval = ERROR_OK;

	/* select cache level */
	retval = dpm->instr_write_data_r0(dpm,
			ARMV4_5_MCR(15, 2, 0, 0, 0, 0),
			(cl << 1) | (ct == 1 ? 1 : 0));
	if (retval != ERROR_OK)
		goto done;

	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 1, 0, 0, 0, 0),
			cache_reg);
done:
	return retval;
}
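
/* CCSIDR fields: LineSize [2:0] (log2(words per line) - 2, hence
 * 16 << LineSize bytes), Associativity [12:3] (ways - 1) and
 * NumSets [27:13] (sets - 1). */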
static struct armv7a_cachesize decode_cache_reg(uint32_t cache_reg)
{
	struct armv7a_cachesize size;
	int i = 0;

	size.linelen = 16 << (cache_reg & 0x7);
	size.associativity = ((cache_reg >> 3) & 0x3ff) + 1;
	size.nsets = ((cache_reg >> 13) & 0x7fff) + 1;
	size.cachesize = size.linelen * size.associativity * size.nsets / 1024;

	/* compute info for set/way operations on the cache */
	size.index_shift = (cache_reg & 0x7) + 4;
	size.index = (cache_reg >> 13) & 0x7fff;
	size.way = ((cache_reg >> 3) & 0x3ff);

	while (((size.way << i) & 0x80000000) == 0)
		i++;
	size.way_shift = i;

	return size;
}
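
/* Cache identification walks CTR (line sizes), CLIDR (cache type per level
 * and Level of Coherence) and, for each implemented level, the CSSELR/CCSIDR
 * pair read by get_cache_info() above. */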
int armv7a_identify_cache(struct target *target)
{
	/* read cache descriptors */
	int retval = ERROR_FAIL;
	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm_dpm *dpm = armv7a->arm.dpm;
	uint32_t csselr, clidr, ctr;
	uint32_t cache_reg;
	int cl, ctype;
	struct armv7a_cache_common *cache =
		&(armv7a->armv7a_mmu.armv7a_cache);

	if (!armv7a->is_armv7r)
		armv7a_read_ttbcr(target);

	retval = dpm->prepare(dpm);
	if (retval != ERROR_OK)
		goto done;

	/* retrieve CTR
	 * mrc p15, 0, r0, c0, c0, 1		@ read ctr */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 0, 0, 0, 0, 1),
			&ctr);
	if (retval != ERROR_OK)
		goto done;

	cache->iminline = 4UL << (ctr & 0xf);
	cache->dminline = 4UL << ((ctr & 0xf0000) >> 16);
	LOG_DEBUG("ctr %" PRIx32 " ctr.iminline %" PRId32 " ctr.dminline %" PRId32,
		ctr, cache->iminline, cache->dminline);

	/* retrieve CLIDR
	 * mrc p15, 1, r0, c0, c0, 1		@ read clidr */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 1, 0, 0, 0, 1),
			&clidr);
	if (retval != ERROR_OK)
		goto done;

	/* number of cache levels to the Point of Coherence */
	cache->loc = (clidr & 0x7000000) >> 24;
	LOG_DEBUG("Number of cache levels to PoC %" PRId32, cache->loc);

	/* retrieve the selected cache for later restore
	 * MRC p15, 2, <Rd>, c0, c0, 0; read CSSELR */
	retval = dpm->instr_read_data_r0(dpm,
			ARMV4_5_MRC(15, 2, 0, 0, 0, 0),
			&csselr);
	if (retval != ERROR_OK)
		goto done;

	/* retrieve all available inner caches */
	for (cl = 0; cl < cache->loc; clidr >>= 3, cl++) {

		/* isolate cache type at current level */
		ctype = clidr & 7;

		/* skip reserved values */
		if (ctype > CACHE_LEVEL_HAS_UNIFIED_CACHE)
			continue;

		/* separate d or unified d/i cache at this level? */
		if (ctype & (CACHE_LEVEL_HAS_UNIFIED_CACHE | CACHE_LEVEL_HAS_D_CACHE)) {
			/* retrieve d-cache info */
			retval = get_cache_info(dpm, cl, 0, &cache_reg);
			if (retval != ERROR_OK)
				goto done;
			cache->arch[cl].d_u_size = decode_cache_reg(cache_reg);

			LOG_DEBUG("data/unified cache index %d << %d, way %d << %d",
				cache->arch[cl].d_u_size.index,
				cache->arch[cl].d_u_size.index_shift,
				cache->arch[cl].d_u_size.way,
				cache->arch[cl].d_u_size.way_shift);

			LOG_DEBUG("cacheline %d bytes %d KBytes asso %d ways",
				cache->arch[cl].d_u_size.linelen,
				cache->arch[cl].d_u_size.cachesize,
				cache->arch[cl].d_u_size.associativity);
		}

		/* separate i-cache at this level? */
		if (ctype & CACHE_LEVEL_HAS_I_CACHE) {
			/* retrieve i-cache info */
			retval = get_cache_info(dpm, cl, 1, &cache_reg);
			if (retval != ERROR_OK)
				goto done;
			cache->arch[cl].i_size = decode_cache_reg(cache_reg);

			LOG_DEBUG("instruction cache index %d << %d, way %d << %d",
				cache->arch[cl].i_size.index,
				cache->arch[cl].i_size.index_shift,
				cache->arch[cl].i_size.way,
				cache->arch[cl].i_size.way_shift);

			LOG_DEBUG("cacheline %d bytes %d KBytes asso %d ways",
				cache->arch[cl].i_size.linelen,
				cache->arch[cl].i_size.cachesize,
				cache->arch[cl].i_size.associativity);
		}

		cache->arch[cl].ctype = ctype;
	}

	/* restore the selected cache (write CSSELR back) */
	retval = dpm->instr_write_data_r0(dpm,
			ARMV4_5_MCR(15, 2, 0, 0, 0, 0),
			csselr);
	if (retval != ERROR_OK)
		goto done;

	/* if there is no L2 cache, fall back to the L1 data cache flush function */
	if (armv7a->armv7a_mmu.armv7a_cache.flush_all_data_cache == NULL) {
		armv7a->armv7a_mmu.armv7a_cache.flush_all_data_cache =
			armv7a_cache_auto_flush_all_data;
	}

	armv7a->armv7a_mmu.armv7a_cache.info = 1;
done:
	dpm->finish(dpm);
	armv7a_read_mpidr(target);
	return retval;
}
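
/* Semihosting relies on vector catch: setting the SVC bit in DBGVCR makes
 * the core halt when it takes an SVC exception, so the debugger can service
 * the semihosting request. */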
static int armv7a_setup_semihosting(struct target *target, int enable)
{
	struct armv7a_common *armv7a = target_to_armv7a(target);
	uint32_t vcr;
	int ret;

	ret = mem_ap_read_atomic_u32(armv7a->debug_ap,
			armv7a->debug_base + CPUDBG_VCR,
			&vcr);
	if (ret != ERROR_OK) {
		LOG_ERROR("Failed to read VCR register");
		return ret;
	}

	if (enable)
		vcr |= DBG_VCR_SVC_MASK;
	else
		vcr &= ~DBG_VCR_SVC_MASK;

	ret = mem_ap_write_atomic_u32(armv7a->debug_ap,
			armv7a->debug_base + CPUDBG_VCR,
			vcr);
	if (ret != ERROR_OK)
		LOG_ERROR("Failed to write VCR register");

	return ret;
}
int armv7a_init_arch_info(struct target *target, struct armv7a_common *armv7a)
{
	struct arm *arm = &armv7a->arm;
	arm->arch_info = armv7a;
	target->arch_info = &armv7a->arm;
	arm->setup_semihosting = armv7a_setup_semihosting;
	/* target is useful in all functions, for ARMv4/v5-compatible code paths */
	armv7a->arm.target = target;
	armv7a->arm.common_magic = ARM_COMMON_MAGIC;
	armv7a->common_magic = ARMV7_COMMON_MAGIC;
	armv7a->armv7a_mmu.armv7a_cache.info = -1;
	armv7a->armv7a_mmu.armv7a_cache.outer_cache = NULL;
	armv7a->armv7a_mmu.armv7a_cache.flush_all_data_cache = NULL;
	armv7a->armv7a_mmu.armv7a_cache.auto_cache_enabled = 1;

	return ERROR_OK;
}
int armv7a_arch_state(struct target *target)
{
	static const char *state[] = {
		"disabled", "enabled"
	};

	struct armv7a_common *armv7a = target_to_armv7a(target);
	struct arm *arm = &armv7a->arm;

	if (armv7a->common_magic != ARMV7_COMMON_MAGIC) {
		LOG_ERROR("BUG: called for a non-ARMv7A target");
		return ERROR_COMMAND_SYNTAX_ERROR;
	}

	arm_arch_state(target);

	if (armv7a->is_armv7r) {
		LOG_USER("D-Cache: %s, I-Cache: %s",
			state[armv7a->armv7a_mmu.armv7a_cache.d_u_cache_enabled],
			state[armv7a->armv7a_mmu.armv7a_cache.i_cache_enabled]);
	} else {
		LOG_USER("MMU: %s, D-Cache: %s, I-Cache: %s",
			state[armv7a->armv7a_mmu.mmu_enabled],
			state[armv7a->armv7a_mmu.armv7a_cache.d_u_cache_enabled],
			state[armv7a->armv7a_mmu.armv7a_cache.i_cache_enabled]);
	}

	if (arm->core_mode == ARM_MODE_ABT)
		armv7a_show_fault_registers(target);
	if (target->debug_reason == DBG_REASON_WATCHPOINT)
		LOG_USER("Watchpoint triggered at PC %#08x",
			(unsigned) armv7a->dpm.wp_pc);

	return ERROR_OK;
}
static const struct command_registration l2_cache_commands[] = {
	{
		.name = "l2x",
		.handler = handle_cache_l2x,
		.mode = COMMAND_EXEC,
		.help = "configure l2x cache",
		.usage = "[base_addr] [number_of_way]",
	},
	COMMAND_REGISTRATION_DONE
};

const struct command_registration l2x_cache_command_handlers[] = {
	{
		.name = "cache_config",
		.mode = COMMAND_EXEC,
		.help = "cache configuration for a target",
		.usage = "",
		.chain = l2_cache_commands,
	},
	COMMAND_REGISTRATION_DONE
};

const struct command_registration armv7a_command_handlers[] = {
	{
		.chain = dap_command_handlers,
	},
	{
		.chain = l2x_cache_command_handlers,
	},
	{
		.chain = arm7a_cache_command_handlers,
	},
	COMMAND_REGISTRATION_DONE
};