2 * QEMU model of the ZynqMP generic DMA
4 * Copyright (c) 2014 Xilinx Inc.
5 * Copyright (c) 2018 FEIMTECH AB
7 * Written by Edgar E. Iglesias <edgar.iglesias@xilinx.com>,
8 * Francisco Iglesias <francisco.iglesias@feimtech.se>
10 * Permission is hereby granted, free of charge, to any person obtaining a copy
11 * of this software and associated documentation files (the "Software"), to deal
12 * in the Software without restriction, including without limitation the rights
13 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 * copies of the Software, and to permit persons to whom the Software is
15 * furnished to do so, subject to the following conditions:
17 * The above copyright notice and this permission notice shall be included in
18 * all copies or substantial portions of the Software.
20 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
23 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#include "qemu/osdep.h"
#include "hw/dma/xlnx-zdma.h"
#include "hw/irq.h"
#include "hw/qdev-properties.h"
#include "migration/vmstate.h"
#include "qemu/bitops.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "qapi/error.h"
/* Compile-time debug verbosity; 0 disables the debug printouts. */
#ifndef XLNX_ZDMA_ERR_DEBUG
#define XLNX_ZDMA_ERR_DEBUG 0
#endif
43 REG32(ZDMA_ERR_CTRL
, 0x0)
44 FIELD(ZDMA_ERR_CTRL
, APB_ERR_RES
, 0, 1)
45 REG32(ZDMA_CH_ISR
, 0x100)
46 FIELD(ZDMA_CH_ISR
, DMA_PAUSE
, 11, 1)
47 FIELD(ZDMA_CH_ISR
, DMA_DONE
, 10, 1)
48 FIELD(ZDMA_CH_ISR
, AXI_WR_DATA
, 9, 1)
49 FIELD(ZDMA_CH_ISR
, AXI_RD_DATA
, 8, 1)
50 FIELD(ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, 7, 1)
51 FIELD(ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, 6, 1)
52 FIELD(ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, 5, 1)
53 FIELD(ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, 4, 1)
54 FIELD(ZDMA_CH_ISR
, BYTE_CNT_OVRFL
, 3, 1)
55 FIELD(ZDMA_CH_ISR
, DST_DSCR_DONE
, 2, 1)
56 FIELD(ZDMA_CH_ISR
, SRC_DSCR_DONE
, 1, 1)
57 FIELD(ZDMA_CH_ISR
, INV_APB
, 0, 1)
58 REG32(ZDMA_CH_IMR
, 0x104)
59 FIELD(ZDMA_CH_IMR
, DMA_PAUSE
, 11, 1)
60 FIELD(ZDMA_CH_IMR
, DMA_DONE
, 10, 1)
61 FIELD(ZDMA_CH_IMR
, AXI_WR_DATA
, 9, 1)
62 FIELD(ZDMA_CH_IMR
, AXI_RD_DATA
, 8, 1)
63 FIELD(ZDMA_CH_IMR
, AXI_RD_DST_DSCR
, 7, 1)
64 FIELD(ZDMA_CH_IMR
, AXI_RD_SRC_DSCR
, 6, 1)
65 FIELD(ZDMA_CH_IMR
, IRQ_DST_ACCT_ERR
, 5, 1)
66 FIELD(ZDMA_CH_IMR
, IRQ_SRC_ACCT_ERR
, 4, 1)
67 FIELD(ZDMA_CH_IMR
, BYTE_CNT_OVRFL
, 3, 1)
68 FIELD(ZDMA_CH_IMR
, DST_DSCR_DONE
, 2, 1)
69 FIELD(ZDMA_CH_IMR
, SRC_DSCR_DONE
, 1, 1)
70 FIELD(ZDMA_CH_IMR
, INV_APB
, 0, 1)
71 REG32(ZDMA_CH_IEN
, 0x108)
72 FIELD(ZDMA_CH_IEN
, DMA_PAUSE
, 11, 1)
73 FIELD(ZDMA_CH_IEN
, DMA_DONE
, 10, 1)
74 FIELD(ZDMA_CH_IEN
, AXI_WR_DATA
, 9, 1)
75 FIELD(ZDMA_CH_IEN
, AXI_RD_DATA
, 8, 1)
76 FIELD(ZDMA_CH_IEN
, AXI_RD_DST_DSCR
, 7, 1)
77 FIELD(ZDMA_CH_IEN
, AXI_RD_SRC_DSCR
, 6, 1)
78 FIELD(ZDMA_CH_IEN
, IRQ_DST_ACCT_ERR
, 5, 1)
79 FIELD(ZDMA_CH_IEN
, IRQ_SRC_ACCT_ERR
, 4, 1)
80 FIELD(ZDMA_CH_IEN
, BYTE_CNT_OVRFL
, 3, 1)
81 FIELD(ZDMA_CH_IEN
, DST_DSCR_DONE
, 2, 1)
82 FIELD(ZDMA_CH_IEN
, SRC_DSCR_DONE
, 1, 1)
83 FIELD(ZDMA_CH_IEN
, INV_APB
, 0, 1)
84 REG32(ZDMA_CH_IDS
, 0x10c)
85 FIELD(ZDMA_CH_IDS
, DMA_PAUSE
, 11, 1)
86 FIELD(ZDMA_CH_IDS
, DMA_DONE
, 10, 1)
87 FIELD(ZDMA_CH_IDS
, AXI_WR_DATA
, 9, 1)
88 FIELD(ZDMA_CH_IDS
, AXI_RD_DATA
, 8, 1)
89 FIELD(ZDMA_CH_IDS
, AXI_RD_DST_DSCR
, 7, 1)
90 FIELD(ZDMA_CH_IDS
, AXI_RD_SRC_DSCR
, 6, 1)
91 FIELD(ZDMA_CH_IDS
, IRQ_DST_ACCT_ERR
, 5, 1)
92 FIELD(ZDMA_CH_IDS
, IRQ_SRC_ACCT_ERR
, 4, 1)
93 FIELD(ZDMA_CH_IDS
, BYTE_CNT_OVRFL
, 3, 1)
94 FIELD(ZDMA_CH_IDS
, DST_DSCR_DONE
, 2, 1)
95 FIELD(ZDMA_CH_IDS
, SRC_DSCR_DONE
, 1, 1)
96 FIELD(ZDMA_CH_IDS
, INV_APB
, 0, 1)
97 REG32(ZDMA_CH_CTRL0
, 0x110)
98 FIELD(ZDMA_CH_CTRL0
, OVR_FETCH
, 7, 1)
99 FIELD(ZDMA_CH_CTRL0
, POINT_TYPE
, 6, 1)
100 FIELD(ZDMA_CH_CTRL0
, MODE
, 4, 2)
101 FIELD(ZDMA_CH_CTRL0
, RATE_CTRL
, 3, 1)
102 FIELD(ZDMA_CH_CTRL0
, CONT_ADDR
, 2, 1)
103 FIELD(ZDMA_CH_CTRL0
, CONT
, 1, 1)
104 REG32(ZDMA_CH_CTRL1
, 0x114)
105 FIELD(ZDMA_CH_CTRL1
, DST_ISSUE
, 5, 5)
106 FIELD(ZDMA_CH_CTRL1
, SRC_ISSUE
, 0, 5)
107 REG32(ZDMA_CH_FCI
, 0x118)
108 FIELD(ZDMA_CH_FCI
, PROG_CELL_CNT
, 2, 2)
109 FIELD(ZDMA_CH_FCI
, SIDE
, 1, 1)
110 FIELD(ZDMA_CH_FCI
, EN
, 0, 1)
111 REG32(ZDMA_CH_STATUS
, 0x11c)
112 FIELD(ZDMA_CH_STATUS
, STATE
, 0, 2)
113 REG32(ZDMA_CH_DATA_ATTR
, 0x120)
114 FIELD(ZDMA_CH_DATA_ATTR
, ARBURST
, 26, 2)
115 FIELD(ZDMA_CH_DATA_ATTR
, ARCACHE
, 22, 4)
116 FIELD(ZDMA_CH_DATA_ATTR
, ARQOS
, 18, 4)
117 FIELD(ZDMA_CH_DATA_ATTR
, ARLEN
, 14, 4)
118 FIELD(ZDMA_CH_DATA_ATTR
, AWBURST
, 12, 2)
119 FIELD(ZDMA_CH_DATA_ATTR
, AWCACHE
, 8, 4)
120 FIELD(ZDMA_CH_DATA_ATTR
, AWQOS
, 4, 4)
121 FIELD(ZDMA_CH_DATA_ATTR
, AWLEN
, 0, 4)
122 REG32(ZDMA_CH_DSCR_ATTR
, 0x124)
123 FIELD(ZDMA_CH_DSCR_ATTR
, AXCOHRNT
, 8, 1)
124 FIELD(ZDMA_CH_DSCR_ATTR
, AXCACHE
, 4, 4)
125 FIELD(ZDMA_CH_DSCR_ATTR
, AXQOS
, 0, 4)
126 REG32(ZDMA_CH_SRC_DSCR_WORD0
, 0x128)
127 REG32(ZDMA_CH_SRC_DSCR_WORD1
, 0x12c)
128 FIELD(ZDMA_CH_SRC_DSCR_WORD1
, MSB
, 0, 17)
129 REG32(ZDMA_CH_SRC_DSCR_WORD2
, 0x130)
130 FIELD(ZDMA_CH_SRC_DSCR_WORD2
, SIZE
, 0, 30)
131 REG32(ZDMA_CH_SRC_DSCR_WORD3
, 0x134)
132 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, CMD
, 3, 2)
133 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, INTR
, 2, 1)
134 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, TYPE
, 1, 1)
135 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, COHRNT
, 0, 1)
136 REG32(ZDMA_CH_DST_DSCR_WORD0
, 0x138)
137 REG32(ZDMA_CH_DST_DSCR_WORD1
, 0x13c)
138 FIELD(ZDMA_CH_DST_DSCR_WORD1
, MSB
, 0, 17)
139 REG32(ZDMA_CH_DST_DSCR_WORD2
, 0x140)
140 FIELD(ZDMA_CH_DST_DSCR_WORD2
, SIZE
, 0, 30)
141 REG32(ZDMA_CH_DST_DSCR_WORD3
, 0x144)
142 FIELD(ZDMA_CH_DST_DSCR_WORD3
, INTR
, 2, 1)
143 FIELD(ZDMA_CH_DST_DSCR_WORD3
, TYPE
, 1, 1)
144 FIELD(ZDMA_CH_DST_DSCR_WORD3
, COHRNT
, 0, 1)
145 REG32(ZDMA_CH_WR_ONLY_WORD0
, 0x148)
146 REG32(ZDMA_CH_WR_ONLY_WORD1
, 0x14c)
147 REG32(ZDMA_CH_WR_ONLY_WORD2
, 0x150)
148 REG32(ZDMA_CH_WR_ONLY_WORD3
, 0x154)
149 REG32(ZDMA_CH_SRC_START_LSB
, 0x158)
150 REG32(ZDMA_CH_SRC_START_MSB
, 0x15c)
151 FIELD(ZDMA_CH_SRC_START_MSB
, ADDR
, 0, 17)
152 REG32(ZDMA_CH_DST_START_LSB
, 0x160)
153 REG32(ZDMA_CH_DST_START_MSB
, 0x164)
154 FIELD(ZDMA_CH_DST_START_MSB
, ADDR
, 0, 17)
155 REG32(ZDMA_CH_RATE_CTRL
, 0x18c)
156 FIELD(ZDMA_CH_RATE_CTRL
, CNT
, 0, 12)
157 REG32(ZDMA_CH_SRC_CUR_PYLD_LSB
, 0x168)
158 REG32(ZDMA_CH_SRC_CUR_PYLD_MSB
, 0x16c)
159 FIELD(ZDMA_CH_SRC_CUR_PYLD_MSB
, ADDR
, 0, 17)
160 REG32(ZDMA_CH_DST_CUR_PYLD_LSB
, 0x170)
161 REG32(ZDMA_CH_DST_CUR_PYLD_MSB
, 0x174)
162 FIELD(ZDMA_CH_DST_CUR_PYLD_MSB
, ADDR
, 0, 17)
163 REG32(ZDMA_CH_SRC_CUR_DSCR_LSB
, 0x178)
164 REG32(ZDMA_CH_SRC_CUR_DSCR_MSB
, 0x17c)
165 FIELD(ZDMA_CH_SRC_CUR_DSCR_MSB
, ADDR
, 0, 17)
166 REG32(ZDMA_CH_DST_CUR_DSCR_LSB
, 0x180)
167 REG32(ZDMA_CH_DST_CUR_DSCR_MSB
, 0x184)
168 FIELD(ZDMA_CH_DST_CUR_DSCR_MSB
, ADDR
, 0, 17)
169 REG32(ZDMA_CH_TOTAL_BYTE
, 0x188)
170 REG32(ZDMA_CH_RATE_CNTL
, 0x18c)
171 FIELD(ZDMA_CH_RATE_CNTL
, CNT
, 0, 12)
172 REG32(ZDMA_CH_IRQ_SRC_ACCT
, 0x190)
173 FIELD(ZDMA_CH_IRQ_SRC_ACCT
, CNT
, 0, 8)
174 REG32(ZDMA_CH_IRQ_DST_ACCT
, 0x194)
175 FIELD(ZDMA_CH_IRQ_DST_ACCT
, CNT
, 0, 8)
176 REG32(ZDMA_CH_DBG0
, 0x198)
177 FIELD(ZDMA_CH_DBG0
, CMN_BUF_FREE
, 0, 9)
178 REG32(ZDMA_CH_DBG1
, 0x19c)
179 FIELD(ZDMA_CH_DBG1
, CMN_BUF_OCC
, 0, 9)
180 REG32(ZDMA_CH_CTRL2
, 0x200)
181 FIELD(ZDMA_CH_CTRL2
, EN
, 0, 1)
209 static void zdma_ch_imr_update_irq(XlnxZDMA
*s
)
213 pending
= s
->regs
[R_ZDMA_CH_ISR
] & ~s
->regs
[R_ZDMA_CH_IMR
];
215 qemu_set_irq(s
->irq_zdma_ch_imr
, pending
);
218 static void zdma_ch_isr_postw(RegisterInfo
*reg
, uint64_t val64
)
220 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
221 zdma_ch_imr_update_irq(s
);
224 static uint64_t zdma_ch_ien_prew(RegisterInfo
*reg
, uint64_t val64
)
226 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
227 uint32_t val
= val64
;
229 s
->regs
[R_ZDMA_CH_IMR
] &= ~val
;
230 zdma_ch_imr_update_irq(s
);
234 static uint64_t zdma_ch_ids_prew(RegisterInfo
*reg
, uint64_t val64
)
236 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
237 uint32_t val
= val64
;
239 s
->regs
[R_ZDMA_CH_IMR
] |= val
;
240 zdma_ch_imr_update_irq(s
);
244 static void zdma_set_state(XlnxZDMA
*s
, XlnxZDMAState state
)
247 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, state
);
249 /* Signal error if we have an error condition. */
251 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, 3);
255 static void zdma_src_done(XlnxZDMA
*s
)
258 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
);
260 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
, cnt
);
261 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, SRC_DSCR_DONE
, true);
263 /* Did we overflow? */
264 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
)) {
265 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, true);
267 zdma_ch_imr_update_irq(s
);
270 static void zdma_dst_done(XlnxZDMA
*s
)
273 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
);
275 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
, cnt
);
276 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DST_DSCR_DONE
, true);
278 /* Did we overflow? */
279 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
)) {
280 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, true);
282 zdma_ch_imr_update_irq(s
);
285 static uint64_t zdma_get_regaddr64(XlnxZDMA
*s
, unsigned int basereg
)
289 addr
= s
->regs
[basereg
+ 1];
291 addr
|= s
->regs
[basereg
];
296 static void zdma_put_regaddr64(XlnxZDMA
*s
, unsigned int basereg
, uint64_t addr
)
298 s
->regs
[basereg
] = addr
;
299 s
->regs
[basereg
+ 1] = addr
>> 32;
302 static bool zdma_load_descriptor(XlnxZDMA
*s
, uint64_t addr
, void *buf
)
304 /* ZDMA descriptors must be aligned to their own size. */
305 if (addr
% sizeof(XlnxZDMADescr
)) {
306 qemu_log_mask(LOG_GUEST_ERROR
,
307 "zdma: unaligned descriptor at %" PRIx64
,
309 memset(buf
, 0x0, sizeof(XlnxZDMADescr
));
314 address_space_rw(s
->dma_as
, addr
, s
->attr
,
315 buf
, sizeof(XlnxZDMADescr
), false);
319 static void zdma_load_src_descriptor(XlnxZDMA
*s
)
322 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
324 if (ptype
== PT_REG
) {
325 memcpy(&s
->dsc_src
, &s
->regs
[R_ZDMA_CH_SRC_DSCR_WORD0
],
330 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
332 if (!zdma_load_descriptor(s
, src_addr
, &s
->dsc_src
)) {
333 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, true);
337 static void zdma_load_dst_descriptor(XlnxZDMA
*s
)
340 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
342 if (ptype
== PT_REG
) {
343 memcpy(&s
->dsc_dst
, &s
->regs
[R_ZDMA_CH_DST_DSCR_WORD0
],
348 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
);
350 if (!zdma_load_descriptor(s
, dst_addr
, &s
->dsc_dst
)) {
351 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, true);
355 static uint64_t zdma_update_descr_addr(XlnxZDMA
*s
, bool type
,
356 unsigned int basereg
)
360 if (type
== DTYPE_LINEAR
) {
361 next
= zdma_get_regaddr64(s
, basereg
);
362 next
+= sizeof(s
->dsc_dst
);
363 zdma_put_regaddr64(s
, basereg
, next
);
365 addr
= zdma_get_regaddr64(s
, basereg
);
366 addr
+= sizeof(s
->dsc_dst
);
367 address_space_rw(s
->dma_as
, addr
, s
->attr
, (void *) &next
, 8, false);
368 zdma_put_regaddr64(s
, basereg
, next
);
373 static void zdma_write_dst(XlnxZDMA
*s
, uint8_t *buf
, uint32_t len
)
375 uint32_t dst_size
, dlen
;
376 bool dst_intr
, dst_type
;
377 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
378 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
379 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
382 /* FIXED burst types are only supported in simple dma mode. */
383 if (ptype
!= PT_REG
) {
384 burst_type
= AXI_BURST_INCR
;
388 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
390 dst_type
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
392 if (dst_size
== 0 && ptype
== PT_MEM
) {
394 next
= zdma_update_descr_addr(s
, dst_type
,
395 R_ZDMA_CH_DST_CUR_DSCR_LSB
);
396 zdma_load_descriptor(s
, next
, &s
->dsc_dst
);
397 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
399 dst_type
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
403 /* Match what hardware does by ignoring the dst_size and only using
404 * the src size for Simple register mode. */
405 if (ptype
== PT_REG
&& rw_mode
!= RW_MODE_WO
) {
409 dst_intr
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
412 dlen
= len
> dst_size
? dst_size
: len
;
413 if (burst_type
== AXI_BURST_FIXED
) {
414 if (dlen
> (s
->cfg
.bus_width
/ 8)) {
415 dlen
= s
->cfg
.bus_width
/ 8;
419 address_space_rw(s
->dma_as
, s
->dsc_dst
.addr
, s
->attr
, buf
, dlen
,
421 if (burst_type
== AXI_BURST_INCR
) {
422 s
->dsc_dst
.addr
+= dlen
;
428 if (dst_size
== 0 && dst_intr
) {
432 /* Write back to buffered descriptor. */
433 s
->dsc_dst
.words
[2] = FIELD_DP32(s
->dsc_dst
.words
[2],
434 ZDMA_CH_DST_DSCR_WORD2
,
440 static void zdma_process_descr(XlnxZDMA
*s
)
443 uint32_t src_size
, len
;
444 unsigned int src_cmd
;
445 bool src_intr
, src_type
;
446 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
447 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
448 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
451 src_addr
= s
->dsc_src
.addr
;
452 src_size
= FIELD_EX32(s
->dsc_src
.words
[2], ZDMA_CH_SRC_DSCR_WORD2
, SIZE
);
453 src_cmd
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, CMD
);
454 src_type
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
455 src_intr
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, INTR
);
457 /* FIXED burst types and non-rw modes are only supported in
460 if (ptype
!= PT_REG
) {
461 if (rw_mode
!= RW_MODE_RW
) {
462 qemu_log_mask(LOG_GUEST_ERROR
,
463 "zDMA: rw-mode=%d but not simple DMA mode.\n",
466 if (burst_type
!= AXI_BURST_INCR
) {
467 qemu_log_mask(LOG_GUEST_ERROR
,
468 "zDMA: burst_type=%d but not simple DMA mode.\n",
471 burst_type
= AXI_BURST_INCR
;
472 rw_mode
= RW_MODE_RW
;
475 if (rw_mode
== RW_MODE_WO
) {
476 /* In Simple DMA Write-Only, we need to push DST size bytes
477 * regardless of what SRC size is set to. */
478 src_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
480 memcpy(s
->buf
, &s
->regs
[R_ZDMA_CH_WR_ONLY_WORD0
], s
->cfg
.bus_width
/ 8);
484 len
= src_size
> ARRAY_SIZE(s
->buf
) ? ARRAY_SIZE(s
->buf
) : src_size
;
485 if (burst_type
== AXI_BURST_FIXED
) {
486 if (len
> (s
->cfg
.bus_width
/ 8)) {
487 len
= s
->cfg
.bus_width
/ 8;
491 if (rw_mode
== RW_MODE_WO
) {
492 if (len
> s
->cfg
.bus_width
/ 8) {
493 len
= s
->cfg
.bus_width
/ 8;
496 address_space_rw(s
->dma_as
, src_addr
, s
->attr
, s
->buf
, len
,
498 if (burst_type
== AXI_BURST_INCR
) {
503 if (rw_mode
!= RW_MODE_RO
) {
504 zdma_write_dst(s
, s
->buf
, len
);
507 s
->regs
[R_ZDMA_CH_TOTAL_BYTE
] += len
;
511 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_DONE
, true);
517 /* Load next descriptor. */
518 if (ptype
== PT_REG
|| src_cmd
== CMD_STOP
) {
519 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL2
, EN
, 0);
520 zdma_set_state(s
, DISABLED
);
524 if (src_cmd
== CMD_HALT
) {
525 zdma_set_state(s
, PAUSED
);
526 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_PAUSE
, 1);
527 zdma_ch_imr_update_irq(s
);
531 zdma_update_descr_addr(s
, src_type
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
534 static void zdma_run(XlnxZDMA
*s
)
536 while (s
->state
== ENABLED
&& !s
->error
) {
537 zdma_load_src_descriptor(s
);
540 zdma_set_state(s
, DISABLED
);
542 zdma_process_descr(s
);
546 zdma_ch_imr_update_irq(s
);
549 static void zdma_update_descr_addr_from_start(XlnxZDMA
*s
)
551 uint64_t src_addr
, dst_addr
;
553 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_START_LSB
);
554 zdma_put_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
, src_addr
);
555 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_START_LSB
);
556 zdma_put_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
, dst_addr
);
557 zdma_load_dst_descriptor(s
);
560 static void zdma_ch_ctrlx_postw(RegisterInfo
*reg
, uint64_t val64
)
562 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
564 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL2
, EN
)) {
567 if (s
->state
== PAUSED
&&
568 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
569 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT_ADDR
) == 1) {
570 zdma_update_descr_addr_from_start(s
);
572 bool src_type
= FIELD_EX32(s
->dsc_src
.words
[3],
573 ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
574 zdma_update_descr_addr(s
, src_type
,
575 R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
577 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL0
, CONT
, false);
578 zdma_set_state(s
, ENABLED
);
579 } else if (s
->state
== DISABLED
) {
580 zdma_update_descr_addr_from_start(s
);
581 zdma_set_state(s
, ENABLED
);
584 /* Leave Paused state? */
585 if (s
->state
== PAUSED
&&
586 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
587 zdma_set_state(s
, DISABLED
);
594 static RegisterAccessInfo zdma_regs_info
[] = {
595 { .name
= "ZDMA_ERR_CTRL", .addr
= A_ZDMA_ERR_CTRL
,
597 },{ .name
= "ZDMA_CH_ISR", .addr
= A_ZDMA_CH_ISR
,
600 .post_write
= zdma_ch_isr_postw
,
601 },{ .name
= "ZDMA_CH_IMR", .addr
= A_ZDMA_CH_IMR
,
605 },{ .name
= "ZDMA_CH_IEN", .addr
= A_ZDMA_CH_IEN
,
607 .pre_write
= zdma_ch_ien_prew
,
608 },{ .name
= "ZDMA_CH_IDS", .addr
= A_ZDMA_CH_IDS
,
610 .pre_write
= zdma_ch_ids_prew
,
611 },{ .name
= "ZDMA_CH_CTRL0", .addr
= A_ZDMA_CH_CTRL0
,
614 .post_write
= zdma_ch_ctrlx_postw
,
615 },{ .name
= "ZDMA_CH_CTRL1", .addr
= A_ZDMA_CH_CTRL1
,
618 },{ .name
= "ZDMA_CH_FCI", .addr
= A_ZDMA_CH_FCI
,
620 },{ .name
= "ZDMA_CH_STATUS", .addr
= A_ZDMA_CH_STATUS
,
623 },{ .name
= "ZDMA_CH_DATA_ATTR", .addr
= A_ZDMA_CH_DATA_ATTR
,
626 },{ .name
= "ZDMA_CH_DSCR_ATTR", .addr
= A_ZDMA_CH_DSCR_ATTR
,
628 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD0", .addr
= A_ZDMA_CH_SRC_DSCR_WORD0
,
629 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD1", .addr
= A_ZDMA_CH_SRC_DSCR_WORD1
,
631 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD2", .addr
= A_ZDMA_CH_SRC_DSCR_WORD2
,
633 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD3", .addr
= A_ZDMA_CH_SRC_DSCR_WORD3
,
635 },{ .name
= "ZDMA_CH_DST_DSCR_WORD0", .addr
= A_ZDMA_CH_DST_DSCR_WORD0
,
636 },{ .name
= "ZDMA_CH_DST_DSCR_WORD1", .addr
= A_ZDMA_CH_DST_DSCR_WORD1
,
638 },{ .name
= "ZDMA_CH_DST_DSCR_WORD2", .addr
= A_ZDMA_CH_DST_DSCR_WORD2
,
640 },{ .name
= "ZDMA_CH_DST_DSCR_WORD3", .addr
= A_ZDMA_CH_DST_DSCR_WORD3
,
642 },{ .name
= "ZDMA_CH_WR_ONLY_WORD0", .addr
= A_ZDMA_CH_WR_ONLY_WORD0
,
643 },{ .name
= "ZDMA_CH_WR_ONLY_WORD1", .addr
= A_ZDMA_CH_WR_ONLY_WORD1
,
644 },{ .name
= "ZDMA_CH_WR_ONLY_WORD2", .addr
= A_ZDMA_CH_WR_ONLY_WORD2
,
645 },{ .name
= "ZDMA_CH_WR_ONLY_WORD3", .addr
= A_ZDMA_CH_WR_ONLY_WORD3
,
646 },{ .name
= "ZDMA_CH_SRC_START_LSB", .addr
= A_ZDMA_CH_SRC_START_LSB
,
647 },{ .name
= "ZDMA_CH_SRC_START_MSB", .addr
= A_ZDMA_CH_SRC_START_MSB
,
649 },{ .name
= "ZDMA_CH_DST_START_LSB", .addr
= A_ZDMA_CH_DST_START_LSB
,
650 },{ .name
= "ZDMA_CH_DST_START_MSB", .addr
= A_ZDMA_CH_DST_START_MSB
,
652 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_LSB
,
654 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_MSB
,
657 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_LSB
,
659 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_MSB
,
662 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_LSB
,
664 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_MSB
,
667 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_LSB
,
669 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_MSB
,
672 },{ .name
= "ZDMA_CH_TOTAL_BYTE", .addr
= A_ZDMA_CH_TOTAL_BYTE
,
674 },{ .name
= "ZDMA_CH_RATE_CNTL", .addr
= A_ZDMA_CH_RATE_CNTL
,
676 },{ .name
= "ZDMA_CH_IRQ_SRC_ACCT", .addr
= A_ZDMA_CH_IRQ_SRC_ACCT
,
680 },{ .name
= "ZDMA_CH_IRQ_DST_ACCT", .addr
= A_ZDMA_CH_IRQ_DST_ACCT
,
684 },{ .name
= "ZDMA_CH_DBG0", .addr
= A_ZDMA_CH_DBG0
,
687 },{ .name
= "ZDMA_CH_DBG1", .addr
= A_ZDMA_CH_DBG1
,
690 },{ .name
= "ZDMA_CH_CTRL2", .addr
= A_ZDMA_CH_CTRL2
,
692 .post_write
= zdma_ch_ctrlx_postw
,
696 static void zdma_reset(DeviceState
*dev
)
698 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
701 for (i
= 0; i
< ARRAY_SIZE(s
->regs_info
); ++i
) {
702 register_reset(&s
->regs_info
[i
]);
705 zdma_ch_imr_update_irq(s
);
708 static uint64_t zdma_read(void *opaque
, hwaddr addr
, unsigned size
)
710 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
711 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
714 gchar
*path
= object_get_canonical_path(OBJECT(s
));
715 qemu_log("%s: Decode error: read from %" HWADDR_PRIx
"\n",
719 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
720 zdma_ch_imr_update_irq(s
);
723 return register_read(r
, ~0, NULL
, false);
726 static void zdma_write(void *opaque
, hwaddr addr
, uint64_t value
,
729 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
730 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
733 gchar
*path
= object_get_canonical_path(OBJECT(s
));
734 qemu_log("%s: Decode error: write to %" HWADDR_PRIx
"=%" PRIx64
"\n",
738 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
739 zdma_ch_imr_update_irq(s
);
742 register_write(r
, value
, ~0, NULL
, false);
745 static const MemoryRegionOps zdma_ops
= {
748 .endianness
= DEVICE_LITTLE_ENDIAN
,
750 .min_access_size
= 4,
751 .max_access_size
= 4,
755 static void zdma_realize(DeviceState
*dev
, Error
**errp
)
757 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
760 for (i
= 0; i
< ARRAY_SIZE(zdma_regs_info
); ++i
) {
761 RegisterInfo
*r
= &s
->regs_info
[zdma_regs_info
[i
].addr
/ 4];
763 *r
= (RegisterInfo
) {
764 .data
= (uint8_t *)&s
->regs
[
765 zdma_regs_info
[i
].addr
/ 4],
766 .data_size
= sizeof(uint32_t),
767 .access
= &zdma_regs_info
[i
],
773 s
->dma_as
= g_malloc0(sizeof(AddressSpace
));
774 address_space_init(s
->dma_as
, s
->dma_mr
, NULL
);
776 s
->dma_as
= &address_space_memory
;
778 s
->attr
= MEMTXATTRS_UNSPECIFIED
;
781 static void zdma_init(Object
*obj
)
783 XlnxZDMA
*s
= XLNX_ZDMA(obj
);
784 SysBusDevice
*sbd
= SYS_BUS_DEVICE(obj
);
786 memory_region_init_io(&s
->iomem
, obj
, &zdma_ops
, s
,
787 TYPE_XLNX_ZDMA
, ZDMA_R_MAX
* 4);
788 sysbus_init_mmio(sbd
, &s
->iomem
);
789 sysbus_init_irq(sbd
, &s
->irq_zdma_ch_imr
);
791 object_property_add_link(obj
, "dma", TYPE_MEMORY_REGION
,
792 (Object
**)&s
->dma_mr
,
793 qdev_prop_allow_set_link_before_realize
,
794 OBJ_PROP_LINK_STRONG
,
798 static const VMStateDescription vmstate_zdma
= {
799 .name
= TYPE_XLNX_ZDMA
,
801 .minimum_version_id
= 1,
802 .minimum_version_id_old
= 1,
803 .fields
= (VMStateField
[]) {
804 VMSTATE_UINT32_ARRAY(regs
, XlnxZDMA
, ZDMA_R_MAX
),
805 VMSTATE_UINT32(state
, XlnxZDMA
),
806 VMSTATE_UINT32_ARRAY(dsc_src
.words
, XlnxZDMA
, 4),
807 VMSTATE_UINT32_ARRAY(dsc_dst
.words
, XlnxZDMA
, 4),
808 VMSTATE_END_OF_LIST(),
812 static Property zdma_props
[] = {
813 DEFINE_PROP_UINT32("bus-width", XlnxZDMA
, cfg
.bus_width
, 64),
814 DEFINE_PROP_END_OF_LIST(),
817 static void zdma_class_init(ObjectClass
*klass
, void *data
)
819 DeviceClass
*dc
= DEVICE_CLASS(klass
);
821 dc
->reset
= zdma_reset
;
822 dc
->realize
= zdma_realize
;
823 device_class_set_props(dc
, zdma_props
);
824 dc
->vmsd
= &vmstate_zdma
;
827 static const TypeInfo zdma_info
= {
828 .name
= TYPE_XLNX_ZDMA
,
829 .parent
= TYPE_SYS_BUS_DEVICE
,
830 .instance_size
= sizeof(XlnxZDMA
),
831 .class_init
= zdma_class_init
,
832 .instance_init
= zdma_init
,
835 static void zdma_register_types(void)
837 type_register_static(&zdma_info
);
840 type_init(zdma_register_types
)