/*
 * QEMU model of the ZynqMP generic DMA
 *
 * Copyright (c) 2014 Xilinx Inc.
 * Copyright (c) 2018 FEIMTECH AB
 *
 * Written by Edgar E. Iglesias <edgar.iglesias@xilinx.com>,
 *            Francisco Iglesias <francisco.iglesias@feimtech.se>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "hw/dma/xlnx-zdma.h"
#include "qemu/bitops.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "qapi/error.h"

#ifndef XLNX_ZDMA_ERR_DEBUG
#define XLNX_ZDMA_ERR_DEBUG 0
#endif

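/*
 * Register and field layout.  The REG32()/FIELD() macros (QEMU's
 * hw/registerfields.h) expand into the A_*/R_* offset constants and the
 * *_SHIFT/_LENGTH/_MASK constants used by the FIELD_EX32()/FIELD_DP32()
 * helpers below.  Note that ZDMA_CH_RATE_CTRL and ZDMA_CH_RATE_CNTL are
 * two names for the same 0x18c offset.
 */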
REG32(ZDMA_ERR_CTRL, 0x0)
    FIELD(ZDMA_ERR_CTRL, APB_ERR_RES, 0, 1)
REG32(ZDMA_CH_ISR, 0x100)
    FIELD(ZDMA_CH_ISR, DMA_PAUSE, 11, 1)
    FIELD(ZDMA_CH_ISR, DMA_DONE, 10, 1)
    FIELD(ZDMA_CH_ISR, AXI_WR_DATA, 9, 1)
    FIELD(ZDMA_CH_ISR, AXI_RD_DATA, 8, 1)
    FIELD(ZDMA_CH_ISR, AXI_RD_DST_DSCR, 7, 1)
    FIELD(ZDMA_CH_ISR, AXI_RD_SRC_DSCR, 6, 1)
    FIELD(ZDMA_CH_ISR, IRQ_DST_ACCT_ERR, 5, 1)
    FIELD(ZDMA_CH_ISR, IRQ_SRC_ACCT_ERR, 4, 1)
    FIELD(ZDMA_CH_ISR, BYTE_CNT_OVRFL, 3, 1)
    FIELD(ZDMA_CH_ISR, DST_DSCR_DONE, 2, 1)
    FIELD(ZDMA_CH_ISR, SRC_DSCR_DONE, 1, 1)
    FIELD(ZDMA_CH_ISR, INV_APB, 0, 1)
REG32(ZDMA_CH_IMR, 0x104)
    FIELD(ZDMA_CH_IMR, DMA_PAUSE, 11, 1)
    FIELD(ZDMA_CH_IMR, DMA_DONE, 10, 1)
    FIELD(ZDMA_CH_IMR, AXI_WR_DATA, 9, 1)
    FIELD(ZDMA_CH_IMR, AXI_RD_DATA, 8, 1)
    FIELD(ZDMA_CH_IMR, AXI_RD_DST_DSCR, 7, 1)
    FIELD(ZDMA_CH_IMR, AXI_RD_SRC_DSCR, 6, 1)
    FIELD(ZDMA_CH_IMR, IRQ_DST_ACCT_ERR, 5, 1)
    FIELD(ZDMA_CH_IMR, IRQ_SRC_ACCT_ERR, 4, 1)
    FIELD(ZDMA_CH_IMR, BYTE_CNT_OVRFL, 3, 1)
    FIELD(ZDMA_CH_IMR, DST_DSCR_DONE, 2, 1)
    FIELD(ZDMA_CH_IMR, SRC_DSCR_DONE, 1, 1)
    FIELD(ZDMA_CH_IMR, INV_APB, 0, 1)
REG32(ZDMA_CH_IEN, 0x108)
    FIELD(ZDMA_CH_IEN, DMA_PAUSE, 11, 1)
    FIELD(ZDMA_CH_IEN, DMA_DONE, 10, 1)
    FIELD(ZDMA_CH_IEN, AXI_WR_DATA, 9, 1)
    FIELD(ZDMA_CH_IEN, AXI_RD_DATA, 8, 1)
    FIELD(ZDMA_CH_IEN, AXI_RD_DST_DSCR, 7, 1)
    FIELD(ZDMA_CH_IEN, AXI_RD_SRC_DSCR, 6, 1)
    FIELD(ZDMA_CH_IEN, IRQ_DST_ACCT_ERR, 5, 1)
    FIELD(ZDMA_CH_IEN, IRQ_SRC_ACCT_ERR, 4, 1)
    FIELD(ZDMA_CH_IEN, BYTE_CNT_OVRFL, 3, 1)
    FIELD(ZDMA_CH_IEN, DST_DSCR_DONE, 2, 1)
    FIELD(ZDMA_CH_IEN, SRC_DSCR_DONE, 1, 1)
    FIELD(ZDMA_CH_IEN, INV_APB, 0, 1)
REG32(ZDMA_CH_IDS, 0x10c)
    FIELD(ZDMA_CH_IDS, DMA_PAUSE, 11, 1)
    FIELD(ZDMA_CH_IDS, DMA_DONE, 10, 1)
    FIELD(ZDMA_CH_IDS, AXI_WR_DATA, 9, 1)
    FIELD(ZDMA_CH_IDS, AXI_RD_DATA, 8, 1)
    FIELD(ZDMA_CH_IDS, AXI_RD_DST_DSCR, 7, 1)
    FIELD(ZDMA_CH_IDS, AXI_RD_SRC_DSCR, 6, 1)
    FIELD(ZDMA_CH_IDS, IRQ_DST_ACCT_ERR, 5, 1)
    FIELD(ZDMA_CH_IDS, IRQ_SRC_ACCT_ERR, 4, 1)
    FIELD(ZDMA_CH_IDS, BYTE_CNT_OVRFL, 3, 1)
    FIELD(ZDMA_CH_IDS, DST_DSCR_DONE, 2, 1)
    FIELD(ZDMA_CH_IDS, SRC_DSCR_DONE, 1, 1)
    FIELD(ZDMA_CH_IDS, INV_APB, 0, 1)
REG32(ZDMA_CH_CTRL0, 0x110)
    FIELD(ZDMA_CH_CTRL0, OVR_FETCH, 7, 1)
    FIELD(ZDMA_CH_CTRL0, POINT_TYPE, 6, 1)
    FIELD(ZDMA_CH_CTRL0, MODE, 4, 2)
    FIELD(ZDMA_CH_CTRL0, RATE_CTRL, 3, 1)
    FIELD(ZDMA_CH_CTRL0, CONT_ADDR, 2, 1)
    FIELD(ZDMA_CH_CTRL0, CONT, 1, 1)
REG32(ZDMA_CH_CTRL1, 0x114)
    FIELD(ZDMA_CH_CTRL1, DST_ISSUE, 5, 5)
    FIELD(ZDMA_CH_CTRL1, SRC_ISSUE, 0, 5)
REG32(ZDMA_CH_FCI, 0x118)
    FIELD(ZDMA_CH_FCI, PROG_CELL_CNT, 2, 2)
    FIELD(ZDMA_CH_FCI, SIDE, 1, 1)
    FIELD(ZDMA_CH_FCI, EN, 0, 1)
REG32(ZDMA_CH_STATUS, 0x11c)
    FIELD(ZDMA_CH_STATUS, STATE, 0, 2)
REG32(ZDMA_CH_DATA_ATTR, 0x120)
    FIELD(ZDMA_CH_DATA_ATTR, ARBURST, 26, 2)
    FIELD(ZDMA_CH_DATA_ATTR, ARCACHE, 22, 4)
    FIELD(ZDMA_CH_DATA_ATTR, ARQOS, 18, 4)
    FIELD(ZDMA_CH_DATA_ATTR, ARLEN, 14, 4)
    FIELD(ZDMA_CH_DATA_ATTR, AWBURST, 12, 2)
    FIELD(ZDMA_CH_DATA_ATTR, AWCACHE, 8, 4)
    FIELD(ZDMA_CH_DATA_ATTR, AWQOS, 4, 4)
    FIELD(ZDMA_CH_DATA_ATTR, AWLEN, 0, 4)
REG32(ZDMA_CH_DSCR_ATTR, 0x124)
    FIELD(ZDMA_CH_DSCR_ATTR, AXCOHRNT, 8, 1)
    FIELD(ZDMA_CH_DSCR_ATTR, AXCACHE, 4, 4)
    FIELD(ZDMA_CH_DSCR_ATTR, AXQOS, 0, 4)
REG32(ZDMA_CH_SRC_DSCR_WORD0, 0x128)
REG32(ZDMA_CH_SRC_DSCR_WORD1, 0x12c)
    FIELD(ZDMA_CH_SRC_DSCR_WORD1, MSB, 0, 17)
REG32(ZDMA_CH_SRC_DSCR_WORD2, 0x130)
    FIELD(ZDMA_CH_SRC_DSCR_WORD2, SIZE, 0, 30)
REG32(ZDMA_CH_SRC_DSCR_WORD3, 0x134)
    FIELD(ZDMA_CH_SRC_DSCR_WORD3, CMD, 3, 2)
    FIELD(ZDMA_CH_SRC_DSCR_WORD3, INTR, 2, 1)
    FIELD(ZDMA_CH_SRC_DSCR_WORD3, TYPE, 1, 1)
    FIELD(ZDMA_CH_SRC_DSCR_WORD3, COHRNT, 0, 1)
REG32(ZDMA_CH_DST_DSCR_WORD0, 0x138)
REG32(ZDMA_CH_DST_DSCR_WORD1, 0x13c)
    FIELD(ZDMA_CH_DST_DSCR_WORD1, MSB, 0, 17)
REG32(ZDMA_CH_DST_DSCR_WORD2, 0x140)
    FIELD(ZDMA_CH_DST_DSCR_WORD2, SIZE, 0, 30)
REG32(ZDMA_CH_DST_DSCR_WORD3, 0x144)
    FIELD(ZDMA_CH_DST_DSCR_WORD3, INTR, 2, 1)
    FIELD(ZDMA_CH_DST_DSCR_WORD3, TYPE, 1, 1)
    FIELD(ZDMA_CH_DST_DSCR_WORD3, COHRNT, 0, 1)
REG32(ZDMA_CH_WR_ONLY_WORD0, 0x148)
REG32(ZDMA_CH_WR_ONLY_WORD1, 0x14c)
REG32(ZDMA_CH_WR_ONLY_WORD2, 0x150)
REG32(ZDMA_CH_WR_ONLY_WORD3, 0x154)
REG32(ZDMA_CH_SRC_START_LSB, 0x158)
REG32(ZDMA_CH_SRC_START_MSB, 0x15c)
    FIELD(ZDMA_CH_SRC_START_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_DST_START_LSB, 0x160)
REG32(ZDMA_CH_DST_START_MSB, 0x164)
    FIELD(ZDMA_CH_DST_START_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_RATE_CTRL, 0x18c)
    FIELD(ZDMA_CH_RATE_CTRL, CNT, 0, 12)
REG32(ZDMA_CH_SRC_CUR_PYLD_LSB, 0x168)
REG32(ZDMA_CH_SRC_CUR_PYLD_MSB, 0x16c)
    FIELD(ZDMA_CH_SRC_CUR_PYLD_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_DST_CUR_PYLD_LSB, 0x170)
REG32(ZDMA_CH_DST_CUR_PYLD_MSB, 0x174)
    FIELD(ZDMA_CH_DST_CUR_PYLD_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_SRC_CUR_DSCR_LSB, 0x178)
REG32(ZDMA_CH_SRC_CUR_DSCR_MSB, 0x17c)
    FIELD(ZDMA_CH_SRC_CUR_DSCR_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_DST_CUR_DSCR_LSB, 0x180)
REG32(ZDMA_CH_DST_CUR_DSCR_MSB, 0x184)
    FIELD(ZDMA_CH_DST_CUR_DSCR_MSB, ADDR, 0, 17)
REG32(ZDMA_CH_TOTAL_BYTE, 0x188)
REG32(ZDMA_CH_RATE_CNTL, 0x18c)
    FIELD(ZDMA_CH_RATE_CNTL, CNT, 0, 12)
REG32(ZDMA_CH_IRQ_SRC_ACCT, 0x190)
    FIELD(ZDMA_CH_IRQ_SRC_ACCT, CNT, 0, 8)
REG32(ZDMA_CH_IRQ_DST_ACCT, 0x194)
    FIELD(ZDMA_CH_IRQ_DST_ACCT, CNT, 0, 8)
REG32(ZDMA_CH_DBG0, 0x198)
    FIELD(ZDMA_CH_DBG0, CMN_BUF_FREE, 0, 9)
REG32(ZDMA_CH_DBG1, 0x19c)
    FIELD(ZDMA_CH_DBG1, CMN_BUF_OCC, 0, 9)
REG32(ZDMA_CH_CTRL2, 0x200)
    FIELD(ZDMA_CH_CTRL2, EN, 0, 1)

/* Encodings of the CTRL0/descriptor fields used by the model below. */
enum {
    PT_REG = 0,
    PT_MEM = 1,
};

enum {
    CMD_HALT = 1,
    CMD_STOP = 2,
};

enum {
    RW_MODE_RW = 0,
    RW_MODE_WO = 1,
    RW_MODE_RO = 2,
};

enum {
    DTYPE_LINEAR = 0,
    DTYPE_LINKED = 1,
};

enum {
    AXI_BURST_FIXED = 0,
    AXI_BURST_INCR  = 1,
};

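/*
 * Interrupt handling: ZDMA_CH_ISR latches event bits and ZDMA_CH_IMR masks
 * them.  Writes to IEN clear mask bits and writes to IDS set them (see the
 * pre_write hooks below); the channel IRQ is asserted while any unmasked
 * status bit is pending.
 */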
static void zdma_ch_imr_update_irq(XlnxZDMA *s)
{
    bool pending;

    pending = s->regs[R_ZDMA_CH_ISR] & ~s->regs[R_ZDMA_CH_IMR];

    qemu_set_irq(s->irq_zdma_ch_imr, pending);
}

static void zdma_ch_isr_postw(RegisterInfo *reg, uint64_t val64)
{
    XlnxZDMA *s = XLNX_ZDMA(reg->opaque);
    zdma_ch_imr_update_irq(s);
}

static uint64_t zdma_ch_ien_prew(RegisterInfo *reg, uint64_t val64)
{
    XlnxZDMA *s = XLNX_ZDMA(reg->opaque);
    uint32_t val = val64;

    s->regs[R_ZDMA_CH_IMR] &= ~val;
    zdma_ch_imr_update_irq(s);
    return 0;
}

static uint64_t zdma_ch_ids_prew(RegisterInfo *reg, uint64_t val64)
{
    XlnxZDMA *s = XLNX_ZDMA(reg->opaque);
    uint32_t val = val64;

    s->regs[R_ZDMA_CH_IMR] |= val;
    zdma_ch_imr_update_irq(s);
    return 0;
}

static void zdma_set_state(XlnxZDMA *s, XlnxZDMAState state)
{
    s->state = state;
    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_STATUS, STATE, state);

    /* Signal error if we have an error condition. */
    if (s->error) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_STATUS, STATE, 3);
    }
}

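/*
 * Descriptor-completion accounting: the 8-bit ZDMA_CH_IRQ_{SRC,DST}_ACCT
 * counters are bumped for each completed descriptor that requested an
 * interrupt.  If an increment wraps the counter, the corresponding
 * *_ACCT_ERR status bit is raised as well.
 */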
static void zdma_src_done(XlnxZDMA *s)
{
    unsigned int cnt;

    cnt = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_IRQ_SRC_ACCT, CNT);
    cnt++;
    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_IRQ_SRC_ACCT, CNT, cnt);
    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, SRC_DSCR_DONE, true);

    /* Did we overflow? */
    if (cnt != ARRAY_FIELD_EX32(s->regs, ZDMA_CH_IRQ_SRC_ACCT, CNT)) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, IRQ_SRC_ACCT_ERR, true);
    }
    zdma_ch_imr_update_irq(s);
}

static void zdma_dst_done(XlnxZDMA *s)
{
    unsigned int cnt;

    cnt = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_IRQ_DST_ACCT, CNT);
    cnt++;
    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_IRQ_DST_ACCT, CNT, cnt);
    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, DST_DSCR_DONE, true);

    /* Did we overflow? */
    if (cnt != ARRAY_FIELD_EX32(s->regs, ZDMA_CH_IRQ_DST_ACCT, CNT)) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, IRQ_DST_ACCT_ERR, true);
    }
    zdma_ch_imr_update_irq(s);
}

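/*
 * 64-bit addresses are split across consecutive LSB/MSB register pairs;
 * these helpers pack and unpack them.
 */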
static uint64_t zdma_get_regaddr64(XlnxZDMA *s, unsigned int basereg)
{
    uint64_t addr;

    addr = s->regs[basereg + 1];
    addr <<= 32;
    addr |= s->regs[basereg];

    return addr;
}

static void zdma_put_regaddr64(XlnxZDMA *s, unsigned int basereg, uint64_t addr)
{
    s->regs[basereg] = addr;
    s->regs[basereg + 1] = addr >> 32;
}

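/*
 * Descriptor fetch.  Descriptors are expected to be naturally aligned;
 * an unaligned address is reported as a guest error, the buffered
 * descriptor is zeroed and the load is reported as failed.
 */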
static bool zdma_load_descriptor(XlnxZDMA *s, uint64_t addr, void *buf)
{
    /* ZDMA descriptors must be aligned to their own size. */
    if (addr % sizeof(XlnxZDMADescr)) {
        qemu_log_mask(LOG_GUEST_ERROR,
                      "zdma: unaligned descriptor at %" PRIx64,
                      addr);
        memset(buf, 0x0, sizeof(XlnxZDMADescr));
        s->error = true;
        return false;
    }

    address_space_rw(s->dma_as, addr, s->attr,
                     buf, sizeof(XlnxZDMADescr), false);
    return true;
}

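/*
 * In register point mode (PT_REG) the descriptors live directly in the
 * ZDMA_CH_{SRC,DST}_DSCR_WORDx registers; otherwise they are fetched from
 * the addresses held in the *_CUR_DSCR register pairs.
 */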
static void zdma_load_src_descriptor(XlnxZDMA *s)
{
    uint64_t src_addr;
    unsigned int ptype = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, POINT_TYPE);

    if (ptype == PT_REG) {
        memcpy(&s->dsc_src, &s->regs[R_ZDMA_CH_SRC_DSCR_WORD0],
               sizeof(s->dsc_src));
        return;
    }

    src_addr = zdma_get_regaddr64(s, R_ZDMA_CH_SRC_CUR_DSCR_LSB);

    if (!zdma_load_descriptor(s, src_addr, &s->dsc_src)) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, AXI_RD_SRC_DSCR, true);
    }
}

static void zdma_load_dst_descriptor(XlnxZDMA *s)
{
    uint64_t dst_addr;
    unsigned int ptype = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, POINT_TYPE);

    if (ptype == PT_REG) {
        memcpy(&s->dsc_dst, &s->regs[R_ZDMA_CH_DST_DSCR_WORD0],
               sizeof(s->dsc_dst));
        return;
    }

    dst_addr = zdma_get_regaddr64(s, R_ZDMA_CH_DST_CUR_DSCR_LSB);

    if (!zdma_load_descriptor(s, dst_addr, &s->dsc_dst)) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, AXI_RD_DST_DSCR, true);
    }
}

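/*
 * Advance to the next descriptor.  Linear descriptors are laid out
 * contiguously, so the register pair is simply bumped by one descriptor
 * size.  For linked-list descriptors the 64-bit next pointer is read from
 * memory just past the current descriptor.
 */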
static uint64_t zdma_update_descr_addr(XlnxZDMA *s, bool type,
                                       unsigned int basereg)
{
    uint64_t addr, next;

    if (type == DTYPE_LINEAR) {
        next = zdma_get_regaddr64(s, basereg);
        next += sizeof(s->dsc_dst);
        zdma_put_regaddr64(s, basereg, next);
    } else {
        addr = zdma_get_regaddr64(s, basereg);
        addr += sizeof(s->dsc_dst);
        address_space_rw(s->dma_as, addr, s->attr, (void *) &next, 8, false);
        zdma_put_regaddr64(s, basereg, next);
    }

    return next;
}

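/*
 * Write the payload out through the destination descriptor chain, moving
 * on to the next destination descriptor whenever the current one fills up.
 * FIXED bursts are clamped to a single bus-width beat and do not advance
 * the destination address.
 */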
static void zdma_write_dst(XlnxZDMA *s, uint8_t *buf, uint32_t len)
{
    uint32_t dst_size, dlen;
    bool dst_intr, dst_type;
    unsigned int ptype = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, POINT_TYPE);
    unsigned int rw_mode = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, MODE);
    unsigned int burst_type = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_DATA_ATTR,
                                               AWBURST);

    /* FIXED burst types are only supported in simple dma mode. */
    if (ptype != PT_REG) {
        burst_type = AXI_BURST_INCR;
    }

    while (len) {
        dst_size = FIELD_EX32(s->dsc_dst.words[2], ZDMA_CH_DST_DSCR_WORD2,
                              SIZE);
        dst_type = FIELD_EX32(s->dsc_dst.words[3], ZDMA_CH_DST_DSCR_WORD3,
                              TYPE);
        if (dst_size == 0 && ptype == PT_MEM) {
            uint64_t next;

            next = zdma_update_descr_addr(s, dst_type,
                                          R_ZDMA_CH_DST_CUR_DSCR_LSB);
            zdma_load_descriptor(s, next, &s->dsc_dst);
            dst_size = FIELD_EX32(s->dsc_dst.words[2], ZDMA_CH_DST_DSCR_WORD2,
                                  SIZE);
            dst_type = FIELD_EX32(s->dsc_dst.words[3], ZDMA_CH_DST_DSCR_WORD3,
                                  TYPE);
        }

        /* Match what hardware does by ignoring the dst_size and only using
         * the src size for Simple register mode. */
        if (ptype == PT_REG && rw_mode != RW_MODE_WO) {
            dst_size = len;
        }

        dst_intr = FIELD_EX32(s->dsc_dst.words[3], ZDMA_CH_DST_DSCR_WORD3,
                              INTR);

        dlen = len > dst_size ? dst_size : len;
        if (burst_type == AXI_BURST_FIXED) {
            if (dlen > (s->cfg.bus_width / 8)) {
                dlen = s->cfg.bus_width / 8;
            }
        }

        address_space_rw(s->dma_as, s->dsc_dst.addr, s->attr, buf, dlen,
                         true);
        if (burst_type == AXI_BURST_INCR) {
            s->dsc_dst.addr += dlen;
        }
        dst_size -= dlen;
        buf += dlen;
        len -= dlen;

        if (dst_size == 0 && dst_intr) {
            zdma_dst_done(s);
        }

        /* Write back to buffered descriptor. */
        s->dsc_dst.words[2] = FIELD_DP32(s->dsc_dst.words[2],
                                         ZDMA_CH_DST_DSCR_WORD2,
                                         SIZE,
                                         dst_size);
    }
}

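/*
 * Process one source descriptor: stream its payload through the bounce
 * buffer to the destination, honouring the simple-DMA write-only and
 * read-only modes.  In register (simple) mode the channel is disabled
 * after the single transfer; otherwise the source CMD field decides
 * whether to stop, pause or continue with the next descriptor.
 */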
static void zdma_process_descr(XlnxZDMA *s)
{
    uint64_t src_addr;
    uint32_t src_size, len;
    unsigned int src_cmd;
    bool src_intr, src_type;
    unsigned int ptype = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, POINT_TYPE);
    unsigned int rw_mode = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, MODE);
    unsigned int burst_type = ARRAY_FIELD_EX32(s->regs, ZDMA_CH_DATA_ATTR,
                                               ARBURST);

    src_addr = s->dsc_src.addr;
    src_size = FIELD_EX32(s->dsc_src.words[2], ZDMA_CH_SRC_DSCR_WORD2, SIZE);
    src_cmd = FIELD_EX32(s->dsc_src.words[3], ZDMA_CH_SRC_DSCR_WORD3, CMD);
    src_type = FIELD_EX32(s->dsc_src.words[3], ZDMA_CH_SRC_DSCR_WORD3, TYPE);
    src_intr = FIELD_EX32(s->dsc_src.words[3], ZDMA_CH_SRC_DSCR_WORD3, INTR);

    /* FIXED burst types and non-rw modes are only supported in
     * simple dma mode.
     */
    if (ptype != PT_REG) {
        if (rw_mode != RW_MODE_RW) {
            qemu_log_mask(LOG_GUEST_ERROR,
                          "zDMA: rw-mode=%d but not simple DMA mode.\n",
                          rw_mode);
        }
        if (burst_type != AXI_BURST_INCR) {
            qemu_log_mask(LOG_GUEST_ERROR,
                          "zDMA: burst_type=%d but not simple DMA mode.\n",
                          burst_type);
        }
        burst_type = AXI_BURST_INCR;
        rw_mode = RW_MODE_RW;
    }

    if (rw_mode == RW_MODE_WO) {
        /* In Simple DMA Write-Only, we need to push DST size bytes
         * regardless of what SRC size is set to. */
        src_size = FIELD_EX32(s->dsc_dst.words[2], ZDMA_CH_DST_DSCR_WORD2,
                              SIZE);
        memcpy(s->buf, &s->regs[R_ZDMA_CH_WR_ONLY_WORD0], s->cfg.bus_width / 8);
    }

    while (src_size) {
        len = src_size > ARRAY_SIZE(s->buf) ? ARRAY_SIZE(s->buf) : src_size;
        if (burst_type == AXI_BURST_FIXED) {
            if (len > (s->cfg.bus_width / 8)) {
                len = s->cfg.bus_width / 8;
            }
        }

        if (rw_mode == RW_MODE_WO) {
            if (len > s->cfg.bus_width / 8) {
                len = s->cfg.bus_width / 8;
            }
        } else {
            address_space_rw(s->dma_as, src_addr, s->attr, s->buf, len,
                             false);
            if (burst_type == AXI_BURST_INCR) {
                src_addr += len;
            }
        }

        if (rw_mode != RW_MODE_RO) {
            zdma_write_dst(s, s->buf, len);
        }

        s->regs[R_ZDMA_CH_TOTAL_BYTE] += len;
        src_size -= len;
    }

    ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, DMA_DONE, true);

    if (src_intr) {
        zdma_src_done(s);
    }

    /* Load next descriptor. */
    if (ptype == PT_REG || src_cmd == CMD_STOP) {
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_CTRL2, EN, 0);
        zdma_set_state(s, DISABLED);
        return;
    }

    if (src_cmd == CMD_HALT) {
        zdma_set_state(s, PAUSED);
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, DMA_PAUSE, 1);
        zdma_ch_imr_update_irq(s);
        return;
    }

    zdma_update_descr_addr(s, src_type, R_ZDMA_CH_SRC_CUR_DSCR_LSB);
}

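/*
 * Keep fetching and processing source descriptors until the channel
 * leaves the ENABLED state or an error is latched.
 */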
static void zdma_run(XlnxZDMA *s)
{
    while (s->state == ENABLED && !s->error) {
        zdma_load_src_descriptor(s);

        if (s->error) {
            zdma_set_state(s, DISABLED);
        } else {
            zdma_process_descr(s);
        }
    }

    zdma_ch_imr_update_irq(s);
}

static void zdma_update_descr_addr_from_start(XlnxZDMA *s)
{
    uint64_t src_addr, dst_addr;

    src_addr = zdma_get_regaddr64(s, R_ZDMA_CH_SRC_START_LSB);
    zdma_put_regaddr64(s, R_ZDMA_CH_SRC_CUR_DSCR_LSB, src_addr);
    dst_addr = zdma_get_regaddr64(s, R_ZDMA_CH_DST_START_LSB);
    zdma_put_regaddr64(s, R_ZDMA_CH_DST_CUR_DSCR_LSB, dst_addr);
    zdma_load_dst_descriptor(s);
}

static void zdma_ch_ctrlx_postw(RegisterInfo *reg, uint64_t val64)
{
    XlnxZDMA *s = XLNX_ZDMA(reg->opaque);

    if (ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL2, EN)) {
        s->error = false;

        if (s->state == PAUSED &&
            ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, CONT)) {
            if (ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, CONT_ADDR) == 1) {
                zdma_update_descr_addr_from_start(s);
            } else {
                bool src_type = FIELD_EX32(s->dsc_src.words[3],
                                           ZDMA_CH_SRC_DSCR_WORD3, TYPE);
                zdma_update_descr_addr(s, src_type,
                                       R_ZDMA_CH_SRC_CUR_DSCR_LSB);
            }
            ARRAY_FIELD_DP32(s->regs, ZDMA_CH_CTRL0, CONT, false);
            zdma_set_state(s, ENABLED);
        } else if (s->state == DISABLED) {
            zdma_update_descr_addr_from_start(s);
            zdma_set_state(s, ENABLED);
        }
    } else {
        /* Leave Paused state? */
        if (s->state == PAUSED &&
            ARRAY_FIELD_EX32(s->regs, ZDMA_CH_CTRL0, CONT)) {
            zdma_set_state(s, DISABLED);
        }
    }

    zdma_run(s);
}

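/*
 * Register access table: wires the interrupt and control registers to the
 * pre_write/post_write hooks above.  Registers without hooks are plain
 * storage backed by s->regs[].
 */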
static RegisterAccessInfo zdma_regs_info[] = {
    {   .name = "ZDMA_ERR_CTRL",  .addr = A_ZDMA_ERR_CTRL,
    },{ .name = "ZDMA_CH_ISR",  .addr = A_ZDMA_CH_ISR,
        .post_write = zdma_ch_isr_postw,
    },{ .name = "ZDMA_CH_IMR",  .addr = A_ZDMA_CH_IMR,
    },{ .name = "ZDMA_CH_IEN",  .addr = A_ZDMA_CH_IEN,
        .pre_write = zdma_ch_ien_prew,
    },{ .name = "ZDMA_CH_IDS",  .addr = A_ZDMA_CH_IDS,
        .pre_write = zdma_ch_ids_prew,
    },{ .name = "ZDMA_CH_CTRL0",  .addr = A_ZDMA_CH_CTRL0,
        .post_write = zdma_ch_ctrlx_postw,
    },{ .name = "ZDMA_CH_CTRL1",  .addr = A_ZDMA_CH_CTRL1,
    },{ .name = "ZDMA_CH_FCI",  .addr = A_ZDMA_CH_FCI,
    },{ .name = "ZDMA_CH_STATUS",  .addr = A_ZDMA_CH_STATUS,
    },{ .name = "ZDMA_CH_DATA_ATTR",  .addr = A_ZDMA_CH_DATA_ATTR,
    },{ .name = "ZDMA_CH_DSCR_ATTR",  .addr = A_ZDMA_CH_DSCR_ATTR,
    },{ .name = "ZDMA_CH_SRC_DSCR_WORD0",  .addr = A_ZDMA_CH_SRC_DSCR_WORD0,
    },{ .name = "ZDMA_CH_SRC_DSCR_WORD1",  .addr = A_ZDMA_CH_SRC_DSCR_WORD1,
    },{ .name = "ZDMA_CH_SRC_DSCR_WORD2",  .addr = A_ZDMA_CH_SRC_DSCR_WORD2,
    },{ .name = "ZDMA_CH_SRC_DSCR_WORD3",  .addr = A_ZDMA_CH_SRC_DSCR_WORD3,
    },{ .name = "ZDMA_CH_DST_DSCR_WORD0",  .addr = A_ZDMA_CH_DST_DSCR_WORD0,
    },{ .name = "ZDMA_CH_DST_DSCR_WORD1",  .addr = A_ZDMA_CH_DST_DSCR_WORD1,
    },{ .name = "ZDMA_CH_DST_DSCR_WORD2",  .addr = A_ZDMA_CH_DST_DSCR_WORD2,
    },{ .name = "ZDMA_CH_DST_DSCR_WORD3",  .addr = A_ZDMA_CH_DST_DSCR_WORD3,
    },{ .name = "ZDMA_CH_WR_ONLY_WORD0",  .addr = A_ZDMA_CH_WR_ONLY_WORD0,
    },{ .name = "ZDMA_CH_WR_ONLY_WORD1",  .addr = A_ZDMA_CH_WR_ONLY_WORD1,
    },{ .name = "ZDMA_CH_WR_ONLY_WORD2",  .addr = A_ZDMA_CH_WR_ONLY_WORD2,
    },{ .name = "ZDMA_CH_WR_ONLY_WORD3",  .addr = A_ZDMA_CH_WR_ONLY_WORD3,
    },{ .name = "ZDMA_CH_SRC_START_LSB",  .addr = A_ZDMA_CH_SRC_START_LSB,
    },{ .name = "ZDMA_CH_SRC_START_MSB",  .addr = A_ZDMA_CH_SRC_START_MSB,
    },{ .name = "ZDMA_CH_DST_START_LSB",  .addr = A_ZDMA_CH_DST_START_LSB,
    },{ .name = "ZDMA_CH_DST_START_MSB",  .addr = A_ZDMA_CH_DST_START_MSB,
    },{ .name = "ZDMA_CH_SRC_CUR_PYLD_LSB",  .addr = A_ZDMA_CH_SRC_CUR_PYLD_LSB,
    },{ .name = "ZDMA_CH_SRC_CUR_PYLD_MSB",  .addr = A_ZDMA_CH_SRC_CUR_PYLD_MSB,
    },{ .name = "ZDMA_CH_DST_CUR_PYLD_LSB",  .addr = A_ZDMA_CH_DST_CUR_PYLD_LSB,
    },{ .name = "ZDMA_CH_DST_CUR_PYLD_MSB",  .addr = A_ZDMA_CH_DST_CUR_PYLD_MSB,
    },{ .name = "ZDMA_CH_SRC_CUR_DSCR_LSB",  .addr = A_ZDMA_CH_SRC_CUR_DSCR_LSB,
    },{ .name = "ZDMA_CH_SRC_CUR_DSCR_MSB",  .addr = A_ZDMA_CH_SRC_CUR_DSCR_MSB,
    },{ .name = "ZDMA_CH_DST_CUR_DSCR_LSB",  .addr = A_ZDMA_CH_DST_CUR_DSCR_LSB,
    },{ .name = "ZDMA_CH_DST_CUR_DSCR_MSB",  .addr = A_ZDMA_CH_DST_CUR_DSCR_MSB,
    },{ .name = "ZDMA_CH_TOTAL_BYTE",  .addr = A_ZDMA_CH_TOTAL_BYTE,
    },{ .name = "ZDMA_CH_RATE_CNTL",  .addr = A_ZDMA_CH_RATE_CNTL,
    },{ .name = "ZDMA_CH_IRQ_SRC_ACCT",  .addr = A_ZDMA_CH_IRQ_SRC_ACCT,
    },{ .name = "ZDMA_CH_IRQ_DST_ACCT",  .addr = A_ZDMA_CH_IRQ_DST_ACCT,
    },{ .name = "ZDMA_CH_DBG0",  .addr = A_ZDMA_CH_DBG0,
    },{ .name = "ZDMA_CH_DBG1",  .addr = A_ZDMA_CH_DBG1,
    },{ .name = "ZDMA_CH_CTRL2",  .addr = A_ZDMA_CH_CTRL2,
        .post_write = zdma_ch_ctrlx_postw,
    }
};

static void zdma_reset(DeviceState *dev)
{
    XlnxZDMA *s = XLNX_ZDMA(dev);
    unsigned int i;

    for (i = 0; i < ARRAY_SIZE(s->regs_info); ++i) {
        register_reset(&s->regs_info[i]);
    }

    zdma_ch_imr_update_irq(s);
}

static uint64_t zdma_read(void *opaque, hwaddr addr, unsigned size)
{
    XlnxZDMA *s = XLNX_ZDMA(opaque);
    RegisterInfo *r = &s->regs_info[addr / 4];

    if (!r->data) {
        gchar *path = object_get_canonical_path(OBJECT(s));
        qemu_log("%s: Decode error: read from %" HWADDR_PRIx "\n",
                 path, addr);
        g_free(path);
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, INV_APB, true);
        zdma_ch_imr_update_irq(s);
        return 0;
    }
    return register_read(r, ~0, NULL, false);
}

static void zdma_write(void *opaque, hwaddr addr, uint64_t value,
                       unsigned size)
{
    XlnxZDMA *s = XLNX_ZDMA(opaque);
    RegisterInfo *r = &s->regs_info[addr / 4];

    if (!r->data) {
        gchar *path = object_get_canonical_path(OBJECT(s));
        qemu_log("%s: Decode error: write to %" HWADDR_PRIx "=%" PRIx64 "\n",
                 path, addr, value);
        g_free(path);
        ARRAY_FIELD_DP32(s->regs, ZDMA_CH_ISR, INV_APB, true);
        zdma_ch_imr_update_irq(s);
        return;
    }
    register_write(r, value, ~0, NULL, false);
}

static const MemoryRegionOps zdma_ops = {
    .read = zdma_read,
    .write = zdma_write,
    .endianness = DEVICE_LITTLE_ENDIAN,
    .valid = {
        .min_access_size = 4,
        .max_access_size = 4,
    },
};

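/*
 * Realize: point each RegisterInfo slot at its backing word in s->regs[]
 * and pick the DMA address space, either the memory region wired up via
 * the "dma" link property or the global system memory.
 */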
static void zdma_realize(DeviceState *dev, Error **errp)
{
    XlnxZDMA *s = XLNX_ZDMA(dev);
    unsigned int i;

    for (i = 0; i < ARRAY_SIZE(zdma_regs_info); ++i) {
        RegisterInfo *r = &s->regs_info[zdma_regs_info[i].addr / 4];

        *r = (RegisterInfo) {
            .data = (uint8_t *)&s->regs[
                    zdma_regs_info[i].addr / 4],
            .data_size = sizeof(uint32_t),
            .access = &zdma_regs_info[i],
            .opaque = s,
        };
    }

    if (s->dma_mr) {
        s->dma_as = g_malloc0(sizeof(AddressSpace));
        address_space_init(s->dma_as, s->dma_mr, NULL);
    } else {
        s->dma_as = &address_space_memory;
    }
    s->attr = MEMTXATTRS_UNSPECIFIED;
}

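/*
 * Instance init: expose the MMIO region, the channel IRQ and the "dma"
 * link property used to select the DMA address space at realize time.
 */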
static void zdma_init(Object *obj)
{
    XlnxZDMA *s = XLNX_ZDMA(obj);
    SysBusDevice *sbd = SYS_BUS_DEVICE(obj);

    memory_region_init_io(&s->iomem, obj, &zdma_ops, s,
                          TYPE_XLNX_ZDMA, ZDMA_R_MAX * 4);
    sysbus_init_mmio(sbd, &s->iomem);
    sysbus_init_irq(sbd, &s->irq_zdma_ch_imr);

    object_property_add_link(obj, "dma", TYPE_MEMORY_REGION,
                             (Object **)&s->dma_mr,
                             qdev_prop_allow_set_link_before_realize,
                             OBJ_PROP_LINK_STRONG,
                             &error_abort);
}

= {
796 .name
= TYPE_XLNX_ZDMA
,
798 .minimum_version_id
= 1,
799 .minimum_version_id_old
= 1,
800 .fields
= (VMStateField
[]) {
801 VMSTATE_UINT32_ARRAY(regs
, XlnxZDMA
, ZDMA_R_MAX
),
802 VMSTATE_UINT32(state
, XlnxZDMA
),
803 VMSTATE_UINT32_ARRAY(dsc_src
.words
, XlnxZDMA
, 4),
804 VMSTATE_UINT32_ARRAY(dsc_dst
.words
, XlnxZDMA
, 4),
805 VMSTATE_END_OF_LIST(),
static Property zdma_props[] = {
    DEFINE_PROP_UINT32("bus-width", XlnxZDMA, cfg.bus_width, 64),
    DEFINE_PROP_END_OF_LIST(),
};

static void zdma_class_init(ObjectClass *klass, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(klass);

    dc->reset = zdma_reset;
    dc->realize = zdma_realize;
    dc->props = zdma_props;
    dc->vmsd = &vmstate_zdma;
}

static const TypeInfo zdma_info = {
    .name          = TYPE_XLNX_ZDMA,
    .parent        = TYPE_SYS_BUS_DEVICE,
    .instance_size = sizeof(XlnxZDMA),
    .class_init    = zdma_class_init,
    .instance_init = zdma_init,
};

static void zdma_register_types(void)
{
    type_register_static(&zdma_info);
}

type_init(zdma_register_types)