/*
 * QEMU model of the ZynqMP generic DMA
 *
 * Copyright (c) 2014 Xilinx Inc.
 * Copyright (c) 2018 FEIMTECH AB
 *
 * Written by Edgar E. Iglesias <edgar.iglesias@xilinx.com>,
 *            Francisco Iglesias <francisco.iglesias@feimtech.se>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "hw/dma/xlnx-zdma.h"
#include "hw/irq.h"
#include "hw/qdev-properties.h"
#include "migration/vmstate.h"
#include "qemu/bitops.h"
#include "qemu/log.h"
#include "qemu/module.h"
#include "qapi/error.h"
39 #ifndef XLNX_ZDMA_ERR_DEBUG
40 #define XLNX_ZDMA_ERR_DEBUG 0
43 REG32(ZDMA_ERR_CTRL
, 0x0)
44 FIELD(ZDMA_ERR_CTRL
, APB_ERR_RES
, 0, 1)
45 REG32(ZDMA_CH_ISR
, 0x100)
46 FIELD(ZDMA_CH_ISR
, DMA_PAUSE
, 11, 1)
47 FIELD(ZDMA_CH_ISR
, DMA_DONE
, 10, 1)
48 FIELD(ZDMA_CH_ISR
, AXI_WR_DATA
, 9, 1)
49 FIELD(ZDMA_CH_ISR
, AXI_RD_DATA
, 8, 1)
50 FIELD(ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, 7, 1)
51 FIELD(ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, 6, 1)
52 FIELD(ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, 5, 1)
53 FIELD(ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, 4, 1)
54 FIELD(ZDMA_CH_ISR
, BYTE_CNT_OVRFL
, 3, 1)
55 FIELD(ZDMA_CH_ISR
, DST_DSCR_DONE
, 2, 1)
56 FIELD(ZDMA_CH_ISR
, SRC_DSCR_DONE
, 1, 1)
57 FIELD(ZDMA_CH_ISR
, INV_APB
, 0, 1)
58 REG32(ZDMA_CH_IMR
, 0x104)
59 FIELD(ZDMA_CH_IMR
, DMA_PAUSE
, 11, 1)
60 FIELD(ZDMA_CH_IMR
, DMA_DONE
, 10, 1)
61 FIELD(ZDMA_CH_IMR
, AXI_WR_DATA
, 9, 1)
62 FIELD(ZDMA_CH_IMR
, AXI_RD_DATA
, 8, 1)
63 FIELD(ZDMA_CH_IMR
, AXI_RD_DST_DSCR
, 7, 1)
64 FIELD(ZDMA_CH_IMR
, AXI_RD_SRC_DSCR
, 6, 1)
65 FIELD(ZDMA_CH_IMR
, IRQ_DST_ACCT_ERR
, 5, 1)
66 FIELD(ZDMA_CH_IMR
, IRQ_SRC_ACCT_ERR
, 4, 1)
67 FIELD(ZDMA_CH_IMR
, BYTE_CNT_OVRFL
, 3, 1)
68 FIELD(ZDMA_CH_IMR
, DST_DSCR_DONE
, 2, 1)
69 FIELD(ZDMA_CH_IMR
, SRC_DSCR_DONE
, 1, 1)
70 FIELD(ZDMA_CH_IMR
, INV_APB
, 0, 1)
71 REG32(ZDMA_CH_IEN
, 0x108)
72 FIELD(ZDMA_CH_IEN
, DMA_PAUSE
, 11, 1)
73 FIELD(ZDMA_CH_IEN
, DMA_DONE
, 10, 1)
74 FIELD(ZDMA_CH_IEN
, AXI_WR_DATA
, 9, 1)
75 FIELD(ZDMA_CH_IEN
, AXI_RD_DATA
, 8, 1)
76 FIELD(ZDMA_CH_IEN
, AXI_RD_DST_DSCR
, 7, 1)
77 FIELD(ZDMA_CH_IEN
, AXI_RD_SRC_DSCR
, 6, 1)
78 FIELD(ZDMA_CH_IEN
, IRQ_DST_ACCT_ERR
, 5, 1)
79 FIELD(ZDMA_CH_IEN
, IRQ_SRC_ACCT_ERR
, 4, 1)
80 FIELD(ZDMA_CH_IEN
, BYTE_CNT_OVRFL
, 3, 1)
81 FIELD(ZDMA_CH_IEN
, DST_DSCR_DONE
, 2, 1)
82 FIELD(ZDMA_CH_IEN
, SRC_DSCR_DONE
, 1, 1)
83 FIELD(ZDMA_CH_IEN
, INV_APB
, 0, 1)
84 REG32(ZDMA_CH_IDS
, 0x10c)
85 FIELD(ZDMA_CH_IDS
, DMA_PAUSE
, 11, 1)
86 FIELD(ZDMA_CH_IDS
, DMA_DONE
, 10, 1)
87 FIELD(ZDMA_CH_IDS
, AXI_WR_DATA
, 9, 1)
88 FIELD(ZDMA_CH_IDS
, AXI_RD_DATA
, 8, 1)
89 FIELD(ZDMA_CH_IDS
, AXI_RD_DST_DSCR
, 7, 1)
90 FIELD(ZDMA_CH_IDS
, AXI_RD_SRC_DSCR
, 6, 1)
91 FIELD(ZDMA_CH_IDS
, IRQ_DST_ACCT_ERR
, 5, 1)
92 FIELD(ZDMA_CH_IDS
, IRQ_SRC_ACCT_ERR
, 4, 1)
93 FIELD(ZDMA_CH_IDS
, BYTE_CNT_OVRFL
, 3, 1)
94 FIELD(ZDMA_CH_IDS
, DST_DSCR_DONE
, 2, 1)
95 FIELD(ZDMA_CH_IDS
, SRC_DSCR_DONE
, 1, 1)
96 FIELD(ZDMA_CH_IDS
, INV_APB
, 0, 1)
97 REG32(ZDMA_CH_CTRL0
, 0x110)
98 FIELD(ZDMA_CH_CTRL0
, OVR_FETCH
, 7, 1)
99 FIELD(ZDMA_CH_CTRL0
, POINT_TYPE
, 6, 1)
100 FIELD(ZDMA_CH_CTRL0
, MODE
, 4, 2)
101 FIELD(ZDMA_CH_CTRL0
, RATE_CTRL
, 3, 1)
102 FIELD(ZDMA_CH_CTRL0
, CONT_ADDR
, 2, 1)
103 FIELD(ZDMA_CH_CTRL0
, CONT
, 1, 1)
104 REG32(ZDMA_CH_CTRL1
, 0x114)
105 FIELD(ZDMA_CH_CTRL1
, DST_ISSUE
, 5, 5)
106 FIELD(ZDMA_CH_CTRL1
, SRC_ISSUE
, 0, 5)
107 REG32(ZDMA_CH_FCI
, 0x118)
108 FIELD(ZDMA_CH_FCI
, PROG_CELL_CNT
, 2, 2)
109 FIELD(ZDMA_CH_FCI
, SIDE
, 1, 1)
110 FIELD(ZDMA_CH_FCI
, EN
, 0, 1)
111 REG32(ZDMA_CH_STATUS
, 0x11c)
112 FIELD(ZDMA_CH_STATUS
, STATE
, 0, 2)
113 REG32(ZDMA_CH_DATA_ATTR
, 0x120)
114 FIELD(ZDMA_CH_DATA_ATTR
, ARBURST
, 26, 2)
115 FIELD(ZDMA_CH_DATA_ATTR
, ARCACHE
, 22, 4)
116 FIELD(ZDMA_CH_DATA_ATTR
, ARQOS
, 18, 4)
117 FIELD(ZDMA_CH_DATA_ATTR
, ARLEN
, 14, 4)
118 FIELD(ZDMA_CH_DATA_ATTR
, AWBURST
, 12, 2)
119 FIELD(ZDMA_CH_DATA_ATTR
, AWCACHE
, 8, 4)
120 FIELD(ZDMA_CH_DATA_ATTR
, AWQOS
, 4, 4)
121 FIELD(ZDMA_CH_DATA_ATTR
, AWLEN
, 0, 4)
122 REG32(ZDMA_CH_DSCR_ATTR
, 0x124)
123 FIELD(ZDMA_CH_DSCR_ATTR
, AXCOHRNT
, 8, 1)
124 FIELD(ZDMA_CH_DSCR_ATTR
, AXCACHE
, 4, 4)
125 FIELD(ZDMA_CH_DSCR_ATTR
, AXQOS
, 0, 4)
126 REG32(ZDMA_CH_SRC_DSCR_WORD0
, 0x128)
127 REG32(ZDMA_CH_SRC_DSCR_WORD1
, 0x12c)
128 FIELD(ZDMA_CH_SRC_DSCR_WORD1
, MSB
, 0, 17)
129 REG32(ZDMA_CH_SRC_DSCR_WORD2
, 0x130)
130 FIELD(ZDMA_CH_SRC_DSCR_WORD2
, SIZE
, 0, 30)
131 REG32(ZDMA_CH_SRC_DSCR_WORD3
, 0x134)
132 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, CMD
, 3, 2)
133 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, INTR
, 2, 1)
134 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, TYPE
, 1, 1)
135 FIELD(ZDMA_CH_SRC_DSCR_WORD3
, COHRNT
, 0, 1)
136 REG32(ZDMA_CH_DST_DSCR_WORD0
, 0x138)
137 REG32(ZDMA_CH_DST_DSCR_WORD1
, 0x13c)
138 FIELD(ZDMA_CH_DST_DSCR_WORD1
, MSB
, 0, 17)
139 REG32(ZDMA_CH_DST_DSCR_WORD2
, 0x140)
140 FIELD(ZDMA_CH_DST_DSCR_WORD2
, SIZE
, 0, 30)
141 REG32(ZDMA_CH_DST_DSCR_WORD3
, 0x144)
142 FIELD(ZDMA_CH_DST_DSCR_WORD3
, INTR
, 2, 1)
143 FIELD(ZDMA_CH_DST_DSCR_WORD3
, TYPE
, 1, 1)
144 FIELD(ZDMA_CH_DST_DSCR_WORD3
, COHRNT
, 0, 1)
145 REG32(ZDMA_CH_WR_ONLY_WORD0
, 0x148)
146 REG32(ZDMA_CH_WR_ONLY_WORD1
, 0x14c)
147 REG32(ZDMA_CH_WR_ONLY_WORD2
, 0x150)
148 REG32(ZDMA_CH_WR_ONLY_WORD3
, 0x154)
149 REG32(ZDMA_CH_SRC_START_LSB
, 0x158)
150 REG32(ZDMA_CH_SRC_START_MSB
, 0x15c)
151 FIELD(ZDMA_CH_SRC_START_MSB
, ADDR
, 0, 17)
152 REG32(ZDMA_CH_DST_START_LSB
, 0x160)
153 REG32(ZDMA_CH_DST_START_MSB
, 0x164)
154 FIELD(ZDMA_CH_DST_START_MSB
, ADDR
, 0, 17)
155 REG32(ZDMA_CH_RATE_CTRL
, 0x18c)
156 FIELD(ZDMA_CH_RATE_CTRL
, CNT
, 0, 12)
157 REG32(ZDMA_CH_SRC_CUR_PYLD_LSB
, 0x168)
158 REG32(ZDMA_CH_SRC_CUR_PYLD_MSB
, 0x16c)
159 FIELD(ZDMA_CH_SRC_CUR_PYLD_MSB
, ADDR
, 0, 17)
160 REG32(ZDMA_CH_DST_CUR_PYLD_LSB
, 0x170)
161 REG32(ZDMA_CH_DST_CUR_PYLD_MSB
, 0x174)
162 FIELD(ZDMA_CH_DST_CUR_PYLD_MSB
, ADDR
, 0, 17)
163 REG32(ZDMA_CH_SRC_CUR_DSCR_LSB
, 0x178)
164 REG32(ZDMA_CH_SRC_CUR_DSCR_MSB
, 0x17c)
165 FIELD(ZDMA_CH_SRC_CUR_DSCR_MSB
, ADDR
, 0, 17)
166 REG32(ZDMA_CH_DST_CUR_DSCR_LSB
, 0x180)
167 REG32(ZDMA_CH_DST_CUR_DSCR_MSB
, 0x184)
168 FIELD(ZDMA_CH_DST_CUR_DSCR_MSB
, ADDR
, 0, 17)
169 REG32(ZDMA_CH_TOTAL_BYTE
, 0x188)
170 REG32(ZDMA_CH_RATE_CNTL
, 0x18c)
171 FIELD(ZDMA_CH_RATE_CNTL
, CNT
, 0, 12)
172 REG32(ZDMA_CH_IRQ_SRC_ACCT
, 0x190)
173 FIELD(ZDMA_CH_IRQ_SRC_ACCT
, CNT
, 0, 8)
174 REG32(ZDMA_CH_IRQ_DST_ACCT
, 0x194)
175 FIELD(ZDMA_CH_IRQ_DST_ACCT
, CNT
, 0, 8)
176 REG32(ZDMA_CH_DBG0
, 0x198)
177 FIELD(ZDMA_CH_DBG0
, CMN_BUF_FREE
, 0, 9)
178 REG32(ZDMA_CH_DBG1
, 0x19c)
179 FIELD(ZDMA_CH_DBG1
, CMN_BUF_OCC
, 0, 9)
180 REG32(ZDMA_CH_CTRL2
, 0x200)
181 FIELD(ZDMA_CH_CTRL2
, EN
, 0, 1)
209 static void zdma_ch_imr_update_irq(XlnxZDMA
*s
)
213 pending
= s
->regs
[R_ZDMA_CH_ISR
] & ~s
->regs
[R_ZDMA_CH_IMR
];
215 qemu_set_irq(s
->irq_zdma_ch_imr
, pending
);
218 static void zdma_ch_isr_postw(RegisterInfo
*reg
, uint64_t val64
)
220 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
221 zdma_ch_imr_update_irq(s
);
224 static uint64_t zdma_ch_ien_prew(RegisterInfo
*reg
, uint64_t val64
)
226 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
227 uint32_t val
= val64
;
229 s
->regs
[R_ZDMA_CH_IMR
] &= ~val
;
230 zdma_ch_imr_update_irq(s
);
234 static uint64_t zdma_ch_ids_prew(RegisterInfo
*reg
, uint64_t val64
)
236 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
237 uint32_t val
= val64
;
239 s
->regs
[R_ZDMA_CH_IMR
] |= val
;
240 zdma_ch_imr_update_irq(s
);
244 static void zdma_set_state(XlnxZDMA
*s
, XlnxZDMAState state
)
247 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, state
);
249 /* Signal error if we have an error condition. */
251 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_STATUS
, STATE
, 3);
255 static void zdma_src_done(XlnxZDMA
*s
)
258 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
);
260 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
, cnt
);
261 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, SRC_DSCR_DONE
, true);
263 /* Did we overflow? */
264 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_SRC_ACCT
, CNT
)) {
265 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_SRC_ACCT_ERR
, true);
267 zdma_ch_imr_update_irq(s
);
270 static void zdma_dst_done(XlnxZDMA
*s
)
273 cnt
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
);
275 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
, cnt
);
276 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DST_DSCR_DONE
, true);
278 /* Did we overflow? */
279 if (cnt
!= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_IRQ_DST_ACCT
, CNT
)) {
280 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, IRQ_DST_ACCT_ERR
, true);
282 zdma_ch_imr_update_irq(s
);
285 static uint64_t zdma_get_regaddr64(XlnxZDMA
*s
, unsigned int basereg
)
289 addr
= s
->regs
[basereg
+ 1];
291 addr
|= s
->regs
[basereg
];
296 static void zdma_put_regaddr64(XlnxZDMA
*s
, unsigned int basereg
, uint64_t addr
)
298 s
->regs
[basereg
] = addr
;
299 s
->regs
[basereg
+ 1] = addr
>> 32;
302 static void zdma_load_descriptor_reg(XlnxZDMA
*s
, unsigned int reg
,
303 XlnxZDMADescr
*descr
)
305 descr
->addr
= zdma_get_regaddr64(s
, reg
);
306 descr
->size
= s
->regs
[reg
+ 2];
307 descr
->attr
= s
->regs
[reg
+ 3];
310 static bool zdma_load_descriptor(XlnxZDMA
*s
, uint64_t addr
,
311 XlnxZDMADescr
*descr
)
313 /* ZDMA descriptors must be aligned to their own size. */
314 if (addr
% sizeof(XlnxZDMADescr
)) {
315 qemu_log_mask(LOG_GUEST_ERROR
,
316 "zdma: unaligned descriptor at %" PRIx64
,
318 memset(descr
, 0x0, sizeof(XlnxZDMADescr
));
323 descr
->addr
= address_space_ldq_le(&s
->dma_as
, addr
, s
->attr
, NULL
);
324 descr
->size
= address_space_ldl_le(&s
->dma_as
, addr
+ 8, s
->attr
, NULL
);
325 descr
->attr
= address_space_ldl_le(&s
->dma_as
, addr
+ 12, s
->attr
, NULL
);
329 static void zdma_load_src_descriptor(XlnxZDMA
*s
)
332 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
334 if (ptype
== PT_REG
) {
335 zdma_load_descriptor_reg(s
, R_ZDMA_CH_SRC_DSCR_WORD0
, &s
->dsc_src
);
339 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
341 if (!zdma_load_descriptor(s
, src_addr
, &s
->dsc_src
)) {
342 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_SRC_DSCR
, true);
346 static void zdma_update_descr_addr(XlnxZDMA
*s
, bool type
,
347 unsigned int basereg
)
351 if (type
== DTYPE_LINEAR
) {
352 addr
= zdma_get_regaddr64(s
, basereg
);
353 next
= addr
+ sizeof(s
->dsc_dst
);
355 addr
= zdma_get_regaddr64(s
, basereg
);
356 addr
+= sizeof(s
->dsc_dst
);
357 next
= address_space_ldq_le(&s
->dma_as
, addr
, s
->attr
, NULL
);
360 zdma_put_regaddr64(s
, basereg
, next
);
363 static void zdma_load_dst_descriptor(XlnxZDMA
*s
)
366 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
369 if (ptype
== PT_REG
) {
370 zdma_load_descriptor_reg(s
, R_ZDMA_CH_DST_DSCR_WORD0
, &s
->dsc_dst
);
374 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
);
376 if (!zdma_load_descriptor(s
, dst_addr
, &s
->dsc_dst
)) {
377 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, AXI_RD_DST_DSCR
, true);
380 /* Advance the descriptor pointer. */
381 dst_type
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
, TYPE
);
382 zdma_update_descr_addr(s
, dst_type
, R_ZDMA_CH_DST_CUR_DSCR_LSB
);
385 static void zdma_write_dst(XlnxZDMA
*s
, uint8_t *buf
, uint32_t len
)
387 uint32_t dst_size
, dlen
;
389 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
390 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
391 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
394 /* FIXED burst types are only supported in simple dma mode. */
395 if (ptype
!= PT_REG
) {
396 burst_type
= AXI_BURST_INCR
;
400 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
402 if (dst_size
== 0 && ptype
== PT_MEM
) {
403 zdma_load_dst_descriptor(s
);
404 dst_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
408 /* Match what hardware does by ignoring the dst_size and only using
409 * the src size for Simple register mode. */
410 if (ptype
== PT_REG
&& rw_mode
!= RW_MODE_WO
) {
414 dst_intr
= FIELD_EX32(s
->dsc_dst
.words
[3], ZDMA_CH_DST_DSCR_WORD3
,
417 dlen
= len
> dst_size
? dst_size
: len
;
418 if (burst_type
== AXI_BURST_FIXED
) {
419 if (dlen
> (s
->cfg
.bus_width
/ 8)) {
420 dlen
= s
->cfg
.bus_width
/ 8;
424 address_space_write(&s
->dma_as
, s
->dsc_dst
.addr
, s
->attr
, buf
, dlen
);
425 if (burst_type
== AXI_BURST_INCR
) {
426 s
->dsc_dst
.addr
+= dlen
;
432 if (dst_size
== 0 && dst_intr
) {
436 /* Write back to buffered descriptor. */
437 s
->dsc_dst
.words
[2] = FIELD_DP32(s
->dsc_dst
.words
[2],
438 ZDMA_CH_DST_DSCR_WORD2
,
444 static void zdma_process_descr(XlnxZDMA
*s
)
447 uint32_t src_size
, len
;
448 unsigned int src_cmd
;
449 bool src_intr
, src_type
;
450 unsigned int ptype
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, POINT_TYPE
);
451 unsigned int rw_mode
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, MODE
);
452 unsigned int burst_type
= ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_DATA_ATTR
,
455 src_addr
= s
->dsc_src
.addr
;
456 src_size
= FIELD_EX32(s
->dsc_src
.words
[2], ZDMA_CH_SRC_DSCR_WORD2
, SIZE
);
457 src_cmd
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, CMD
);
458 src_type
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
459 src_intr
= FIELD_EX32(s
->dsc_src
.words
[3], ZDMA_CH_SRC_DSCR_WORD3
, INTR
);
461 /* FIXED burst types and non-rw modes are only supported in
464 if (ptype
!= PT_REG
) {
465 if (rw_mode
!= RW_MODE_RW
) {
466 qemu_log_mask(LOG_GUEST_ERROR
,
467 "zDMA: rw-mode=%d but not simple DMA mode.\n",
470 if (burst_type
!= AXI_BURST_INCR
) {
471 qemu_log_mask(LOG_GUEST_ERROR
,
472 "zDMA: burst_type=%d but not simple DMA mode.\n",
475 burst_type
= AXI_BURST_INCR
;
476 rw_mode
= RW_MODE_RW
;
479 if (rw_mode
== RW_MODE_WO
) {
480 /* In Simple DMA Write-Only, we need to push DST size bytes
481 * regardless of what SRC size is set to. */
482 src_size
= FIELD_EX32(s
->dsc_dst
.words
[2], ZDMA_CH_DST_DSCR_WORD2
,
484 memcpy(s
->buf
, &s
->regs
[R_ZDMA_CH_WR_ONLY_WORD0
], s
->cfg
.bus_width
/ 8);
488 len
= src_size
> ARRAY_SIZE(s
->buf
) ? ARRAY_SIZE(s
->buf
) : src_size
;
489 if (burst_type
== AXI_BURST_FIXED
) {
490 if (len
> (s
->cfg
.bus_width
/ 8)) {
491 len
= s
->cfg
.bus_width
/ 8;
495 if (rw_mode
== RW_MODE_WO
) {
496 if (len
> s
->cfg
.bus_width
/ 8) {
497 len
= s
->cfg
.bus_width
/ 8;
500 address_space_read(&s
->dma_as
, src_addr
, s
->attr
, s
->buf
, len
);
501 if (burst_type
== AXI_BURST_INCR
) {
506 if (rw_mode
!= RW_MODE_RO
) {
507 zdma_write_dst(s
, s
->buf
, len
);
510 s
->regs
[R_ZDMA_CH_TOTAL_BYTE
] += len
;
514 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_DONE
, true);
520 if (ptype
== PT_REG
|| src_cmd
== CMD_STOP
) {
521 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL2
, EN
, 0);
522 zdma_set_state(s
, DISABLED
);
525 if (src_cmd
== CMD_HALT
) {
526 zdma_set_state(s
, PAUSED
);
527 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_PAUSE
, 1);
528 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, DMA_DONE
, false);
529 zdma_ch_imr_update_irq(s
);
533 zdma_update_descr_addr(s
, src_type
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
536 static void zdma_run(XlnxZDMA
*s
)
538 while (s
->state
== ENABLED
&& !s
->error
) {
539 zdma_load_src_descriptor(s
);
542 zdma_set_state(s
, DISABLED
);
544 zdma_process_descr(s
);
548 zdma_ch_imr_update_irq(s
);
551 static void zdma_update_descr_addr_from_start(XlnxZDMA
*s
)
553 uint64_t src_addr
, dst_addr
;
555 src_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_SRC_START_LSB
);
556 zdma_put_regaddr64(s
, R_ZDMA_CH_SRC_CUR_DSCR_LSB
, src_addr
);
557 dst_addr
= zdma_get_regaddr64(s
, R_ZDMA_CH_DST_START_LSB
);
558 zdma_put_regaddr64(s
, R_ZDMA_CH_DST_CUR_DSCR_LSB
, dst_addr
);
559 zdma_load_dst_descriptor(s
);
562 static void zdma_ch_ctrlx_postw(RegisterInfo
*reg
, uint64_t val64
)
564 XlnxZDMA
*s
= XLNX_ZDMA(reg
->opaque
);
566 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL2
, EN
)) {
569 if (s
->state
== PAUSED
&&
570 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
571 if (ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT_ADDR
) == 1) {
572 zdma_update_descr_addr_from_start(s
);
574 bool src_type
= FIELD_EX32(s
->dsc_src
.words
[3],
575 ZDMA_CH_SRC_DSCR_WORD3
, TYPE
);
576 zdma_update_descr_addr(s
, src_type
,
577 R_ZDMA_CH_SRC_CUR_DSCR_LSB
);
579 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_CTRL0
, CONT
, false);
580 zdma_set_state(s
, ENABLED
);
581 } else if (s
->state
== DISABLED
) {
582 zdma_update_descr_addr_from_start(s
);
583 zdma_set_state(s
, ENABLED
);
586 /* Leave Paused state? */
587 if (s
->state
== PAUSED
&&
588 ARRAY_FIELD_EX32(s
->regs
, ZDMA_CH_CTRL0
, CONT
)) {
589 zdma_set_state(s
, DISABLED
);
596 static RegisterAccessInfo zdma_regs_info
[] = {
597 { .name
= "ZDMA_ERR_CTRL", .addr
= A_ZDMA_ERR_CTRL
,
599 },{ .name
= "ZDMA_CH_ISR", .addr
= A_ZDMA_CH_ISR
,
602 .post_write
= zdma_ch_isr_postw
,
603 },{ .name
= "ZDMA_CH_IMR", .addr
= A_ZDMA_CH_IMR
,
607 },{ .name
= "ZDMA_CH_IEN", .addr
= A_ZDMA_CH_IEN
,
609 .pre_write
= zdma_ch_ien_prew
,
610 },{ .name
= "ZDMA_CH_IDS", .addr
= A_ZDMA_CH_IDS
,
612 .pre_write
= zdma_ch_ids_prew
,
613 },{ .name
= "ZDMA_CH_CTRL0", .addr
= A_ZDMA_CH_CTRL0
,
616 .post_write
= zdma_ch_ctrlx_postw
,
617 },{ .name
= "ZDMA_CH_CTRL1", .addr
= A_ZDMA_CH_CTRL1
,
620 },{ .name
= "ZDMA_CH_FCI", .addr
= A_ZDMA_CH_FCI
,
622 },{ .name
= "ZDMA_CH_STATUS", .addr
= A_ZDMA_CH_STATUS
,
625 },{ .name
= "ZDMA_CH_DATA_ATTR", .addr
= A_ZDMA_CH_DATA_ATTR
,
628 },{ .name
= "ZDMA_CH_DSCR_ATTR", .addr
= A_ZDMA_CH_DSCR_ATTR
,
630 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD0", .addr
= A_ZDMA_CH_SRC_DSCR_WORD0
,
631 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD1", .addr
= A_ZDMA_CH_SRC_DSCR_WORD1
,
633 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD2", .addr
= A_ZDMA_CH_SRC_DSCR_WORD2
,
635 },{ .name
= "ZDMA_CH_SRC_DSCR_WORD3", .addr
= A_ZDMA_CH_SRC_DSCR_WORD3
,
637 },{ .name
= "ZDMA_CH_DST_DSCR_WORD0", .addr
= A_ZDMA_CH_DST_DSCR_WORD0
,
638 },{ .name
= "ZDMA_CH_DST_DSCR_WORD1", .addr
= A_ZDMA_CH_DST_DSCR_WORD1
,
640 },{ .name
= "ZDMA_CH_DST_DSCR_WORD2", .addr
= A_ZDMA_CH_DST_DSCR_WORD2
,
642 },{ .name
= "ZDMA_CH_DST_DSCR_WORD3", .addr
= A_ZDMA_CH_DST_DSCR_WORD3
,
644 },{ .name
= "ZDMA_CH_WR_ONLY_WORD0", .addr
= A_ZDMA_CH_WR_ONLY_WORD0
,
645 },{ .name
= "ZDMA_CH_WR_ONLY_WORD1", .addr
= A_ZDMA_CH_WR_ONLY_WORD1
,
646 },{ .name
= "ZDMA_CH_WR_ONLY_WORD2", .addr
= A_ZDMA_CH_WR_ONLY_WORD2
,
647 },{ .name
= "ZDMA_CH_WR_ONLY_WORD3", .addr
= A_ZDMA_CH_WR_ONLY_WORD3
,
648 },{ .name
= "ZDMA_CH_SRC_START_LSB", .addr
= A_ZDMA_CH_SRC_START_LSB
,
649 },{ .name
= "ZDMA_CH_SRC_START_MSB", .addr
= A_ZDMA_CH_SRC_START_MSB
,
651 },{ .name
= "ZDMA_CH_DST_START_LSB", .addr
= A_ZDMA_CH_DST_START_LSB
,
652 },{ .name
= "ZDMA_CH_DST_START_MSB", .addr
= A_ZDMA_CH_DST_START_MSB
,
654 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_LSB
,
656 },{ .name
= "ZDMA_CH_SRC_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_SRC_CUR_PYLD_MSB
,
659 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_LSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_LSB
,
661 },{ .name
= "ZDMA_CH_DST_CUR_PYLD_MSB", .addr
= A_ZDMA_CH_DST_CUR_PYLD_MSB
,
664 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_LSB
,
666 },{ .name
= "ZDMA_CH_SRC_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_SRC_CUR_DSCR_MSB
,
669 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_LSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_LSB
,
671 },{ .name
= "ZDMA_CH_DST_CUR_DSCR_MSB", .addr
= A_ZDMA_CH_DST_CUR_DSCR_MSB
,
674 },{ .name
= "ZDMA_CH_TOTAL_BYTE", .addr
= A_ZDMA_CH_TOTAL_BYTE
,
676 },{ .name
= "ZDMA_CH_RATE_CNTL", .addr
= A_ZDMA_CH_RATE_CNTL
,
678 },{ .name
= "ZDMA_CH_IRQ_SRC_ACCT", .addr
= A_ZDMA_CH_IRQ_SRC_ACCT
,
682 },{ .name
= "ZDMA_CH_IRQ_DST_ACCT", .addr
= A_ZDMA_CH_IRQ_DST_ACCT
,
686 },{ .name
= "ZDMA_CH_DBG0", .addr
= A_ZDMA_CH_DBG0
,
691 * There's SW out there that will check the debug regs for free space.
692 * Claim that we always have 0x100 free.
695 },{ .name
= "ZDMA_CH_DBG1", .addr
= A_ZDMA_CH_DBG1
,
698 },{ .name
= "ZDMA_CH_CTRL2", .addr
= A_ZDMA_CH_CTRL2
,
700 .post_write
= zdma_ch_ctrlx_postw
,
704 static void zdma_reset(DeviceState
*dev
)
706 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
709 for (i
= 0; i
< ARRAY_SIZE(s
->regs_info
); ++i
) {
710 register_reset(&s
->regs_info
[i
]);
713 zdma_ch_imr_update_irq(s
);
716 static uint64_t zdma_read(void *opaque
, hwaddr addr
, unsigned size
)
718 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
719 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
722 char *path
= object_get_canonical_path(OBJECT(s
));
723 qemu_log("%s: Decode error: read from %" HWADDR_PRIx
"\n",
727 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
728 zdma_ch_imr_update_irq(s
);
731 return register_read(r
, ~0, NULL
, false);
734 static void zdma_write(void *opaque
, hwaddr addr
, uint64_t value
,
737 XlnxZDMA
*s
= XLNX_ZDMA(opaque
);
738 RegisterInfo
*r
= &s
->regs_info
[addr
/ 4];
741 char *path
= object_get_canonical_path(OBJECT(s
));
742 qemu_log("%s: Decode error: write to %" HWADDR_PRIx
"=%" PRIx64
"\n",
746 ARRAY_FIELD_DP32(s
->regs
, ZDMA_CH_ISR
, INV_APB
, true);
747 zdma_ch_imr_update_irq(s
);
750 register_write(r
, value
, ~0, NULL
, false);
753 static const MemoryRegionOps zdma_ops
= {
756 .endianness
= DEVICE_LITTLE_ENDIAN
,
758 .min_access_size
= 4,
759 .max_access_size
= 4,
763 static void zdma_realize(DeviceState
*dev
, Error
**errp
)
765 XlnxZDMA
*s
= XLNX_ZDMA(dev
);
769 error_setg(errp
, TYPE_XLNX_ZDMA
" 'dma' link not set");
772 address_space_init(&s
->dma_as
, s
->dma_mr
, "zdma-dma");
774 for (i
= 0; i
< ARRAY_SIZE(zdma_regs_info
); ++i
) {
775 RegisterInfo
*r
= &s
->regs_info
[zdma_regs_info
[i
].addr
/ 4];
777 *r
= (RegisterInfo
) {
778 .data
= (uint8_t *)&s
->regs
[
779 zdma_regs_info
[i
].addr
/ 4],
780 .data_size
= sizeof(uint32_t),
781 .access
= &zdma_regs_info
[i
],
786 s
->attr
= MEMTXATTRS_UNSPECIFIED
;
789 static void zdma_init(Object
*obj
)
791 XlnxZDMA
*s
= XLNX_ZDMA(obj
);
792 SysBusDevice
*sbd
= SYS_BUS_DEVICE(obj
);
794 memory_region_init_io(&s
->iomem
, obj
, &zdma_ops
, s
,
795 TYPE_XLNX_ZDMA
, ZDMA_R_MAX
* 4);
796 sysbus_init_mmio(sbd
, &s
->iomem
);
797 sysbus_init_irq(sbd
, &s
->irq_zdma_ch_imr
);
799 object_property_add_link(obj
, "dma", TYPE_MEMORY_REGION
,
800 (Object
**)&s
->dma_mr
,
801 qdev_prop_allow_set_link_before_realize
,
802 OBJ_PROP_LINK_STRONG
);
805 static const VMStateDescription vmstate_zdma
= {
806 .name
= TYPE_XLNX_ZDMA
,
808 .minimum_version_id
= 1,
809 .minimum_version_id_old
= 1,
810 .fields
= (VMStateField
[]) {
811 VMSTATE_UINT32_ARRAY(regs
, XlnxZDMA
, ZDMA_R_MAX
),
812 VMSTATE_UINT32(state
, XlnxZDMA
),
813 VMSTATE_UINT32_ARRAY(dsc_src
.words
, XlnxZDMA
, 4),
814 VMSTATE_UINT32_ARRAY(dsc_dst
.words
, XlnxZDMA
, 4),
815 VMSTATE_END_OF_LIST(),
819 static Property zdma_props
[] = {
820 DEFINE_PROP_UINT32("bus-width", XlnxZDMA
, cfg
.bus_width
, 64),
821 DEFINE_PROP_END_OF_LIST(),
824 static void zdma_class_init(ObjectClass
*klass
, void *data
)
826 DeviceClass
*dc
= DEVICE_CLASS(klass
);
828 dc
->reset
= zdma_reset
;
829 dc
->realize
= zdma_realize
;
830 device_class_set_props(dc
, zdma_props
);
831 dc
->vmsd
= &vmstate_zdma
;
834 static const TypeInfo zdma_info
= {
835 .name
= TYPE_XLNX_ZDMA
,
836 .parent
= TYPE_SYS_BUS_DEVICE
,
837 .instance_size
= sizeof(XlnxZDMA
),
838 .class_init
= zdma_class_init
,
839 .instance_init
= zdma_init
,
842 static void zdma_register_types(void)
844 type_register_static(&zdma_info
);
847 type_init(zdma_register_types
)