#include "hw/registerfields.h"

typedef struct QEMU_PACKED NvmeBar {

    NVME_REG_CAP = offsetof(NvmeBar, cap),
    NVME_REG_VS = offsetof(NvmeBar, vs),
    NVME_REG_INTMS = offsetof(NvmeBar, intms),
    NVME_REG_INTMC = offsetof(NvmeBar, intmc),
    NVME_REG_CC = offsetof(NvmeBar, cc),
    NVME_REG_CSTS = offsetof(NvmeBar, csts),
    NVME_REG_NSSR = offsetof(NvmeBar, nssr),
    NVME_REG_AQA = offsetof(NvmeBar, aqa),
    NVME_REG_ASQ = offsetof(NvmeBar, asq),
    NVME_REG_ACQ = offsetof(NvmeBar, acq),
    NVME_REG_CMBLOC = offsetof(NvmeBar, cmbloc),
    NVME_REG_CMBSZ = offsetof(NvmeBar, cmbsz),
    NVME_REG_BPINFO = offsetof(NvmeBar, bpinfo),
    NVME_REG_BPRSEL = offsetof(NvmeBar, bprsel),
    NVME_REG_BPMBL = offsetof(NvmeBar, bpmbl),
    NVME_REG_CMBMSC = offsetof(NvmeBar, cmbmsc),
    NVME_REG_CMBSTS = offsetof(NvmeBar, cmbsts),
    NVME_REG_PMRCAP = offsetof(NvmeBar, pmrcap),
    NVME_REG_PMRCTL = offsetof(NvmeBar, pmrctl),
    NVME_REG_PMRSTS = offsetof(NvmeBar, pmrsts),
    NVME_REG_PMREBS = offsetof(NvmeBar, pmrebs),
    NVME_REG_PMRSWTP = offsetof(NvmeBar, pmrswtp),
    NVME_REG_PMRMSCL = offsetof(NvmeBar, pmrmscl),
    NVME_REG_PMRMSCU = offsetof(NvmeBar, pmrmscu),

typedef struct QEMU_PACKED NvmeEndGrpLog {
    uint8_t critical_warning;
    uint8_t avail_spare_thres;
    uint64_t end_estimate[2];
    uint64_t data_units_read[2];
    uint64_t data_units_written[2];
    uint64_t media_units_written[2];
    uint64_t host_read_commands[2];
    uint64_t host_write_commands[2];
    uint64_t media_integrity_errors[2];
    uint64_t no_err_info_log_entries[2];
    CAP_MPSMIN_SHIFT = 48,
    CAP_MPSMAX_SHIFT = 52,

    CAP_MQES_MASK = 0xffff,
    CAP_DSTRD_MASK = 0xf,
    CAP_NSSRS_MASK = 0x1,
    CAP_MPSMIN_MASK = 0xf,
    CAP_MPSMAX_MASK = 0xf,

#define NVME_CAP_MQES(cap) (((cap) >> CAP_MQES_SHIFT) & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap) (((cap) >> CAP_CQR_SHIFT) & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap) (((cap) >> CAP_AMS_SHIFT) & CAP_AMS_MASK)
#define NVME_CAP_TO(cap) (((cap) >> CAP_TO_SHIFT) & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap) (((cap) >> CAP_DSTRD_SHIFT) & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap) (((cap) >> CAP_NSSRS_SHIFT) & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap) (((cap) >> CAP_CSS_SHIFT) & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap) (((cap) >> CAP_PMRS_SHIFT) & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap) (((cap) >> CAP_CMBS_SHIFT) & CAP_CMBS_MASK)

#define NVME_CAP_SET_MQES(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_MQES_MASK) << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_CQR_MASK) << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_AMS_MASK) << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_TO_MASK) << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_DSTRD_MASK) << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_NSSRS_MASK) << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_CSS_MASK) << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_MPSMIN_MASK) << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_MPSMAX_MASK) << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_PMRS_MASK) << CAP_PMRS_SHIFT)
#define NVME_CAP_SET_CMBS(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_CMBS_MASK) << CAP_CMBS_SHIFT)
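/*
 * Illustrative sketch (not part of the original header): how a controller
 * model might compose a CAP value with the NVME_CAP_SET_* helpers above.
 * The helper name and the chosen field values are hypothetical; the matching
 * NVME_CAP_* accessors read the same fields back.
 */
static inline uint64_t nvme_example_build_cap(void)
{
    uint64_t cap = 0;

    NVME_CAP_SET_MQES(cap, 0x7ff);   /* 2048 entries per queue (zero's based) */
    NVME_CAP_SET_CQR(cap, 1);        /* physically contiguous queues required */
    NVME_CAP_SET_TO(cap, 0xf);       /* ready timeout, in 500 ms units */
    NVME_CAP_SET_MPSMIN(cap, 0);     /* minimum page size 2^(12 + 0) = 4 KiB */
    NVME_CAP_SET_MPSMAX(cap, 4);     /* maximum page size 2^(12 + 4) = 64 KiB */

    return cap;
}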
    NVME_CAP_CSS_NVM = 1 << 0,
    NVME_CAP_CSS_CSI_SUPP = 1 << 6,
    NVME_CAP_CSS_ADMIN_ONLY = 1 << 7,

    CC_IOSQES_SHIFT = 16,
    CC_IOCQES_SHIFT = 20,

    CC_IOSQES_MASK = 0xf,
    CC_IOCQES_MASK = 0xf,

#define NVME_CC_EN(cc) ((cc >> CC_EN_SHIFT) & CC_EN_MASK)
#define NVME_CC_CSS(cc) ((cc >> CC_CSS_SHIFT) & CC_CSS_MASK)
#define NVME_CC_MPS(cc) ((cc >> CC_MPS_SHIFT) & CC_MPS_MASK)
#define NVME_CC_AMS(cc) ((cc >> CC_AMS_SHIFT) & CC_AMS_MASK)
#define NVME_CC_SHN(cc) ((cc >> CC_SHN_SHIFT) & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)

    NVME_CC_CSS_NVM = 0x0,
    NVME_CC_CSS_CSI = 0x6,
    NVME_CC_CSS_ADMIN_ONLY = 0x7,

#define NVME_SET_CC_EN(cc, val) \
    (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
#define NVME_SET_CC_CSS(cc, val) \
    (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
#define NVME_SET_CC_MPS(cc, val) \
    (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
#define NVME_SET_CC_AMS(cc, val) \
    (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
#define NVME_SET_CC_SHN(cc, val) \
    (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
#define NVME_SET_CC_IOSQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
#define NVME_SET_CC_IOCQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)
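/*
 * Illustrative sketch (not part of the original header): checking the fields a
 * host writes into the Controller Configuration register with the NVME_CC_*
 * accessors above.  The helper name is hypothetical.
 */
static inline int nvme_example_cc_enables_nvm(uint32_t cc)
{
    /*
     * Other fields decode similarly: NVME_CC_MPS() gives log2(page size) - 12,
     * NVME_CC_IOSQES()/NVME_CC_IOCQES() give log2 of the queue entry sizes.
     */
    return NVME_CC_EN(cc) && NVME_CC_CSS(cc) == NVME_CC_CSS_NVM;
}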
    CSTS_NSSRO_SHIFT = 4,

    CSTS_SHST_MASK = 0x3,
    CSTS_NSSRO_MASK = 0x1,

    NVME_CSTS_READY = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO = 1 << CSTS_NSSRO_SHIFT,

#define NVME_CSTS_RDY(csts) ((csts >> CSTS_RDY_SHIFT) & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts) ((csts >> CSTS_CFS_SHIFT) & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts) ((csts >> CSTS_SHST_SHIFT) & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
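/*
 * Illustrative sketch (not part of the original header): a host-side view of
 * CSTS after enabling the controller or requesting a shutdown.  The helper
 * name is hypothetical; it only uses the accessors and flags defined above.
 */
static inline int nvme_example_csts_ready(uint32_t csts)
{
    if (NVME_CSTS_CFS(csts)) {
        return -1;                                  /* controller fatal status */
    }
    if ((csts & (CSTS_SHST_MASK << CSTS_SHST_SHIFT)) == NVME_CSTS_SHST_COMPLETE) {
        return 0;                                   /* shutdown processing complete */
    }
    return NVME_CSTS_RDY(csts);                     /* 1 once CC.EN has taken effect */
}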
    AQA_ASQS_MASK = 0xfff,
    AQA_ACQS_MASK = 0xfff,

#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)

enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT = 0,
    CMBLOC_CQMMS_SHIFT = 3,
    CMBLOC_CQPDS_SHIFT = 4,
    CMBLOC_CDPMLS_SHIFT = 5,
    CMBLOC_CDPCILS_SHIFT = 6,
    CMBLOC_CDMMMS_SHIFT = 7,
    CMBLOC_CQDA_SHIFT = 8,
    CMBLOC_OFST_SHIFT = 12,

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK = 0x7,
    CMBLOC_CQMMS_MASK = 0x1,
    CMBLOC_CQPDS_MASK = 0x1,
    CMBLOC_CDPMLS_MASK = 0x1,
    CMBLOC_CDPCILS_MASK = 0x1,
    CMBLOC_CDMMMS_MASK = 0x1,
    CMBLOC_CQDA_MASK = 0x1,
    CMBLOC_OFST_MASK = 0xfffff,

#define NVME_CMBLOC_BIR(cmbloc) \
    ((cmbloc >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
#define NVME_CMBLOC_CQMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
#define NVME_CMBLOC_CQPDS(cmbloc) \
    ((cmbloc >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
#define NVME_CMBLOC_CDPMLS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
#define NVME_CMBLOC_CDPCILS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
#define NVME_CMBLOC_CDMMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
#define NVME_CMBLOC_CQDA(cmbloc) \
    ((cmbloc >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
#define NVME_CMBLOC_OFST(cmbloc) \
    ((cmbloc >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)

#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_CQMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
#define NVME_CMBLOC_SET_CQPDS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
#define NVME_CMBLOC_SET_CDPMLS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
#define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
#define NVME_CMBLOC_SET_CDMMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
#define NVME_CMBLOC_SET_CQDA(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
#define NVME_CMBMSMC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
enum NvmeCmbszShift {
    CMBSZ_LISTS_SHIFT = 2,

    CMBSZ_SQS_MASK = 0x1,
    CMBSZ_CQS_MASK = 0x1,
    CMBSZ_LISTS_MASK = 0x1,
    CMBSZ_RDS_MASK = 0x1,
    CMBSZ_WDS_MASK = 0x1,
    CMBSZ_SZU_MASK = 0xf,
    CMBSZ_SZ_MASK = 0xfffff,

#define NVME_CMBSZ_SQS(cmbsz) ((cmbsz >> CMBSZ_SQS_SHIFT) & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz) ((cmbsz >> CMBSZ_CQS_SHIFT) & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz) ((cmbsz >> CMBSZ_RDS_SHIFT) & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz) ((cmbsz >> CMBSZ_WDS_SHIFT) & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz) ((cmbsz >> CMBSZ_SZU_SHIFT) & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz) ((cmbsz >> CMBSZ_SZ_SHIFT) & CMBSZ_SZ_MASK)

#define NVME_CMBSZ_SET_SQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
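/*
 * Illustrative sketch (not part of the original header): computing the size of
 * an advertised Controller Memory Buffer.  SZU selects the granularity
 * (4 KiB << (4 * SZU)) and SZ counts those units, which is exactly what
 * NVME_CMBSZ_GETSIZE() expands to.  The helper name is hypothetical.
 */
static inline uint64_t nvme_example_cmb_bytes(uint32_t cmbsz)
{
    if (!NVME_CMBSZ_SQS(cmbsz) && !NVME_CMBSZ_RDS(cmbsz) && !NVME_CMBSZ_WDS(cmbsz)) {
        /* neither queues nor data supported in the CMB; treat as unusable here */
        return 0;
    }
    return NVME_CMBSZ_GETSIZE(cmbsz);
}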
enum NvmeCmbmscShift {
    CMBMSC_CRE_SHIFT = 0,
    CMBMSC_CMSE_SHIFT = 1,
    CMBMSC_CBA_SHIFT = 12,

enum NvmeCmbmscMask {
    CMBMSC_CRE_MASK = 0x1,
    CMBMSC_CMSE_MASK = 0x1,
    CMBMSC_CBA_MASK = ((1ULL << 52) - 1),

#define NVME_CMBMSC_CRE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
#define NVME_CMBMSC_CMSE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
#define NVME_CMBMSC_CBA(cmbmsc) \
    ((cmbmsc >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)

#define NVME_CMBMSC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
#define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
#define NVME_CMBMSC_SET_CBA(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)

enum NvmeCmbstsShift {
    CMBSTS_CBAI_SHIFT = 0,

enum NvmeCmbstsMask {
    CMBSTS_CBAI_MASK = 0x1,

#define NVME_CMBSTS_CBAI(cmbsts) \
    ((cmbsts >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)

#define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
    (cmbsts |= (uint64_t)(val & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT = 3,
    PMRCAP_WDS_SHIFT = 4,
    PMRCAP_BIR_SHIFT = 5,
    PMRCAP_PMRTU_SHIFT = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT = 16,
    PMRCAP_CMSS_SHIFT = 24,

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK = 0x1,
    PMRCAP_WDS_MASK = 0x1,
    PMRCAP_BIR_MASK = 0x7,
    PMRCAP_PMRTU_MASK = 0x3,
    PMRCAP_PMRWBM_MASK = 0xf,
    PMRCAP_PMRTO_MASK = 0xff,
    PMRCAP_CMSS_MASK = 0x1,

#define NVME_PMRCAP_RDS(pmrcap) \
    ((pmrcap >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap) \
    ((pmrcap >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap) \
    ((pmrcap >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap) \
    ((pmrcap >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)

#define NVME_PMRCAP_SET_RDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)

enum NvmePmrctlShift {

enum NvmePmrctlMask {
    PMRCTL_EN_MASK = 0x1,

#define NVME_PMRCTL_EN(pmrctl) ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK = 0xff,
    PMRSTS_NRDY_MASK = 0x1,
    PMRSTS_HSTS_MASK = 0x7,
    PMRSTS_CBAI_MASK = 0x1,

#define NVME_PMRSTS_ERR(pmrsts) \
    ((pmrsts >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

#define NVME_PMRSTS_SET_ERR(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)

enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT = 4,
    PMREBS_PMRWBZ_SHIFT = 8,

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK = 0xf,
    PMREBS_RBB_MASK = 0x1,
    PMREBS_PMRWBZ_MASK = 0xffffff,

#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs) \
    ((pmrebs >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK = 0xf,
    PMRSWTP_PMRSWTV_MASK = 0xffffff,

#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)

enum NvmePmrmsclShift {
    PMRMSCL_CMSE_SHIFT = 1,
    PMRMSCL_CBA_SHIFT = 12,

enum NvmePmrmsclMask {
    PMRMSCL_CMSE_MASK = 0x1,
    PMRMSCL_CBA_MASK = 0xfffff,

#define NVME_PMRMSCL_CMSE(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CMSE_SHIFT) & PMRMSCL_CMSE_MASK)
#define NVME_PMRMSCL_CBA(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CBA_SHIFT) & PMRMSCL_CBA_MASK)

#define NVME_PMRMSCL_SET_CMSE(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CMSE_MASK) << PMRMSCL_CMSE_SHIFT)
#define NVME_PMRMSCL_SET_CBA(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CBA_MASK) << PMRMSCL_CBA_SHIFT)
enum NvmeSglDescriptorType {
    NVME_SGL_DESCR_TYPE_DATA_BLOCK = 0x0,
    NVME_SGL_DESCR_TYPE_BIT_BUCKET = 0x1,
    NVME_SGL_DESCR_TYPE_SEGMENT = 0x2,
    NVME_SGL_DESCR_TYPE_LAST_SEGMENT = 0x3,
    NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK = 0x4,

    NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC = 0xf,

enum NvmeSglDescriptorSubtype {
    NVME_SGL_DESCR_SUBTYPE_ADDRESS = 0x0,

typedef struct QEMU_PACKED NvmeSglDescriptor {

#define NVME_SGL_TYPE(type) ((type >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type) (type & 0xf)
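/*
 * Illustrative sketch (not part of the original header): the descriptor type
 * sits in the upper nibble of the SGL "type" byte and the subtype in the lower
 * nibble, which is what NVME_SGL_TYPE()/NVME_SGL_SUBTYPE() extract.  The
 * helper name is hypothetical.
 */
static inline int nvme_example_sgl_is_data_block(uint8_t type_byte)
{
    return NVME_SGL_TYPE(type_byte) == NVME_SGL_DESCR_TYPE_DATA_BLOCK &&
           NVME_SGL_SUBTYPE(type_byte) == NVME_SGL_DESCR_SUBTYPE_ADDRESS;
}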
typedef union NvmeCmdDptr {
    NvmeSglDescriptor sgl;

    NVME_PSDT_SGL_MPTR_CONTIGUOUS = 0x1,
    NVME_PSDT_SGL_MPTR_SGL = 0x2,

typedef struct QEMU_PACKED NvmeCmd {

#define NVME_CMD_FLAGS_FUSE(flags) (flags & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) ((flags >> 6) & 0x3)
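/*
 * Illustrative sketch (not part of the original header): PSDT in the command
 * flags byte selects between PRP and SGL addressing, which is what
 * NVME_CMD_FLAGS_PSDT() extracts.  The helper name is hypothetical.
 */
static inline int nvme_example_cmd_uses_sgl(uint8_t flags)
{
    switch (NVME_CMD_FLAGS_PSDT(flags)) {
    case NVME_PSDT_SGL_MPTR_CONTIGUOUS:
    case NVME_PSDT_SGL_MPTR_SGL:
        return 1;    /* data pointer holds an SGL descriptor */
    default:
        return 0;    /* PSDT 0: data pointer holds PRP1/PRP2 */
    }
}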
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ = 0x00,
    NVME_ADM_CMD_CREATE_SQ = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE = 0x02,
    NVME_ADM_CMD_DELETE_CQ = 0x04,
    NVME_ADM_CMD_CREATE_CQ = 0x05,
    NVME_ADM_CMD_IDENTIFY = 0x06,
    NVME_ADM_CMD_ABORT = 0x08,
    NVME_ADM_CMD_SET_FEATURES = 0x09,
    NVME_ADM_CMD_GET_FEATURES = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW = 0x11,
    NVME_ADM_CMD_NS_ATTACHMENT = 0x15,
    NVME_ADM_CMD_DIRECTIVE_SEND = 0x19,
    NVME_ADM_CMD_VIRT_MNGMT = 0x1c,
    NVME_ADM_CMD_DIRECTIVE_RECV = 0x1a,
    NVME_ADM_CMD_DBBUF_CONFIG = 0x7c,
    NVME_ADM_CMD_FORMAT_NVM = 0x80,
    NVME_ADM_CMD_SECURITY_SEND = 0x81,
    NVME_ADM_CMD_SECURITY_RECV = 0x82,

enum NvmeIoCommands {
    NVME_CMD_FLUSH = 0x00,
    NVME_CMD_WRITE = 0x01,
    NVME_CMD_READ = 0x02,
    NVME_CMD_WRITE_UNCOR = 0x04,
    NVME_CMD_COMPARE = 0x05,
    NVME_CMD_WRITE_ZEROES = 0x08,

    NVME_CMD_VERIFY = 0x0c,
    NVME_CMD_IO_MGMT_RECV = 0x12,
    NVME_CMD_COPY = 0x19,
    NVME_CMD_IO_MGMT_SEND = 0x1d,
    NVME_CMD_ZONE_MGMT_SEND = 0x79,
    NVME_CMD_ZONE_MGMT_RECV = 0x7a,
    NVME_CMD_ZONE_APPEND = 0x7d,

typedef struct QEMU_PACKED NvmeDeleteQ {

typedef struct QEMU_PACKED NvmeCreateCq {

#define NVME_CQ_FLAGS_PC(cq_flags) (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)

typedef struct QEMU_PACKED NvmeCreateSq {

#define NVME_SQ_FLAGS_PC(sq_flags) (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)

    NVME_SQ_PRIO_URGENT = 0,
    NVME_SQ_PRIO_HIGH = 1,
    NVME_SQ_PRIO_NORMAL = 2,
    NVME_SQ_PRIO_LOW = 3,

typedef struct QEMU_PACKED NvmeIdentify {

typedef struct QEMU_PACKED NvmeRwCmd {

    NVME_RW_LR = 1 << 15,
    NVME_RW_FUA = 1 << 14,
    NVME_RW_DSM_FREQ_UNSPEC = 0,
    NVME_RW_DSM_FREQ_TYPICAL = 1,
    NVME_RW_DSM_FREQ_RARE = 2,
    NVME_RW_DSM_FREQ_READS = 3,
    NVME_RW_DSM_FREQ_WRITES = 4,
    NVME_RW_DSM_FREQ_RW = 5,
    NVME_RW_DSM_FREQ_ONCE = 6,
    NVME_RW_DSM_FREQ_PREFETCH = 7,
    NVME_RW_DSM_FREQ_TEMP = 8,
    NVME_RW_DSM_LATENCY_NONE = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW = 3 << 4,
    NVME_RW_DSM_SEQ_REQ = 1 << 6,
    NVME_RW_DSM_COMPRESSED = 1 << 7,
    NVME_RW_PIREMAP = 1 << 9,
    NVME_RW_PRINFO_PRACT = 1 << 13,
    NVME_RW_PRINFO_PRCHK_GUARD = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF = 1 << 10,
    NVME_RW_PRINFO_PRCHK_MASK = 7 << 10,

#define NVME_RW_PRINFO(control) ((control >> 10) & 0xf)
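/*
 * Illustrative sketch (not part of the original header): the protection
 * information bits of a read/write command live in bits 13:10 of the DWORD 12
 * "control" field; NVME_RW_PRINFO() exposes them as a 4-bit value.  The helper
 * names are hypothetical.
 */
static inline unsigned nvme_example_rw_prinfo(uint16_t control)
{
    /* bit 3: PRACT, bit 2: guard check, bit 1: app tag check, bit 0: ref tag check */
    return NVME_RW_PRINFO(control);
}

static inline int nvme_example_rw_checks_ref_tag(uint16_t control)
{
    return !!(control & NVME_RW_PRINFO_PRCHK_REF);
}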
    NVME_PRINFO_PRACT = 1 << 3,
    NVME_PRINFO_PRCHK_GUARD = 1 << 2,
    NVME_PRINFO_PRCHK_APP = 1 << 1,
    NVME_PRINFO_PRCHK_REF = 1 << 0,
    NVME_PRINFO_PRCHK_MASK = 7 << 0,

typedef struct QEMU_PACKED NvmeDsmCmd {

    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD = 1 << 2,

typedef struct QEMU_PACKED NvmeDsmRange {

    NVME_COPY_FORMAT_0 = 0x0,
    NVME_COPY_FORMAT_1 = 0x1,

typedef struct QEMU_PACKED NvmeCopyCmd {

typedef struct QEMU_PACKED NvmeCopySourceRangeFormat0 {
} NvmeCopySourceRangeFormat0;

typedef struct QEMU_PACKED NvmeCopySourceRangeFormat1 {
} NvmeCopySourceRangeFormat1;

enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR = 0,
    NVME_AER_TYPE_SMART = 1,
    NVME_AER_TYPE_NOTICE = 2,
    NVME_AER_TYPE_IO_SPECIFIC = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC = 7,
    NVME_AER_INFO_ERR_INVALID_DB_REGISTER = 0,
    NVME_AER_INFO_ERR_INVALID_DB_VALUE = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR = 5,
    NVME_AER_INFO_SMART_RELIABILITY = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH = 2,
    NVME_AER_INFO_NOTICE_NS_ATTR_CHANGED = 0,
typedef struct QEMU_PACKED NvmeAerResult {

typedef struct QEMU_PACKED NvmeZonedResult {

typedef struct QEMU_PACKED NvmeCqe {

enum NvmeStatusCodes {
    NVME_SUCCESS = 0x0000,
    NVME_INVALID_OPCODE = 0x0001,
    NVME_INVALID_FIELD = 0x0002,
    NVME_CID_CONFLICT = 0x0003,
    NVME_DATA_TRAS_ERROR = 0x0004,
    NVME_POWER_LOSS_ABORT = 0x0005,
    NVME_INTERNAL_DEV_ERROR = 0x0006,
    NVME_CMD_ABORT_REQ = 0x0007,
    NVME_CMD_ABORT_SQ_DEL = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID = 0x000b,
    NVME_CMD_SEQ_ERROR = 0x000c,
    NVME_INVALID_SGL_SEG_DESCR = 0x000d,
    NVME_INVALID_NUM_SGL_DESCRS = 0x000e,
    NVME_DATA_SGL_LEN_INVALID = 0x000f,
    NVME_MD_SGL_LEN_INVALID = 0x0010,
    NVME_SGL_DESCR_TYPE_INVALID = 0x0011,
    NVME_INVALID_USE_OF_CMB = 0x0012,
    NVME_INVALID_PRP_OFFSET = 0x0013,
    NVME_CMD_SET_CMB_REJECTED = 0x002b,
    NVME_INVALID_CMD_SET = 0x002c,
    NVME_FDP_DISABLED = 0x0029,
    NVME_INVALID_PHID_LIST = 0x002a,
    NVME_LBA_RANGE = 0x0080,
    NVME_CAP_EXCEEDED = 0x0081,
    NVME_NS_NOT_READY = 0x0082,
    NVME_NS_RESV_CONFLICT = 0x0083,
    NVME_FORMAT_IN_PROGRESS = 0x0084,
    NVME_INVALID_CQID = 0x0100,
    NVME_INVALID_QID = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED = 0x0102,
    NVME_ACL_EXCEEDED = 0x0103,
    NVME_RESERVED = 0x0104,
    NVME_AER_LIMIT_EXCEEDED = 0x0105,
    NVME_INVALID_FW_SLOT = 0x0106,
    NVME_INVALID_FW_IMAGE = 0x0107,
    NVME_INVALID_IRQ_VECTOR = 0x0108,
    NVME_INVALID_LOG_ID = 0x0109,
    NVME_INVALID_FORMAT = 0x010a,
    NVME_FW_REQ_RESET = 0x010b,
    NVME_INVALID_QUEUE_DEL = 0x010c,
    NVME_FID_NOT_SAVEABLE = 0x010d,
    NVME_FEAT_NOT_CHANGEABLE = 0x010e,
    NVME_FEAT_NOT_NS_SPEC = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET = 0x0110,
    NVME_NS_ALREADY_ATTACHED = 0x0118,
    NVME_NS_PRIVATE = 0x0119,
    NVME_NS_NOT_ATTACHED = 0x011a,
    NVME_NS_CTRL_LIST_INVALID = 0x011c,
    NVME_INVALID_CTRL_ID = 0x011f,
    NVME_INVALID_SEC_CTRL_STATE = 0x0120,
    NVME_INVALID_NUM_RESOURCES = 0x0121,
    NVME_INVALID_RESOURCE_ID = 0x0122,
    NVME_CONFLICTING_ATTRS = 0x0180,
    NVME_INVALID_PROT_INFO = 0x0181,
    NVME_WRITE_TO_RO = 0x0182,
    NVME_CMD_SIZE_LIMIT = 0x0183,
    NVME_INVALID_ZONE_OP = 0x01b6,
    NVME_NOZRWA = 0x01b7,
    NVME_ZONE_BOUNDARY_ERROR = 0x01b8,
    NVME_ZONE_FULL = 0x01b9,
    NVME_ZONE_READ_ONLY = 0x01ba,
    NVME_ZONE_OFFLINE = 0x01bb,
    NVME_ZONE_INVALID_WRITE = 0x01bc,
    NVME_ZONE_TOO_MANY_ACTIVE = 0x01bd,
    NVME_ZONE_TOO_MANY_OPEN = 0x01be,
    NVME_ZONE_INVAL_TRANSITION = 0x01bf,
    NVME_WRITE_FAULT = 0x0280,
    NVME_UNRECOVERED_READ = 0x0281,
    NVME_E2E_GUARD_ERROR = 0x0282,
    NVME_E2E_APP_ERROR = 0x0283,
    NVME_E2E_REF_ERROR = 0x0284,
    NVME_CMP_FAILURE = 0x0285,
    NVME_ACCESS_DENIED = 0x0286,

    NVME_E2E_STORAGE_TAG_ERROR = 0x0288,

    NVME_NO_COMPLETE = 0xffff,
typedef struct QEMU_PACKED NvmeFwSlotInfoLog {
    uint8_t reserved1[7];
    uint8_t reserved2[448];

typedef struct QEMU_PACKED NvmeErrorLog {
    uint64_t error_count;
    uint16_t status_field;
    uint16_t param_error_location;

typedef struct QEMU_PACKED NvmeSmartLog {
    uint8_t critical_warning;
    uint16_t temperature;
    uint8_t available_spare;
    uint8_t available_spare_threshold;
    uint8_t percentage_used;
    uint8_t reserved1[26];
    uint64_t data_units_read[2];
    uint64_t data_units_written[2];
    uint64_t host_read_commands[2];
    uint64_t host_write_commands[2];
    uint64_t controller_busy_time[2];
    uint64_t power_cycles[2];
    uint64_t power_on_hours[2];
    uint64_t unsafe_shutdowns[2];
    uint64_t media_errors[2];
    uint64_t number_of_error_log_entries[2];
    uint8_t reserved2[320];

#define NVME_SMART_WARN_MAX 6
enum NvmeSmartWarn {
    NVME_SMART_SPARE = 1 << 0,
    NVME_SMART_TEMPERATURE = 1 << 1,
    NVME_SMART_RELIABILITY = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA = 1 << 4,
    NVME_SMART_PMR_UNRELIABLE = 1 << 5,

typedef struct NvmeEffectsLog {

    NVME_CMD_EFF_CSUPP = 1 << 0,
    NVME_CMD_EFF_LBCC = 1 << 1,
    NVME_CMD_EFF_NCC = 1 << 2,
    NVME_CMD_EFF_NIC = 1 << 3,
    NVME_CMD_EFF_CCC = 1 << 4,
    NVME_CMD_EFF_CSE_MASK = 3 << 16,
    NVME_CMD_EFF_UUID_SEL = 1 << 19,

enum NvmeLogIdentifier {
    NVME_LOG_ERROR_INFO = 0x01,
    NVME_LOG_SMART_INFO = 0x02,
    NVME_LOG_FW_SLOT_INFO = 0x03,
    NVME_LOG_CHANGED_NSLIST = 0x04,
    NVME_LOG_CMD_EFFECTS = 0x05,
    NVME_LOG_ENDGRP = 0x09,
    NVME_LOG_FDP_CONFS = 0x20,
    NVME_LOG_FDP_RUH_USAGE = 0x21,
    NVME_LOG_FDP_STATS = 0x22,
    NVME_LOG_FDP_EVENTS = 0x23,

typedef struct QEMU_PACKED NvmePSD {

#define NVME_CONTROLLER_LIST_SIZE 2048
#define NVME_IDENTIFY_DATA_SIZE 4096

    NVME_ID_CNS_NS = 0x00,
    NVME_ID_CNS_CTRL = 0x01,
    NVME_ID_CNS_NS_ACTIVE_LIST = 0x02,
    NVME_ID_CNS_NS_DESCR_LIST = 0x03,
    NVME_ID_CNS_CS_NS = 0x05,
    NVME_ID_CNS_CS_CTRL = 0x06,
    NVME_ID_CNS_CS_NS_ACTIVE_LIST = 0x07,
    NVME_ID_CNS_NS_PRESENT_LIST = 0x10,
    NVME_ID_CNS_NS_PRESENT = 0x11,
    NVME_ID_CNS_NS_ATTACHED_CTRL_LIST = 0x12,
    NVME_ID_CNS_CTRL_LIST = 0x13,
    NVME_ID_CNS_PRIMARY_CTRL_CAP = 0x14,
    NVME_ID_CNS_SECONDARY_CTRL_LIST = 0x15,
    NVME_ID_CNS_CS_NS_PRESENT_LIST = 0x1a,
    NVME_ID_CNS_CS_NS_PRESENT = 0x1b,
    NVME_ID_CNS_IO_COMMAND_SET = 0x1c,
typedef struct QEMU_PACKED NvmeIdCtrl {
    uint8_t rsvd100[11];
    uint8_t rsvd128[128];
    uint8_t tnvmcap[16];
    uint8_t unvmcap[16];
    uint8_t rsvd342[170];
    uint8_t rsvd540[228];
    uint8_t subnqn[256];
    uint8_t rsvd1024[1024];

typedef struct NvmeIdCtrlZoned {
    uint8_t rsvd1[4095];

typedef struct NvmeIdCtrlNvm {
    uint8_t rsvd16[4080];

enum NvmeIdCtrlOaes {
    NVME_OAES_NS_ATTR = 1 << 8,

enum NvmeIdCtrlCtratt {
    NVME_CTRATT_ENDGRPS = 1 << 4,
    NVME_CTRATT_ELBAS = 1 << 15,
    NVME_CTRATT_FDPS = 1 << 19,

enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY = 1 << 0,
    NVME_OACS_FORMAT = 1 << 1,
    NVME_OACS_FW = 1 << 2,
    NVME_OACS_NS_MGMT = 1 << 3,
    NVME_OACS_DIRECTIVES = 1 << 5,
    NVME_OACS_DBBUF = 1 << 8,

enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE = 1 << 0,
    NVME_ONCS_WRITE_UNCORR = 1 << 1,
    NVME_ONCS_DSM = 1 << 2,
    NVME_ONCS_WRITE_ZEROES = 1 << 3,
    NVME_ONCS_FEATURES = 1 << 4,
    NVME_ONCS_RESRVATIONS = 1 << 5,
    NVME_ONCS_TIMESTAMP = 1 << 6,
    NVME_ONCS_VERIFY = 1 << 7,
    NVME_ONCS_COPY = 1 << 8,

enum NvmeIdCtrlOcfs {
    NVME_OCFS_COPY_FORMAT_0 = 1 << NVME_COPY_FORMAT_0,
    NVME_OCFS_COPY_FORMAT_1 = 1 << NVME_COPY_FORMAT_1,

enum NvmeIdctrlVwc {
    NVME_VWC_PRESENT = 1 << 0,
    NVME_VWC_NSID_BROADCAST_NO_SUPPORT = 0 << 1,
    NVME_VWC_NSID_BROADCAST_RESERVED = 1 << 1,
    NVME_VWC_NSID_BROADCAST_CTRL_SPEC = 2 << 1,
    NVME_VWC_NSID_BROADCAST_SUPPORT = 3 << 1,

enum NvmeIdCtrlFrmw {
    NVME_FRMW_SLOT1_RO = 1 << 0,

enum NvmeIdCtrlLpa {
    NVME_LPA_NS_SMART = 1 << 0,
    NVME_LPA_CSE = 1 << 1,
    NVME_LPA_EXTENDED = 1 << 2,

enum NvmeIdCtrlCmic {
    NVME_CMIC_MULTI_CTRL = 1 << 1,

enum NvmeNsAttachmentOperation {
    NVME_NS_ATTACHMENT_ATTACH = 0x0,
    NVME_NS_ATTACHMENT_DETACH = 0x1,

#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)
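/*
 * Illustrative sketch (not part of the original header): the host must keep
 * CC.IOSQES within the bounds advertised in the Identify Controller "sqes"
 * byte (required minimum in the low nibble, maximum in the high nibble).
 * The helper name is hypothetical.
 */
static inline int nvme_example_iosqes_valid(uint8_t sqes, uint32_t cc)
{
    unsigned requested = NVME_CC_IOSQES(cc);        /* log2 of the SQ entry size */

    return requested >= NVME_CTRL_SQES_MIN(sqes) &&
           requested <= NVME_CTRL_SQES_MAX(sqes);
}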
#define NVME_CTRL_SGLS_SUPPORT_MASK (0x3 << 0)
#define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN (0x1 << 0)
#define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 << 1)
#define NVME_CTRL_SGLS_KEYED (0x1 << 2)
#define NVME_CTRL_SGLS_BITBUCKET (0x1 << 16)
#define NVME_CTRL_SGLS_MPTR_CONTIGUOUS (0x1 << 17)
#define NVME_CTRL_SGLS_EXCESS_LENGTH (0x1 << 18)
#define NVME_CTRL_SGLS_MPTR_SGL (0x1 << 19)
#define NVME_CTRL_SGLS_ADDR_OFFSET (0x1 << 20)

#define NVME_ARB_AB(arb) (arb & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb) ((arb >> 8) & 0xff)
#define NVME_ARB_MPW(arb) ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb) ((arb >> 24) & 0xff)

#define NVME_INTC_THR(intc) (intc & 0xff)
#define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)

#define NVME_INTVC_NOCOALESCING (0x1 << 16)

#define NVME_TEMP_THSEL(temp) ((temp >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER 0x0
#define NVME_TEMP_THSEL_UNDER 0x1

#define NVME_TEMP_TMPSEL(temp) ((temp >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0

#define NVME_TEMP_TMPTH(temp) (temp & 0xffff)
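/*
 * Illustrative sketch (not part of the original header): decoding DWORD 11 of
 * the Temperature Threshold feature.  The sensor select (TMPSEL) is ignored
 * here and the helper name is hypothetical.
 */
static inline int nvme_example_temp_threshold_crossed(uint32_t dw11, uint16_t kelvin)
{
    uint16_t tmpth = NVME_TEMP_TMPTH(dw11);         /* threshold in kelvins */

    if (NVME_TEMP_THSEL(dw11) == NVME_TEMP_THSEL_UNDER) {
        return kelvin <= tmpth;                     /* under-temperature threshold */
    }
    return kelvin >= tmpth;                         /* over-temperature threshold */
}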
#define NVME_AEC_SMART(aec) (aec & 0xff)
#define NVME_AEC_NS_ATTR(aec) ((aec >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) ((aec >> 9) & 0x1)
#define NVME_AEC_ENDGRP_NOTICE(aec) ((aec >> 14) & 0x1)

#define NVME_ERR_REC_TLER(err_rec) (err_rec & 0xffff)
#define NVME_ERR_REC_DULBE(err_rec) (err_rec & 0x10000)

enum NvmeFeatureIds {
    NVME_ARBITRATION = 0x1,
    NVME_POWER_MANAGEMENT = 0x2,
    NVME_LBA_RANGE_TYPE = 0x3,
    NVME_TEMPERATURE_THRESHOLD = 0x4,
    NVME_ERROR_RECOVERY = 0x5,
    NVME_VOLATILE_WRITE_CACHE = 0x6,
    NVME_NUMBER_OF_QUEUES = 0x7,
    NVME_INTERRUPT_COALESCING = 0x8,
    NVME_INTERRUPT_VECTOR_CONF = 0x9,
    NVME_WRITE_ATOMICITY = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF = 0xb,
    NVME_TIMESTAMP = 0xe,
    NVME_HOST_BEHAVIOR_SUPPORT = 0x16,
    NVME_COMMAND_SET_PROFILE = 0x19,
    NVME_FDP_MODE = 0x1d,
    NVME_FDP_EVENTS = 0x1e,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
    NVME_FID_MAX = 0x100,

typedef enum NvmeFeatureCap {
    NVME_FEAT_CAP_SAVE = 1 << 0,
    NVME_FEAT_CAP_NS = 1 << 1,
    NVME_FEAT_CAP_CHANGE = 1 << 2,

typedef enum NvmeGetFeatureSelect {
    NVME_GETFEAT_SELECT_CURRENT = 0x0,
    NVME_GETFEAT_SELECT_DEFAULT = 0x1,
    NVME_GETFEAT_SELECT_SAVED = 0x2,
    NVME_GETFEAT_SELECT_CAP = 0x3,
} NvmeGetFeatureSelect;

#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) (dw10 & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK 0x7
#define NVME_GETFEAT_SELECT(dw10) \
    ((dw10 >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK 0x1
#define NVME_SETFEAT_SAVE(dw10) \
    ((dw10 >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
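/*
 * Illustrative sketch (not part of the original header): splitting DWORD 10 of
 * Get/Set Features with the macros above.  The helper names are hypothetical.
 */
static inline uint8_t nvme_example_feat_fid(uint32_t dw10)
{
    return NVME_GETSETFEAT_FID(dw10);               /* e.g. NVME_VOLATILE_WRITE_CACHE */
}

static inline int nvme_example_getfeat_wants_default(uint32_t dw10)
{
    return NVME_GETFEAT_SELECT(dw10) == NVME_GETFEAT_SELECT_DEFAULT;
}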
typedef struct QEMU_PACKED NvmeRangeType {

typedef struct NvmeHostBehaviorSupport {
} NvmeHostBehaviorSupport;

typedef struct QEMU_PACKED NvmeLBAF {

typedef struct QEMU_PACKED NvmeLBAFE {

#define NVME_NSID_BROADCAST 0xffffffff
#define NVME_MAX_NLBAF 64

typedef struct QEMU_PACKED NvmeIdNs {
    NvmeLBAF lbaf[NVME_MAX_NLBAF];

#define NVME_ID_NS_NVM_ELBAF_PIF(elbaf) (((elbaf) >> 7) & 0x3)

typedef struct QEMU_PACKED NvmeIdNsNvm {
    uint32_t elbaf[NVME_MAX_NLBAF];
    uint8_t rsvd268[3828];

typedef struct QEMU_PACKED NvmeIdNsDescr {

enum NvmeNsIdentifierLength {
    NVME_NIDL_EUI64 = 8,
    NVME_NIDL_NGUID = 16,
    NVME_NIDL_UUID = 16,

enum NvmeNsIdentifierType {
    NVME_NIDT_EUI64 = 0x01,
    NVME_NIDT_NGUID = 0x02,
    NVME_NIDT_UUID = 0x03,
    NVME_NIDT_CSI = 0x04,

    NVME_NMIC_NS_SHARED = 1 << 0,

    NVME_CSI_NVM = 0x00,
    NVME_CSI_ZONED = 0x02,

#define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))
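/*
 * Illustrative sketch (not part of the original header): building an I/O
 * command set combination vector, one bit per Command Set Identifier, with
 * NVME_SET_CSI().  The helper name is hypothetical.
 */
static inline uint8_t nvme_example_iocs_vector(int zoned)
{
    uint8_t iocs = 0;

    NVME_SET_CSI(iocs, NVME_CSI_NVM);               /* conventional NVM command set */
    if (zoned) {
        NVME_SET_CSI(iocs, NVME_CSI_ZONED);         /* Zoned Namespace command set */
    }
    return iocs;
}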
typedef struct QEMU_PACKED NvmeIdNsZoned {
    uint8_t rsvd53[2763];
    NvmeLBAFE lbafe[16];
    uint8_t rsvd3072[768];

enum NvmeIdNsZonedOzcs {
    NVME_ID_NS_ZONED_OZCS_RAZB = 1 << 0,
    NVME_ID_NS_ZONED_OZCS_ZRWASUP = 1 << 1,

enum NvmeIdNsZonedZrwacap {
    NVME_ID_NS_ZONED_ZRWACAP_EXPFLUSHSUP = 1 << 0,

/* Deallocate Logical Block Features */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat) ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES 1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES 2

#define NVME_ID_NS_NSFEAT_THIN(nsfeat) ((nsfeat & 0x1))
#define NVME_ID_NS_NSFEAT_DULBE(nsfeat) ((nsfeat >> 2) & 0x1)
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas) ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc) ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc) ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc) ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc) ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc) ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc) ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc) ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK 0x7
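/*
 * Illustrative sketch (not part of the original header): flbas from Identify
 * Namespace selects the active LBA format and whether metadata, if any, is
 * transferred inline (extended LBAs).  The helper name is hypothetical.
 */
static inline unsigned nvme_example_lba_format_index(uint8_t flbas, int *extended)
{
    *extended = NVME_ID_NS_FLBAS_EXTENDED(flbas);   /* 1: metadata follows each LBA */

    return NVME_ID_NS_FLBAS_INDEX(flbas);           /* index into the lbaf[] array */
}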
    NVME_ID_NS_DPS_TYPE_NONE = 0,
    NVME_ID_NS_DPS_TYPE_1 = 1,
    NVME_ID_NS_DPS_TYPE_2 = 2,
    NVME_ID_NS_DPS_TYPE_3 = 3,
    NVME_ID_NS_DPS_TYPE_MASK = 0x7,
    NVME_ID_NS_DPS_FIRST_EIGHT = 8,

enum NvmeIdNsFlbas {
    NVME_ID_NS_FLBAS_EXTENDED = 1 << 4,

    NVME_ID_NS_MC_EXTENDED = 1 << 0,
    NVME_ID_NS_MC_SEPARATE = 1 << 1,

#define NVME_ID_NS_DPS_TYPE(dps) (dps & NVME_ID_NS_DPS_TYPE_MASK)
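/*
 * Illustrative sketch (not part of the original header): dps from Identify
 * Namespace carries the end-to-end protection type in its low bits and the
 * "PI in first eight bytes of metadata" flag above them.  The helper name is
 * hypothetical.
 */
static inline int nvme_example_pi_enabled(uint8_t dps)
{
    switch (NVME_ID_NS_DPS_TYPE(dps)) {
    case NVME_ID_NS_DPS_TYPE_1:
    case NVME_ID_NS_DPS_TYPE_2:
    case NVME_ID_NS_DPS_TYPE_3:
        return 1;
    default:
        return 0;
    }
}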
    NVME_PI_GUARD_16 = 0,
    NVME_PI_GUARD_64 = 2,

typedef union NvmeDifTuple {

    NVME_ZA_FINISHED_BY_CTLR = 1 << 0,
    NVME_ZA_FINISH_RECOMMENDED = 1 << 1,
    NVME_ZA_RESET_RECOMMENDED = 1 << 2,
    NVME_ZA_ZRWA_VALID = 1 << 3,
    NVME_ZA_ZD_EXT_VALID = 1 << 7,

typedef struct QEMU_PACKED NvmeZoneReportHeader {
} NvmeZoneReportHeader;

enum NvmeZoneReceiveAction {
    NVME_ZONE_REPORT = 0,
    NVME_ZONE_REPORT_EXTENDED = 1,

enum NvmeZoneReportType {
    NVME_ZONE_REPORT_ALL = 0,
    NVME_ZONE_REPORT_EMPTY = 1,
    NVME_ZONE_REPORT_IMPLICITLY_OPEN = 2,
    NVME_ZONE_REPORT_EXPLICITLY_OPEN = 3,
    NVME_ZONE_REPORT_CLOSED = 4,
    NVME_ZONE_REPORT_FULL = 5,
    NVME_ZONE_REPORT_READ_ONLY = 6,
    NVME_ZONE_REPORT_OFFLINE = 7,

    NVME_ZONE_TYPE_RESERVED = 0x00,
    NVME_ZONE_TYPE_SEQ_WRITE = 0x02,

typedef struct QEMU_PACKED NvmeZoneSendCmd {

enum NvmeZoneSendAction {
    NVME_ZONE_ACTION_RSD = 0x00,
    NVME_ZONE_ACTION_CLOSE = 0x01,
    NVME_ZONE_ACTION_FINISH = 0x02,
    NVME_ZONE_ACTION_OPEN = 0x03,
    NVME_ZONE_ACTION_RESET = 0x04,
    NVME_ZONE_ACTION_OFFLINE = 0x05,
    NVME_ZONE_ACTION_SET_ZD_EXT = 0x10,
    NVME_ZONE_ACTION_ZRWA_FLUSH = 0x11,

    NVME_ZSFLAG_SELECT_ALL = 1 << 0,
    NVME_ZSFLAG_ZRWA_ALLOC = 1 << 1,

typedef struct QEMU_PACKED NvmeZoneDescr {

typedef enum NvmeZoneState {
    NVME_ZONE_STATE_RESERVED = 0x00,
    NVME_ZONE_STATE_EMPTY = 0x01,
    NVME_ZONE_STATE_IMPLICITLY_OPEN = 0x02,
    NVME_ZONE_STATE_EXPLICITLY_OPEN = 0x03,
    NVME_ZONE_STATE_CLOSED = 0x04,
    NVME_ZONE_STATE_READ_ONLY = 0x0d,
    NVME_ZONE_STATE_FULL = 0x0e,
    NVME_ZONE_STATE_OFFLINE = 0x0f,

typedef struct QEMU_PACKED NvmePriCtrlCap {
    uint8_t rsvd80[4016];

typedef enum NvmePriCtrlCapCrt {
    NVME_CRT_VQ = 1 << 0,
    NVME_CRT_VI = 1 << 1,
} NvmePriCtrlCapCrt;

typedef struct QEMU_PACKED NvmeSecCtrlEntry {

typedef struct QEMU_PACKED NvmeSecCtrlList {
    NvmeSecCtrlEntry sec[127];

typedef enum NvmeVirtMngmtAction {
    NVME_VIRT_MNGMT_ACTION_PRM_ALLOC = 0x01,
    NVME_VIRT_MNGMT_ACTION_SEC_OFFLINE = 0x07,
    NVME_VIRT_MNGMT_ACTION_SEC_ASSIGN = 0x08,
    NVME_VIRT_MNGMT_ACTION_SEC_ONLINE = 0x09,
} NvmeVirtMngmtAction;

typedef enum NvmeVirtualResourceType {
    NVME_VIRT_RES_QUEUE = 0x00,
    NVME_VIRT_RES_INTERRUPT = 0x01,
} NvmeVirtualResourceType;

typedef struct NvmeDirectiveIdentify {
    uint8_t unused1[31];
    uint8_t unused33[31];
    uint8_t unused65[31];
    uint8_t rsvd64[4000];
} NvmeDirectiveIdentify;
enum NvmeDirectiveTypes {
    NVME_DIRECTIVE_IDENTIFY = 0x0,
    NVME_DIRECTIVE_DATA_PLACEMENT = 0x2,

enum NvmeDirectiveOperations {
    NVME_DIRECTIVE_RETURN_PARAMS = 0x1,

typedef struct QEMU_PACKED NvmeFdpConfsHdr {

    FIELD(FDPA, RGIF, 0, 4)
    FIELD(FDPA, VWC, 4, 1)
    FIELD(FDPA, VALID, 7, 1);

typedef struct QEMU_PACKED NvmeFdpDescrHdr {
    uint16_t descr_size;

    NVME_RUHT_INITIALLY_ISOLATED = 1,
    NVME_RUHT_PERSISTENTLY_ISOLATED = 2,

typedef struct QEMU_PACKED NvmeRuhDescr {

typedef struct QEMU_PACKED NvmeRuhuLog {

enum NvmeRuhAttributes {
    NVME_RUHA_UNUSED = 0,

typedef struct QEMU_PACKED NvmeRuhuDescr {

typedef struct QEMU_PACKED NvmeFdpStatsLog {

typedef struct QEMU_PACKED NvmeFdpEventsLog {
    uint32_t num_events;

enum NvmeFdpEventType {
    FDP_EVT_RU_NOT_FULLY_WRITTEN = 0x0,
    FDP_EVT_RU_ATL_EXCEEDED = 0x1,
    FDP_EVT_CTRL_RESET_RUH = 0x2,
    FDP_EVT_INVALID_PID = 0x3,
    FDP_EVT_MEDIA_REALLOC = 0x80,
    FDP_EVT_RUH_IMPLICIT_RU_CHANGE = 0x81,

enum NvmeFdpEventFlags {
    FDPEF_NSIDV = 1 << 1,

typedef struct QEMU_PACKED NvmeFdpEvent {
    uint64_t type_specific[2];

typedef struct QEMU_PACKED NvmePhidList {

typedef struct QEMU_PACKED NvmePhidDescr {

REG32(FEAT_FDP, 0x0)
    FIELD(FEAT_FDP, FDPE, 0, 1)
    FIELD(FEAT_FDP, CONF_NDX, 8, 8);
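/*
 * Illustrative sketch (not part of the original header): the REG32()/FIELD()
 * declarations above come from "hw/registerfields.h", so the FDP feature value
 * can be unpacked with FIELD_EX32().  The helper name is hypothetical.
 */
static inline int nvme_example_fdp_enabled(uint32_t feat_fdp, uint16_t *conf_index)
{
    *conf_index = FIELD_EX32(feat_fdp, FEAT_FDP, CONF_NDX);   /* selected configuration */

    return FIELD_EX32(feat_fdp, FEAT_FDP, FDPE);              /* 1 when FDP is enabled */
}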
typedef struct QEMU_PACKED NvmeFdpEventDescr {
} NvmeFdpEventDescr;

REG32(NVME_IOMR, 0x0)
    FIELD(NVME_IOMR, MO, 0, 8)
    FIELD(NVME_IOMR, MOS, 16, 16);

    NVME_IOMR_MO_NOP = 0x0,
    NVME_IOMR_MO_RUH_STATUS = 0x1,
    NVME_IOMR_MO_VENDOR_SPECIFIC = 0x255,

typedef struct QEMU_PACKED NvmeRuhStatus {

typedef struct QEMU_PACKED NvmeRuhStatusDescr {
} NvmeRuhStatusDescr;

REG32(NVME_IOMS, 0x0)
    FIELD(NVME_IOMS, MO, 0, 8)
    FIELD(NVME_IOMS, MOS, 16, 16);

    NVME_IOMS_MO_NOP = 0x0,
    NVME_IOMS_MO_RUH_UPDATE = 0x1,
static inline void _nvme_check_size(void)
    QEMU_BUILD_BUG_ON(sizeof(NvmeBar) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult) != 8);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRangeFormat0) != 32);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRangeFormat1) != 40);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopyCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeHostBehaviorSupport) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlNvm) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsNvm) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDifTuple) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmePriCtrlCap) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSecCtrlEntry) != 32);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSecCtrlList) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeEndGrpLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDirectiveIdentify) != 4096);