4 typedef struct QEMU_PACKED NvmeBar
{
41 CAP_MPSMIN_SHIFT
= 48,
42 CAP_MPSMAX_SHIFT
= 52,
48 CAP_MQES_MASK
= 0xffff,
55 CAP_MPSMIN_MASK
= 0xf,
56 CAP_MPSMAX_MASK
= 0xf,
/*
 * Extractors for the fields of the Controller Capabilities (CAP) register:
 * shift the field into place and mask off the field width.
 */
#define NVME_CAP_MQES(cap)   (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)    (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)    (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)     (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap)  (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap)  (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)    (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap)   (((cap) >> CAP_PMRS_SHIFT)   & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap)   (((cap) >> CAP_CMBS_SHIFT)   & CAP_CMBS_MASK)
/*
 * Setters for the CAP register fields: mask the value to the field width
 * and OR it into place.  Because the field is OR-ed (not cleared first),
 * each field is expected to be set at most once on a zeroed register.
 *
 * NOTE(review): the continuation lines of these macros were lost in the
 * corrupted copy; they are reconstructed as "<< CAP_*_SHIFT)" to match
 * the pattern used by every other SET macro group in this file — confirm
 * against the upstream header.
 */
#define NVME_CAP_SET_MQES(cap, val)   \
    (cap |= (uint64_t)(val & CAP_MQES_MASK) << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val)    \
    (cap |= (uint64_t)(val & CAP_CQR_MASK) << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val)    \
    (cap |= (uint64_t)(val & CAP_AMS_MASK) << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val)     \
    (cap |= (uint64_t)(val & CAP_TO_MASK) << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val)  \
    (cap |= (uint64_t)(val & CAP_DSTRD_MASK) << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val)  \
    (cap |= (uint64_t)(val & CAP_NSSRS_MASK) << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val)    \
    (cap |= (uint64_t)(val & CAP_CSS_MASK) << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) \
    (cap |= (uint64_t)(val & CAP_MPSMIN_MASK) << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) \
    (cap |= (uint64_t)(val & CAP_MPSMAX_MASK) << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val)   \
    (cap |= (uint64_t)(val & CAP_PMRS_MASK) << CAP_PMRS_SHIFT)
#define NVME_CAP_SET_CMBS(cap, val)   \
    (cap |= (uint64_t)(val & CAP_CMBS_MASK) << CAP_CMBS_SHIFT)
97 NVME_CAP_CSS_NVM
= 1 << 0,
98 NVME_CAP_CSS_CSI_SUPP
= 1 << 6,
99 NVME_CAP_CSS_ADMIN_ONLY
= 1 << 7,
108 CC_IOSQES_SHIFT
= 16,
109 CC_IOCQES_SHIFT
= 20,
118 CC_IOSQES_MASK
= 0xf,
119 CC_IOCQES_MASK
= 0xf,
/* Extractors for the fields of the Controller Configuration (CC) register. */
#define NVME_CC_EN(cc)     ((cc >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    ((cc >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    ((cc >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    ((cc >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    ((cc >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
131 NVME_CC_CSS_NVM
= 0x0,
132 NVME_CC_CSS_CSI
= 0x6,
133 NVME_CC_CSS_ADMIN_ONLY
= 0x7,
/*
 * Setters for the CC register fields: mask the value to the field width
 * and OR it into place (fields are not cleared first).
 */
#define NVME_SET_CC_EN(cc, val)     \
    (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
#define NVME_SET_CC_CSS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
#define NVME_SET_CC_MPS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
#define NVME_SET_CC_AMS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
#define NVME_SET_CC_SHN(cc, val)    \
    (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
#define NVME_SET_CC_IOSQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
#define NVME_SET_CC_IOCQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)
155 CSTS_NSSRO_SHIFT
= 4,
161 CSTS_SHST_MASK
= 0x3,
162 CSTS_NSSRO_MASK
= 0x1,
166 NVME_CSTS_READY
= 1 << CSTS_RDY_SHIFT
,
167 NVME_CSTS_FAILED
= 1 << CSTS_CFS_SHIFT
,
168 NVME_CSTS_SHST_NORMAL
= 0 << CSTS_SHST_SHIFT
,
169 NVME_CSTS_SHST_PROGRESS
= 1 << CSTS_SHST_SHIFT
,
170 NVME_CSTS_SHST_COMPLETE
= 2 << CSTS_SHST_SHIFT
,
171 NVME_CSTS_NSSRO
= 1 << CSTS_NSSRO_SHIFT
,
/* Extractors for the fields of the Controller Status (CSTS) register. */
#define NVME_CSTS_RDY(csts)   ((csts >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   ((csts >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  ((csts >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
185 AQA_ASQS_MASK
= 0xfff,
186 AQA_ACQS_MASK
= 0xfff,
/* Extractors for the Admin Queue Attributes (AQA) register: admin
 * submission/completion queue sizes (0's based values). */
#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
/* Bit offsets of the fields of the Controller Memory Buffer Location
 * (CMBLOC) register. */
enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT     = 0,
    CMBLOC_CQMMS_SHIFT   = 3,
    CMBLOC_CQPDS_SHIFT   = 4,
    CMBLOC_CDPMLS_SHIFT  = 5,
    CMBLOC_CDPCILS_SHIFT = 6,
    CMBLOC_CDMMMS_SHIFT  = 7,
    CMBLOC_CQDA_SHIFT    = 8,
    CMBLOC_OFST_SHIFT    = 12,
};
/* Field-width masks (applied after shifting) for the CMBLOC register. */
enum NvmeCmblocMask {
    CMBLOC_BIR_MASK     = 0x7,
    CMBLOC_CQMMS_MASK   = 0x1,
    CMBLOC_CQPDS_MASK   = 0x1,
    CMBLOC_CDPMLS_MASK  = 0x1,
    CMBLOC_CDPCILS_MASK = 0x1,
    CMBLOC_CDMMMS_MASK  = 0x1,
    CMBLOC_CQDA_MASK    = 0x1,
    CMBLOC_OFST_MASK    = 0xfffff,
};
/* Extractors for the fields of the CMBLOC register. */
#define NVME_CMBLOC_BIR(cmbloc)     \
    ((cmbloc >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
#define NVME_CMBLOC_CQMMS(cmbloc)   \
    ((cmbloc >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
#define NVME_CMBLOC_CQPDS(cmbloc)   \
    ((cmbloc >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
#define NVME_CMBLOC_CDPMLS(cmbloc)  \
    ((cmbloc >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
#define NVME_CMBLOC_CDPCILS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
#define NVME_CMBLOC_CDMMMS(cmbloc)  \
    ((cmbloc >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
#define NVME_CMBLOC_CQDA(cmbloc)    \
    ((cmbloc >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
#define NVME_CMBLOC_OFST(cmbloc)    \
    ((cmbloc >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)
/* Setters for the CMBLOC register fields: mask the value and OR it into
 * place at the field offset. */
#define NVME_CMBLOC_SET_BIR(cmbloc, val)     \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_CQMMS(cmbloc, val)   \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
#define NVME_CMBLOC_SET_CQPDS(cmbloc, val)   \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
#define NVME_CMBLOC_SET_CDPMLS(cmbloc, val)  \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
#define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
#define NVME_CMBLOC_SET_CDMMMS(cmbloc, val)  \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
#define NVME_CMBLOC_SET_CQDA(cmbloc, val)    \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val)    \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
/*
 * Removed broken macro "NVME_CMBMSMC_SET_CRE (cmbmsc, val)": the space
 * before the parameter list made it an OBJECT-like macro whose expansion
 * was the useless token sequence "(cmbmsc, val) (cmbmsc |= ...)", the
 * name contained a typo (CMBMSMC instead of CMBMSC), and its body used
 * CMBLOC_OFST_MASK instead of CMBMSC_CRE_MASK.  The correct function-like
 * macro NVME_CMBMSC_SET_CRE is defined with the other CMBMSC setters
 * further down in this file.
 */
251 enum NvmeCmbszShift
{
254 CMBSZ_LISTS_SHIFT
= 2,
262 CMBSZ_SQS_MASK
= 0x1,
263 CMBSZ_CQS_MASK
= 0x1,
264 CMBSZ_LISTS_MASK
= 0x1,
265 CMBSZ_RDS_MASK
= 0x1,
266 CMBSZ_WDS_MASK
= 0x1,
267 CMBSZ_SZU_MASK
= 0xf,
268 CMBSZ_SZ_MASK
= 0xfffff,
/* Extractors for the fields of the Controller Memory Buffer Size (CMBSZ)
 * register. */
#define NVME_CMBSZ_SQS(cmbsz)   ((cmbsz >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)   ((cmbsz >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)   ((cmbsz >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)   ((cmbsz >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)   ((cmbsz >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)    ((cmbsz >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)

/* Setters for the CMBSZ register fields. */
#define NVME_CMBSZ_SET_SQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val)    \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

/* CMB size in bytes: SZ is in units of 4 KiB * 16^SZU. */
#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
/* Bit offsets of the fields of the Controller Memory Buffer Memory Space
 * Control (CMBMSC) register. */
enum NvmeCmbmscShift {
    CMBMSC_CRE_SHIFT  = 0,
    CMBMSC_CMSE_SHIFT = 1,
    CMBMSC_CBA_SHIFT  = 12,
};
/* Field-width masks for the CMBMSC register; CBA is a 52-bit field. */
enum NvmeCmbmscMask {
    CMBMSC_CRE_MASK  = 0x1,
    CMBMSC_CMSE_MASK = 0x1,
    CMBMSC_CBA_MASK  = ((1ULL << 52) - 1),
};
/* Extractors for the fields of the CMBMSC register. */
#define NVME_CMBMSC_CRE(cmbmsc)  \
    ((cmbmsc >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
#define NVME_CMBMSC_CMSE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
#define NVME_CMBMSC_CBA(cmbmsc)  \
    ((cmbmsc >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)

/* Setters for the CMBMSC register fields: mask the value and OR it into
 * place at the field offset. */
#define NVME_CMBMSC_SET_CRE(cmbmsc, val)  \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
#define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
#define NVME_CMBMSC_SET_CBA(cmbmsc, val)  \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)
324 enum NvmeCmbstsShift
{
325 CMBSTS_CBAI_SHIFT
= 0,
327 enum NvmeCmbstsMask
{
328 CMBSTS_CBAI_MASK
= 0x1,
331 #define NVME_CMBSTS_CBAI(cmbsts) \
332 ((cmbsts >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)
334 #define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
335 (cmbsts |= (uint64_t)(val & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)
/* Bit offsets of the fields of the Persistent Memory Region Capabilities
 * (PMRCAP) register. */
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT    = 3,
    PMRCAP_WDS_SHIFT    = 4,
    PMRCAP_BIR_SHIFT    = 5,
    PMRCAP_PMRTU_SHIFT  = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT  = 16,
    PMRCAP_CMSS_SHIFT   = 24,
};
/* Field-width masks for the PMRCAP register. */
enum NvmePmrcapMask {
    PMRCAP_RDS_MASK    = 0x1,
    PMRCAP_WDS_MASK    = 0x1,
    PMRCAP_BIR_MASK    = 0x7,
    PMRCAP_PMRTU_MASK  = 0x3,
    PMRCAP_PMRWBM_MASK = 0xf,
    PMRCAP_PMRTO_MASK  = 0xff,
    PMRCAP_CMSS_MASK   = 0x1,
};
357 #define NVME_PMRCAP_RDS(pmrcap) \
358 ((pmrcap >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
359 #define NVME_PMRCAP_WDS(pmrcap) \
360 ((pmrcap >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
361 #define NVME_PMRCAP_BIR(pmrcap) \
362 ((pmrcap >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
363 #define NVME_PMRCAP_PMRTU(pmrcap) \
364 ((pmrcap >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
365 #define NVME_PMRCAP_PMRWBM(pmrcap) \
366 ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
367 #define NVME_PMRCAP_PMRTO(pmrcap) \
368 ((pmrcap >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
369 #define NVME_PMRCAP_CMSS(pmrcap) \
370 ((pmrcap >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)
372 #define NVME_PMRCAP_SET_RDS(pmrcap, val) \
373 (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
374 #define NVME_PMRCAP_SET_WDS(pmrcap, val) \
375 (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
376 #define NVME_PMRCAP_SET_BIR(pmrcap, val) \
377 (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
378 #define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
379 (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
380 #define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
381 (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
382 #define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
383 (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
384 #define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
385 (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
387 enum NvmePmrctlShift
{
391 enum NvmePmrctlMask
{
392 PMRCTL_EN_MASK
= 0x1,
395 #define NVME_PMRCTL_EN(pmrctl) ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)
397 #define NVME_PMRCTL_SET_EN(pmrctl, val) \
398 (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
400 enum NvmePmrstsShift
{
401 PMRSTS_ERR_SHIFT
= 0,
402 PMRSTS_NRDY_SHIFT
= 8,
403 PMRSTS_HSTS_SHIFT
= 9,
404 PMRSTS_CBAI_SHIFT
= 12,
407 enum NvmePmrstsMask
{
408 PMRSTS_ERR_MASK
= 0xff,
409 PMRSTS_NRDY_MASK
= 0x1,
410 PMRSTS_HSTS_MASK
= 0x7,
411 PMRSTS_CBAI_MASK
= 0x1,
414 #define NVME_PMRSTS_ERR(pmrsts) \
415 ((pmrsts >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
416 #define NVME_PMRSTS_NRDY(pmrsts) \
417 ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
418 #define NVME_PMRSTS_HSTS(pmrsts) \
419 ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
420 #define NVME_PMRSTS_CBAI(pmrsts) \
421 ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)
423 #define NVME_PMRSTS_SET_ERR(pmrsts, val) \
424 (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
425 #define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
426 (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
427 #define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
428 (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
429 #define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
430 (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
432 enum NvmePmrebsShift
{
433 PMREBS_PMRSZU_SHIFT
= 0,
434 PMREBS_RBB_SHIFT
= 4,
435 PMREBS_PMRWBZ_SHIFT
= 8,
438 enum NvmePmrebsMask
{
439 PMREBS_PMRSZU_MASK
= 0xf,
440 PMREBS_RBB_MASK
= 0x1,
441 PMREBS_PMRWBZ_MASK
= 0xffffff,
444 #define NVME_PMREBS_PMRSZU(pmrebs) \
445 ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
446 #define NVME_PMREBS_RBB(pmrebs) \
447 ((pmrebs >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
448 #define NVME_PMREBS_PMRWBZ(pmrebs) \
449 ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)
451 #define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
452 (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
453 #define NVME_PMREBS_SET_RBB(pmrebs, val) \
454 (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
455 #define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
456 (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
458 enum NvmePmrswtpShift
{
459 PMRSWTP_PMRSWTU_SHIFT
= 0,
460 PMRSWTP_PMRSWTV_SHIFT
= 8,
463 enum NvmePmrswtpMask
{
464 PMRSWTP_PMRSWTU_MASK
= 0xf,
465 PMRSWTP_PMRSWTV_MASK
= 0xffffff,
468 #define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
469 ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
470 #define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
471 ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)
473 #define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
474 (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
475 #define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
476 (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
478 enum NvmePmrmscShift
{
479 PMRMSC_CMSE_SHIFT
= 1,
480 PMRMSC_CBA_SHIFT
= 12,
483 enum NvmePmrmscMask
{
484 PMRMSC_CMSE_MASK
= 0x1,
485 PMRMSC_CBA_MASK
= 0xfffffffffffff,
488 #define NVME_PMRMSC_CMSE(pmrmsc) \
489 ((pmrmsc >> PMRMSC_CMSE_SHIFT) & PMRMSC_CMSE_MASK)
490 #define NVME_PMRMSC_CBA(pmrmsc) \
491 ((pmrmsc >> PMRMSC_CBA_SHIFT) & PMRMSC_CBA_MASK)
493 #define NVME_PMRMSC_SET_CMSE(pmrmsc, val) \
494 (pmrmsc |= (uint64_t)(val & PMRMSC_CMSE_MASK) << PMRMSC_CMSE_SHIFT)
495 #define NVME_PMRMSC_SET_CBA(pmrmsc, val) \
496 (pmrmsc |= (uint64_t)(val & PMRMSC_CBA_MASK) << PMRMSC_CBA_SHIFT)
/* SGL descriptor types (upper nibble of the descriptor type byte). */
enum NvmeSglDescriptorType {
    NVME_SGL_DESCR_TYPE_DATA_BLOCK       = 0x0,
    NVME_SGL_DESCR_TYPE_BIT_BUCKET       = 0x1,
    NVME_SGL_DESCR_TYPE_SEGMENT          = 0x2,
    NVME_SGL_DESCR_TYPE_LAST_SEGMENT     = 0x3,
    NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK = 0x4,

    NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC  = 0xf,
};
508 enum NvmeSglDescriptorSubtype
{
509 NVME_SGL_DESCR_SUBTYPE_ADDRESS
= 0x0,
512 typedef struct QEMU_PACKED NvmeSglDescriptor
{
519 #define NVME_SGL_TYPE(type) ((type >> 4) & 0xf)
520 #define NVME_SGL_SUBTYPE(type) (type & 0xf)
522 typedef union NvmeCmdDptr
{
528 NvmeSglDescriptor sgl
;
533 NVME_PSDT_SGL_MPTR_CONTIGUOUS
= 0x1,
534 NVME_PSDT_SGL_MPTR_SGL
= 0x2,
537 typedef struct QEMU_PACKED NvmeCmd
{
553 #define NVME_CMD_FLAGS_FUSE(flags) (flags & 0x3)
554 #define NVME_CMD_FLAGS_PSDT(flags) ((flags >> 6) & 0x3)
/* Admin command set opcodes. */
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ     = 0x00,
    NVME_ADM_CMD_CREATE_SQ     = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE  = 0x02,
    NVME_ADM_CMD_DELETE_CQ     = 0x04,
    NVME_ADM_CMD_CREATE_CQ     = 0x05,
    NVME_ADM_CMD_IDENTIFY      = 0x06,
    NVME_ADM_CMD_ABORT         = 0x08,
    NVME_ADM_CMD_SET_FEATURES  = 0x09,
    NVME_ADM_CMD_GET_FEATURES  = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ  = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW   = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW   = 0x11,
    NVME_ADM_CMD_FORMAT_NVM    = 0x80,
    NVME_ADM_CMD_SECURITY_SEND = 0x81,
    NVME_ADM_CMD_SECURITY_RECV = 0x82,
};
574 enum NvmeIoCommands
{
575 NVME_CMD_FLUSH
= 0x00,
576 NVME_CMD_WRITE
= 0x01,
577 NVME_CMD_READ
= 0x02,
578 NVME_CMD_WRITE_UNCOR
= 0x04,
579 NVME_CMD_COMPARE
= 0x05,
580 NVME_CMD_WRITE_ZEROES
= 0x08,
582 NVME_CMD_COPY
= 0x19,
583 NVME_CMD_ZONE_MGMT_SEND
= 0x79,
584 NVME_CMD_ZONE_MGMT_RECV
= 0x7a,
585 NVME_CMD_ZONE_APPEND
= 0x7d,
588 typedef struct QEMU_PACKED NvmeDeleteQ
{
598 typedef struct QEMU_PACKED NvmeCreateCq
{
612 #define NVME_CQ_FLAGS_PC(cq_flags) (cq_flags & 0x1)
613 #define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)
620 typedef struct QEMU_PACKED NvmeCreateSq
{
634 #define NVME_SQ_FLAGS_PC(sq_flags) (sq_flags & 0x1)
635 #define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)
640 NVME_SQ_PRIO_URGENT
= 0,
641 NVME_SQ_PRIO_HIGH
= 1,
642 NVME_SQ_PRIO_NORMAL
= 2,
643 NVME_SQ_PRIO_LOW
= 3,
646 typedef struct QEMU_PACKED NvmeIdentify
{
663 typedef struct QEMU_PACKED NvmeRwCmd
{
681 NVME_RW_LR
= 1 << 15,
682 NVME_RW_FUA
= 1 << 14,
683 NVME_RW_DSM_FREQ_UNSPEC
= 0,
684 NVME_RW_DSM_FREQ_TYPICAL
= 1,
685 NVME_RW_DSM_FREQ_RARE
= 2,
686 NVME_RW_DSM_FREQ_READS
= 3,
687 NVME_RW_DSM_FREQ_WRITES
= 4,
688 NVME_RW_DSM_FREQ_RW
= 5,
689 NVME_RW_DSM_FREQ_ONCE
= 6,
690 NVME_RW_DSM_FREQ_PREFETCH
= 7,
691 NVME_RW_DSM_FREQ_TEMP
= 8,
692 NVME_RW_DSM_LATENCY_NONE
= 0 << 4,
693 NVME_RW_DSM_LATENCY_IDLE
= 1 << 4,
694 NVME_RW_DSM_LATENCY_NORM
= 2 << 4,
695 NVME_RW_DSM_LATENCY_LOW
= 3 << 4,
696 NVME_RW_DSM_SEQ_REQ
= 1 << 6,
697 NVME_RW_DSM_COMPRESSED
= 1 << 7,
698 NVME_RW_PRINFO_PRACT
= 1 << 13,
699 NVME_RW_PRINFO_PRCHK_GUARD
= 1 << 12,
700 NVME_RW_PRINFO_PRCHK_APP
= 1 << 11,
701 NVME_RW_PRINFO_PRCHK_REF
= 1 << 10,
704 typedef struct QEMU_PACKED NvmeDsmCmd
{
717 NVME_DSMGMT_IDR
= 1 << 0,
718 NVME_DSMGMT_IDW
= 1 << 1,
719 NVME_DSMGMT_AD
= 1 << 2,
722 typedef struct QEMU_PACKED NvmeDsmRange
{
729 NVME_COPY_FORMAT_0
= 0x0,
732 typedef struct QEMU_PACKED NvmeCopyCmd
{
749 typedef struct QEMU_PACKED NvmeCopySourceRange
{
757 } NvmeCopySourceRange
;
/* Asynchronous Event Request: event types plus the per-type event
 * information codes reported in the AER completion.  Note the INFO_*
 * values are namespaced by event type, so they intentionally overlap. */
enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR                   = 0,
    NVME_AER_TYPE_SMART                   = 1,
    NVME_AER_TYPE_IO_SPECIFIC             = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC         = 7,
    NVME_AER_INFO_ERR_INVALID_DB_REGISTER = 0,
    NVME_AER_INFO_ERR_INVALID_DB_VALUE    = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL           = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR   = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR  = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR     = 5,
    NVME_AER_INFO_SMART_RELIABILITY       = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH       = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH      = 2,
};
775 typedef struct QEMU_PACKED NvmeAerResult
{
782 typedef struct QEMU_PACKED NvmeZonedResult
{
786 typedef struct QEMU_PACKED NvmeCqe
{
795 enum NvmeStatusCodes
{
796 NVME_SUCCESS
= 0x0000,
797 NVME_INVALID_OPCODE
= 0x0001,
798 NVME_INVALID_FIELD
= 0x0002,
799 NVME_CID_CONFLICT
= 0x0003,
800 NVME_DATA_TRAS_ERROR
= 0x0004,
801 NVME_POWER_LOSS_ABORT
= 0x0005,
802 NVME_INTERNAL_DEV_ERROR
= 0x0006,
803 NVME_CMD_ABORT_REQ
= 0x0007,
804 NVME_CMD_ABORT_SQ_DEL
= 0x0008,
805 NVME_CMD_ABORT_FAILED_FUSE
= 0x0009,
806 NVME_CMD_ABORT_MISSING_FUSE
= 0x000a,
807 NVME_INVALID_NSID
= 0x000b,
808 NVME_CMD_SEQ_ERROR
= 0x000c,
809 NVME_INVALID_SGL_SEG_DESCR
= 0x000d,
810 NVME_INVALID_NUM_SGL_DESCRS
= 0x000e,
811 NVME_DATA_SGL_LEN_INVALID
= 0x000f,
812 NVME_MD_SGL_LEN_INVALID
= 0x0010,
813 NVME_SGL_DESCR_TYPE_INVALID
= 0x0011,
814 NVME_INVALID_USE_OF_CMB
= 0x0012,
815 NVME_INVALID_PRP_OFFSET
= 0x0013,
816 NVME_CMD_SET_CMB_REJECTED
= 0x002b,
817 NVME_INVALID_CMD_SET
= 0x002c,
818 NVME_LBA_RANGE
= 0x0080,
819 NVME_CAP_EXCEEDED
= 0x0081,
820 NVME_NS_NOT_READY
= 0x0082,
821 NVME_NS_RESV_CONFLICT
= 0x0083,
822 NVME_INVALID_CQID
= 0x0100,
823 NVME_INVALID_QID
= 0x0101,
824 NVME_MAX_QSIZE_EXCEEDED
= 0x0102,
825 NVME_ACL_EXCEEDED
= 0x0103,
826 NVME_RESERVED
= 0x0104,
827 NVME_AER_LIMIT_EXCEEDED
= 0x0105,
828 NVME_INVALID_FW_SLOT
= 0x0106,
829 NVME_INVALID_FW_IMAGE
= 0x0107,
830 NVME_INVALID_IRQ_VECTOR
= 0x0108,
831 NVME_INVALID_LOG_ID
= 0x0109,
832 NVME_INVALID_FORMAT
= 0x010a,
833 NVME_FW_REQ_RESET
= 0x010b,
834 NVME_INVALID_QUEUE_DEL
= 0x010c,
835 NVME_FID_NOT_SAVEABLE
= 0x010d,
836 NVME_FEAT_NOT_CHANGEABLE
= 0x010e,
837 NVME_FEAT_NOT_NS_SPEC
= 0x010f,
838 NVME_FW_REQ_SUSYSTEM_RESET
= 0x0110,
839 NVME_CONFLICTING_ATTRS
= 0x0180,
840 NVME_INVALID_PROT_INFO
= 0x0181,
841 NVME_WRITE_TO_RO
= 0x0182,
842 NVME_CMD_SIZE_LIMIT
= 0x0183,
843 NVME_ZONE_BOUNDARY_ERROR
= 0x01b8,
844 NVME_ZONE_FULL
= 0x01b9,
845 NVME_ZONE_READ_ONLY
= 0x01ba,
846 NVME_ZONE_OFFLINE
= 0x01bb,
847 NVME_ZONE_INVALID_WRITE
= 0x01bc,
848 NVME_ZONE_TOO_MANY_ACTIVE
= 0x01bd,
849 NVME_ZONE_TOO_MANY_OPEN
= 0x01be,
850 NVME_ZONE_INVAL_TRANSITION
= 0x01bf,
851 NVME_WRITE_FAULT
= 0x0280,
852 NVME_UNRECOVERED_READ
= 0x0281,
853 NVME_E2E_GUARD_ERROR
= 0x0282,
854 NVME_E2E_APP_ERROR
= 0x0283,
855 NVME_E2E_REF_ERROR
= 0x0284,
856 NVME_CMP_FAILURE
= 0x0285,
857 NVME_ACCESS_DENIED
= 0x0286,
861 NVME_NO_COMPLETE
= 0xffff,
864 typedef struct QEMU_PACKED NvmeFwSlotInfoLog
{
866 uint8_t reserved1
[7];
874 uint8_t reserved2
[448];
877 typedef struct QEMU_PACKED NvmeErrorLog
{
878 uint64_t error_count
;
881 uint16_t status_field
;
882 uint16_t param_error_location
;
889 typedef struct QEMU_PACKED NvmeSmartLog
{
890 uint8_t critical_warning
;
891 uint16_t temperature
;
892 uint8_t available_spare
;
893 uint8_t available_spare_threshold
;
894 uint8_t percentage_used
;
895 uint8_t reserved1
[26];
896 uint64_t data_units_read
[2];
897 uint64_t data_units_written
[2];
898 uint64_t host_read_commands
[2];
899 uint64_t host_write_commands
[2];
900 uint64_t controller_busy_time
[2];
901 uint64_t power_cycles
[2];
902 uint64_t power_on_hours
[2];
903 uint64_t unsafe_shutdowns
[2];
904 uint64_t media_errors
[2];
905 uint64_t number_of_error_log_entries
[2];
906 uint8_t reserved2
[320];
909 #define NVME_SMART_WARN_MAX 6
911 NVME_SMART_SPARE
= 1 << 0,
912 NVME_SMART_TEMPERATURE
= 1 << 1,
913 NVME_SMART_RELIABILITY
= 1 << 2,
914 NVME_SMART_MEDIA_READ_ONLY
= 1 << 3,
915 NVME_SMART_FAILED_VOLATILE_MEDIA
= 1 << 4,
916 NVME_SMART_PMR_UNRELIABLE
= 1 << 5,
919 typedef struct NvmeEffectsLog
{
926 NVME_CMD_EFF_CSUPP
= 1 << 0,
927 NVME_CMD_EFF_LBCC
= 1 << 1,
928 NVME_CMD_EFF_NCC
= 1 << 2,
929 NVME_CMD_EFF_NIC
= 1 << 3,
930 NVME_CMD_EFF_CCC
= 1 << 4,
931 NVME_CMD_EFF_CSE_MASK
= 3 << 16,
932 NVME_CMD_EFF_UUID_SEL
= 1 << 19,
935 enum NvmeLogIdentifier
{
936 NVME_LOG_ERROR_INFO
= 0x01,
937 NVME_LOG_SMART_INFO
= 0x02,
938 NVME_LOG_FW_SLOT_INFO
= 0x03,
939 NVME_LOG_CMD_EFFECTS
= 0x05,
942 typedef struct QEMU_PACKED NvmePSD
{
954 #define NVME_IDENTIFY_DATA_SIZE 4096
957 NVME_ID_CNS_NS
= 0x00,
958 NVME_ID_CNS_CTRL
= 0x01,
959 NVME_ID_CNS_NS_ACTIVE_LIST
= 0x02,
960 NVME_ID_CNS_NS_DESCR_LIST
= 0x03,
961 NVME_ID_CNS_CS_NS
= 0x05,
962 NVME_ID_CNS_CS_CTRL
= 0x06,
963 NVME_ID_CNS_CS_NS_ACTIVE_LIST
= 0x07,
964 NVME_ID_CNS_NS_PRESENT_LIST
= 0x10,
965 NVME_ID_CNS_NS_PRESENT
= 0x11,
966 NVME_ID_CNS_CS_NS_PRESENT_LIST
= 0x1a,
967 NVME_ID_CNS_CS_NS_PRESENT
= 0x1b,
968 NVME_ID_CNS_IO_COMMAND_SET
= 0x1c,
971 typedef struct QEMU_PACKED NvmeIdCtrl
{
990 uint8_t rsvd128
[128];
1005 uint8_t tnvmcap
[16];
1006 uint8_t unvmcap
[16];
1016 uint8_t rsvd332
[180];
1032 uint8_t rsvd540
[228];
1033 uint8_t subnqn
[256];
1034 uint8_t rsvd1024
[1024];
1039 typedef struct NvmeIdCtrlZoned
{
1041 uint8_t rsvd1
[4095];
1044 enum NvmeIdCtrlOacs
{
1045 NVME_OACS_SECURITY
= 1 << 0,
1046 NVME_OACS_FORMAT
= 1 << 1,
1047 NVME_OACS_FW
= 1 << 2,
1050 enum NvmeIdCtrlOncs
{
1051 NVME_ONCS_COMPARE
= 1 << 0,
1052 NVME_ONCS_WRITE_UNCORR
= 1 << 1,
1053 NVME_ONCS_DSM
= 1 << 2,
1054 NVME_ONCS_WRITE_ZEROES
= 1 << 3,
1055 NVME_ONCS_FEATURES
= 1 << 4,
1056 NVME_ONCS_RESRVATIONS
= 1 << 5,
1057 NVME_ONCS_TIMESTAMP
= 1 << 6,
1058 NVME_ONCS_COPY
= 1 << 8,
1061 enum NvmeIdCtrlOcfs
{
1062 NVME_OCFS_COPY_FORMAT_0
= 1 << 0,
1065 enum NvmeIdCtrlFrmw
{
1066 NVME_FRMW_SLOT1_RO
= 1 << 0,
1069 enum NvmeIdCtrlLpa
{
1070 NVME_LPA_NS_SMART
= 1 << 0,
1071 NVME_LPA_CSE
= 1 << 1,
1072 NVME_LPA_EXTENDED
= 1 << 2,
1075 enum NvmeIdCtrlCmic
{
1076 NVME_CMIC_MULTI_CTRL
= 1 << 1,
1079 #define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
1080 #define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
1081 #define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
1082 #define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)
1084 #define NVME_CTRL_SGLS_SUPPORT_MASK (0x3 << 0)
1085 #define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN (0x1 << 0)
1086 #define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 << 1)
1087 #define NVME_CTRL_SGLS_KEYED (0x1 << 2)
1088 #define NVME_CTRL_SGLS_BITBUCKET (0x1 << 16)
1089 #define NVME_CTRL_SGLS_MPTR_CONTIGUOUS (0x1 << 17)
1090 #define NVME_CTRL_SGLS_EXCESS_LENGTH (0x1 << 18)
1091 #define NVME_CTRL_SGLS_MPTR_SGL (0x1 << 19)
1092 #define NVME_CTRL_SGLS_ADDR_OFFSET (0x1 << 20)
1094 #define NVME_ARB_AB(arb) (arb & 0x7)
1095 #define NVME_ARB_AB_NOLIMIT 0x7
1096 #define NVME_ARB_LPW(arb) ((arb >> 8) & 0xff)
1097 #define NVME_ARB_MPW(arb) ((arb >> 16) & 0xff)
1098 #define NVME_ARB_HPW(arb) ((arb >> 24) & 0xff)
1100 #define NVME_INTC_THR(intc) (intc & 0xff)
1101 #define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)
1103 #define NVME_INTVC_NOCOALESCING (0x1 << 16)
1105 #define NVME_TEMP_THSEL(temp) ((temp >> 20) & 0x3)
1106 #define NVME_TEMP_THSEL_OVER 0x0
1107 #define NVME_TEMP_THSEL_UNDER 0x1
1109 #define NVME_TEMP_TMPSEL(temp) ((temp >> 16) & 0xf)
1110 #define NVME_TEMP_TMPSEL_COMPOSITE 0x0
1112 #define NVME_TEMP_TMPTH(temp) (temp & 0xffff)
1114 #define NVME_AEC_SMART(aec) (aec & 0xff)
1115 #define NVME_AEC_NS_ATTR(aec) ((aec >> 8) & 0x1)
1116 #define NVME_AEC_FW_ACTIVATION(aec) ((aec >> 9) & 0x1)
1118 #define NVME_ERR_REC_TLER(err_rec) (err_rec & 0xffff)
1119 #define NVME_ERR_REC_DULBE(err_rec) (err_rec & 0x10000)
/* Feature identifiers for Get/Set Features. */
enum NvmeFeatureIds {
    NVME_ARBITRATION              = 0x1,
    NVME_POWER_MANAGEMENT         = 0x2,
    NVME_LBA_RANGE_TYPE           = 0x3,
    NVME_TEMPERATURE_THRESHOLD    = 0x4,
    NVME_ERROR_RECOVERY           = 0x5,
    NVME_VOLATILE_WRITE_CACHE     = 0x6,
    NVME_NUMBER_OF_QUEUES         = 0x7,
    NVME_INTERRUPT_COALESCING     = 0x8,
    NVME_INTERRUPT_VECTOR_CONF    = 0x9,
    NVME_WRITE_ATOMICITY          = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF  = 0xb,
    NVME_TIMESTAMP                = 0xe,
    NVME_COMMAND_SET_PROFILE      = 0x19,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
    NVME_FID_MAX                  = 0x100,  /* one past the largest valid FID */
};
1139 typedef enum NvmeFeatureCap
{
1140 NVME_FEAT_CAP_SAVE
= 1 << 0,
1141 NVME_FEAT_CAP_NS
= 1 << 1,
1142 NVME_FEAT_CAP_CHANGE
= 1 << 2,
/* SEL field of the Get Features command: which value of the feature to
 * return. */
typedef enum NvmeGetFeatureSelect {
    NVME_GETFEAT_SELECT_CURRENT = 0x0,
    NVME_GETFEAT_SELECT_DEFAULT = 0x1,
    NVME_GETFEAT_SELECT_SAVED   = 0x2,
    NVME_GETFEAT_SELECT_CAP     = 0x3,
} NvmeGetFeatureSelect;
1152 #define NVME_GETSETFEAT_FID_MASK 0xff
1153 #define NVME_GETSETFEAT_FID(dw10) (dw10 & NVME_GETSETFEAT_FID_MASK)
1155 #define NVME_GETFEAT_SELECT_SHIFT 8
1156 #define NVME_GETFEAT_SELECT_MASK 0x7
1157 #define NVME_GETFEAT_SELECT(dw10) \
1158 ((dw10 >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)
1160 #define NVME_SETFEAT_SAVE_SHIFT 31
1161 #define NVME_SETFEAT_SAVE_MASK 0x1
1162 #define NVME_SETFEAT_SAVE(dw10) \
1163 ((dw10 >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
1165 typedef struct QEMU_PACKED NvmeRangeType
{
1175 typedef struct QEMU_PACKED NvmeLBAF
{
1181 typedef struct QEMU_PACKED NvmeLBAFE
{
1187 #define NVME_NSID_BROADCAST 0xffffffff
1189 typedef struct QEMU_PACKED NvmeIdNs
{
1223 uint8_t rsvd192
[192];
1227 typedef struct QEMU_PACKED NvmeIdNsDescr
{
1233 enum NvmeNsIdentifierLength
{
1234 NVME_NIDL_EUI64
= 8,
1235 NVME_NIDL_NGUID
= 16,
1236 NVME_NIDL_UUID
= 16,
1240 enum NvmeNsIdentifierType
{
1241 NVME_NIDT_EUI64
= 0x01,
1242 NVME_NIDT_NGUID
= 0x02,
1243 NVME_NIDT_UUID
= 0x03,
1244 NVME_NIDT_CSI
= 0x04,
1248 NVME_NMIC_NS_SHARED
= 1 << 0,
1252 NVME_CSI_NVM
= 0x00,
1253 NVME_CSI_ZONED
= 0x02,
1256 #define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))
1258 typedef struct QEMU_PACKED NvmeIdNsZoned
{
1265 uint8_t rsvd20
[2796];
1266 NvmeLBAFE lbafe
[16];
1267 uint8_t rsvd3072
[768];
1271 /*Deallocate Logical Block Features*/
1272 #define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat) ((dlfeat) & 0x10)
1273 #define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)
1275 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ((dlfeat) & 0x7)
1276 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
1277 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES 1
1278 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES 2
1281 #define NVME_ID_NS_NSFEAT_THIN(nsfeat) ((nsfeat & 0x1))
1282 #define NVME_ID_NS_NSFEAT_DULBE(nsfeat) ((nsfeat >> 2) & 0x1)
1283 #define NVME_ID_NS_FLBAS_EXTENDED(flbas) ((flbas >> 4) & 0x1)
1284 #define NVME_ID_NS_FLBAS_INDEX(flbas) ((flbas & 0xf))
1285 #define NVME_ID_NS_MC_SEPARATE(mc) ((mc >> 1) & 0x1)
1286 #define NVME_ID_NS_MC_EXTENDED(mc) ((mc & 0x1))
1287 #define NVME_ID_NS_DPC_LAST_EIGHT(dpc) ((dpc >> 4) & 0x1)
1288 #define NVME_ID_NS_DPC_FIRST_EIGHT(dpc) ((dpc >> 3) & 0x1)
1289 #define NVME_ID_NS_DPC_TYPE_3(dpc) ((dpc >> 2) & 0x1)
1290 #define NVME_ID_NS_DPC_TYPE_2(dpc) ((dpc >> 1) & 0x1)
1291 #define NVME_ID_NS_DPC_TYPE_1(dpc) ((dpc & 0x1))
1292 #define NVME_ID_NS_DPC_TYPE_MASK 0x7
1299 DPS_TYPE_MASK
= 0x7,
1300 DPS_FIRST_EIGHT
= 8,
1304 NVME_ZA_FINISHED_BY_CTLR
= 1 << 0,
1305 NVME_ZA_FINISH_RECOMMENDED
= 1 << 1,
1306 NVME_ZA_RESET_RECOMMENDED
= 1 << 2,
1307 NVME_ZA_ZD_EXT_VALID
= 1 << 7,
1310 typedef struct QEMU_PACKED NvmeZoneReportHeader
{
1313 } NvmeZoneReportHeader
;
1315 enum NvmeZoneReceiveAction
{
1316 NVME_ZONE_REPORT
= 0,
1317 NVME_ZONE_REPORT_EXTENDED
= 1,
1320 enum NvmeZoneReportType
{
1321 NVME_ZONE_REPORT_ALL
= 0,
1322 NVME_ZONE_REPORT_EMPTY
= 1,
1323 NVME_ZONE_REPORT_IMPLICITLY_OPEN
= 2,
1324 NVME_ZONE_REPORT_EXPLICITLY_OPEN
= 3,
1325 NVME_ZONE_REPORT_CLOSED
= 4,
1326 NVME_ZONE_REPORT_FULL
= 5,
1327 NVME_ZONE_REPORT_READ_ONLY
= 6,
1328 NVME_ZONE_REPORT_OFFLINE
= 7,
1332 NVME_ZONE_TYPE_RESERVED
= 0x00,
1333 NVME_ZONE_TYPE_SEQ_WRITE
= 0x02,
1336 enum NvmeZoneSendAction
{
1337 NVME_ZONE_ACTION_RSD
= 0x00,
1338 NVME_ZONE_ACTION_CLOSE
= 0x01,
1339 NVME_ZONE_ACTION_FINISH
= 0x02,
1340 NVME_ZONE_ACTION_OPEN
= 0x03,
1341 NVME_ZONE_ACTION_RESET
= 0x04,
1342 NVME_ZONE_ACTION_OFFLINE
= 0x05,
1343 NVME_ZONE_ACTION_SET_ZD_EXT
= 0x10,
1346 typedef struct QEMU_PACKED NvmeZoneDescr
{
/* Zone states as encoded in the Zone Descriptor (Zoned Namespace command
 * set). */
typedef enum NvmeZoneState {
    NVME_ZONE_STATE_RESERVED        = 0x00,
    NVME_ZONE_STATE_EMPTY           = 0x01,
    NVME_ZONE_STATE_IMPLICITLY_OPEN = 0x02,
    NVME_ZONE_STATE_EXPLICITLY_OPEN = 0x03,
    NVME_ZONE_STATE_CLOSED          = 0x04,
    NVME_ZONE_STATE_READ_ONLY       = 0x0D,
    NVME_ZONE_STATE_FULL            = 0x0E,
    NVME_ZONE_STATE_OFFLINE         = 0x0F,
} NvmeZoneState;
1368 static inline void _nvme_check_size(void)
1370 QEMU_BUILD_BUG_ON(sizeof(NvmeBar
) != 4096);
1371 QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult
) != 4);
1372 QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult
) != 8);
1373 QEMU_BUILD_BUG_ON(sizeof(NvmeCqe
) != 16);
1374 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange
) != 16);
1375 QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRange
) != 32);
1376 QEMU_BUILD_BUG_ON(sizeof(NvmeCmd
) != 64);
1377 QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ
) != 64);
1378 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq
) != 64);
1379 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq
) != 64);
1380 QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify
) != 64);
1381 QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd
) != 64);
1382 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd
) != 64);
1383 QEMU_BUILD_BUG_ON(sizeof(NvmeCopyCmd
) != 64);
1384 QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType
) != 64);
1385 QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog
) != 64);
1386 QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog
) != 512);
1387 QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog
) != 512);
1388 QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog
) != 4096);
1389 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl
) != 4096);
1390 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned
) != 4096);
1391 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF
) != 4);
1392 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE
) != 16);
1393 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs
) != 4096);
1394 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned
) != 4096);
1395 QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor
) != 16);
1396 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr
) != 4);
1397 QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr
) != 64);