4 typedef struct QEMU_PACKED NvmeBar
{
41 CAP_MPSMIN_SHIFT
= 48,
42 CAP_MPSMAX_SHIFT
= 52,
48 CAP_MQES_MASK
= 0xffff,
55 CAP_MPSMIN_MASK
= 0xf,
56 CAP_MPSMAX_MASK
= 0xf,
/* CAP register field extractors: shift the 64-bit CAP value and mask. */
#define NVME_CAP_MQES(cap)   (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)    (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)    (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)     (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap)  (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap)  (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)    (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap)   (((cap) >> CAP_PMRS_SHIFT)   & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap)   (((cap) >> CAP_CMBS_SHIFT)   & CAP_CMBS_MASK)
73 #define NVME_CAP_SET_MQES(cap, val) (cap |= (uint64_t)(val & CAP_MQES_MASK) \
75 #define NVME_CAP_SET_CQR(cap, val) (cap |= (uint64_t)(val & CAP_CQR_MASK) \
77 #define NVME_CAP_SET_AMS(cap, val) (cap |= (uint64_t)(val & CAP_AMS_MASK) \
79 #define NVME_CAP_SET_TO(cap, val) (cap |= (uint64_t)(val & CAP_TO_MASK) \
81 #define NVME_CAP_SET_DSTRD(cap, val) (cap |= (uint64_t)(val & CAP_DSTRD_MASK) \
83 #define NVME_CAP_SET_NSSRS(cap, val) (cap |= (uint64_t)(val & CAP_NSSRS_MASK) \
85 #define NVME_CAP_SET_CSS(cap, val) (cap |= (uint64_t)(val & CAP_CSS_MASK) \
87 #define NVME_CAP_SET_MPSMIN(cap, val) (cap |= (uint64_t)(val & CAP_MPSMIN_MASK)\
89 #define NVME_CAP_SET_MPSMAX(cap, val) (cap |= (uint64_t)(val & CAP_MPSMAX_MASK)\
91 #define NVME_CAP_SET_PMRS(cap, val) (cap |= (uint64_t)(val & CAP_PMRS_MASK) \
93 #define NVME_CAP_SET_CMBS(cap, val) (cap |= (uint64_t)(val & CAP_CMBS_MASK) \
97 NVME_CAP_CSS_NVM
= 1 << 0,
98 NVME_CAP_CSS_CSI_SUPP
= 1 << 6,
99 NVME_CAP_CSS_ADMIN_ONLY
= 1 << 7,
108 CC_IOSQES_SHIFT
= 16,
109 CC_IOCQES_SHIFT
= 20,
118 CC_IOSQES_MASK
= 0xf,
119 CC_IOCQES_MASK
= 0xf,
/*
 * CC (Controller Configuration) register field extractors.
 * Arguments are parenthesized so expression arguments expand safely.
 */
#define NVME_CC_EN(cc)     (((cc) >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    (((cc) >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    (((cc) >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    (((cc) >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    (((cc) >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) (((cc) >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) (((cc) >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
131 NVME_CC_CSS_NVM
= 0x0,
132 NVME_CC_CSS_CSI
= 0x6,
133 NVME_CC_CSS_ADMIN_ONLY
= 0x7,
136 #define NVME_SET_CC_EN(cc, val) \
137 (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
138 #define NVME_SET_CC_CSS(cc, val) \
139 (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
140 #define NVME_SET_CC_MPS(cc, val) \
141 (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
142 #define NVME_SET_CC_AMS(cc, val) \
143 (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
144 #define NVME_SET_CC_SHN(cc, val) \
145 (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
146 #define NVME_SET_CC_IOSQES(cc, val) \
147 (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
148 #define NVME_SET_CC_IOCQES(cc, val) \
149 (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)
155 CSTS_NSSRO_SHIFT
= 4,
161 CSTS_SHST_MASK
= 0x3,
162 CSTS_NSSRO_MASK
= 0x1,
166 NVME_CSTS_READY
= 1 << CSTS_RDY_SHIFT
,
167 NVME_CSTS_FAILED
= 1 << CSTS_CFS_SHIFT
,
168 NVME_CSTS_SHST_NORMAL
= 0 << CSTS_SHST_SHIFT
,
169 NVME_CSTS_SHST_PROGRESS
= 1 << CSTS_SHST_SHIFT
,
170 NVME_CSTS_SHST_COMPLETE
= 2 << CSTS_SHST_SHIFT
,
171 NVME_CSTS_NSSRO
= 1 << CSTS_NSSRO_SHIFT
,
/* CSTS (Controller Status) register field extractors. */
#define NVME_CSTS_RDY(csts)   (((csts) >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   (((csts) >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  (((csts) >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) (((csts) >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
185 AQA_ASQS_MASK
= 0xfff,
186 AQA_ACQS_MASK
= 0xfff,
/* AQA (Admin Queue Attributes): admin SQ/CQ sizes as 0's-based values. */
#define NVME_AQA_ASQS(aqa) (((aqa) >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) (((aqa) >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
192 enum NvmeCmblocShift
{
193 CMBLOC_BIR_SHIFT
= 0,
194 CMBLOC_CQMMS_SHIFT
= 3,
195 CMBLOC_CQPDS_SHIFT
= 4,
196 CMBLOC_CDPMLS_SHIFT
= 5,
197 CMBLOC_CDPCILS_SHIFT
= 6,
198 CMBLOC_CDMMMS_SHIFT
= 7,
199 CMBLOC_CQDA_SHIFT
= 8,
200 CMBLOC_OFST_SHIFT
= 12,
203 enum NvmeCmblocMask
{
204 CMBLOC_BIR_MASK
= 0x7,
205 CMBLOC_CQMMS_MASK
= 0x1,
206 CMBLOC_CQPDS_MASK
= 0x1,
207 CMBLOC_CDPMLS_MASK
= 0x1,
208 CMBLOC_CDPCILS_MASK
= 0x1,
209 CMBLOC_CDMMMS_MASK
= 0x1,
210 CMBLOC_CQDA_MASK
= 0x1,
211 CMBLOC_OFST_MASK
= 0xfffff,
/* CMBLOC (Controller Memory Buffer Location) register field extractors. */
#define NVME_CMBLOC_BIR(cmbloc) \
    (((cmbloc) >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
#define NVME_CMBLOC_CQMMS(cmbloc) \
    (((cmbloc) >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
#define NVME_CMBLOC_CQPDS(cmbloc) \
    (((cmbloc) >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
#define NVME_CMBLOC_CDPMLS(cmbloc) \
    (((cmbloc) >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
#define NVME_CMBLOC_CDPCILS(cmbloc) \
    (((cmbloc) >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
#define NVME_CMBLOC_CDMMMS(cmbloc) \
    (((cmbloc) >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
#define NVME_CMBLOC_CQDA(cmbloc) \
    (((cmbloc) >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
#define NVME_CMBLOC_OFST(cmbloc) \
    (((cmbloc) >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)
/*
 * CMBLOC field setters: mask the value, widen to 64 bits before shifting,
 * and OR it into the register image.
 */
#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_CQMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
#define NVME_CMBLOC_SET_CQPDS(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
#define NVME_CMBLOC_SET_CDPMLS(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
#define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
#define NVME_CMBLOC_SET_CDMMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
#define NVME_CMBLOC_SET_CQDA(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)((val) & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
/*
 * Fixed from the original, which had two defects:
 *  - a space between the macro name and '(' made this an OBJECT-like macro
 *    whose expansion started with "(cmbmsc, val)", so it could never be
 *    used as the function-like macro it was meant to be;
 *  - the value was masked with CMBLOC_OFST_MASK instead of CMBMSC_CRE_MASK.
 * NOTE(review): NVME_CMBMSC_SET_CRE below is the correctly spelled
 * equivalent; this misspelled name is kept only for backward compatibility
 * and should eventually be removed once no callers reference it.
 */
#define NVME_CMBMSMC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)((val) & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
251 enum NvmeCmbszShift
{
254 CMBSZ_LISTS_SHIFT
= 2,
262 CMBSZ_SQS_MASK
= 0x1,
263 CMBSZ_CQS_MASK
= 0x1,
264 CMBSZ_LISTS_MASK
= 0x1,
265 CMBSZ_RDS_MASK
= 0x1,
266 CMBSZ_WDS_MASK
= 0x1,
267 CMBSZ_SZU_MASK
= 0xf,
268 CMBSZ_SZ_MASK
= 0xfffff,
/* CMBSZ (Controller Memory Buffer Size) register field extractors. */
#define NVME_CMBSZ_SQS(cmbsz)   (((cmbsz) >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)   (((cmbsz) >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) (((cmbsz) >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)   (((cmbsz) >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)   (((cmbsz) >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)   (((cmbsz) >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)    (((cmbsz) >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)

/* CMBSZ field setters. */
#define NVME_CMBSZ_SET_SQS(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val) \
    (cmbsz |= (uint64_t)((val) & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

/*
 * CMB size in bytes: SZ units of 4 KiB * 16^SZU.
 * The shift is done as 1ULL: with a 32-bit int, SZU >= 5 would shift by
 * >= 32 bits, which is undefined behavior.
 */
#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1ULL << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
297 enum NvmeCmbmscShift
{
298 CMBMSC_CRE_SHIFT
= 0,
299 CMBMSC_CMSE_SHIFT
= 1,
300 CMBMSC_CBA_SHIFT
= 12,
303 enum NvmeCmbmscMask
{
304 CMBMSC_CRE_MASK
= 0x1,
305 CMBMSC_CMSE_MASK
= 0x1,
306 CMBMSC_CBA_MASK
= ((1ULL << 52) - 1),
/* CMBMSC (CMB Memory Space Control) register field extractors. */
#define NVME_CMBMSC_CRE(cmbmsc) \
    (((cmbmsc) >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
#define NVME_CMBMSC_CMSE(cmbmsc) \
    (((cmbmsc) >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
#define NVME_CMBMSC_CBA(cmbmsc) \
    (((cmbmsc) >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)

/* CMBMSC field setters. */
#define NVME_CMBMSC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)((val) & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
#define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)((val) & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
#define NVME_CMBMSC_SET_CBA(cmbmsc, val) \
    (cmbmsc |= (uint64_t)((val) & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)
324 enum NvmeCmbstsShift
{
325 CMBSTS_CBAI_SHIFT
= 0,
327 enum NvmeCmbstsMask
{
328 CMBSTS_CBAI_MASK
= 0x1,
/* CMBSTS (CMB Status) register: Controller Base Address Invalid bit. */
#define NVME_CMBSTS_CBAI(cmbsts) \
    (((cmbsts) >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)

#define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
    (cmbsts |= (uint64_t)((val) & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)
337 enum NvmePmrcapShift
{
338 PMRCAP_RDS_SHIFT
= 3,
339 PMRCAP_WDS_SHIFT
= 4,
340 PMRCAP_BIR_SHIFT
= 5,
341 PMRCAP_PMRTU_SHIFT
= 8,
342 PMRCAP_PMRWBM_SHIFT
= 10,
343 PMRCAP_PMRTO_SHIFT
= 16,
344 PMRCAP_CMSS_SHIFT
= 24,
347 enum NvmePmrcapMask
{
348 PMRCAP_RDS_MASK
= 0x1,
349 PMRCAP_WDS_MASK
= 0x1,
350 PMRCAP_BIR_MASK
= 0x7,
351 PMRCAP_PMRTU_MASK
= 0x3,
352 PMRCAP_PMRWBM_MASK
= 0xf,
353 PMRCAP_PMRTO_MASK
= 0xff,
354 PMRCAP_CMSS_MASK
= 0x1,
/* PMRCAP (Persistent Memory Region Capabilities) field extractors. */
#define NVME_PMRCAP_RDS(pmrcap) \
    (((pmrcap) >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap) \
    (((pmrcap) >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap) \
    (((pmrcap) >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap) \
    (((pmrcap) >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    (((pmrcap) >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap) \
    (((pmrcap) >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap) \
    (((pmrcap) >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)

/* PMRCAP field setters. */
#define NVME_PMRCAP_SET_RDS(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
    (pmrcap |= (uint64_t)((val) & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
387 enum NvmePmrctlShift
{
391 enum NvmePmrctlMask
{
392 PMRCTL_EN_MASK
= 0x1,
/* PMRCTL (Persistent Memory Region Control): enable bit. */
#define NVME_PMRCTL_EN(pmrctl) (((pmrctl) >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)((val) & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
400 enum NvmePmrstsShift
{
401 PMRSTS_ERR_SHIFT
= 0,
402 PMRSTS_NRDY_SHIFT
= 8,
403 PMRSTS_HSTS_SHIFT
= 9,
404 PMRSTS_CBAI_SHIFT
= 12,
407 enum NvmePmrstsMask
{
408 PMRSTS_ERR_MASK
= 0xff,
409 PMRSTS_NRDY_MASK
= 0x1,
410 PMRSTS_HSTS_MASK
= 0x7,
411 PMRSTS_CBAI_MASK
= 0x1,
/* PMRSTS (Persistent Memory Region Status) field extractors. */
#define NVME_PMRSTS_ERR(pmrsts) \
    (((pmrsts) >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    (((pmrsts) >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    (((pmrsts) >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    (((pmrsts) >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

/* PMRSTS field setters. */
#define NVME_PMRSTS_SET_ERR(pmrsts, val) \
    (pmrsts |= (uint64_t)((val) & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)((val) & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)((val) & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)((val) & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
432 enum NvmePmrebsShift
{
433 PMREBS_PMRSZU_SHIFT
= 0,
434 PMREBS_RBB_SHIFT
= 4,
435 PMREBS_PMRWBZ_SHIFT
= 8,
438 enum NvmePmrebsMask
{
439 PMREBS_PMRSZU_MASK
= 0xf,
440 PMREBS_RBB_MASK
= 0x1,
441 PMREBS_PMRWBZ_MASK
= 0xffffff,
/* PMREBS (PMR Elasticity Buffer Size) field extractors. */
#define NVME_PMREBS_PMRSZU(pmrebs) \
    (((pmrebs) >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs) \
    (((pmrebs) >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    (((pmrebs) >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

/* PMREBS field setters. */
#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)((val) & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val) \
    (pmrebs |= (uint64_t)((val) & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)((val) & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
458 enum NvmePmrswtpShift
{
459 PMRSWTP_PMRSWTU_SHIFT
= 0,
460 PMRSWTP_PMRSWTV_SHIFT
= 8,
463 enum NvmePmrswtpMask
{
464 PMRSWTP_PMRSWTU_MASK
= 0xf,
465 PMRSWTP_PMRSWTV_MASK
= 0xffffff,
/* PMRSWTP (PMR Sustained Write Throughput) field extractors. */
#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    (((pmrswtp) >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    (((pmrswtp) >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

/* PMRSWTP field setters. */
#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)((val) & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)((val) & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
478 enum NvmePmrmscShift
{
479 PMRMSC_CMSE_SHIFT
= 1,
480 PMRMSC_CBA_SHIFT
= 12,
483 enum NvmePmrmscMask
{
484 PMRMSC_CMSE_MASK
= 0x1,
485 PMRMSC_CBA_MASK
= 0xfffffffffffff,
/* PMRMSC (PMR Memory Space Control) field extractors. */
#define NVME_PMRMSC_CMSE(pmrmsc) \
    (((pmrmsc) >> PMRMSC_CMSE_SHIFT) & PMRMSC_CMSE_MASK)
#define NVME_PMRMSC_CBA(pmrmsc) \
    (((pmrmsc) >> PMRMSC_CBA_SHIFT) & PMRMSC_CBA_MASK)

/* PMRMSC field setters. */
#define NVME_PMRMSC_SET_CMSE(pmrmsc, val) \
    (pmrmsc |= (uint64_t)((val) & PMRMSC_CMSE_MASK) << PMRMSC_CMSE_SHIFT)
#define NVME_PMRMSC_SET_CBA(pmrmsc, val) \
    (pmrmsc |= (uint64_t)((val) & PMRMSC_CBA_MASK) << PMRMSC_CBA_SHIFT)
498 enum NvmeSglDescriptorType
{
499 NVME_SGL_DESCR_TYPE_DATA_BLOCK
= 0x0,
500 NVME_SGL_DESCR_TYPE_BIT_BUCKET
= 0x1,
501 NVME_SGL_DESCR_TYPE_SEGMENT
= 0x2,
502 NVME_SGL_DESCR_TYPE_LAST_SEGMENT
= 0x3,
503 NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK
= 0x4,
505 NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC
= 0xf,
508 enum NvmeSglDescriptorSubtype
{
509 NVME_SGL_DESCR_SUBTYPE_ADDRESS
= 0x0,
512 typedef struct QEMU_PACKED NvmeSglDescriptor
{
/* SGL descriptor type byte: high nibble is the type, low nibble the subtype. */
#define NVME_SGL_TYPE(type)    (((type) >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type) ((type) & 0xf)
522 typedef union NvmeCmdDptr
{
528 NvmeSglDescriptor sgl
;
533 NVME_PSDT_SGL_MPTR_CONTIGUOUS
= 0x1,
534 NVME_PSDT_SGL_MPTR_SGL
= 0x2,
537 typedef struct QEMU_PACKED NvmeCmd
{
/* Command Dword 0 flags: FUSE in bits 1:0, PSDT in bits 7:6. */
#define NVME_CMD_FLAGS_FUSE(flags) ((flags) & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) (((flags) >> 6) & 0x3)
556 enum NvmeAdminCommands
{
557 NVME_ADM_CMD_DELETE_SQ
= 0x00,
558 NVME_ADM_CMD_CREATE_SQ
= 0x01,
559 NVME_ADM_CMD_GET_LOG_PAGE
= 0x02,
560 NVME_ADM_CMD_DELETE_CQ
= 0x04,
561 NVME_ADM_CMD_CREATE_CQ
= 0x05,
562 NVME_ADM_CMD_IDENTIFY
= 0x06,
563 NVME_ADM_CMD_ABORT
= 0x08,
564 NVME_ADM_CMD_SET_FEATURES
= 0x09,
565 NVME_ADM_CMD_GET_FEATURES
= 0x0a,
566 NVME_ADM_CMD_ASYNC_EV_REQ
= 0x0c,
567 NVME_ADM_CMD_ACTIVATE_FW
= 0x10,
568 NVME_ADM_CMD_DOWNLOAD_FW
= 0x11,
569 NVME_ADM_CMD_FORMAT_NVM
= 0x80,
570 NVME_ADM_CMD_SECURITY_SEND
= 0x81,
571 NVME_ADM_CMD_SECURITY_RECV
= 0x82,
574 enum NvmeIoCommands
{
575 NVME_CMD_FLUSH
= 0x00,
576 NVME_CMD_WRITE
= 0x01,
577 NVME_CMD_READ
= 0x02,
578 NVME_CMD_WRITE_UNCOR
= 0x04,
579 NVME_CMD_COMPARE
= 0x05,
580 NVME_CMD_WRITE_ZEROES
= 0x08,
582 NVME_CMD_ZONE_MGMT_SEND
= 0x79,
583 NVME_CMD_ZONE_MGMT_RECV
= 0x7a,
584 NVME_CMD_ZONE_APPEND
= 0x7d,
587 typedef struct QEMU_PACKED NvmeDeleteQ
{
597 typedef struct QEMU_PACKED NvmeCreateCq
{
/* Create CQ flags: Physically Contiguous (bit 0), Interrupts Enabled (bit 1). */
#define NVME_CQ_FLAGS_PC(cq_flags)  ((cq_flags) & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) (((cq_flags) >> 1) & 0x1)
619 typedef struct QEMU_PACKED NvmeCreateSq
{
/* Create SQ flags: Physically Contiguous (bit 0), Queue Priority (bits 2:1). */
#define NVME_SQ_FLAGS_PC(sq_flags)    ((sq_flags) & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) (((sq_flags) >> 1) & 0x3)
639 NVME_SQ_PRIO_URGENT
= 0,
640 NVME_SQ_PRIO_HIGH
= 1,
641 NVME_SQ_PRIO_NORMAL
= 2,
642 NVME_SQ_PRIO_LOW
= 3,
645 typedef struct QEMU_PACKED NvmeIdentify
{
662 typedef struct QEMU_PACKED NvmeRwCmd
{
680 NVME_RW_LR
= 1 << 15,
681 NVME_RW_FUA
= 1 << 14,
682 NVME_RW_DSM_FREQ_UNSPEC
= 0,
683 NVME_RW_DSM_FREQ_TYPICAL
= 1,
684 NVME_RW_DSM_FREQ_RARE
= 2,
685 NVME_RW_DSM_FREQ_READS
= 3,
686 NVME_RW_DSM_FREQ_WRITES
= 4,
687 NVME_RW_DSM_FREQ_RW
= 5,
688 NVME_RW_DSM_FREQ_ONCE
= 6,
689 NVME_RW_DSM_FREQ_PREFETCH
= 7,
690 NVME_RW_DSM_FREQ_TEMP
= 8,
691 NVME_RW_DSM_LATENCY_NONE
= 0 << 4,
692 NVME_RW_DSM_LATENCY_IDLE
= 1 << 4,
693 NVME_RW_DSM_LATENCY_NORM
= 2 << 4,
694 NVME_RW_DSM_LATENCY_LOW
= 3 << 4,
695 NVME_RW_DSM_SEQ_REQ
= 1 << 6,
696 NVME_RW_DSM_COMPRESSED
= 1 << 7,
697 NVME_RW_PRINFO_PRACT
= 1 << 13,
698 NVME_RW_PRINFO_PRCHK_GUARD
= 1 << 12,
699 NVME_RW_PRINFO_PRCHK_APP
= 1 << 11,
700 NVME_RW_PRINFO_PRCHK_REF
= 1 << 10,
703 typedef struct QEMU_PACKED NvmeDsmCmd
{
716 NVME_DSMGMT_IDR
= 1 << 0,
717 NVME_DSMGMT_IDW
= 1 << 1,
718 NVME_DSMGMT_AD
= 1 << 2,
721 typedef struct QEMU_PACKED NvmeDsmRange
{
727 enum NvmeAsyncEventRequest
{
728 NVME_AER_TYPE_ERROR
= 0,
729 NVME_AER_TYPE_SMART
= 1,
730 NVME_AER_TYPE_IO_SPECIFIC
= 6,
731 NVME_AER_TYPE_VENDOR_SPECIFIC
= 7,
732 NVME_AER_INFO_ERR_INVALID_DB_REGISTER
= 0,
733 NVME_AER_INFO_ERR_INVALID_DB_VALUE
= 1,
734 NVME_AER_INFO_ERR_DIAG_FAIL
= 2,
735 NVME_AER_INFO_ERR_PERS_INTERNAL_ERR
= 3,
736 NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR
= 4,
737 NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR
= 5,
738 NVME_AER_INFO_SMART_RELIABILITY
= 0,
739 NVME_AER_INFO_SMART_TEMP_THRESH
= 1,
740 NVME_AER_INFO_SMART_SPARE_THRESH
= 2,
743 typedef struct QEMU_PACKED NvmeAerResult
{
750 typedef struct QEMU_PACKED NvmeZonedResult
{
754 typedef struct QEMU_PACKED NvmeCqe
{
763 enum NvmeStatusCodes
{
764 NVME_SUCCESS
= 0x0000,
765 NVME_INVALID_OPCODE
= 0x0001,
766 NVME_INVALID_FIELD
= 0x0002,
767 NVME_CID_CONFLICT
= 0x0003,
768 NVME_DATA_TRAS_ERROR
= 0x0004,
769 NVME_POWER_LOSS_ABORT
= 0x0005,
770 NVME_INTERNAL_DEV_ERROR
= 0x0006,
771 NVME_CMD_ABORT_REQ
= 0x0007,
772 NVME_CMD_ABORT_SQ_DEL
= 0x0008,
773 NVME_CMD_ABORT_FAILED_FUSE
= 0x0009,
774 NVME_CMD_ABORT_MISSING_FUSE
= 0x000a,
775 NVME_INVALID_NSID
= 0x000b,
776 NVME_CMD_SEQ_ERROR
= 0x000c,
777 NVME_INVALID_SGL_SEG_DESCR
= 0x000d,
778 NVME_INVALID_NUM_SGL_DESCRS
= 0x000e,
779 NVME_DATA_SGL_LEN_INVALID
= 0x000f,
780 NVME_MD_SGL_LEN_INVALID
= 0x0010,
781 NVME_SGL_DESCR_TYPE_INVALID
= 0x0011,
782 NVME_INVALID_USE_OF_CMB
= 0x0012,
783 NVME_INVALID_PRP_OFFSET
= 0x0013,
784 NVME_CMD_SET_CMB_REJECTED
= 0x002b,
785 NVME_INVALID_CMD_SET
= 0x002c,
786 NVME_LBA_RANGE
= 0x0080,
787 NVME_CAP_EXCEEDED
= 0x0081,
788 NVME_NS_NOT_READY
= 0x0082,
789 NVME_NS_RESV_CONFLICT
= 0x0083,
790 NVME_INVALID_CQID
= 0x0100,
791 NVME_INVALID_QID
= 0x0101,
792 NVME_MAX_QSIZE_EXCEEDED
= 0x0102,
793 NVME_ACL_EXCEEDED
= 0x0103,
794 NVME_RESERVED
= 0x0104,
795 NVME_AER_LIMIT_EXCEEDED
= 0x0105,
796 NVME_INVALID_FW_SLOT
= 0x0106,
797 NVME_INVALID_FW_IMAGE
= 0x0107,
798 NVME_INVALID_IRQ_VECTOR
= 0x0108,
799 NVME_INVALID_LOG_ID
= 0x0109,
800 NVME_INVALID_FORMAT
= 0x010a,
801 NVME_FW_REQ_RESET
= 0x010b,
802 NVME_INVALID_QUEUE_DEL
= 0x010c,
803 NVME_FID_NOT_SAVEABLE
= 0x010d,
804 NVME_FEAT_NOT_CHANGEABLE
= 0x010e,
805 NVME_FEAT_NOT_NS_SPEC
= 0x010f,
806 NVME_FW_REQ_SUSYSTEM_RESET
= 0x0110,
807 NVME_CONFLICTING_ATTRS
= 0x0180,
808 NVME_INVALID_PROT_INFO
= 0x0181,
809 NVME_WRITE_TO_RO
= 0x0182,
810 NVME_ZONE_BOUNDARY_ERROR
= 0x01b8,
811 NVME_ZONE_FULL
= 0x01b9,
812 NVME_ZONE_READ_ONLY
= 0x01ba,
813 NVME_ZONE_OFFLINE
= 0x01bb,
814 NVME_ZONE_INVALID_WRITE
= 0x01bc,
815 NVME_ZONE_TOO_MANY_ACTIVE
= 0x01bd,
816 NVME_ZONE_TOO_MANY_OPEN
= 0x01be,
817 NVME_ZONE_INVAL_TRANSITION
= 0x01bf,
818 NVME_WRITE_FAULT
= 0x0280,
819 NVME_UNRECOVERED_READ
= 0x0281,
820 NVME_E2E_GUARD_ERROR
= 0x0282,
821 NVME_E2E_APP_ERROR
= 0x0283,
822 NVME_E2E_REF_ERROR
= 0x0284,
823 NVME_CMP_FAILURE
= 0x0285,
824 NVME_ACCESS_DENIED
= 0x0286,
828 NVME_NO_COMPLETE
= 0xffff,
831 typedef struct QEMU_PACKED NvmeFwSlotInfoLog
{
833 uint8_t reserved1
[7];
841 uint8_t reserved2
[448];
844 typedef struct QEMU_PACKED NvmeErrorLog
{
845 uint64_t error_count
;
848 uint16_t status_field
;
849 uint16_t param_error_location
;
856 typedef struct QEMU_PACKED NvmeSmartLog
{
857 uint8_t critical_warning
;
858 uint16_t temperature
;
859 uint8_t available_spare
;
860 uint8_t available_spare_threshold
;
861 uint8_t percentage_used
;
862 uint8_t reserved1
[26];
863 uint64_t data_units_read
[2];
864 uint64_t data_units_written
[2];
865 uint64_t host_read_commands
[2];
866 uint64_t host_write_commands
[2];
867 uint64_t controller_busy_time
[2];
868 uint64_t power_cycles
[2];
869 uint64_t power_on_hours
[2];
870 uint64_t unsafe_shutdowns
[2];
871 uint64_t media_errors
[2];
872 uint64_t number_of_error_log_entries
[2];
873 uint8_t reserved2
[320];
876 #define NVME_SMART_WARN_MAX 6
878 NVME_SMART_SPARE
= 1 << 0,
879 NVME_SMART_TEMPERATURE
= 1 << 1,
880 NVME_SMART_RELIABILITY
= 1 << 2,
881 NVME_SMART_MEDIA_READ_ONLY
= 1 << 3,
882 NVME_SMART_FAILED_VOLATILE_MEDIA
= 1 << 4,
883 NVME_SMART_PMR_UNRELIABLE
= 1 << 5,
886 typedef struct NvmeEffectsLog
{
893 NVME_CMD_EFF_CSUPP
= 1 << 0,
894 NVME_CMD_EFF_LBCC
= 1 << 1,
895 NVME_CMD_EFF_NCC
= 1 << 2,
896 NVME_CMD_EFF_NIC
= 1 << 3,
897 NVME_CMD_EFF_CCC
= 1 << 4,
898 NVME_CMD_EFF_CSE_MASK
= 3 << 16,
899 NVME_CMD_EFF_UUID_SEL
= 1 << 19,
902 enum NvmeLogIdentifier
{
903 NVME_LOG_ERROR_INFO
= 0x01,
904 NVME_LOG_SMART_INFO
= 0x02,
905 NVME_LOG_FW_SLOT_INFO
= 0x03,
906 NVME_LOG_CMD_EFFECTS
= 0x05,
909 typedef struct QEMU_PACKED NvmePSD
{
921 #define NVME_IDENTIFY_DATA_SIZE 4096
924 NVME_ID_CNS_NS
= 0x00,
925 NVME_ID_CNS_CTRL
= 0x01,
926 NVME_ID_CNS_NS_ACTIVE_LIST
= 0x02,
927 NVME_ID_CNS_NS_DESCR_LIST
= 0x03,
928 NVME_ID_CNS_CS_NS
= 0x05,
929 NVME_ID_CNS_CS_CTRL
= 0x06,
930 NVME_ID_CNS_CS_NS_ACTIVE_LIST
= 0x07,
931 NVME_ID_CNS_NS_PRESENT_LIST
= 0x10,
932 NVME_ID_CNS_NS_PRESENT
= 0x11,
933 NVME_ID_CNS_CS_NS_PRESENT_LIST
= 0x1a,
934 NVME_ID_CNS_CS_NS_PRESENT
= 0x1b,
935 NVME_ID_CNS_IO_COMMAND_SET
= 0x1c,
938 typedef struct QEMU_PACKED NvmeIdCtrl
{
957 uint8_t rsvd128
[128];
983 uint8_t rsvd332
[180];
999 uint8_t rsvd540
[228];
1000 uint8_t subnqn
[256];
1001 uint8_t rsvd1024
[1024];
1006 typedef struct NvmeIdCtrlZoned
{
1008 uint8_t rsvd1
[4095];
1011 enum NvmeIdCtrlOacs
{
1012 NVME_OACS_SECURITY
= 1 << 0,
1013 NVME_OACS_FORMAT
= 1 << 1,
1014 NVME_OACS_FW
= 1 << 2,
1017 enum NvmeIdCtrlOncs
{
1018 NVME_ONCS_COMPARE
= 1 << 0,
1019 NVME_ONCS_WRITE_UNCORR
= 1 << 1,
1020 NVME_ONCS_DSM
= 1 << 2,
1021 NVME_ONCS_WRITE_ZEROES
= 1 << 3,
1022 NVME_ONCS_FEATURES
= 1 << 4,
1023 NVME_ONCS_RESRVATIONS
= 1 << 5,
1024 NVME_ONCS_TIMESTAMP
= 1 << 6,
1027 enum NvmeIdCtrlFrmw
{
1028 NVME_FRMW_SLOT1_RO
= 1 << 0,
1031 enum NvmeIdCtrlLpa
{
1032 NVME_LPA_NS_SMART
= 1 << 0,
1033 NVME_LPA_CSE
= 1 << 1,
1034 NVME_LPA_EXTENDED
= 1 << 2,
/* SQES/CQES: required (min) and maximum queue entry sizes, as log2 values. */
#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)

/* SGLS: SGL support flags advertised in Identify Controller. */
#define NVME_CTRL_SGLS_SUPPORT_MASK        (0x3 <<  0)
#define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN    (0x1 <<  0)
#define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 <<  1)
#define NVME_CTRL_SGLS_KEYED               (0x1 <<  2)
#define NVME_CTRL_SGLS_BITBUCKET           (0x1 << 16)
#define NVME_CTRL_SGLS_MPTR_CONTIGUOUS     (0x1 << 17)
#define NVME_CTRL_SGLS_EXCESS_LENGTH       (0x1 << 18)
#define NVME_CTRL_SGLS_MPTR_SGL            (0x1 << 19)
#define NVME_CTRL_SGLS_ADDR_OFFSET         (0x1 << 20)

/* Arbitration feature field extractors. */
#define NVME_ARB_AB(arb)    ((arb) & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb)   (((arb) >> 8) & 0xff)
#define NVME_ARB_MPW(arb)   (((arb) >> 16) & 0xff)
#define NVME_ARB_HPW(arb)   (((arb) >> 24) & 0xff)

/* Interrupt coalescing feature: aggregation threshold and time. */
#define NVME_INTC_THR(intc)  ((intc) & 0xff)
#define NVME_INTC_TIME(intc) (((intc) >> 8) & 0xff)

/* Interrupt vector configuration feature. */
#define NVME_INTVC_NOCOALESCING (0x1 << 16)

/* Temperature threshold feature field extractors. */
#define NVME_TEMP_THSEL(temp)      (((temp) >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER       0x0
#define NVME_TEMP_THSEL_UNDER      0x1
#define NVME_TEMP_TMPSEL(temp)     (((temp) >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0
#define NVME_TEMP_TMPTH(temp)      ((temp) & 0xffff)

/* Asynchronous event configuration feature field extractors. */
#define NVME_AEC_SMART(aec)         ((aec) & 0xff)
#define NVME_AEC_NS_ATTR(aec)       (((aec) >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) (((aec) >> 9) & 0x1)

/* Error recovery feature field extractors. */
#define NVME_ERR_REC_TLER(err_rec)  ((err_rec) & 0xffff)
#define NVME_ERR_REC_DULBE(err_rec) ((err_rec) & 0x10000)
1079 enum NvmeFeatureIds
{
1080 NVME_ARBITRATION
= 0x1,
1081 NVME_POWER_MANAGEMENT
= 0x2,
1082 NVME_LBA_RANGE_TYPE
= 0x3,
1083 NVME_TEMPERATURE_THRESHOLD
= 0x4,
1084 NVME_ERROR_RECOVERY
= 0x5,
1085 NVME_VOLATILE_WRITE_CACHE
= 0x6,
1086 NVME_NUMBER_OF_QUEUES
= 0x7,
1087 NVME_INTERRUPT_COALESCING
= 0x8,
1088 NVME_INTERRUPT_VECTOR_CONF
= 0x9,
1089 NVME_WRITE_ATOMICITY
= 0xa,
1090 NVME_ASYNCHRONOUS_EVENT_CONF
= 0xb,
1091 NVME_TIMESTAMP
= 0xe,
1092 NVME_COMMAND_SET_PROFILE
= 0x19,
1093 NVME_SOFTWARE_PROGRESS_MARKER
= 0x80,
1094 NVME_FID_MAX
= 0x100,
1097 typedef enum NvmeFeatureCap
{
1098 NVME_FEAT_CAP_SAVE
= 1 << 0,
1099 NVME_FEAT_CAP_NS
= 1 << 1,
1100 NVME_FEAT_CAP_CHANGE
= 1 << 2,
1103 typedef enum NvmeGetFeatureSelect
{
1104 NVME_GETFEAT_SELECT_CURRENT
= 0x0,
1105 NVME_GETFEAT_SELECT_DEFAULT
= 0x1,
1106 NVME_GETFEAT_SELECT_SAVED
= 0x2,
1107 NVME_GETFEAT_SELECT_CAP
= 0x3,
1108 } NvmeGetFeatureSelect
;
/* Get/Set Features command Dword 10 field extractors. */
#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) ((dw10) & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK  0x7
#define NVME_GETFEAT_SELECT(dw10) \
    (((dw10) >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK  0x1
#define NVME_SETFEAT_SAVE(dw10) \
    (((dw10) >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
1123 typedef struct QEMU_PACKED NvmeRangeType
{
1133 typedef struct QEMU_PACKED NvmeLBAF
{
1139 typedef struct QEMU_PACKED NvmeLBAFE
{
1145 #define NVME_NSID_BROADCAST 0xffffffff
1147 typedef struct QEMU_PACKED NvmeIdNs
{
1178 uint8_t rsvd192
[192];
1182 typedef struct QEMU_PACKED NvmeIdNsDescr
{
1188 enum NvmeNsIdentifierLength
{
1189 NVME_NIDL_EUI64
= 8,
1190 NVME_NIDL_NGUID
= 16,
1191 NVME_NIDL_UUID
= 16,
1195 enum NvmeNsIdentifierType
{
1196 NVME_NIDT_EUI64
= 0x01,
1197 NVME_NIDT_NGUID
= 0x02,
1198 NVME_NIDT_UUID
= 0x03,
1199 NVME_NIDT_CSI
= 0x04,
1203 NVME_CSI_NVM
= 0x00,
1204 NVME_CSI_ZONED
= 0x02,
1207 #define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))
1209 typedef struct QEMU_PACKED NvmeIdNsZoned
{
1216 uint8_t rsvd20
[2796];
1217 NvmeLBAFE lbafe
[16];
1218 uint8_t rsvd3072
[768];
/* Deallocate Logical Block Features (Identify Namespace DLFEAT field). */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat)    ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES    1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES      2

/* Identify Namespace feature/format/protection field extractors. */
#define NVME_ID_NS_NSFEAT_THIN(nsfeat)   ((nsfeat) & 0x1)
#define NVME_ID_NS_NSFEAT_DULBE(nsfeat)  (((nsfeat) >> 2) & 0x1)
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) (((flbas) >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)    ((flbas) & 0xf)
#define NVME_ID_NS_MC_SEPARATE(mc)       (((mc) >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)       ((mc) & 0x1)
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)   (((dpc) >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)  (((dpc) >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)       (((dpc) >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)       (((dpc) >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)       ((dpc) & 0x1)
#define NVME_ID_NS_DPC_TYPE_MASK 0x7
1250 DPS_TYPE_MASK
= 0x7,
1251 DPS_FIRST_EIGHT
= 8,
1255 NVME_ZA_FINISHED_BY_CTLR
= 1 << 0,
1256 NVME_ZA_FINISH_RECOMMENDED
= 1 << 1,
1257 NVME_ZA_RESET_RECOMMENDED
= 1 << 2,
1258 NVME_ZA_ZD_EXT_VALID
= 1 << 7,
1261 typedef struct QEMU_PACKED NvmeZoneReportHeader
{
1264 } NvmeZoneReportHeader
;
1266 enum NvmeZoneReceiveAction
{
1267 NVME_ZONE_REPORT
= 0,
1268 NVME_ZONE_REPORT_EXTENDED
= 1,
1271 enum NvmeZoneReportType
{
1272 NVME_ZONE_REPORT_ALL
= 0,
1273 NVME_ZONE_REPORT_EMPTY
= 1,
1274 NVME_ZONE_REPORT_IMPLICITLY_OPEN
= 2,
1275 NVME_ZONE_REPORT_EXPLICITLY_OPEN
= 3,
1276 NVME_ZONE_REPORT_CLOSED
= 4,
1277 NVME_ZONE_REPORT_FULL
= 5,
1278 NVME_ZONE_REPORT_READ_ONLY
= 6,
1279 NVME_ZONE_REPORT_OFFLINE
= 7,
1283 NVME_ZONE_TYPE_RESERVED
= 0x00,
1284 NVME_ZONE_TYPE_SEQ_WRITE
= 0x02,
1287 enum NvmeZoneSendAction
{
1288 NVME_ZONE_ACTION_RSD
= 0x00,
1289 NVME_ZONE_ACTION_CLOSE
= 0x01,
1290 NVME_ZONE_ACTION_FINISH
= 0x02,
1291 NVME_ZONE_ACTION_OPEN
= 0x03,
1292 NVME_ZONE_ACTION_RESET
= 0x04,
1293 NVME_ZONE_ACTION_OFFLINE
= 0x05,
1294 NVME_ZONE_ACTION_SET_ZD_EXT
= 0x10,
1297 typedef struct QEMU_PACKED NvmeZoneDescr
{
1308 typedef enum NvmeZoneState
{
1309 NVME_ZONE_STATE_RESERVED
= 0x00,
1310 NVME_ZONE_STATE_EMPTY
= 0x01,
1311 NVME_ZONE_STATE_IMPLICITLY_OPEN
= 0x02,
1312 NVME_ZONE_STATE_EXPLICITLY_OPEN
= 0x03,
1313 NVME_ZONE_STATE_CLOSED
= 0x04,
1314 NVME_ZONE_STATE_READ_ONLY
= 0x0D,
1315 NVME_ZONE_STATE_FULL
= 0x0E,
1316 NVME_ZONE_STATE_OFFLINE
= 0x0F,
1319 static inline void _nvme_check_size(void)
1321 QEMU_BUILD_BUG_ON(sizeof(NvmeBar
) != 4096);
1322 QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult
) != 4);
1323 QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult
) != 8);
1324 QEMU_BUILD_BUG_ON(sizeof(NvmeCqe
) != 16);
1325 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange
) != 16);
1326 QEMU_BUILD_BUG_ON(sizeof(NvmeCmd
) != 64);
1327 QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ
) != 64);
1328 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq
) != 64);
1329 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq
) != 64);
1330 QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify
) != 64);
1331 QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd
) != 64);
1332 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd
) != 64);
1333 QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType
) != 64);
1334 QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog
) != 64);
1335 QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog
) != 512);
1336 QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog
) != 512);
1337 QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog
) != 4096);
1338 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl
) != 4096);
1339 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned
) != 4096);
1340 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF
) != 4);
1341 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE
) != 16);
1342 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs
) != 4096);
1343 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned
) != 4096);
1344 QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor
) != 16);
1345 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr
) != 4);
1346 QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr
) != 64);