typedef struct NvmeBar {
    uint8_t     padding[3520]; /* not used by QEMU */
    CAP_MPSMIN_SHIFT   = 48,
    CAP_MPSMAX_SHIFT   = 52,
    CAP_MQES_MASK      = 0xffff,
    CAP_MPSMIN_MASK    = 0xf,
    CAP_MPSMAX_MASK    = 0xf,
#define NVME_CAP_MQES(cap)   (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)    (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)    (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)     (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap)  (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap)  (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)    (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_SET_MQES(cap, val)   (cap |= (uint64_t)(val & CAP_MQES_MASK)  \
                                                           << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val)    (cap |= (uint64_t)(val & CAP_CQR_MASK)   \
                                                           << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val)    (cap |= (uint64_t)(val & CAP_AMS_MASK)   \
                                                           << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val)     (cap |= (uint64_t)(val & CAP_TO_MASK)    \
                                                           << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val)  (cap |= (uint64_t)(val & CAP_DSTRD_MASK) \
                                                           << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val)  (cap |= (uint64_t)(val & CAP_NSSRS_MASK) \
                                                           << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val)    (cap |= (uint64_t)(val & CAP_CSS_MASK)   \
                                                           << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) (cap |= (uint64_t)(val & CAP_MPSMIN_MASK)\
                                                           << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) (cap |= (uint64_t)(val & CAP_MPSMAX_MASK)\
                                                           << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val)   (cap |= (uint64_t)(val & CAP_PMR_MASK)   \
                                                           << CAP_PMR_SHIFT)
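
/*
 * Illustrative sketch (not part of the original header): how a controller
 * model might populate CAP with the SET macros above and how a reader
 * decodes it again. The helper name and the chosen field values are
 * assumptions for the example only.
 */
static inline uint64_t nvme_example_build_cap(void)
{
    uint64_t cap = 0;

    NVME_CAP_SET_MQES(cap, 0x7ff);  /* up to 2048 entries per queue (0's based) */
    NVME_CAP_SET_CQR(cap, 1);       /* queues must be physically contiguous */
    NVME_CAP_SET_TO(cap, 0xf);      /* worst-case ready timeout, 500 ms units */
    NVME_CAP_SET_CSS(cap, 1);       /* NVM command set supported */
    NVME_CAP_SET_MPSMIN(cap, 0);    /* minimum page size 2^(12 + 0) = 4 KiB */
    NVME_CAP_SET_MPSMAX(cap, 4);    /* maximum page size 2^(12 + 4) = 64 KiB */

    /* e.g. NVME_CAP_MQES(cap) now reads back as 0x7ff */
    return cap;
}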
    CC_IOSQES_MASK  = 0xf,
    CC_IOCQES_MASK  = 0xf,
#define NVME_CC_EN(cc)     ((cc >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    ((cc >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    ((cc >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    ((cc >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    ((cc >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
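
/*
 * Illustrative sketch (not from the original header): the IOSQES/IOCQES
 * fields of CC hold log2 of the I/O queue entry sizes, so 6 and 4 select
 * the usual 64-byte submission and 16-byte completion entries.
 */
static inline void nvme_example_cc_entry_sizes(uint32_t cc,
                                               uint32_t *sqes, uint32_t *cqes)
{
    *sqes = 1u << NVME_CC_IOSQES(cc);  /* bytes per submission queue entry */
    *cqes = 1u << NVME_CC_IOCQES(cc);  /* bytes per completion queue entry */
}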
    CSTS_NSSRO_SHIFT = 4,
    CSTS_SHST_MASK   = 0x3,
    CSTS_NSSRO_MASK  = 0x1,
    NVME_CSTS_READY         = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED        = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL   = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO         = 1 << CSTS_NSSRO_SHIFT,
#define NVME_CSTS_RDY(csts)   ((csts >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   ((csts >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  ((csts >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
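
/*
 * Illustrative sketch (not part of the original header): how host code might
 * poll CSTS after requesting a shutdown through CC.SHN. An SHST value of 2
 * means shutdown processing is complete.
 */
static inline bool nvme_example_shutdown_complete(uint32_t csts)
{
    return NVME_CSTS_SHST(csts) == 2;
}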
    AQA_ASQS_MASK = 0xfff,
    AQA_ACQS_MASK = 0xfff,
};

#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
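
/*
 * Illustrative sketch (not from the original header): AQA carries the
 * 0's-based admin queue sizes (per the NVMe spec, ASQS lives in bits 11:0
 * and ACQS in bits 27:16). This builds an AQA value for 32-entry admin
 * submission and completion queues.
 */
static inline uint32_t nvme_example_aqa(void)
{
    return (31u << AQA_ASQS_SHIFT) | (31u << AQA_ACQS_SHIFT);
}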
enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT  = 0,
    CMBLOC_OFST_SHIFT = 12,
};

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK  = 0x7,
    CMBLOC_OFST_MASK = 0xfffff,
};
#define NVME_CMBLOC_BIR(cmbloc)  ((cmbloc >> CMBLOC_BIR_SHIFT)  & \
                                  CMBLOC_BIR_MASK)
#define NVME_CMBLOC_OFST(cmbloc) ((cmbloc >> CMBLOC_OFST_SHIFT) & \
                                  CMBLOC_OFST_MASK)

#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
enum NvmeCmbszShift {
    CMBSZ_LISTS_SHIFT = 2,

    CMBSZ_SQS_MASK    = 0x1,
    CMBSZ_CQS_MASK    = 0x1,
    CMBSZ_LISTS_MASK  = 0x1,
    CMBSZ_RDS_MASK    = 0x1,
    CMBSZ_WDS_MASK    = 0x1,
    CMBSZ_SZU_MASK    = 0xf,
    CMBSZ_SZ_MASK     = 0xfffff,
#define NVME_CMBSZ_SQS(cmbsz)   ((cmbsz >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)   ((cmbsz >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)   ((cmbsz >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)   ((cmbsz >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)   ((cmbsz >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)    ((cmbsz >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)
#define NVME_CMBSZ_SET_SQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
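
/*
 * Illustrative sketch (not from the original header): the CMB size is SZ
 * units of 2^(12 + 4 * SZU) bytes, e.g. SZU = 1 (64 KiB units) and SZ = 16
 * give 1 MiB. Doing the shift in 64 bits avoids overflowing a 32-bit int for
 * large size units, which the plain (1 << ...) form above could.
 */
static inline uint64_t nvme_example_cmb_bytes(uint32_t cmbsz)
{
    return (uint64_t)NVME_CMBSZ_SZ(cmbsz) << (12 + 4 * NVME_CMBSZ_SZU(cmbsz));
}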
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT    = 3,
    PMRCAP_WDS_SHIFT    = 4,
    PMRCAP_BIR_SHIFT    = 5,
    PMRCAP_PMRTU_SHIFT  = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT  = 16,
    PMRCAP_CMSS_SHIFT   = 24,
};

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK     = 0x1,
    PMRCAP_WDS_MASK     = 0x1,
    PMRCAP_BIR_MASK     = 0x7,
    PMRCAP_PMRTU_MASK   = 0x3,
    PMRCAP_PMRWBM_MASK  = 0xf,
    PMRCAP_PMRTO_MASK   = 0xff,
    PMRCAP_CMSS_MASK    = 0x1,
};
#define NVME_PMRCAP_RDS(pmrcap) \
    ((pmrcap >> PMRCAP_RDS_SHIFT)    & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap) \
    ((pmrcap >> PMRCAP_WDS_SHIFT)    & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap) \
    ((pmrcap >> PMRCAP_BIR_SHIFT)    & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT)  & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT)  & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap) \
    ((pmrcap >> PMRCAP_CMSS_SHIFT)   & PMRCAP_CMSS_MASK)

#define NVME_PMRCAP_SET_RDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
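
/*
 * Illustrative sketch (not part of the original header): a controller model
 * advertising a Persistent Memory Region behind BAR 2 with read and write
 * data support. The field values are assumptions for the example only.
 */
static inline uint32_t nvme_example_pmrcap(void)
{
    uint32_t pmrcap = 0;

    NVME_PMRCAP_SET_RDS(pmrcap, 1);   /* read data supported */
    NVME_PMRCAP_SET_WDS(pmrcap, 1);   /* write data supported */
    NVME_PMRCAP_SET_BIR(pmrcap, 2);   /* PMR mapped through BAR 2 */
    return pmrcap;
}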
enum NvmePmrctlShift {
    PMRCTL_EN_SHIFT = 0,
};

enum NvmePmrctlMask {
    PMRCTL_EN_MASK  = 0x1,
};

#define NVME_PMRCTL_EN(pmrctl)  ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT  = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,
};

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK  = 0xff,
    PMRSTS_NRDY_MASK = 0x1,
    PMRSTS_HSTS_MASK = 0x7,
    PMRSTS_CBAI_MASK = 0x1,
};
#define NVME_PMRSTS_ERR(pmrsts) \
    ((pmrsts >> PMRSTS_ERR_SHIFT)  & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

#define NVME_PMRSTS_SET_ERR(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT    = 4,
    PMREBS_PMRWBZ_SHIFT = 8,
};

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK  = 0xf,
    PMREBS_RBB_MASK     = 0x1,
    PMREBS_PMRWBZ_MASK  = 0xffffff,
};
#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs) \
    ((pmrebs >> PMREBS_RBB_SHIFT)    & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,
};

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK  = 0xf,
    PMRSWTP_PMRSWTV_MASK  = 0xffffff,
};
#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
enum NvmePmrmscShift {
    PMRMSC_CMSE_SHIFT = 1,
    PMRMSC_CBA_SHIFT  = 12,
};

enum NvmePmrmscMask {
    PMRMSC_CMSE_MASK  = 0x1,
    PMRMSC_CBA_MASK   = 0xfffffffffffff,
};
#define NVME_PMRMSC_CMSE(pmrmsc) \
    ((pmrmsc >> PMRMSC_CMSE_SHIFT) & PMRMSC_CMSE_MASK)
#define NVME_PMRMSC_CBA(pmrmsc) \
    ((pmrmsc >> PMRMSC_CBA_SHIFT)  & PMRMSC_CBA_MASK)

#define NVME_PMRMSC_SET_CMSE(pmrmsc, val) \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CMSE_MASK) << PMRMSC_CMSE_SHIFT)
#define NVME_PMRMSC_SET_CBA(pmrmsc, val) \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CBA_MASK) << PMRMSC_CBA_SHIFT)
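
/*
 * Illustrative sketch (not part of the original header): programming PMRMSC
 * to enable the controller memory space at a 4 KiB aligned host address.
 * The helper name and the address handling are assumptions for the example.
 */
static inline uint64_t nvme_example_enable_cms(uint64_t base_addr)
{
    uint64_t pmrmsc = 0;

    NVME_PMRMSC_SET_CBA(pmrmsc, base_addr >> 12); /* CBA holds address bits 63:12 */
    NVME_PMRMSC_SET_CMSE(pmrmsc, 1);              /* controller memory space enable */
    return pmrmsc;
}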
typedef struct NvmeCmd {
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ      = 0x00,
    NVME_ADM_CMD_CREATE_SQ      = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE   = 0x02,
    NVME_ADM_CMD_DELETE_CQ      = 0x04,
    NVME_ADM_CMD_CREATE_CQ      = 0x05,
    NVME_ADM_CMD_IDENTIFY       = 0x06,
    NVME_ADM_CMD_ABORT          = 0x08,
    NVME_ADM_CMD_SET_FEATURES   = 0x09,
    NVME_ADM_CMD_GET_FEATURES   = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ   = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW    = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW    = 0x11,
    NVME_ADM_CMD_FORMAT_NVM     = 0x80,
    NVME_ADM_CMD_SECURITY_SEND  = 0x81,
    NVME_ADM_CMD_SECURITY_RECV  = 0x82,
};
enum NvmeIoCommands {
    NVME_CMD_FLUSH        = 0x00,
    NVME_CMD_WRITE        = 0x01,
    NVME_CMD_READ         = 0x02,
    NVME_CMD_WRITE_UNCOR  = 0x04,
    NVME_CMD_COMPARE      = 0x05,
    NVME_CMD_WRITE_ZEROS  = 0x08,
typedef struct NvmeDeleteQ {

typedef struct NvmeCreateCq {

#define NVME_CQ_FLAGS_PC(cq_flags)  (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)

typedef struct NvmeCreateSq {

#define NVME_SQ_FLAGS_PC(sq_flags)    (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)
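
/*
 * Illustrative sketch (not from the original header): building the flags word
 * of a Create I/O Submission Queue command for a physically contiguous queue
 * with normal priority, then reading it back with the accessors above.
 */
static inline uint16_t nvme_example_sq_flags(void)
{
    uint16_t sq_flags = 0x1 | (0x2 << 1);  /* PC = 1, QPRIO = 2 (normal) */

    /* NVME_SQ_FLAGS_PC(sq_flags) == 1 and NVME_SQ_FLAGS_QPRIO(sq_flags) == 2 */
    return sq_flags;
}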
enum NvmeQueueFlags {
    NVME_Q_PRIO_URGENT  = 0,
    NVME_Q_PRIO_HIGH    = 1,
    NVME_Q_PRIO_NORMAL  = 2,
typedef struct NvmeIdentify {

typedef struct NvmeRwCmd {
    NVME_RW_LR                  = 1 << 15,
    NVME_RW_FUA                 = 1 << 14,
    NVME_RW_DSM_FREQ_UNSPEC     = 0,
    NVME_RW_DSM_FREQ_TYPICAL    = 1,
    NVME_RW_DSM_FREQ_RARE       = 2,
    NVME_RW_DSM_FREQ_READS      = 3,
    NVME_RW_DSM_FREQ_WRITES     = 4,
    NVME_RW_DSM_FREQ_RW         = 5,
    NVME_RW_DSM_FREQ_ONCE       = 6,
    NVME_RW_DSM_FREQ_PREFETCH   = 7,
    NVME_RW_DSM_FREQ_TEMP       = 8,
    NVME_RW_DSM_LATENCY_NONE    = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE    = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM    = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW     = 3 << 4,
    NVME_RW_DSM_SEQ_REQ         = 1 << 6,
    NVME_RW_DSM_COMPRESSED      = 1 << 7,
    NVME_RW_PRINFO_PRACT        = 1 << 13,
    NVME_RW_PRINFO_PRCHK_GUARD  = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP    = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF    = 1 << 10,
typedef struct NvmeDsmCmd {

    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD  = 1 << 2,

typedef struct NvmeDsmRange {
enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR                  = 0,
    NVME_AER_TYPE_SMART                  = 1,
    NVME_AER_TYPE_IO_SPECIFIC            = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC        = 7,
    NVME_AER_INFO_ERR_INVALID_SQ         = 0,
    NVME_AER_INFO_ERR_INVALID_DB         = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL          = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR  = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR    = 5,
    NVME_AER_INFO_SMART_RELIABILITY      = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH      = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH     = 2,
};
typedef struct NvmeAerResult {

typedef struct NvmeCqe {
enum NvmeStatusCodes {
    NVME_SUCCESS                = 0x0000,
    NVME_INVALID_OPCODE         = 0x0001,
    NVME_INVALID_FIELD          = 0x0002,
    NVME_CID_CONFLICT           = 0x0003,
    NVME_DATA_TRAS_ERROR        = 0x0004,
    NVME_POWER_LOSS_ABORT       = 0x0005,
    NVME_INTERNAL_DEV_ERROR     = 0x0006,
    NVME_CMD_ABORT_REQ          = 0x0007,
    NVME_CMD_ABORT_SQ_DEL       = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE  = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID           = 0x000b,
    NVME_CMD_SEQ_ERROR          = 0x000c,
    NVME_LBA_RANGE              = 0x0080,
    NVME_CAP_EXCEEDED           = 0x0081,
    NVME_NS_NOT_READY           = 0x0082,
    NVME_NS_RESV_CONFLICT       = 0x0083,
    NVME_INVALID_CQID           = 0x0100,
    NVME_INVALID_QID            = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED     = 0x0102,
    NVME_ACL_EXCEEDED           = 0x0103,
    NVME_RESERVED               = 0x0104,
    NVME_AER_LIMIT_EXCEEDED     = 0x0105,
    NVME_INVALID_FW_SLOT        = 0x0106,
    NVME_INVALID_FW_IMAGE       = 0x0107,
    NVME_INVALID_IRQ_VECTOR     = 0x0108,
    NVME_INVALID_LOG_ID         = 0x0109,
    NVME_INVALID_FORMAT         = 0x010a,
    NVME_FW_REQ_RESET           = 0x010b,
    NVME_INVALID_QUEUE_DEL      = 0x010c,
    NVME_FID_NOT_SAVEABLE       = 0x010d,
    NVME_FID_NOT_NSID_SPEC      = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET  = 0x0110,
    NVME_CONFLICTING_ATTRS      = 0x0180,
    NVME_INVALID_PROT_INFO      = 0x0181,
    NVME_WRITE_TO_RO            = 0x0182,
    NVME_WRITE_FAULT            = 0x0280,
    NVME_UNRECOVERED_READ       = 0x0281,
    NVME_E2E_GUARD_ERROR        = 0x0282,
    NVME_E2E_APP_ERROR          = 0x0283,
    NVME_E2E_REF_ERROR          = 0x0284,
    NVME_CMP_FAILURE            = 0x0285,
    NVME_ACCESS_DENIED          = 0x0286,
    NVME_NO_COMPLETE            = 0xffff,
};
typedef struct NvmeFwSlotInfoLog {
    uint8_t     reserved1[7];
    uint8_t     reserved2[448];

typedef struct NvmeErrorLog {
    uint64_t    error_count;
    uint16_t    status_field;
    uint16_t    param_error_location;
typedef struct NvmeSmartLog {
    uint8_t     critical_warning;
    uint8_t     temperature[2];
    uint8_t     available_spare;
    uint8_t     available_spare_threshold;
    uint8_t     percentage_used;
    uint8_t     reserved1[26];
    uint64_t    data_units_read[2];
    uint64_t    data_units_written[2];
    uint64_t    host_read_commands[2];
    uint64_t    host_write_commands[2];
    uint64_t    controller_busy_time[2];
    uint64_t    power_cycles[2];
    uint64_t    power_on_hours[2];
    uint64_t    unsafe_shutdowns[2];
    uint64_t    media_errors[2];
    uint64_t    number_of_error_log_entries[2];
    uint8_t     reserved2[320];
    NVME_SMART_SPARE                  = 1 << 0,
    NVME_SMART_TEMPERATURE            = 1 << 1,
    NVME_SMART_RELIABILITY            = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY        = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA  = 1 << 4,

    NVME_LOG_ERROR_INFO   = 0x01,
    NVME_LOG_SMART_INFO   = 0x02,
    NVME_LOG_FW_SLOT_INFO = 0x03,
typedef struct NvmePSD {

#define NVME_IDENTIFY_DATA_SIZE 4096

    NVME_ID_CNS_NS             = 0x0,
    NVME_ID_CNS_CTRL           = 0x1,
    NVME_ID_CNS_NS_ACTIVE_LIST = 0x2,
typedef struct NvmeIdCtrl {
    uint8_t     rsvd255[178];
    uint8_t     rsvd511[248];
    uint8_t     rsvd703[174];
    uint8_t     rsvd2047[1344];
enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY  = 1 << 0,
    NVME_OACS_FORMAT    = 1 << 1,
    NVME_OACS_FW        = 1 << 2,
};

enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE       = 1 << 0,
    NVME_ONCS_WRITE_UNCORR  = 1 << 1,
    NVME_ONCS_DSM           = 1 << 2,
    NVME_ONCS_WRITE_ZEROS   = 1 << 3,
    NVME_ONCS_FEATURES      = 1 << 4,
    NVME_ONCS_RESRVATIONS   = 1 << 5,
    NVME_ONCS_TIMESTAMP     = 1 << 6,
};
#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)
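
/*
 * Illustrative sketch (not from the original header): SQES/CQES in Identify
 * Controller report required and maximum queue entry sizes as log2 values;
 * the common minimums are 6 (64-byte SQ entries) and 4 (16-byte CQ entries).
 */
static inline uint32_t nvme_example_min_sq_entry_bytes(uint8_t sqes)
{
    return 1u << NVME_CTRL_SQES_MIN(sqes);
}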
typedef struct NvmeFeatureVal {
    uint32_t    arbitration;
    uint32_t    temp_thresh;
    uint32_t    volatile_wc;
    uint32_t    int_coalescing;
    uint32_t    *int_vector_config;
    uint32_t    write_atomicity;
    uint32_t    async_config;
    uint32_t    sw_prog_marker;
#define NVME_ARB_AB(arb)  (arb & 0x7)
#define NVME_ARB_LPW(arb) ((arb >> 8)  & 0xff)
#define NVME_ARB_MPW(arb) ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb) ((arb >> 24) & 0xff)

#define NVME_INTC_THR(intc)  (intc & 0xff)
#define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)
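
/*
 * Illustrative sketch (not from the original header): decoding the
 * Arbitration feature value. AB is the arbitration burst as a power of two;
 * per the NVMe spec the value 0x7 means "no limit".
 */
static inline uint32_t nvme_example_arb_burst(uint32_t arb)
{
    return NVME_ARB_AB(arb) == 0x7 ? 0xffffffffu : 1u << NVME_ARB_AB(arb);
}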
enum NvmeFeatureIds {
    NVME_ARBITRATION              = 0x1,
    NVME_POWER_MANAGEMENT         = 0x2,
    NVME_LBA_RANGE_TYPE           = 0x3,
    NVME_TEMPERATURE_THRESHOLD    = 0x4,
    NVME_ERROR_RECOVERY           = 0x5,
    NVME_VOLATILE_WRITE_CACHE     = 0x6,
    NVME_NUMBER_OF_QUEUES         = 0x7,
    NVME_INTERRUPT_COALESCING     = 0x8,
    NVME_INTERRUPT_VECTOR_CONF    = 0x9,
    NVME_WRITE_ATOMICITY          = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF  = 0xb,
    NVME_TIMESTAMP                = 0xe,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
};
typedef struct NvmeRangeType {

typedef struct NvmeLBAF {

typedef struct NvmeIdNs {
/* Deallocate Logical Block Features */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat)    ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat)   ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES    1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES      2
#define NVME_ID_NS_NSFEAT_THIN(nsfeat)   ((nsfeat & 0x1))
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)    ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc)       ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)       ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)   ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)  ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)       ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)       ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)       ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK         0x7
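
/*
 * Illustrative sketch (not part of the original header): answering two common
 * questions about a namespace from its Identify data -- which entry of the
 * LBA format table is active, and whether deallocated blocks are guaranteed
 * to read back as zeroes.
 */
static inline unsigned nvme_example_active_lbaf_index(uint8_t flbas)
{
    return NVME_ID_NS_FLBAS_INDEX(flbas);
}

static inline bool nvme_example_dealloc_reads_zeroes(uint8_t dlfeat)
{
    return NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ==
           NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES;
}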
static inline void _nvme_check_size(void)
{
    QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);