Lines Matching refs:cqes
40 struct nvme_completion *cqes; member
152 ulong start = (ulong)&nvmeq->cqes[0]; in nvme_read_completion_status()
157 return readw(&(nvmeq->cqes[index].status)); in nvme_read_completion_status()
221 *result = readl(&(nvmeq->cqes[head].result)); in nvme_submit_sync_cmd()
249 nvmeq->cqes = (void *)memalign(4096, NVME_CQ_ALLOCATION); in nvme_alloc_queue()
250 if (!nvmeq->cqes) in nvme_alloc_queue()
252 memset((void *)nvmeq->cqes, 0, NVME_CQ_SIZE(depth)); in nvme_alloc_queue()
272 free((void *)nvmeq->cqes); in nvme_alloc_queue()
320 free((void *)nvmeq->cqes); in nvme_free_queue()
345 memset((void *)nvmeq->cqes, 0, NVME_CQ_SIZE(nvmeq->q_depth)); in nvme_init_queue()
346 flush_dcache_range((ulong)nvmeq->cqes, in nvme_init_queue()
347 (ulong)nvmeq->cqes + NVME_CQ_ALLOCATION); in nvme_init_queue()
397 nvme_writeq((ulong)nvmeq->cqes, &dev->bar->acq); in nvme_configure_admin_queue()
423 c.create_cq.prp1 = cpu_to_le64((ulong)nvmeq->cqes); in nvme_alloc_cq()
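Taken together, these references cover the full life of the completion queue entry (CQE) array: it is allocated 4 KiB-aligned and zeroed (lines 249-252), re-zeroed and flushed when a queue is (re)initialized (345-347), freed on the error path and at teardown (272, 320), polled for a fresh status word after a cache invalidate (152-157) with the result DWORD read back on completion (221), and its address is handed to the controller either through the ACQ register for the admin queue (397) or via the Create I/O CQ command's PRP1 (423). Below is a minimal sketch of the allocate-and-poll part of that pattern. It is modeled on the snippets above but is not the driver verbatim: NVME_Q_DEPTH, NVME_CQ_SIZE, NVME_CQ_ALLOCATION, cq_alloc() and cq_read_status() are simplified placeholders, and it assumes U-Boot's memalign(), readw() and invalidate_dcache_range() helpers and header layout.

/*
 * Hedged sketch of the cqes allocation and polling pattern shown above.
 * Struct layout follows the standard NVMe CQE; macros and function names
 * are placeholders, not the driver's exact definitions.
 */
#include <malloc.h>		/* memalign() */
#include <string.h>		/* memset() */
#include <asm/io.h>		/* readw() */
#include <cpu_func.h>		/* invalidate_dcache_range() */
#include <linux/errno.h>
#include <linux/types.h>

/* Standard 16-byte NVMe completion queue entry. */
struct nvme_completion {
	__le32	result;		/* command-specific result (DW0) */
	__le32	rsvd;
	__le16	sq_head;
	__le16	sq_id;
	__u16	command_id;
	__le16	status;		/* phase bit + status field (DW3) */
};

#define NVME_Q_DEPTH		2	/* placeholder queue depth for this sketch */
#define NVME_CQ_SIZE(depth)	((depth) * sizeof(struct nvme_completion))
/* Placeholder: allocation rounded up so cache maintenance covers whole lines. */
#define NVME_CQ_ALLOCATION	4096

struct nvme_queue {
	struct nvme_completion *cqes;
	u16 q_depth;
};

/* Allocate and zero the CQE array, as in nvme_alloc_queue()/nvme_init_queue(). */
static int cq_alloc(struct nvme_queue *nvmeq, int depth)
{
	nvmeq->cqes = memalign(4096, NVME_CQ_ALLOCATION);
	if (!nvmeq->cqes)
		return -ENOMEM;

	memset(nvmeq->cqes, 0, NVME_CQ_SIZE(depth));
	nvmeq->q_depth = depth;

	return 0;
}

/*
 * Read one CQE's status word. The controller writes CQEs by DMA, so the
 * cached copy is invalidated before the CPU looks at it, as in
 * nvme_read_completion_status().
 */
static u16 cq_read_status(struct nvme_queue *nvmeq, u16 index)
{
	ulong start = (ulong)&nvmeq->cqes[0];

	invalidate_dcache_range(start, start + NVME_CQ_ALLOCATION);

	return readw(&nvmeq->cqes[index].status);
}

The split between the two size macros mirrors what the listing itself shows: the entries in use are zeroed with NVME_CQ_SIZE (lines 252, 345), while flushes and invalidates span the padded NVME_CQ_ALLOCATION (152, 346-347), keeping cache maintenance on whole-buffer, cache-line-aligned boundaries.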