Lines matching references to cqes
38 struct nvme_completion *cqes; member
143 u64 start = (ulong)&nvmeq->cqes[index]; in nvme_read_completion_status()
148 return le16_to_cpu(readw(&(nvmeq->cqes[index].status))); in nvme_read_completion_status()
212 *result = le32_to_cpu(readl(&(nvmeq->cqes[head].result))); in nvme_submit_sync_cmd()
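The readw()/readl() hits above belong to the polled completion path: nvme_read_completion_status() fetches the status word of the entry at the queue head, and nvme_submit_sync_cmd() pulls the 32-bit command result out of the same entry once the status indicates a new completion. Below is a minimal hosted-C sketch of those two reads, assuming the usual 16-byte NVMe completion entry layout and a little-endian host, so the le16_to_cpu()/le32_to_cpu() conversions and MMIO-style accessors are replaced by ordinary volatile loads; the field names are from the NVMe spec, not necessarily this driver's struct nvme_completion.

	#include <stdbool.h>
	#include <stdint.h>

	/* Assumed 16-byte NVMe completion entry layout. */
	struct nvme_completion {
		uint32_t result;	/* command-specific result (DW0) */
		uint32_t rsvd;
		uint16_t sq_head;
		uint16_t sq_id;
		uint16_t command_id;
		uint16_t status;	/* bit 0 is the phase tag */
	};

	/*
	 * Rough equivalent of the return on line 148: read the status word
	 * of the CQ entry at 'index'. A volatile load stands in for readw()
	 * on the coherent/uncached buffer the real driver uses.
	 */
	static uint16_t cq_read_status(volatile struct nvme_completion *cqes,
				       unsigned int index)
	{
		return cqes[index].status;
	}

	/* An entry is new once its phase tag matches the queue's current phase. */
	static bool cq_entry_ready(volatile struct nvme_completion *cqes,
				   unsigned int head, unsigned int phase)
	{
		return (cq_read_status(cqes, head) & 1) == phase;
	}

	/*
	 * Rough equivalent of the readl() on line 212: the command-specific
	 * result lives in the first dword of the completion entry.
	 */
	static uint32_t cq_read_result(volatile struct nvme_completion *cqes,
				       unsigned int head)
	{
		return cqes[head].result;
	}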
240 nvmeq->cqes = (void *)memalign(4096, NVME_CQ_SIZE(depth)); in nvme_alloc_queue()
241 if (!nvmeq->cqes) in nvme_alloc_queue()
243 memset((void *)nvmeq->cqes, 0, NVME_CQ_SIZE(depth)); in nvme_alloc_queue()
263 free((void *)nvmeq->cqes); in nvme_alloc_queue()
311 free((void *)nvmeq->cqes); in nvme_free_queue()
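The nvme_alloc_queue() and nvme_free_queue() hits own the ring's lifetime: one 4 KiB-aligned allocation of NVME_CQ_SIZE(depth) bytes, zeroed right after allocation and freed when the queue goes away. Here is a hosted-C sketch of the same pattern, with aligned_alloc() standing in for memalign() and the NVME_CQ_SIZE() macro reconstructed on the assumption that a completion entry is 16 bytes.

	#include <stdlib.h>
	#include <string.h>

	/* Assumed reconstruction of the size macro: depth 16-byte entries. */
	#define NVME_CQ_ENTRY_SIZE	16
	#define NVME_CQ_SIZE(depth)	((depth) * NVME_CQ_ENTRY_SIZE)

	struct nvme_queue {
		void *cqes;		/* completion ring, 4 KiB aligned */
		unsigned int q_depth;
	};

	/* Allocate and zero the completion ring, as on lines 240-243. */
	static int cq_ring_alloc(struct nvme_queue *nvmeq, unsigned int depth)
	{
		/* aligned_alloc() wants the size to be a multiple of the
		 * alignment, so round NVME_CQ_SIZE(depth) up to 4096. */
		size_t size = (NVME_CQ_SIZE(depth) + 4095) & ~(size_t)4095;

		nvmeq->cqes = aligned_alloc(4096, size);
		if (!nvmeq->cqes)
			return -1;
		memset(nvmeq->cqes, 0, NVME_CQ_SIZE(depth));
		nvmeq->q_depth = depth;
		return 0;
	}

	/* Release the ring again, as on line 311. */
	static void cq_ring_free(struct nvme_queue *nvmeq)
	{
		free(nvmeq->cqes);
		nvmeq->cqes = NULL;
	}

The 4 KiB alignment matters because the buffer's address is later handed to the controller as a page-aligned base (ACQ register or PRP1), as the remaining hits show.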
336 memset((void *)nvmeq->cqes, 0, NVME_CQ_SIZE(nvmeq->q_depth)); in nvme_init_queue()
337 flush_dcache_range((ulong)nvmeq->cqes, in nvme_init_queue()
338 (ulong)nvmeq->cqes + NVME_CQ_SIZE(nvmeq->q_depth)); in nvme_init_queue()
389 nvme_writeq((ulong)nvmeq->cqes, &dev->bar->acq); in nvme_configure_admin_queue()
415 c.create_cq.prp1 = cpu_to_le64((ulong)nvmeq->cqes); in nvme_alloc_cq()
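The remaining hits cover (re)initialization and handing the ring to the controller: nvme_init_queue() re-zeroes the ring and flushes it from the data cache so the device reads back zeroes, nvme_configure_admin_queue() programs the admin ring's address straight into the ACQ register, and nvme_alloc_cq() places an I/O ring's address in PRP1 of a Create I/O Completion Queue admin command. Below is a sketch of building that command, with the structure trimmed to the fields the listing touches, the spec-defined opcode (0x05) filled in, and the address taken by a plain cast as on line 415, which assumes an identity virtual-to-physical mapping; everything beyond those lines is an assumption.

	#include <stdint.h>

	/* Trimmed Create I/O Completion Queue command: only the fields this
	 * sketch needs; the real 64-byte submission entry and the driver's
	 * struct nvme_create_cq have additional/reserved fields. */
	struct create_cq_cmd {
		uint8_t  opcode;	/* 0x05 = Create I/O Completion Queue */
		uint16_t command_id;
		uint64_t prp1;		/* address of the contiguous CQ buffer */
		uint16_t cqid;		/* ID of the queue being created */
		uint16_t qsize;		/* zero-based queue depth */
		uint16_t cq_flags;	/* bit 0: physically contiguous */
	};

	/*
	 * Mirrors the prp1 assignment on line 415: the page-aligned cqes
	 * buffer itself becomes PRP1. cpu_to_le64() is omitted (little-endian
	 * host assumed); on a platform without a flat mapping this cast would
	 * have to be replaced by a virtual-to-physical translation.
	 */
	static void build_create_cq(struct create_cq_cmd *c, void *cqes,
				    uint16_t qid, uint16_t depth)
	{
		c->opcode = 0x05;
		c->prp1 = (uint64_t)(uintptr_t)cqes;
		c->cqid = qid;
		c->qsize = depth - 1;	/* NVMe queue sizes are zero-based */
		c->cq_flags = 0x1;	/* physically contiguous */
	}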