
Searched refs:rqd (Results 1 – 8 of 8) sorted by relevance

/drivers/lightnvm/
core.c
   201: int nvm_submit_io(struct nvm_dev *dev, struct nvm_rq *rqd)    in nvm_submit_io() (argument)
   203:     return dev->mt->submit_io(dev, rqd);    in nvm_submit_io()
   213: void nvm_addr_to_generic_mode(struct nvm_dev *dev, struct nvm_rq *rqd)    in nvm_addr_to_generic_mode() (argument)
   217:     if (rqd->nr_ppas > 1) {    in nvm_addr_to_generic_mode()
   218:         for (i = 0; i < rqd->nr_ppas; i++)    in nvm_addr_to_generic_mode()
   219:             rqd->ppa_list[i] = dev_to_generic_addr(dev,    in nvm_addr_to_generic_mode()
   220:                     rqd->ppa_list[i]);    in nvm_addr_to_generic_mode()
   222:     rqd->ppa_addr = dev_to_generic_addr(dev, rqd->ppa_addr);    in nvm_addr_to_generic_mode()
   227: void nvm_generic_to_addr_mode(struct nvm_dev *dev, struct nvm_rq *rqd)    in nvm_generic_to_addr_mode() (argument)
   231:     if (rqd->nr_ppas > 1) {    in nvm_generic_to_addr_mode()
   [all …]
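
Taken together, the core.c hits show the central addressing convention for struct nvm_rq: a request holds a single inline address in ppa_addr when nr_ppas == 1, and a separately allocated ppa_list otherwise, so every translation helper must branch on nr_ppas. A sketch of nvm_addr_to_generic_mode() reconstructed from the lines above (the else branch and the local declaration are inferred, since the search output elides them):

    void nvm_addr_to_generic_mode(struct nvm_dev *dev, struct nvm_rq *rqd)
    {
        int i;

        if (rqd->nr_ppas > 1) {
            /* Multi-sector request: translate each entry of the PPA list. */
            for (i = 0; i < rqd->nr_ppas; i++)
                rqd->ppa_list[i] = dev_to_generic_addr(dev, rqd->ppa_list[i]);
        } else {
            /* Single-sector request: the address is stored inline. */
            rqd->ppa_addr = dev_to_generic_addr(dev, rqd->ppa_addr);
        }
    }
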
rrpc.c
    23:         struct nvm_rq *rqd, unsigned long flags);
    68:     struct nvm_rq *rqd;    in rrpc_inflight_laddr_acquire() (local)
    71:     rqd = mempool_alloc(rrpc->rq_pool, GFP_ATOMIC);    in rrpc_inflight_laddr_acquire()
    72:     if (!rqd)    in rrpc_inflight_laddr_acquire()
    75:     inf = rrpc_get_inflight_rq(rqd);    in rrpc_inflight_laddr_acquire()
    77:         mempool_free(rqd, rrpc->rq_pool);    in rrpc_inflight_laddr_acquire()
    81:     return rqd;    in rrpc_inflight_laddr_acquire()
    84: static void rrpc_inflight_laddr_release(struct rrpc *rrpc, struct nvm_rq *rqd)    in rrpc_inflight_laddr_release() (argument)
    86:     struct rrpc_inflight_rq *inf = rrpc_get_inflight_rq(rqd);    in rrpc_inflight_laddr_release()
    90:     mempool_free(rqd, rrpc->rq_pool);    in rrpc_inflight_laddr_release()
    [all …]
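
These rrpc.c hits show the allocation discipline for per-I/O requests: they come from a dedicated mempool with GFP_ATOMIC (the acquire path runs in I/O context and must not sleep), and they go straight back to the pool on any failure. A hedged reconstruction of the acquire path; the lines the search output skips are filled in by assumption, including the rrpc_lock_laddr() call that can reject an overlapping in-flight range:

    static struct nvm_rq *rrpc_inflight_laddr_acquire(struct rrpc *rrpc,
                sector_t laddr, unsigned int pages)
    {
        struct nvm_rq *rqd;
        struct rrpc_inflight_rq *inf;

        /* Atomic allocation: this can run from the bio submission path. */
        rqd = mempool_alloc(rrpc->rq_pool, GFP_ATOMIC);
        if (!rqd)
            return ERR_PTR(-ENOMEM);

        inf = rrpc_get_inflight_rq(rqd);
        if (rrpc_lock_laddr(rrpc, laddr, pages, inf)) {
            /* The logical range is already in flight: back to the pool. */
            mempool_free(rqd, rrpc->rq_pool);
            return NULL;
        }

        return rqd;
    }
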
rrpc.h
   217: static inline struct rrpc_inflight_rq *rrpc_get_inflight_rq(struct nvm_rq *rqd)    in rrpc_get_inflight_rq() (argument)
   219:     struct rrpc_rq *rrqd = nvm_rq_to_pdu(rqd);    in rrpc_get_inflight_rq()
   225:                 struct nvm_rq *rqd)    in rrpc_lock_rq() (argument)
   229:     struct rrpc_inflight_rq *r = rrpc_get_inflight_rq(rqd);    in rrpc_lock_rq()
   244: static inline void rrpc_unlock_rq(struct rrpc *rrpc, struct nvm_rq *rqd)    in rrpc_unlock_rq() (argument)
   246:     struct rrpc_inflight_rq *r = rrpc_get_inflight_rq(rqd);    in rrpc_unlock_rq()
   247:     uint8_t pages = rqd->nr_ppas;    in rrpc_unlock_rq()
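
The rrpc.h helpers rely on the lightnvm PDU layout: a target's private per-request structure is co-allocated directly behind struct nvm_rq, so nvm_rq_to_pdu() is plain pointer arithmetic. A sketch under that assumption (the inflight_rq field name is illustrative, taken on trust from the rrpc code):

    /* The target's PDU sits immediately after the nvm_rq itself. */
    static inline void *nvm_rq_to_pdu(struct nvm_rq *rqd)
    {
        return rqd + 1;
    }

    static inline struct rrpc_inflight_rq *rrpc_get_inflight_rq(struct nvm_rq *rqd)
    {
        struct rrpc_rq *rrqd = nvm_rq_to_pdu(rqd);

        return &rrqd->inflight_rq;
    }
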
gennvm.c
   555: static void gen_mark_blk_bad(struct nvm_dev *dev, struct nvm_rq *rqd)    in gen_mark_blk_bad() (argument)
   559:     void *comp_bits = &rqd->ppa_status;    in gen_mark_blk_bad()
   561:     nvm_addr_to_generic_mode(dev, rqd);    in gen_mark_blk_bad()
   564:     if (rqd->nr_ppas == 1) {    in gen_mark_blk_bad()
   565:         gen_mark_blk(dev, rqd->ppa_addr, NVM_BLK_ST_BAD);    in gen_mark_blk_bad()
   570:         gen_mark_blk(dev, rqd->ppa_list[bit], NVM_BLK_ST_BAD);    in gen_mark_blk_bad()
   573: static void gen_end_io(struct nvm_rq *rqd)    in gen_end_io() (argument)
   575:     struct nvm_tgt_instance *ins = rqd->ins;    in gen_end_io()
   577:     if (rqd->error == NVM_RSP_ERR_FAILWRITE)    in gen_end_io()
   578:         gen_mark_blk_bad(rqd->dev, rqd);    in gen_end_io()
   [all …]
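
gen_end_io() shows the error path: when a write completes with NVM_RSP_ERR_FAILWRITE, the device reports which sectors failed in the rqd->ppa_status bitmap, and gen_mark_blk_bad() walks the set bits to flag the corresponding blocks. A hedged sketch of that walk; using find_next_bit() and dev->ops->max_phys_sect as the bitmap width is an assumption filled in from the surrounding kernel sources:

    static void gen_mark_blk_bad(struct nvm_dev *dev, struct nvm_rq *rqd)
    {
        int bit = -1;
        int max_secs = dev->ops->max_phys_sect;
        void *comp_bits = &rqd->ppa_status;

        /* ppa_status comes back in device format; translate it first. */
        nvm_addr_to_generic_mode(dev, rqd);

        if (rqd->nr_ppas == 1) {
            gen_mark_blk(dev, rqd->ppa_addr, NVM_BLK_ST_BAD);
            return;
        }

        /* Each set bit is a failed sector; mark its block bad. */
        while ((bit = find_next_bit(comp_bits, max_secs, bit + 1)) < max_secs)
            gen_mark_blk(dev, rqd->ppa_list[bit], NVM_BLK_ST_BAD);
    }
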
sysblk.c
   272:     struct nvm_rq rqd;    in nvm_set_bb_tbl() (local)
   280:     memset(&rqd, 0, sizeof(struct nvm_rq));    in nvm_set_bb_tbl()
   282:     nvm_set_rqd_ppalist(dev, &rqd, s->ppas, s->nr_ppas, 1);    in nvm_set_bb_tbl()
   283:     nvm_generic_to_addr_mode(dev, &rqd);    in nvm_set_bb_tbl()
   285:     ret = dev->ops->set_bb_tbl(dev, &rqd.ppa_addr, rqd.nr_ppas, type);    in nvm_set_bb_tbl()
   286:     nvm_free_rqd_ppalist(dev, &rqd);    in nvm_set_bb_tbl()
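
Unlike the I/O paths above, nvm_set_bb_tbl() builds its nvm_rq on the stack: the request never leaves the function, so no mempool is needed. The shape is zero the struct, attach a PPA list, convert to device addressing, call the driver op, free the list. A reconstruction with assumed error handling for the lines the output skips:

    static int nvm_set_bb_tbl(struct nvm_dev *dev, struct sysblk_scan *s, int type)
    {
        struct nvm_rq rqd;
        int ret;

        /* Short-lived, stack-allocated request. */
        memset(&rqd, 0, sizeof(struct nvm_rq));

        ret = nvm_set_rqd_ppalist(dev, &rqd, s->ppas, s->nr_ppas, 1);
        if (ret)
            return ret;

        /* The scan works in generic addresses; the driver wants device format. */
        nvm_generic_to_addr_mode(dev, &rqd);

        ret = dev->ops->set_bb_tbl(dev, &rqd.ppa_addr, rqd.nr_ppas, type);
        nvm_free_rqd_ppalist(dev, &rqd);

        return ret;
    }
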
/drivers/nvme/host/
lightnvm.c
   466: static inline void nvme_nvm_rqtocmd(struct request *rq, struct nvm_rq *rqd,    in nvme_nvm_rqtocmd() (argument)
   469:     c->ph_rw.opcode = rqd->opcode;    in nvme_nvm_rqtocmd()
   471:     c->ph_rw.spba = cpu_to_le64(rqd->ppa_addr.ppa);    in nvme_nvm_rqtocmd()
   472:     c->ph_rw.metadata = cpu_to_le64(rqd->dma_meta_list);    in nvme_nvm_rqtocmd()
   473:     c->ph_rw.control = cpu_to_le16(rqd->flags);    in nvme_nvm_rqtocmd()
   474:     c->ph_rw.length = cpu_to_le16(rqd->nr_ppas - 1);    in nvme_nvm_rqtocmd()
   476:     if (rqd->opcode == NVM_OP_HBWRITE || rqd->opcode == NVM_OP_HBREAD)    in nvme_nvm_rqtocmd()
   478:                 rqd->bio->bi_iter.bi_sector));    in nvme_nvm_rqtocmd()
   483:     struct nvm_rq *rqd = rq->end_io_data;    in nvme_nvm_end_io() (local)
   487:     rqd->ppa_status = le64_to_cpu(cqe->result);    in nvme_nvm_end_io()
   [all …]
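
On the NVMe side, nvme_nvm_rqtocmd() lowers the generic nvm_rq into an on-the-wire command: every multi-byte field is byte-swapped to little-endian, and length uses NVMe's zero-based count (nr_ppas - 1, so one sector is encoded as 0). A sketch with the elided lines filled in by assumption; in particular the nsid assignment and the nvme_block_nr()-based slba for hybrid commands are reconstructed, not shown in the output:

    static inline void nvme_nvm_rqtocmd(struct request *rq, struct nvm_rq *rqd,
                    struct nvme_ns *ns, struct nvme_nvm_command *c)
    {
        c->ph_rw.opcode = rqd->opcode;
        c->ph_rw.nsid = cpu_to_le32(ns->ns_id);             /* assumed */
        c->ph_rw.spba = cpu_to_le64(rqd->ppa_addr.ppa);     /* device-format PPA */
        c->ph_rw.metadata = cpu_to_le64(rqd->dma_meta_list);
        c->ph_rw.control = cpu_to_le16(rqd->flags);
        c->ph_rw.length = cpu_to_le16(rqd->nr_ppas - 1);    /* zero-based count */

        /* Hybrid read/write also carries the host's logical sector. */
        if (rqd->opcode == NVM_OP_HBWRITE || rqd->opcode == NVM_OP_HBREAD)
            c->hb_rw.slba = cpu_to_le64(nvme_block_nr(ns,
                        rqd->bio->bi_iter.bi_sector));
    }

The completion side mirrors this: per the hits at lines 483 and 487, nvme_nvm_end_io() recovers the rqd from rq->end_io_data and byte-swaps the per-sector status out of the CQE (rqd->ppa_status = le64_to_cpu(cqe->result)).
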
/drivers/block/
null_blk.c
   421:     struct nvm_rq *rqd = rq->end_io_data;    in null_lnvm_end_io() (local)
   423:     nvm_end_io(rqd, error);    in null_lnvm_end_io()
   428: static int null_lnvm_submit_io(struct nvm_dev *dev, struct nvm_rq *rqd)    in null_lnvm_submit_io() (argument)
   432:     struct bio *bio = rqd->bio;    in null_lnvm_submit_io()
   448:     rq->end_io_data = rqd;    in null_lnvm_submit_io()
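
null_blk shows the standard cookie round-trip through the block layer: the submit path stashes the nvm_rq in rq->end_io_data, and the completion callback recovers it and hands it to nvm_end_io(). A hedged sketch of that pairing; the request setup between lines 432 and 448 is elided in the output, so the blk_mq_alloc_request()/blk_execute_rq_nowait() plumbing here is an assumption:

    static void null_lnvm_end_io(struct request *rq, int error)
    {
        /* Recover the cookie stashed at submit time. */
        struct nvm_rq *rqd = rq->end_io_data;

        nvm_end_io(rqd, error);
        blk_put_request(rq);
    }

    static int null_lnvm_submit_io(struct nvm_dev *dev, struct nvm_rq *rqd)
    {
        struct request_queue *q = dev->q;
        struct request *rq;
        struct bio *bio = rqd->bio;

        rq = blk_mq_alloc_request(q, bio_data_dir(bio), 0);
        if (IS_ERR(rq))
            return -ENOMEM;

        /* ... remaining rq setup from the bio elided ... */

        rq->end_io_data = rqd;    /* the cookie for null_lnvm_end_io() */

        blk_execute_rq_nowait(q, NULL, rq, 0, null_lnvm_end_io);
        return 0;
    }
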
/drivers/dma/
pl330.c
   534:     struct list_head rqd;    (member)
  1646:     list_add_tail(&descdone->rqd, &pl330->req_done);    in pl330_update()
  1651:     list_for_each_entry_safe(descdone, tmp, &pl330->req_done, rqd) {    in pl330_update()
  1652:         list_del(&descdone->rqd);    in pl330_update()
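
This last result is a false friend: in the PL330 DMA driver, rqd has nothing to do with lightnvm's struct nvm_rq; it is simply the list_head that threads completed descriptors onto pl330->req_done. The drain in pl330_update() uses the _safe iterator because entries are unlinked mid-walk. A minimal illustration of the same idiom (drain_req_done() is a hypothetical wrapper, not a pl330 function):

    #include <linux/list.h>

    struct dma_pl330_desc {
        struct list_head rqd;    /* links this desc onto the req_done list */
        /* ... remaining pl330 descriptor fields elided ... */
    };

    static void drain_req_done(struct list_head *req_done)
    {
        struct dma_pl330_desc *descdone, *tmp;

        /* _safe keeps a lookahead pointer, so list_del() is legal mid-loop. */
        list_for_each_entry_safe(descdone, tmp, req_done, rqd) {
            list_del(&descdone->rqd);
            /* ... complete descdone back to its client ... */
        }
    }
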