Lines matching full:cmdq (erdma command-queue implementation)
9 static void arm_cmdq_cq(struct erdma_cmdq *cmdq) in arm_cmdq_cq() argument
11 struct erdma_dev *dev = container_of(cmdq, struct erdma_dev, cmdq); in arm_cmdq_cq()
12 u64 db_data = FIELD_PREP(ERDMA_CQDB_CI_MASK, cmdq->cq.ci) | in arm_cmdq_cq()
14 FIELD_PREP(ERDMA_CQDB_CMDSN_MASK, cmdq->cq.cmdsn) | in arm_cmdq_cq()
15 FIELD_PREP(ERDMA_CQDB_IDX_MASK, cmdq->cq.cmdsn); in arm_cmdq_cq()
17 *cmdq->cq.db_record = db_data; in arm_cmdq_cq()
20 atomic64_inc(&cmdq->cq.armed_num); in arm_cmdq_cq()
23 static void kick_cmdq_db(struct erdma_cmdq *cmdq) in kick_cmdq_db() argument
25 struct erdma_dev *dev = container_of(cmdq, struct erdma_dev, cmdq); in kick_cmdq_db()
26 u64 db_data = FIELD_PREP(ERDMA_CMD_HDR_WQEBB_INDEX_MASK, cmdq->sq.pi); in kick_cmdq_db()
28 *cmdq->sq.db_record = db_data; in kick_cmdq_db()
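Editor's note: the two doorbell helpers above pack several fields into one 64-bit value with FIELD_PREP(), store it in a shared db_record slot, and then write it to the device. Below is a minimal user-space sketch of that bit-packing idea only; the field positions are made up and are not the real ERDMA_CQDB_*/ERDMA_CMD_HDR_* masks.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical field layout; the real ERDMA masks differ. */
    #define DB_CI_MASK    0x00000000ffffffffull  /* consumer index       */
    #define DB_ARM_MASK   0x0000000100000000ull  /* request an interrupt */
    #define DB_CMDSN_MASK 0x0000060000000000ull  /* command sequence no. */

    /* Poor man's FIELD_PREP(): shift the value into the mask's position. */
    static uint64_t field_prep(uint64_t mask, uint64_t val)
    {
        return (val << __builtin_ctzll(mask)) & mask;
    }

    int main(void)
    {
        uint64_t db = field_prep(DB_CI_MASK, 42) |
                      field_prep(DB_ARM_MASK, 1) |
                      field_prep(DB_CMDSN_MASK, 3);

        /* In the driver this value is stored to *db_record and then
         * written to a device doorbell register; here we just print it. */
        printf("doorbell = 0x%016llx\n", (unsigned long long)db);
        return 0;
    }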
32 static struct erdma_comp_wait *get_comp_wait(struct erdma_cmdq *cmdq) in get_comp_wait() argument
36 spin_lock(&cmdq->lock); in get_comp_wait()
37 comp_idx = find_first_zero_bit(cmdq->comp_wait_bitmap, in get_comp_wait()
38 cmdq->max_outstandings); in get_comp_wait()
39 if (comp_idx == cmdq->max_outstandings) { in get_comp_wait()
40 spin_unlock(&cmdq->lock); in get_comp_wait()
44 __set_bit(comp_idx, cmdq->comp_wait_bitmap); in get_comp_wait()
45 spin_unlock(&cmdq->lock); in get_comp_wait()
47 return &cmdq->wait_pool[comp_idx]; in get_comp_wait()
50 static void put_comp_wait(struct erdma_cmdq *cmdq, in put_comp_wait() argument
55 cmdq->wait_pool[comp_wait->ctx_id].cmd_status = ERDMA_CMD_STATUS_INIT; in put_comp_wait()
56 spin_lock(&cmdq->lock); in put_comp_wait()
57 used = __test_and_clear_bit(comp_wait->ctx_id, cmdq->comp_wait_bitmap); in put_comp_wait()
58 spin_unlock(&cmdq->lock); in put_comp_wait()
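Editor's note: get_comp_wait()/put_comp_wait() hand out completion-wait slots from a fixed pool, using a bitmap guarded by cmdq->lock to mark which slots are busy. A stand-alone sketch of that allocator pattern follows (plain C with a pthread mutex instead of a kernel spinlock; all names are illustrative).

    #include <pthread.h>
    #include <stdio.h>

    #define MAX_OUTSTANDING 8

    struct wait_slot { int ctx_id; int status; };

    static struct wait_slot pool[MAX_OUTSTANDING];
    static unsigned long busy_bitmap;        /* bit i set => slot i in use */
    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

    static struct wait_slot *get_slot(void)
    {
        pthread_mutex_lock(&lock);
        for (int i = 0; i < MAX_OUTSTANDING; i++) {
            if (!(busy_bitmap & (1ul << i))) {   /* find_first_zero_bit() */
                busy_bitmap |= 1ul << i;         /* __set_bit()           */
                pthread_mutex_unlock(&lock);
                pool[i].ctx_id = i;
                return &pool[i];
            }
        }
        pthread_mutex_unlock(&lock);
        return NULL;                             /* pool exhausted */
    }

    static void put_slot(struct wait_slot *s)
    {
        s->status = 0;                           /* reset, like cmd_status */
        pthread_mutex_lock(&lock);
        busy_bitmap &= ~(1ul << s->ctx_id);      /* __test_and_clear_bit() */
        pthread_mutex_unlock(&lock);
    }

    int main(void)
    {
        struct wait_slot *s = get_slot();
        printf("got slot %d\n", s ? s->ctx_id : -1);
        if (s)
            put_slot(s);
        return 0;
    }

In the driver, exhaustion is normally prevented up front: erdma_post_cmd_wait() takes a cmdq->credits semaphore before calling get_comp_wait(), so at most max_outstandings contexts can ever be in flight.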
64 struct erdma_cmdq *cmdq) in erdma_cmdq_wait_res_init() argument
68 cmdq->wait_pool = in erdma_cmdq_wait_res_init()
69 devm_kcalloc(&dev->pdev->dev, cmdq->max_outstandings, in erdma_cmdq_wait_res_init()
71 if (!cmdq->wait_pool) in erdma_cmdq_wait_res_init()
74 spin_lock_init(&cmdq->lock); in erdma_cmdq_wait_res_init()
75 cmdq->comp_wait_bitmap = devm_bitmap_zalloc( in erdma_cmdq_wait_res_init()
76 &dev->pdev->dev, cmdq->max_outstandings, GFP_KERNEL); in erdma_cmdq_wait_res_init()
77 if (!cmdq->comp_wait_bitmap) in erdma_cmdq_wait_res_init()
80 for (i = 0; i < cmdq->max_outstandings; i++) { in erdma_cmdq_wait_res_init()
81 init_completion(&cmdq->wait_pool[i].wait_event); in erdma_cmdq_wait_res_init()
82 cmdq->wait_pool[i].ctx_id = i; in erdma_cmdq_wait_res_init()
90 struct erdma_cmdq *cmdq = &dev->cmdq; in erdma_cmdq_sq_init() local
91 struct erdma_cmdq_sq *sq = &cmdq->sq; in erdma_cmdq_sq_init()
95 sq->depth = cmdq->max_outstandings * sq->wqebb_cnt; in erdma_cmdq_sq_init()
122 struct erdma_cmdq *cmdq = &dev->cmdq; in erdma_cmdq_cq_init() local
123 struct erdma_cmdq_cq *cq = &cmdq->cq; in erdma_cmdq_cq_init()
126 cq->depth = cmdq->sq.depth; in erdma_cmdq_cq_init()
153 struct erdma_cmdq *cmdq = &dev->cmdq; in erdma_cmdq_eq_init() local
154 struct erdma_eq *eq = &cmdq->eq; in erdma_cmdq_eq_init()
157 eq->depth = cmdq->max_outstandings; in erdma_cmdq_eq_init()
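Editor's note: the three *_init() fragments show how the queues are sized relative to each other: the SQ holds max_outstandings commands of sq->wqebb_cnt building blocks each, the CQ mirrors the SQ depth, and the EQ needs only one slot per outstanding command. A small worked example with illustrative numbers (the real ERDMA_CMDQ_MAX_OUTSTANDING and wqebb_cnt values are not shown in this listing):

    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values only. */
        unsigned int max_outstandings = 128;
        unsigned int wqebb_cnt = 4;    /* WQE building blocks per command */

        unsigned int sq_depth = max_outstandings * wqebb_cnt;  /* 512 WQEBBs      */
        unsigned int cq_depth = sq_depth;                      /* mirrors the SQ  */
        unsigned int eq_depth = max_outstandings;              /* 1 EQE per cmd   */

        printf("sq=%u cq=%u eq=%u\n", sq_depth, cq_depth, eq_depth);
        return 0;
    }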
185 struct erdma_cmdq *cmdq = &dev->cmdq; in erdma_cmdq_init() local
188 cmdq->max_outstandings = ERDMA_CMDQ_MAX_OUTSTANDING; in erdma_cmdq_init()
189 cmdq->use_event = false; in erdma_cmdq_init()
191 sema_init(&cmdq->credits, cmdq->max_outstandings); in erdma_cmdq_init()
193 err = erdma_cmdq_wait_res_init(dev, cmdq); in erdma_cmdq_init()
209 set_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state); in erdma_cmdq_init()
215 (cmdq->cq.depth << CQE_SHIFT) + in erdma_cmdq_init()
217 cmdq->cq.qbuf, cmdq->cq.qbuf_dma_addr); in erdma_cmdq_init()
221 (cmdq->sq.depth << SQEBB_SHIFT) + in erdma_cmdq_init()
223 cmdq->sq.qbuf, cmdq->sq.qbuf_dma_addr); in erdma_cmdq_init()
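Editor's note: erdma_cmdq_init() sets up the wait resources and the three queues, and the dma_free_coherent() fragments above are its error-unwind path: queues that were already created are torn down in reverse order when a later step fails. A generic sketch of that goto-unwind pattern, with stubbed setup/teardown functions standing in for the driver's real ones:

    #include <stdio.h>

    /* Stubs standing in for erdma_cmdq_{sq,cq,eq}_init() and their teardown. */
    static int  setup_sq(void)   { return 0; }
    static int  setup_cq(void)   { return 0; }
    static int  setup_eq(void)   { return -1; }  /* pretend the EQ setup fails */
    static void destroy_sq(void) { puts("free sq buffer"); }
    static void destroy_cq(void) { puts("free cq buffer"); }

    static int cmdq_init(void)
    {
        int err;

        err = setup_sq();
        if (err)
            return err;
        err = setup_cq();
        if (err)
            goto err_destroy_sq;
        err = setup_eq();
        if (err)
            goto err_destroy_cq;
        return 0;

    err_destroy_cq:
        destroy_cq();           /* undo in reverse order of creation */
    err_destroy_sq:
        destroy_sq();
        return err;
    }

    int main(void)
    {
        printf("cmdq_init() = %d\n", cmdq_init());
        return 0;
    }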
230 /* After the device is initialized successfully, switch the cmdq to event mode. */ in erdma_finish_cmdq_init()
231 dev->cmdq.use_event = true; in erdma_finish_cmdq_init()
232 arm_cmdq_cq(&dev->cmdq); in erdma_finish_cmdq_init()
237 struct erdma_cmdq *cmdq = &dev->cmdq; in erdma_cmdq_destroy() local
239 clear_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state); in erdma_cmdq_destroy()
242 (cmdq->eq.depth << EQE_SHIFT) + in erdma_cmdq_destroy()
244 cmdq->eq.qbuf, cmdq->eq.qbuf_dma_addr); in erdma_cmdq_destroy()
246 (cmdq->sq.depth << SQEBB_SHIFT) + in erdma_cmdq_destroy()
248 cmdq->sq.qbuf, cmdq->sq.qbuf_dma_addr); in erdma_cmdq_destroy()
250 (cmdq->cq.depth << CQE_SHIFT) + in erdma_cmdq_destroy()
252 cmdq->cq.qbuf, cmdq->cq.qbuf_dma_addr); in erdma_cmdq_destroy()
255 static void *get_next_valid_cmdq_cqe(struct erdma_cmdq *cmdq) in get_next_valid_cmdq_cqe() argument
257 __be32 *cqe = get_queue_entry(cmdq->cq.qbuf, cmdq->cq.ci, in get_next_valid_cmdq_cqe()
258 cmdq->cq.depth, CQE_SHIFT); in get_next_valid_cmdq_cqe()
262 return owner ^ !!(cmdq->cq.ci & cmdq->cq.depth) ? cqe : NULL; in get_next_valid_cmdq_cqe()
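Editor's note: get_next_valid_cmdq_cqe() decides whether the CQE at cq.ci has really been written by the device using an owner (phase) bit. The consumer index grows monotonically and only its low bits address the buffer, so "ci & depth" flips each time the ring wraps, and `owner ^ !!(ci & depth)` is true exactly for entries written on the current lap. A stand-alone demo of that test, assuming the depth is a power of two:

    #include <stdio.h>

    #define DEPTH 8   /* queue depth, a power of two */

    /* The consumer-side validity test applied to each CQE. */
    static int cqe_valid(unsigned int owner_bit, unsigned int ci)
    {
        return owner_bit ^ !!(ci & DEPTH);
    }

    int main(void)
    {
        /*
         * The device writes owner = 1 on its first pass over the ring,
         * 0 on the second pass, 1 on the third, and so on.  A stale
         * entry left over from the previous lap fails the test.
         */
        unsigned int fresh_owner[3] = { 1, 0, 1 };  /* owner written this lap */

        for (unsigned int lap = 0; lap < 3; lap++) {
            unsigned int ci = lap * DEPTH + 2;      /* some index in this lap */
            unsigned int stale = fresh_owner[lap] ^ 1;

            printf("lap %u: fresh owner=%u -> valid=%d, stale owner=%u -> valid=%d\n",
                   lap, fresh_owner[lap], cqe_valid(fresh_owner[lap], ci),
                   stale, cqe_valid(stale, ci));
        }
        return 0;
    }

The same masked-index trick is used when turning an index into a buffer slot (get_queue_entry with idx & (depth - 1)); the extra "& depth" bit is what carries the phase.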
265 static void push_cmdq_sqe(struct erdma_cmdq *cmdq, u64 *req, size_t req_len, in push_cmdq_sqe() argument
273 comp_wait->sq_pi = cmdq->sq.pi; in push_cmdq_sqe()
275 wqe = get_queue_entry(cmdq->sq.qbuf, cmdq->sq.pi, cmdq->sq.depth, in push_cmdq_sqe()
279 cmdq->sq.pi += cmdq->sq.wqebb_cnt; in push_cmdq_sqe()
280 hdr |= FIELD_PREP(ERDMA_CMD_HDR_WQEBB_INDEX_MASK, cmdq->sq.pi) | in push_cmdq_sqe()
283 FIELD_PREP(ERDMA_CMD_HDR_WQEBB_CNT_MASK, cmdq->sq.wqebb_cnt - 1); in push_cmdq_sqe()
286 kick_cmdq_db(cmdq); in push_cmdq_sqe()
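Editor's note: push_cmdq_sqe() is the producer side: it records the current producer index in the wait context, copies the request into the next free WQE slot, advances sq.pi by the fixed number of building blocks per command, folds the index and the block count (minus one) into the command header, and rings the doorbell. A simplified ring-producer sketch under the same assumptions (power-of-two depth, fixed wqebb_cnt; sizes and helper names are illustrative):

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define SQEBB_SIZE 64                    /* bytes per WQE building block */
    #define WQEBB_CNT  4                     /* building blocks per command  */
    #define SQ_DEPTH   (8 * WQEBB_CNT)       /* total building blocks        */

    static uint8_t  sq_buf[SQ_DEPTH * SQEBB_SIZE];
    static uint16_t sq_pi;                   /* producer index, keeps growing */

    /* Map a (possibly wrapped) index to its slot in the ring buffer. */
    static void *queue_entry(uint16_t idx)
    {
        return &sq_buf[(idx & (SQ_DEPTH - 1)) * SQEBB_SIZE];
    }

    static void push_sqe(const void *req, size_t req_len)
    {
        void *wqe = queue_entry(sq_pi);

        memcpy(wqe, req, req_len);           /* copy the command body        */
        sq_pi += WQEBB_CNT;                  /* reserve its building blocks  */

        /* The real driver also folds the new index and WQEBB_CNT - 1 into
         * the 64-bit command header and then rings the SQ doorbell. */
        printf("posted %zu bytes, new pi=%u (slot %u)\n",
               req_len, sq_pi, sq_pi & (SQ_DEPTH - 1));
    }

    int main(void)
    {
        uint64_t req[2] = { 0x1234, 0x5678 };

        push_sqe(req, sizeof(req));
        push_sqe(req, sizeof(req));
        return 0;
    }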
289 static int erdma_poll_single_cmd_completion(struct erdma_cmdq *cmdq) in erdma_poll_single_cmd_completion() argument
297 cqe = get_next_valid_cmdq_cqe(cmdq); in erdma_poll_single_cmd_completion()
301 cmdq->cq.ci++; in erdma_poll_single_cmd_completion()
307 sqe = get_queue_entry(cmdq->sq.qbuf, sqe_idx, cmdq->sq.depth, in erdma_poll_single_cmd_completion()
310 comp_wait = &cmdq->wait_pool[ctx_id]; in erdma_poll_single_cmd_completion()
316 cmdq->sq.ci += cmdq->sq.wqebb_cnt; in erdma_poll_single_cmd_completion()
320 if (cmdq->use_event) in erdma_poll_single_cmd_completion()
326 static void erdma_polling_cmd_completions(struct erdma_cmdq *cmdq) in erdma_polling_cmd_completions() argument
331 spin_lock_irqsave(&cmdq->cq.lock, flags); in erdma_polling_cmd_completions()
336 for (comp_num = 0; comp_num < cmdq->max_outstandings; comp_num++) in erdma_polling_cmd_completions()
337 if (erdma_poll_single_cmd_completion(cmdq)) in erdma_polling_cmd_completions()
340 if (comp_num && cmdq->use_event) in erdma_polling_cmd_completions()
341 arm_cmdq_cq(cmdq); in erdma_polling_cmd_completions()
343 spin_unlock_irqrestore(&cmdq->cq.lock, flags); in erdma_polling_cmd_completions()
346 void erdma_cmdq_completion_handler(struct erdma_cmdq *cmdq) in erdma_cmdq_completion_handler() argument
350 if (!test_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state) || in erdma_cmdq_completion_handler()
351 !cmdq->use_event) in erdma_cmdq_completion_handler()
354 while (get_next_valid_eqe(&cmdq->eq)) { in erdma_cmdq_completion_handler()
355 cmdq->eq.ci++; in erdma_cmdq_completion_handler()
360 cmdq->cq.cmdsn++; in erdma_cmdq_completion_handler()
361 erdma_polling_cmd_completions(cmdq); in erdma_cmdq_completion_handler()
364 notify_eq(&cmdq->eq); in erdma_cmdq_completion_handler()
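Editor's note: in event mode the interrupt path runs erdma_cmdq_completion_handler(): it drains the posted event-queue entries, bumps cq.cmdsn, polls the command CQ for completions (waking the sleeping callers), and finally notifies the EQ. A compact sketch of that consume-then-dispatch flow with stand-in helpers (not the driver's real API):

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-ins for get_next_valid_eqe()/erdma_polling_cmd_completions()/notify_eq(). */
    static int pending_eqes = 3;

    static bool next_valid_eqe(void)     { return pending_eqes-- > 0; }
    static void poll_cq(void)            { puts("poll cmdq CQ, wake waiters"); }
    static void ack_eq(unsigned int ci)  { printf("write EQ doorbell, ci=%u\n", ci); }

    int main(void)
    {
        unsigned int eq_ci = 0, cmdsn = 0;
        bool got_event = false;

        while (next_valid_eqe()) {   /* drain all posted event entries */
            eq_ci++;
            got_event = true;
        }

        if (got_event) {
            cmdsn++;                 /* new sequence number for the CQ doorbell */
            poll_cq();               /* reap CQEs and complete() the waiters    */
        }

        ack_eq(eq_ci);               /* acknowledge the consumed EQEs */
        return 0;
    }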
368 struct erdma_cmdq *cmdq, u32 timeout) in erdma_poll_cmd_completion() argument
373 erdma_polling_cmd_completions(cmdq); in erdma_poll_cmd_completion()
387 struct erdma_cmdq *cmdq, u32 timeout) in erdma_wait_cmd_completion() argument
395 spin_lock_irqsave(&cmdq->cq.lock, flags); in erdma_wait_cmd_completion()
397 spin_unlock_irqrestore(&cmdq->cq.lock, flags); in erdma_wait_cmd_completion()
410 int erdma_post_cmd_wait(struct erdma_cmdq *cmdq, void *req, u32 req_size, in erdma_post_cmd_wait() argument
416 if (!test_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state)) in erdma_post_cmd_wait()
419 down(&cmdq->credits); in erdma_post_cmd_wait()
421 comp_wait = get_comp_wait(cmdq); in erdma_post_cmd_wait()
423 clear_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state); in erdma_post_cmd_wait()
424 set_bit(ERDMA_CMDQ_STATE_CTX_ERR_BIT, &cmdq->state); in erdma_post_cmd_wait()
425 up(&cmdq->credits); in erdma_post_cmd_wait()
429 spin_lock(&cmdq->sq.lock); in erdma_post_cmd_wait()
430 push_cmdq_sqe(cmdq, req, req_size, comp_wait); in erdma_post_cmd_wait()
431 spin_unlock(&cmdq->sq.lock); in erdma_post_cmd_wait()
433 if (cmdq->use_event) in erdma_post_cmd_wait()
434 ret = erdma_wait_cmd_completion(comp_wait, cmdq, in erdma_post_cmd_wait()
437 ret = erdma_poll_cmd_completion(comp_wait, cmdq, in erdma_post_cmd_wait()
441 set_bit(ERDMA_CMDQ_STATE_TIMEOUT_BIT, &cmdq->state); in erdma_post_cmd_wait()
442 clear_bit(ERDMA_CMDQ_STATE_OK_BIT, &cmdq->state); in erdma_post_cmd_wait()
453 put_comp_wait(cmdq, comp_wait); in erdma_post_cmd_wait()
456 up(&cmdq->credits); in erdma_post_cmd_wait()
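Editor's note: erdma_post_cmd_wait() ties the pieces together: take a credit so the SQ and the wait pool can never overflow, grab a wait context, post the WQE under sq.lock, then either sleep on the completion (event mode) or busy-poll the CQ (before the event path is up), and on timeout mark the whole cmdq broken. A condensed control-flow sketch with stubbed helpers; the error codes and helper names are illustrative, not the driver's.

    #include <errno.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Stubbed state and helpers; the driver uses a semaphore, a bitmap pool,
     * spinlocks and a wait-for-completion object instead. */
    static bool cmdq_ok = true, use_event = true;

    static void take_credit(void)     {                      }  /* down(&credits) */
    static void give_credit(void)     {                      }  /* up(&credits)   */
    static int  get_wait_ctx(void)    { return 0;            }  /* slot id or -1  */
    static void put_wait_ctx(int id)  { (void)id;            }
    static void push_sqe(int id)      { (void)id;            }  /* under sq.lock  */
    static int  wait_for_event(int id){ (void)id; return 0;  }
    static int  poll_for_cqe(int id)  { (void)id; return 0;  }

    static int post_cmd_wait(void)
    {
        int ret, ctx;

        if (!cmdq_ok)
            return -ENODEV;          /* cmdq already marked broken */

        take_credit();               /* bounds the number of outstanding cmds */
        ctx = get_wait_ctx();
        if (ctx < 0) {
            give_credit();
            return -EBUSY;
        }

        push_sqe(ctx);               /* copy WQE, advance pi, ring doorbell */

        ret = use_event ? wait_for_event(ctx)  /* sleep until IRQ path completes us */
                        : poll_for_cqe(ctx);   /* or busy-poll the CQ directly      */
        if (ret == -ETIMEDOUT)
            cmdq_ok = false;         /* device stopped answering: fail fast */

        put_wait_ctx(ctx);
        give_credit();
        return ret;
    }

    int main(void)
    {
        printf("post_cmd_wait() = %d\n", post_cmd_wait());
        return 0;
    }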