
Searched refs:dq (Results 1 – 25 of 34) sorted by relevance


/drivers/soc/fsl/dpio/
qbman-portal.h
212 void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq);
214 int qbman_result_has_new_result(struct qbman_swp *p, const struct dpaa2_dq *dq);
301 static inline int qbman_result_is_DQ(const struct dpaa2_dq *dq) in qbman_result_is_DQ() argument
303 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_DQ); in qbman_result_is_DQ()
311 static inline int qbman_result_is_SCN(const struct dpaa2_dq *dq) in qbman_result_is_SCN() argument
313 return !qbman_result_is_DQ(dq); in qbman_result_is_SCN()
317 static inline int qbman_result_is_FQDAN(const struct dpaa2_dq *dq) in qbman_result_is_FQDAN() argument
319 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_FQDAN); in qbman_result_is_FQDAN()
323 static inline int qbman_result_is_CDAN(const struct dpaa2_dq *dq) in qbman_result_is_CDAN() argument
325 return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_CDAN); in qbman_result_is_CDAN()
[all …]
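The inline helpers matched above classify a dequeue-response entry by masking its verb byte against the QBMAN_RESULT_* codes, and treat anything that is not a frame dequeue as a state-change notification. Below is a minimal standalone C sketch of that verb-mask pattern; the struct layout and the RESULT_* values are simplified assumptions for illustration, not the real QBMAN definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for QBMAN_RESULT_MASK and the QBMAN_RESULT_* codes. */
#define RESULT_MASK   0x7f
#define RESULT_DQ     0x60
#define RESULT_FQDAN  0x67

/* Simplified model of a dequeue-response entry: only the verb byte matters here. */
struct dq_entry {
        uint8_t verb;
};

static int result_is_dq(const struct dq_entry *dq)
{
        return (dq->verb & RESULT_MASK) == RESULT_DQ;
}

/* Anything that is not a frame dequeue is treated as a state-change notification. */
static int result_is_scn(const struct dq_entry *dq)
{
        return !result_is_dq(dq);
}

static int result_is_fqdan(const struct dq_entry *dq)
{
        return (dq->verb & RESULT_MASK) == RESULT_FQDAN;
}

int main(void)
{
        struct dq_entry e = { .verb = RESULT_FQDAN };

        printf("is_dq=%d is_scn=%d is_fqdan=%d\n",
               result_is_dq(&e), result_is_scn(&e), result_is_fqdan(&e));
        return 0;
}
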
dpio-service.c
192 const struct dpaa2_dq *dq; in dpaa2_io_irq() local
202 dq = qbman_swp_dqrr_next(swp); in dpaa2_io_irq()
203 while (dq) { in dpaa2_io_irq()
204 if (qbman_result_is_SCN(dq)) { in dpaa2_io_irq()
208 q64 = qbman_result_SCN_ctx(dq); in dpaa2_io_irq()
214 qbman_swp_dqrr_consume(swp, dq); in dpaa2_io_irq()
218 dq = qbman_swp_dqrr_next(swp); in dpaa2_io_irq()
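dpaa2_io_irq() above shows the usual peek/handle/consume loop over the dequeue response ring: fetch the next entry, dispatch on its type, consume it, then peek again until the ring is empty. The sketch below reproduces just that loop shape with a plain array standing in for the hardware DQRR; the names and types here are illustrative, not the dpio API.

#include <stddef.h>
#include <stdio.h>

/* Toy ring standing in for the hardware dequeue response ring. */
struct ring {
        int entries[4];
        size_t next;
        size_t count;
};

/* Return the next unconsumed entry, or NULL when the ring is empty. */
static int *ring_next(struct ring *r)
{
        return r->next < r->count ? &r->entries[r->next] : NULL;
}

static void ring_consume(struct ring *r, int *entry)
{
        (void)entry;            /* the real consume tells the hardware which index is done */
        r->next++;
}

int main(void)
{
        struct ring r = { .entries = { 10, 20, 30 }, .next = 0, .count = 3 };
        int *e = ring_next(&r);

        /* Same shape as the irq handler: handle, consume, then peek again. */
        while (e) {
                printf("handling entry %d\n", *e);
                ring_consume(&r, e);
                e = ring_next(&r);
        }
        return 0;
}
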
qbman-portal.c
1213 verb = p->dq.verb; in qbman_swp_dqrr_next_direct()
1241 flags = p->dq.stat; in qbman_swp_dqrr_next_direct()
1305 verb = p->dq.verb; in qbman_swp_dqrr_next_mem_back()
1333 flags = p->dq.stat; in qbman_swp_dqrr_next_mem_back()
1351 void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq) in qbman_swp_dqrr_consume() argument
1353 qbman_write_register(s, QBMAN_CINH_SWP_DCAP, QBMAN_IDX_FROM_DQRR(dq)); in qbman_swp_dqrr_consume()
1373 int qbman_result_has_new_result(struct qbman_swp *s, const struct dpaa2_dq *dq) in qbman_result_has_new_result() argument
1375 if (dq->dq.tok != QMAN_DQ_TOKEN_VALID) in qbman_result_has_new_result()
1383 ((struct dpaa2_dq *)dq)->dq.tok = 0; in qbman_result_has_new_result()
1390 if (s->vdq.storage == dq) { in qbman_result_has_new_result()
/drivers/net/ethernet/cavium/liquidio/
cn66xx_regs.h
311 #define CN6XXX_DMA_CNT(dq) \ argument
312 (CN6XXX_DMA_CNT_START + ((dq) * CN6XXX_DMA_OFFSET))
314 #define CN6XXX_DMA_INT_LEVEL(dq) \ argument
315 (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET))
317 #define CN6XXX_DMA_PKT_INT_LEVEL(dq) \ argument
318 (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET))
320 #define CN6XXX_DMA_TIME_INT_LEVEL(dq) \ argument
321 (CN6XXX_DMA_INT_LEVEL_START + 4 + ((dq) * CN6XXX_DMA_OFFSET))
323 #define CN6XXX_DMA_TIM(dq) \ argument
324 (CN6XXX_DMA_TIM_START + ((dq) * CN6XXX_DMA_OFFSET))
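Each of these liquidio macros computes a per-DMA-queue register address as a base plus dq times a fixed per-queue stride, with the time threshold offset by 4 bytes from the packet threshold; the CN23XX header below repeats the same scheme. Here is a tiny standalone sketch of that addressing pattern, using made-up base and stride values:

#include <stdio.h>

/* Hypothetical base addresses and per-queue stride, for illustration only. */
#define DMA_CNT_START        0x0400ULL
#define DMA_INT_LEVEL_START  0x03E0ULL
#define DMA_OFFSET           0x20ULL

#define DMA_CNT(dq)            (DMA_CNT_START + ((dq) * DMA_OFFSET))
#define DMA_PKT_INT_LEVEL(dq)  (DMA_INT_LEVEL_START + ((dq) * DMA_OFFSET))
/* The time threshold sits 4 bytes above the packet threshold. */
#define DMA_TIME_INT_LEVEL(dq) (DMA_INT_LEVEL_START + 4 + ((dq) * DMA_OFFSET))

int main(void)
{
        for (unsigned int dq = 0; dq < 3; dq++)
                printf("dq %u: cnt=0x%llx pkt_lvl=0x%llx time_lvl=0x%llx\n",
                       dq,
                       (unsigned long long)DMA_CNT(dq),
                       (unsigned long long)DMA_PKT_INT_LEVEL(dq),
                       (unsigned long long)DMA_TIME_INT_LEVEL(dq));
        return 0;
}
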
cn23xx_pf_regs.h
366 #define CN23XX_DMA_CNT(dq) \ argument
367 (CN23XX_DMA_CNT_START + ((dq) * CN23XX_DMA_OFFSET))
369 #define CN23XX_DMA_INT_LEVEL(dq) \ argument
370 (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET))
372 #define CN23XX_DMA_PKT_INT_LEVEL(dq) \ argument
373 (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET))
375 #define CN23XX_DMA_TIME_INT_LEVEL(dq) \ argument
376 (CN23XX_DMA_INT_LEVEL_START + 4 + ((dq) * CN23XX_DMA_OFFSET))
378 #define CN23XX_DMA_TIM(dq) \ argument
379 (CN23XX_DMA_TIM_START + ((dq) * CN23XX_DMA_OFFSET))
/drivers/s390/crypto/
zcrypt_msgtype50.c
108 unsigned char dq[64]; member
121 unsigned char dq[128]; member
134 unsigned char dq[256]; member
272 unsigned char *p, *q, *dp, *dq, *u, *inp; in ICACRT_msg_to_type50CRT_msg() local
294 dq = crb1->dq + sizeof(crb1->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
308 dq = crb2->dq + sizeof(crb2->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
323 dq = crb3->dq + sizeof(crb3->dq) - short_len; in ICACRT_msg_to_type50CRT_msg()
336 copy_from_user(dq, crt->bq_key, short_len) || in ICACRT_msg_to_type50CRT_msg()
zcrypt_cex2a.h
80 unsigned char dq[64]; member
93 unsigned char dq[128]; member
106 unsigned char dq[256]; member
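In both s390 crypto drivers matched here, dq is the RSA-CRT exponent d mod (q - 1), held right-aligned in fixed-size buffers of the three sizes shown (64, 128 and 256 bytes). For reference, the textbook CRT private-key operation these fields feed is, in LaTeX notation:

\begin{aligned}
m_1 &= c^{d_p} \bmod p, \qquad d_p = d \bmod (p-1) \\
m_2 &= c^{d_q} \bmod q, \qquad d_q = d \bmod (q-1) \\
h   &= q_{\mathrm{inv}} \cdot (m_1 - m_2) \bmod p \\
m   &= m_2 + h \cdot q
\end{aligned}

The drivers only pack these components into the request message; the recombination itself is left to the crypto adapter.
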
/drivers/scsi/hisi_sas/
hisi_sas_main.c
422 struct hisi_sas_dq *dq; in hisi_sas_task_prep() local
454 *dq_pointer = dq = &hisi_hba->dq[dq_index]; in hisi_sas_task_prep()
460 *dq_pointer = dq = &hisi_hba->dq[queue]; in hisi_sas_task_prep()
495 spin_lock(&dq->lock); in hisi_sas_task_prep()
496 wr_q_index = dq->wr_point; in hisi_sas_task_prep()
497 dq->wr_point = (dq->wr_point + 1) % HISI_SAS_QUEUE_SLOTS; in hisi_sas_task_prep()
498 list_add_tail(&slot->delivery, &dq->list); in hisi_sas_task_prep()
499 spin_unlock(&dq->lock); in hisi_sas_task_prep()
504 dlvry_queue = dq->id; in hisi_sas_task_prep()
572 struct hisi_sas_dq *dq = NULL; in hisi_sas_task_exec() local
[all …]
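hisi_sas_task_prep() above claims a delivery-queue slot under dq->lock by recording dq->wr_point and advancing it modulo HISI_SAS_QUEUE_SLOTS. A minimal single-threaded sketch of that circular write-pointer bookkeeping follows; the slot count and names are illustrative, and the driver's locking is omitted.

#include <stdio.h>

#define QUEUE_SLOTS 8           /* illustrative; the driver uses HISI_SAS_QUEUE_SLOTS */

struct delivery_queue {
        int wr_point;           /* next free slot, advances modulo QUEUE_SLOTS */
};

/* Claim the current slot and advance the write pointer, wrapping at the end. */
static int dq_claim_slot(struct delivery_queue *dq)
{
        int idx = dq->wr_point;

        dq->wr_point = (dq->wr_point + 1) % QUEUE_SLOTS;
        return idx;
}

int main(void)
{
        struct delivery_queue dq = { .wr_point = 6 };

        for (int i = 0; i < 4; i++)
                printf("claimed slot %d\n", dq_claim_slot(&dq));
        return 0;
}
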
hisi_sas.h
223 struct hisi_sas_dq *dq; member
322 void (*start_delivery)(struct hisi_sas_dq *dq);
365 struct hisi_sas_dq *dq; member
434 struct hisi_sas_dq dq[HISI_SAS_MAX_QUEUES]; member
hisi_sas_v2_hw.c
860 struct hisi_sas_dq *dq = &hisi_hba->dq[queue]; in alloc_dev_quirk_v2_hw() local
869 sas_dev->dq = dq; in alloc_dev_quirk_v2_hw()
1646 static void start_delivery_v2_hw(struct hisi_sas_dq *dq) in start_delivery_v2_hw() argument
1648 struct hisi_hba *hisi_hba = dq->hisi_hba; in start_delivery_v2_hw()
1650 int dlvry_queue = dq->id; in start_delivery_v2_hw()
1653 list_for_each_entry_safe(s, s1, &dq->list, delivery) { in start_delivery_v2_hw()
hisi_sas_v1_hw.c
863 static void start_delivery_v1_hw(struct hisi_sas_dq *dq) in start_delivery_v1_hw() argument
865 struct hisi_hba *hisi_hba = dq->hisi_hba; in start_delivery_v1_hw()
867 int dlvry_queue = dq->id; in start_delivery_v1_hw()
870 list_for_each_entry_safe(s, s1, &dq->list, delivery) { in start_delivery_v1_hw()
hisi_sas_v3_hw.c
1071 static void start_delivery_v3_hw(struct hisi_sas_dq *dq) in start_delivery_v3_hw() argument
1073 struct hisi_hba *hisi_hba = dq->hisi_hba; in start_delivery_v3_hw()
1075 int dlvry_queue = dq->id; in start_delivery_v3_hw()
1078 list_for_each_entry_safe(s, s1, &dq->list, delivery) { in start_delivery_v3_hw()
4597 struct hisi_sas_debugfs_dq *dq = in debugfs_alloc_v3_hw() local
4600 dq->hdr = devm_kmalloc(dev, sz, GFP_KERNEL); in debugfs_alloc_v3_hw()
4601 if (!dq->hdr) in debugfs_alloc_v3_hw()
4603 dq->dq = &hisi_hba->dq[d]; in debugfs_alloc_v3_hw()
/drivers/soc/fsl/qbman/
qman_test_api.c
212 const struct qm_dqrr_entry *dq, in cb_dqrr() argument
215 if (WARN_ON(fd_neq(&fd_dq, &dq->fd))) { in cb_dqrr()
220 if (!(dq->stat & QM_DQRR_STAT_UNSCHEDULED) && !fd_neq(&fd_dq, &fd)) { in cb_dqrr()
qman.c
694 const struct qm_dqrr_entry *dq, in qm_dqrr_cdc_consume_1ptr() argument
698 int idx = dqrr_ptr2idx(dq); in qm_dqrr_cdc_consume_1ptr()
701 DPAA_ASSERT((dqrr->ring + idx) == dq); in qm_dqrr_cdc_consume_1ptr()
1607 const struct qm_dqrr_entry *dq; in __poll_portal_fast() local
1614 dq = qm_dqrr_current(&p->p); in __poll_portal_fast()
1615 if (!dq) in __poll_portal_fast()
1618 if (dq->stat & QM_DQRR_STAT_UNSCHEDULED) { in __poll_portal_fast()
1631 if (dq->stat & QM_DQRR_STAT_FQ_EMPTY) in __poll_portal_fast()
1639 res = fq->cb.dqrr(p, fq, dq, sched_napi); in __poll_portal_fast()
1643 if (dq->stat & QM_DQRR_STAT_DQCR_EXPIRED) in __poll_portal_fast()
[all …]
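__poll_portal_fast() above walks the DQRR, checks the entry's stat bitmask (unscheduled, FQ empty, DQCR expired) and hands the entry to the owning frame queue's dqrr callback. A compact standalone sketch of that stat-flag dispatch; the flag values and callback type here are illustrative stand-ins, not the qman API.

#include <stdio.h>

/* Illustrative stand-ins for the QM_DQRR_STAT_* bits. */
#define STAT_FQ_EMPTY      0x01u
#define STAT_UNSCHEDULED   0x02u
#define STAT_DQCR_EXPIRED  0x04u

struct dqrr_entry {
        unsigned int stat;
        unsigned int fqid;
};

/* Per-frame-queue callback, playing the role the driver keeps in fq->cb.dqrr. */
typedef void (*dqrr_cb)(const struct dqrr_entry *dq);

static void my_cb(const struct dqrr_entry *dq)
{
        printf("fq %u: frame dequeued, stat=0x%x\n", dq->fqid, dq->stat);
}

static void handle_entry(const struct dqrr_entry *dq, dqrr_cb cb)
{
        if (dq->stat & STAT_FQ_EMPTY)
                printf("fq %u is now drained\n", dq->fqid);

        cb(dq);                 /* let the frame queue's owner process the frame */

        if (dq->stat & STAT_DQCR_EXPIRED)
                printf("volatile dequeue command finished\n");
}

int main(void)
{
        struct dqrr_entry e = { .stat = STAT_FQ_EMPTY | STAT_DQCR_EXPIRED, .fqid = 42 };

        handle_entry(&e, my_cb);
        return 0;
}
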
/drivers/crypto/qat/qat_common/
qat_asym_algs.c
40 dma_addr_t dq; member
66 char *dq; member
824 qat_req->in.rsa.dec_crt.dq = ctx->dma_dq; in qat_rsa_dec()
1066 ptr = rsa_key->dq; in qat_rsa_setkey_crt()
1071 ctx->dq = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_dq, in qat_rsa_setkey_crt()
1073 if (!ctx->dq) in qat_rsa_setkey_crt()
1075 memcpy(ctx->dq + (half_key_sz - len), ptr, len); in qat_rsa_setkey_crt()
1093 memset(ctx->dq, '\0', half_key_sz); in qat_rsa_setkey_crt()
1094 dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq); in qat_rsa_setkey_crt()
1095 ctx->dq = NULL; in qat_rsa_setkey_crt()
[all …]
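qat_rsa_setkey_crt() above copies the key's dq component right-aligned into the upper end of a half-key-size DMA-coherent buffer, so any leading bytes act as big-endian zero padding, and it wipes the buffer before freeing it on the error path. The sketch below shows only that right-aligned copy and wipe, with an ordinary calloc() standing in for dma_alloc_coherent():

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
        const unsigned char dq_val[] = { 0x12, 0x34, 0x56 };   /* illustrative key material */
        size_t len = sizeof(dq_val);
        size_t half_key_sz = 8;                                /* illustrative; really key_sz / 2 */

        /* calloc() stands in for the zero-initialised DMA-coherent buffer. */
        unsigned char *dq = calloc(1, half_key_sz);
        if (!dq)
                return 1;

        /* Right-align the value so the leading bytes stay zero (big-endian padding). */
        memcpy(dq + (half_key_sz - len), dq_val, len);

        for (size_t i = 0; i < half_key_sz; i++)
                printf("%02x", dq[i]);
        printf("\n");

        /* On failure the driver wipes the buffer before handing it back. */
        memset(dq, 0, half_key_sz);
        free(dq);
        return 0;
}
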
/drivers/ata/
sata_nv.c
1699 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_to_dq() local
1702 WARN_ON(dq->tail - dq->head == ATA_MAX_QUEUE); in nv_swncq_qc_to_dq()
1703 dq->defer_bits |= (1 << qc->hw_tag); in nv_swncq_qc_to_dq()
1704 dq->tag[dq->tail++ & (ATA_MAX_QUEUE - 1)] = qc->hw_tag; in nv_swncq_qc_to_dq()
1710 struct defer_queue *dq = &pp->defer_queue; in nv_swncq_qc_from_dq() local
1713 if (dq->head == dq->tail) /* null queue */ in nv_swncq_qc_from_dq()
1716 tag = dq->tag[dq->head & (ATA_MAX_QUEUE - 1)]; in nv_swncq_qc_from_dq()
1717 dq->tag[dq->head++ & (ATA_MAX_QUEUE - 1)] = ATA_TAG_POISON; in nv_swncq_qc_from_dq()
1718 WARN_ON(!(dq->defer_bits & (1 << tag))); in nv_swncq_qc_from_dq()
1719 dq->defer_bits &= ~(1 << tag); in nv_swncq_qc_from_dq()
[all …]
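sata_nv's software-NCQ path keeps deferred commands in a small power-of-two ring indexed by monotonically increasing head/tail counters, plus a defer_bits bitmap tracking which tags are queued. A self-contained sketch of that ring-plus-bitmap bookkeeping; the queue depth and names are illustrative.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_QUEUE 32            /* power of two, like ATA_MAX_QUEUE */

struct defer_queue {
        uint32_t defer_bits;    /* one bit per queued tag */
        unsigned int head, tail;
        unsigned int tag[MAX_QUEUE];
};

static void dq_push(struct defer_queue *dq, unsigned int tag)
{
        assert(dq->tail - dq->head < MAX_QUEUE);        /* ring must not overflow */
        dq->defer_bits |= 1u << tag;
        dq->tag[dq->tail++ & (MAX_QUEUE - 1)] = tag;
}

static int dq_pop(struct defer_queue *dq, unsigned int *tag)
{
        if (dq->head == dq->tail)                       /* empty queue */
                return 0;
        *tag = dq->tag[dq->head++ & (MAX_QUEUE - 1)];
        assert(dq->defer_bits & (1u << *tag));          /* tag must have been queued */
        dq->defer_bits &= ~(1u << *tag);
        return 1;
}

int main(void)
{
        struct defer_queue dq = { 0 };
        unsigned int tag;

        dq_push(&dq, 3);
        dq_push(&dq, 7);
        while (dq_pop(&dq, &tag))
                printf("popped tag %u, defer_bits=0x%x\n",
                       tag, (unsigned int)dq.defer_bits);
        return 0;
}
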
/drivers/atm/
firestream.c
622 static struct FS_BPENTRY dq[60]; variable
1196 dq[qd].flags = td->flags; in fs_send()
1197 dq[qd].next = td->next; in fs_send()
1198 dq[qd].bsa = td->bsa; in fs_send()
1199 dq[qd].skb = td->skb; in fs_send()
1200 dq[qd].dev = td->dev; in fs_send()
1950 i, da[qd], dq[qd].flags, dq[qd].bsa, dq[qd].skb, dq[qd].dev); in firestream_remove_one()
/drivers/s390/block/
dasd.c
2915 struct dasd_queue *dq; in dasd_block_tasklet() local
2927 dq = cqr->dq; in dasd_block_tasklet()
2928 spin_lock_irq(&dq->lock); in dasd_block_tasklet()
2931 spin_unlock_irq(&dq->lock); in dasd_block_tasklet()
2963 spin_lock_irq(&cqr->dq->lock); in _dasd_requeue_request()
2966 spin_unlock_irq(&cqr->dq->lock); in _dasd_requeue_request()
3033 spin_lock_irqsave(&cqr->dq->lock, flags); in dasd_flush_block_queue()
3037 spin_unlock_irqrestore(&cqr->dq->lock, flags); in dasd_flush_block_queue()
3069 struct dasd_queue *dq = hctx->driver_data; in do_dasd_request() local
3076 spin_lock_irq(&dq->lock); in do_dasd_request()
[all …]
/drivers/scsi/
qlogicpti.c
1207 struct scsi_cmnd *dq; in qpti_intr() local
1210 dq = qlogicpti_intr_handler(qpti); in qpti_intr()
1212 if (dq != NULL) { in qpti_intr()
1216 next = (struct scsi_cmnd *) dq->host_scribble; in qpti_intr()
1217 dq->scsi_done(dq); in qpti_intr()
1218 dq = next; in qpti_intr()
1219 } while (dq != NULL); in qpti_intr()
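qpti_intr() above receives completed commands as a singly linked chain threaded through host_scribble and walks it, saving the link before invoking each command's done callback. A minimal standalone sketch of walking such an intrusive completion chain; the struct and callback are simplified stand-ins for the SCSI types.

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-in for a SCSI command with an intrusive "next" link. */
struct cmd {
        int id;
        struct cmd *next;       /* plays the role of host_scribble */
};

static void cmd_done(struct cmd *c)
{
        printf("completed cmd %d\n", c->id);
}

int main(void)
{
        struct cmd c3 = { 3, NULL }, c2 = { 2, &c3 }, c1 = { 1, &c2 };
        struct cmd *dq = &c1;   /* head of the completion chain */

        /* Save the link before completing: the done callback may recycle the command. */
        while (dq) {
                struct cmd *next = dq->next;

                cmd_done(dq);
                dq = next;
        }
        return 0;
}
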
/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.c
400 struct dpaa2_dq *dq; in dpaa2_qdma_fqdan_cb() local
414 dq = dpaa2_io_store_next(ppriv->store, &is_last); in dpaa2_qdma_fqdan_cb()
415 } while (!is_last && !dq); in dpaa2_qdma_fqdan_cb()
416 if (!dq) { in dpaa2_qdma_fqdan_cb()
422 fd = dpaa2_dq_fd(dq); in dpaa2_qdma_fqdan_cb()
/drivers/crypto/caam/
caampkc.c
599 pdb->dq_dma = dma_map_single(dev, key->dq, q_sz, DMA_TO_DEVICE); in set_rsa_priv_f3_pdb()
867 kfree_sensitive(key->dq); in caam_rsa_free_key()
1013 rsa_key->dq = caam_read_rsa_crt(raw_key->dq, raw_key->dq_sz, q_sz); in caam_rsa_set_priv_key_form()
1014 if (!rsa_key->dq) in caam_rsa_set_priv_key_form()
1027 kfree_sensitive(rsa_key->dq); in caam_rsa_set_priv_key_form()
caampkc.h
77 u8 *dq; member
/drivers/net/ethernet/freescale/dpaa/
dpaa_eth.c
2452 const struct qm_dqrr_entry *dq, in rx_error_dqrr() argument
2463 dpaa_bp = dpaa_bpid2pool(dq->fd.bpid); in rx_error_dqrr()
2473 dpaa_rx_error(net_dev, priv, percpu_priv, &dq->fd, fq->fqid); in rx_error_dqrr()
2642 const struct qm_dqrr_entry *dq, in rx_default_dqrr() argument
2650 const struct qm_fd *fd = &dq->fd; in rx_default_dqrr()
2672 dpaa_bp = dpaa_bpid2pool(dq->fd.bpid); in rx_default_dqrr()
2677 trace_dpaa_rx_fd(net_dev, fq, &dq->fd); in rx_default_dqrr()
2692 dpaa_fd_release(net_dev, &dq->fd); in rx_default_dqrr()
2801 const struct qm_dqrr_entry *dq, in conf_error_dqrr() argument
2816 dpaa_tx_error(net_dev, priv, percpu_priv, &dq->fd, fq->fqid); in conf_error_dqrr()
[all …]
/drivers/crypto/virtio/
virtio_crypto_core.c
44 struct data_queue *dq = &vcrypto->data_vq[vq->index]; in virtcrypto_dataq_callback() local
46 tasklet_schedule(&dq->done_task); in virtcrypto_dataq_callback()
/drivers/scsi/be2iscsi/
be_cmds.c
976 struct be_queue_info *dq, int length, in be_cmd_create_default_pdu_queue() argument
982 struct be_dma_mem *q_mem = &dq->dma_mem; in be_cmd_create_default_pdu_queue()
1041 dq->id = le16_to_cpu(resp->id); in be_cmd_create_default_pdu_queue()
1042 dq->created = true; in be_cmd_create_default_pdu_queue()
1049 defq_ring->id = dq->id; in be_cmd_create_default_pdu_queue()
