/drivers/dma/ |
D | sirf-dma.c |
    126  void (*exec_desc)(struct sirfsoc_dma_desc *sdesc,
    132  void (*exec)(struct sirfsoc_dma_desc *sdesc,
    162  static void sirfsoc_dma_execute_hw_a7v2(struct sirfsoc_dma_desc *sdesc,    in sirfsoc_dma_execute_hw_a7v2() argument
    165  if (sdesc->chain) {    in sirfsoc_dma_execute_hw_a7v2()
    167  writel_relaxed((sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT_ATLAS7) |    in sirfsoc_dma_execute_hw_a7v2()
    168  (sdesc->chain <<    in sirfsoc_dma_execute_hw_a7v2()
    174  writel_relaxed(sdesc->xlen, base + SIRFSOC_DMA_CH_XLEN);    in sirfsoc_dma_execute_hw_a7v2()
    175  writel_relaxed(sdesc->ylen, base + SIRFSOC_DMA_CH_YLEN);    in sirfsoc_dma_execute_hw_a7v2()
    176  writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_ATLAS7);    in sirfsoc_dma_execute_hw_a7v2()
    177  writel_relaxed((sdesc->width*((sdesc->ylen+1)>>1)),    in sirfsoc_dma_execute_hw_a7v2()
    [all …]
D | sprd-dma.c |
    487  struct sprd_dma_desc *sdesc)    in sprd_dma_set_chn_config() argument
    489  struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;    in sprd_dma_set_chn_config()
    548  static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,    in sprd_dma_check_trans_done() argument
    566  struct sprd_dma_desc *sdesc;    in dma_irq_handle() local
    579  sdesc = schan->cur_desc;    in dma_irq_handle()
    580  if (!sdesc) {    in dma_irq_handle()
    592  vchan_cyclic_callback(&sdesc->vd);    in dma_irq_handle()
    595  trans_done = sprd_dma_check_trans_done(sdesc, int_type,    in dma_irq_handle()
    598  vchan_cookie_complete(&sdesc->vd);    in dma_irq_handle()
    651  struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);    in sprd_dma_tx_status() local
    [all …]
D | altera-msgdma.c |
    534  struct msgdma_sw_desc *sdesc, *next;    in msgdma_copy_desc_to_fifo() local
    538  list_for_each_entry_safe(sdesc, next, &desc->tx_list, node)    in msgdma_copy_desc_to_fifo()
    539  msgdma_copy_one(mdev, sdesc);    in msgdma_copy_desc_to_fifo()
/drivers/net/ethernet/chelsio/cxgb4vf/ |
D | sge.c |
    195  static inline dma_addr_t get_buf_addr(const struct rx_sw_desc *sdesc)    in get_buf_addr() argument
    197  return sdesc->dma_addr & ~(dma_addr_t)(RX_LARGE_BUF | RX_UNMAPPED_BUF);    in get_buf_addr()
    207  static inline bool is_buf_mapped(const struct rx_sw_desc *sdesc)    in is_buf_mapped() argument
    209  return !(sdesc->dma_addr & RX_UNMAPPED_BUF);    in is_buf_mapped()
    380  struct tx_sw_desc *sdesc;    in free_tx_desc() local
    386  sdesc = &tq->sdesc[cidx];    in free_tx_desc()
    392  if (sdesc->skb) {    in free_tx_desc()
    394  unmap_sgl(dev, sdesc->skb, sdesc->sgl, tq);    in free_tx_desc()
    395  dev_consume_skb_any(sdesc->skb);    in free_tx_desc()
    396  sdesc->skb = NULL;    in free_tx_desc()
    [all …]
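The get_buf_addr()/is_buf_mapped() hits above rely on a common trick: because Rx buffers are aligned, the driver stores state flags (RX_LARGE_BUF, RX_UNMAPPED_BUF) in the low bits of the recorded DMA address and masks them off before use. A minimal sketch of that pattern with hypothetical names (the DEMO_* flags and struct demo_rx_desc are illustrative, not the cxgb4vf definitions):

#include <linux/types.h>

/* Hypothetical flags kept in the low bits of an aligned dma_addr_t. */
#define DEMO_LARGE_BUF		0x1UL
#define DEMO_UNMAPPED_BUF	0x2UL
#define DEMO_FLAG_MASK		(DEMO_LARGE_BUF | DEMO_UNMAPPED_BUF)

struct demo_rx_desc {
	dma_addr_t dma_addr;		/* bus address ORed with the flags above */
};

/* Strip the flag bits to recover the real bus address. */
static inline dma_addr_t demo_buf_addr(const struct demo_rx_desc *d)
{
	return d->dma_addr & ~(dma_addr_t)DEMO_FLAG_MASK;
}

/* A buffer is DMA-mapped unless it was marked unmapped at fill time. */
static inline bool demo_buf_is_mapped(const struct demo_rx_desc *d)
{
	return !(d->dma_addr & DEMO_UNMAPPED_BUF);
}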
D | adapter.h |
    142  struct rx_sw_desc *sdesc;    /* address of SW RX descriptor ring */    member
    248  struct tx_sw_desc *sdesc;    /* address of SW TX descriptor ring */    member
/drivers/dma/sh/ |
D | shdma-base.c |
    56   struct shdma_desc *sdesc;    in shdma_chan_xfer_ld_queue() local
    63   list_for_each_entry(sdesc, &schan->ld_queue, node)    in shdma_chan_xfer_ld_queue()
    64   if (sdesc->mark == DESC_SUBMITTED) {    in shdma_chan_xfer_ld_queue()
    65   ops->start_xfer(schan, sdesc);    in shdma_chan_xfer_ld_queue()
    159  struct shdma_desc *sdesc;    in shdma_get_desc() local
    161  list_for_each_entry(sdesc, &schan->ld_free, node)    in shdma_get_desc()
    162  if (sdesc->mark != DESC_PREPARED) {    in shdma_get_desc()
    163  BUG_ON(sdesc->mark != DESC_IDLE);    in shdma_get_desc()
    164  list_del(&sdesc->node);    in shdma_get_desc()
    165  return sdesc;    in shdma_get_desc()
    [all …]
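In the shdma-base.c hits, each shdma_desc carries a mark field recording its life-cycle state (DESC_IDLE, DESC_PREPARED, DESC_SUBMITTED, …), and the helpers simply scan the channel lists for descriptors in the wanted state. A stripped-down sketch of that idea, using hypothetical DEMO_* names rather than the shdma definitions:

#include <linux/list.h>

/* Hypothetical descriptor life-cycle marks, modelled on shdma-base. */
enum demo_desc_mark {
	DEMO_DESC_IDLE,			/* on the free list, reusable */
	DEMO_DESC_PREPARED,		/* prepared but not yet submitted */
	DEMO_DESC_SUBMITTED,		/* queued, waiting for the hardware */
};

struct demo_desc {
	struct list_head node;
	enum demo_desc_mark mark;
};

/* Pull the first reusable descriptor off the channel's free list. */
static struct demo_desc *demo_get_desc(struct list_head *ld_free)
{
	struct demo_desc *sdesc;

	list_for_each_entry(sdesc, ld_free, node)
		if (sdesc->mark != DEMO_DESC_PREPARED) {
			list_del(&sdesc->node);
			return sdesc;
		}
	return NULL;
}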
D | shdmac.c |
    283  struct shdma_desc *sdesc)    in sh_dmae_start_xfer() argument
    287  struct sh_dmae_desc *sh_desc = container_of(sdesc,    in sh_dmae_start_xfer()
    290  sdesc->async_tx.cookie, sh_chan->shdma_chan.id,    in sh_dmae_start_xfer()
    378  struct shdma_desc *sdesc,    in sh_dmae_desc_setup() argument
    381  struct sh_dmae_desc *sh_desc = container_of(sdesc,    in sh_dmae_desc_setup()
    416  struct shdma_desc *sdesc)    in sh_dmae_get_partial() argument
    420  struct sh_dmae_desc *sh_desc = container_of(sdesc,    in sh_dmae_get_partial()
    454  struct shdma_desc *sdesc)    in sh_dmae_desc_completed() argument
    458  struct sh_dmae_desc *sh_desc = container_of(sdesc,    in sh_dmae_desc_completed()
    463  return (sdesc->direction == DMA_DEV_TO_MEM &&    in sh_dmae_desc_completed()
    [all …]
/drivers/crypto/bcm/ |
D | util.c |
    188  struct sdesc {    struct
    218  struct sdesc *sdesc;    in do_shash() local
    228  sdesc = kmalloc(size, GFP_KERNEL);    in do_shash()
    229  if (!sdesc) {    in do_shash()
    233  sdesc->shash.tfm = hash;    in do_shash()
    243  rc = crypto_shash_init(&sdesc->shash);    in do_shash()
    248  rc = crypto_shash_update(&sdesc->shash, data1, data1_len);    in do_shash()
    254  rc = crypto_shash_update(&sdesc->shash, data2, data2_len);    in do_shash()
    260  rc = crypto_shash_final(&sdesc->shash, result);    in do_shash()
    266  kfree(sdesc);    in do_shash()
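The util.c hits show the standard wrapper for synchronous hashes: struct sdesc places the variable-size per-request context right behind the struct shash_desc header, and do_shash() sizes the allocation with crypto_shash_descsize(). A minimal sketch of the same pattern, assuming a "sha256" transform and a hypothetical helper name (demo_shash_digest() is not part of the driver):

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Wrapper so the transform's request context trails the descriptor. */
struct sdesc {
	struct shash_desc shash;
	char ctx[];
};

static int demo_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct sdesc *sdesc;
	int rc;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Descriptor header plus however much context this transform needs. */
	sdesc = kmalloc(sizeof(*sdesc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!sdesc) {
		rc = -ENOMEM;
		goto out_free_tfm;
	}
	sdesc->shash.tfm = tfm;

	rc = crypto_shash_init(&sdesc->shash);
	if (!rc)
		rc = crypto_shash_update(&sdesc->shash, data, len);
	if (!rc)
		rc = crypto_shash_final(&sdesc->shash, out);

	kfree(sdesc);
out_free_tfm:
	crypto_free_shash(tfm);
	return rc;
}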
/drivers/staging/media/soc_camera/ |
D | soc_camera.c |
    657   struct soc_camera_desc *sdesc = to_soc_camera_desc(icd);    in soc_camera_open() local
    672   if (sdesc->subdev_desc.reset)    in soc_camera_open()
    674   sdesc->subdev_desc.reset(icd->control);    in soc_camera_open()
    1054  struct soc_camera_desc *sdesc = to_soc_camera_desc(icd);    in scan_add_host() local
    1055  struct soc_camera_subdev_desc *ssdd = &sdesc->subdev_desc;    in scan_add_host()
    1122  static int soc_camera_dyn_pdev(struct soc_camera_desc *sdesc,    in soc_camera_dyn_pdev() argument
    1140  ret = platform_device_add_data(pdev, sdesc, sizeof(*sdesc));    in soc_camera_dyn_pdev()
    1221  struct soc_camera_desc *sdesc)    in soc_camera_i2c_init() argument
    1226  struct soc_camera_host_desc *shd = &sdesc->host_desc;    in soc_camera_i2c_init()
    1246  ssdd = kmemdup(&sdesc->subdev_desc, sizeof(*ssdd), GFP_KERNEL);    in soc_camera_i2c_init()
    [all …]
/drivers/dma/xilinx/ |
D | zynqmp_dma.c |
    303  struct zynqmp_dma_desc_ll *sdesc,    in zynqmp_dma_config_sg_ll_desc() argument
    307  struct zynqmp_dma_desc_ll *ddesc = sdesc + 1;    in zynqmp_dma_config_sg_ll_desc()
    309  sdesc->size = ddesc->size = len;    in zynqmp_dma_config_sg_ll_desc()
    310  sdesc->addr = src;    in zynqmp_dma_config_sg_ll_desc()
    313  sdesc->ctrl = ddesc->ctrl = ZYNQMP_DMA_DESC_CTRL_SIZE_256;    in zynqmp_dma_config_sg_ll_desc()
    315  sdesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;    in zynqmp_dma_config_sg_ll_desc()
    321  ((uintptr_t)sdesc - (uintptr_t)chan->desc_pool_v);    in zynqmp_dma_config_sg_ll_desc()
    430  struct zynqmp_dma_desc_sw *sdesc)    in zynqmp_dma_free_descriptor() argument
    435  list_add_tail(&sdesc->node, &chan->free_list);    in zynqmp_dma_free_descriptor()
    436  list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {    in zynqmp_dma_free_descriptor()
/drivers/net/wireless/ath/ath10k/ |
D | ce.c |
    497  struct ce_desc *desc, sdesc;    in _ath10k_ce_send_nolock() local
    525  sdesc.addr = __cpu_to_le32(buffer);    in _ath10k_ce_send_nolock()
    526  sdesc.nbytes = __cpu_to_le16(nbytes);    in _ath10k_ce_send_nolock()
    527  sdesc.flags = __cpu_to_le16(desc_flags);    in _ath10k_ce_send_nolock()
    529  *desc = sdesc;    in _ath10k_ce_send_nolock()
    554  struct ce_desc_64 *desc, sdesc;    in _ath10k_ce_send_nolock_64() local
    592  addr = (__le32 *)&sdesc.addr;    in _ath10k_ce_send_nolock_64()
    602  sdesc.nbytes = __cpu_to_le16(nbytes);    in _ath10k_ce_send_nolock_64()
    603  sdesc.flags = __cpu_to_le16(desc_flags);    in _ath10k_ce_send_nolock_64()
    605  *desc = sdesc;    in _ath10k_ce_send_nolock_64()
    [all …]
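The ce.c hits assemble each copy-engine descriptor in a local sdesc, converting fields to the hardware's little-endian layout with __cpu_to_le*(), and only then copy the finished struct into the shared ring slot, so the slot is never updated piecemeal, field by field. A simplified sketch of that pattern (hypothetical struct and helper, not the ath10k layout):

#include <linux/types.h>
#include <asm/byteorder.h>

/* Hypothetical descriptor laid out as the hardware reads it. */
struct demo_ce_desc {
	__le32 addr;
	__le16 nbytes;
	__le16 flags;
};

static void demo_fill_desc(struct demo_ce_desc *slot, u32 buffer,
			   u16 nbytes, u16 desc_flags)
{
	struct demo_ce_desc sdesc;

	/* Build the descriptor locally, already in hardware byte order. */
	sdesc.addr   = __cpu_to_le32(buffer);
	sdesc.nbytes = __cpu_to_le16(nbytes);
	sdesc.flags  = __cpu_to_le16(desc_flags);

	/* Copy the completed descriptor into the ring slot in one assignment;
	 * the hardware only consumes it once the ring's write index advances. */
	*slot = sdesc;
}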
/drivers/net/ethernet/chelsio/cxgb3/ |
D | sge.c |
    240  struct tx_sw_desc *d = &q->sdesc[cidx];    in unmap_skb()
    269  d = cidx + 1 == q->size ? q->sdesc : d + 1;    in unmap_skb()
    295  d = &q->sdesc[cidx];    in free_tx_desc()
    308  d = q->sdesc;    in free_tx_desc()
    385  struct rx_sw_desc *d = &q->sdesc[cidx];    in free_rx_bufs()
    507  struct rx_sw_desc *sd = &q->sdesc[q->pidx];    in refill_fl()
    550  sd = q->sdesc;    in refill_fl()
    584  q->sdesc[q->pidx] = q->sdesc[idx];    in recycle_rx_buf()
    687  kfree(q->fl[i].sdesc);    in t3_free_qset()
    699  if (q->txq[i].sdesc) {    in t3_free_qset()
    [all …]
D | adapter.h |
    115  struct rx_sw_desc *sdesc;    /* address of SW Rx descriptor ring */    member
    176  struct tx_sw_desc *sdesc;    /* address of SW Tx descriptor ring */    member
/drivers/net/ethernet/chelsio/cxgb4/ |
D | sge.c |
    377   d = &q->sdesc[cidx];    in free_tx_desc()
    388   d = q->sdesc;    in free_tx_desc()
    497   struct rx_sw_desc *d = &q->sdesc[q->cidx];    in free_rx_bufs()
    524   struct rx_sw_desc *d = &q->sdesc[q->cidx];    in unmap_rx_buf()
    599   struct rx_sw_desc *sd = &q->sdesc[q->pidx];    in refill_fl()
    640   sd = q->sdesc;    in refill_fl()
    669   sd = q->sdesc;    in refill_fl()
    1615  q->q.sdesc[last_desc].skb = skb;    in cxgb4_eth_xmit()
    1616  q->q.sdesc[last_desc].sgl = (struct ulptx_sgl *)sgl;    in cxgb4_eth_xmit()
    1974  tq->sdesc[last_desc].skb = skb;    in cxgb4_vf_eth_xmit()
    [all …]
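Across the Chelsio drivers, sdesc is the software shadow of a hardware Tx/Rx ring: q->sdesc[] holds, per descriptor index, the skb and SGL bookkeeping that the hardware ring cannot carry, written at xmit time and consumed when completions free the entries. A condensed sketch of the Tx free path with hypothetical demo_* types (the real struct tx_sw_desc carries more state):

#include <linux/skbuff.h>

/* Hypothetical software ring entry paired 1:1 with the HW Tx ring. */
struct demo_tx_sw_desc {
	struct sk_buff *skb;		/* skb to release once the HW is done */
};

struct demo_txq {
	struct demo_tx_sw_desc *sdesc;	/* software ring, size entries */
	unsigned int size;
	unsigned int cidx;		/* consumer index into both rings */
};

/* Release n completed entries, wrapping the consumer index at q->size. */
static void demo_free_tx_desc(struct demo_txq *q, unsigned int n)
{
	struct demo_tx_sw_desc *d = &q->sdesc[q->cidx];

	while (n--) {
		if (d->skb) {
			dev_consume_skb_any(d->skb);
			d->skb = NULL;
		}
		if (++q->cidx == q->size) {
			q->cidx = 0;
			d = q->sdesc;
		} else {
			d++;
		}
	}
}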
D | cxgb4.h |
    646  struct rx_sw_desc *sdesc;    /* address of SW Rx descriptor ring */    member
    746  struct tx_sw_desc *sdesc;    /* address of SW Tx descriptor ring */    member
D | cxgb4_uld.c |
    471  kfree(txq->q.sdesc);    in free_sge_txq_uld()
/drivers/crypto/ccp/ |
D | ccp-crypto-sha.c |
    276  SHASH_DESC_ON_STACK(sdesc, shash);    in ccp_sha_setkey()
    292  sdesc->tfm = shash;    in ccp_sha_setkey()
    294  ret = crypto_shash_digest(sdesc, key, key_len,    in ccp_sha_setkey()
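ccp_sha_setkey() uses the stack variant instead of a kmalloc'd wrapper: SHASH_DESC_ON_STACK() reserves the descriptor plus transform context on the stack, and crypto_shash_digest() runs init/update/final in one call. A short sketch of that usage (hypothetical helper name; the transform is assumed to be already allocated):

#include <crypto/hash.h>

/* Hypothetical helper: one-shot digest of a key with a stack descriptor. */
static int demo_digest_key(struct crypto_shash *shash,
			   const u8 *key, unsigned int key_len, u8 *out)
{
	SHASH_DESC_ON_STACK(sdesc, shash);

	sdesc->tfm = shash;
	return crypto_shash_digest(sdesc, key, key_len, out);
}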
/drivers/crypto/chelsio/ |
D | chcr_ipsec.c |
    754  q->q.sdesc[last_desc].skb = skb;    in chcr_ipsec_xmit()
    755  q->q.sdesc[last_desc].sgl = (struct ulptx_sgl *)sgl;    in chcr_ipsec_xmit()