
Searched refs:descs (Results 1 – 25 of 39) sorted by relevance


/drivers/dma/ioat/
dma_v3.c
94 static void xor_set_src(struct ioat_raw_descriptor *descs[2], in xor_set_src()
97 struct ioat_raw_descriptor *raw = descs[xor_idx_to_desc >> idx & 1]; in xor_set_src()
102 static dma_addr_t pq_get_src(struct ioat_raw_descriptor *descs[2], int idx) in pq_get_src()
104 struct ioat_raw_descriptor *raw = descs[pq_idx_to_desc >> idx & 1]; in pq_get_src()
116 static void pq_set_src(struct ioat_raw_descriptor *descs[2], in pq_set_src()
119 struct ioat_pq_descriptor *pq = (struct ioat_pq_descriptor *) descs[0]; in pq_set_src()
120 struct ioat_raw_descriptor *raw = descs[pq_idx_to_desc >> idx & 1]; in pq_set_src()
685 struct ioat_raw_descriptor *descs[2]; in __ioat3_prep_xor_lock() local
699 descs[0] = (struct ioat_raw_descriptor *) xor; in __ioat3_prep_xor_lock()
700 descs[1] = (struct ioat_raw_descriptor *) xor_ex; in __ioat3_prep_xor_lock()
[all …]
dma_v2.c
480 int descs = 1 << order; in ioat2_alloc_ring() local
487 ring = kcalloc(descs, sizeof(*ring), flags); in ioat2_alloc_ring()
490 for (i = 0; i < descs; i++) { in ioat2_alloc_ring()
502 for (i = 0; i < descs-1; i++) { in ioat2_alloc_ring()
806 int descs; in ioat2_free_chan_resources() local
820 descs = ioat2_ring_space(ioat); in ioat2_free_chan_resources()
821 dev_dbg(to_dev(chan), "freeing %d idle descriptors\n", descs); in ioat2_free_chan_resources()
822 for (i = 0; i < descs; i++) { in ioat2_free_chan_resources()
827 if (descs < total_descs) in ioat2_free_chan_resources()
829 total_descs - descs); in ioat2_free_chan_resources()
[all …]
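For orientation, the dma_v2.c hits above show the usual ring-allocation shape: the ring holds a power-of-two number of descriptors (descs = 1 << order), the array comes from kcalloc(), and the first descs - 1 slots are linked to their successors. A minimal user-space paraphrase of that pattern follows; the ring_desc type and function names are illustrative, not the driver's.

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-in for the driver's per-slot descriptor type. */
struct ring_desc {
	struct ring_desc *next;		/* software link to the following slot */
};

/* Sketch of the allocation pattern seen in ioat2_alloc_ring(): (1 << order)
 * zeroed descriptors, each slot chained to the next, the last wrapping back
 * to the first. */
static struct ring_desc *alloc_ring(int order)
{
	int descs = 1 << order;
	struct ring_desc *ring;
	int i;

	ring = calloc(descs, sizeof(*ring));
	if (!ring)
		return NULL;

	for (i = 0; i < descs - 1; i++)
		ring[i].next = &ring[i + 1];
	ring[descs - 1].next = &ring[0];	/* close the ring */

	return ring;
}

int main(void)
{
	struct ring_desc *ring = alloc_ring(4);	/* 16 slots */

	printf("%s\n", ring ? "ring allocated" : "allocation failed");
	free(ring);
	return 0;
}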
/drivers/net/ethernet/cirrus/
ep93xx_eth.c
162 struct ep93xx_descs *descs; member
244 rstat = ep->descs->rstat + entry; in ep93xx_rx()
286 struct ep93xx_rdesc *rxd = &ep->descs->rdesc[entry]; in ep93xx_rx()
315 struct ep93xx_rstat *rstat = ep->descs->rstat + ep->rx_pointer; in ep93xx_have_more_rx()
367 txd = &ep->descs->tdesc[entry]; in ep93xx_xmit()
402 tstat = ep->descs->tstat + entry; in ep93xx_tx_complete()
416 int length = ep->descs->tdesc[entry].tdesc1 & 0xfff; in ep93xx_tx_complete()
474 d = ep->descs->rdesc[i].buf_addr; in ep93xx_free_buffers()
485 d = ep->descs->tdesc[i].buf_addr; in ep93xx_free_buffers()
493 dma_free_coherent(dev, sizeof(struct ep93xx_descs), ep->descs, in ep93xx_free_buffers()
[all …]
/drivers/net/ethernet/ibm/
ibmveth.c
914 union ibmveth_buf_desc *descs) in ibmveth_send() argument
928 descs[0].desc, descs[1].desc, in ibmveth_send()
929 descs[2].desc, descs[3].desc, in ibmveth_send()
930 descs[4].desc, descs[5].desc, in ibmveth_send()
948 union ibmveth_buf_desc descs[6]; in ibmveth_start_xmit() local
989 memset(descs, 0, sizeof(descs)); in ibmveth_start_xmit()
1001 descs[0].fields.flags_len = desc_flags | skb->len; in ibmveth_start_xmit()
1002 descs[0].fields.address = adapter->bounce_buffer_dma; in ibmveth_start_xmit()
1004 if (ibmveth_send(adapter, descs)) { in ibmveth_start_xmit()
1021 descs[0].fields.flags_len = desc_flags | skb_headlen(skb); in ibmveth_start_xmit()
[all …]
/drivers/vhost/
vringh.c
149 struct vring_desc **descs, int *desc_max) in move_to_indirect() argument
167 *descs = addr; in move_to_indirect()
202 struct vring_desc **descs, int *desc_max) in return_from_indirect() argument
207 *descs = vrh->vring.desc; in return_from_indirect()
260 struct vring_desc desc, *descs; in __vringh_iov() local
265 descs = vrh->vring.desc; in __vringh_iov()
283 err = slow_copy(vrh, &desc, &descs[i], rcheck, getrange, in __vringh_iov()
286 err = copy(&desc, &descs[i], sizeof(desc)); in __vringh_iov()
306 &descs, &desc_max); in __vringh_iov()
313 vringh_bad("Descriptor loop in %p", descs); in __vringh_iov()
[all …]
/drivers/regulator/
ltc3589.c
464 struct ltc3589_regulator *descs; in ltc3589_probe() local
476 descs = ltc3589->regulator_descs; in ltc3589_probe()
477 memcpy(descs, ltc3589_regulators, sizeof(ltc3589_regulators)); in ltc3589_probe()
479 descs[LTC3589_LDO3].desc.fixed_uV = 1800000; in ltc3589_probe()
480 descs[LTC3589_LDO4].desc.volt_table = ltc3589_ldo4; in ltc3589_probe()
482 descs[LTC3589_LDO3].desc.fixed_uV = 2800000; in ltc3589_probe()
483 descs[LTC3589_LDO4].desc.volt_table = ltc3589_12_ldo4; in ltc3589_probe()
/drivers/clk/mvebu/
clk-corediv.c
42 const struct clk_corediv_desc *descs; member
192 .descs = mvebu_corediv_desc,
208 .descs = mvebu_corediv_desc,
224 .descs = mvebu_corediv_desc,
278 corediv[i].desc = soc_desc->descs + i; in mvebu_corediv_clk_init()
/drivers/net/ethernet/faraday/
ftmac100.c
67 struct ftmac100_descs *descs; member
311 return &priv->descs->rxdes[priv->rx_pointer]; in ftmac100_current_rxdes()
576 return &priv->descs->txdes[priv->tx_pointer]; in ftmac100_current_txdes()
581 return &priv->descs->txdes[priv->tx_clean_pointer]; in ftmac100_current_clean_txdes()
704 struct ftmac100_rxdes *rxdes = &priv->descs->rxdes[i]; in ftmac100_free_buffers()
716 struct ftmac100_txdes *txdes = &priv->descs->txdes[i]; in ftmac100_free_buffers()
728 priv->descs, priv->descs_dma_addr); in ftmac100_free_buffers()
735 priv->descs = dma_zalloc_coherent(priv->dev, in ftmac100_alloc_buffers()
739 if (!priv->descs) in ftmac100_alloc_buffers()
743 ftmac100_rxdes_set_end_of_ring(&priv->descs->rxdes[RX_QUEUE_ENTRIES - 1]); in ftmac100_alloc_buffers()
[all …]
ftgmac100.c
59 struct ftgmac100_descs *descs; member
329 return &priv->descs->rxdes[priv->rx_pointer]; in ftgmac100_current_rxdes()
615 return &priv->descs->txdes[priv->tx_pointer]; in ftgmac100_current_txdes()
621 return &priv->descs->txdes[priv->tx_clean_pointer]; in ftgmac100_current_clean_txdes()
749 struct ftgmac100_rxdes *rxdes = &priv->descs->rxdes[i]; in ftgmac100_free_buffers()
761 struct ftgmac100_txdes *txdes = &priv->descs->txdes[i]; in ftgmac100_free_buffers()
773 priv->descs, priv->descs_dma_addr); in ftgmac100_free_buffers()
780 priv->descs = dma_zalloc_coherent(priv->dev, in ftgmac100_alloc_buffers()
783 if (!priv->descs) in ftgmac100_alloc_buffers()
787 ftgmac100_rxdes_set_end_of_ring(&priv->descs->rxdes[RX_QUEUE_ENTRIES - 1]); in ftgmac100_alloc_buffers()
[all …]
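Both Faraday drivers above keep all RX and TX descriptors in a single DMA-coherent block, so priv->descs is one pointer, priv->descs_dma_addr is its bus address, and the last entry of each array is flagged as end-of-ring. A compile-time sketch of that layout follows; the entry counts and field names are placeholders, not taken from the drivers.

#include <stdio.h>

/* Placeholder ring sizes; the real drivers define their own. */
#define RX_QUEUE_ENTRIES 256
#define TX_QUEUE_ENTRIES 64

/* Simplified descriptor shapes standing in for ftmac100_rxdes/txdes. */
struct example_rxdes { unsigned int rxdes0, rxdes1, rxdes2, rxdes3; };
struct example_txdes { unsigned int txdes0, txdes1, txdes2, txdes3; };

/* One contiguous block holding both rings, as implied by the single
 * priv->descs pointer and the one dma_free_coherent() call above. */
struct example_descs {
	struct example_rxdes rxdes[RX_QUEUE_ENTRIES];
	struct example_txdes txdes[TX_QUEUE_ENTRIES];
};

int main(void)
{
	printf("descriptor block size: %zu bytes\n", sizeof(struct example_descs));
	return 0;
}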
/drivers/net/ethernet/marvell/
mvneta.c
417 struct mvneta_tx_desc *descs; member
449 struct mvneta_rx_desc *descs; member
639 prefetch(rxq->descs + rxq->next_desc_to_proc); in mvneta_rxq_next_desc_get()
640 return rxq->descs + rx_desc; in mvneta_rxq_next_desc_get()
695 return txq->descs + tx_desc; in mvneta_txq_next_desc_get()
746 if (txq->descs != NULL) in mvneta_port_up()
755 if (rxq->descs != NULL) in mvneta_port_up()
1298 struct mvneta_tx_desc *tx_desc = txq->descs + in mvneta_txq_bufs_free()
1425 struct mvneta_rx_desc *rx_desc = rxq->descs + i; in mvneta_rxq_drop_pkts()
1657 struct mvneta_tx_desc *tx_desc = txq->descs + i; in mvneta_tx_tso()
[all …]
mvpp2.c
808 struct mvpp2_tx_desc *descs; member
831 struct mvpp2_rx_desc *descs; member
4018 if (txq->descs != NULL) in mvpp2_egress_enable()
4095 prefetch(rxq->descs + rxq->next_desc_to_proc); in mvpp2_rxq_next_desc_get()
4096 return rxq->descs + rx_desc; in mvpp2_rxq_next_desc_get()
4150 return txq->descs + tx_desc; in mvpp2_txq_next_desc_get()
4410 struct mvpp2_tx_desc *tx_desc = txq->descs + in mvpp2_txq_bufs_free()
4472 aggr_txq->descs = dma_alloc_coherent(&pdev->dev, in mvpp2_aggr_txq_init()
4475 if (!aggr_txq->descs) in mvpp2_aggr_txq_init()
4479 BUG_ON(aggr_txq->descs != in mvpp2_aggr_txq_init()
[all …]
/drivers/media/pci/pt3/
pt3_dma.c
149 adap->desc_buf[i].descs, adap->desc_buf[i].b_addr); in pt3_free_dmabuf()
188 adap->desc_buf[i].descs = p; in pt3_alloc_dmabuf()
192 d = &adap->desc_buf[i - 1].descs[DESCS_IN_PAGE - 1]; in pt3_alloc_dmabuf()
198 d = &adap->desc_buf[i].descs[j]; in pt3_alloc_dmabuf()
pt3.h
95 struct xfer_desc *descs; /* PAGE_SIZE (xfer_desc[DESCS_IN_PAGE]) */ member
/drivers/scsi/fnic/
vnic_cq_copy.h
35 desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs + in vnic_cq_copy_service()
50 desc = (struct fcpio_fw_req *)((u8 *)cq->ring.descs + in vnic_cq_copy_service()
vnic_wq_copy.h
48 struct fcpio_host_req *desc = wq->ring.descs; in vnic_wq_copy_next_desc()
88 struct fcpio_host_req *wq_desc = wq->ring.descs; in vnic_wq_copy_service()
vnic_cq.h
81 cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs + in vnic_cq_service()
98 cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs + in vnic_cq_service()
vnic_dev.c
190 memset(ring->descs, 0, ring->size); in vnic_dev_clear_desc_ring()
211 ring->descs = (u8 *)ring->descs_unaligned + in vnic_dev_alloc_desc_ring()
223 if (ring->descs) { in vnic_dev_free_desc_ring()
228 ring->descs = NULL; in vnic_dev_free_desc_ring()
/drivers/net/ethernet/cisco/enic/
vnic_cq.h
83 cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs + in vnic_cq_service()
100 cq_desc = (struct cq_desc *)((u8 *)cq->ring.descs + in vnic_cq_service()
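The fnic and enic hits above index a type-erased descriptor ring by casting ring.descs to u8 * and adding a byte offset; the stride expression itself is cut off in the excerpts. The sketch below shows the idiom with an assumed desc_size * index stride, which is not taken from the drivers.

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

/* Toy completion descriptor standing in for struct cq_desc. */
struct example_cq_desc {
	uint16_t completed_index;
	uint8_t  type_color;
};

/* A ring whose element type is not fixed at the struct level, so entries
 * are reached by byte arithmetic; desc_size and the stride are assumptions
 * for illustration only. */
struct example_ring {
	void *descs;
	size_t desc_size;
	unsigned int count;
};

static struct example_cq_desc *ring_entry(struct example_ring *ring, unsigned int index)
{
	return (struct example_cq_desc *)((uint8_t *)ring->descs + ring->desc_size * index);
}

int main(void)
{
	struct example_ring ring = { .desc_size = sizeof(struct example_cq_desc), .count = 8 };

	ring.descs = calloc(ring.count, ring.desc_size);
	if (!ring.descs)
		return 1;

	printf("entry 3 at byte offset %zu\n",
	       (size_t)((uint8_t *)ring_entry(&ring, 3) - (uint8_t *)ring.descs));
	free(ring.descs);
	return 0;
}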
/drivers/net/ethernet/natsemi/
ns83820.c
420 __le32 *descs; member
537 sg = dev->rx_info.descs + (next_empty * DESC_SIZE); in ns83820_add_rx_skb()
548 …dev->rx_info.descs[((NR_RX_DESC + next_empty - 1) % NR_RX_DESC) * DESC_SIZE] = cpu_to_le32(dev->rx… in ns83820_add_rx_skb()
609 build_rx_desc(dev, dev->rx_info.descs + (DESC_SIZE * i), 0, 0, CMDSTS_OWN, 0); in clear_rx_desc()
729 dev->rx_info.next_rx_desc = dev->rx_info.descs; in ns83820_setup_rx()
846 (dev->rx_info.descs + (DESC_SIZE * dev->rx_info.next_rx)),
848 (dev->rx_info.descs + (DESC_SIZE * dev->rx_info.next_empty))
927 desc = info->descs + (DESC_SIZE * next_rx);
930 info->next_rx_desc = info->descs + (DESC_SIZE * next_rx);
1992 dev->rx_info.descs = pci_alloc_consistent(pci_dev,
[all …]
/drivers/dma/
sirf-dma.c
403 LIST_HEAD(descs); in sirfsoc_dma_alloc_chan_resources()
421 list_add_tail(&sdesc->node, &descs); in sirfsoc_dma_alloc_chan_resources()
430 list_splice_tail_init(&descs, &schan->free); in sirfsoc_dma_alloc_chan_resources()
443 LIST_HEAD(descs); in sirfsoc_dma_free_chan_resources()
454 list_splice_tail_init(&schan->free, &descs); in sirfsoc_dma_free_chan_resources()
459 list_for_each_entry_safe(sdesc, tmp, &descs, node) in sirfsoc_dma_free_chan_resources()
mpc512x_dma.c
511 LIST_HEAD(descs); in mpc_dma_alloc_chan_resources()
537 list_add_tail(&mdesc->node, &descs); in mpc_dma_alloc_chan_resources()
551 list_splice_tail_init(&descs, &mchan->free); in mpc_dma_alloc_chan_resources()
569 LIST_HEAD(descs); in mpc_dma_free_chan_resources()
580 list_splice_tail_init(&mchan->free, &descs); in mpc_dma_free_chan_resources()
592 list_for_each_entry_safe(mdesc, tmp, &descs, node) in mpc_dma_free_chan_resources()
/drivers/media/pci/solo6x10/
solo6x10-p2m.c
102 p2m_dev->descs = desc; in solo_p2m_dma_desc()
182 desc = &p2m_dev->descs[p2m_dev->desc_idx]; in solo_p2m_isr()
/drivers/gpu/drm/exynos/
exynos_drm_g2d.c
180 struct g2d_buf_desc descs[MAX_REG_TYPE_NR]; member
716 buf_desc = &buf_info->descs[reg_type]; in g2d_map_cmdlist_gem()
792 buf_desc = &buf_info->descs[reg_type]; in g2d_unmap_cmdlist_gem()
995 buf_desc = &buf_info->descs[reg_type]; in g2d_check_reg_offset()
1009 buf_desc = &buf_info->descs[reg_type]; in g2d_check_reg_offset()
1024 buf_desc = &buf_info->descs[reg_type]; in g2d_check_reg_offset()
/drivers/soc/ti/
knav_qmss_acc.c
188 kq->descs[idx] = val; in knav_acc_int_handler()
413 kq->descs = devm_kzalloc(range->kdev->dev, in knav_acc_init_queue()
415 if (!kq->descs) in knav_acc_init_queue()
/drivers/usb/gadget/function/
f_fs.c
114 struct usb_endpoint_descriptor *descs[3]; member
1076 desc = epfile->ep->descs[desc_idx]; in ffs_epfile_ioctl()
1665 ds = ep->descs[desc_idx]; in ffs_func_eps_enable()
2492 if (unlikely(ffs_ep->descs[ep_desc_id])) { in __ffs_func_bind_do_descs()
2498 ffs_ep->descs[ep_desc_id] = ds; in __ffs_func_bind_do_descs()
2502 ds->bEndpointAddress = ffs_ep->descs[0]->bEndpointAddress; in __ffs_func_bind_do_descs()
2504 ds->wMaxPacketSize = ffs_ep->descs[0]->wMaxPacketSize; in __ffs_func_bind_do_descs()
2584 struct usb_endpoint_descriptor **descs; in __ffs_func_bind_do_nums() local
2585 descs = func->eps[idx].descs; in __ffs_func_bind_do_nums()
2586 newValue = descs[descs[0] ? 0 : 1]->bEndpointAddress; in __ffs_func_bind_do_nums()
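The f_fs.c hits above end with the descs[descs[0] ? 0 : 1]->bEndpointAddress idiom: the small descs[] array holds per-speed endpoint descriptors, and whichever slot is populated supplies the endpoint address. A user-space paraphrase, with a toy descriptor type in place of usb_endpoint_descriptor:

#include <stdio.h>

/* Minimal stand-in for a USB endpoint descriptor. */
struct example_ep_desc {
	unsigned char bEndpointAddress;
};

/* Sketch of the selection seen above: slot 0 is preferred when present,
 * otherwise slot 1 is used. */
static unsigned char ep_address(struct example_ep_desc *descs[3])
{
	return descs[descs[0] ? 0 : 1]->bEndpointAddress;
}

int main(void)
{
	struct example_ep_desc hs = { .bEndpointAddress = 0x81 };
	struct example_ep_desc *descs[3] = { NULL, &hs, NULL };

	printf("endpoint address: 0x%02x\n", ep_address(descs));
	return 0;
}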
