/drivers/tty/serial/8250/
8250_dma.c:
  in __dma_tx_complete():
    17  struct uart_8250_dma *dma = p->dma;    (local)
    22  dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr,
    27  dma->tx_running = 0;
    29  uart_xmit_advance(&p->port, dma->tx_size);
    35  if (ret || !dma->tx_running)
  in __dma_rx_complete():
    44  struct uart_8250_dma *dma = p->dma;    (local)
    55  dma_status = dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
    59  count = dma->rx_size - state.residue;
    61  tty_insert_flip_string(tty_port, dma->rx_buf, count);
    63  dma->rx_running = 0;
  [all …]
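The completion handlers above use the standard dmaengine residue pattern: the number of bytes actually transferred is the programmed size minus whatever the engine reports as still outstanding. A minimal sketch of that calculation, with illustrative names (not the 8250 driver's own helpers):

    #include <linux/dmaengine.h>

    /* Bytes that actually landed in an RX buffer of `programmed` bytes. */
    static size_t rx_bytes_done(struct dma_chan *chan, dma_cookie_t cookie,
                                size_t programmed)
    {
            struct dma_tx_state state;

            dmaengine_tx_status(chan, cookie, &state);
            return programmed - state.residue;
    }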
8250_mtk.c:
  at file scope:
    74  struct uart_8250_dma *dma;    (struct member)
  in mtk8250_dma_rx_complete():
    94  struct uart_8250_dma *dma = up->dma;    (local)
    107  dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
    108  total = dma->rx_size - state.residue;
    111  if ((data->rx_pos + cnt) > dma->rx_size)
    112  cnt = dma->rx_size - data->rx_pos;
    114  ptr = (unsigned char *)(data->rx_pos + dma->rx_buf);
    119  ptr = (unsigned char *)(dma->rx_buf);
  in mtk8250_rx_dma():
    136  struct uart_8250_dma *dma = up->dma;    (local)
    139  desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr,
  [all …]
/drivers/media/platform/xilinx/
xilinx-dma.c:
  in xvip_dma_verify_format():
    57  static int xvip_dma_verify_format(struct xvip_dma *dma)    (parameter)
    63  subdev = xvip_dma_remote_subdev(&dma->pad, &fmt.pad);
    72  if (dma->fmtinfo->code != fmt.format.code ||
    73  dma->format.height != fmt.format.height ||
    74  dma->format.width != fmt.format.width ||
    75  dma->format.colorspace != fmt.format.colorspace)
  in xvip_pipeline_start_stop():
    98  struct xvip_dma *dma = pipe->output;    (local)
    104  entity = &dma->video.entity;
  in xvip_pipeline_validate():
    195  struct xvip_dma *dma;    (local)
    200  dma = to_xvip_dma(media_entity_to_video_device(entity));
  [all …]
/drivers/media/v4l2-core/
videobuf-dma-sg.c:
  in videobuf_to_dma():
    141  return &mem->dma;
  in videobuf_dma_init():
    145  static void videobuf_dma_init(struct videobuf_dmabuf *dma)    (parameter)
    147  memset(dma, 0, sizeof(*dma));
    148  dma->magic = MAGIC_DMABUF;
  in videobuf_dma_init_user_locked():
    151  static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma,    (parameter)
    158  dma->direction = direction;
    159  switch (dma->direction) {
    172  dma->offset = data & ~PAGE_MASK;
    173  dma->size = size;
    174  dma->nr_pages = last-first+1;
  [all …]
/drivers/i2c/busses/
i2c-stm32.c:
  in stm32_i2c_dma_request():
    17  struct stm32_i2c_dma *dma;    (local)
    21  dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
    22  if (!dma)
    26  dma->chan_tx = dma_request_chan(dev, "tx");
    27  if (IS_ERR(dma->chan_tx)) {
    28  ret = PTR_ERR(dma->chan_tx);
    40  ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
    47  dma->chan_rx = dma_request_chan(dev, "rx");
    48  if (IS_ERR(dma->chan_rx)) {
    49  ret = PTR_ERR(dma->chan_rx);
  [all …]
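stm32_i2c_dma_request() above follows the usual dmaengine client bring-up: request named channels, apply a slave config, and release what was acquired on failure. A minimal sketch under assumed names; the FIFO address and bus width below are placeholders, not STM32 values:

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    static int example_dma_request(struct device *dev, struct dma_chan **tx,
                                   struct dma_chan **rx, dma_addr_t fifo)
    {
            struct dma_slave_config cfg = {
                    .dst_addr = fifo,  /* device data register (assumed) */
                    .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
            };
            int ret;

            *tx = dma_request_chan(dev, "tx");
            if (IS_ERR(*tx))
                    return PTR_ERR(*tx);

            ret = dmaengine_slave_config(*tx, &cfg);
            if (ret)
                    goto err_tx;

            *rx = dma_request_chan(dev, "rx");
            if (IS_ERR(*rx)) {
                    ret = PTR_ERR(*rx);
                    goto err_tx;
            }
            return 0;

    err_tx:
            dma_release_channel(*tx);
            return ret;
    }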
i2c-at91-master.c:
  in at91_twi_dma_cleanup():
    135  struct at91_twi_dma *dma = &dev->dma;    (local)
    139  if (dma->xfer_in_progress) {
    140  if (dma->direction == DMA_FROM_DEVICE)
    141  dmaengine_terminate_sync(dma->chan_rx);
    143  dmaengine_terminate_sync(dma->chan_tx);
    144  dma->xfer_in_progress = false;
    146  if (dma->buf_mapped) {
    147  dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]),
    148  dev->buf_len, dma->direction);
    149  dma->buf_mapped = false;
  [all …]
/drivers/misc/bcm-vk/
bcm_vk_sg.c:
  at file scope:
    27  struct bcm_vk_dma *dma,
    30  static int bcm_vk_dma_free(struct device *dev, struct bcm_vk_dma *dma);
  in bcm_vk_dma_alloc():
    36  struct bcm_vk_dma *dma,    (parameter)
    60  dma->nr_pages = last - first + 1;
    63  dma->pages = kmalloc_array(dma->nr_pages,
    66  if (!dma->pages)
    70  data, vkdata->size, dma->nr_pages);
    72  dma->direction = direction;
    76  dma->nr_pages,
    78  dma->pages);
  [all …]
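The "last - first + 1" arithmetic above is the standard way to size a pages[] array for a user buffer that may start and end mid-page. A worked sketch, purely illustrative:

    #include <linux/mm.h>

    static unsigned int buf_nr_pages(unsigned long uaddr, size_t size)
    {
            unsigned long first = uaddr >> PAGE_SHIFT;
            unsigned long last = (uaddr + size - 1) >> PAGE_SHIFT;

            /* e.g. 100 bytes straddling a page boundary still needs 2 pages */
            return last - first + 1;
    }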
/drivers/soc/ti/
knav_dma.c:
  at file scope:
    109  struct knav_dma_device *dma;    (struct member)
  in dma_hw_enable_all():
    256  static void dma_hw_enable_all(struct knav_dma_device *dma)    (parameter)
    260  for (i = 0; i < dma->max_tx_chan; i++) {
    261  writel_relaxed(0, &dma->reg_tx_chan[i].mode);
    262  writel_relaxed(DMA_ENABLE, &dma->reg_tx_chan[i].control);
  in knav_dma_hw_init():
    267  static void knav_dma_hw_init(struct knav_dma_device *dma)    (parameter)
    272  spin_lock(&dma->lock);
    273  v = dma->loopback ? DMA_LOOPBACK : 0;
    274  writel_relaxed(v, &dma->reg_global->emulation_control);
    276  v = readl_relaxed(&dma->reg_global->perf_control);
  [all …]
/drivers/media/pci/netup_unidvb/
netup_unidvb_core.c:
  at file scope:
    112  static void netup_unidvb_queue_cleanup(struct netup_dma *dma);
  in netup_unidvb_tuner_ctrl():
    145  struct netup_dma *dma = priv;    (local)
    150  ndev = dma->ndev;
    152  __func__, dma->num, is_dvb_tc);
    154  mask = (dma->num == 0) ? GPIO_RFA_CTL : GPIO_RFB_CTL;
  in netup_unidvb_dma_enable():
    189  static void netup_unidvb_dma_enable(struct netup_dma *dma, int enable)    (parameter)
    191  u32 irq_mask = (dma->num == 0 ?
    194  dev_dbg(&dma->ndev->pci_dev->dev,
    195  "%s(): DMA%d enable %d\n", __func__, dma->num, enable);
    197  writel(BIT_DMA_RUN, &dma->regs->ctrlstat_set);
  [all …]
/drivers/gpu/drm/
drm_dma.c:
  in drm_legacy_dma_setup():
    63  dev->dma = kzalloc(sizeof(*dev->dma), GFP_KERNEL);
    64  if (!dev->dma)
    68  memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0]));
  in drm_legacy_dma_takedown():
    83  struct drm_device_dma *dma = dev->dma;    (local)
    91  if (!dma)
    96  if (dma->bufs[i].seg_count) {
    100  dma->bufs[i].buf_count,
    101  dma->bufs[i].seg_count);
    102  for (j = 0; j < dma->bufs[i].seg_count; j++) {
    103  if (dma->bufs[i].seglist[j]) {
  [all …]
/drivers/media/pci/ivtv/
ivtv-udma.c:
  in ivtv_udma_fill_sg_list():
    25  int ivtv_udma_fill_sg_list (struct ivtv_user_dma *dma, struct ivtv_dma_page_info *dma_page, int map…    (parameter)
    40  if (PageHighMem(dma->map[map_offset])) {
    43  if (dma->bouncemap[map_offset] == NULL)
    44  dma->bouncemap[map_offset] = alloc_page(GFP_KERNEL);
    45  if (dma->bouncemap[map_offset] == NULL)
    48  src = kmap_atomic(dma->map[map_offset]) + offset;
    49  memcpy(page_address(dma->bouncemap[map_offset]) + offset, src, len);
    52  sg_set_page(&dma->SGlist[map_offset], dma->bouncemap[map_offset], len, offset);
    55  sg_set_page(&dma->SGlist[map_offset], dma->map[map_offset], len, offset);
  in ivtv_udma_fill_sg_array():
    63  void ivtv_udma_fill_sg_array (struct ivtv_user_dma *dma, u32 buffer_offset, u32 buffer_offset_2, u3…    (parameter)
  [all …]
/drivers/gpu/drm/nouveau/
nouveau_dma.c:
  in READ_GET():
    65  val > chan->push.addr + (chan->dma.max << 2))
  in nv50_dma_push():
    76  int ip = (chan->dma.ib_put * 2) + chan->dma.ib_base;
    78  BUG_ON(chan->dma.ib_free < 1);
    83  chan->dma.ib_put = (chan->dma.ib_put + 1) & chan->dma.ib_max;
    89  nvif_wr32(&chan->user, 0x8c, chan->dma.ib_put);
    92  chan->dma.ib_free--;
  in nv50_dma_push_wait():
    100  while (chan->dma.ib_free < count) {
    113  chan->dma.ib_free = get - chan->dma.ib_put;
    114  if (chan->dma.ib_free <= 0)
    115  chan->dma.ib_free += chan->dma.ib_max;
  [all …]
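Lines 113-115 above are the classic ring free-space computation: the free slots are the distance from the CPU's put index back to the hardware's get index, wrapped over the ring size. As a standalone sketch (names illustrative, not nouveau's):

    static int ring_free_slots(int get, int put, int ring_max)
    {
            int free = get - put;

            if (free <= 0)
                    free += ring_max;  /* GET has wrapped behind PUT */
            return free;
    }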
/drivers/vfio/
vfio_iommu_type1.c:
  in vfio_find_dma():
    176  struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node);    (local)
    178  if (start + size <= dma->iova)
    180  else if (start >= dma->iova + dma->size)
    183  return dma;
  in vfio_find_dma_first_node():
    197  struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node);    (local)
    199  if (start < dma->iova + dma->size) {
    201  dma_res = dma;
    202  if (start >= dma->iova)
  in vfio_link_dma():
    217  struct vfio_dma *dma;    (local)
    221  dma = rb_entry(parent, struct vfio_dma, node);
  [all …]
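vfio_find_dma() above is a textbook interval lookup in an rbtree keyed by IOVA: descend left when the query range ends before the node, right when it starts after, and stop on overlap. A self-contained sketch with illustrative struct names (only the field names mirror the excerpt):

    #include <linux/rbtree.h>

    struct example_dma {
            struct rb_node node;
            unsigned long iova;
            size_t size;
    };

    static struct example_dma *find_overlap(struct rb_root *root,
                                            unsigned long start, size_t size)
    {
            struct rb_node *node = root->rb_node;

            while (node) {
                    struct example_dma *dma =
                            rb_entry(node, struct example_dma, node);

                    if (start + size <= dma->iova)
                            node = node->rb_left;   /* query ends before node */
                    else if (start >= dma->iova + dma->size)
                            node = node->rb_right;  /* query starts after node */
                    else
                            return dma;             /* ranges overlap */
            }
            return NULL;
    }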
/drivers/thunderbolt/
dma_port.c:
  in dma_port_alloc():
    203  struct tb_dma_port *dma;    (local)
    210  dma = kzalloc(sizeof(*dma), GFP_KERNEL);
    211  if (!dma)
    214  dma->buf = kmalloc_array(MAIL_DATA_DWORDS, sizeof(u32), GFP_KERNEL);
    215  if (!dma->buf) {
    216  kfree(dma);
    220  dma->sw = sw;
    221  dma->port = port;
    222  dma->base = DMA_PORT_CAP;
    224  return dma;
  [all …]
/drivers/net/ethernet/i825xx/
lib82596.c:
  at file scope:
    315  struct i596_dma *dma;    (struct member)
  in virt_to_dma():
    370  return lp->dma_addr + ((unsigned long)v - (unsigned long)lp->dma);
  in wait_istat():
    400  static inline int wait_istat(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str)    (parameter)
    402  dma_sync_cpu(dev, &(dma->iscp), sizeof(struct i596_iscp));
    403  while (--delcnt && dma->iscp.stat) {
    405  dma_sync_cpu(dev, &(dma->iscp), sizeof(struct i596_iscp));
    409  dev->name, str, SWAP16(dma->iscp.stat));
  in wait_cmd():
    416  static inline int wait_cmd(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str)    (parameter)
    418  dma_sync_cpu(dev, &(dma->scb), sizeof(struct i596_scb));
    419  while (--delcnt && dma->scb.command) {
  [all …]
/drivers/comedi/drivers/
comedi_isadma.c:
  in comedi_isadma_poll():
    98  unsigned int comedi_isadma_poll(struct comedi_isadma *dma)    (parameter)
    100  struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
  in comedi_isadma_alloc():
    156  struct comedi_isadma *dma = NULL;    (local)
    164  dma = kzalloc(sizeof(*dma), GFP_KERNEL);
    165  if (!dma)
    171  dma->desc = desc;
    172  dma->n_desc = n_desc;
    174  dma->dev = dev->hw_dev;
    184  dma->dev = dev->class_dev;
    195  dma->chan = dma_chans[0];
  [all …]
/drivers/crypto/qce/
dma.c:
  in qce_dma_request():
    11  int qce_dma_request(struct device *dev, struct qce_dma_data *dma)    (parameter)
    15  dma->txchan = dma_request_chan(dev, "tx");
    16  if (IS_ERR(dma->txchan))
    17  return PTR_ERR(dma->txchan);
    19  dma->rxchan = dma_request_chan(dev, "rx");
    20  if (IS_ERR(dma->rxchan)) {
    21  ret = PTR_ERR(dma->rxchan);
    25  dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ,
    27  if (!dma->result_buf) {
    32  dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ;
  [all …]
/drivers/net/ethernet/
lantiq_xrx200.c:
  at file scope:
    65  struct ltq_dma_channel dma;    (struct member)
  in xrx200_flush_dma():
    135  struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
    142  ch->dma.desc++;
    143  ch->dma.desc %= LTQ_DESC_NUM;
  in xrx200_open():
    152  ltq_dma_open(&priv->chan_tx.dma);
    153  ltq_dma_enable_irq(&priv->chan_tx.dma);
    156  ltq_dma_open(&priv->chan_rx.dma);
    165  ltq_dma_enable_irq(&priv->chan_rx.dma);
  in xrx200_close():
    179  ltq_dma_close(&priv->chan_rx.dma);
    182  ltq_dma_close(&priv->chan_tx.dma);
  [all …]
/drivers/scsi/arm/
cumana_1.c:
  in cumanascsi_pwrite():
    49  u8 __iomem *dma = hostdata->pdma_io + 0x2000;    (local)
    64  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    65  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    66  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    67  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    68  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    69  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    70  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    71  v=*laddr++; writew(L(v), dma); writew(H(v), dma);
    88  writeb(*addr++, dma);
  [all …]
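Despite the variable name, there is no DMA engine here: this is pseudo-DMA, where the CPU pushes every byte through a data window with MMIO writes, and the eight repeated writew() pairs above are that loop manually unrolled. Reduced to its essence (illustrative only, not the driver's exact routine):

    #include <linux/io.h>

    static void pseudo_dma_out(u8 __iomem *win, const u8 *buf, size_t len)
    {
            while (len--)
                    writeb(*buf++, win);  /* one bus write per byte */
    }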
/drivers/mfd/
stm32-timers.c:
  in stm32_timers_dma_done():
    28  struct stm32_timers_dma *dma = p;    (local)
    32  status = dmaengine_tx_status(dma->chan, dma->chan->cookie, &state);
    34  complete(&dma->completion);
  in stm32_timers_dma_burst_read():
    57  struct stm32_timers_dma *dma = &ddata->dma;    (local)
    75  if (!dma->chans[id])
    77  mutex_lock(&dma->lock);
    80  dma->chan = dma->chans[id];
    89  config.src_addr = (dma_addr_t)dma->phys_base + TIM_DMAR;
    91  ret = dmaengine_slave_config(dma->chan, &config);
    95  desc = dmaengine_prep_slave_single(dma->chan, dma_buf, len,
  [all …]
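stm32_timers_dma_burst_read() above shows the one-shot slave-transfer recipe: prepare a single-buffer descriptor, attach a completion callback, submit, kick the channel, and wait with a timeout. A minimal sketch under assumed names (the timeout value is arbitrary):

    #include <linux/completion.h>
    #include <linux/dmaengine.h>
    #include <linux/jiffies.h>

    static void xfer_done(void *data)
    {
            complete(data);  /* wake the waiter below */
    }

    static int burst_read_once(struct dma_chan *chan, dma_addr_t buf, size_t len)
    {
            DECLARE_COMPLETION_ONSTACK(done);
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;

            desc = dmaengine_prep_slave_single(chan, buf, len, DMA_DEV_TO_MEM,
                                               DMA_PREP_INTERRUPT);
            if (!desc)
                    return -EBUSY;

            desc->callback = xfer_done;
            desc->callback_param = &done;
            cookie = dmaengine_submit(desc);
            if (dma_submit_error(cookie))
                    return -EIO;

            dma_async_issue_pending(chan);
            if (!wait_for_completion_timeout(&done, msecs_to_jiffies(1000))) {
                    dmaengine_terminate_sync(chan);  /* give up cleanly */
                    return -ETIMEDOUT;
            }
            return 0;
    }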
/drivers/iio/adc/
ti_am335x_adc.c:
  at file scope:
    44  struct tiadc_dma dma;    (struct member)
  in tiadc_dma_rx_complete():
    218  struct tiadc_dma *dma = &adc_dev->dma;    (local)
    222  data = dma->buf + dma->current_period * dma->period_size;
    223  dma->current_period = 1 - dma->current_period; /* swap the buffer ID */
    225  for (i = 0; i < dma->period_size; i += indio_dev->scan_bytes) {
  in tiadc_start_dma():
    234  struct tiadc_dma *dma = &adc_dev->dma;    (local)
    237  dma->current_period = 0; /* We start to fill period 0 */
    246  dma->fifo_thresh = rounddown(FIFO1_THRESHOLD + 1,
    250  dma->period_size = rounddown(DMA_BUFFER_SIZE / 2,
    251  (dma->fifo_thresh + 1) * sizeof(u16));
  [all …]
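The tiadc code above implements two-period ("ping-pong") buffering over one cyclic DMA buffer: each completion drains the period that just filled, then flips current_period between 0 and 1 so the engine's next period lands in the other half. The bookkeeping, isolated into a sketch with illustrative names:

    #include <linux/types.h>

    struct pingpong {
            u8 *buf;                      /* two periods, back to back */
            size_t period_size;
            unsigned int current_period;  /* 0 or 1 */
    };

    static u8 *pingpong_take_filled(struct pingpong *pp)
    {
            u8 *data = pp->buf + pp->current_period * pp->period_size;

            pp->current_period = 1 - pp->current_period;  /* swap halves */
            return data;
    }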
/drivers/tty/serial/
samsung_tty.c:
  at file scope:
    154  struct s3c24xx_uart_dma *dma;    (struct member)
  in s3c24xx_serial_stop_tx():
    290  struct s3c24xx_uart_dma *dma = ourport->dma;    (local)
    310  if (dma && dma->tx_chan && ourport->tx_in_progress == S3C24XX_TX_DMA) {
    311  dmaengine_pause(dma->tx_chan);
    312  dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state);
    313  dmaengine_terminate_all(dma->tx_chan);
    314  dma_sync_single_for_cpu(dma->tx_chan->device->dev,
    315  dma->tx_transfer_addr, dma->tx_size,
    317  async_tx_ack(dma->tx_desc);
    318  count = dma->tx_bytes_requested - state.residue;
  [all …]
msm_serial.c:
  in msm_stop_dma():
    245  static void msm_stop_dma(struct uart_port *port, struct msm_dma *dma)    (parameter)
    251  mapped = dma->count;
    252  dma->count = 0;
    254  dmaengine_terminate_all(dma->chan);
    264  val &= ~dma->enable_bit;
    268  dma_unmap_single(dev, dma->phys, mapped, dma->dir);
  in msm_release_dma():
    273  struct msm_dma *dma;    (local)
    275  dma = &msm_port->tx_dma;
    276  if (dma->chan) {
    277  msm_stop_dma(&msm_port->uart, dma);
  [all …]
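msm_stop_dma() above is careful about teardown order: capture the mapped length, zero the live count, terminate the channel, mask the peripheral's DMA enable, and only then unmap the buffer, so the device can no longer write into memory that is being unmapped. Sketched with placeholder names:

    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>

    struct example_dma_ctx {
            struct dma_chan *chan;
            dma_addr_t phys;
            size_t count;
            enum dma_data_direction dir;
    };

    static void example_stop_dma(struct device *dev, struct example_dma_ctx *d)
    {
            size_t mapped = d->count;

            d->count = 0;
            dmaengine_terminate_all(d->chan);
            /* ... clear the controller's DMA-enable bit here ... */
            if (mapped)
                    dma_unmap_single(dev, d->phys, mapped, d->dir);
    }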
/drivers/spi/
spi-fsl-dspi.c:
  at file scope:
    230  struct fsl_dspi_dma *dma;    (struct member)
  in dspi_tx_dma_callback():
    361  struct fsl_dspi_dma *dma = dspi->dma;    (local)
    363  complete(&dma->cmd_tx_complete);
  in dspi_rx_dma_callback():
    369  struct fsl_dspi_dma *dma = dspi->dma;    (local)
    374  dspi_push_rx(dspi, dspi->dma->rx_dma_buf[i]);
    377  complete(&dma->cmd_rx_complete);
  in dspi_next_xfer_dma_submit():
    383  struct fsl_dspi_dma *dma = dspi->dma;    (local)
    388  dspi->dma->tx_dma_buf[i] = dspi_pop_tx_pushr(dspi);
    390  dma->tx_desc = dmaengine_prep_slave_single(dma->chan_tx,
    391  dma->tx_dma_phys,
  [all …]
/drivers/media/pci/ddbridge/
ddbridge-core.c:
  in ddb_set_dma_table():
    126  struct ddb_dma *dma = io->dma;    (local)
    130  if (!dma)
    132  for (i = 0; i < dma->num; i++) {
    133  mem = dma->pbuf[i];
    134  ddbwritel(dev, mem & 0xffffffff, dma->bufregs + i * 8);
    135  ddbwritel(dev, mem >> 32, dma->bufregs + i * 8 + 4);
    137  dma->bufval = ((dma->div & 0x0f) << 16) |
    138  ((dma->num & 0x1f) << 11) |
    139  ((dma->size >> 7) & 0x7ff);
  in ddb_unredirect():
    185  if (port->output->dma->running) {
  [all …]
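The bufval assignment above packs three fields into one register word: judging by the masks and shifts, bits 19:16 take the divider, bits 15:11 the buffer count, and bits 10:0 the buffer size in 128-byte units. A worked example with invented values, purely illustrative:

    #include <linux/types.h>

    static u32 pack_bufval(u32 div, u32 num, u32 size)
    {
            return ((div & 0x0f) << 16) |  /* divider      */
                   ((num & 0x1f) << 11) |  /* buffer count */
                   ((size >> 7) & 0x7ff);  /* size / 128   */
    }

    /* pack_bufval(1, 8, 0x8000) == 0x14100 */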