
Searched refs:dma (Results 1 – 25 of 699) sorted by relevance


/drivers/tty/serial/8250/
8250_dma.c
21 struct uart_8250_dma *dma = p->dma; in __dma_tx_complete() local
26 dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr, in __dma_tx_complete()
31 dma->tx_running = 0; in __dma_tx_complete()
33 xmit->tail += dma->tx_size; in __dma_tx_complete()
35 p->port.icount.tx += dma->tx_size; in __dma_tx_complete()
52 struct uart_8250_dma *dma = p->dma; in __dma_rx_complete() local
57 dma->rx_running = 0; in __dma_rx_complete()
58 dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state); in __dma_rx_complete()
60 count = dma->rx_size - state.residue; in __dma_rx_complete()
62 tty_insert_flip_string(tty_port, dma->rx_buf, count); in __dma_rx_complete()
[all …]
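
The 8250 fragment above is the standard dmaengine TX-completion shape: hand the buffer back to the CPU, clear the running flag, then retire the sent bytes from the circular transmit buffer. A minimal sketch of that shape, assuming hypothetical names (my_uart_dma and my_tx_complete are not the driver's):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/serial_core.h>    /* UART_XMIT_SIZE */

    struct my_uart_dma {
            struct dma_chan *txchan;
            dma_addr_t tx_addr;       /* mapping of the circular TX buffer */
            size_t tx_size;           /* bytes submitted in this transfer */
            unsigned int tail;
            unsigned int tx_running;
    };

    static void my_tx_complete(void *param)
    {
            struct my_uart_dma *dma = param;

            /* Give the streaming buffer back to the CPU before touching it. */
            dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr,
                                    UART_XMIT_SIZE, DMA_TO_DEVICE);

            dma->tx_running = 0;
            /* Retire what was sent; the buffer is a power-of-two ring. */
            dma->tail = (dma->tail + dma->tx_size) & (UART_XMIT_SIZE - 1);
    }
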
8250_omap.c
255 struct uart_8250_dma *dma = up->dma; in omap8250_restore_regs() local
257 if (dma && dma->tx_running) { in omap8250_restore_regs()
429 if (up->dma) in omap_8250_set_termios()
570 if (up->dma) { in omap8250_irq()
608 up->dma = NULL; in omap_8250_startup()
610 if (up->dma) { in omap_8250_startup()
615 up->dma = NULL; in omap_8250_startup()
637 if (up->dma) in omap_8250_startup()
638 up->dma->rx_dma(up, 0); in omap_8250_startup()
656 if (up->dma) in omap_8250_shutdown()
[all …]
/drivers/media/platform/xilinx/
xilinx-dma.c
62 static int xvip_dma_verify_format(struct xvip_dma *dma) in xvip_dma_verify_format() argument
68 subdev = xvip_dma_remote_subdev(&dma->pad, &fmt.pad); in xvip_dma_verify_format()
77 if (dma->fmtinfo->code != fmt.format.code || in xvip_dma_verify_format()
78 dma->format.height != fmt.format.height || in xvip_dma_verify_format()
79 dma->format.width != fmt.format.width || in xvip_dma_verify_format()
80 dma->format.colorspace != fmt.format.colorspace) in xvip_dma_verify_format()
103 struct xvip_dma *dma = pipe->output; in xvip_pipeline_start_stop() local
109 entity = &dma->video.entity; in xvip_pipeline_start_stop()
194 struct xvip_dma *dma; in xvip_pipeline_validate() local
199 dma = to_xvip_dma(media_entity_to_video_device(entity)); in xvip_pipeline_validate()
[all …]
/drivers/media/v4l2-core/
videobuf-dma-sg.c
144 return &mem->dma; in videobuf_to_dma()
148 static void videobuf_dma_init(struct videobuf_dmabuf *dma) in videobuf_dma_init() argument
150 memset(dma, 0, sizeof(*dma)); in videobuf_dma_init()
151 dma->magic = MAGIC_DMABUF; in videobuf_dma_init()
154 static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma, in videobuf_dma_init_user_locked() argument
161 dma->direction = direction; in videobuf_dma_init_user_locked()
162 switch (dma->direction) { in videobuf_dma_init_user_locked()
175 dma->offset = data & ~PAGE_MASK; in videobuf_dma_init_user_locked()
176 dma->size = size; in videobuf_dma_init_user_locked()
177 dma->nr_pages = last-first+1; in videobuf_dma_init_user_locked()
[all …]
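
The videobuf-dma-sg fragment shows the standard arithmetic for turning a raw user pointer into a page span before pinning. A sketch of just that math (buf_nr_pages is a hypothetical helper):

    #include <linux/mm.h>

    static unsigned long buf_nr_pages(unsigned long data, unsigned long size,
                                      unsigned long *offset)
    {
            unsigned long first = (data & PAGE_MASK) >> PAGE_SHIFT;
            unsigned long last = ((data + size - 1) & PAGE_MASK) >> PAGE_SHIFT;

            *offset = data & ~PAGE_MASK;  /* byte offset inside the first page */
            return last - first + 1;      /* pages covering [data, data + size) */
    }

The "+ size - 1" matters: a buffer that ends exactly on a page boundary must not count the following page.
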
/drivers/media/pci/netup_unidvb/
netup_unidvb_core.c
120 static void netup_unidvb_queue_cleanup(struct netup_dma *dma);
145 struct netup_dma *dma = priv; in netup_unidvb_tuner_ctrl() local
150 ndev = dma->ndev; in netup_unidvb_tuner_ctrl()
152 __func__, dma->num, is_dvb_tc); in netup_unidvb_tuner_ctrl()
154 mask = (dma->num == 0) ? GPIO_RFA_CTL : GPIO_RFB_CTL; in netup_unidvb_tuner_ctrl()
184 static void netup_unidvb_dma_enable(struct netup_dma *dma, int enable) in netup_unidvb_dma_enable() argument
186 u32 irq_mask = (dma->num == 0 ? in netup_unidvb_dma_enable()
189 dev_dbg(&dma->ndev->pci_dev->dev, in netup_unidvb_dma_enable()
190 "%s(): DMA%d enable %d\n", __func__, dma->num, enable); in netup_unidvb_dma_enable()
192 writel(BIT_DMA_RUN, &dma->regs->ctrlstat_set); in netup_unidvb_dma_enable()
[all …]
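
netup_unidvb_dma_enable() works against write-one-to-set / write-one-to-clear register pairs, so enabling and disabling never read-modify-write the control word. A sketch of that idiom, with an assumed register layout and bit position:

    #include <linux/io.h>
    #include <linux/bits.h>

    struct my_dma_regs {
            u32 ctrlstat_set;         /* write 1s to set bits */
            u32 ctrlstat_clear;       /* write 1s to clear bits */
    };

    #define MY_BIT_DMA_RUN BIT(0)     /* assumed bit, not the NetUP layout */

    static void my_dma_enable(struct my_dma_regs __iomem *regs, bool enable)
    {
            if (enable)
                    writel(MY_BIT_DMA_RUN, &regs->ctrlstat_set);
            else
                    writel(MY_BIT_DMA_RUN, &regs->ctrlstat_clear);
    }
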
/drivers/soc/ti/
knav_dma.c
117 struct knav_dma_device *dma; member
257 static void dma_hw_enable_all(struct knav_dma_device *dma) in dma_hw_enable_all() argument
261 for (i = 0; i < dma->max_tx_chan; i++) { in dma_hw_enable_all()
262 writel_relaxed(0, &dma->reg_tx_chan[i].mode); in dma_hw_enable_all()
263 writel_relaxed(DMA_ENABLE, &dma->reg_tx_chan[i].control); in dma_hw_enable_all()
268 static void knav_dma_hw_init(struct knav_dma_device *dma) in knav_dma_hw_init() argument
273 spin_lock(&dma->lock); in knav_dma_hw_init()
274 v = dma->loopback ? DMA_LOOPBACK : 0; in knav_dma_hw_init()
275 writel_relaxed(v, &dma->reg_global->emulation_control); in knav_dma_hw_init()
277 v = readl_relaxed(&dma->reg_global->perf_control); in knav_dma_hw_init()
[all …]
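
knav_dma_hw_init() is a plain locked register-init routine: configuration writes use the _relaxed MMIO accessors because no DMA-buffer ordering is at stake. A sketch under assumed names (my_dma_dev, MY_DMA_LOOPBACK):

    #include <linux/io.h>
    #include <linux/bits.h>
    #include <linux/spinlock.h>

    #define MY_DMA_LOOPBACK BIT(31)   /* assumed bit, for illustration only */

    struct my_dma_dev {
            spinlock_t lock;
            bool loopback;
            void __iomem *emulation_control;
    };

    static void my_dma_hw_init(struct my_dma_dev *dma)
    {
            u32 v;

            spin_lock(&dma->lock);
            v = dma->loopback ? MY_DMA_LOOPBACK : 0;
            writel_relaxed(v, dma->emulation_control);
            spin_unlock(&dma->lock);
    }
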
/drivers/gpu/drm/
drm_dma.c
60 dev->dma = kzalloc(sizeof(*dev->dma), GFP_KERNEL); in drm_legacy_dma_setup()
61 if (!dev->dma) in drm_legacy_dma_setup()
65 memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0])); in drm_legacy_dma_setup()
80 struct drm_device_dma *dma = dev->dma; in drm_legacy_dma_takedown() local
88 if (!dma) in drm_legacy_dma_takedown()
93 if (dma->bufs[i].seg_count) { in drm_legacy_dma_takedown()
97 dma->bufs[i].buf_count, in drm_legacy_dma_takedown()
98 dma->bufs[i].seg_count); in drm_legacy_dma_takedown()
99 for (j = 0; j < dma->bufs[i].seg_count; j++) { in drm_legacy_dma_takedown()
100 if (dma->bufs[i].seglist[j]) { in drm_legacy_dma_takedown()
[all …]
drm_bufs.c
602 struct drm_device_dma *dma = dev->dma; in drm_legacy_addbufs_agp() local
618 if (!dma) in drm_legacy_addbufs_agp()
666 entry = &dma->bufs[order]; in drm_legacy_addbufs_agp()
693 buf->idx = dma->buf_count + entry->buf_count; in drm_legacy_addbufs_agp()
698 buf->offset = (dma->byte_count + offset); in drm_legacy_addbufs_agp()
726 temp_buflist = krealloc(dma->buflist, in drm_legacy_addbufs_agp()
727 (dma->buf_count + entry->buf_count) * in drm_legacy_addbufs_agp()
728 sizeof(*dma->buflist), GFP_KERNEL); in drm_legacy_addbufs_agp()
736 dma->buflist = temp_buflist; in drm_legacy_addbufs_agp()
739 dma->buflist[i + dma->buf_count] = &entry->buflist[i]; in drm_legacy_addbufs_agp()
[all …]
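
The krealloc() call in drm_legacy_addbufs_agp() is the safe grow-an-array idiom: assign through a temporary so that, on allocation failure, the old array is still reachable. A generic sketch (grow_list is hypothetical):

    #include <linux/slab.h>
    #include <linux/errno.h>

    static int grow_list(void ***listp, size_t old_count, size_t extra)
    {
            void **tmp;

            tmp = krealloc(*listp, (old_count + extra) * sizeof(*tmp),
                           GFP_KERNEL);
            if (!tmp)
                    return -ENOMEM;   /* *listp is untouched on failure */

            *listp = tmp;
            return 0;
    }
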
/drivers/media/pci/ivtv/
ivtv-udma.c
37 int ivtv_udma_fill_sg_list (struct ivtv_user_dma *dma, struct ivtv_dma_page_info *dma_page, int map… in ivtv_udma_fill_sg_list() argument
52 if (PageHighMem(dma->map[map_offset])) { in ivtv_udma_fill_sg_list()
55 if (dma->bouncemap[map_offset] == NULL) in ivtv_udma_fill_sg_list()
56 dma->bouncemap[map_offset] = alloc_page(GFP_KERNEL); in ivtv_udma_fill_sg_list()
57 if (dma->bouncemap[map_offset] == NULL) in ivtv_udma_fill_sg_list()
60 src = kmap_atomic(dma->map[map_offset]) + offset; in ivtv_udma_fill_sg_list()
61 memcpy(page_address(dma->bouncemap[map_offset]) + offset, src, len); in ivtv_udma_fill_sg_list()
64 sg_set_page(&dma->SGlist[map_offset], dma->bouncemap[map_offset], len, offset); in ivtv_udma_fill_sg_list()
67 sg_set_page(&dma->SGlist[map_offset], dma->map[map_offset], len, offset); in ivtv_udma_fill_sg_list()
75 void ivtv_udma_fill_sg_array (struct ivtv_user_dma *dma, u32 buffer_offset, u32 buffer_offset_2, u3… in ivtv_udma_fill_sg_array() argument
[all …]
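
The ivtv fragment handles highmem pages that cannot go straight into the SG list for this hardware: it copies them through a lazily allocated lowmem bounce page first. A sketch of that branch (my_add_page and the one-bounce-page-per-slot scheme are assumptions drawn from the fragment):

    #include <linux/highmem.h>
    #include <linux/mm.h>
    #include <linux/scatterlist.h>
    #include <linux/string.h>

    static int my_add_page(struct scatterlist *sg, struct page *page,
                           struct page **bounce, unsigned int offset,
                           unsigned int len)
    {
            if (PageHighMem(page)) {
                    void *src;

                    if (!*bounce)
                            *bounce = alloc_page(GFP_KERNEL);
                    if (!*bounce)
                            return -ENOMEM;

                    src = kmap_atomic(page);
                    memcpy(page_address(*bounce) + offset, src + offset, len);
                    kunmap_atomic(src);
                    sg_set_page(sg, *bounce, len, offset);
            } else {
                    sg_set_page(sg, page, len, offset);
            }
            return 0;
    }
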
/drivers/net/ethernet/i825xx/
lib82596.c
315 struct i596_dma *dma; member
369 static inline int wait_istat(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str) in wait_istat() argument
371 DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp)); in wait_istat()
372 while (--delcnt && dma->iscp.stat) { in wait_istat()
374 DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp)); in wait_istat()
378 dev->name, str, SWAP16(dma->iscp.stat)); in wait_istat()
385 static inline int wait_cmd(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str) in wait_cmd() argument
387 DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb)); in wait_cmd()
388 while (--delcnt && dma->scb.command) { in wait_cmd()
390 DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb)); in wait_cmd()
[all …]
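
wait_istat()/wait_cmd() poll a descriptor that the device writes, so every re-read is preceded by a cache invalidate (the driver-local DMA_INV macro). A sketch of the same loop using the generic streaming-DMA API instead of DMA_INV; the names and timeout convention are mine:

    #include <linux/delay.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    static int wait_device_clear(struct device *dev, dma_addr_t desc_dma,
                                 const volatile u16 *stat, int delcnt)
    {
            dma_sync_single_for_cpu(dev, desc_dma, sizeof(*stat),
                                    DMA_FROM_DEVICE);
            while (--delcnt && *stat) {
                    udelay(10);
                    /* Invalidate again: the device owns this memory. */
                    dma_sync_single_for_cpu(dev, desc_dma, sizeof(*stat),
                                            DMA_FROM_DEVICE);
            }
            return delcnt ? 0 : -ETIMEDOUT;
    }
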
/drivers/gpu/drm/nouveau/
nouveau_dma.c
35 mem = &mem[chan->dma.cur]; in OUT_RINGp()
40 chan->dma.cur += nr_dwords; in OUT_RINGp()
75 val > chan->push.vma.offset + (chan->dma.max << 2)) in READ_GET()
88 int ip = (chan->dma.ib_put * 2) + chan->dma.ib_base; in nv50_dma_push()
95 BUG_ON(chan->dma.ib_free < 1); in nv50_dma_push()
100 chan->dma.ib_put = (chan->dma.ib_put + 1) & chan->dma.ib_max; in nv50_dma_push()
106 nvif_wr32(&chan->user, 0x8c, chan->dma.ib_put); in nv50_dma_push()
107 chan->dma.ib_free--; in nv50_dma_push()
115 while (chan->dma.ib_free < count) { in nv50_dma_push_wait()
128 chan->dma.ib_free = get - chan->dma.ib_put; in nv50_dma_push_wait()
[all …]
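
nv50_dma_push() advances its indirect-buffer put pointer with a mask rather than a modulo, which works because the ring size is a power of two and ib_max holds size - 1. The core of that, with hypothetical field names:

    #include <linux/types.h>

    struct my_ring {
            u32 put;
            u32 max;      /* entries - 1; entry count must be a power of two */
            int free;
    };

    static void my_ring_push(struct my_ring *r)
    {
            r->put = (r->put + 1) & r->max;   /* wrap without a division */
            r->free--;
    }
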
/drivers/scsi/arm/
cumana_1.c
34 void __iomem *dma
51 void __iomem *dma = priv(host)->dma + 0x2000; in NCR5380_pwrite() local
66 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
67 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
68 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
69 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
70 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
71 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
72 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
73 v=*laddr++; writew(L(v), dma); writew(H(v), dma); in NCR5380_pwrite()
[all …]
/drivers/crypto/qce/
dma.c
19 int qce_dma_request(struct device *dev, struct qce_dma_data *dma) in qce_dma_request() argument
23 dma->txchan = dma_request_slave_channel_reason(dev, "tx"); in qce_dma_request()
24 if (IS_ERR(dma->txchan)) in qce_dma_request()
25 return PTR_ERR(dma->txchan); in qce_dma_request()
27 dma->rxchan = dma_request_slave_channel_reason(dev, "rx"); in qce_dma_request()
28 if (IS_ERR(dma->rxchan)) { in qce_dma_request()
29 ret = PTR_ERR(dma->rxchan); in qce_dma_request()
33 dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ, in qce_dma_request()
35 if (!dma->result_buf) { in qce_dma_request()
40 dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ; in qce_dma_request()
[all …]
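
qce_dma_request() shows a channel-pair request with rollback: if the rx channel fails, the already-acquired tx channel must be released. A sketch; dma_request_chan() is the current spelling of the dma_request_slave_channel_reason() call in the fragment:

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    static int my_dma_request(struct device *dev,
                              struct dma_chan **tx, struct dma_chan **rx)
    {
            *tx = dma_request_chan(dev, "tx");
            if (IS_ERR(*tx))
                    return PTR_ERR(*tx);

            *rx = dma_request_chan(dev, "rx");
            if (IS_ERR(*rx)) {
                    dma_release_channel(*tx);   /* roll back the tx channel */
                    return PTR_ERR(*rx);
            }
            return 0;
    }
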
/drivers/block/rsxx/
dma.c
88 struct rsxx_dma *dma; member
128 static unsigned int get_dma_size(struct rsxx_dma *dma) in get_dma_size() argument
130 if (dma->sub_page.cnt) in get_dma_size()
131 return dma->sub_page.cnt << 9; in get_dma_size()
140 struct rsxx_dma *dma) in set_tracker_dma() argument
142 trackers->list[tag].dma = dma; in set_tracker_dma()
148 return trackers->list[tag].dma; in get_tracker_dma()
171 trackers->list[tag].dma = NULL; in push_tracker()
224 static void rsxx_free_dma(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma) in rsxx_free_dma() argument
226 if (dma->cmd != HW_CMD_BLK_DISCARD) { in rsxx_free_dma()
[all …]
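
The rsxx tracker is a tag-indexed table: the hardware completes with a small integer tag, and the driver resolves it to the owning DMA in O(1). A stripped-down sketch with assumed types and table size:

    struct my_dma;    /* opaque per-transfer state */

    struct my_trackers {
            struct my_dma *list[256];   /* one slot per hardware tag; size assumed */
    };

    static void set_tracker_dma(struct my_trackers *t, int tag,
                                struct my_dma *dma)
    {
            t->list[tag] = dma;
    }

    static struct my_dma *get_tracker_dma(struct my_trackers *t, int tag)
    {
            return t->list[tag];
    }
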
/drivers/staging/comedi/drivers/
comedi_isadma.c
109 unsigned int comedi_isadma_poll(struct comedi_isadma *dma) in comedi_isadma_poll() argument
111 struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma]; in comedi_isadma_poll()
168 struct comedi_isadma *dma = NULL; in comedi_isadma_alloc() local
176 dma = kzalloc(sizeof(*dma), GFP_KERNEL); in comedi_isadma_alloc()
177 if (!dma) in comedi_isadma_alloc()
183 dma->desc = desc; in comedi_isadma_alloc()
184 dma->n_desc = n_desc; in comedi_isadma_alloc()
194 dma->chan = dma_chans[0]; in comedi_isadma_alloc()
199 dma->chan2 = dma_chans[1]; in comedi_isadma_alloc()
202 desc = &dma->desc[i]; in comedi_isadma_alloc()
[all …]
/drivers/tty/serial/
msm_serial.c
84 void msm_stop_dma(struct uart_port *port, struct msm_dma *dma) in msm_stop_dma() argument
90 mapped = dma->count; in msm_stop_dma()
91 dma->count = 0; in msm_stop_dma()
93 dmaengine_terminate_all(dma->chan); in msm_stop_dma()
103 val &= ~dma->enable_bit; in msm_stop_dma()
107 dma_unmap_single(dev, dma->phys, mapped, dma->dir); in msm_stop_dma()
112 struct msm_dma *dma; in msm_release_dma() local
114 dma = &msm_port->tx_dma; in msm_release_dma()
115 if (dma->chan) { in msm_release_dma()
116 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
[all …]
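
msm_stop_dma() shows the teardown order for an in-flight transfer: latch the mapped length, zero the live count, terminate the channel, and only then unmap. A sketch with a hypothetical context struct:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    struct my_dma_ctx {
            struct dma_chan *chan;
            dma_addr_t phys;
            unsigned int count;            /* bytes currently mapped */
            enum dma_data_direction dir;
    };

    static void my_stop_dma(struct device *dev, struct my_dma_ctx *dma)
    {
            unsigned int mapped = dma->count;

            dma->count = 0;
            dmaengine_terminate_all(dma->chan);
            if (mapped)
                    dma_unmap_single(dev, dma->phys, mapped, dma->dir);
    }
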
samsung.c
163 struct s3c24xx_uart_dma *dma = ourport->dma; in s3c24xx_serial_stop_tx() local
177 if (dma && dma->tx_chan && ourport->tx_in_progress == S3C24XX_TX_DMA) { in s3c24xx_serial_stop_tx()
178 dmaengine_pause(dma->tx_chan); in s3c24xx_serial_stop_tx()
179 dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state); in s3c24xx_serial_stop_tx()
180 dmaengine_terminate_all(dma->tx_chan); in s3c24xx_serial_stop_tx()
182 dma->tx_transfer_addr, dma->tx_size, DMA_TO_DEVICE); in s3c24xx_serial_stop_tx()
183 async_tx_ack(dma->tx_desc); in s3c24xx_serial_stop_tx()
184 count = dma->tx_bytes_requested - state.residue; in s3c24xx_serial_stop_tx()
205 struct s3c24xx_uart_dma *dma = ourport->dma; in s3c24xx_serial_tx_dma_complete() local
211 dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state); in s3c24xx_serial_tx_dma_complete()
[all …]
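
The samsung fragment recovers the actual byte count of a cancelled transfer from the residue that dmaengine_tx_status() reports: requested minus residue is what the controller moved. That calculation in isolation (bytes_transferred is a hypothetical helper):

    #include <linux/dmaengine.h>

    static unsigned int bytes_transferred(struct dma_chan *chan,
                                          dma_cookie_t cookie,
                                          unsigned int bytes_requested)
    {
            struct dma_tx_state state;

            dmaengine_tx_status(chan, cookie, &state);
            return bytes_requested - state.residue;
    }
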
/drivers/gpu/drm/atmel-hlcdc/
atmel_hlcdc_layer.c
87 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; in atmel_hlcdc_layer_update_apply() local
116 if (dma->status == ATMEL_HLCDC_LAYER_DISABLED) { in atmel_hlcdc_layer_update_apply()
139 dma->status = ATMEL_HLCDC_LAYER_ENABLED; in atmel_hlcdc_layer_update_apply()
163 dma->queue = fb_flip; in atmel_hlcdc_layer_update_apply()
179 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; in atmel_hlcdc_layer_irq() local
199 flip = dma->queue ? dma->queue : dma->cur; in atmel_hlcdc_layer_irq()
265 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->cur); in atmel_hlcdc_layer_irq()
266 dma->cur = dma->queue; in atmel_hlcdc_layer_irq()
267 dma->queue = NULL; in atmel_hlcdc_layer_irq()
271 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->cur); in atmel_hlcdc_layer_irq()
[all …]
/drivers/net/appletalk/
ltpc.c
207 static int dma; variable
371 int dma = dev->dma; in handlefc() local
377 disable_dma(dma); in handlefc()
378 clear_dma_ff(dma); in handlefc()
379 set_dma_mode(dma,DMA_MODE_READ); in handlefc()
380 set_dma_addr(dma,virt_to_bus(ltdmacbuf)); in handlefc()
381 set_dma_count(dma,50); in handlefc()
382 enable_dma(dma); in handlefc()
394 int dma = dev->dma; in handlefd() local
399 disable_dma(dma); in handlefd()
[all …]
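
handlefc()/handlefd() run the classic ISA DMA programming sequence: disable the channel, clear the address/count flip-flop, then program mode, address and count before re-enabling. A sketch of that sequence; it assumes the channel was already claimed with request_dma(), and takes the DMA lock as current kernels require:

    #include <asm/dma.h>
    #include <linux/io.h>

    static void isa_dma_start_read(int chan, void *buf, unsigned int len)
    {
            unsigned long flags = claim_dma_lock();

            disable_dma(chan);
            clear_dma_ff(chan);          /* reset the 8237 byte flip-flop */
            set_dma_mode(chan, DMA_MODE_READ);
            set_dma_addr(chan, virt_to_bus(buf));
            set_dma_count(chan, len);
            enable_dma(chan);

            release_dma_lock(flags);
    }
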
/drivers/dma/
Makefile
7 obj-$(CONFIG_DMA_VIRTUAL_CHANNELS) += virt-dma.o
8 obj-$(CONFIG_DMA_ACPI) += acpi-dma.o
9 obj-$(CONFIG_DMA_OF) += of-dma.o
19 obj-$(CONFIG_AXI_DMAC) += dma-axi-dmac.o
21 obj-$(CONFIG_DMA_BCM2835) += bcm2835-dma.o
22 obj-$(CONFIG_DMA_JZ4740) += dma-jz4740.o
23 obj-$(CONFIG_DMA_JZ4780) += dma-jz4780.o
24 obj-$(CONFIG_DMA_OMAP) += omap-dma.o
25 obj-$(CONFIG_DMA_SA11X0) += sa11x0-dma.o
26 obj-$(CONFIG_DMA_SUN4I) += sun4i-dma.o
[all …]
/drivers/i2c/busses/
i2c-imx.c
218 struct imx_i2c_dma *dma; member
293 struct imx_i2c_dma *dma; in i2c_imx_dma_request() local
298 dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL); in i2c_imx_dma_request()
299 if (!dma) in i2c_imx_dma_request()
302 dma->chan_tx = dma_request_slave_channel(dev, "tx"); in i2c_imx_dma_request()
303 if (!dma->chan_tx) { in i2c_imx_dma_request()
313 ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig); in i2c_imx_dma_request()
319 dma->chan_rx = dma_request_slave_channel(dev, "rx"); in i2c_imx_dma_request()
320 if (!dma->chan_rx) { in i2c_imx_dma_request()
330 ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig); in i2c_imx_dma_request()
[all …]
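
i2c_imx_dma_request() pairs the channel request with a dma_slave_config describing the device-side FIFO. A sketch of the TX half; the address width and burst values are illustrative, not the i.MX ones:

    #include <linux/dmaengine.h>
    #include <linux/errno.h>

    static int my_i2c_dma_tx_setup(struct device *dev, dma_addr_t tx_fifo,
                                   struct dma_chan **chan)
    {
            struct dma_slave_config cfg = {
                    .direction = DMA_MEM_TO_DEV,
                    .dst_addr = tx_fifo,   /* physical FIFO address */
                    .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
                    .dst_maxburst = 1,
            };

            *chan = dma_request_slave_channel(dev, "tx");  /* NULL on failure */
            if (!*chan)
                    return -ENODEV;

            return dmaengine_slave_config(*chan, &cfg);
    }
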
i2c-at91.c
143 struct at91_twi_dma dma; member
213 struct at91_twi_dma *dma = &dev->dma; in at91_twi_dma_cleanup() local
217 if (dma->xfer_in_progress) { in at91_twi_dma_cleanup()
218 if (dma->direction == DMA_FROM_DEVICE) in at91_twi_dma_cleanup()
219 dmaengine_terminate_all(dma->chan_rx); in at91_twi_dma_cleanup()
221 dmaengine_terminate_all(dma->chan_tx); in at91_twi_dma_cleanup()
222 dma->xfer_in_progress = false; in at91_twi_dma_cleanup()
224 if (dma->buf_mapped) { in at91_twi_dma_cleanup()
225 dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]), in at91_twi_dma_cleanup()
226 dev->buf_len, dma->direction); in at91_twi_dma_cleanup()
[all …]
/drivers/net/wireless/b43legacy/
dma.c
75 addr |= ring->dev->dma.translation; in op32_fill_descriptor()
199 return dev->dma.tx_ring1; in priority_to_txring()
207 ring = dev->dma.tx_ring3; in priority_to_txring()
210 ring = dev->dma.tx_ring2; in priority_to_txring()
213 ring = dev->dma.tx_ring1; in priority_to_txring()
216 ring = dev->dma.tx_ring0; in priority_to_txring()
219 ring = dev->dma.tx_ring4; in priority_to_txring()
222 ring = dev->dma.tx_ring5; in priority_to_txring()
545 u32 trans = ring->dev->dma.translation; in dmacontroller_setup()
775 struct b43legacy_dma *dma; in b43legacy_dma_free() local
[all …]
/drivers/vfio/
vfio_iommu_type1.c
102 struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node); in vfio_find_dma() local
104 if (start + size <= dma->iova) in vfio_find_dma()
106 else if (start >= dma->iova + dma->size) in vfio_find_dma()
109 return dma; in vfio_find_dma()
118 struct vfio_dma *dma; in vfio_link_dma() local
122 dma = rb_entry(parent, struct vfio_dma, node); in vfio_link_dma()
124 if (new->iova + new->size <= dma->iova) in vfio_link_dma()
326 static void vfio_unmap_unpin(struct vfio_iommu *iommu, struct vfio_dma *dma) in vfio_unmap_unpin() argument
328 dma_addr_t iova = dma->iova, end = dma->iova + dma->size; in vfio_unmap_unpin()
332 if (!dma->size) in vfio_unmap_unpin()
[all …]
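
vfio_find_dma() is an interval lookup over an rb-tree keyed by IOVA: go left when the window ends at or before the entry, right when it starts at or past the entry's end, and any other case is an overlap. The lookup in isolation, with a renamed struct:

    #include <linux/rbtree.h>
    #include <linux/types.h>

    struct my_vfio_dma {
            struct rb_node node;
            dma_addr_t iova;
            size_t size;
    };

    static struct my_vfio_dma *my_find_dma(struct rb_root *root,
                                           dma_addr_t start, size_t size)
    {
            struct rb_node *node = root->rb_node;

            while (node) {
                    struct my_vfio_dma *dma =
                            rb_entry(node, struct my_vfio_dma, node);

                    if (start + size <= dma->iova)
                            node = node->rb_left;
                    else if (start >= dma->iova + dma->size)
                            node = node->rb_right;
                    else
                            return dma;   /* ranges overlap */
            }
            return NULL;
    }
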
/drivers/platform/goldfish/
goldfish_pipe_v2.c
198 struct goldfish_dma_context *dma; member
791 pipe->dma = NULL; in goldfish_pipe_open()
810 struct goldfish_dma_context *dma = pipe->dma; in goldfish_pipe_dma_release_host() local
813 if (!dma) in goldfish_pipe_dma_release_host()
818 if (dma->dma_vaddr) { in goldfish_pipe_dma_release_host()
820 dma->phys_begin; in goldfish_pipe_dma_release_host()
821 pipe->command_buffer->dma_maphost_params.sz = dma->dma_size; in goldfish_pipe_dma_release_host()
828 struct goldfish_dma_context *dma = pipe->dma; in goldfish_pipe_dma_release_guest() local
830 if (!dma) in goldfish_pipe_dma_release_guest()
833 if (dma->dma_vaddr) { in goldfish_pipe_dma_release_guest()
[all …]
