Lines Matching refs:dma
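
The lines below appear to come from the Linux MSM/Qualcomm UART driver (drivers/tty/serial/msm_serial.c). The leading number on each match is the source line; the trailing "in func() argument/local" tag records how the identifier is bound at that site. Every match works on one per-direction DMA state block. A minimal reconstruction of that structure, inferred only from the fields these lines touch (the actual names, types, and field order in the driver may differ):

    struct msm_dma {
            struct dma_chan                *chan;       /* slave channel, "tx" or "rx" */
            enum dma_data_direction         dir;         /* DMA_TO_DEVICE / DMA_FROM_DEVICE */
            dma_addr_t                      phys;        /* bus address of the mapped buffer */
            unsigned char                  *virt;        /* RX bounce buffer; unused for TX */
            dma_cookie_t                    cookie;      /* cookie of the submitted descriptor */
            u32                             enable_bit;  /* per-direction UARTDM_DMEN_* bit */
            unsigned int                    count;       /* bytes in flight; 0 == idle */
            struct dma_async_tx_descriptor *desc;        /* prepared slave transfer */
    };
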
84 void msm_stop_dma(struct uart_port *port, struct msm_dma *dma) in msm_stop_dma() argument
90 mapped = dma->count; in msm_stop_dma()
91 dma->count = 0; in msm_stop_dma()
93 dmaengine_terminate_all(dma->chan); in msm_stop_dma()
103 val &= ~dma->enable_bit; in msm_stop_dma()
107 dma_unmap_single(dev, dma->phys, mapped, dma->dir); in msm_stop_dma()
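
The stop path above follows a strict order: record how many bytes were mapped, mark the direction idle, abort the descriptor, clear the hardware's per-direction DMA enable bit, and only then unmap. A sketch with the elided lines (85-106) filled in by assumption; msm_read()/msm_write() and UARTDM_DMEN are the driver's register helpers and DMA-enable register as inferred from context:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    static void msm_stop_dma(struct uart_port *port, struct msm_dma *dma)
    {
            struct device *dev = port->dev;
            unsigned int mapped;
            u32 val;

            mapped = dma->count;            /* snapshot before clearing */
            dma->count = 0;                 /* mark this direction idle */

            dmaengine_terminate_all(dma->chan);   /* abort the in-flight descriptor */

            val = msm_read(port, UARTDM_DMEN);    /* assumed helper/register names */
            val &= ~dma->enable_bit;              /* drop this direction's enable bit */
            msm_write(port, val, UARTDM_DMEN);

            if (mapped)                     /* unmap only if a transfer was mapped */
                    dma_unmap_single(dev, dma->phys, mapped, dma->dir);
    }
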
112 struct msm_dma *dma; in msm_release_dma() local
114 dma = &msm_port->tx_dma; in msm_release_dma()
115 if (dma->chan) { in msm_release_dma()
116 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
117 dma_release_channel(dma->chan); in msm_release_dma()
120 memset(dma, 0, sizeof(*dma)); in msm_release_dma()
122 dma = &msm_port->rx_dma; in msm_release_dma()
123 if (dma->chan) { in msm_release_dma()
124 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
125 dma_release_channel(dma->chan); in msm_release_dma()
126 kfree(dma->virt); in msm_release_dma()
129 memset(dma, 0, sizeof(*dma)); in msm_release_dma()
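
Release is symmetric but not identical for the two directions: the TX side maps the port's existing circular buffer in place, so releasing it only needs msm_stop_dma() plus dma_release_channel(); the RX side owns a kzalloc'd bounce buffer (allocated at line 192 below), hence the extra kfree(dma->virt) at line 126. Both paths end with memset(dma, 0, ...) so a later request starts from a clean slate.
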
136 struct msm_dma *dma; in msm_request_tx_dma() local
140 dma = &msm_port->tx_dma; in msm_request_tx_dma()
143 dma->chan = dma_request_slave_channel_reason(dev, "tx"); in msm_request_tx_dma()
144 if (IS_ERR(dma->chan)) in msm_request_tx_dma()
156 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_tx_dma()
160 dma->dir = DMA_TO_DEVICE; in msm_request_tx_dma()
163 dma->enable_bit = UARTDM_DMEN_TX_DM_ENABLE; in msm_request_tx_dma()
165 dma->enable_bit = UARTDM_DMEN_TX_BAM_ENABLE; in msm_request_tx_dma()
170 dma_release_channel(dma->chan); in msm_request_tx_dma()
172 memset(dma, 0, sizeof(*dma)); in msm_request_tx_dma()
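
dma_request_slave_channel_reason() is the older, ERR_PTR-returning name for what current kernels spell dma_request_chan(). The slave-config lines (145-155) are not in the listing; the sketch below fills them with a plausible memory-to-device setup aimed at the UART's TX FIFO, so the FIFO offset, burst size, and the 'legacy' revision flag are all assumptions:

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    /* 'legacy' stands in for the driver's real hardware-revision check */
    static void msm_request_tx_dma(struct msm_port *msm_port,
                                   resource_size_t base, bool legacy)
    {
            struct device *dev = msm_port->uart.dev;
            struct msm_dma *dma = &msm_port->tx_dma;
            struct dma_slave_config conf = { };
            int ret;

            dma->chan = dma_request_slave_channel_reason(dev, "tx");
            if (IS_ERR(dma->chan))
                    goto no_tx;

            conf.direction    = DMA_MEM_TO_DEV;
            conf.dst_addr     = base + UARTDM_TF;    /* TX FIFO offset: assumed */
            conf.dst_maxburst = UARTDM_BURST_SIZE;   /* burst size: assumed */

            ret = dmaengine_slave_config(dma->chan, &conf);
            if (ret)
                    goto rel_tx;

            dma->dir = DMA_TO_DEVICE;
            /* older cores use the data-mover bit, newer ones the BAM bit */
            dma->enable_bit = legacy ? UARTDM_DMEN_TX_DM_ENABLE
                                     : UARTDM_DMEN_TX_BAM_ENABLE;
            return;

    rel_tx:
            dma_release_channel(dma->chan);
    no_tx:
            memset(dma, 0, sizeof(*dma));   /* DMA is optional; fall back to PIO */
    }
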
179 struct msm_dma *dma; in msm_request_rx_dma() local
183 dma = &msm_port->rx_dma; in msm_request_rx_dma()
186 dma->chan = dma_request_slave_channel_reason(dev, "rx"); in msm_request_rx_dma()
187 if (IS_ERR(dma->chan)) in msm_request_rx_dma()
192 dma->virt = kzalloc(UARTDM_RX_SIZE, GFP_KERNEL); in msm_request_rx_dma()
193 if (!dma->virt) in msm_request_rx_dma()
203 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_rx_dma()
207 dma->dir = DMA_FROM_DEVICE; in msm_request_rx_dma()
210 dma->enable_bit = UARTDM_DMEN_RX_DM_ENABLE; in msm_request_rx_dma()
212 dma->enable_bit = UARTDM_DMEN_RX_BAM_ENABLE; in msm_request_rx_dma()
216 kfree(dma->virt); in msm_request_rx_dma()
218 dma_release_channel(dma->chan); in msm_request_rx_dma()
220 memset(dma, 0, sizeof(*dma)); in msm_request_rx_dma()
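
The RX request differs in three ways visible above: it allocates a UARTDM_RX_SIZE bounce buffer up front (line 192), its direction is DMA_FROM_DEVICE with the DM/BAM enable bit chosen the same revision-dependent way (lines 210/212), and its error unwinding is one step deeper, freeing the buffer and releasing the channel before the final memset. The slave config at line 203 would be the device-to-memory mirror of the TX one, presumably with src_addr pointing at the RX FIFO.
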
248 struct msm_dma *dma = &msm_port->tx_dma; in msm_start_tx() local
251 if (dma->count) in msm_start_tx()
270 struct msm_dma *dma = &msm_port->tx_dma; in msm_complete_tx_dma() local
280 if (!dma->count) in msm_complete_tx_dma()
283 status = dmaengine_tx_status(dma->chan, dma->cookie, &state); in msm_complete_tx_dma()
285 dma_unmap_single(port->dev, dma->phys, dma->count, dma->dir); in msm_complete_tx_dma()
288 val &= ~dma->enable_bit; in msm_complete_tx_dma()
296 count = dma->count - state.residue; in msm_complete_tx_dma()
298 dma->count = 0; in msm_complete_tx_dma()
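
The TX completion callback is where dma->count pays off: the dmaengine reports how many bytes were not transferred (the residue), so the bytes actually sent are the submitted length minus that. A sketch of the accounting around lines 280-298, assuming the port's classic circ_buf transmit buffer; locking and the TX restart are omitted, and the enable-bit clearing mirrors msm_stop_dma():

    static void msm_complete_tx_dma(void *args)
    {
            struct msm_port *msm_port = args;
            struct uart_port *port = &msm_port->uart;
            struct circ_buf *xmit = &port->state->xmit;
            struct msm_dma *dma = &msm_port->tx_dma;
            struct dma_tx_state state;
            unsigned int count;
            u32 val;

            if (!dma->count)        /* already torn down by msm_stop_dma() */
                    return;

            dmaengine_tx_status(dma->chan, dma->cookie, &state);

            dma_unmap_single(port->dev, dma->phys, dma->count, dma->dir);

            val = msm_read(port, UARTDM_DMEN);    /* assumed, as in the stop sketch */
            val &= ~dma->enable_bit;
            msm_write(port, val, UARTDM_DMEN);

            count = dma->count - state.residue;   /* bytes actually transmitted */
            port->icount.tx += count;
            dma->count = 0;

            /* consume the sent bytes from the circular buffer */
            xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1);
    }
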
319 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx_dma() local
326 dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir); in msm_handle_tx_dma()
327 ret = dma_mapping_error(port->dev, dma->phys); in msm_handle_tx_dma()
331 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_handle_tx_dma()
335 if (!dma->desc) { in msm_handle_tx_dma()
340 dma->desc->callback = msm_complete_tx_dma; in msm_handle_tx_dma()
341 dma->desc->callback_param = msm_port; in msm_handle_tx_dma()
343 dma->cookie = dmaengine_submit(dma->desc); in msm_handle_tx_dma()
344 ret = dma_submit_error(dma->cookie); in msm_handle_tx_dma()
355 dma->count = count; in msm_handle_tx_dma()
358 val |= dma->enable_bit; in msm_handle_tx_dma()
368 dma_async_issue_pending(dma->chan); in msm_handle_tx_dma()
371 dma_unmap_single(port->dev, dma->phys, count, dma->dir); in msm_handle_tx_dma()
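
Lines 326-371 are the canonical dmaengine submit path: map the contiguous chunk at the tail of the circular buffer, prepare a slave descriptor, hook the completion callback, submit, and issue. A sketch of that flow; the enable-bit write and the UART word-count programming (elided lines 356-367) are summarized in a comment rather than reproduced:

    static int msm_handle_tx_dma(struct msm_port *msm_port, unsigned int count)
    {
            struct uart_port *port = &msm_port->uart;
            struct circ_buf *xmit = &port->state->xmit;
            struct msm_dma *dma = &msm_port->tx_dma;
            void *cpu_addr = &xmit->buf[xmit->tail];   /* contiguous tail chunk */
            int ret;

            dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir);
            ret = dma_mapping_error(port->dev, dma->phys);
            if (ret)
                    return ret;

            dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, count,
                                                    DMA_MEM_TO_DEV,
                                                    DMA_PREP_INTERRUPT |
                                                    DMA_PREP_FENCE);
            if (!dma->desc) {
                    ret = -EIO;
                    goto unmap;
            }

            dma->desc->callback = msm_complete_tx_dma;  /* runs when the burst ends */
            dma->desc->callback_param = msm_port;

            dma->cookie = dmaengine_submit(dma->desc);
            ret = dma_submit_error(dma->cookie);
            if (ret)
                    goto unmap;

            dma->count = count;     /* marks TX busy; checked in msm_start_tx() */

            /* ...set dma->enable_bit in UARTDM_DMEN and program the character
             * count into the UART here (source lines 356-367, not listed)... */

            dma_async_issue_pending(dma->chan);
            return 0;
    unmap:
            dma_unmap_single(port->dev, dma->phys, count, dma->dir);
            return ret;
    }
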
380 struct msm_dma *dma = &msm_port->rx_dma; in msm_complete_rx_dma() local
388 if (!dma->count) in msm_complete_rx_dma()
392 val &= ~dma->enable_bit; in msm_complete_rx_dma()
409 dma->count = 0; in msm_complete_rx_dma()
411 dma_unmap_single(port->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_complete_rx_dma()
416 if (msm_port->break_detected && dma->virt[i] == 0) { in msm_complete_rx_dma()
428 sysrq = uart_handle_sysrq_char(port, dma->virt[i]); in msm_complete_rx_dma()
431 tty_insert_flip_char(tport, dma->virt[i], flag); in msm_complete_rx_dma()
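
On the RX side, the completed data sits in the kzalloc'd bounce buffer; after the enable bit is cleared and the buffer unmapped (lines 392/411), each byte is pushed to the tty layer, with a NUL following a detected break treated specially and sysrq handling given first refusal. A sketch of that loop; 'count' is assumed to come from the UART's RX transfer-count register, tport is &port->state->port, and the lock juggling around the sysrq call is omitted:

    /* inside msm_complete_rx_dma(), after the unmap at line 411 */
            for (i = 0; i < count; i++) {
                    char flag = TTY_NORMAL;

                    /* a line break is read back as a NUL byte */
                    if (msm_port->break_detected && dma->virt[i] == 0) {
                            port->icount.brk++;
                            flag = TTY_BREAK;
                            msm_port->break_detected = false;
                            if (uart_handle_break(port))
                                    continue;       /* consumed by break handling */
                    }

                    /* give sysrq first refusal, then queue for the tty */
                    sysrq = uart_handle_sysrq_char(port, dma->virt[i]);
                    if (!sysrq)
                            tty_insert_flip_char(tport, dma->virt[i], flag);
            }

            tty_flip_buffer_push(tport);    /* hand the batch to the line discipline */
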
444 struct msm_dma *dma = &msm_port->rx_dma; in msm_start_rx_dma() local
452 if (!dma->chan) in msm_start_rx_dma()
455 dma->phys = dma_map_single(uart->dev, dma->virt, in msm_start_rx_dma()
456 UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
457 ret = dma_mapping_error(uart->dev, dma->phys); in msm_start_rx_dma()
461 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_start_rx_dma()
464 if (!dma->desc) in msm_start_rx_dma()
467 dma->desc->callback = msm_complete_rx_dma; in msm_start_rx_dma()
468 dma->desc->callback_param = msm_port; in msm_start_rx_dma()
470 dma->cookie = dmaengine_submit(dma->desc); in msm_start_rx_dma()
471 ret = dma_submit_error(dma->cookie); in msm_start_rx_dma()
489 dma->count = UARTDM_RX_SIZE; in msm_start_rx_dma()
491 dma_async_issue_pending(dma->chan); in msm_start_rx_dma()
497 val |= dma->enable_bit; in msm_start_rx_dma()
509 dma_unmap_single(uart->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
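
Starting RX is the same prep/submit/issue dance as the TX path at lines 326-368, but with the fixed-size bounce buffer (UARTDM_RX_SIZE) mapped DMA_FROM_DEVICE and, judging by the line order, with dma->count set and dma_async_issue_pending() called before the enable bit is written at line 497. The failure path (line 509) again unmaps exactly what was mapped, and the function bails out early (line 452) when no RX channel was obtained, leaving the port on interrupt-driven PIO.
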
515 struct msm_dma *dma = &msm_port->rx_dma; in msm_stop_rx() local
520 if (dma->chan) in msm_stop_rx()
521 msm_stop_dma(port, dma); in msm_stop_rx()
711 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx() local
754 if (!dma->chan || dma_count < dma_min) in msm_handle_tx()
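
msm_handle_tx() is the dispatch point between PIO and DMA transmission: DMA is only worth the setup cost when a channel exists and the contiguous chunk at the tail of the circular buffer (dma_count) reaches some minimum. A sketch of that decision; msm_handle_tx_pio() as the FIFO path and dma_min as the cut-off are assumed from context:

            if (!dma->chan || dma_count < dma_min)
                    msm_handle_tx_pio(port, pio_count);   /* small writes: FIFO is cheaper */
            else
                    err = msm_handle_tx_dma(msm_port, dma_count);
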
774 struct msm_dma *dma = &msm_port->rx_dma; in msm_uart_irq() local
789 if (dma->count) { in msm_uart_irq()
798 dmaengine_terminate_all(dma->chan); in msm_uart_irq()
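
In the interrupt handler, a non-zero dma->count means an RX transfer is still in flight when the UART raises its line event (apparently a stale/idle condition), so the handler terminates the RX channel rather than waiting for the full buffer; the bytes that had already landed in the bounce buffer are then recovered, though the residue mechanics for that path fall outside this listing.
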
1080 struct msm_dma *dma = &msm_port->rx_dma; in msm_set_termios() local
1086 if (dma->chan) /* Terminate if any */ in msm_set_termios()
1087 msm_stop_dma(port, dma); in msm_set_termios()
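
Finally, msm_set_termios() quiesces DMA before touching the line settings: the "Terminate if any" comment at line 1086 and the msm_stop_dma() call mean any half-filled RX transfer is abandoned, since a change of baud rate or character framing would make its contents unreliable; RX DMA is presumably re-armed once the new settings are programmed.
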