Lines matching refs: sdd (cross-reference hits for the struct s3c64xx_spi_driver_data pointer used throughout the driver)
197 static void s3c64xx_flush_fifo(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_flush_fifo() argument
199 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
218 } while (TX_FIFO_LVL(val, sdd) && loops--); in s3c64xx_flush_fifo()
221 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
227 if (RX_FIFO_LVL(val, sdd)) in s3c64xx_flush_fifo()
234 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
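
The fragments at source lines 218-234 show s3c64xx_flush_fifo() polling the status register until each FIFO level reads zero, warning through dev_warn() if the bounded loop runs out. A minimal sketch of that drain-with-timeout pattern, assuming hypothetical register and field names (EX_STATUS_REG, EX_TX_LVL and EX_FLUSH_LOOPS are illustrative, not the driver's own symbols):

    #include <linux/io.h>
    #include <linux/device.h>
    #include <linux/types.h>

    #define EX_STATUS_REG   0x14                    /* hypothetical status register offset */
    #define EX_TX_LVL(v)    (((v) >> 6) & 0x7f)     /* hypothetical TX level field */
    #define EX_FLUSH_LOOPS  1000                    /* hypothetical poll budget */

    /* Drain the TX FIFO by polling its fill level; warn if it never empties. */
    static void ex_flush_tx_fifo(void __iomem *regs, struct device *dev)
    {
            unsigned long loops = EX_FLUSH_LOOPS;
            u32 val;

            do {
                    val = readl(regs + EX_STATUS_REG);
                    if (!EX_TX_LVL(val))
                            return;         /* FIFO drained */
            } while (--loops);

            dev_warn(dev, "Timed out flushing TX FIFO\n");
    }
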
247 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_dmacb() local
252 sdd = container_of(data, in s3c64xx_spi_dmacb()
255 sdd = container_of(data, in s3c64xx_spi_dmacb()
258 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
261 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
262 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
263 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
265 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
266 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
267 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
270 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
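
Lines 258-270 show the DMA callback clearing its own RXBUSY or TXBUSY bit under sdd->lock and completing xfer_completion only when the opposite direction is also idle, so a full-duplex transfer completes exactly once. A condensed sketch of that pattern with a stand-in state structure (the struct and names below are illustrative):

    #include <linux/spinlock.h>
    #include <linux/completion.h>

    #define EX_RXBUSY       (1 << 0)
    #define EX_TXBUSY       (1 << 1)

    struct ex_xfer_state {                  /* stand-in for the driver-data fields used here */
            spinlock_t lock;
            unsigned int state;
            struct completion done;
    };

    /* DMA-done callback body for one direction: clear our busy flag and
     * signal completion only if the other direction has already finished. */
    static void ex_dma_done(struct ex_xfer_state *xs,
                            unsigned int my_bit, unsigned int other_bit)
    {
            unsigned long flags;

            spin_lock_irqsave(&xs->lock, flags);
            xs->state &= ~my_bit;
            if (!(xs->state & other_bit))
                    complete(&xs->done);
            spin_unlock_irqrestore(&xs->lock, flags);
    }

The RX callback would pass (EX_RXBUSY, EX_TXBUSY) and the TX callback the reverse pair.
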
276 struct s3c64xx_spi_driver_data *sdd; in prepare_dma() local
284 sdd = container_of((void *)dma, in prepare_dma()
287 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in prepare_dma()
288 config.src_addr_width = sdd->cur_bpw / 8; in prepare_dma()
292 sdd = container_of((void *)dma, in prepare_dma()
295 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in prepare_dma()
296 config.dst_addr_width = sdd->cur_bpw / 8; in prepare_dma()
304 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist", in prepare_dma()
315 dev_err(&sdd->pdev->dev, "DMA submission failed"); in prepare_dma()
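
Lines 287-315 suggest prepare_dma() follows the usual dmaengine slave sequence: configure the channel with the FIFO's bus address and a word width derived from cur_bpw, prep a scatterlist descriptor, attach the completion callback, submit and issue. A sketch of that sequence for the RX direction, under the assumption that the channel, scatterlist and FIFO address come from the caller; none of the names below are the driver's own:

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    /* Queue a device-to-memory slave transfer on @chan.
     * @fifo_addr:  bus address of the peripheral's RX data register.
     * @word_bytes: transfer width in bytes (bits-per-word / 8). */
    static int ex_prepare_rx_dma(struct dma_chan *chan, struct sg_table *sgt,
                                 dma_addr_t fifo_addr, unsigned int word_bytes,
                                 dma_async_tx_callback cb, void *cb_param)
    {
            struct dma_slave_config cfg = {
                    .direction      = DMA_DEV_TO_MEM,
                    .src_addr       = fifo_addr,
                    .src_addr_width = word_bytes,
                    .src_maxburst   = 1,
            };
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;

            dmaengine_slave_config(chan, &cfg);

            desc = dmaengine_prep_slave_sg(chan, sgt->sgl, sgt->nents,
                                           DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
            if (!desc)
                    return -ENOMEM;         /* the "unable to prepare ... scatterlist" case */

            desc->callback = cb;
            desc->callback_param = cb_param;

            cookie = dmaengine_submit(desc);
            if (dma_submit_error(cookie))
                    return -EIO;            /* the "DMA submission failed" case */

            dma_async_issue_pending(chan);
            return 0;
    }
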
325 struct s3c64xx_spi_driver_data *sdd = in s3c64xx_spi_set_cs() local
328 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
332 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
333 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
335 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
339 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
342 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
344 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
350 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); in s3c64xx_spi_prepare_transfer() local
352 if (is_polling(sdd)) in s3c64xx_spi_prepare_transfer()
355 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
356 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
365 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_can_dma() local
367 return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_can_dma()
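
Line 367 gates DMA on xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; the same expression reappears as fifo_len at line 676 and max_fifo at line 454, so it evidently evaluates to the FIFO depth, and DMA is chosen only for transfers that do not fit in the FIFO. A tiny standalone check of that arithmetic, assuming a hypothetical 0x7f level mask (a 64-entry FIFO; the real per-port mask is not visible in this listing):

    #include <assert.h>

    #define EX_FIFO_LVL_MASK 0x7f   /* hypothetical 7-bit FIFO level field */

    int main(void)
    {
            unsigned int fifo_len = (EX_FIFO_LVL_MASK >> 1) + 1;    /* 64 */

            assert(fifo_len == 64);
            assert(!(32 > fifo_len));       /* fits in the FIFO: PIO */
            assert(256 > fifo_len);         /* exceeds the FIFO: DMA */
            return 0;
    }
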
370 static int s3c64xx_enable_datapath(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_enable_datapath() argument
373 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
391 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
397 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
401 ret = prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
403 switch (sdd->cur_bpw) { in s3c64xx_enable_datapath()
421 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
423 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
424 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
430 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
433 ret = prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
446 static u32 s3c64xx_spi_wait_for_timeout(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_spi_wait_for_timeout() argument
449 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
454 u32 max_fifo = (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_wait_for_timeout()
461 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
464 return RX_FIFO_LVL(status, sdd); in s3c64xx_spi_wait_for_timeout()
467 static int s3c64xx_wait_for_dma(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_wait_for_dma() argument
470 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
476 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
481 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
495 while ((TX_FIFO_LVL(status, sdd) in s3c64xx_wait_for_dma()
496 || !S3C64XX_SPI_ST_TX_DONE(status, sdd)) in s3c64xx_wait_for_dma()
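
Lines 476 and 481 bound the DMA wait by the transfer's nominal on-wire time, ms = len * 8 * 1000 / cur_speed, handed to wait_for_completion_timeout(). A sketch of that bound; the extra margin added below is an assumption, since whatever headroom the driver adds is not visible in this listing:

    #include <linux/completion.h>
    #include <linux/jiffies.h>
    #include <linux/errno.h>

    /* Wait for a DMA-driven transfer of @len bytes at @speed_hz bits/s,
     * bounded by the nominal transfer time plus an illustrative margin. */
    static int ex_wait_for_dma_done(struct completion *done,
                                    unsigned int len, unsigned long speed_hz)
    {
            unsigned long ms, left;

            ms = (unsigned long)len * 8 * 1000 / speed_hz;  /* nominal ms on the wire */
            ms += 30;                                       /* assumed safety margin */

            left = wait_for_completion_timeout(done, msecs_to_jiffies(ms));
            return left ? 0 : -ETIMEDOUT;
    }
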
511 static int s3c64xx_wait_for_pio(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_wait_for_pio() argument
514 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
523 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_pio()
529 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
536 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
548 loops = xfer->len / ((FIFO_LVL_MASK(sdd) >> 1) + 1); in s3c64xx_wait_for_pio()
552 cpy_len = s3c64xx_spi_wait_for_timeout(sdd, in s3c64xx_wait_for_pio()
555 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
572 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
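
The PIO receive tail (lines 548-572) drains the RX FIFO in FIFO-sized chunks, switching on cur_bpw before copying; the copy loops themselves are elided from this listing. A sketch of such a width switch using the standard MMIO string accessors, offered as an assumption about what the elided code does:

    #include <linux/io.h>

    /* Copy @count words of @bpw bits each out of the RX data register. */
    static void ex_drain_rx(void __iomem *rx_reg, void *buf,
                            unsigned int count, unsigned int bpw)
    {
            switch (bpw) {
            case 32:
                    ioread32_rep(rx_reg, buf, count);
                    break;
            case 16:
                    ioread16_rep(rx_reg, buf, count);
                    break;
            default:
                    ioread8_rep(rx_reg, buf, count);
                    break;
            }
    }
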
577 static int s3c64xx_spi_config(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_spi_config() argument
579 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
584 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
596 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
599 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
609 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
626 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
628 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * 2); in s3c64xx_spi_config()
631 sdd->cur_speed = clk_get_rate(sdd->src_clk) / 2; in s3c64xx_spi_config()
636 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / 2 - 1) in s3c64xx_spi_config()
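
Lines 626-636 show two clocking schemes: with clk_from_cmu the source clock itself is set to twice the target (clk_set_rate(), then cur_speed = clk_get_rate() / 2); otherwise a prescaler is programmed as rate / speed / 2 - 1, matching the speed = rate / 2 / (psr + 1) read-backs in s3c64xx_spi_setup() (lines 876-896). A small standalone illustration of that divider arithmetic, with made-up rates:

    #include <stdio.h>

    /* speed = rate / 2 / (psr + 1)  <=>  psr = rate / speed / 2 - 1 (integer math) */
    int main(void)
    {
            unsigned long rate = 100000000UL;       /* hypothetical 100 MHz source clock */
            unsigned long want = 10000000UL;        /* requested 10 MHz SPI clock */
            unsigned long psr  = rate / want / 2 - 1;
            unsigned long got  = rate / 2 / (psr + 1);

            printf("psr=%lu effective=%lu Hz\n", psr, got); /* psr=4, 10000000 Hz */
            return 0;
    }
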
654 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_prepare_message() local
659 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
675 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_transfer_one() local
676 const unsigned int fifo_len = (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_transfer_one()
686 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
692 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
693 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
694 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
695 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
696 status = s3c64xx_spi_config(sdd); in s3c64xx_spi_transfer_one()
701 if (!is_polling(sdd) && (xfer->len > fifo_len) && in s3c64xx_spi_transfer_one()
702 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
705 } else if (is_polling(sdd) && xfer->len > fifo_len) { in s3c64xx_spi_transfer_one()
716 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
719 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
720 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
725 status = s3c64xx_enable_datapath(sdd, xfer, use_dma); in s3c64xx_spi_transfer_one()
727 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
735 status = s3c64xx_wait_for_dma(sdd, xfer); in s3c64xx_spi_transfer_one()
737 status = s3c64xx_wait_for_pio(sdd, xfer); in s3c64xx_spi_transfer_one()
743 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
744 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
750 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
751 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
752 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
753 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
757 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
758 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
759 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
760 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
765 s3c64xx_flush_fifo(sdd); in s3c64xx_spi_transfer_one()
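
In the transfer path (lines 692-765) the controller is reconfigured only when bits-per-word, speed or mode change, DMA is chosen when the length exceeds the FIFO and both channels exist, and on failure any direction still marked busy is paused, queried and terminated before the FIFOs are flushed. A condensed sketch of that error-recovery tail, with the channel handles and busy flags passed in rather than read from the driver data:

    #include <linux/dmaengine.h>
    #include <linux/types.h>

    /* Stop whichever DMA directions are still busy after a failed transfer;
     * the caller then flushes the controller FIFOs. */
    static void ex_abort_dma(struct dma_chan *tx, dma_cookie_t tx_cookie, bool tx_busy,
                             struct dma_chan *rx, dma_cookie_t rx_cookie, bool rx_busy)
    {
            struct dma_tx_state s;

            if (tx && tx_busy) {
                    dmaengine_pause(tx);
                    dmaengine_tx_status(tx, tx_cookie, &s); /* residue, if anyone cares */
                    dmaengine_terminate_all(tx);
            }
            if (rx && rx_busy) {
                    dmaengine_pause(rx);
                    dmaengine_tx_status(rx, rx_cookie, &s);
                    dmaengine_terminate_all(rx);
            }
    }
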
833 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_setup() local
836 sdd = spi_master_get_devdata(spi->master); in s3c64xx_spi_setup()
869 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
872 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
876 speed = clk_get_rate(sdd->src_clk) / 2 / (0 + 1); in s3c64xx_spi_setup()
881 psr = clk_get_rate(sdd->src_clk) / 2 / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
886 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
896 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
907 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
908 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
914 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
915 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
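
Lines 869 and 907-915 bracket the clock and prescaler queries in s3c64xx_spi_setup() with pm_runtime_get_sync() on entry and pm_runtime_mark_last_busy() / pm_runtime_put_autosuspend() on both the success and error paths, so the controller stays powered while it is inspected. A generic sketch of that bracket (the callback-style wrapper is illustrative, not how the driver structures it):

    #include <linux/pm_runtime.h>
    #include <linux/device.h>

    /* Keep @dev active around a short hardware-access window. */
    static int ex_with_device_active(struct device *dev,
                                     int (*body)(void *arg), void *arg)
    {
            int ret;

            ret = pm_runtime_get_sync(dev);
            if (ret < 0) {
                    pm_runtime_put_noidle(dev);
                    return ret;
            }

            ret = body(arg);

            pm_runtime_mark_last_busy(dev);
            pm_runtime_put_autosuspend(dev);
            return ret;
    }
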
953 struct s3c64xx_spi_driver_data *sdd = data; in s3c64xx_spi_irq() local
954 struct spi_master *spi = sdd->master; in s3c64xx_spi_irq()
957 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
977 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
978 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
983 static void s3c64xx_spi_hwinit(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_spi_hwinit() argument
985 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
986 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
989 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
992 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
993 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
994 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
999 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
1021 s3c64xx_flush_fifo(sdd); in s3c64xx_spi_hwinit()
1078 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_probe() local
1116 sdd = spi_master_get_devdata(master); in s3c64xx_spi_probe()
1117 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1118 sdd->master = master; in s3c64xx_spi_probe()
1119 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1120 sdd->pdev = pdev; in s3c64xx_spi_probe()
1121 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1129 sdd->port_id = ret; in s3c64xx_spi_probe()
1131 sdd->port_id = pdev->id; in s3c64xx_spi_probe()
1134 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1136 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1137 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1140 master->bus_num = sdd->port_id; in s3c64xx_spi_probe()
1154 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1157 sdd->regs = devm_ioremap_resource(&pdev->dev, mem_res); in s3c64xx_spi_probe()
1158 if (IS_ERR(sdd->regs)) { in s3c64xx_spi_probe()
1159 ret = PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1170 sdd->clk = devm_clk_get(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1171 if (IS_ERR(sdd->clk)) { in s3c64xx_spi_probe()
1173 ret = PTR_ERR(sdd->clk); in s3c64xx_spi_probe()
1177 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_probe()
1184 sdd->src_clk = devm_clk_get(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1185 if (IS_ERR(sdd->src_clk)) { in s3c64xx_spi_probe()
1188 ret = PTR_ERR(sdd->src_clk); in s3c64xx_spi_probe()
1192 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_probe()
1198 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1199 sdd->ioclk = devm_clk_get(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1200 if (IS_ERR(sdd->ioclk)) { in s3c64xx_spi_probe()
1202 ret = PTR_ERR(sdd->ioclk); in s3c64xx_spi_probe()
1206 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_probe()
1213 if (!is_polling(sdd)) { in s3c64xx_spi_probe()
1215 sdd->rx_dma.ch = dma_request_chan(&pdev->dev, "rx"); in s3c64xx_spi_probe()
1216 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_probe()
1218 ret = PTR_ERR(sdd->rx_dma.ch); in s3c64xx_spi_probe()
1221 sdd->tx_dma.ch = dma_request_chan(&pdev->dev, "tx"); in s3c64xx_spi_probe()
1222 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_probe()
1224 ret = PTR_ERR(sdd->tx_dma.ch); in s3c64xx_spi_probe()
1236 s3c64xx_spi_hwinit(sdd); in s3c64xx_spi_probe()
1238 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1239 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1242 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1251 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1260 sdd->port_id, master->num_chipselect); in s3c64xx_spi_probe()
1262 mem_res, (FIFO_LVL_MASK(sdd) >> 1) + 1); in s3c64xx_spi_probe()
1274 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1275 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_probe()
1277 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1278 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_probe()
1280 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_probe()
1282 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_probe()
1284 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_probe()
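
Lines 1213-1224 request the "rx" and "tx" slave channels only when the instance is not polling-only, and the error tail at 1274-1284 releases them and disables the clocks in reverse order of acquisition. A sketch of the channel-request step with its partial-failure cleanup:

    #include <linux/dmaengine.h>
    #include <linux/device.h>
    #include <linux/err.h>

    /* Request the rx/tx slave channels by name; release rx again if tx fails. */
    static int ex_request_dma(struct device *dev,
                              struct dma_chan **rx, struct dma_chan **tx)
    {
            *rx = dma_request_chan(dev, "rx");
            if (IS_ERR(*rx))
                    return PTR_ERR(*rx);

            *tx = dma_request_chan(dev, "tx");
            if (IS_ERR(*tx)) {
                    dma_release_channel(*rx);
                    return PTR_ERR(*tx);
            }

            return 0;
    }
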
1294 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_remove() local
1298 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1300 if (!is_polling(sdd)) { in s3c64xx_spi_remove()
1301 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1302 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1305 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_remove()
1307 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_remove()
1309 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_remove()
1322 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_suspend() local
1332 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1340 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_resume() local
1341 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1359 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_suspend() local
1361 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1362 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1363 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1371 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_resume() local
1374 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1375 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1380 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1384 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1388 s3c64xx_spi_hwinit(sdd); in s3c64xx_spi_runtime_resume()
1392 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1397 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1399 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
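
Runtime resume (lines 1374-1399) re-enables ioclk (only on variants that have one), src_clk and clk in that order, unwinds the already-enabled clocks if a later clk_prepare_enable() fails, and then re-runs s3c64xx_spi_hwinit(). A sketch of the enable-with-rollback step, relying on the clk API treating a NULL clock as a no-op:

    #include <linux/clk.h>

    /* Enable up to three clocks in order; on failure, undo what succeeded.
     * @ioclk may be NULL when the variant has no separate I/O clock. */
    static int ex_enable_clocks(struct clk *ioclk, struct clk *src_clk,
                                struct clk *clk)
    {
            int ret;

            ret = clk_prepare_enable(ioclk);        /* NULL clk: returns 0 */
            if (ret)
                    return ret;

            ret = clk_prepare_enable(src_clk);
            if (ret)
                    goto err_ioclk;

            ret = clk_prepare_enable(clk);
            if (ret)
                    goto err_src;

            return 0;

    err_src:
            clk_disable_unprepare(src_clk);
    err_ioclk:
            clk_disable_unprepare(ioclk);
            return ret;
    }
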