Lines Matching refs:sdd

195 static void s3c64xx_flush_fifo(struct s3c64xx_spi_driver_data *sdd)  in s3c64xx_flush_fifo()  argument
197 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
216 } while (TX_FIFO_LVL(val, sdd) && loops--); in s3c64xx_flush_fifo()
219 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
225 if (RX_FIFO_LVL(val, sdd)) in s3c64xx_flush_fifo()
232 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
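
The s3c64xx_flush_fifo() matches above suggest a bounded busy-wait: spin on the FIFO level with a decrementing loop budget and warn if it expires. A minimal standalone C model of that pattern (read_tx_level() and the loop bound are illustrative stand-ins, not the driver's names):

```c
#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for reading TX_FIFO_LVL from the status register. */
static unsigned int read_tx_level(void)
{
	static unsigned int level = 3;	/* pretend the FIFO drains over a few polls */
	return level ? level-- : 0;
}

/* Bounded drain: poll until the FIFO is empty or the loop budget runs out. */
static bool drain_tx_fifo(unsigned long loops)
{
	unsigned int lvl;

	do {
		lvl = read_tx_level();
	} while (lvl && loops--);

	if (lvl) {
		fprintf(stderr, "Timed out flushing TX FIFO\n");
		return false;
	}
	return true;
}

int main(void)
{
	return drain_tx_fifo(1000) ? 0 : 1;
}
```
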
245 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_dmacb() local
250 sdd = container_of(data, in s3c64xx_spi_dmacb()
253 sdd = container_of(data, in s3c64xx_spi_dmacb()
256 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
259 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
260 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
261 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
263 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
264 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
265 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
268 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
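
The s3c64xx_spi_dmacb() matches show the completion handshake: under the lock, the callback clears its own direction's BUSY bit and signals xfer_completion only once the opposite direction has also finished. A small userspace model of that handshake (flag values and names are illustrative; the driver's spinlock is omitted):

```c
#include <stdio.h>

#define RXBUSY (1 << 0)
#define TXBUSY (1 << 1)

struct xfer_state {
	unsigned int state;	/* RXBUSY | TXBUSY while the halves are in flight */
	int completed;		/* stands in for complete(&xfer_completion) */
};

/* Called once per finished DMA direction; completes only when both are done. */
static void dma_done(struct xfer_state *xs, unsigned int dir)
{
	xs->state &= ~dir;
	if (!(xs->state & (RXBUSY | TXBUSY)))
		xs->completed = 1;
}

int main(void)
{
	struct xfer_state xs = { .state = RXBUSY | TXBUSY };

	dma_done(&xs, TXBUSY);
	printf("after TX done: completed=%d\n", xs.completed);	/* 0 */
	dma_done(&xs, RXBUSY);
	printf("after RX done: completed=%d\n", xs.completed);	/* 1 */
	return 0;
}
```
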
274 struct s3c64xx_spi_driver_data *sdd; in prepare_dma() local
282 sdd = container_of((void *)dma, in prepare_dma()
285 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in prepare_dma()
286 config.src_addr_width = sdd->cur_bpw / 8; in prepare_dma()
290 sdd = container_of((void *)dma, in prepare_dma()
293 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in prepare_dma()
294 config.dst_addr_width = sdd->cur_bpw / 8; in prepare_dma()
302 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist", in prepare_dma()
313 dev_err(&sdd->pdev->dev, "DMA submission failed"); in prepare_dma()
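
The prepare_dma() matches imply the slave configuration is derived from transfer state: the target address is sfr_start plus the RX or TX data register, and the access width is cur_bpw / 8 bytes. A standalone sketch of that derivation (the struct and register offsets below are illustrative, not the dmaengine API):

```c
#include <stdint.h>
#include <stdio.h>

enum dir { TO_DEVICE, FROM_DEVICE };

/* Illustrative stand-ins for the TX/RX data register offsets. */
#define SPI_TX_DATA 0x18
#define SPI_RX_DATA 0x1C

struct slave_cfg {
	uint64_t addr;		/* FIFO data register the DMA engine targets */
	unsigned int width;	/* bytes per beat, derived from bits-per-word */
};

static struct slave_cfg build_cfg(uint64_t sfr_start, unsigned int cur_bpw,
				  enum dir d)
{
	struct slave_cfg cfg;

	cfg.width = cur_bpw / 8;	/* 8/16/32 bpw -> 1/2/4 bytes per beat */
	cfg.addr = sfr_start + (d == FROM_DEVICE ? SPI_RX_DATA : SPI_TX_DATA);
	return cfg;
}

int main(void)
{
	struct slave_cfg rx = build_cfg(0x13920000, 16, FROM_DEVICE);

	printf("rx addr=0x%llx width=%u\n", (unsigned long long)rx.addr, rx.width);
	return 0;
}
```
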
323 struct s3c64xx_spi_driver_data *sdd = in s3c64xx_spi_set_cs() local
326 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
330 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
331 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
333 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
337 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
340 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
342 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
348 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(spi); in s3c64xx_spi_prepare_transfer() local
350 if (is_polling(sdd)) in s3c64xx_spi_prepare_transfer()
353 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
354 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
363 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_can_dma() local
365 return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_can_dma()
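
The single s3c64xx_spi_can_dma() match encodes the DMA/PIO decision: a transfer is handed to DMA only if it is longer than the FIFO depth, which the driver derives as (FIFO_LVL_MASK >> 1) + 1. A tiny standalone check built on that formula (the mask value below is just an example):

```c
#include <stdbool.h>
#include <stdio.h>

/* FIFO depth as derived from a level-field mask: (mask >> 1) + 1. */
static unsigned int fifo_depth(unsigned int fifo_lvl_mask)
{
	return (fifo_lvl_mask >> 1) + 1;
}

/* Use DMA only when the transfer cannot fit in the FIFO in one go. */
static bool can_dma(unsigned int fifo_lvl_mask, unsigned int len)
{
	return len > fifo_depth(fifo_lvl_mask);
}

int main(void)
{
	unsigned int mask = 0x7f;	/* example: depth (0x7f >> 1) + 1 = 64 */

	printf("depth=%u, 32 bytes -> %s, 256 bytes -> %s\n",
	       fifo_depth(mask),
	       can_dma(mask, 32) ? "DMA" : "PIO",
	       can_dma(mask, 256) ? "DMA" : "PIO");
	return 0;
}
```
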
368 static int s3c64xx_enable_datapath(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_enable_datapath() argument
371 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
389 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
395 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
399 ret = prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
401 switch (sdd->cur_bpw) { in s3c64xx_enable_datapath()
419 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
421 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
422 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
428 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
431 ret = prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
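
The two writel() fragments in s3c64xx_enable_datapath() show how the byte length becomes a packet count for the controller: bytes are converted to words of cur_bpw bits and masked to 16 bits. A worked standalone version of that conversion (function name is illustrative):

```c
#include <stdint.h>
#include <stdio.h>

/* Number of SPI words in a transfer, as the controller's 16-bit count field. */
static uint32_t packet_count(unsigned int len_bytes, unsigned int cur_bpw)
{
	return (len_bytes * 8 / cur_bpw) & 0xffff;
}

int main(void)
{
	/* 64 bytes at 8 bpw -> 64 words; 64 bytes at 32 bpw -> 16 words. */
	printf("%u %u\n", packet_count(64, 8), packet_count(64, 32));
	return 0;
}
```
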
444 static u32 s3c64xx_spi_wait_for_timeout(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_spi_wait_for_timeout() argument
447 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
452 u32 max_fifo = (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_wait_for_timeout()
459 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
462 return RX_FIFO_LVL(status, sdd); in s3c64xx_spi_wait_for_timeout()
465 static int s3c64xx_wait_for_dma(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_wait_for_dma() argument
468 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
474 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
479 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
493 while ((TX_FIFO_LVL(status, sdd) in s3c64xx_wait_for_dma()
494 || !S3C64XX_SPI_ST_TX_DONE(status, sdd)) in s3c64xx_wait_for_dma()
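
The s3c64xx_wait_for_dma() matches derive the completion timeout from the transfer itself: length in bytes times 8 bits, scaled by the current clock rate, gives the on-wire time in milliseconds, and the driver then waits on xfer_completion with that bound (plus extra headroom in the original). A standalone version of the arithmetic (the margin value here is an assumption for illustration):

```c
#include <stdio.h>

/* Worst-case on-wire time in ms for len bytes at speed_hz, plus headroom. */
static unsigned long xfer_timeout_ms(unsigned long len, unsigned long speed_hz,
				     unsigned long margin_ms)
{
	unsigned long ms = len * 8UL * 1000UL / speed_hz;

	return ms + margin_ms;
}

int main(void)
{
	/* 4 KiB at 10 MHz is ~3 ms of bus time; the margin covers slow setups. */
	printf("%lu ms\n", xfer_timeout_ms(4096, 10000000UL, 10));
	return 0;
}
```
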
509 static int s3c64xx_wait_for_pio(struct s3c64xx_spi_driver_data *sdd, in s3c64xx_wait_for_pio() argument
512 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
521 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_pio()
527 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
534 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
546 loops = xfer->len / ((FIFO_LVL_MASK(sdd) >> 1) + 1); in s3c64xx_wait_for_pio()
550 cpy_len = s3c64xx_spi_wait_for_timeout(sdd, in s3c64xx_wait_for_pio()
553 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
570 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
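
The s3c64xx_wait_for_pio() matches sketch the polled RX path: the transfer is split into FIFO-sized chunks, each chunk waits until the RX level reaches the expected amount, and the copy width follows cur_bpw. A simplified standalone model of that chunking (the FIFO source is simulated; the real code polls RX_FIFO_LVL and reads the data register):

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define FIFO_DEPTH 64	/* illustrative: (FIFO_LVL_MASK >> 1) + 1 */

/* Simulated FIFO read; the driver reads from the RX data register instead. */
static void fifo_read(uint8_t *dst, unsigned int n)
{
	memset(dst, 0xa5, n);
}

/* Receive len bytes in FIFO-sized chunks, as the polled path does. */
static void pio_receive(uint8_t *buf, unsigned int len)
{
	unsigned int done = 0;

	while (done < len) {
		unsigned int chunk = len - done;

		if (chunk > FIFO_DEPTH)
			chunk = FIFO_DEPTH;
		/* The driver first waits until 'chunk' bytes are in the FIFO. */
		fifo_read(buf + done, chunk);
		done += chunk;
	}
}

int main(void)
{
	uint8_t buf[200];

	pio_receive(buf, sizeof(buf));
	printf("first byte: 0x%02x\n", buf[0]);
	return 0;
}
```
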
575 static int s3c64xx_spi_config(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_spi_config() argument
577 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
582 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
594 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
597 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
607 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
624 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
626 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * 2); in s3c64xx_spi_config()
629 sdd->cur_speed = clk_get_rate(sdd->src_clk) / 2; in s3c64xx_spi_config()
634 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / 2 - 1) in s3c64xx_spi_config()
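
The s3c64xx_spi_config() matches show the two clocking schemes: with clk_from_cmu the driver asks the source clock for twice the target rate and reads back what it got, otherwise it programs a prescaler of src/2/speed - 1. The s3c64xx_spi_setup() matches further down apply the same divider math to round a requested speed to something achievable. A standalone worked version of the divider arithmetic (pure math, no clk API; it assumes the request does not exceed src/2):

```c
#include <stdio.h>

/* Prescaler for the internal-divider path: SPI rate = src / 2 / (psr + 1). */
static unsigned int spi_psr(unsigned long src_hz, unsigned long want_hz)
{
	return src_hz / 2 / want_hz - 1;
}

/* Speed actually produced by a given prescaler value. */
static unsigned long spi_actual_hz(unsigned long src_hz, unsigned int psr)
{
	return src_hz / 2 / (psr + 1);
}

int main(void)
{
	unsigned long src = 100000000UL;	/* example 100 MHz source clock */
	unsigned long want = 12000000UL;	/* requested 12 MHz */
	unsigned int psr = spi_psr(src, want);
	unsigned long got = spi_actual_hz(src, psr);

	/* If the rounded-down divider overshoots the request, bump psr by one. */
	if (got > want) {
		psr++;
		got = spi_actual_hz(src, psr);
	}
	printf("psr=%u -> %lu Hz\n", psr, got);
	return 0;
}
```
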
652 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_prepare_message() local
659 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
661 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
670 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_transfer_one() local
671 const unsigned int fifo_len = (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_transfer_one()
681 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
687 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
688 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
689 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
690 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
691 status = s3c64xx_spi_config(sdd); in s3c64xx_spi_transfer_one()
696 if (!is_polling(sdd) && (xfer->len > fifo_len) && in s3c64xx_spi_transfer_one()
697 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
700 } else if (is_polling(sdd) && xfer->len > fifo_len) { in s3c64xx_spi_transfer_one()
711 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
714 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
715 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
720 status = s3c64xx_enable_datapath(sdd, xfer, use_dma); in s3c64xx_spi_transfer_one()
722 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
730 status = s3c64xx_wait_for_dma(sdd, xfer); in s3c64xx_spi_transfer_one()
732 status = s3c64xx_wait_for_pio(sdd, xfer); in s3c64xx_spi_transfer_one()
738 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
739 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
745 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
746 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
747 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
748 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
752 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
753 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
754 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
755 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
760 s3c64xx_flush_fifo(sdd); in s3c64xx_spi_transfer_one()
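
Taken together, the s3c64xx_spi_transfer_one() matches outline the per-transfer sequence: reconfigure only when bits-per-word or speed changed, choose DMA when the transfer exceeds the FIFO and channels exist, arm the datapath under the lock, block on either the DMA completion or the polled wait, and on failure terminate the DMA channels and flush the FIFO. A condensed standalone model of that control flow (all helpers are stubs standing in for the listed calls):

```c
#include <stdbool.h>
#include <stdio.h>

struct xfer { unsigned int len, bpw, speed_hz; };
struct ctrl { unsigned int cur_bpw, cur_speed, fifo_depth; bool have_dma; };

/* Stubs standing in for s3c64xx_spi_config(), enable_datapath() and the waits. */
static int reconfigure(struct ctrl *c, const struct xfer *x)
{ c->cur_bpw = x->bpw; c->cur_speed = x->speed_hz; return 0; }
static int enable_datapath(struct ctrl *c, const struct xfer *x, bool dma)
{ (void)c; (void)x; (void)dma; return 0; }
static int wait_done(struct ctrl *c, const struct xfer *x, bool dma)
{ (void)c; (void)x; (void)dma; return 0; }
static void cleanup(struct ctrl *c) { (void)c; /* stop DMA, flush the FIFO */ }

static int transfer_one(struct ctrl *c, const struct xfer *x)
{
	bool use_dma;
	int ret;

	if (x->bpw != c->cur_bpw || x->speed_hz != c->cur_speed) {
		ret = reconfigure(c, x);
		if (ret)
			return ret;
	}
	use_dma = c->have_dma && x->len > c->fifo_depth;

	ret = enable_datapath(c, x, use_dma);	/* under the lock in the driver */
	if (!ret)
		ret = wait_done(c, x, use_dma);
	if (ret)
		cleanup(c);			/* terminate DMA, flush the FIFO */
	return ret;
}

int main(void)
{
	struct ctrl c = { .fifo_depth = 64, .have_dma = true };
	struct xfer x = { .len = 256, .bpw = 8, .speed_hz = 10000000 };

	printf("transfer_one -> %d\n", transfer_one(&c, &x));
	return 0;
}
```
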
826 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_setup() local
829 sdd = spi_master_get_devdata(spi->master); in s3c64xx_spi_setup()
844 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
847 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
851 speed = clk_get_rate(sdd->src_clk) / 2 / (0 + 1); in s3c64xx_spi_setup()
856 psr = clk_get_rate(sdd->src_clk) / 2 / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
861 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
871 speed = clk_get_rate(sdd->src_clk) / 2 / (psr + 1); in s3c64xx_spi_setup()
882 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
883 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
889 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
890 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
916 struct s3c64xx_spi_driver_data *sdd = data; in s3c64xx_spi_irq() local
917 struct spi_master *spi = sdd->master; in s3c64xx_spi_irq()
920 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
940 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
941 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
946 static void s3c64xx_spi_hwinit(struct s3c64xx_spi_driver_data *sdd) in s3c64xx_spi_hwinit() argument
948 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
949 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
952 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
955 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
956 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
957 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
962 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
984 s3c64xx_flush_fifo(sdd); in s3c64xx_spi_hwinit()
1035 struct s3c64xx_spi_driver_data *sdd; in s3c64xx_spi_probe() local
1073 sdd = spi_master_get_devdata(master); in s3c64xx_spi_probe()
1074 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1075 sdd->master = master; in s3c64xx_spi_probe()
1076 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1077 sdd->pdev = pdev; in s3c64xx_spi_probe()
1078 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1086 sdd->port_id = ret; in s3c64xx_spi_probe()
1088 sdd->port_id = pdev->id; in s3c64xx_spi_probe()
1091 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1093 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1094 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1097 master->bus_num = sdd->port_id; in s3c64xx_spi_probe()
1111 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1114 sdd->regs = devm_ioremap_resource(&pdev->dev, mem_res); in s3c64xx_spi_probe()
1115 if (IS_ERR(sdd->regs)) { in s3c64xx_spi_probe()
1116 ret = PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1127 sdd->clk = devm_clk_get(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1128 if (IS_ERR(sdd->clk)) { in s3c64xx_spi_probe()
1130 ret = PTR_ERR(sdd->clk); in s3c64xx_spi_probe()
1134 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_probe()
1141 sdd->src_clk = devm_clk_get(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1142 if (IS_ERR(sdd->src_clk)) { in s3c64xx_spi_probe()
1145 ret = PTR_ERR(sdd->src_clk); in s3c64xx_spi_probe()
1149 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_probe()
1155 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1156 sdd->ioclk = devm_clk_get(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1157 if (IS_ERR(sdd->ioclk)) { in s3c64xx_spi_probe()
1159 ret = PTR_ERR(sdd->ioclk); in s3c64xx_spi_probe()
1163 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_probe()
1170 if (!is_polling(sdd)) { in s3c64xx_spi_probe()
1172 sdd->rx_dma.ch = dma_request_chan(&pdev->dev, "rx"); in s3c64xx_spi_probe()
1173 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_probe()
1175 ret = PTR_ERR(sdd->rx_dma.ch); in s3c64xx_spi_probe()
1178 sdd->tx_dma.ch = dma_request_chan(&pdev->dev, "tx"); in s3c64xx_spi_probe()
1179 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_probe()
1181 ret = PTR_ERR(sdd->tx_dma.ch); in s3c64xx_spi_probe()
1193 s3c64xx_spi_hwinit(sdd); in s3c64xx_spi_probe()
1195 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1196 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1199 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1208 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1217 sdd->port_id, master->num_chipselect); in s3c64xx_spi_probe()
1219 mem_res, (FIFO_LVL_MASK(sdd) >> 1) + 1); in s3c64xx_spi_probe()
1231 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1232 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_probe()
1234 if (!is_polling(sdd)) in s3c64xx_spi_probe()
1235 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_probe()
1237 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_probe()
1239 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_probe()
1241 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_probe()
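
The tail of the s3c64xx_spi_probe() matches is the usual reverse-order cleanup: DMA channels are released first, then the clocks are disabled in the opposite order to how they were enabled. A small standalone model of that goto-unwind idiom (acquire/release names and the injected "tx" failure are illustrative):

```c
#include <stdbool.h>
#include <stdio.h>

/* Illustrative acquire/release pairs standing in for clk and DMA setup. */
static bool get_clk(const char *n)  { printf("enable %s\n", n);  return true; }
static void put_clk(const char *n)  { printf("disable %s\n", n); }
static bool get_dma(const char *n)  { printf("request %s\n", n); return n[0] != 't'; }
static void put_dma(const char *n)  { printf("release %s\n", n); }

static int probe(void)
{
	if (!get_clk("spi"))
		return -1;
	if (!get_clk("src_clk"))
		goto err_clk;
	if (!get_dma("rx"))
		goto err_src_clk;
	if (!get_dma("tx"))		/* fails in this model */
		goto err_rx_dma;
	return 0;

err_rx_dma:
	put_dma("rx");
err_src_clk:
	put_clk("src_clk");
err_clk:
	put_clk("spi");
	return -1;
}

int main(void)
{
	return probe() ? 1 : 0;
}
```
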
1251 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_remove() local
1255 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1257 if (!is_polling(sdd)) { in s3c64xx_spi_remove()
1258 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1259 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1262 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_remove()
1264 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_remove()
1266 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_remove()
1279 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_suspend() local
1289 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1297 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_resume() local
1298 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1316 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_suspend() local
1318 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1319 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1320 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1328 struct s3c64xx_spi_driver_data *sdd = spi_master_get_devdata(master); in s3c64xx_spi_runtime_resume() local
1331 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1332 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1337 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1341 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1345 s3c64xx_spi_hwinit(sdd); in s3c64xx_spi_runtime_resume()
1349 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1354 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1356 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
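
The runtime PM matches pair up: s3c64xx_spi_runtime_suspend() simply disables the three clocks, while s3c64xx_spi_runtime_resume() re-enables ioclk, the source clock and the gate clock in order and, on a failure partway through, undoes whatever was already enabled before returning the error. A loop-based standalone model of that enable-with-rollback sequence (the loop form and the clock names are illustrative, not the driver's structure):

```c
#include <stdbool.h>
#include <stdio.h>

static const char *clks[] = { "spi_ioclk", "src_clk", "spi" };

/* Illustrative enable/disable stubs; the driver uses clk_prepare_enable(). */
static bool clk_on(const char *n)  { printf("enable %s\n", n);  return true; }
static void clk_off(const char *n) { printf("disable %s\n", n); }

/* Enable all clocks in order; on failure, undo the ones already enabled. */
static int runtime_resume(void)
{
	int i, n = (int)(sizeof(clks) / sizeof(clks[0]));

	for (i = 0; i < n; i++) {
		if (!clk_on(clks[i])) {
			while (--i >= 0)
				clk_off(clks[i]);
			return -1;
		}
	}
	return 0;
}

/* Runtime suspend disables them again, in reverse order. */
static void runtime_suspend(void)
{
	int i;

	for (i = (int)(sizeof(clks) / sizeof(clks[0])) - 1; i >= 0; i--)
		clk_off(clks[i]);
}

int main(void)
{
	if (!runtime_resume())
		runtime_suspend();
	return 0;
}
```
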