Lines Matching +full:fsd +full:- +full:clk
1 // SPDX-License-Identifier: GPL-2.0+
10 #include <linux/clk.h>
11 #include <linux/dma-mapping.h>
19 #include <linux/platform_data/spi-s3c64xx.h>
26 /* Registers and bit-fields */
106 #define FIFO_LVL_MASK(i) ((i)->port_conf->fifo_lvl_mask[i->port_id])
108 (1 << (i)->port_conf->tx_st_done)) ? 1 : 0)
110 #define RX_FIFO_LVL(v, i) (((v) >> (i)->port_conf->rx_lvl_offset) & \
119 #define is_polling(x) (x->port_conf->quirks & S3C64XX_SPI_QUIRK_POLL)
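
The masks and shifts above pull the TX/RX FIFO occupancy out of the controller's status word. A minimal sketch (not from the file) of that decode, with the mask and shift passed in explicitly instead of coming from the driver's port_conf table:

#include <linux/io.h>
#include <linux/types.h>

/* Sketch only: decode the RX FIFO occupancy field out of a raw status word.
 * 'lvl_mask' and 'lvl_shift' stand in for the per-port values the driver
 * keeps in its port_conf table (fifo_lvl_mask / rx_lvl_offset).
 */
static u32 rx_fifo_level(u32 status, u32 lvl_mask, unsigned int lvl_shift)
{
        return (status >> lvl_shift) & lvl_mask;
}

/* Read the status register and decode it in one step. */
static u32 read_rx_level(void __iomem *status_reg, u32 lvl_mask,
                         unsigned int lvl_shift)
{
        return rx_fifo_level(readl(status_reg), lvl_mask, lvl_shift);
}
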
131 * struct s3c64xx_spi_port_config - SPI Controller hardware info
132 * @fifo_lvl_mask: Bit-mask for {TX|RX}_FIFO_LVL bits in SPI_STATUS register.
161 * struct s3c64xx_spi_driver_data - Runtime info holder for SPI driver.
162 * @clk: Pointer to the spi clock.
183 struct clk *clk;
184 struct clk *src_clk;
185 struct clk *ioclk;
203 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
222 } while (TX_FIFO_LVL(val, sdd) && loops--); in s3c64xx_flush_fifo()
225 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
235 } while (loops--); in s3c64xx_flush_fifo()
238 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
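
s3c64xx_flush_fifo() drains the FIFOs with a bounded busy-wait so a wedged controller cannot hang the CPU, warning when the loop budget runs out. A compact sketch of that pattern, reusing the mask/shift decode from above:

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/jiffies.h>

/* Sketch only: poll until the TX FIFO level field reads zero, but give up
 * after a bounded number of iterations and warn, as the driver does.
 */
static int drain_tx_fifo(struct device *dev, void __iomem *status_reg,
                         u32 lvl_mask, unsigned int lvl_shift)
{
        unsigned long loops = msecs_to_jiffies(1);

        do {
                if (!((readl(status_reg) >> lvl_shift) & lvl_mask))
                        return 0;
        } while (loops--);

        dev_warn(dev, "timed out draining TX FIFO\n");
        return -ETIMEDOUT;
}
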
255 if (dma->direction == DMA_DEV_TO_MEM) in s3c64xx_spi_dmacb()
262 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
264 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_spi_dmacb()
265 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
266 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
267 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
269 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
270 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
271 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
274 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
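
The DMA callback above clears the per-direction busy flag under the driver lock and only signals completion once both RX and TX have finished. A stand-alone sketch of that rendezvous pattern (struct, field, and flag names here are illustrative, not the driver's):

#include <linux/bits.h>
#include <linux/completion.h>
#include <linux/spinlock.h>

#define XFER_RXBUSY     BIT(0)
#define XFER_TXBUSY     BIT(1)

struct xfer_state {
        spinlock_t lock;
        unsigned int busy;              /* XFER_RXBUSY | XFER_TXBUSY */
        struct completion done;
};

/* Sketch only: called from each dmaengine completion callback. */
static void xfer_dir_done(struct xfer_state *st, unsigned int dir_flag)
{
        unsigned long flags;

        spin_lock_irqsave(&st->lock, flags);
        st->busy &= ~dir_flag;
        if (!st->busy)                  /* both directions finished */
                complete(&st->done);
        spin_unlock_irqrestore(&st->lock, flags);
}
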
287 if (dma->direction == DMA_DEV_TO_MEM) { in prepare_dma()
290 config.direction = dma->direction; in prepare_dma()
291 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in prepare_dma()
292 config.src_addr_width = sdd->cur_bpw / 8; in prepare_dma()
294 dmaengine_slave_config(dma->ch, &config); in prepare_dma()
298 config.direction = dma->direction; in prepare_dma()
299 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in prepare_dma()
300 config.dst_addr_width = sdd->cur_bpw / 8; in prepare_dma()
302 dmaengine_slave_config(dma->ch, &config); in prepare_dma()
305 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, in prepare_dma()
306 dma->direction, DMA_PREP_INTERRUPT); in prepare_dma()
308 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist", in prepare_dma()
309 dma->direction == DMA_DEV_TO_MEM ? "rx" : "tx"); in prepare_dma()
310 return -ENOMEM; in prepare_dma()
313 desc->callback = s3c64xx_spi_dmacb; in prepare_dma()
314 desc->callback_param = dma; in prepare_dma()
316 dma->cookie = dmaengine_submit(desc); in prepare_dma()
317 ret = dma_submit_error(dma->cookie); in prepare_dma()
319 dev_err(&sdd->pdev->dev, "DMA submission failed"); in prepare_dma()
320 return -EIO; in prepare_dma()
323 dma_async_issue_pending(dma->ch); in prepare_dma()
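
prepare_dma() follows the standard dmaengine slave flow: configure the channel for the FIFO address and word width, prepare a scatter-gather descriptor, install a callback, submit, then issue. A minimal sketch of that flow for the RX direction using only generic dmaengine calls; the FIFO address and width are parameters here, not the driver's registers:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch only: program a slave channel to read 'width'-byte words from a
 * device FIFO at 'fifo_addr' into the scatterlist 'sgt'.
 */
static int start_rx_dma(struct dma_chan *chan, phys_addr_t fifo_addr,
                        unsigned int width, struct sg_table *sgt,
                        dma_async_tx_callback cb, void *cb_param)
{
        struct dma_slave_config cfg = {
                .direction = DMA_DEV_TO_MEM,
                .src_addr = fifo_addr,
                .src_addr_width = width,  /* bytes; matches the buswidth enum */
                .src_maxburst = 1,
        };
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;
        int ret;

        ret = dmaengine_slave_config(chan, &cfg);
        if (ret)
                return ret;

        desc = dmaengine_prep_slave_sg(chan, sgt->sgl, sgt->nents,
                                       DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!desc)
                return -ENOMEM;

        desc->callback = cb;
        desc->callback_param = cb_param;

        cookie = dmaengine_submit(desc);
        ret = dma_submit_error(cookie);
        if (ret)
                return ret;

        dma_async_issue_pending(chan);
        return 0;
}
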
330 spi_master_get_devdata(spi->master); in s3c64xx_spi_set_cs()
332 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
336 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
337 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
339 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
343 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
346 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
348 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
360 sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx"); in s3c64xx_spi_prepare_transfer()
361 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
362 dev_err(&sdd->pdev->dev, "Failed to get RX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
363 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
367 sdd->tx_dma.ch = dma_request_chan(&sdd->pdev->dev, "tx"); in s3c64xx_spi_prepare_transfer()
368 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
369 dev_err(&sdd->pdev->dev, "Failed to get TX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
370 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_prepare_transfer()
371 sdd->tx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
372 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
376 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
377 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
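
s3c64xx_spi_prepare_transfer() asks for named "rx"/"tx" channels and, if either is missing, leaves both pointers NULL so the core falls back to PIO. A sketch of that acquire-both-or-neither pattern:

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>

/* Sketch only: grab both channels or neither; the caller may ignore the
 * error and continue in PIO mode.
 */
static int acquire_dma_pair(struct device *dev, struct dma_chan **rx,
                            struct dma_chan **tx)
{
        int ret;

        *rx = dma_request_chan(dev, "rx");
        if (IS_ERR(*rx)) {
                ret = PTR_ERR(*rx);
                *rx = NULL;
                return ret;
        }

        *tx = dma_request_chan(dev, "tx");
        if (IS_ERR(*tx)) {
                ret = PTR_ERR(*tx);
                dma_release_channel(*rx);
                *rx = NULL;
                *tx = NULL;
                return ret;
        }

        return 0;
}
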
390 if (sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_unprepare_transfer()
391 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_unprepare_transfer()
392 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_unprepare_transfer()
393 sdd->rx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
394 sdd->tx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
406 if (sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_can_dma()
407 return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1; in s3c64xx_spi_can_dma()
417 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
435 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
440 if (xfer->tx_buf != NULL) { in s3c64xx_enable_datapath()
441 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
445 ret = prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
447 switch (sdd->cur_bpw) { in s3c64xx_enable_datapath()
450 xfer->tx_buf, xfer->len / 4); in s3c64xx_enable_datapath()
454 xfer->tx_buf, xfer->len / 2); in s3c64xx_enable_datapath()
458 xfer->tx_buf, xfer->len); in s3c64xx_enable_datapath()
464 if (xfer->rx_buf != NULL) { in s3c64xx_enable_datapath()
465 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
467 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
468 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
474 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
477 ret = prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
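
In the PIO branch, s3c64xx_enable_datapath() copies the TX buffer into the data register a word at a time, sized by the configured bits-per-word. A sketch of that dispatch using the generic iowrite*_rep() accessors; the register pointer, buffer, and length are plain parameters here:

#include <linux/io.h>
#include <linux/types.h>

/* Sketch only: push 'len' bytes into a TX data register 32, 16, or 8 bits
 * at a time depending on the configured word size.
 */
static void pio_fill_tx(void __iomem *tx_reg, const void *buf,
                        unsigned int len, unsigned int bpw)
{
        switch (bpw) {
        case 32:
                iowrite32_rep(tx_reg, buf, len / 4);
                break;
        case 16:
                iowrite16_rep(tx_reg, buf, len / 2);
                break;
        default:
                iowrite8_rep(tx_reg, buf, len);
                break;
        }
}
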
493 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
505 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
514 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
520 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
525 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
529 * proceed further else return -EIO. in s3c64xx_wait_for_dma()
536 if (val && !xfer->rx_buf) { in s3c64xx_wait_for_dma()
541 && --val) { in s3c64xx_wait_for_dma()
550 return -EIO; in s3c64xx_wait_for_dma()
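
s3c64xx_wait_for_dma() sizes its timeout from the transfer itself: the bit-time on the wire at the current clock, converted to milliseconds, plus a fixed margin. A sketch of that computation feeding wait_for_completion_timeout():

#include <linux/completion.h>
#include <linux/errno.h>
#include <linux/jiffies.h>

/* Sketch only: bound the wait by the time the transfer should take on the
 * wire plus some headroom, mirroring the driver's arithmetic.
 */
static int wait_xfer_done(struct completion *done, unsigned int len_bytes,
                          unsigned int speed_hz)
{
        unsigned long ms, left;

        ms = (unsigned long)len_bytes * 8 * 1000 / speed_hz; /* bit-time in ms */
        ms += 30;                                            /* tolerance */

        left = wait_for_completion_timeout(done, msecs_to_jiffies(ms));
        return left ? 0 : -ETIMEDOUT;
}
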
558 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
567 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_pio()
573 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
576 return -EIO; in s3c64xx_wait_for_pio()
579 if (!xfer->rx_buf) { in s3c64xx_wait_for_pio()
580 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
592 loops = xfer->len / ((FIFO_LVL_MASK(sdd) >> 1) + 1); in s3c64xx_wait_for_pio()
593 buf = xfer->rx_buf; in s3c64xx_wait_for_pio()
599 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
615 } while (loops--); in s3c64xx_wait_for_pio()
616 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
623 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
626 int div = sdd->port_conf->clk_div; in s3c64xx_spi_config()
629 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
641 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
644 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
654 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
669 if ((sdd->cur_mode & SPI_LOOP) && sdd->port_conf->has_loopback) in s3c64xx_spi_config()
674 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
675 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * div); in s3c64xx_spi_config()
678 sdd->cur_speed = clk_get_rate(sdd->src_clk) / div; in s3c64xx_spi_config()
683 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / div - 1) in s3c64xx_spi_config()
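
When the controller divides a local source clock instead of taking the rate from the clock controller, the config path programs a prescaler so that out = src / (div * (PSR + 1)). A sketch of that arithmetic, assuming an 8-bit prescaler field (the field width is an assumption, taken from the usual S3C64XX register layout):

#include <linux/clk.h>
#include <linux/types.h>

/* Sketch only: compute the prescaler for
 *     out_rate = src_rate / (div * (psr + 1))
 * clamp it to an assumed 8-bit field, and report the rate achieved.
 */
static u32 spi_clk_psr(struct clk *src, unsigned int div,
                       unsigned int want_hz, unsigned int *got_hz)
{
        unsigned long src_hz = clk_get_rate(src);
        u32 psr = src_hz / div / want_hz - 1;

        if (psr > 0xff)
                psr = 0xff;

        *got_hz = src_hz / div / (psr + 1);
        return psr;
}
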
702 struct spi_device *spi = msg->spi; in s3c64xx_spi_prepare_message()
703 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_prepare_message()
708 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
710 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
717 struct spi_controller *ctlr = spi->controller; in s3c64xx_spi_max_transfer_size()
719 return ctlr->can_dma ? S3C64XX_SPI_PACKET_CNT_MASK : SIZE_MAX; in s3c64xx_spi_max_transfer_size()
737 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
740 bpw = xfer->bits_per_word; in s3c64xx_spi_transfer_one()
741 speed = xfer->speed_hz; in s3c64xx_spi_transfer_one()
743 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
744 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
745 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
746 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
752 if (!is_polling(sdd) && (xfer->len > fifo_len) && in s3c64xx_spi_transfer_one()
753 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
756 } else if (xfer->len > fifo_len) { in s3c64xx_spi_transfer_one()
757 tx_buf = xfer->tx_buf; in s3c64xx_spi_transfer_one()
758 rx_buf = xfer->rx_buf; in s3c64xx_spi_transfer_one()
759 origin_len = xfer->len; in s3c64xx_spi_transfer_one()
761 target_len = xfer->len; in s3c64xx_spi_transfer_one()
762 if (xfer->len > fifo_len) in s3c64xx_spi_transfer_one()
763 xfer->len = fifo_len; in s3c64xx_spi_transfer_one()
767 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
770 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
771 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
778 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
781 dev_err(&spi->dev, "failed to enable data path for transfer: %d\n", status); in s3c64xx_spi_transfer_one()
791 dev_err(&spi->dev, in s3c64xx_spi_transfer_one()
792 "I/O Error: rx-%d tx-%d rx-%c tx-%c len-%d dma-%d res-(%d)\n", in s3c64xx_spi_transfer_one()
793 xfer->rx_buf ? 1 : 0, xfer->tx_buf ? 1 : 0, in s3c64xx_spi_transfer_one()
794 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
795 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
796 xfer->len, use_dma ? 1 : 0, status); in s3c64xx_spi_transfer_one()
801 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
802 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
803 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
804 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
805 dev_err(&spi->dev, "TX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
808 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
809 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
810 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
811 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
812 dev_err(&spi->dev, "RX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
819 target_len -= xfer->len; in s3c64xx_spi_transfer_one()
821 if (xfer->tx_buf) in s3c64xx_spi_transfer_one()
822 xfer->tx_buf += xfer->len; in s3c64xx_spi_transfer_one()
824 if (xfer->rx_buf) in s3c64xx_spi_transfer_one()
825 xfer->rx_buf += xfer->len; in s3c64xx_spi_transfer_one()
828 xfer->len = fifo_len; in s3c64xx_spi_transfer_one()
830 xfer->len = target_len; in s3c64xx_spi_transfer_one()
836 xfer->tx_buf = tx_buf; in s3c64xx_spi_transfer_one()
837 xfer->rx_buf = rx_buf; in s3c64xx_spi_transfer_one()
838 xfer->len = origin_len; in s3c64xx_spi_transfer_one()
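
In polling mode, s3c64xx_spi_transfer_one() splits a transfer that exceeds the FIFO into FIFO-sized pieces, advancing the buffers each round and restoring the original pointers and length afterwards. A condensed sketch of that loop structure, with the per-chunk I/O abstracted behind a hypothetical do_chunk() callback:

#include <linux/minmax.h>
#include <linux/types.h>

/* Sketch only: process 'len' bytes in chunks of at most 'fifo_len' while
 * leaving the caller's view of the buffers untouched.
 */
static int xfer_in_chunks(const u8 *tx, u8 *rx, unsigned int len,
                          unsigned int fifo_len,
                          int (*do_chunk)(const u8 *tx, u8 *rx,
                                          unsigned int n, void *ctx),
                          void *ctx)
{
        unsigned int remain = len;
        int ret = 0;

        while (remain && !ret) {
                unsigned int n = min(remain, fifo_len);

                ret = do_chunk(tx, rx, n, ctx);
                if (tx)
                        tx += n;
                if (rx)
                        rx += n;
                remain -= n;
        }

        return ret;
}
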
851 slave_np = spi->dev.of_node; in s3c64xx_get_slave_ctrldata()
853 dev_err(&spi->dev, "device node not found\n"); in s3c64xx_get_slave_ctrldata()
854 return ERR_PTR(-EINVAL); in s3c64xx_get_slave_ctrldata()
859 return ERR_PTR(-ENOMEM); in s3c64xx_get_slave_ctrldata()
861 data_np = of_get_child_by_name(slave_np, "controller-data"); in s3c64xx_get_slave_ctrldata()
863 dev_info(&spi->dev, "feedback delay set to default (0)\n"); in s3c64xx_get_slave_ctrldata()
867 of_property_read_u32(data_np, "samsung,spi-feedback-delay", &fb_delay); in s3c64xx_get_slave_ctrldata()
868 cs->fb_delay = fb_delay; in s3c64xx_get_slave_ctrldata()
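
s3c64xx_get_slave_ctrldata() reads the per-device "controller-data" subnode. A sketch of that lookup with the usual of_* helpers, defaulting to 0 when the node or property is absent; the node and property names follow the binding quoted above:

#include <linux/of.h>
#include <linux/types.h>

/* Sketch only: fetch "samsung,spi-feedback-delay" from the child's
 * "controller-data" node, defaulting to 0 when absent.
 */
static u32 read_fb_delay(struct device_node *slave_np)
{
        struct device_node *data_np;
        u32 fb_delay = 0;

        data_np = of_get_child_by_name(slave_np, "controller-data");
        if (!data_np)
                return 0;

        of_property_read_u32(data_np, "samsung,spi-feedback-delay", &fb_delay);
        of_node_put(data_np);

        return fb_delay;
}
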
875 * and save the configuration in a local data-structure.
881 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_setup()
886 sdd = spi_master_get_devdata(spi->master); in s3c64xx_spi_setup()
887 if (spi->dev.of_node) { in s3c64xx_spi_setup()
889 spi->controller_data = cs; in s3c64xx_spi_setup()
894 dev_err(&spi->dev, "No CS for SPI(%d)\n", spi->chip_select); in s3c64xx_spi_setup()
895 return -ENODEV; in s3c64xx_spi_setup()
901 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
903 div = sdd->port_conf->clk_div; in s3c64xx_spi_setup()
906 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
910 speed = clk_get_rate(sdd->src_clk) / div / (0 + 1); in s3c64xx_spi_setup()
912 if (spi->max_speed_hz > speed) in s3c64xx_spi_setup()
913 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
915 psr = clk_get_rate(sdd->src_clk) / div / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
918 psr--; in s3c64xx_spi_setup()
920 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
921 if (spi->max_speed_hz < speed) { in s3c64xx_spi_setup()
925 err = -EINVAL; in s3c64xx_spi_setup()
930 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
931 if (spi->max_speed_hz >= speed) { in s3c64xx_spi_setup()
932 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
934 dev_err(&spi->dev, "Can't set %dHz transfer speed\n", in s3c64xx_spi_setup()
935 spi->max_speed_hz); in s3c64xx_spi_setup()
936 err = -EINVAL; in s3c64xx_spi_setup()
941 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
942 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
948 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
949 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
950 /* setup() returns with device de-selected */ in s3c64xx_spi_setup()
956 if (spi->dev.of_node) in s3c64xx_spi_setup()
967 if (spi->dev.of_node) in s3c64xx_spi_cleanup()
976 struct spi_master *spi = sdd->master; in s3c64xx_spi_irq()
979 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
983 dev_err(&spi->dev, "RX overrun\n"); in s3c64xx_spi_irq()
987 dev_err(&spi->dev, "RX underrun\n"); in s3c64xx_spi_irq()
991 dev_err(&spi->dev, "TX overrun\n"); in s3c64xx_spi_irq()
995 dev_err(&spi->dev, "TX underrun\n"); in s3c64xx_spi_irq()
999 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1000 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
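
The interrupt handler reads the status register, logs each error condition it finds, and acknowledges them by writing and then clearing the pending-clear register. A sketch of that read/report/ack shape with placeholder bit names; the real masks live in the driver's register definitions:

#include <linux/bits.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/io.h>

/* Placeholder bits for the sketch only. */
#define STAT_RX_OVERRUN         BIT(0)
#define STAT_TX_UNDERRUN        BIT(1)
#define CLR_RX_OVERRUN          BIT(0)
#define CLR_TX_UNDERRUN         BIT(1)

struct spi_irq_ctx {
        void __iomem *status_reg;
        void __iomem *pending_clr_reg;
        struct device *dev;
};

/* Sketch only: report error bits, then pulse the pending-clear register. */
static irqreturn_t spi_err_irq(int irq, void *dev_id)
{
        struct spi_irq_ctx *ctx = dev_id;
        u32 val = readl(ctx->status_reg);
        u32 clr = 0;

        if (val & STAT_RX_OVERRUN) {
                clr |= CLR_RX_OVERRUN;
                dev_err(ctx->dev, "RX overrun\n");
        }
        if (val & STAT_TX_UNDERRUN) {
                clr |= CLR_TX_UNDERRUN;
                dev_err(ctx->dev, "TX underrun\n");
        }

        writel(clr, ctx->pending_clr_reg);
        writel(0, ctx->pending_clr_reg);

        return IRQ_HANDLED;
}
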
1007 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
1008 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
1011 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
1013 if (sci->no_cs) in s3c64xx_spi_hwinit()
1014 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1015 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
1016 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1018 /* Disable Interrupts - we use Polling if not DMA mode */ in s3c64xx_spi_hwinit()
1021 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
1022 writel(sci->src_clk_nr << S3C64XX_SPI_CLKSEL_SRCSHFT, in s3c64xx_spi_hwinit()
1054 return ERR_PTR(-ENOMEM); in s3c64xx_spi_parse_dt()
1056 if (of_property_read_u32(dev->of_node, "samsung,spi-src-clk", &temp)) { in s3c64xx_spi_parse_dt()
1058 sci->src_clk_nr = 0; in s3c64xx_spi_parse_dt()
1060 sci->src_clk_nr = temp; in s3c64xx_spi_parse_dt()
1063 if (of_property_read_u32(dev->of_node, "num-cs", &temp)) { in s3c64xx_spi_parse_dt()
1065 sci->num_cs = 1; in s3c64xx_spi_parse_dt()
1067 sci->num_cs = temp; in s3c64xx_spi_parse_dt()
1070 sci->no_cs = of_property_read_bool(dev->of_node, "no-cs-readback"); in s3c64xx_spi_parse_dt()
1085 if (pdev->dev.of_node) in s3c64xx_spi_get_port_config()
1086 return of_device_get_match_data(&pdev->dev); in s3c64xx_spi_get_port_config()
1088 return (const struct s3c64xx_spi_port_config *)platform_get_device_id(pdev)->driver_data; in s3c64xx_spi_get_port_config()
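
s3c64xx_spi_get_port_config() picks the per-SoC configuration either from the OF match entry or, on board-file boots, from the platform_device_id table. A small sketch of that dual lookup:

#include <linux/of_device.h>
#include <linux/platform_device.h>

/* Sketch only: DT boots take the data from the OF match entry,
 * non-DT boots from the platform_device_id table.
 */
static const void *get_variant_data(struct platform_device *pdev)
{
        if (pdev->dev.of_node)
                return of_device_get_match_data(&pdev->dev);

        return (const void *)platform_get_device_id(pdev)->driver_data;
}
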
1095 struct s3c64xx_spi_info *sci = dev_get_platdata(&pdev->dev); in s3c64xx_spi_probe()
1100 if (!sci && pdev->dev.of_node) { in s3c64xx_spi_probe()
1101 sci = s3c64xx_spi_parse_dt(&pdev->dev); in s3c64xx_spi_probe()
1107 dev_err(&pdev->dev, "platform_data missing!\n"); in s3c64xx_spi_probe()
1108 return -ENODEV; in s3c64xx_spi_probe()
1113 dev_err(&pdev->dev, "Unable to get SPI MEM resource\n"); in s3c64xx_spi_probe()
1114 return -ENXIO; in s3c64xx_spi_probe()
1119 dev_warn(&pdev->dev, "Failed to get IRQ: %d\n", irq); in s3c64xx_spi_probe()
1123 master = spi_alloc_master(&pdev->dev, in s3c64xx_spi_probe()
1126 dev_err(&pdev->dev, "Unable to allocate SPI Master\n"); in s3c64xx_spi_probe()
1127 return -ENOMEM; in s3c64xx_spi_probe()
1133 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1134 sdd->master = master; in s3c64xx_spi_probe()
1135 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1136 sdd->pdev = pdev; in s3c64xx_spi_probe()
1137 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1138 if (pdev->dev.of_node) { in s3c64xx_spi_probe()
1139 ret = of_alias_get_id(pdev->dev.of_node, "spi"); in s3c64xx_spi_probe()
1141 dev_err(&pdev->dev, "failed to get alias id, errno %d\n", in s3c64xx_spi_probe()
1145 sdd->port_id = ret; in s3c64xx_spi_probe()
1147 sdd->port_id = pdev->id; in s3c64xx_spi_probe()
1150 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1152 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1153 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1155 master->dev.of_node = pdev->dev.of_node; in s3c64xx_spi_probe()
1156 master->bus_num = sdd->port_id; in s3c64xx_spi_probe()
1157 master->setup = s3c64xx_spi_setup; in s3c64xx_spi_probe()
1158 master->cleanup = s3c64xx_spi_cleanup; in s3c64xx_spi_probe()
1159 master->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer; in s3c64xx_spi_probe()
1160 master->unprepare_transfer_hardware = s3c64xx_spi_unprepare_transfer; in s3c64xx_spi_probe()
1161 master->prepare_message = s3c64xx_spi_prepare_message; in s3c64xx_spi_probe()
1162 master->transfer_one = s3c64xx_spi_transfer_one; in s3c64xx_spi_probe()
1163 master->max_transfer_size = s3c64xx_spi_max_transfer_size; in s3c64xx_spi_probe()
1164 master->num_chipselect = sci->num_cs; in s3c64xx_spi_probe()
1165 master->use_gpio_descriptors = true; in s3c64xx_spi_probe()
1166 master->dma_alignment = 8; in s3c64xx_spi_probe()
1167 master->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | in s3c64xx_spi_probe()
1169 /* the spi->mode bits understood by this driver: */ in s3c64xx_spi_probe()
1170 master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH; in s3c64xx_spi_probe()
1171 if (sdd->port_conf->has_loopback) in s3c64xx_spi_probe()
1172 master->mode_bits |= SPI_LOOP; in s3c64xx_spi_probe()
1173 master->auto_runtime_pm = true; in s3c64xx_spi_probe()
1175 master->can_dma = s3c64xx_spi_can_dma; in s3c64xx_spi_probe()
1177 sdd->regs = devm_ioremap_resource(&pdev->dev, mem_res); in s3c64xx_spi_probe()
1178 if (IS_ERR(sdd->regs)) { in s3c64xx_spi_probe()
1179 ret = PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1183 if (sci->cfg_gpio && sci->cfg_gpio()) { in s3c64xx_spi_probe()
1184 dev_err(&pdev->dev, "Unable to config gpio\n"); in s3c64xx_spi_probe()
1185 ret = -EBUSY; in s3c64xx_spi_probe()
1190 sdd->clk = devm_clk_get(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1191 if (IS_ERR(sdd->clk)) { in s3c64xx_spi_probe()
1192 dev_err(&pdev->dev, "Unable to acquire clock 'spi'\n"); in s3c64xx_spi_probe()
1193 ret = PTR_ERR(sdd->clk); in s3c64xx_spi_probe()
1197 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_probe()
1199 dev_err(&pdev->dev, "Couldn't enable clock 'spi'\n"); in s3c64xx_spi_probe()
1203 sprintf(clk_name, "spi_busclk%d", sci->src_clk_nr); in s3c64xx_spi_probe()
1204 sdd->src_clk = devm_clk_get(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1205 if (IS_ERR(sdd->src_clk)) { in s3c64xx_spi_probe()
1206 dev_err(&pdev->dev, in s3c64xx_spi_probe()
1208 ret = PTR_ERR(sdd->src_clk); in s3c64xx_spi_probe()
1212 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_probe()
1214 dev_err(&pdev->dev, "Couldn't enable clock '%s'\n", clk_name); in s3c64xx_spi_probe()
1218 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1219 sdd->ioclk = devm_clk_get(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1220 if (IS_ERR(sdd->ioclk)) { in s3c64xx_spi_probe()
1221 dev_err(&pdev->dev, "Unable to acquire 'ioclk'\n"); in s3c64xx_spi_probe()
1222 ret = PTR_ERR(sdd->ioclk); in s3c64xx_spi_probe()
1226 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_probe()
1228 dev_err(&pdev->dev, "Couldn't enable clock 'ioclk'\n"); in s3c64xx_spi_probe()
1233 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT); in s3c64xx_spi_probe()
1234 pm_runtime_use_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1235 pm_runtime_set_active(&pdev->dev); in s3c64xx_spi_probe()
1236 pm_runtime_enable(&pdev->dev); in s3c64xx_spi_probe()
1237 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_probe()
1242 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1243 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1245 ret = devm_request_irq(&pdev->dev, irq, s3c64xx_spi_irq, 0, in s3c64xx_spi_probe()
1246 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1248 dev_err(&pdev->dev, "Failed to request IRQ %d: %d\n", in s3c64xx_spi_probe()
1255 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1257 ret = devm_spi_register_master(&pdev->dev, master); in s3c64xx_spi_probe()
1259 dev_err(&pdev->dev, "cannot register SPI master: %d\n", ret); in s3c64xx_spi_probe()
1263 dev_dbg(&pdev->dev, "Samsung SoC SPI Driver loaded for Bus SPI-%d with %d Slaves attached\n", in s3c64xx_spi_probe()
1264 sdd->port_id, master->num_chipselect); in s3c64xx_spi_probe()
1265 dev_dbg(&pdev->dev, "\tIOmem=[%pR]\tFIFO %dbytes\n", in s3c64xx_spi_probe()
1268 pm_runtime_mark_last_busy(&pdev->dev); in s3c64xx_spi_probe()
1269 pm_runtime_put_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1274 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_probe()
1275 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_probe()
1276 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_probe()
1278 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_probe()
1280 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_probe()
1282 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_probe()
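
The probe path enables up to three clocks ("spi", the selected bus clock, and optionally "spi_ioclk") and unwinds them in reverse order on failure, as the error labels above show. A sketch of that goto-based unwind for two clocks:

#include <linux/clk.h>
#include <linux/device.h>

/* Sketch only: enable a gate clock and a source clock, undoing the first
 * if the second fails, in the usual goto-unwind style.
 */
static int enable_spi_clocks(struct device *dev, struct clk *gate,
                             struct clk *src)
{
        int ret;

        ret = clk_prepare_enable(gate);
        if (ret) {
                dev_err(dev, "couldn't enable gate clock\n");
                return ret;
        }

        ret = clk_prepare_enable(src);
        if (ret) {
                dev_err(dev, "couldn't enable source clock\n");
                goto err_gate;
        }

        return 0;

err_gate:
        clk_disable_unprepare(gate);
        return ret;
}
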
1294 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_remove()
1296 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1299 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1300 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1303 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_remove()
1305 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_remove()
1307 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_remove()
1309 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_remove()
1310 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_remove()
1311 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_remove()
1330 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1339 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1342 if (sci->cfg_gpio) in s3c64xx_spi_resume()
1343 sci->cfg_gpio(); in s3c64xx_spi_resume()
1359 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1360 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1361 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1372 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1373 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1378 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1382 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1390 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1395 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1397 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1489 .name = "s3c2443-spi",
1492 .name = "s3c6410-spi",
1499 { .compatible = "samsung,s3c2443-spi",
1502 { .compatible = "samsung,s3c6410-spi",
1505 { .compatible = "samsung,s5pv210-spi",
1508 { .compatible = "samsung,exynos4210-spi",
1511 { .compatible = "samsung,exynos7-spi",
1514 { .compatible = "samsung,exynos5433-spi",
1517 { .compatible = "samsung,exynosautov9-spi",
1520 { .compatible = "tesla,fsd-spi",
1529 .name = "s3c64xx-spi",
1537 MODULE_ALIAS("platform:s3c64xx-spi");
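
Each compatible string above is tied to a per-SoC port configuration through the driver's OF match table. A reduced sketch of how such a table maps a compatible to its config; the demo_* names and the trimmed config struct are stand-ins, not the driver's s3c64xx_spi_port_config:

#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/types.h>

/* Reduced stand-in for the per-SoC configuration. */
struct demo_port_config {
        bool clk_from_cmu;
        int quirks;
};

static const struct demo_port_config demo_fsd_config = {
        .clk_from_cmu = true,
};

static const struct of_device_id demo_spi_dt_match[] = {
        { .compatible = "tesla,fsd-spi", .data = &demo_fsd_config },
        { },
};
MODULE_DEVICE_TABLE(of, demo_spi_dt_match);
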