Lines matching refs:mdata

178 static void mtk_spi_reset(struct mtk_spi *mdata)  in mtk_spi_reset()  argument
183 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
185 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
187 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
189 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
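
The matched lines above are the register accesses inside mtk_spi_reset(). A minimal sketch of the whole helper, filled in from these lines (the SPI_CMD_RST bit name is assumed from the mainline spi-mt65xx.c driver), pulses the software-reset bit in SPI_CMD_REG:

static void mtk_spi_reset(struct mtk_spi *mdata)
{
        u32 reg_val;

        /* set, then clear, the software reset bit in SPI_CMD_REG */
        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val |= SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);

        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val &= ~SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);
}
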
199 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_prepare_message() local
204 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_prepare_message()
232 if (mdata->dev_comp->enhance_timing) { in mtk_spi_prepare_message()
252 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_prepare_message()
255 if (mdata->dev_comp->need_pad_sel) in mtk_spi_prepare_message()
256 writel(mdata->pad_sel[spi->chip_select], in mtk_spi_prepare_message()
257 mdata->base + SPI_PAD_SEL_REG); in mtk_spi_prepare_message()
265 struct mtk_spi *mdata = spi_master_get_devdata(spi->master); in mtk_spi_set_cs() local
267 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
270 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
273 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
274 mdata->state = MTK_SPI_IDLE; in mtk_spi_set_cs()
275 mtk_spi_reset(mdata); in mtk_spi_set_cs()
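
mtk_spi_set_cs() drives chip select through the controller's pause mechanism rather than a GPIO. A sketch of the full function reconstructed around the matched lines (the SPI_CMD_PAUSE_EN bit and the SPI_CS_HIGH inversion are assumed from the mainline driver):

static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
{
        u32 reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

        if (spi->mode & SPI_CS_HIGH)
                enable = !enable;

        reg_val = readl(mdata->base + SPI_CMD_REG);
        if (!enable) {
                /* enter pause mode so CS is held between transfers */
                reg_val |= SPI_CMD_PAUSE_EN;
                writel(reg_val, mdata->base + SPI_CMD_REG);
        } else {
                /* leave pause mode, mark the controller idle and reset it */
                reg_val &= ~SPI_CMD_PAUSE_EN;
                writel(reg_val, mdata->base + SPI_CMD_REG);
                mdata->state = MTK_SPI_IDLE;
                mtk_spi_reset(mdata);
        }
}
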
283 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_prepare_transfer() local
285 spi_clk_hz = clk_get_rate(mdata->spi_clk); in mtk_spi_prepare_transfer()
294 if (mdata->dev_comp->enhance_timing) { in mtk_spi_prepare_transfer()
299 writel(reg_val, mdata->base + SPI_CFG2_REG); in mtk_spi_prepare_transfer()
304 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
311 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
314 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_prepare_transfer()
317 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_prepare_transfer()
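
mtk_spi_prepare_transfer() derives the SCK high/low and CS timing counters from the ratio of the spi_clk rate to the requested transfer speed, then programs SPI_CFG0/CFG1 (and SPI_CFG2 on enhance_timing controllers). A rough sketch of that calculation; the SPI_CFG0_*/SPI_CFG1_*/SPI_CFG2_*/SPI_ADJUST_CFG0_* field names are assumed from the mainline driver and the exact field layout varies between kernel versions:

static void mtk_spi_prepare_transfer(struct spi_master *master,
                                     struct spi_transfer *xfer)
{
        u32 spi_clk_hz, div, sck_time, cs_time, reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        spi_clk_hz = clk_get_rate(mdata->spi_clk);
        if (xfer->speed_hz < spi_clk_hz / 2)
                div = DIV_ROUND_UP(spi_clk_hz, xfer->speed_hz);
        else
                div = 1;

        sck_time = (div + 1) / 2;
        cs_time = sck_time * 2;

        if (mdata->dev_comp->enhance_timing) {
                /* wider (16-bit) SCK counters live in SPI_CFG2_REG on newer SoCs */
                reg_val = ((sck_time - 1) & 0xffff) << SPI_CFG2_SCK_HIGH_OFFSET;
                reg_val |= ((sck_time - 1) & 0xffff) << SPI_CFG2_SCK_LOW_OFFSET;
                writel(reg_val, mdata->base + SPI_CFG2_REG);

                reg_val = ((cs_time - 1) & 0xffff) << SPI_ADJUST_CFG0_CS_HOLD_OFFSET;
                reg_val |= ((cs_time - 1) & 0xffff) << SPI_ADJUST_CFG0_CS_SETUP_OFFSET;
                writel(reg_val, mdata->base + SPI_CFG0_REG);
        } else {
                reg_val = ((sck_time - 1) & 0xff) << SPI_CFG0_SCK_HIGH_OFFSET;
                reg_val |= ((sck_time - 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET;
                reg_val |= ((cs_time - 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET;
                reg_val |= ((cs_time - 1) & 0xff) << SPI_CFG0_CS_SETUP_OFFSET;
                writel(reg_val, mdata->base + SPI_CFG0_REG);
        }

        reg_val = readl(mdata->base + SPI_CFG1_REG);
        reg_val &= ~SPI_CFG1_CS_IDLE_MASK;
        reg_val |= ((cs_time - 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET;
        writel(reg_val, mdata->base + SPI_CFG1_REG);
}
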
323 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_setup_packet() local
325 packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE); in mtk_spi_setup_packet()
326 packet_loop = mdata->xfer_len / packet_size; in mtk_spi_setup_packet()
328 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
332 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
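
mtk_spi_setup_packet() splits the current transfer into packets of at most MTK_SPI_PACKET_SIZE (1024) bytes and programs the packet length and loop count into SPI_CFG1_REG. A sketch, with the SPI_CFG1_PACKET_* masks and offsets assumed from the mainline driver:

static void mtk_spi_setup_packet(struct spi_master *master)
{
        u32 packet_size, packet_loop, reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
        packet_loop = mdata->xfer_len / packet_size;

        reg_val = readl(mdata->base + SPI_CFG1_REG);
        reg_val &= ~(SPI_CFG1_PACKET_LENGTH_MASK | SPI_CFG1_PACKET_LOOP_MASK);
        reg_val |= (packet_size - 1) << SPI_CFG1_PACKET_LENGTH_OFFSET;
        reg_val |= (packet_loop - 1) << SPI_CFG1_PACKET_LOOP_OFFSET;
        writel(reg_val, mdata->base + SPI_CFG1_REG);
}
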
338 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_enable_transfer() local
340 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
341 if (mdata->state == MTK_SPI_IDLE) in mtk_spi_enable_transfer()
345 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
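
mtk_spi_enable_transfer() kicks the controller: a fresh transfer is started with the ACT bit, while a paused transfer (CS held between transfers) is continued with RESUME. Sketch, with SPI_CMD_ACT/SPI_CMD_RESUME assumed from the mainline driver:

static void mtk_spi_enable_transfer(struct spi_master *master)
{
        u32 cmd;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        cmd = readl(mdata->base + SPI_CMD_REG);
        if (mdata->state == MTK_SPI_IDLE)
                cmd |= SPI_CMD_ACT;
        else
                cmd |= SPI_CMD_RESUME;
        writel(cmd, mdata->base + SPI_CMD_REG);
}
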
363 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_update_mdata_len() local
365 if (mdata->tx_sgl_len && mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
366 if (mdata->tx_sgl_len > mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
367 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
368 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
369 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
370 mdata->tx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
372 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
373 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
374 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
375 mdata->rx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
377 } else if (mdata->tx_sgl_len) { in mtk_spi_update_mdata_len()
378 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
379 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
380 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
381 } else if (mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
382 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
383 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
384 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
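
The mult_delta logic above keeps each DMA round a whole multiple of MTK_SPI_PACKET_SIZE: the remainder of the shorter scatterlist entry is deferred to the next round. The helper it calls is small; a sketch of it, assumed from the mainline driver:

static u32 mtk_spi_get_mult_delta(u32 xfer_len)
{
        u32 mult_delta;

        /* bytes left over after the last full MTK_SPI_PACKET_SIZE packet */
        if (xfer_len > MTK_SPI_PACKET_SIZE)
                mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
        else
                mult_delta = 0;

        return mult_delta;
}
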
391 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_setup_dma_addr() local
393 if (mdata->tx_sgl) { in mtk_spi_setup_dma_addr()
395 mdata->base + SPI_TX_SRC_REG); in mtk_spi_setup_dma_addr()
397 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
399 mdata->base + SPI_TX_SRC_REG_64); in mtk_spi_setup_dma_addr()
403 if (mdata->rx_sgl) { in mtk_spi_setup_dma_addr()
405 mdata->base + SPI_RX_DST_REG); in mtk_spi_setup_dma_addr()
407 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
409 mdata->base + SPI_RX_DST_REG_64); in mtk_spi_setup_dma_addr()
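
mtk_spi_setup_dma_addr() programs the DMA source/destination registers; controllers with dev_comp->dma_ext also get the upper 32 address bits written to the *_64 registers. Sketch, with MTK_SPI_32BITS_MASK and the CONFIG_ARCH_DMA_ADDR_T_64BIT guard assumed from the mainline driver:

static void mtk_spi_setup_dma_addr(struct spi_master *master,
                                   struct spi_transfer *xfer)
{
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        if (mdata->tx_sgl) {
                writel((u32)(xfer->tx_dma & MTK_SPI_32BITS_MASK),
                       mdata->base + SPI_TX_SRC_REG);
#ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
                if (mdata->dev_comp->dma_ext)
                        writel((u32)(xfer->tx_dma >> 32),
                               mdata->base + SPI_TX_SRC_REG_64);
#endif
        }

        if (mdata->rx_sgl) {
                writel((u32)(xfer->rx_dma & MTK_SPI_32BITS_MASK),
                       mdata->base + SPI_RX_DST_REG);
#ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT
                if (mdata->dev_comp->dma_ext)
                        writel((u32)(xfer->rx_dma >> 32),
                               mdata->base + SPI_RX_DST_REG_64);
#endif
        }
}
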
420 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_fifo_transfer() local
422 mdata->cur_transfer = xfer; in mtk_spi_fifo_transfer()
423 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len); in mtk_spi_fifo_transfer()
424 mdata->num_xfered = 0; in mtk_spi_fifo_transfer()
429 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt); in mtk_spi_fifo_transfer()
435 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_fifo_transfer()
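
mtk_spi_fifo_transfer() handles PIO transfers of up to MTK_SPI_MAX_FIFO_SIZE (32) bytes per round: it fills the TX FIFO word by word, packs the trailing 1-3 bytes into one last word, and starts the engine; RX draining and any remaining bytes are handled in the interrupt handler. A sketch (details differ slightly between kernel versions):

static int mtk_spi_fifo_transfer(struct spi_master *master,
                                 struct spi_device *spi,
                                 struct spi_transfer *xfer)
{
        int cnt, remainder;
        u32 reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        mdata->cur_transfer = xfer;
        mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
        mdata->num_xfered = 0;
        mtk_spi_prepare_transfer(master, xfer);
        mtk_spi_setup_packet(master);

        /* push whole 32-bit words into the TX FIFO ... */
        cnt = mdata->xfer_len / 4;
        iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);

        /* ... then the leftover bytes, zero-padded into one word */
        remainder = mdata->xfer_len % 4;
        if (remainder > 0) {
                reg_val = 0;
                memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
                writel(reg_val, mdata->base + SPI_TX_DATA_REG);
        }

        mtk_spi_enable_transfer(master);

        return 1;
}

Returning 1 tells the SPI core the transfer completes asynchronously; the interrupt handler finalizes it.
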
448 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_dma_transfer() local
450 mdata->tx_sgl = NULL; in mtk_spi_dma_transfer()
451 mdata->rx_sgl = NULL; in mtk_spi_dma_transfer()
452 mdata->tx_sgl_len = 0; in mtk_spi_dma_transfer()
453 mdata->rx_sgl_len = 0; in mtk_spi_dma_transfer()
454 mdata->cur_transfer = xfer; in mtk_spi_dma_transfer()
455 mdata->num_xfered = 0; in mtk_spi_dma_transfer()
459 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
464 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
467 mdata->tx_sgl = xfer->tx_sg.sgl; in mtk_spi_dma_transfer()
469 mdata->rx_sgl = xfer->rx_sg.sgl; in mtk_spi_dma_transfer()
471 if (mdata->tx_sgl) { in mtk_spi_dma_transfer()
472 xfer->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_dma_transfer()
473 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_dma_transfer()
475 if (mdata->rx_sgl) { in mtk_spi_dma_transfer()
476 xfer->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_dma_transfer()
477 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_dma_transfer()
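
mtk_spi_dma_transfer() caches the first TX/RX scatterlist entries in mdata, enables the DMA request bits in SPI_CMD_REG, and reuses the helpers above for length trimming, packet setup and address programming. Sketch, with SPI_CMD_TX_DMA/SPI_CMD_RX_DMA assumed from the mainline driver:

static int mtk_spi_dma_transfer(struct spi_master *master,
                                struct spi_device *spi,
                                struct spi_transfer *xfer)
{
        int cmd;
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        mdata->tx_sgl = NULL;
        mdata->rx_sgl = NULL;
        mdata->tx_sgl_len = 0;
        mdata->rx_sgl_len = 0;
        mdata->cur_transfer = xfer;
        mdata->num_xfered = 0;

        mtk_spi_prepare_transfer(master, xfer);

        cmd = readl(mdata->base + SPI_CMD_REG);
        if (xfer->tx_buf)
                cmd |= SPI_CMD_TX_DMA;
        if (xfer->rx_buf)
                cmd |= SPI_CMD_RX_DMA;
        writel(cmd, mdata->base + SPI_CMD_REG);

        if (xfer->tx_buf)
                mdata->tx_sgl = xfer->tx_sg.sgl;
        if (xfer->rx_buf)
                mdata->rx_sgl = xfer->rx_sg.sgl;

        if (mdata->tx_sgl) {
                xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
                mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
        }
        if (mdata->rx_sgl) {
                xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
                mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
        }

        mtk_spi_update_mdata_len(master);
        mtk_spi_setup_packet(master);
        mtk_spi_setup_dma_addr(master, xfer);
        mtk_spi_enable_transfer(master);

        return 1;
}

The interrupt handler then walks the remaining scatterlist entries round by round, as the matched lines below show.
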
510 struct mtk_spi *mdata = spi_master_get_devdata(spi->master); in mtk_spi_setup() local
515 if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio)) in mtk_spi_setup()
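
mtk_spi_setup() is mostly glue: it falls back to a default chip config and, on pad-sel controllers that still use legacy GPIO chip selects, parks the CS GPIO at its inactive level. A sketch, with mtk_default_chip_info assumed from the mainline driver of that era:

static int mtk_spi_setup(struct spi_device *spi)
{
        struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

        if (!spi->controller_data)
                spi->controller_data = (void *)&mtk_default_chip_info;

        if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio))
                /* drive the CS GPIO to its inactive level */
                gpio_direction_output(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH));

        return 0;
}
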
525 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_interrupt() local
526 struct spi_transfer *trans = mdata->cur_transfer; in mtk_spi_interrupt()
528 reg_val = readl(mdata->base + SPI_STATUS0_REG); in mtk_spi_interrupt()
530 mdata->state = MTK_SPI_PAUSED; in mtk_spi_interrupt()
532 mdata->state = MTK_SPI_IDLE; in mtk_spi_interrupt()
536 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt()
537 ioread32_rep(mdata->base + SPI_RX_DATA_REG, in mtk_spi_interrupt()
538 trans->rx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt()
539 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt()
541 reg_val = readl(mdata->base + SPI_RX_DATA_REG); in mtk_spi_interrupt()
543 mdata->num_xfered + in mtk_spi_interrupt()
550 mdata->num_xfered += mdata->xfer_len; in mtk_spi_interrupt()
551 if (mdata->num_xfered == trans->len) { in mtk_spi_interrupt()
556 len = trans->len - mdata->num_xfered; in mtk_spi_interrupt()
557 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len); in mtk_spi_interrupt()
560 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt()
561 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, in mtk_spi_interrupt()
562 trans->tx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt()
564 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt()
568 trans->tx_buf + (cnt * 4) + mdata->num_xfered, in mtk_spi_interrupt()
570 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_interrupt()
578 if (mdata->tx_sgl) in mtk_spi_interrupt()
579 trans->tx_dma += mdata->xfer_len; in mtk_spi_interrupt()
580 if (mdata->rx_sgl) in mtk_spi_interrupt()
581 trans->rx_dma += mdata->xfer_len; in mtk_spi_interrupt()
583 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) { in mtk_spi_interrupt()
584 mdata->tx_sgl = sg_next(mdata->tx_sgl); in mtk_spi_interrupt()
585 if (mdata->tx_sgl) { in mtk_spi_interrupt()
586 trans->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_interrupt()
587 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_interrupt()
590 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) { in mtk_spi_interrupt()
591 mdata->rx_sgl = sg_next(mdata->rx_sgl); in mtk_spi_interrupt()
592 if (mdata->rx_sgl) { in mtk_spi_interrupt()
593 trans->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_interrupt()
594 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_interrupt()
598 if (!mdata->tx_sgl && !mdata->rx_sgl) { in mtk_spi_interrupt()
600 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_interrupt()
603 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_interrupt()
620 struct mtk_spi *mdata; in mtk_spi_probe() local
625 master = spi_alloc_master(&pdev->dev, sizeof(*mdata)); in mtk_spi_probe()
648 mdata = spi_master_get_devdata(master); in mtk_spi_probe()
649 mdata->dev_comp = of_id->data; in mtk_spi_probe()
650 if (mdata->dev_comp->must_tx) in mtk_spi_probe()
653 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
654 mdata->pad_num = of_property_count_u32_elems( in mtk_spi_probe()
657 if (mdata->pad_num < 0) { in mtk_spi_probe()
664 mdata->pad_sel = devm_kmalloc_array(&pdev->dev, mdata->pad_num, in mtk_spi_probe()
666 if (!mdata->pad_sel) { in mtk_spi_probe()
671 for (i = 0; i < mdata->pad_num; i++) { in mtk_spi_probe()
674 i, &mdata->pad_sel[i]); in mtk_spi_probe()
675 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) { in mtk_spi_probe()
677 i, mdata->pad_sel[i]); in mtk_spi_probe()
693 mdata->base = devm_ioremap_resource(&pdev->dev, res); in mtk_spi_probe()
694 if (IS_ERR(mdata->base)) { in mtk_spi_probe()
695 ret = PTR_ERR(mdata->base); in mtk_spi_probe()
715 mdata->parent_clk = devm_clk_get(&pdev->dev, "parent-clk"); in mtk_spi_probe()
716 if (IS_ERR(mdata->parent_clk)) { in mtk_spi_probe()
717 ret = PTR_ERR(mdata->parent_clk); in mtk_spi_probe()
722 mdata->sel_clk = devm_clk_get(&pdev->dev, "sel-clk"); in mtk_spi_probe()
723 if (IS_ERR(mdata->sel_clk)) { in mtk_spi_probe()
724 ret = PTR_ERR(mdata->sel_clk); in mtk_spi_probe()
729 mdata->spi_clk = devm_clk_get(&pdev->dev, "spi-clk"); in mtk_spi_probe()
730 if (IS_ERR(mdata->spi_clk)) { in mtk_spi_probe()
731 ret = PTR_ERR(mdata->spi_clk); in mtk_spi_probe()
736 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_probe()
742 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk); in mtk_spi_probe()
745 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_probe()
749 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_probe()
759 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
760 if (mdata->pad_num != master->num_chipselect) { in mtk_spi_probe()
763 mdata->pad_num, master->num_chipselect); in mtk_spi_probe()
789 if (mdata->dev_comp->dma_ext) in mtk_spi_probe()
811 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_remove() local
815 mtk_spi_reset(mdata); in mtk_spi_remove()
825 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_suspend() local
832 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_suspend()
841 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_resume() local
844 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_resume()
853 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_resume()
863 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_runtime_suspend() local
865 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_runtime_suspend()
873 struct mtk_spi *mdata = spi_master_get_devdata(master); in mtk_spi_runtime_resume() local
876 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_runtime_resume()
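
The suspend/resume and runtime-PM callbacks all reduce to gating spi_clk. A sketch of the runtime-PM pair matched above, reconstructed from these lines:

static int mtk_spi_runtime_suspend(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);

        clk_disable_unprepare(mdata->spi_clk);

        return 0;
}

static int mtk_spi_runtime_resume(struct device *dev)
{
        struct spi_master *master = dev_get_drvdata(dev);
        struct mtk_spi *mdata = spi_master_get_devdata(master);
        int ret;

        ret = clk_prepare_enable(mdata->spi_clk);
        if (ret < 0) {
                dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
                return ret;
        }

        return 0;
}
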