Lines matching references to identifier: dws

38 static int mid_spi_dma_init(struct dw_spi *dws) in mid_spi_dma_init() argument
41 struct dw_dma_slave *tx = dws->dma_tx; in mid_spi_dma_init()
42 struct dw_dma_slave *rx = dws->dma_rx; in mid_spi_dma_init()
58 dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, rx); in mid_spi_dma_init()
59 if (!dws->rxchan) in mid_spi_dma_init()
61 dws->master->dma_rx = dws->rxchan; in mid_spi_dma_init()
65 dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, tx); in mid_spi_dma_init()
66 if (!dws->txchan) in mid_spi_dma_init()
68 dws->master->dma_tx = dws->txchan; in mid_spi_dma_init()
70 dws->dma_inited = 1; in mid_spi_dma_init()
74 dma_release_channel(dws->rxchan); in mid_spi_dma_init()
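
The fragments above belong to the DMA channel setup path. Below is a minimal sketch of how they plausibly fit together, assuming the standard dmaengine request API; the PCI lookup of the DMA controller (device ID 0x0827), the dw_dma_slave dma_dev assignments, the error labels and the return codes are assumptions filled in around the quoted lines.

static int mid_spi_dma_init(struct dw_spi *dws)
{
	struct dw_dma_slave *tx = dws->dma_tx;
	struct dw_dma_slave *rx = dws->dma_rx;
	struct pci_dev *dma_dev;
	dma_cap_mask_t mask;

	/* Assumed: the DMA controller is found by its PCI ID */
	dma_dev = pci_get_device(PCI_VENDOR_ID_INTEL, 0x0827, NULL);
	if (!dma_dev)
		return -ENODEV;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Request the RX channel first, then the TX channel */
	rx->dma_dev = &dma_dev->dev;
	dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, rx);
	if (!dws->rxchan)
		goto err_exit;
	dws->master->dma_rx = dws->rxchan;

	tx->dma_dev = &dma_dev->dev;
	dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, tx);
	if (!dws->txchan)
		goto free_rxchan;
	dws->master->dma_tx = dws->txchan;

	dws->dma_inited = 1;
	return 0;

free_rxchan:
	dma_release_channel(dws->rxchan);
err_exit:
	return -EBUSY;
}
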
79 static void mid_spi_dma_exit(struct dw_spi *dws) in mid_spi_dma_exit() argument
81 if (!dws->dma_inited) in mid_spi_dma_exit()
84 dmaengine_terminate_sync(dws->txchan); in mid_spi_dma_exit()
85 dma_release_channel(dws->txchan); in mid_spi_dma_exit()
87 dmaengine_terminate_sync(dws->rxchan); in mid_spi_dma_exit()
88 dma_release_channel(dws->rxchan); in mid_spi_dma_exit()
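
Read together, the exit path simply tears down what the init path set up, TX channel first. Only the early return and the braces are not visible in the listing; a minimal sketch:

static void mid_spi_dma_exit(struct dw_spi *dws)
{
	if (!dws->dma_inited)
		return;

	/* Quiesce and release both channels */
	dmaengine_terminate_sync(dws->txchan);
	dma_release_channel(dws->txchan);

	dmaengine_terminate_sync(dws->rxchan);
	dma_release_channel(dws->rxchan);
}
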
91 static irqreturn_t dma_transfer(struct dw_spi *dws) in dma_transfer() argument
93 u16 irq_status = dw_readl(dws, DW_SPI_ISR); in dma_transfer()
98 dw_readl(dws, DW_SPI_ICR); in dma_transfer()
99 spi_reset_chip(dws); in dma_transfer()
101 dev_err(&dws->master->dev, "%s: FIFO overrun/underrun\n", __func__); in dma_transfer()
102 dws->master->cur_msg->status = -EIO; in dma_transfer()
103 spi_finalize_current_transfer(dws->master); in dma_transfer()
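
In DMA mode the interrupt handler only has to deal with FIFO error conditions: it clears the interrupt, resets the controller and fails the current message. A sketch of how the quoted lines fit together; the early IRQ_NONE exit and the IRQ_HANDLED return are assumptions.

static irqreturn_t dma_transfer(struct dw_spi *dws)
{
	u16 irq_status = dw_readl(dws, DW_SPI_ISR);

	if (!irq_status)
		return IRQ_NONE;

	/* Reading ICR clears the interrupt, then reset the controller */
	dw_readl(dws, DW_SPI_ICR);
	spi_reset_chip(dws);

	dev_err(&dws->master->dev, "%s: FIFO overrun/underrun\n", __func__);
	dws->master->cur_msg->status = -EIO;
	spi_finalize_current_transfer(dws->master);
	return IRQ_HANDLED;
}
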
110 struct dw_spi *dws = spi_controller_get_devdata(master); in mid_spi_can_dma() local
112 if (!dws->dma_inited) in mid_spi_can_dma()
115 return xfer->len > dws->fifo_len; in mid_spi_can_dma()
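
The can_dma hook decides per transfer whether DMA is worth using: only when DMA is initialised and the payload does not fit in the FIFO. A sketch, assuming the usual spi_controller can_dma prototype:

static bool mid_spi_can_dma(struct spi_controller *master,
			    struct spi_device *spi, struct spi_transfer *xfer)
{
	struct dw_spi *dws = spi_controller_get_devdata(master);

	if (!dws->dma_inited)
		return false;

	/* DMA only pays off when the transfer exceeds the FIFO depth */
	return xfer->len > dws->fifo_len;
}
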
133 struct dw_spi *dws = arg; in dw_spi_dma_tx_done() local
135 clear_bit(TX_BUSY, &dws->dma_chan_busy); in dw_spi_dma_tx_done()
136 if (test_bit(RX_BUSY, &dws->dma_chan_busy)) in dw_spi_dma_tx_done()
138 spi_finalize_current_transfer(dws->master); in dw_spi_dma_tx_done()
141 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws, in dw_spi_dma_prepare_tx() argument
151 txconf.dst_addr = dws->dma_addr; in dw_spi_dma_prepare_tx()
154 txconf.dst_addr_width = convert_dma_width(dws->dma_width); in dw_spi_dma_prepare_tx()
157 dmaengine_slave_config(dws->txchan, &txconf); in dw_spi_dma_prepare_tx()
159 txdesc = dmaengine_prep_slave_sg(dws->txchan, in dw_spi_dma_prepare_tx()
168 txdesc->callback_param = dws; in dw_spi_dma_prepare_tx()
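
The TX side pairs a completion callback with a descriptor-preparation helper: dw_spi_dma_tx_done() clears TX_BUSY and finalises the message once RX is idle too, while the prepare helper programs the slave config and hooks that callback into the descriptor. A sketch of the prepare routine, assuming dmaengine_prep_slave_sg() over the transfer's tx_sg table; the burst size, bus-width default and the NULL check on tx_buf are assumptions.

static struct dma_async_tx_descriptor *
dw_spi_dma_prepare_tx(struct dw_spi *dws, struct spi_transfer *xfer)
{
	struct dma_slave_config txconf;
	struct dma_async_tx_descriptor *txdesc;

	if (!xfer->tx_buf)
		return NULL;

	memset(&txconf, 0, sizeof(txconf));
	txconf.direction = DMA_MEM_TO_DEV;
	txconf.dst_addr = dws->dma_addr;
	txconf.dst_maxburst = 16;			/* assumed burst size */
	txconf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	txconf.dst_addr_width = convert_dma_width(dws->dma_width);
	txconf.device_fc = false;

	dmaengine_slave_config(dws->txchan, &txconf);

	txdesc = dmaengine_prep_slave_sg(dws->txchan,
					 xfer->tx_sg.sgl, xfer->tx_sg.nents,
					 DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc)
		return NULL;

	/* dw_spi_dma_tx_done() clears TX_BUSY and finalises the transfer */
	txdesc->callback = dw_spi_dma_tx_done;
	txdesc->callback_param = dws;

	return txdesc;
}
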
179 struct dw_spi *dws = arg; in dw_spi_dma_rx_done() local
181 clear_bit(RX_BUSY, &dws->dma_chan_busy); in dw_spi_dma_rx_done()
182 if (test_bit(TX_BUSY, &dws->dma_chan_busy)) in dw_spi_dma_rx_done()
184 spi_finalize_current_transfer(dws->master); in dw_spi_dma_rx_done()
187 static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws, in dw_spi_dma_prepare_rx() argument
197 rxconf.src_addr = dws->dma_addr; in dw_spi_dma_prepare_rx()
200 rxconf.src_addr_width = convert_dma_width(dws->dma_width); in dw_spi_dma_prepare_rx()
203 dmaengine_slave_config(dws->rxchan, &rxconf); in dw_spi_dma_prepare_rx()
205 rxdesc = dmaengine_prep_slave_sg(dws->rxchan, in dw_spi_dma_prepare_rx()
214 rxdesc->callback_param = dws; in dw_spi_dma_prepare_rx()
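
The RX side mirrors the TX side: dw_spi_dma_rx_done() clears RX_BUSY and finalises the transfer once TX has also completed, and the prepare helper differs only in direction and in programming src_addr/src_addr_width. A sketch with the same assumptions as the TX variant:

static struct dma_async_tx_descriptor *
dw_spi_dma_prepare_rx(struct dw_spi *dws, struct spi_transfer *xfer)
{
	struct dma_slave_config rxconf;
	struct dma_async_tx_descriptor *rxdesc;

	if (!xfer->rx_buf)
		return NULL;

	memset(&rxconf, 0, sizeof(rxconf));
	rxconf.direction = DMA_DEV_TO_MEM;
	rxconf.src_addr = dws->dma_addr;
	rxconf.src_maxburst = 16;			/* assumed burst size */
	rxconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	rxconf.src_addr_width = convert_dma_width(dws->dma_width);
	rxconf.device_fc = false;

	dmaengine_slave_config(dws->rxchan, &rxconf);

	rxdesc = dmaengine_prep_slave_sg(dws->rxchan,
					 xfer->rx_sg.sgl, xfer->rx_sg.nents,
					 DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!rxdesc)
		return NULL;

	/* dw_spi_dma_rx_done() clears RX_BUSY and finalises the transfer */
	rxdesc->callback = dw_spi_dma_rx_done;
	rxdesc->callback_param = dws;

	return rxdesc;
}
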
219 static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer) in mid_spi_dma_setup() argument
223 dw_writel(dws, DW_SPI_DMARDLR, 0xf); in mid_spi_dma_setup()
224 dw_writel(dws, DW_SPI_DMATDLR, 0x10); in mid_spi_dma_setup()
230 dw_writel(dws, DW_SPI_DMACR, dma_ctrl); in mid_spi_dma_setup()
233 spi_umask_intr(dws, SPI_INT_TXOI | SPI_INT_RXUI | SPI_INT_RXOI); in mid_spi_dma_setup()
235 dws->transfer_handler = dma_transfer; in mid_spi_dma_setup()
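
Setup programs the controller-side DMA thresholds (request RX service at 16 FIFO entries, TX service at 16 free slots), enables the DMA handshake, unmasks only the FIFO error interrupts and points the IRQ path at dma_transfer(). A sketch; the SPI_DMA_TDMAE/SPI_DMA_RDMAE enable bits in DMACR are assumptions not visible in the listing.

static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
{
	u16 dma_ctrl = 0;

	/* RX request at 16 FIFO entries, TX request at 16 free slots */
	dw_writel(dws, DW_SPI_DMARDLR, 0xf);
	dw_writel(dws, DW_SPI_DMATDLR, 0x10);

	if (xfer->tx_buf)
		dma_ctrl |= SPI_DMA_TDMAE;	/* assumed TX handshake enable bit */
	if (xfer->rx_buf)
		dma_ctrl |= SPI_DMA_RDMAE;	/* assumed RX handshake enable bit */
	dw_writel(dws, DW_SPI_DMACR, dma_ctrl);

	/* Only FIFO over/underrun interrupts matter in DMA mode */
	spi_umask_intr(dws, SPI_INT_TXOI | SPI_INT_RXUI | SPI_INT_RXOI);

	dws->transfer_handler = dma_transfer;

	return 0;
}
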
240 static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer) in mid_spi_dma_transfer() argument
245 txdesc = dw_spi_dma_prepare_tx(dws, xfer); in mid_spi_dma_transfer()
248 rxdesc = dw_spi_dma_prepare_rx(dws, xfer); in mid_spi_dma_transfer()
252 set_bit(RX_BUSY, &dws->dma_chan_busy); in mid_spi_dma_transfer()
254 dma_async_issue_pending(dws->rxchan); in mid_spi_dma_transfer()
258 set_bit(TX_BUSY, &dws->dma_chan_busy); in mid_spi_dma_transfer()
260 dma_async_issue_pending(dws->txchan); in mid_spi_dma_transfer()
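
The transfer routine prepares both descriptors, then submits and issues RX before TX so the receiver is armed before the first word is pushed out. A sketch; the dmaengine_submit() calls, the NULL-descriptor guards and the return value are assumptions around the quoted lines.

static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *txdesc, *rxdesc;

	txdesc = dw_spi_dma_prepare_tx(dws, xfer);
	rxdesc = dw_spi_dma_prepare_rx(dws, xfer);

	/* RX must be started before TX so no incoming word is lost */
	if (rxdesc) {
		set_bit(RX_BUSY, &dws->dma_chan_busy);
		dmaengine_submit(rxdesc);
		dma_async_issue_pending(dws->rxchan);
	}

	if (txdesc) {
		set_bit(TX_BUSY, &dws->dma_chan_busy);
		dmaengine_submit(txdesc);
		dma_async_issue_pending(dws->txchan);
	}

	return 0;
}
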
266 static void mid_spi_dma_stop(struct dw_spi *dws) in mid_spi_dma_stop() argument
268 if (test_bit(TX_BUSY, &dws->dma_chan_busy)) { in mid_spi_dma_stop()
269 dmaengine_terminate_sync(dws->txchan); in mid_spi_dma_stop()
270 clear_bit(TX_BUSY, &dws->dma_chan_busy); in mid_spi_dma_stop()
272 if (test_bit(RX_BUSY, &dws->dma_chan_busy)) { in mid_spi_dma_stop()
273 dmaengine_terminate_sync(dws->rxchan); in mid_spi_dma_stop()
274 clear_bit(RX_BUSY, &dws->dma_chan_busy); in mid_spi_dma_stop()
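
Stop only terminates channels that are actually marked busy and clears the corresponding bit afterwards, so a later transfer starts from a clean state. The quoted lines already form essentially the whole body:

static void mid_spi_dma_stop(struct dw_spi *dws)
{
	if (test_bit(TX_BUSY, &dws->dma_chan_busy)) {
		dmaengine_terminate_sync(dws->txchan);
		clear_bit(TX_BUSY, &dws->dma_chan_busy);
	}

	if (test_bit(RX_BUSY, &dws->dma_chan_busy)) {
		dmaengine_terminate_sync(dws->rxchan);
		clear_bit(RX_BUSY, &dws->dma_chan_busy);
	}
}
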
299 int dw_spi_mid_init(struct dw_spi *dws) in dw_spi_mid_init() argument
309 clk_cdiv = readl(clk_reg + dws->bus_num * sizeof(u32)); in dw_spi_mid_init()
312 dws->max_freq = MRST_SPI_CLK_BASE / (clk_cdiv + 1); in dw_spi_mid_init()
317 dws->dma_tx = &mid_dma_tx; in dw_spi_mid_init()
318 dws->dma_rx = &mid_dma_rx; in dw_spi_mid_init()
319 dws->dma_ops = &mid_dma_ops; in dw_spi_mid_init()
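
Platform init reads the per-bus clock divider from a memory-mapped clock register, derives max_freq from the base SPI clock and wires up the DMA slave parameters and ops. A sketch; the register base (MRST_CLK_SPI_REG), the CLK_SPI_CDIV mask/shift and the ioremap()/iounmap() pair are assumptions not shown in the listing.

int dw_spi_mid_init(struct dw_spi *dws)
{
	void __iomem *clk_reg;
	u32 clk_cdiv;

	/* Assumed: MRST_CLK_SPI_REG is the SoC clock-control register base */
	clk_reg = ioremap(MRST_CLK_SPI_REG, 16);
	if (!clk_reg)
		return -ENOMEM;

	/* One 32-bit divider word per SPI bus */
	clk_cdiv = readl(clk_reg + dws->bus_num * sizeof(u32));
	clk_cdiv &= CLK_SPI_CDIV_MASK;		/* assumed divider mask */
	clk_cdiv >>= CLK_SPI_CDIV_OFFSET;	/* assumed divider shift */
	dws->max_freq = MRST_SPI_CLK_BASE / (clk_cdiv + 1);

	iounmap(clk_reg);

	dws->dma_tx = &mid_dma_tx;
	dws->dma_rx = &mid_dma_rx;
	dws->dma_ops = &mid_dma_ops;

	return 0;
}
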