Lines Matching +full:tx +full:- +full:level +full:- +full:trig
1 // SPDX-License-Identifier: GPL-2.0-or-later
5 * Copyright (C) 2008-2012 ST-Ericsson AB
11 * linux-2.6.17-rc3-mm1/drivers/spi/pxa2xx_spi.c
31 #include <linux/dma-mapping.h>
92 * SSP Control Register 0 - SSP_CR0
110 * SSP Control Register 1 - SSP_CR1
130 * SSP Status Register - SSP_SR
139 * SSP Clock Prescale Register - SSP_CPSR
144 * SSP Interrupt Mask Set/Clear Register - SSP_IMSC
152 * SSP Raw Interrupt Status Register - SSP_RIS
164 * SSP Masked Interrupt Status Register - SSP_MIS
176 * SSP Interrupt Clear Register - SSP_ICR
184 * SSP DMA Control Register - SSP_DMACR
192 * SSP Chip Select Control Register - SSP_CSR
198 * SSP Integration Test Control Register - SSP_ITCR
204 * SSP Integration Test Input Register - SSP_ITIP
214 * SSP Integration Test Output Register - SSP_ITOP
232 * SSP Test Data Register - SSP_TDR
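The register comments above are only the matched header lines; their offsets are elided by the search. As a reference sketch, the standard PL022 offsets from the ARM PL022 TRM are reproduced below (the chip-select, integration-test and test-data registers named above are vendor extensions at higher, variant-specific offsets and are left out):

/* Reference sketch of the standard PL022 register map per the ARM PL022 TRM
 * (not copied from the elided driver lines); r is the remapped base address. */
#define SSP_CR0(r)	((r) + 0x000)	/* control register 0 */
#define SSP_CR1(r)	((r) + 0x004)	/* control register 1 */
#define SSP_DR(r)	((r) + 0x008)	/* data register (TX/RX FIFO window) */
#define SSP_SR(r)	((r) + 0x00C)	/* status register */
#define SSP_CPSR(r)	((r) + 0x010)	/* clock prescale register */
#define SSP_IMSC(r)	((r) + 0x014)	/* interrupt mask set/clear */
#define SSP_RIS(r)	((r) + 0x018)	/* raw interrupt status */
#define SSP_MIS(r)	((r) + 0x01C)	/* masked interrupt status */
#define SSP_ICR(r)	((r) + 0x020)	/* interrupt clear */
#define SSP_DMACR(r)	((r) + 0x024)	/* DMA control */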
245 #define STATE_ERROR ((void *) -1)
246 #define STATE_TIMEOUT ((void *) -2)
249 * SSP State - Whether Enabled or Disabled
255 * SSP DMA State - Whether DMA Enabled or Disabled
311 * struct vendor_data - vendor-specific config parameters
333 * struct pl022 - This is the private SSP driver data structure
340 * @host_info: controller-specific data from machine setup
349 * @tx: current position in TX buffer to be read
350 * @tx_end: end position in TX buffer to be read
355 * @exp_fifo_level: expected FIFO level
356 * @rx_lev_trig: receive FIFO watermark level which triggers IRQ
357 * @tx_lev_trig: transmit FIFO watermark level which triggers IRQ
359 * @dma_tx_channel: optional channel for TX DMA
361 * @sgt_tx: scattertable for the TX transfer
375 /* Message per-transfer pump */
381 void *tx;
404 * struct chip_data - To maintain runtime state of SSP for each client chip
405 * @cr0: Value of control register CR0 of SSP - on later ST variants this
432 * internal_cs_control - Control chip select signals via SSP_CSR.
444 tmp = readw(SSP_CSR(pl022->virtbase)); in internal_cs_control()
446 tmp &= ~BIT(pl022->cur_cs); in internal_cs_control()
448 tmp |= BIT(pl022->cur_cs); in internal_cs_control()
449 writew(tmp, SSP_CSR(pl022->virtbase)); in internal_cs_control()
454 if (pl022->vendor->internal_cs_ctrl) in pl022_cs_control()
456 else if (pl022->cur_gpiod) in pl022_cs_control()
465 gpiod_set_value(pl022->cur_gpiod, !command); in pl022_cs_control()
469 * giveback - current spi_message is over, schedule next message and call
471 * set message->status; dma and pio irqs are blocked
477 pl022->next_msg_cs_active = false; in giveback()
479 last_transfer = list_last_entry(&pl022->cur_msg->transfers, in giveback()
489 if (!last_transfer->cs_change) { in giveback()
498 * after calling msg->complete (below) the driver that in giveback()
503 next_msg = spi_get_next_queued_message(pl022->host); in giveback()
509 if (next_msg && next_msg->spi != pl022->cur_msg->spi) in giveback()
511 if (!next_msg || pl022->cur_msg->state == STATE_ERROR) in giveback()
514 pl022->next_msg_cs_active = true; in giveback()
518 pl022->cur_msg = NULL; in giveback()
519 pl022->cur_transfer = NULL; in giveback()
520 pl022->cur_chip = NULL; in giveback()
523 writew((readw(SSP_CR1(pl022->virtbase)) & in giveback()
524 (~SSP_CR1_MASK_SSE)), SSP_CR1(pl022->virtbase)); in giveback()
526 spi_finalize_current_message(pl022->host); in giveback()
530 * flush - flush the FIFO to reach a clean state
537 dev_dbg(&pl022->adev->dev, "flush\n"); in flush()
539 while (readw(SSP_SR(pl022->virtbase)) & SSP_SR_MASK_RNE) in flush()
540 readw(SSP_DR(pl022->virtbase)); in flush()
541 } while ((readw(SSP_SR(pl022->virtbase)) & SSP_SR_MASK_BSY) && limit--); in flush()
543 pl022->exp_fifo_level = 0; in flush()
549 * restore_state - Load configuration of current chip
554 struct chip_data *chip = pl022->cur_chip; in restore_state()
556 if (pl022->vendor->extended_cr) in restore_state()
557 writel(chip->cr0, SSP_CR0(pl022->virtbase)); in restore_state()
559 writew(chip->cr0, SSP_CR0(pl022->virtbase)); in restore_state()
560 writew(chip->cr1, SSP_CR1(pl022->virtbase)); in restore_state()
561 writew(chip->dmacr, SSP_DMACR(pl022->virtbase)); in restore_state()
562 writew(chip->cpsr, SSP_CPSR(pl022->virtbase)); in restore_state()
563 writew(DISABLE_ALL_INTERRUPTS, SSP_IMSC(pl022->virtbase)); in restore_state()
564 writew(CLEAR_ALL_INTERRUPTS, SSP_ICR(pl022->virtbase)); in restore_state()
639 * load_ssp_default_config - Load default configuration for SSP
644 if (pl022->vendor->pl023) { in load_ssp_default_config()
645 writel(DEFAULT_SSP_REG_CR0_ST_PL023, SSP_CR0(pl022->virtbase)); in load_ssp_default_config()
646 writew(DEFAULT_SSP_REG_CR1_ST_PL023, SSP_CR1(pl022->virtbase)); in load_ssp_default_config()
647 } else if (pl022->vendor->extended_cr) { in load_ssp_default_config()
648 writel(DEFAULT_SSP_REG_CR0_ST, SSP_CR0(pl022->virtbase)); in load_ssp_default_config()
649 writew(DEFAULT_SSP_REG_CR1_ST, SSP_CR1(pl022->virtbase)); in load_ssp_default_config()
651 writew(DEFAULT_SSP_REG_CR0, SSP_CR0(pl022->virtbase)); in load_ssp_default_config()
652 writew(DEFAULT_SSP_REG_CR1, SSP_CR1(pl022->virtbase)); in load_ssp_default_config()
654 writew(DEFAULT_SSP_REG_DMACR, SSP_DMACR(pl022->virtbase)); in load_ssp_default_config()
655 writew(DEFAULT_SSP_REG_CPSR, SSP_CPSR(pl022->virtbase)); in load_ssp_default_config()
656 writew(DISABLE_ALL_INTERRUPTS, SSP_IMSC(pl022->virtbase)); in load_ssp_default_config()
657 writew(CLEAR_ALL_INTERRUPTS, SSP_ICR(pl022->virtbase)); in load_ssp_default_config()
661 * This will write to TX and read from RX according to the parameters
673 * To prevent this issue, the TX FIFO is only filled to the in readwriter()
674 * unused RX FIFO fill length, regardless of what the TX in readwriter()
677 dev_dbg(&pl022->adev->dev, in readwriter()
678 "%s, rx: %p, rxend: %p, tx: %p, txend: %p\n", in readwriter()
679 __func__, pl022->rx, pl022->rx_end, pl022->tx, pl022->tx_end); in readwriter()
682 while ((readw(SSP_SR(pl022->virtbase)) & SSP_SR_MASK_RNE) in readwriter()
683 && (pl022->rx < pl022->rx_end)) { in readwriter()
684 switch (pl022->read) { in readwriter()
686 readw(SSP_DR(pl022->virtbase)); in readwriter()
689 *(u8 *) (pl022->rx) = in readwriter()
690 readw(SSP_DR(pl022->virtbase)) & 0xFFU; in readwriter()
693 *(u16 *) (pl022->rx) = in readwriter()
694 (u16) readw(SSP_DR(pl022->virtbase)); in readwriter()
697 *(u32 *) (pl022->rx) = in readwriter()
698 readl(SSP_DR(pl022->virtbase)); in readwriter()
701 pl022->rx += (pl022->cur_chip->n_bytes); in readwriter()
702 pl022->exp_fifo_level--; in readwriter()
707 while ((pl022->exp_fifo_level < pl022->vendor->fifodepth) in readwriter()
708 && (pl022->tx < pl022->tx_end)) { in readwriter()
709 switch (pl022->write) { in readwriter()
711 writew(0x0, SSP_DR(pl022->virtbase)); in readwriter()
714 writew(*(u8 *) (pl022->tx), SSP_DR(pl022->virtbase)); in readwriter()
717 writew((*(u16 *) (pl022->tx)), SSP_DR(pl022->virtbase)); in readwriter()
720 writel(*(u32 *) (pl022->tx), SSP_DR(pl022->virtbase)); in readwriter()
723 pl022->tx += (pl022->cur_chip->n_bytes); in readwriter()
724 pl022->exp_fifo_level++; in readwriter()
728 * clock starts running when you put things into the TX FIFO, in readwriter()
731 while ((readw(SSP_SR(pl022->virtbase)) & SSP_SR_MASK_RNE) in readwriter()
732 && (pl022->rx < pl022->rx_end)) { in readwriter()
733 switch (pl022->read) { in readwriter()
735 readw(SSP_DR(pl022->virtbase)); in readwriter()
738 *(u8 *) (pl022->rx) = in readwriter()
739 readw(SSP_DR(pl022->virtbase)) & 0xFFU; in readwriter()
742 *(u16 *) (pl022->rx) = in readwriter()
743 (u16) readw(SSP_DR(pl022->virtbase)); in readwriter()
746 *(u32 *) (pl022->rx) = in readwriter()
747 readl(SSP_DR(pl022->virtbase)); in readwriter()
750 pl022->rx += (pl022->cur_chip->n_bytes); in readwriter()
751 pl022->exp_fifo_level--; in readwriter()
755 * When we exit here the TX FIFO should be full and the RX FIFO in readwriter()
761 * next_transfer - Move to the Next transfer in the current spi message
771 struct spi_message *msg = pl022->cur_msg; in next_transfer()
772 struct spi_transfer *trans = pl022->cur_transfer; in next_transfer()
775 if (trans->transfer_list.next != &msg->transfers) { in next_transfer()
776 pl022->cur_transfer = in next_transfer()
777 list_entry(trans->transfer_list.next, in next_transfer()
792 dma_unmap_sg(pl022->dma_tx_channel->device->dev, pl022->sgt_tx.sgl, in unmap_free_dma_scatter()
793 pl022->sgt_tx.nents, DMA_TO_DEVICE); in unmap_free_dma_scatter()
794 dma_unmap_sg(pl022->dma_rx_channel->device->dev, pl022->sgt_rx.sgl, in unmap_free_dma_scatter()
795 pl022->sgt_rx.nents, DMA_FROM_DEVICE); in unmap_free_dma_scatter()
796 sg_free_table(&pl022->sgt_rx); in unmap_free_dma_scatter()
797 sg_free_table(&pl022->sgt_tx); in unmap_free_dma_scatter()
803 struct spi_message *msg = pl022->cur_msg; in dma_callback()
805 BUG_ON(!pl022->sgt_rx.sgl); in dma_callback()
818 dma_sync_sg_for_cpu(&pl022->adev->dev, in dma_callback()
819 pl022->sgt_rx.sgl, in dma_callback()
820 pl022->sgt_rx.nents, in dma_callback()
823 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) { in dma_callback()
824 dev_dbg(&pl022->adev->dev, "SPI RX SG ENTRY: %d", i); in dma_callback()
833 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) { in dma_callback()
834 dev_dbg(&pl022->adev->dev, "SPI TX SG ENTRY: %d", i); in dma_callback()
835 print_hex_dump(KERN_ERR, "SPI TX: ", in dma_callback()
849 msg->actual_length += pl022->cur_transfer->len; in dma_callback()
851 msg->state = next_transfer(pl022); in dma_callback()
852 if (msg->state != STATE_DONE && pl022->cur_transfer->cs_change) in dma_callback()
854 tasklet_schedule(&pl022->pump_transfers); in dma_callback()
869 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
876 if (bytesleft < (PAGE_SIZE - offset_in_page(bufp))) in setup_dma_scatter()
879 mapbytes = PAGE_SIZE - offset_in_page(bufp); in setup_dma_scatter()
883 bytesleft -= mapbytes; in setup_dma_scatter()
884 dev_dbg(&pl022->adev->dev, in setup_dma_scatter()
885 "set RX/TX target page @ %p, %d bytes, %d left\n", in setup_dma_scatter()
890 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
895 sg_set_page(sg, virt_to_page(pl022->dummypage), in setup_dma_scatter()
897 bytesleft -= mapbytes; in setup_dma_scatter()
898 dev_dbg(&pl022->adev->dev, in setup_dma_scatter()
899 "set RX/TX to dummy page %d bytes, %d left\n", in setup_dma_scatter()
908 * configure_dma - configures the channels for the next transfer
914 .src_addr = SSP_DR(pl022->phybase), in configure_dma()
919 .dst_addr = SSP_DR(pl022->phybase), in configure_dma()
926 struct dma_chan *rxchan = pl022->dma_rx_channel; in configure_dma()
927 struct dma_chan *txchan = pl022->dma_tx_channel; in configure_dma()
933 return -ENODEV; in configure_dma()
936 * If supplied, the DMA burstsize should equal the FIFO trigger level. in configure_dma()
937 * Notice that the DMA engine uses one-to-one mapping. Since we can in configure_dma()
941 switch (pl022->rx_lev_trig) { in configure_dma()
958 rx_conf.src_maxburst = pl022->vendor->fifodepth >> 1; in configure_dma()
962 switch (pl022->tx_lev_trig) { in configure_dma()
979 tx_conf.dst_maxburst = pl022->vendor->fifodepth >> 1; in configure_dma()
983 switch (pl022->read) { in configure_dma()
999 switch (pl022->write) { in configure_dma()
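The comment near the top of configure_dma() is what the two trigger-level switches below it implement: each FIFO watermark is translated into a matching dmaengine maxburst, so one DMA request moves exactly one watermark's worth of FIFO entries. A minimal sketch of the RX side, assuming the trigger-level enum names (SSP_RX_1_OR_MORE_ELEM and friends) from include/linux/amba/pl022.h:

/* Sketch only: tie the RX burst size to the FIFO watermark that raises the
 * DMA request; the enum names are assumed from include/linux/amba/pl022.h. */
struct dma_slave_config rx_conf = {
	.src_addr	= SSP_DR(pl022->phybase),	/* read bursts from the data register */
	.direction	= DMA_DEV_TO_MEM,
	.device_fc	= false,
};

switch (pl022->rx_lev_trig) {
case SSP_RX_1_OR_MORE_ELEM:
	rx_conf.src_maxburst = 1;			/* one request per element */
	break;
case SSP_RX_4_OR_MORE_ELEM:
	rx_conf.src_maxburst = 4;			/* 4-element bursts */
	break;
case SSP_RX_8_OR_MORE_ELEM:
	rx_conf.src_maxburst = 8;
	break;
default:
	rx_conf.src_maxburst = pl022->vendor->fifodepth >> 1;
	break;
}
dmaengine_slave_config(pl022->dma_rx_channel, &rx_conf);

The TX side mirrors this with dst_addr/dst_maxburst, as the tx_conf lines above show.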
1026 pages = DIV_ROUND_UP(pl022->cur_transfer->len, PAGE_SIZE); in configure_dma()
1027 dev_dbg(&pl022->adev->dev, "using %d pages for transfer\n", pages); in configure_dma()
1029 ret = sg_alloc_table(&pl022->sgt_rx, pages, GFP_ATOMIC); in configure_dma()
1033 ret = sg_alloc_table(&pl022->sgt_tx, pages, GFP_ATOMIC); in configure_dma()
1037 /* Fill in the scatterlists for the RX+TX buffers */ in configure_dma()
1038 setup_dma_scatter(pl022, pl022->rx, in configure_dma()
1039 pl022->cur_transfer->len, &pl022->sgt_rx); in configure_dma()
1040 setup_dma_scatter(pl022, pl022->tx, in configure_dma()
1041 pl022->cur_transfer->len, &pl022->sgt_tx); in configure_dma()
1044 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl, in configure_dma()
1045 pl022->sgt_rx.nents, DMA_FROM_DEVICE); in configure_dma()
1049 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl, in configure_dma()
1050 pl022->sgt_tx.nents, DMA_TO_DEVICE); in configure_dma()
1056 pl022->sgt_rx.sgl, in configure_dma()
1064 pl022->sgt_tx.sgl, in configure_dma()
1072 rxdesc->callback = dma_callback; in configure_dma()
1073 rxdesc->callback_param = pl022; in configure_dma()
1075 /* Submit and fire RX and TX with TX last so we're ready to read! */ in configure_dma()
1080 pl022->dma_running = true; in configure_dma()
1088 dma_unmap_sg(txchan->device->dev, pl022->sgt_tx.sgl, in configure_dma()
1089 pl022->sgt_tx.nents, DMA_TO_DEVICE); in configure_dma()
1091 dma_unmap_sg(rxchan->device->dev, pl022->sgt_rx.sgl, in configure_dma()
1092 pl022->sgt_rx.nents, DMA_FROM_DEVICE); in configure_dma()
1094 sg_free_table(&pl022->sgt_tx); in configure_dma()
1096 sg_free_table(&pl022->sgt_rx); in configure_dma()
1098 return -ENOMEM; in configure_dma()
1109 * We need both RX and TX channels to do DMA, else do none in pl022_dma_probe()
1112 pl022->dma_rx_channel = dma_request_channel(mask, in pl022_dma_probe()
1113 pl022->host_info->dma_filter, in pl022_dma_probe()
1114 pl022->host_info->dma_rx_param); in pl022_dma_probe()
1115 if (!pl022->dma_rx_channel) { in pl022_dma_probe()
1116 dev_dbg(&pl022->adev->dev, "no RX DMA channel!\n"); in pl022_dma_probe()
1120 pl022->dma_tx_channel = dma_request_channel(mask, in pl022_dma_probe()
1121 pl022->host_info->dma_filter, in pl022_dma_probe()
1122 pl022->host_info->dma_tx_param); in pl022_dma_probe()
1123 if (!pl022->dma_tx_channel) { in pl022_dma_probe()
1124 dev_dbg(&pl022->adev->dev, "no TX DMA channel!\n"); in pl022_dma_probe()
1128 pl022->dummypage = kmalloc(PAGE_SIZE, GFP_KERNEL); in pl022_dma_probe()
1129 if (!pl022->dummypage) in pl022_dma_probe()
1132 dev_info(&pl022->adev->dev, "setup for DMA on RX %s, TX %s\n", in pl022_dma_probe()
1133 dma_chan_name(pl022->dma_rx_channel), in pl022_dma_probe()
1134 dma_chan_name(pl022->dma_tx_channel)); in pl022_dma_probe()
1139 dma_release_channel(pl022->dma_tx_channel); in pl022_dma_probe()
1141 dma_release_channel(pl022->dma_rx_channel); in pl022_dma_probe()
1142 pl022->dma_rx_channel = NULL; in pl022_dma_probe()
1144 dev_err(&pl022->adev->dev, in pl022_dma_probe()
1146 return -ENODEV; in pl022_dma_probe()
1151 struct device *dev = &pl022->adev->dev; in pl022_dma_autoprobe()
1162 pl022->dma_rx_channel = chan; in pl022_dma_autoprobe()
1164 chan = dma_request_chan(dev, "tx"); in pl022_dma_autoprobe()
1170 pl022->dma_tx_channel = chan; in pl022_dma_autoprobe()
1172 pl022->dummypage = kmalloc(PAGE_SIZE, GFP_KERNEL); in pl022_dma_autoprobe()
1173 if (!pl022->dummypage) { in pl022_dma_autoprobe()
1174 err = -ENOMEM; in pl022_dma_autoprobe()
1181 dma_release_channel(pl022->dma_tx_channel); in pl022_dma_autoprobe()
1182 pl022->dma_tx_channel = NULL; in pl022_dma_autoprobe()
1184 dma_release_channel(pl022->dma_rx_channel); in pl022_dma_autoprobe()
1185 pl022->dma_rx_channel = NULL; in pl022_dma_autoprobe()
1192 struct dma_chan *rxchan = pl022->dma_rx_channel; in terminate_dma()
1193 struct dma_chan *txchan = pl022->dma_tx_channel; in terminate_dma()
1198 pl022->dma_running = false; in terminate_dma()
1203 if (pl022->dma_running) in pl022_dma_remove()
1205 if (pl022->dma_tx_channel) in pl022_dma_remove()
1206 dma_release_channel(pl022->dma_tx_channel); in pl022_dma_remove()
1207 if (pl022->dma_rx_channel) in pl022_dma_remove()
1208 dma_release_channel(pl022->dma_rx_channel); in pl022_dma_remove()
1209 kfree(pl022->dummypage); in pl022_dma_remove()
1215 return -ENODEV; in configure_dma()
1234 * pl022_interrupt_handler - Interrupt handler for SSP controller
1243 * more data, and writes data in TX FIFO till it is not full. If we complete
1249 struct spi_message *msg = pl022->cur_msg; in pl022_interrupt_handler()
1253 dev_err(&pl022->adev->dev, in pl022_interrupt_handler()
1260 irq_status = readw(SSP_MIS(pl022->virtbase)); in pl022_interrupt_handler()
1272 * Overrun interrupt - bail out since our Data has been in pl022_interrupt_handler()
1275 dev_err(&pl022->adev->dev, "FIFO overrun\n"); in pl022_interrupt_handler()
1276 if (readw(SSP_SR(pl022->virtbase)) & SSP_SR_MASK_RFF) in pl022_interrupt_handler()
1277 dev_err(&pl022->adev->dev, in pl022_interrupt_handler()
1286 SSP_IMSC(pl022->virtbase)); in pl022_interrupt_handler()
1287 writew(CLEAR_ALL_INTERRUPTS, SSP_ICR(pl022->virtbase)); in pl022_interrupt_handler()
1288 writew((readw(SSP_CR1(pl022->virtbase)) & in pl022_interrupt_handler()
1289 (~SSP_CR1_MASK_SSE)), SSP_CR1(pl022->virtbase)); in pl022_interrupt_handler()
1290 msg->state = STATE_ERROR; in pl022_interrupt_handler()
1293 tasklet_schedule(&pl022->pump_transfers); in pl022_interrupt_handler()
1299 if (pl022->tx == pl022->tx_end) { in pl022_interrupt_handler()
1301 writew((readw(SSP_IMSC(pl022->virtbase)) & in pl022_interrupt_handler()
1303 SSP_IMSC(pl022->virtbase)); in pl022_interrupt_handler()
1309 * At this point, all TX will always be finished. in pl022_interrupt_handler()
1311 if (pl022->rx >= pl022->rx_end) { in pl022_interrupt_handler()
1313 SSP_IMSC(pl022->virtbase)); in pl022_interrupt_handler()
1314 writew(CLEAR_ALL_INTERRUPTS, SSP_ICR(pl022->virtbase)); in pl022_interrupt_handler()
1315 if (unlikely(pl022->rx > pl022->rx_end)) { in pl022_interrupt_handler()
1316 dev_warn(&pl022->adev->dev, "read %u surplus " in pl022_interrupt_handler()
1319 (u32) (pl022->rx - pl022->rx_end)); in pl022_interrupt_handler()
1322 msg->actual_length += pl022->cur_transfer->len; in pl022_interrupt_handler()
1324 msg->state = next_transfer(pl022); in pl022_interrupt_handler()
1325 if (msg->state != STATE_DONE && pl022->cur_transfer->cs_change) in pl022_interrupt_handler()
1327 tasklet_schedule(&pl022->pump_transfers); in pl022_interrupt_handler()
1344 residue = pl022->cur_transfer->len % pl022->cur_chip->n_bytes; in set_up_next_transfer()
1346 dev_err(&pl022->adev->dev, in set_up_next_transfer()
1349 pl022->cur_transfer->len, in set_up_next_transfer()
1350 pl022->cur_chip->n_bytes); in set_up_next_transfer()
1351 dev_err(&pl022->adev->dev, "skipping this message\n"); in set_up_next_transfer()
1352 return -EIO; in set_up_next_transfer()
1354 pl022->tx = (void *)transfer->tx_buf; in set_up_next_transfer()
1355 pl022->tx_end = pl022->tx + pl022->cur_transfer->len; in set_up_next_transfer()
1356 pl022->rx = (void *)transfer->rx_buf; in set_up_next_transfer()
1357 pl022->rx_end = pl022->rx + pl022->cur_transfer->len; in set_up_next_transfer()
1358 pl022->write = in set_up_next_transfer()
1359 pl022->tx ? pl022->cur_chip->write : WRITING_NULL; in set_up_next_transfer()
1360 pl022->read = pl022->rx ? pl022->cur_chip->read : READING_NULL; in set_up_next_transfer()
1365 * pump_transfers - Tasklet function which schedules next transfer
1378 message = pl022->cur_msg; in pump_transfers()
1379 transfer = pl022->cur_transfer; in pump_transfers()
1382 if (message->state == STATE_ERROR) { in pump_transfers()
1383 message->status = -EIO; in pump_transfers()
1389 if (message->state == STATE_DONE) { in pump_transfers()
1390 message->status = 0; in pump_transfers()
1396 if (message->state == STATE_RUNNING) { in pump_transfers()
1397 previous = list_entry(transfer->transfer_list.prev, in pump_transfers()
1407 if (previous->cs_change) in pump_transfers()
1411 message->state = STATE_RUNNING; in pump_transfers()
1415 message->state = STATE_ERROR; in pump_transfers()
1416 message->status = -EIO; in pump_transfers()
1423 if (pl022->cur_chip->enable_dma) { in pump_transfers()
1425 dev_dbg(&pl022->adev->dev, in pump_transfers()
1434 writew(ENABLE_ALL_INTERRUPTS & ~SSP_IMSC_MASK_RXIM, SSP_IMSC(pl022->virtbase)); in pump_transfers()
1440 * Default is to enable all interrupts except RX - in do_interrupt_dma_transfer()
1441 * this will be enabled once TX is complete in do_interrupt_dma_transfer()
1446 if (!pl022->next_msg_cs_active) in do_interrupt_dma_transfer()
1449 if (set_up_next_transfer(pl022, pl022->cur_transfer)) { in do_interrupt_dma_transfer()
1451 pl022->cur_msg->state = STATE_ERROR; in do_interrupt_dma_transfer()
1452 pl022->cur_msg->status = -EIO; in do_interrupt_dma_transfer()
1457 if (pl022->cur_chip->enable_dma) { in do_interrupt_dma_transfer()
1460 dev_dbg(&pl022->adev->dev, in do_interrupt_dma_transfer()
1469 writew((readw(SSP_CR1(pl022->virtbase)) | SSP_CR1_MASK_SSE), in do_interrupt_dma_transfer()
1470 SSP_CR1(pl022->virtbase)); in do_interrupt_dma_transfer()
1471 writew(irqflags, SSP_IMSC(pl022->virtbase)); in do_interrupt_dma_transfer()
1479 if (pl022->vendor->extended_cr) in print_current_status()
1480 read_cr0 = readl(SSP_CR0(pl022->virtbase)); in print_current_status()
1482 read_cr0 = readw(SSP_CR0(pl022->virtbase)); in print_current_status()
1483 read_cr1 = readw(SSP_CR1(pl022->virtbase)); in print_current_status()
1484 read_dmacr = readw(SSP_DMACR(pl022->virtbase)); in print_current_status()
1485 read_sr = readw(SSP_SR(pl022->virtbase)); in print_current_status()
1487 dev_warn(&pl022->adev->dev, "spi-pl022 CR0: %x\n", read_cr0); in print_current_status()
1488 dev_warn(&pl022->adev->dev, "spi-pl022 CR1: %x\n", read_cr1); in print_current_status()
1489 dev_warn(&pl022->adev->dev, "spi-pl022 DMACR: %x\n", read_dmacr); in print_current_status()
1490 dev_warn(&pl022->adev->dev, "spi-pl022 SR: %x\n", read_sr); in print_current_status()
1491 dev_warn(&pl022->adev->dev, in print_current_status()
1492 "spi-pl022 exp_fifo_level/fifodepth: %u/%d\n", in print_current_status()
1493 pl022->exp_fifo_level, in print_current_status()
1494 pl022->vendor->fifodepth); in print_current_status()
1505 message = pl022->cur_msg; in do_polling_transfer()
1507 while (message->state != STATE_DONE) { in do_polling_transfer()
1509 if (message->state == STATE_ERROR) in do_polling_transfer()
1511 transfer = pl022->cur_transfer; in do_polling_transfer()
1514 if (message->state == STATE_RUNNING) { in do_polling_transfer()
1516 list_entry(transfer->transfer_list.prev, in do_polling_transfer()
1519 if (previous->cs_change) in do_polling_transfer()
1523 message->state = STATE_RUNNING; in do_polling_transfer()
1524 if (!pl022->next_msg_cs_active) in do_polling_transfer()
1531 message->state = STATE_ERROR; in do_polling_transfer()
1536 writew((readw(SSP_CR1(pl022->virtbase)) | SSP_CR1_MASK_SSE), in do_polling_transfer()
1537 SSP_CR1(pl022->virtbase)); in do_polling_transfer()
1539 dev_dbg(&pl022->adev->dev, "polling transfer ongoing ...\n"); in do_polling_transfer()
1542 while (pl022->tx < pl022->tx_end || pl022->rx < pl022->rx_end) { in do_polling_transfer()
1546 dev_warn(&pl022->adev->dev, in do_polling_transfer()
1548 message->state = STATE_TIMEOUT; in do_polling_transfer()
1556 message->actual_length += pl022->cur_transfer->len; in do_polling_transfer()
1558 message->state = next_transfer(pl022); in do_polling_transfer()
1559 if (message->state != STATE_DONE in do_polling_transfer()
1560 && pl022->cur_transfer->cs_change) in do_polling_transfer()
1565 if (message->state == STATE_DONE) in do_polling_transfer()
1566 message->status = 0; in do_polling_transfer()
1567 else if (message->state == STATE_TIMEOUT) in do_polling_transfer()
1568 message->status = -EAGAIN; in do_polling_transfer()
1570 message->status = -EIO; in do_polling_transfer()
1582 pl022->cur_msg = msg; in pl022_transfer_one_message()
1583 msg->state = STATE_START; in pl022_transfer_one_message()
1585 pl022->cur_transfer = list_entry(msg->transfers.next, in pl022_transfer_one_message()
1589 pl022->cur_chip = spi_get_ctldata(msg->spi); in pl022_transfer_one_message()
1590 pl022->cur_cs = spi_get_chipselect(msg->spi, 0); in pl022_transfer_one_message()
1591 /* This is always available but may be set to -ENOENT */ in pl022_transfer_one_message()
1592 pl022->cur_gpiod = spi_get_csgpiod(msg->spi, 0); in pl022_transfer_one_message()
1597 if (pl022->cur_chip->xfer_type == POLLING_TRANSFER) in pl022_transfer_one_message()
1609 /* nothing more to do - disable spi/ssp and power off */ in pl022_unprepare_transfer_hardware()
1610 writew((readw(SSP_CR1(pl022->virtbase)) & in pl022_unprepare_transfer_hardware()
1611 (~SSP_CR1_MASK_SSE)), SSP_CR1(pl022->virtbase)); in pl022_unprepare_transfer_hardware()
1619 if ((chip_info->iface < SSP_INTERFACE_MOTOROLA_SPI) in verify_controller_parameters()
1620 || (chip_info->iface > SSP_INTERFACE_UNIDIRECTIONAL)) { in verify_controller_parameters()
1621 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1623 return -EINVAL; in verify_controller_parameters()
1625 if ((chip_info->iface == SSP_INTERFACE_UNIDIRECTIONAL) && in verify_controller_parameters()
1626 (!pl022->vendor->unidir)) { in verify_controller_parameters()
1627 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1630 return -EINVAL; in verify_controller_parameters()
1632 if ((chip_info->hierarchy != SSP_MASTER) in verify_controller_parameters()
1633 && (chip_info->hierarchy != SSP_SLAVE)) { in verify_controller_parameters()
1634 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1636 return -EINVAL; in verify_controller_parameters()
1638 if ((chip_info->com_mode != INTERRUPT_TRANSFER) in verify_controller_parameters()
1639 && (chip_info->com_mode != DMA_TRANSFER) in verify_controller_parameters()
1640 && (chip_info->com_mode != POLLING_TRANSFER)) { in verify_controller_parameters()
1641 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1643 return -EINVAL; in verify_controller_parameters()
1645 switch (chip_info->rx_lev_trig) { in verify_controller_parameters()
1652 if (pl022->vendor->fifodepth < 16) { in verify_controller_parameters()
1653 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1654 "RX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1655 return -EINVAL; in verify_controller_parameters()
1659 if (pl022->vendor->fifodepth < 32) { in verify_controller_parameters()
1660 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1661 "RX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1662 return -EINVAL; in verify_controller_parameters()
1666 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1667 "RX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1668 return -EINVAL; in verify_controller_parameters()
1670 switch (chip_info->tx_lev_trig) { in verify_controller_parameters()
1677 if (pl022->vendor->fifodepth < 16) { in verify_controller_parameters()
1678 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1679 "TX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1680 return -EINVAL; in verify_controller_parameters()
1684 if (pl022->vendor->fifodepth < 32) { in verify_controller_parameters()
1685 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1686 "TX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1687 return -EINVAL; in verify_controller_parameters()
1691 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1692 "TX FIFO Trigger Level is configured incorrectly\n"); in verify_controller_parameters()
1693 return -EINVAL; in verify_controller_parameters()
1695 if (chip_info->iface == SSP_INTERFACE_NATIONAL_MICROWIRE) { in verify_controller_parameters()
1696 if ((chip_info->ctrl_len < SSP_BITS_4) in verify_controller_parameters()
1697 || (chip_info->ctrl_len > SSP_BITS_32)) { in verify_controller_parameters()
1698 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1700 return -EINVAL; in verify_controller_parameters()
1702 if ((chip_info->wait_state != SSP_MWIRE_WAIT_ZERO) in verify_controller_parameters()
1703 && (chip_info->wait_state != SSP_MWIRE_WAIT_ONE)) { in verify_controller_parameters()
1704 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1706 return -EINVAL; in verify_controller_parameters()
1709 if (pl022->vendor->extended_cr) { in verify_controller_parameters()
1710 if ((chip_info->duplex != in verify_controller_parameters()
1712 && (chip_info->duplex != in verify_controller_parameters()
1714 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1716 return -EINVAL; in verify_controller_parameters()
1719 if (chip_info->duplex != SSP_MICROWIRE_CHANNEL_FULL_DUPLEX) { in verify_controller_parameters()
1720 dev_err(&pl022->adev->dev, in verify_controller_parameters()
1724 return -EINVAL; in verify_controller_parameters()
1744 rate = clk_get_rate(pl022->clk); in calculate_effective_freq()
1751 dev_warn(&pl022->adev->dev, in calculate_effective_freq()
1756 dev_err(&pl022->adev->dev, in calculate_effective_freq()
1759 return -EINVAL; in calculate_effective_freq()
1801 clk_freq->cpsdvsr = (u8) (best_cpsdvsr & 0xFF); in calculate_effective_freq()
1802 clk_freq->scr = (u8) (best_scr & 0xFF); in calculate_effective_freq()
1803 dev_dbg(&pl022->adev->dev, in calculate_effective_freq()
1806 dev_dbg(&pl022->adev->dev, "SSP cpsdvsr = %d, scr = %d\n", in calculate_effective_freq()
1807 clk_freq->cpsdvsr, clk_freq->scr); in calculate_effective_freq()
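The search loop elided from calculate_effective_freq() is solving the PL022 clocking equation: the effective bit rate is the SSP input clock divided by CPSDVSR * (1 + SCR), where CPSDVSR is an even prescaler in 2..254 and SCR is in 0..255. A minimal sketch of that relationship (not the driver's actual best_cpsdvsr/best_scr search loop):

/* Sketch of the rate equation behind the best_cpsdvsr/best_scr search. */
static unsigned long pl022_effective_rate(unsigned long sspclk,
					   u8 cpsdvsr, u8 scr)
{
	/* cpsdvsr must be even and within 2..254, scr within 0..255 */
	return sspclk / (cpsdvsr * (1 + scr));
}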
1829 * pl022_setup - setup function registered to SPI host framework
1847 struct pl022 *pl022 = spi_controller_get_devdata(spi->controller); in pl022_setup()
1848 unsigned int bits = spi->bits_per_word; in pl022_setup()
1850 struct device_node *np = spi->dev.of_node; in pl022_setup()
1852 if (!spi->max_speed_hz) in pl022_setup()
1853 return -EINVAL; in pl022_setup()
1861 return -ENOMEM; in pl022_setup()
1862 dev_dbg(&spi->dev, in pl022_setup()
1867 chip_info = spi->controller_data; in pl022_setup()
1876 of_property_read_u32(np, "pl022,com-mode", in pl022_setup()
1878 of_property_read_u32(np, "pl022,rx-level-trig", in pl022_setup()
1880 of_property_read_u32(np, "pl022,tx-level-trig", in pl022_setup()
1882 of_property_read_u32(np, "pl022,ctrl-len", in pl022_setup()
1884 of_property_read_u32(np, "pl022,wait-state", in pl022_setup()
1893 dev_dbg(&spi->dev, in pl022_setup()
1897 dev_dbg(&spi->dev, in pl022_setup()
1904 if ((0 == chip_info->clk_freq.cpsdvsr) in pl022_setup()
1905 && (0 == chip_info->clk_freq.scr)) { in pl022_setup()
1907 spi->max_speed_hz, in pl022_setup()
1912 memcpy(&clk_freq, &chip_info->clk_freq, sizeof(clk_freq)); in pl022_setup()
1915 clk_freq.cpsdvsr - 1; in pl022_setup()
1919 status = -EINVAL; in pl022_setup()
1920 dev_err(&spi->dev, in pl022_setup()
1927 dev_err(&spi->dev, "controller data is incorrect"); in pl022_setup()
1931 pl022->rx_lev_trig = chip_info->rx_lev_trig; in pl022_setup()
1932 pl022->tx_lev_trig = chip_info->tx_lev_trig; in pl022_setup()
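The trigger levels copied into the driver state here come either from the pl022,rx-level-trig / pl022,tx-level-trig devicetree properties read earlier in pl022_setup(), or from a struct pl022_config_chip handed in as spi_board_info .controller_data on non-DT platforms. A hypothetical board-file sketch, assuming the enum value names from include/linux/amba/pl022.h:

/* Hypothetical board data: struct and field names match the chip_info
 * accesses above; the enum values and the device name are assumptions. */
static struct pl022_config_chip dummy_chip_info = {
	.com_mode	= DMA_TRANSFER,			/* take the DMA path in the driver */
	.iface		= SSP_INTERFACE_MOTOROLA_SPI,
	.hierarchy	= SSP_MASTER,
	.slave_tx_disable = 0,
	.rx_lev_trig	= SSP_RX_4_OR_MORE_ELEM,	/* IRQ/DMA at >= 4 RX FIFO entries */
	.tx_lev_trig	= SSP_TX_4_OR_MORE_EMPTY_LOC,	/* IRQ/DMA at >= 4 free TX slots */
};

static struct spi_board_info dummy_board_info = {
	.modalias	 = "dummy-spi-device",
	.controller_data = &dummy_chip_info,
	.max_speed_hz	 = 1000000,
	.bus_num	 = 0,
	.chip_select	 = 0,
};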
1935 chip->xfer_type = chip_info->com_mode; in pl022_setup()
1938 if ((bits <= 3) || (bits > pl022->vendor->max_bpw)) { in pl022_setup()
1939 status = -ENOTSUPP; in pl022_setup()
1940 dev_err(&spi->dev, "illegal data size for this controller!\n"); in pl022_setup()
1941 dev_err(&spi->dev, "This controller can only handle 4 <= n <= %d bit words\n", in pl022_setup()
1942 pl022->vendor->max_bpw); in pl022_setup()
1945 dev_dbg(&spi->dev, "4 <= n <= 8 bits per word\n"); in pl022_setup()
1946 chip->n_bytes = 1; in pl022_setup()
1947 chip->read = READING_U8; in pl022_setup()
1948 chip->write = WRITING_U8; in pl022_setup()
1950 dev_dbg(&spi->dev, "9 <= n <= 16 bits per word\n"); in pl022_setup()
1951 chip->n_bytes = 2; in pl022_setup()
1952 chip->read = READING_U16; in pl022_setup()
1953 chip->write = WRITING_U16; in pl022_setup()
1955 dev_dbg(&spi->dev, "17 <= n <= 32 bits per word\n"); in pl022_setup()
1956 chip->n_bytes = 4; in pl022_setup()
1957 chip->read = READING_U32; in pl022_setup()
1958 chip->write = WRITING_U32; in pl022_setup()
1962 chip->cr0 = 0; in pl022_setup()
1963 chip->cr1 = 0; in pl022_setup()
1964 chip->dmacr = 0; in pl022_setup()
1965 chip->cpsr = 0; in pl022_setup()
1966 if ((chip_info->com_mode == DMA_TRANSFER) in pl022_setup()
1967 && ((pl022->host_info)->enable_dma)) { in pl022_setup()
1968 chip->enable_dma = true; in pl022_setup()
1969 dev_dbg(&spi->dev, "DMA mode set in controller state\n"); in pl022_setup()
1970 SSP_WRITE_BITS(chip->dmacr, SSP_DMA_ENABLED, in pl022_setup()
1972 SSP_WRITE_BITS(chip->dmacr, SSP_DMA_ENABLED, in pl022_setup()
1975 chip->enable_dma = false; in pl022_setup()
1976 dev_dbg(&spi->dev, "DMA mode NOT set in controller state\n"); in pl022_setup()
1977 SSP_WRITE_BITS(chip->dmacr, SSP_DMA_DISABLED, in pl022_setup()
1979 SSP_WRITE_BITS(chip->dmacr, SSP_DMA_DISABLED, in pl022_setup()
1983 chip->cpsr = clk_freq.cpsdvsr; in pl022_setup()
1986 if (pl022->vendor->extended_cr) { in pl022_setup()
1989 if (pl022->vendor->pl023) { in pl022_setup()
1991 SSP_WRITE_BITS(chip->cr1, chip_info->clkdelay, in pl022_setup()
1995 SSP_WRITE_BITS(chip->cr0, chip_info->duplex, in pl022_setup()
1997 SSP_WRITE_BITS(chip->cr0, chip_info->ctrl_len, in pl022_setup()
1999 SSP_WRITE_BITS(chip->cr0, chip_info->iface, in pl022_setup()
2001 SSP_WRITE_BITS(chip->cr1, chip_info->wait_state, in pl022_setup()
2004 SSP_WRITE_BITS(chip->cr0, bits - 1, in pl022_setup()
2007 if (spi->mode & SPI_LSB_FIRST) { in pl022_setup()
2014 SSP_WRITE_BITS(chip->cr1, tmp, SSP_CR1_MASK_RENDN_ST, 4); in pl022_setup()
2015 SSP_WRITE_BITS(chip->cr1, etx, SSP_CR1_MASK_TENDN_ST, 5); in pl022_setup()
2016 SSP_WRITE_BITS(chip->cr1, chip_info->rx_lev_trig, in pl022_setup()
2018 SSP_WRITE_BITS(chip->cr1, chip_info->tx_lev_trig, in pl022_setup()
2021 SSP_WRITE_BITS(chip->cr0, bits - 1, in pl022_setup()
2023 SSP_WRITE_BITS(chip->cr0, chip_info->iface, in pl022_setup()
2028 if (spi->mode & SPI_CPOL) in pl022_setup()
2032 SSP_WRITE_BITS(chip->cr0, tmp, SSP_CR0_MASK_SPO, 6); in pl022_setup()
2034 if (spi->mode & SPI_CPHA) in pl022_setup()
2038 SSP_WRITE_BITS(chip->cr0, tmp, SSP_CR0_MASK_SPH, 7); in pl022_setup()
2040 SSP_WRITE_BITS(chip->cr0, clk_freq.scr, SSP_CR0_MASK_SCR, 8); in pl022_setup()
2042 if (pl022->vendor->loopback) { in pl022_setup()
2043 if (spi->mode & SPI_LOOP) in pl022_setup()
2047 SSP_WRITE_BITS(chip->cr1, tmp, SSP_CR1_MASK_LBM, 0); in pl022_setup()
2049 SSP_WRITE_BITS(chip->cr1, SSP_DISABLED, SSP_CR1_MASK_SSE, 1); in pl022_setup()
2050 SSP_WRITE_BITS(chip->cr1, chip_info->hierarchy, SSP_CR1_MASK_MS, 2); in pl022_setup()
2051 SSP_WRITE_BITS(chip->cr1, chip_info->slave_tx_disable, SSP_CR1_MASK_SOD, in pl022_setup()
2064 * pl022_cleanup - cleanup function registered to SPI host framework
2081 struct device_node *np = dev->of_node; in pl022_platform_data_dt_get()
2093 pd->bus_id = -1; in pl022_platform_data_dt_get()
2094 of_property_read_u32(np, "pl022,autosuspend-delay", in pl022_platform_data_dt_get()
2095 &pd->autosuspend_delay); in pl022_platform_data_dt_get()
2096 pd->rt = of_property_read_bool(np, "pl022,rt"); in pl022_platform_data_dt_get()
2103 struct device *dev = &adev->dev; in pl022_probe()
2105 dev_get_platdata(&adev->dev); in pl022_probe()
2110 dev_info(&adev->dev, in pl022_probe()
2111 "ARM PL022 driver, device ID: 0x%08x\n", adev->periphid); in pl022_probe()
2117 return -ENODEV; in pl022_probe()
2123 dev_err(&adev->dev, "probe - cannot alloc SPI host\n"); in pl022_probe()
2124 return -ENOMEM; in pl022_probe()
2128 pl022->host = host; in pl022_probe()
2129 pl022->host_info = platform_info; in pl022_probe()
2130 pl022->adev = adev; in pl022_probe()
2131 pl022->vendor = id->data; in pl022_probe()
2137 host->bus_num = platform_info->bus_id; in pl022_probe()
2138 host->cleanup = pl022_cleanup; in pl022_probe()
2139 host->setup = pl022_setup; in pl022_probe()
2140 host->auto_runtime_pm = true; in pl022_probe()
2141 host->transfer_one_message = pl022_transfer_one_message; in pl022_probe()
2142 host->unprepare_transfer_hardware = pl022_unprepare_transfer_hardware; in pl022_probe()
2143 host->rt = platform_info->rt; in pl022_probe()
2144 host->dev.of_node = dev->of_node; in pl022_probe()
2145 host->use_gpio_descriptors = true; in pl022_probe()
2148 * Supports mode 0-3, loopback, and active low CS. Transfers are in pl022_probe()
2151 host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH | SPI_LOOP; in pl022_probe()
2152 if (pl022->vendor->extended_cr) in pl022_probe()
2153 host->mode_bits |= SPI_LSB_FIRST; in pl022_probe()
2155 dev_dbg(&adev->dev, "BUSNO: %d\n", host->bus_num); in pl022_probe()
2161 pl022->phybase = adev->res.start; in pl022_probe()
2162 pl022->virtbase = devm_ioremap(dev, adev->res.start, in pl022_probe()
2163 resource_size(&adev->res)); in pl022_probe()
2164 if (pl022->virtbase == NULL) { in pl022_probe()
2165 status = -ENOMEM; in pl022_probe()
2168 dev_info(&adev->dev, "mapped registers from %pa to %p\n", in pl022_probe()
2169 &adev->res.start, pl022->virtbase); in pl022_probe()
2171 pl022->clk = devm_clk_get(&adev->dev, NULL); in pl022_probe()
2172 if (IS_ERR(pl022->clk)) { in pl022_probe()
2173 status = PTR_ERR(pl022->clk); in pl022_probe()
2174 dev_err(&adev->dev, "could not retrieve SSP/SPI bus clock\n"); in pl022_probe()
2178 status = clk_prepare_enable(pl022->clk); in pl022_probe()
2180 dev_err(&adev->dev, "could not enable SSP/SPI bus clock\n"); in pl022_probe()
2185 tasklet_init(&pl022->pump_transfers, pump_transfers, in pl022_probe()
2189 writew((readw(SSP_CR1(pl022->virtbase)) & (~SSP_CR1_MASK_SSE)), in pl022_probe()
2190 SSP_CR1(pl022->virtbase)); in pl022_probe()
2193 status = devm_request_irq(dev, adev->irq[0], pl022_interrupt_handler, in pl022_probe()
2196 dev_err(&adev->dev, "probe - cannot get IRQ (%d)\n", status); in pl022_probe()
2202 if (status == -EPROBE_DEFER) { in pl022_probe()
2209 platform_info->enable_dma = 1; in pl022_probe()
2210 else if (platform_info->enable_dma) { in pl022_probe()
2213 platform_info->enable_dma = 0; in pl022_probe()
2218 status = devm_spi_register_controller(&adev->dev, host); in pl022_probe()
2220 dev_err_probe(&adev->dev, status, in pl022_probe()
2227 if (platform_info->autosuspend_delay > 0) { in pl022_probe()
2228 dev_info(&adev->dev, in pl022_probe()
2230 platform_info->autosuspend_delay); in pl022_probe()
2232 platform_info->autosuspend_delay); in pl022_probe()
2240 if (platform_info->enable_dma) in pl022_probe()
2243 clk_disable_unprepare(pl022->clk); in pl022_probe()
2265 pm_runtime_get_noresume(&adev->dev); in pl022_remove()
2268 if (pl022->host_info->enable_dma) in pl022_remove()
2271 clk_disable_unprepare(pl022->clk); in pl022_remove()
2273 tasklet_disable(&pl022->pump_transfers); in pl022_remove()
2282 ret = spi_controller_suspend(pl022->host); in pl022_suspend()
2288 spi_controller_resume(pl022->host); in pl022_suspend()
2308 ret = spi_controller_resume(pl022->host); in pl022_resume()
2321 clk_disable_unprepare(pl022->clk); in pl022_runtime_suspend()
2332 clk_prepare_enable(pl022->clk); in pl022_runtime_resume()
2387 * and 8 locations deep TX/RX FIFO
2396 * and 32 locations deep TX/RX FIFO
2404 * ST-Ericsson derivative "PL023" (this is not
2407 * and 32 locations deep TX/RX FIFO but no extended
2430 .name = "ssp-pl022",