Lines matching refs: sport (imx_uart driver, drivers/tty/serial/imx.c)

284 static void imx_uart_writel(struct imx_port *sport, u32 val, u32 offset)  in imx_uart_writel()  argument
288 sport->ucr1 = val; in imx_uart_writel()
291 sport->ucr2 = val; in imx_uart_writel()
294 sport->ucr3 = val; in imx_uart_writel()
297 sport->ucr4 = val; in imx_uart_writel()
300 sport->ufcr = val; in imx_uart_writel()
305 writel(val, sport->port.membase + offset); in imx_uart_writel()
308 static u32 imx_uart_readl(struct imx_port *sport, u32 offset) in imx_uart_readl() argument
312 return sport->ucr1; in imx_uart_readl()
321 if (!(sport->ucr2 & UCR2_SRST)) in imx_uart_readl()
322 sport->ucr2 = readl(sport->port.membase + offset); in imx_uart_readl()
323 return sport->ucr2; in imx_uart_readl()
326 return sport->ucr3; in imx_uart_readl()
329 return sport->ucr4; in imx_uart_readl()
332 return sport->ufcr; in imx_uart_readl()
335 return readl(sport->port.membase + offset); in imx_uart_readl()
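
Note: the two wrappers listed above keep shadow copies of the UCR1-UCR4 and UFCR control registers in struct imx_port, so most reads are served from the cached value instead of an MMIO access; UCR2 is the exception and is re-read from hardware while a software reset is pending, because UCR2_SRST clears itself. A condensed sketch of that pattern, pieced together from the fragments above (the switch/case scaffolding, the default case and the comments are assumptions, not the verbatim driver code):

	static void imx_uart_writel(struct imx_port *sport, u32 val, u32 offset)
	{
		switch (offset) {		/* keep a shadow copy of the control registers */
		case UCR1: sport->ucr1 = val; break;
		case UCR2: sport->ucr2 = val; break;
		case UCR3: sport->ucr3 = val; break;
		case UCR4: sport->ucr4 = val; break;
		case UFCR: sport->ufcr = val; break;
		default: break;			/* other registers are not cached */
		}
		writel(val, sport->port.membase + offset);	/* the write always reaches the hardware */
	}

	static u32 imx_uart_readl(struct imx_port *sport, u32 offset)
	{
		switch (offset) {
		case UCR1: return sport->ucr1;
		case UCR2:
			/* UCR2_SRST is self-clearing: refresh the cache until the reset has finished */
			if (!(sport->ucr2 & UCR2_SRST))
				sport->ucr2 = readl(sport->port.membase + offset);
			return sport->ucr2;
		case UCR3: return sport->ucr3;
		case UCR4: return sport->ucr4;
		case UFCR: return sport->ufcr;
		default:
			return readl(sport->port.membase + offset);
		}
	}
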
339 static inline unsigned imx_uart_uts_reg(struct imx_port *sport) in imx_uart_uts_reg() argument
341 return sport->devdata->uts_reg; in imx_uart_uts_reg()
344 static inline int imx_uart_is_imx1(struct imx_port *sport) in imx_uart_is_imx1() argument
346 return sport->devdata->devtype == IMX1_UART; in imx_uart_is_imx1()
349 static inline int imx_uart_is_imx21(struct imx_port *sport) in imx_uart_is_imx21() argument
351 return sport->devdata->devtype == IMX21_UART; in imx_uart_is_imx21()
354 static inline int imx_uart_is_imx53(struct imx_port *sport) in imx_uart_is_imx53() argument
356 return sport->devdata->devtype == IMX53_UART; in imx_uart_is_imx53()
359 static inline int imx_uart_is_imx6q(struct imx_port *sport) in imx_uart_is_imx6q() argument
361 return sport->devdata->devtype == IMX6Q_UART; in imx_uart_is_imx6q()
367 static void imx_uart_ucrs_save(struct imx_port *sport, in imx_uart_ucrs_save() argument
371 ucr->ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_ucrs_save()
372 ucr->ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_ucrs_save()
373 ucr->ucr3 = imx_uart_readl(sport, UCR3); in imx_uart_ucrs_save()
376 static void imx_uart_ucrs_restore(struct imx_port *sport, in imx_uart_ucrs_restore() argument
380 imx_uart_writel(sport, ucr->ucr1, UCR1); in imx_uart_ucrs_restore()
381 imx_uart_writel(sport, ucr->ucr2, UCR2); in imx_uart_ucrs_restore()
382 imx_uart_writel(sport, ucr->ucr3, UCR3); in imx_uart_ucrs_restore()
387 static void imx_uart_rts_active(struct imx_port *sport, u32 *ucr2) in imx_uart_rts_active() argument
391 sport->port.mctrl |= TIOCM_RTS; in imx_uart_rts_active()
392 mctrl_gpio_set(sport->gpios, sport->port.mctrl); in imx_uart_rts_active()
396 static void imx_uart_rts_inactive(struct imx_port *sport, u32 *ucr2) in imx_uart_rts_inactive() argument
401 sport->port.mctrl &= ~TIOCM_RTS; in imx_uart_rts_inactive()
402 mctrl_gpio_set(sport->gpios, sport->port.mctrl); in imx_uart_rts_inactive()
408 struct imx_port *sport = (struct imx_port *)port; in imx_uart_start_rx() local
411 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_start_rx()
412 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_start_rx()
416 if (sport->dma_is_enabled) { in imx_uart_start_rx()
424 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_start_rx()
425 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_start_rx()
431 struct imx_port *sport = (struct imx_port *)port; in imx_uart_stop_tx() local
438 if (sport->dma_is_txing) in imx_uart_stop_tx()
441 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_stop_tx()
442 imx_uart_writel(sport, ucr1 & ~UCR1_TRDYEN, UCR1); in imx_uart_stop_tx()
446 imx_uart_readl(sport, USR2) & USR2_TXDC) { in imx_uart_stop_tx()
447 u32 ucr2 = imx_uart_readl(sport, UCR2), ucr4; in imx_uart_stop_tx()
449 imx_uart_rts_active(sport, &ucr2); in imx_uart_stop_tx()
451 imx_uart_rts_inactive(sport, &ucr2); in imx_uart_stop_tx()
452 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_stop_tx()
456 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_stop_tx()
458 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_stop_tx()
465 struct imx_port *sport = (struct imx_port *)port; in imx_uart_stop_rx() local
468 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_stop_rx()
469 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_stop_rx()
471 if (sport->dma_is_enabled) { in imx_uart_stop_rx()
477 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_stop_rx()
480 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_stop_rx()
486 struct imx_port *sport = (struct imx_port *)port; in imx_uart_enable_ms() local
488 mod_timer(&sport->timer, jiffies); in imx_uart_enable_ms()
490 mctrl_gpio_enable_ms(sport->gpios); in imx_uart_enable_ms()
493 static void imx_uart_dma_tx(struct imx_port *sport);
496 static inline void imx_uart_transmit_buffer(struct imx_port *sport) in imx_uart_transmit_buffer() argument
498 struct circ_buf *xmit = &sport->port.state->xmit; in imx_uart_transmit_buffer()
500 if (sport->port.x_char) { in imx_uart_transmit_buffer()
502 imx_uart_writel(sport, sport->port.x_char, URTX0); in imx_uart_transmit_buffer()
503 sport->port.icount.tx++; in imx_uart_transmit_buffer()
504 sport->port.x_char = 0; in imx_uart_transmit_buffer()
508 if (uart_circ_empty(xmit) || uart_tx_stopped(&sport->port)) { in imx_uart_transmit_buffer()
509 imx_uart_stop_tx(&sport->port); in imx_uart_transmit_buffer()
513 if (sport->dma_is_enabled) { in imx_uart_transmit_buffer()
519 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_transmit_buffer()
521 if (sport->dma_is_txing) { in imx_uart_transmit_buffer()
523 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_transmit_buffer()
525 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_transmit_buffer()
526 imx_uart_dma_tx(sport); in imx_uart_transmit_buffer()
533 !(imx_uart_readl(sport, imx_uart_uts_reg(sport)) & UTS_TXFULL)) { in imx_uart_transmit_buffer()
536 imx_uart_writel(sport, xmit->buf[xmit->tail], URTX0); in imx_uart_transmit_buffer()
538 sport->port.icount.tx++; in imx_uart_transmit_buffer()
542 uart_write_wakeup(&sport->port); in imx_uart_transmit_buffer()
545 imx_uart_stop_tx(&sport->port); in imx_uart_transmit_buffer()
550 struct imx_port *sport = data; in imx_uart_dma_tx_callback() local
551 struct scatterlist *sgl = &sport->tx_sgl[0]; in imx_uart_dma_tx_callback()
552 struct circ_buf *xmit = &sport->port.state->xmit; in imx_uart_dma_tx_callback()
556 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_dma_tx_callback()
558 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in imx_uart_dma_tx_callback()
560 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_dma_tx_callback()
562 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_dma_tx_callback()
565 xmit->tail = (xmit->tail + sport->tx_bytes) & (UART_XMIT_SIZE - 1); in imx_uart_dma_tx_callback()
566 sport->port.icount.tx += sport->tx_bytes; in imx_uart_dma_tx_callback()
568 dev_dbg(sport->port.dev, "we finish the TX DMA.\n"); in imx_uart_dma_tx_callback()
570 sport->dma_is_txing = 0; in imx_uart_dma_tx_callback()
573 uart_write_wakeup(&sport->port); in imx_uart_dma_tx_callback()
575 if (!uart_circ_empty(xmit) && !uart_tx_stopped(&sport->port)) in imx_uart_dma_tx_callback()
576 imx_uart_dma_tx(sport); in imx_uart_dma_tx_callback()
577 else if (sport->port.rs485.flags & SER_RS485_ENABLED) { in imx_uart_dma_tx_callback()
578 u32 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_dma_tx_callback()
580 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_dma_tx_callback()
583 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_dma_tx_callback()
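
Note: the callback above runs when a TX DMA transfer completes: it unmaps the scatterlist, advances the transmit circular buffer's tail by the number of bytes sent, wakes up writers, and either chains the next DMA transfer or, when RS-485 is enabled, tweaks UCR4 (the exact bit is elided in the listing) so the transmit-complete path can finish the bus turnaround. Because UART_XMIT_SIZE is a power of two, the tail advance at line 565 wraps with a single AND. A tiny standalone illustration of that arithmetic (the buffer size and byte counts here are made-up example values, not driver state):

	#include <stdio.h>

	#define XMIT_SIZE 4096		/* stand-in for UART_XMIT_SIZE; must be a power of two */

	int main(void)
	{
		unsigned int tail = 4090;	/* hypothetical tail near the end of the buffer */
		unsigned int sent = 20;		/* bytes the DMA just pushed out */

		/* same wrap-around as xmit->tail = (xmit->tail + tx_bytes) & (UART_XMIT_SIZE - 1) */
		tail = (tail + sent) & (XMIT_SIZE - 1);
		printf("new tail = %u\n", tail);	/* prints 14: wrapped without a division */
		return 0;
	}
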
587 static void imx_uart_dma_tx(struct imx_port *sport) in imx_uart_dma_tx() argument
589 struct circ_buf *xmit = &sport->port.state->xmit; in imx_uart_dma_tx()
590 struct scatterlist *sgl = sport->tx_sgl; in imx_uart_dma_tx()
592 struct dma_chan *chan = sport->dma_chan_tx; in imx_uart_dma_tx()
593 struct device *dev = sport->port.dev; in imx_uart_dma_tx()
597 if (sport->dma_is_txing) in imx_uart_dma_tx()
600 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_dma_tx()
602 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_dma_tx()
604 sport->tx_bytes = uart_circ_chars_pending(xmit); in imx_uart_dma_tx()
607 sport->dma_tx_nents = 1; in imx_uart_dma_tx()
608 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes); in imx_uart_dma_tx()
610 sport->dma_tx_nents = 2; in imx_uart_dma_tx()
617 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in imx_uart_dma_tx()
622 desc = dmaengine_prep_slave_sg(chan, sgl, sport->dma_tx_nents, in imx_uart_dma_tx()
625 dma_unmap_sg(dev, sgl, sport->dma_tx_nents, in imx_uart_dma_tx()
631 desc->callback_param = sport; in imx_uart_dma_tx()
636 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_dma_tx()
638 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_dma_tx()
641 sport->dma_is_txing = 1; in imx_uart_dma_tx()
650 struct imx_port *sport = (struct imx_port *)port; in imx_uart_start_tx() local
653 if (!sport->port.x_char && uart_circ_empty(&port->state->xmit)) in imx_uart_start_tx()
659 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_start_tx()
661 imx_uart_rts_active(sport, &ucr2); in imx_uart_start_tx()
663 imx_uart_rts_inactive(sport, &ucr2); in imx_uart_start_tx()
664 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_start_tx()
673 if (!sport->dma_is_enabled) { in imx_uart_start_tx()
674 u32 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_start_tx()
676 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_start_tx()
680 if (!sport->dma_is_enabled) { in imx_uart_start_tx()
681 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_start_tx()
682 imx_uart_writel(sport, ucr1 | UCR1_TRDYEN, UCR1); in imx_uart_start_tx()
685 if (sport->dma_is_enabled) { in imx_uart_start_tx()
686 if (sport->port.x_char) { in imx_uart_start_tx()
689 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_start_tx()
692 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_start_tx()
698 imx_uart_dma_tx(sport); in imx_uart_start_tx()
705 struct imx_port *sport = dev_id; in imx_uart_rtsint() local
708 spin_lock(&sport->port.lock); in imx_uart_rtsint()
710 imx_uart_writel(sport, USR1_RTSD, USR1); in imx_uart_rtsint()
711 usr1 = imx_uart_readl(sport, USR1) & USR1_RTSS; in imx_uart_rtsint()
712 uart_handle_cts_change(&sport->port, !!usr1); in imx_uart_rtsint()
713 wake_up_interruptible(&sport->port.state->port.delta_msr_wait); in imx_uart_rtsint()
715 spin_unlock(&sport->port.lock); in imx_uart_rtsint()
721 struct imx_port *sport = dev_id; in imx_uart_txint() local
723 spin_lock(&sport->port.lock); in imx_uart_txint()
724 imx_uart_transmit_buffer(sport); in imx_uart_txint()
725 spin_unlock(&sport->port.lock); in imx_uart_txint()
731 struct imx_port *sport = dev_id; in imx_uart_rxint() local
733 struct tty_port *port = &sport->port.state->port; in imx_uart_rxint()
735 spin_lock(&sport->port.lock); in imx_uart_rxint()
737 while (imx_uart_readl(sport, USR2) & USR2_RDR) { in imx_uart_rxint()
741 sport->port.icount.rx++; in imx_uart_rxint()
743 rx = imx_uart_readl(sport, URXD0); in imx_uart_rxint()
745 usr2 = imx_uart_readl(sport, USR2); in imx_uart_rxint()
747 imx_uart_writel(sport, USR2_BRCD, USR2); in imx_uart_rxint()
748 if (uart_handle_break(&sport->port)) in imx_uart_rxint()
752 if (uart_handle_sysrq_char(&sport->port, (unsigned char)rx)) in imx_uart_rxint()
757 sport->port.icount.brk++; in imx_uart_rxint()
759 sport->port.icount.parity++; in imx_uart_rxint()
761 sport->port.icount.frame++; in imx_uart_rxint()
763 sport->port.icount.overrun++; in imx_uart_rxint()
765 if (rx & sport->port.ignore_status_mask) { in imx_uart_rxint()
771 rx &= (sport->port.read_status_mask | 0xFF); in imx_uart_rxint()
783 sport->port.sysrq = 0; in imx_uart_rxint()
787 if (sport->port.ignore_status_mask & URXD_DUMMY_READ) in imx_uart_rxint()
791 sport->port.icount.buf_overrun++; in imx_uart_rxint()
795 spin_unlock(&sport->port.lock); in imx_uart_rxint()
800 static void imx_uart_clear_rx_errors(struct imx_port *sport);
805 static unsigned int imx_uart_get_hwmctrl(struct imx_port *sport) in imx_uart_get_hwmctrl() argument
808 unsigned usr1 = imx_uart_readl(sport, USR1); in imx_uart_get_hwmctrl()
809 unsigned usr2 = imx_uart_readl(sport, USR2); in imx_uart_get_hwmctrl()
818 if (sport->dte_mode) in imx_uart_get_hwmctrl()
819 if (!(imx_uart_readl(sport, USR2) & USR2_RIIN)) in imx_uart_get_hwmctrl()
828 static void imx_uart_mctrl_check(struct imx_port *sport) in imx_uart_mctrl_check() argument
832 status = imx_uart_get_hwmctrl(sport); in imx_uart_mctrl_check()
833 changed = status ^ sport->old_status; in imx_uart_mctrl_check()
838 sport->old_status = status; in imx_uart_mctrl_check()
841 sport->port.icount.rng++; in imx_uart_mctrl_check()
843 sport->port.icount.dsr++; in imx_uart_mctrl_check()
845 uart_handle_dcd_change(&sport->port, status & TIOCM_CAR); in imx_uart_mctrl_check()
847 uart_handle_cts_change(&sport->port, status & TIOCM_CTS); in imx_uart_mctrl_check()
849 wake_up_interruptible(&sport->port.state->port.delta_msr_wait); in imx_uart_mctrl_check()
854 struct imx_port *sport = dev_id; in imx_uart_int() local
858 usr1 = imx_uart_readl(sport, USR1); in imx_uart_int()
859 usr2 = imx_uart_readl(sport, USR2); in imx_uart_int()
860 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_int()
861 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_int()
862 ucr3 = imx_uart_readl(sport, UCR3); in imx_uart_int()
863 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_int()
901 imx_uart_writel(sport, USR1_DTRD, USR1); in imx_uart_int()
903 spin_lock(&sport->port.lock); in imx_uart_int()
904 imx_uart_mctrl_check(sport); in imx_uart_int()
905 spin_unlock(&sport->port.lock); in imx_uart_int()
916 imx_uart_writel(sport, USR1_AWAKE, USR1); in imx_uart_int()
921 sport->port.icount.overrun++; in imx_uart_int()
922 imx_uart_writel(sport, USR2_ORE, USR2); in imx_uart_int()
934 struct imx_port *sport = (struct imx_port *)port; in imx_uart_tx_empty() local
937 ret = (imx_uart_readl(sport, USR2) & USR2_TXDC) ? TIOCSER_TEMT : 0; in imx_uart_tx_empty()
940 if (sport->dma_is_txing) in imx_uart_tx_empty()
949 struct imx_port *sport = (struct imx_port *)port; in imx_uart_get_mctrl() local
950 unsigned int ret = imx_uart_get_hwmctrl(sport); in imx_uart_get_mctrl()
952 mctrl_gpio_get(sport->gpios, &ret); in imx_uart_get_mctrl()
960 struct imx_port *sport = (struct imx_port *)port; in imx_uart_set_mctrl() local
970 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_set_mctrl()
982 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_set_mctrl()
985 ucr3 = imx_uart_readl(sport, UCR3) & ~UCR3_DSR; in imx_uart_set_mctrl()
988 imx_uart_writel(sport, ucr3, UCR3); in imx_uart_set_mctrl()
990 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport)) & ~UTS_LOOP; in imx_uart_set_mctrl()
993 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport)); in imx_uart_set_mctrl()
995 mctrl_gpio_set(sport->gpios, mctrl); in imx_uart_set_mctrl()
1003 struct imx_port *sport = (struct imx_port *)port; in imx_uart_break_ctl() local
1007 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_break_ctl()
1009 ucr1 = imx_uart_readl(sport, UCR1) & ~UCR1_SNDBRK; in imx_uart_break_ctl()
1014 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_break_ctl()
1016 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_break_ctl()
1025 struct imx_port *sport = from_timer(sport, t, timer); in imx_uart_timeout() local
1028 if (sport->port.state) { in imx_uart_timeout()
1029 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_timeout()
1030 imx_uart_mctrl_check(sport); in imx_uart_timeout()
1031 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_timeout()
1033 mod_timer(&sport->timer, jiffies + MCTRL_TIMEOUT); in imx_uart_timeout()
1049 struct imx_port *sport = data; in imx_uart_dma_rx_callback() local
1050 struct dma_chan *chan = sport->dma_chan_rx; in imx_uart_dma_rx_callback()
1051 struct scatterlist *sgl = &sport->rx_sgl; in imx_uart_dma_rx_callback()
1052 struct tty_port *port = &sport->port.state->port; in imx_uart_dma_rx_callback()
1054 struct circ_buf *rx_ring = &sport->rx_ring; in imx_uart_dma_rx_callback()
1060 status = dmaengine_tx_status(chan, sport->rx_cookie, &state); in imx_uart_dma_rx_callback()
1063 imx_uart_clear_rx_errors(sport); in imx_uart_dma_rx_callback()
1067 if (!(sport->port.ignore_status_mask & URXD_DUMMY_READ)) { in imx_uart_dma_rx_callback()
1084 bd_size = sg_dma_len(sgl) / sport->rx_periods; in imx_uart_dma_rx_callback()
1094 dma_sync_sg_for_cpu(sport->port.dev, sgl, 1, in imx_uart_dma_rx_callback()
1098 sport->rx_buf + rx_ring->tail, r_bytes); in imx_uart_dma_rx_callback()
1101 dma_sync_sg_for_device(sport->port.dev, sgl, 1, in imx_uart_dma_rx_callback()
1105 sport->port.icount.buf_overrun++; in imx_uart_dma_rx_callback()
1107 sport->port.icount.rx += w_bytes; in imx_uart_dma_rx_callback()
1116 dev_dbg(sport->port.dev, "We get %d bytes.\n", w_bytes); in imx_uart_dma_rx_callback()
1123 static int imx_uart_start_rx_dma(struct imx_port *sport) in imx_uart_start_rx_dma() argument
1125 struct scatterlist *sgl = &sport->rx_sgl; in imx_uart_start_rx_dma()
1126 struct dma_chan *chan = sport->dma_chan_rx; in imx_uart_start_rx_dma()
1127 struct device *dev = sport->port.dev; in imx_uart_start_rx_dma()
1131 sport->rx_ring.head = 0; in imx_uart_start_rx_dma()
1132 sport->rx_ring.tail = 0; in imx_uart_start_rx_dma()
1133 sport->rx_periods = RX_DMA_PERIODS; in imx_uart_start_rx_dma()
1135 sg_init_one(sgl, sport->rx_buf, RX_BUF_SIZE); in imx_uart_start_rx_dma()
1143 sg_dma_len(sgl), sg_dma_len(sgl) / sport->rx_periods, in imx_uart_start_rx_dma()
1152 desc->callback_param = sport; in imx_uart_start_rx_dma()
1155 sport->dma_is_rxing = 1; in imx_uart_start_rx_dma()
1156 sport->rx_cookie = dmaengine_submit(desc); in imx_uart_start_rx_dma()
1161 static void imx_uart_clear_rx_errors(struct imx_port *sport) in imx_uart_clear_rx_errors() argument
1163 struct tty_port *port = &sport->port.state->port; in imx_uart_clear_rx_errors()
1166 usr1 = imx_uart_readl(sport, USR1); in imx_uart_clear_rx_errors()
1167 usr2 = imx_uart_readl(sport, USR2); in imx_uart_clear_rx_errors()
1170 sport->port.icount.brk++; in imx_uart_clear_rx_errors()
1171 imx_uart_writel(sport, USR2_BRCD, USR2); in imx_uart_clear_rx_errors()
1172 uart_handle_break(&sport->port); in imx_uart_clear_rx_errors()
1174 sport->port.icount.buf_overrun++; in imx_uart_clear_rx_errors()
1178 sport->port.icount.frame++; in imx_uart_clear_rx_errors()
1179 imx_uart_writel(sport, USR1_FRAMERR, USR1); in imx_uart_clear_rx_errors()
1181 sport->port.icount.parity++; in imx_uart_clear_rx_errors()
1182 imx_uart_writel(sport, USR1_PARITYERR, USR1); in imx_uart_clear_rx_errors()
1187 sport->port.icount.overrun++; in imx_uart_clear_rx_errors()
1188 imx_uart_writel(sport, USR2_ORE, USR2); in imx_uart_clear_rx_errors()
1198 static void imx_uart_setup_ufcr(struct imx_port *sport, in imx_uart_setup_ufcr() argument
1204 val = imx_uart_readl(sport, UFCR) & (UFCR_RFDIV | UFCR_DCEDTE); in imx_uart_setup_ufcr()
1206 imx_uart_writel(sport, val, UFCR); in imx_uart_setup_ufcr()
1209 static void imx_uart_dma_exit(struct imx_port *sport) in imx_uart_dma_exit() argument
1211 if (sport->dma_chan_rx) { in imx_uart_dma_exit()
1212 dmaengine_terminate_sync(sport->dma_chan_rx); in imx_uart_dma_exit()
1213 dma_release_channel(sport->dma_chan_rx); in imx_uart_dma_exit()
1214 sport->dma_chan_rx = NULL; in imx_uart_dma_exit()
1215 sport->rx_cookie = -EINVAL; in imx_uart_dma_exit()
1216 kfree(sport->rx_buf); in imx_uart_dma_exit()
1217 sport->rx_buf = NULL; in imx_uart_dma_exit()
1220 if (sport->dma_chan_tx) { in imx_uart_dma_exit()
1221 dmaengine_terminate_sync(sport->dma_chan_tx); in imx_uart_dma_exit()
1222 dma_release_channel(sport->dma_chan_tx); in imx_uart_dma_exit()
1223 sport->dma_chan_tx = NULL; in imx_uart_dma_exit()
1227 static int imx_uart_dma_init(struct imx_port *sport) in imx_uart_dma_init() argument
1230 struct device *dev = sport->port.dev; in imx_uart_dma_init()
1234 sport->dma_chan_rx = dma_request_slave_channel(dev, "rx"); in imx_uart_dma_init()
1235 if (!sport->dma_chan_rx) { in imx_uart_dma_init()
1242 slave_config.src_addr = sport->port.mapbase + URXD0; in imx_uart_dma_init()
1246 ret = dmaengine_slave_config(sport->dma_chan_rx, &slave_config); in imx_uart_dma_init()
1252 sport->rx_buf = kzalloc(RX_BUF_SIZE, GFP_KERNEL); in imx_uart_dma_init()
1253 if (!sport->rx_buf) { in imx_uart_dma_init()
1257 sport->rx_ring.buf = sport->rx_buf; in imx_uart_dma_init()
1260 sport->dma_chan_tx = dma_request_slave_channel(dev, "tx"); in imx_uart_dma_init()
1261 if (!sport->dma_chan_tx) { in imx_uart_dma_init()
1268 slave_config.dst_addr = sport->port.mapbase + URTX0; in imx_uart_dma_init()
1271 ret = dmaengine_slave_config(sport->dma_chan_tx, &slave_config); in imx_uart_dma_init()
1279 imx_uart_dma_exit(sport); in imx_uart_dma_init()
1283 static void imx_uart_enable_dma(struct imx_port *sport) in imx_uart_enable_dma() argument
1287 imx_uart_setup_ufcr(sport, TXTL_DMA, RXTL_DMA); in imx_uart_enable_dma()
1290 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_enable_dma()
1292 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_enable_dma()
1294 sport->dma_is_enabled = 1; in imx_uart_enable_dma()
1297 static void imx_uart_disable_dma(struct imx_port *sport) in imx_uart_disable_dma() argument
1302 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_disable_dma()
1304 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_disable_dma()
1306 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT); in imx_uart_disable_dma()
1308 sport->dma_is_enabled = 0; in imx_uart_disable_dma()
1316 struct imx_port *sport = (struct imx_port *)port; in imx_uart_startup() local
1322 retval = clk_prepare_enable(sport->clk_per); in imx_uart_startup()
1325 retval = clk_prepare_enable(sport->clk_ipg); in imx_uart_startup()
1327 clk_disable_unprepare(sport->clk_per); in imx_uart_startup()
1331 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT); in imx_uart_startup()
1336 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_startup()
1342 imx_uart_writel(sport, ucr4 & ~UCR4_DREN, UCR4); in imx_uart_startup()
1345 if (!uart_console(port) && imx_uart_dma_init(sport) == 0) in imx_uart_startup()
1348 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_startup()
1352 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_startup()
1354 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_startup()
1356 while (!(imx_uart_readl(sport, UCR2) & UCR2_SRST) && (--i > 0)) in imx_uart_startup()
1362 imx_uart_writel(sport, USR1_RTSD | USR1_DTRD, USR1); in imx_uart_startup()
1363 imx_uart_writel(sport, USR2_ORE, USR2); in imx_uart_startup()
1365 ucr1 = imx_uart_readl(sport, UCR1) & ~UCR1_RRDYEN; in imx_uart_startup()
1367 if (sport->have_rtscts) in imx_uart_startup()
1370 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_startup()
1372 ucr4 = imx_uart_readl(sport, UCR4) & ~UCR4_OREN; in imx_uart_startup()
1373 if (!sport->dma_is_enabled) in imx_uart_startup()
1375 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_startup()
1377 ucr2 = imx_uart_readl(sport, UCR2) & ~UCR2_ATEN; in imx_uart_startup()
1379 if (!sport->have_rtscts) in imx_uart_startup()
1385 if (!imx_uart_is_imx1(sport)) in imx_uart_startup()
1387 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_startup()
1389 if (!imx_uart_is_imx1(sport)) { in imx_uart_startup()
1392 ucr3 = imx_uart_readl(sport, UCR3); in imx_uart_startup()
1396 if (sport->dte_mode) in imx_uart_startup()
1400 imx_uart_writel(sport, ucr3, UCR3); in imx_uart_startup()
1406 imx_uart_enable_ms(&sport->port); in imx_uart_startup()
1409 imx_uart_enable_dma(sport); in imx_uart_startup()
1410 imx_uart_start_rx_dma(sport); in imx_uart_startup()
1412 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_startup()
1414 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_startup()
1416 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_startup()
1418 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_startup()
1421 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_startup()
1428 struct imx_port *sport = (struct imx_port *)port; in imx_uart_shutdown() local
1432 if (sport->dma_is_enabled) { in imx_uart_shutdown()
1433 dmaengine_terminate_sync(sport->dma_chan_tx); in imx_uart_shutdown()
1434 if (sport->dma_is_txing) { in imx_uart_shutdown()
1435 dma_unmap_sg(sport->port.dev, &sport->tx_sgl[0], in imx_uart_shutdown()
1436 sport->dma_tx_nents, DMA_TO_DEVICE); in imx_uart_shutdown()
1437 sport->dma_is_txing = 0; in imx_uart_shutdown()
1439 dmaengine_terminate_sync(sport->dma_chan_rx); in imx_uart_shutdown()
1440 if (sport->dma_is_rxing) { in imx_uart_shutdown()
1441 dma_unmap_sg(sport->port.dev, &sport->rx_sgl, in imx_uart_shutdown()
1443 sport->dma_is_rxing = 0; in imx_uart_shutdown()
1446 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_shutdown()
1449 imx_uart_disable_dma(sport); in imx_uart_shutdown()
1450 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_shutdown()
1451 imx_uart_dma_exit(sport); in imx_uart_shutdown()
1454 mctrl_gpio_disable_ms(sport->gpios); in imx_uart_shutdown()
1456 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_shutdown()
1457 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_shutdown()
1459 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_shutdown()
1461 ucr4 = imx_uart_readl(sport, UCR4); in imx_uart_shutdown()
1463 imx_uart_writel(sport, ucr4, UCR4); in imx_uart_shutdown()
1464 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_shutdown()
1469 del_timer_sync(&sport->timer); in imx_uart_shutdown()
1475 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_shutdown()
1476 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_shutdown()
1479 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_shutdown()
1480 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_shutdown()
1482 clk_disable_unprepare(sport->clk_per); in imx_uart_shutdown()
1483 clk_disable_unprepare(sport->clk_ipg); in imx_uart_shutdown()
1489 struct imx_port *sport = (struct imx_port *)port; in imx_uart_flush_buffer() local
1490 struct scatterlist *sgl = &sport->tx_sgl[0]; in imx_uart_flush_buffer()
1494 if (!sport->dma_chan_tx) in imx_uart_flush_buffer()
1497 sport->tx_bytes = 0; in imx_uart_flush_buffer()
1498 dmaengine_terminate_all(sport->dma_chan_tx); in imx_uart_flush_buffer()
1499 if (sport->dma_is_txing) { in imx_uart_flush_buffer()
1502 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, in imx_uart_flush_buffer()
1504 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_flush_buffer()
1506 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_flush_buffer()
1507 sport->dma_is_txing = 0; in imx_uart_flush_buffer()
1521 ubir = imx_uart_readl(sport, UBIR); in imx_uart_flush_buffer()
1522 ubmr = imx_uart_readl(sport, UBMR); in imx_uart_flush_buffer()
1523 uts = imx_uart_readl(sport, IMX21_UTS); in imx_uart_flush_buffer()
1525 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_flush_buffer()
1527 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_flush_buffer()
1529 while (!(imx_uart_readl(sport, UCR2) & UCR2_SRST) && (--i > 0)) in imx_uart_flush_buffer()
1533 imx_uart_writel(sport, ubir, UBIR); in imx_uart_flush_buffer()
1534 imx_uart_writel(sport, ubmr, UBMR); in imx_uart_flush_buffer()
1535 imx_uart_writel(sport, uts, IMX21_UTS); in imx_uart_flush_buffer()
1542 struct imx_port *sport = (struct imx_port *)port; in imx_uart_set_termios() local
1561 del_timer_sync(&sport->timer); in imx_uart_set_termios()
1569 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_set_termios()
1575 old_ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_set_termios()
1582 if (!sport->have_rtscts) in imx_uart_set_termios()
1592 imx_uart_rts_active(sport, &ucr2); in imx_uart_set_termios()
1594 imx_uart_rts_inactive(sport, &ucr2); in imx_uart_set_termios()
1616 sport->port.read_status_mask = 0; in imx_uart_set_termios()
1618 sport->port.read_status_mask |= (URXD_FRMERR | URXD_PRERR); in imx_uart_set_termios()
1620 sport->port.read_status_mask |= URXD_BRK; in imx_uart_set_termios()
1625 sport->port.ignore_status_mask = 0; in imx_uart_set_termios()
1627 sport->port.ignore_status_mask |= URXD_PRERR | URXD_FRMERR; in imx_uart_set_termios()
1629 sport->port.ignore_status_mask |= URXD_BRK; in imx_uart_set_termios()
1635 sport->port.ignore_status_mask |= URXD_OVRRUN; in imx_uart_set_termios()
1639 sport->port.ignore_status_mask |= URXD_DUMMY_READ; in imx_uart_set_termios()
1647 div = sport->port.uartclk / (baud * 16); in imx_uart_set_termios()
1649 baud = sport->port.uartclk / (quot * 16); in imx_uart_set_termios()
1651 div = sport->port.uartclk / (baud * 16); in imx_uart_set_termios()
1657 rational_best_approximation(16 * div * baud, sport->port.uartclk, in imx_uart_set_termios()
1660 tdiv64 = sport->port.uartclk; in imx_uart_set_termios()
1669 ufcr = imx_uart_readl(sport, UFCR); in imx_uart_set_termios()
1671 imx_uart_writel(sport, ufcr, UFCR); in imx_uart_set_termios()
1682 old_ubir = imx_uart_readl(sport, UBIR); in imx_uart_set_termios()
1683 old_ubmr = imx_uart_readl(sport, UBMR); in imx_uart_set_termios()
1685 imx_uart_writel(sport, num, UBIR); in imx_uart_set_termios()
1686 imx_uart_writel(sport, denom, UBMR); in imx_uart_set_termios()
1689 if (!imx_uart_is_imx1(sport)) in imx_uart_set_termios()
1690 imx_uart_writel(sport, sport->port.uartclk / div / 1000, in imx_uart_set_termios()
1693 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_set_termios()
1695 if (UART_ENABLE_MS(&sport->port, termios->c_cflag)) in imx_uart_set_termios()
1696 imx_uart_enable_ms(&sport->port); in imx_uart_set_termios()
1698 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_set_termios()
1703 struct imx_port *sport = (struct imx_port *)port; in imx_uart_type() local
1705 return sport->port.type == PORT_IMX ? "IMX" : NULL; in imx_uart_type()
1713 struct imx_port *sport = (struct imx_port *)port; in imx_uart_config_port() local
1716 sport->port.type = PORT_IMX; in imx_uart_config_port()
1727 struct imx_port *sport = (struct imx_port *)port; in imx_uart_verify_port() local
1732 if (sport->port.irq != ser->irq) in imx_uart_verify_port()
1736 if (sport->port.uartclk / 16 != ser->baud_base) in imx_uart_verify_port()
1738 if (sport->port.mapbase != (unsigned long)ser->iomem_base) in imx_uart_verify_port()
1740 if (sport->port.iobase != ser->port) in imx_uart_verify_port()
1751 struct imx_port *sport = (struct imx_port *)port; in imx_uart_poll_init() local
1756 retval = clk_prepare_enable(sport->clk_ipg); in imx_uart_poll_init()
1759 retval = clk_prepare_enable(sport->clk_per); in imx_uart_poll_init()
1761 clk_disable_unprepare(sport->clk_ipg); in imx_uart_poll_init()
1763 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT); in imx_uart_poll_init()
1765 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_poll_init()
1774 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_poll_init()
1775 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_poll_init()
1777 if (imx_uart_is_imx1(sport)) in imx_uart_poll_init()
1786 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_poll_init()
1787 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_poll_init()
1790 imx_uart_writel(sport, ucr1 | UCR1_RRDYEN, UCR1); in imx_uart_poll_init()
1791 imx_uart_writel(sport, ucr2 | UCR2_ATEN, UCR2); in imx_uart_poll_init()
1793 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_poll_init()
1800 struct imx_port *sport = (struct imx_port *)port; in imx_uart_poll_get_char() local
1801 if (!(imx_uart_readl(sport, USR2) & USR2_RDR)) in imx_uart_poll_get_char()
1804 return imx_uart_readl(sport, URXD0) & URXD_RX_DATA; in imx_uart_poll_get_char()
1809 struct imx_port *sport = (struct imx_port *)port; in imx_uart_poll_put_char() local
1814 status = imx_uart_readl(sport, USR1); in imx_uart_poll_put_char()
1818 imx_uart_writel(sport, c, URTX0); in imx_uart_poll_put_char()
1822 status = imx_uart_readl(sport, USR2); in imx_uart_poll_put_char()
1831 struct imx_port *sport = (struct imx_port *)port; in imx_uart_rs485_config() local
1839 if (!sport->have_rtscts && !sport->have_rtsgpio) in imx_uart_rs485_config()
1844 if (sport->have_rtscts && !sport->have_rtsgpio && in imx_uart_rs485_config()
1849 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_rs485_config()
1851 imx_uart_rts_active(sport, &ucr2); in imx_uart_rs485_config()
1853 imx_uart_rts_inactive(sport, &ucr2); in imx_uart_rs485_config()
1854 imx_uart_writel(sport, ucr2, UCR2); in imx_uart_rs485_config()
1895 struct imx_port *sport = (struct imx_port *)port; in imx_uart_console_putchar() local
1897 while (imx_uart_readl(sport, imx_uart_uts_reg(sport)) & UTS_TXFULL) in imx_uart_console_putchar()
1900 imx_uart_writel(sport, ch, URTX0); in imx_uart_console_putchar()
1909 struct imx_port *sport = imx_uart_ports[co->index]; in imx_uart_console_write() local
1916 retval = clk_enable(sport->clk_per); in imx_uart_console_write()
1919 retval = clk_enable(sport->clk_ipg); in imx_uart_console_write()
1921 clk_disable(sport->clk_per); in imx_uart_console_write()
1925 if (sport->port.sysrq) in imx_uart_console_write()
1928 locked = spin_trylock_irqsave(&sport->port.lock, flags); in imx_uart_console_write()
1930 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_console_write()
1935 imx_uart_ucrs_save(sport, &old_ucr); in imx_uart_console_write()
1938 if (imx_uart_is_imx1(sport)) in imx_uart_console_write()
1943 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_console_write()
1945 imx_uart_writel(sport, old_ucr.ucr2 | UCR2_TXEN, UCR2); in imx_uart_console_write()
1947 uart_console_write(&sport->port, s, count, imx_uart_console_putchar); in imx_uart_console_write()
1953 while (!(imx_uart_readl(sport, USR2) & USR2_TXDC)); in imx_uart_console_write()
1955 imx_uart_ucrs_restore(sport, &old_ucr); in imx_uart_console_write()
1958 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_console_write()
1960 clk_disable(sport->clk_ipg); in imx_uart_console_write()
1961 clk_disable(sport->clk_per); in imx_uart_console_write()
1969 imx_uart_console_get_options(struct imx_port *sport, int *baud, in imx_uart_console_get_options() argument
1973 if (imx_uart_readl(sport, UCR1) & UCR1_UARTEN) { in imx_uart_console_get_options()
1979 ucr2 = imx_uart_readl(sport, UCR2); in imx_uart_console_get_options()
1994 ubir = imx_uart_readl(sport, UBIR) & 0xffff; in imx_uart_console_get_options()
1995 ubmr = imx_uart_readl(sport, UBMR) & 0xffff; in imx_uart_console_get_options()
1997 ucfr_rfdiv = (imx_uart_readl(sport, UFCR) & UFCR_RFDIV) >> 7; in imx_uart_console_get_options()
2003 uartclk = clk_get_rate(sport->clk_per); in imx_uart_console_get_options()
2022 dev_info(sport->port.dev, "Console IMX rounded baud rate from %d to %d\n", in imx_uart_console_get_options()
2030 struct imx_port *sport; in imx_uart_console_setup() local
2044 sport = imx_uart_ports[co->index]; in imx_uart_console_setup()
2045 if (sport == NULL) in imx_uart_console_setup()
2049 retval = clk_prepare_enable(sport->clk_ipg); in imx_uart_console_setup()
2056 imx_uart_console_get_options(sport, &baud, &parity, &bits); in imx_uart_console_setup()
2058 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT); in imx_uart_console_setup()
2060 retval = uart_set_options(&sport->port, co, baud, parity, bits, flow); in imx_uart_console_setup()
2062 clk_disable(sport->clk_ipg); in imx_uart_console_setup()
2064 clk_unprepare(sport->clk_ipg); in imx_uart_console_setup()
2068 retval = clk_prepare(sport->clk_per); in imx_uart_console_setup()
2070 clk_unprepare(sport->clk_ipg); in imx_uart_console_setup()
2092 struct imx_port *sport = (struct imx_port *)port; in imx_uart_console_early_putchar() local
2094 while (imx_uart_readl(sport, IMX21_UTS) & UTS_TXFULL) in imx_uart_console_early_putchar()
2097 imx_uart_writel(sport, ch, URTX0); in imx_uart_console_early_putchar()
2141 static int imx_uart_probe_dt(struct imx_port *sport, in imx_uart_probe_dt() argument
2147 sport->devdata = of_device_get_match_data(&pdev->dev); in imx_uart_probe_dt()
2148 if (!sport->devdata) in imx_uart_probe_dt()
2157 sport->port.line = ret; in imx_uart_probe_dt()
2161 sport->have_rtscts = 1; in imx_uart_probe_dt()
2164 sport->dte_mode = 1; in imx_uart_probe_dt()
2167 sport->have_rtsgpio = 1; in imx_uart_probe_dt()
2172 static inline int imx_uart_probe_dt(struct imx_port *sport, in imx_uart_probe_dt() argument
2179 static void imx_uart_probe_pdata(struct imx_port *sport, in imx_uart_probe_pdata() argument
2184 sport->port.line = pdev->id; in imx_uart_probe_pdata()
2185 sport->devdata = (struct imx_uart_data *) pdev->id_entry->driver_data; in imx_uart_probe_pdata()
2191 sport->have_rtscts = 1; in imx_uart_probe_pdata()
2196 struct imx_port *sport; in imx_uart_probe() local
2203 sport = devm_kzalloc(&pdev->dev, sizeof(*sport), GFP_KERNEL); in imx_uart_probe()
2204 if (!sport) in imx_uart_probe()
2207 ret = imx_uart_probe_dt(sport, pdev); in imx_uart_probe()
2209 imx_uart_probe_pdata(sport, pdev); in imx_uart_probe()
2213 if (sport->port.line >= ARRAY_SIZE(imx_uart_ports)) { in imx_uart_probe()
2215 sport->port.line); in imx_uart_probe()
2228 sport->port.dev = &pdev->dev; in imx_uart_probe()
2229 sport->port.mapbase = res->start; in imx_uart_probe()
2230 sport->port.membase = base; in imx_uart_probe()
2231 sport->port.type = PORT_IMX, in imx_uart_probe()
2232 sport->port.iotype = UPIO_MEM; in imx_uart_probe()
2233 sport->port.irq = rxirq; in imx_uart_probe()
2234 sport->port.fifosize = 32; in imx_uart_probe()
2235 sport->port.ops = &imx_uart_pops; in imx_uart_probe()
2236 sport->port.rs485_config = imx_uart_rs485_config; in imx_uart_probe()
2237 sport->port.flags = UPF_BOOT_AUTOCONF; in imx_uart_probe()
2238 timer_setup(&sport->timer, imx_uart_timeout, 0); in imx_uart_probe()
2240 sport->gpios = mctrl_gpio_init(&sport->port, 0); in imx_uart_probe()
2241 if (IS_ERR(sport->gpios)) in imx_uart_probe()
2242 return PTR_ERR(sport->gpios); in imx_uart_probe()
2244 sport->clk_ipg = devm_clk_get(&pdev->dev, "ipg"); in imx_uart_probe()
2245 if (IS_ERR(sport->clk_ipg)) { in imx_uart_probe()
2246 ret = PTR_ERR(sport->clk_ipg); in imx_uart_probe()
2251 sport->clk_per = devm_clk_get(&pdev->dev, "per"); in imx_uart_probe()
2252 if (IS_ERR(sport->clk_per)) { in imx_uart_probe()
2253 ret = PTR_ERR(sport->clk_per); in imx_uart_probe()
2258 sport->port.uartclk = clk_get_rate(sport->clk_per); in imx_uart_probe()
2261 ret = clk_prepare_enable(sport->clk_ipg); in imx_uart_probe()
2268 sport->ucr1 = readl(sport->port.membase + UCR1); in imx_uart_probe()
2269 sport->ucr2 = readl(sport->port.membase + UCR2); in imx_uart_probe()
2270 sport->ucr3 = readl(sport->port.membase + UCR3); in imx_uart_probe()
2271 sport->ucr4 = readl(sport->port.membase + UCR4); in imx_uart_probe()
2272 sport->ufcr = readl(sport->port.membase + UFCR); in imx_uart_probe()
2274 uart_get_rs485_mode(&pdev->dev, &sport->port.rs485); in imx_uart_probe()
2276 if (sport->port.rs485.flags & SER_RS485_ENABLED && in imx_uart_probe()
2277 (!sport->have_rtscts && !sport->have_rtsgpio)) in imx_uart_probe()
2285 if (sport->port.rs485.flags & SER_RS485_ENABLED && in imx_uart_probe()
2286 sport->have_rtscts && !sport->have_rtsgpio && in imx_uart_probe()
2287 (!(sport->port.rs485.flags & SER_RS485_RTS_ON_SEND) && in imx_uart_probe()
2288 !(sport->port.rs485.flags & SER_RS485_RX_DURING_TX))) in imx_uart_probe()
2292 imx_uart_rs485_config(&sport->port, &sport->port.rs485); in imx_uart_probe()
2295 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_probe()
2298 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_probe()
2300 if (!imx_uart_is_imx1(sport) && sport->dte_mode) { in imx_uart_probe()
2307 u32 ufcr = imx_uart_readl(sport, UFCR); in imx_uart_probe()
2309 imx_uart_writel(sport, ufcr | UFCR_DCEDTE, UFCR); in imx_uart_probe()
2316 imx_uart_writel(sport, in imx_uart_probe()
2322 u32 ufcr = imx_uart_readl(sport, UFCR); in imx_uart_probe()
2324 imx_uart_writel(sport, ufcr & ~UFCR_DCEDTE, UFCR); in imx_uart_probe()
2326 if (!imx_uart_is_imx1(sport)) in imx_uart_probe()
2328 imx_uart_writel(sport, ucr3, UCR3); in imx_uart_probe()
2331 clk_disable_unprepare(sport->clk_ipg); in imx_uart_probe()
2339 dev_name(&pdev->dev), sport); in imx_uart_probe()
2347 dev_name(&pdev->dev), sport); in imx_uart_probe()
2355 dev_name(&pdev->dev), sport); in imx_uart_probe()
2363 dev_name(&pdev->dev), sport); in imx_uart_probe()
2370 imx_uart_ports[sport->port.line] = sport; in imx_uart_probe()
2372 platform_set_drvdata(pdev, sport); in imx_uart_probe()
2374 return uart_add_one_port(&imx_uart_uart_driver, &sport->port); in imx_uart_probe()
2379 struct imx_port *sport = platform_get_drvdata(pdev); in imx_uart_remove() local
2381 return uart_remove_one_port(&imx_uart_uart_driver, &sport->port); in imx_uart_remove()
2384 static void imx_uart_restore_context(struct imx_port *sport) in imx_uart_restore_context() argument
2388 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_restore_context()
2389 if (!sport->context_saved) { in imx_uart_restore_context()
2390 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_restore_context()
2394 imx_uart_writel(sport, sport->saved_reg[4], UFCR); in imx_uart_restore_context()
2395 imx_uart_writel(sport, sport->saved_reg[5], UESC); in imx_uart_restore_context()
2396 imx_uart_writel(sport, sport->saved_reg[6], UTIM); in imx_uart_restore_context()
2397 imx_uart_writel(sport, sport->saved_reg[7], UBIR); in imx_uart_restore_context()
2398 imx_uart_writel(sport, sport->saved_reg[8], UBMR); in imx_uart_restore_context()
2399 imx_uart_writel(sport, sport->saved_reg[9], IMX21_UTS); in imx_uart_restore_context()
2400 imx_uart_writel(sport, sport->saved_reg[0], UCR1); in imx_uart_restore_context()
2401 imx_uart_writel(sport, sport->saved_reg[1] | UCR2_SRST, UCR2); in imx_uart_restore_context()
2402 imx_uart_writel(sport, sport->saved_reg[2], UCR3); in imx_uart_restore_context()
2403 imx_uart_writel(sport, sport->saved_reg[3], UCR4); in imx_uart_restore_context()
2404 sport->context_saved = false; in imx_uart_restore_context()
2405 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_restore_context()
2408 static void imx_uart_save_context(struct imx_port *sport) in imx_uart_save_context() argument
2413 spin_lock_irqsave(&sport->port.lock, flags); in imx_uart_save_context()
2414 sport->saved_reg[0] = imx_uart_readl(sport, UCR1); in imx_uart_save_context()
2415 sport->saved_reg[1] = imx_uart_readl(sport, UCR2); in imx_uart_save_context()
2416 sport->saved_reg[2] = imx_uart_readl(sport, UCR3); in imx_uart_save_context()
2417 sport->saved_reg[3] = imx_uart_readl(sport, UCR4); in imx_uart_save_context()
2418 sport->saved_reg[4] = imx_uart_readl(sport, UFCR); in imx_uart_save_context()
2419 sport->saved_reg[5] = imx_uart_readl(sport, UESC); in imx_uart_save_context()
2420 sport->saved_reg[6] = imx_uart_readl(sport, UTIM); in imx_uart_save_context()
2421 sport->saved_reg[7] = imx_uart_readl(sport, UBIR); in imx_uart_save_context()
2422 sport->saved_reg[8] = imx_uart_readl(sport, UBMR); in imx_uart_save_context()
2423 sport->saved_reg[9] = imx_uart_readl(sport, IMX21_UTS); in imx_uart_save_context()
2424 sport->context_saved = true; in imx_uart_save_context()
2425 spin_unlock_irqrestore(&sport->port.lock, flags); in imx_uart_save_context()
2428 static void imx_uart_enable_wakeup(struct imx_port *sport, bool on) in imx_uart_enable_wakeup() argument
2432 ucr3 = imx_uart_readl(sport, UCR3); in imx_uart_enable_wakeup()
2434 imx_uart_writel(sport, USR1_AWAKE, USR1); in imx_uart_enable_wakeup()
2439 imx_uart_writel(sport, ucr3, UCR3); in imx_uart_enable_wakeup()
2441 if (sport->have_rtscts) { in imx_uart_enable_wakeup()
2442 u32 ucr1 = imx_uart_readl(sport, UCR1); in imx_uart_enable_wakeup()
2447 imx_uart_writel(sport, ucr1, UCR1); in imx_uart_enable_wakeup()
2453 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_suspend_noirq() local
2455 imx_uart_save_context(sport); in imx_uart_suspend_noirq()
2457 clk_disable(sport->clk_ipg); in imx_uart_suspend_noirq()
2466 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_resume_noirq() local
2471 ret = clk_enable(sport->clk_ipg); in imx_uart_resume_noirq()
2475 imx_uart_restore_context(sport); in imx_uart_resume_noirq()
2482 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_suspend() local
2485 uart_suspend_port(&imx_uart_uart_driver, &sport->port); in imx_uart_suspend()
2486 disable_irq(sport->port.irq); in imx_uart_suspend()
2488 ret = clk_prepare_enable(sport->clk_ipg); in imx_uart_suspend()
2493 imx_uart_enable_wakeup(sport, true); in imx_uart_suspend()
2500 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_resume() local
2503 imx_uart_enable_wakeup(sport, false); in imx_uart_resume()
2505 uart_resume_port(&imx_uart_uart_driver, &sport->port); in imx_uart_resume()
2506 enable_irq(sport->port.irq); in imx_uart_resume()
2508 clk_disable_unprepare(sport->clk_ipg); in imx_uart_resume()
2515 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_freeze() local
2517 uart_suspend_port(&imx_uart_uart_driver, &sport->port); in imx_uart_freeze()
2519 return clk_prepare_enable(sport->clk_ipg); in imx_uart_freeze()
2524 struct imx_port *sport = dev_get_drvdata(dev); in imx_uart_thaw() local
2526 uart_resume_port(&imx_uart_uart_driver, &sport->port); in imx_uart_thaw()
2528 clk_disable_unprepare(sport->clk_ipg); in imx_uart_thaw()