Lines Matching +full:uart +full:- +full:fifosize
1 // SPDX-License-Identifier: GPL-2.0
12 #include <linux/dma-mapping.h>
171 struct uart_port uart; member
183 #define UART_TO_MSM(uart_port) container_of(uart_port, struct msm_port, uart)
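The UART_TO_MSM() helper above relies on the kernel's container_of() to step from the generic uart_port back to the driver-private structure that embeds it (the member at line 171). A minimal sketch of that idiom, using an illustrative stand-in struct rather than the driver's full struct msm_port:

#include <linux/kernel.h>		/* container_of() */
#include <linux/serial_core.h>		/* struct uart_port */

struct msm_port_example {		/* hypothetical stand-in for struct msm_port */
	struct uart_port uart;		/* embedded generic port, as at line 171 */
	unsigned int imr;		/* cached interrupt mask, illustrative */
};

static struct msm_port_example *example_to_msm(struct uart_port *port)
{
	/* Recover the containing structure from a pointer to its member. */
	return container_of(port, struct msm_port_example, uart);
}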
188 writel_relaxed(val, port->membase + off); in msm_write()
194 return readl_relaxed(port->membase + off); in msm_read()
206 port->uartclk = 1843200; in msm_serial_set_mnd_regs_tcxo()
218 port->uartclk = 1843200; in msm_serial_set_mnd_regs_tcxoby4()
229 if (msm_port->is_uartdm) in msm_serial_set_mnd_regs()
232 if (port->uartclk == 19200000) in msm_serial_set_mnd_regs()
234 else if (port->uartclk == 4800000) in msm_serial_set_mnd_regs()
243 struct device *dev = port->dev; in msm_stop_dma()
247 mapped = dma->count; in msm_stop_dma()
248 dma->count = 0; in msm_stop_dma()
250 dmaengine_terminate_all(dma->chan); in msm_stop_dma()
260 val &= ~dma->enable_bit; in msm_stop_dma()
264 dma_unmap_single(dev, dma->phys, mapped, dma->dir); in msm_stop_dma()
271 dma = &msm_port->tx_dma; in msm_release_dma()
272 if (dma->chan) { in msm_release_dma()
273 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
274 dma_release_channel(dma->chan); in msm_release_dma()
279 dma = &msm_port->rx_dma; in msm_release_dma()
280 if (dma->chan) { in msm_release_dma()
281 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
282 dma_release_channel(dma->chan); in msm_release_dma()
283 kfree(dma->virt); in msm_release_dma()
291 struct device *dev = msm_port->uart.dev; in msm_request_tx_dma()
297 dma = &msm_port->tx_dma; in msm_request_tx_dma()
300 dma->chan = dma_request_chan(dev, "tx"); in msm_request_tx_dma()
301 if (IS_ERR(dma->chan)) in msm_request_tx_dma()
304 of_property_read_u32(dev->of_node, "qcom,tx-crci", &crci); in msm_request_tx_dma()
313 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_tx_dma()
317 dma->dir = DMA_TO_DEVICE; in msm_request_tx_dma()
319 if (msm_port->is_uartdm < UARTDM_1P4) in msm_request_tx_dma()
320 dma->enable_bit = UARTDM_DMEN_TX_DM_ENABLE; in msm_request_tx_dma()
322 dma->enable_bit = UARTDM_DMEN_TX_BAM_ENABLE; in msm_request_tx_dma()
327 dma_release_channel(dma->chan); in msm_request_tx_dma()
334 struct device *dev = msm_port->uart.dev; in msm_request_rx_dma()
340 dma = &msm_port->rx_dma; in msm_request_rx_dma()
343 dma->chan = dma_request_chan(dev, "rx"); in msm_request_rx_dma()
344 if (IS_ERR(dma->chan)) in msm_request_rx_dma()
347 of_property_read_u32(dev->of_node, "qcom,rx-crci", &crci); in msm_request_rx_dma()
349 dma->virt = kzalloc(UARTDM_RX_SIZE, GFP_KERNEL); in msm_request_rx_dma()
350 if (!dma->virt) in msm_request_rx_dma()
360 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_rx_dma()
364 dma->dir = DMA_FROM_DEVICE; in msm_request_rx_dma()
366 if (msm_port->is_uartdm < UARTDM_1P4) in msm_request_rx_dma()
367 dma->enable_bit = UARTDM_DMEN_RX_DM_ENABLE; in msm_request_rx_dma()
369 dma->enable_bit = UARTDM_DMEN_RX_BAM_ENABLE; in msm_request_rx_dma()
373 kfree(dma->virt); in msm_request_rx_dma()
375 dma_release_channel(dma->chan); in msm_request_rx_dma()
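A hedged sketch (not the driver's exact code) of the channel request/configuration pattern that msm_request_tx_dma()/msm_request_rx_dma() follow above: look up a named channel, describe the peripheral side with a dma_slave_config, and release the channel if configuration fails. The example_ name and the burst size are illustrative.

#include <linux/dmaengine.h>
#include <linux/err.h>

static int example_request_tx_chan(struct device *dev, dma_addr_t fifo_phys,
				   struct dma_chan **out)
{
	struct dma_slave_config conf = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_phys,	/* device TX FIFO address */
		.dst_maxburst	= 4,		/* illustrative burst size */
	};
	struct dma_chan *chan;
	int ret;

	chan = dma_request_chan(dev, "tx");	/* named channel, as above */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	ret = dmaengine_slave_config(chan, &conf);
	if (ret) {
		dma_release_channel(chan);
		return ret;
	}

	*out = chan;
	return 0;
}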
388 if (!timeout--) in msm_wait_for_xmitr()
398 msm_port->imr &= ~UART_IMR_TXLEV; in msm_stop_tx()
399 msm_write(port, msm_port->imr, UART_IMR); in msm_stop_tx()
405 struct msm_dma *dma = &msm_port->tx_dma; in msm_start_tx()
408 if (dma->count) in msm_start_tx()
411 msm_port->imr |= UART_IMR_TXLEV; in msm_start_tx()
412 msm_write(port, msm_port->imr, UART_IMR); in msm_start_tx()
425 struct uart_port *port = &msm_port->uart; in msm_complete_tx_dma()
426 struct circ_buf *xmit = &port->state->xmit; in msm_complete_tx_dma()
427 struct msm_dma *dma = &msm_port->tx_dma; in msm_complete_tx_dma()
434 spin_lock_irqsave(&port->lock, flags); in msm_complete_tx_dma()
437 if (!dma->count) in msm_complete_tx_dma()
440 status = dmaengine_tx_status(dma->chan, dma->cookie, &state); in msm_complete_tx_dma()
442 dma_unmap_single(port->dev, dma->phys, dma->count, dma->dir); in msm_complete_tx_dma()
445 val &= ~dma->enable_bit; in msm_complete_tx_dma()
448 if (msm_port->is_uartdm > UARTDM_1P3) { in msm_complete_tx_dma()
453 count = dma->count - state.residue; in msm_complete_tx_dma()
454 port->icount.tx += count; in msm_complete_tx_dma()
455 dma->count = 0; in msm_complete_tx_dma()
457 xmit->tail += count; in msm_complete_tx_dma()
458 xmit->tail &= UART_XMIT_SIZE - 1; in msm_complete_tx_dma()
461 msm_port->imr |= UART_IMR_TXLEV; in msm_complete_tx_dma()
462 msm_write(port, msm_port->imr, UART_IMR); in msm_complete_tx_dma()
469 spin_unlock_irqrestore(&port->lock, flags); in msm_complete_tx_dma()
474 struct circ_buf *xmit = &msm_port->uart.state->xmit; in msm_handle_tx_dma()
475 struct uart_port *port = &msm_port->uart; in msm_handle_tx_dma()
476 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx_dma()
481 cpu_addr = &xmit->buf[xmit->tail]; in msm_handle_tx_dma()
483 dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir); in msm_handle_tx_dma()
484 ret = dma_mapping_error(port->dev, dma->phys); in msm_handle_tx_dma()
488 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_handle_tx_dma()
492 if (!dma->desc) { in msm_handle_tx_dma()
493 ret = -EIO; in msm_handle_tx_dma()
497 dma->desc->callback = msm_complete_tx_dma; in msm_handle_tx_dma()
498 dma->desc->callback_param = msm_port; in msm_handle_tx_dma()
500 dma->cookie = dmaengine_submit(dma->desc); in msm_handle_tx_dma()
501 ret = dma_submit_error(dma->cookie); in msm_handle_tx_dma()
509 msm_port->imr &= ~UART_IMR_TXLEV; in msm_handle_tx_dma()
510 msm_write(port, msm_port->imr, UART_IMR); in msm_handle_tx_dma()
512 dma->count = count; in msm_handle_tx_dma()
515 val |= dma->enable_bit; in msm_handle_tx_dma()
517 if (msm_port->is_uartdm < UARTDM_1P4) in msm_handle_tx_dma()
522 if (msm_port->is_uartdm > UARTDM_1P3) in msm_handle_tx_dma()
525 dma_async_issue_pending(dma->chan); in msm_handle_tx_dma()
528 dma_unmap_single(port->dev, dma->phys, count, dma->dir); in msm_handle_tx_dma()
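The TX path above follows the standard dmaengine slave sequence: map the buffer chunk, prepare a single-buffer slave descriptor, hook a completion callback, submit, then issue. A hedged, self-contained sketch of that sequence, with the example_ name being illustrative:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int example_submit_tx(struct device *dev, struct dma_chan *chan,
			     void *buf, size_t len,
			     dma_async_tx_callback done, void *arg)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	dma_addr_t phys;

	phys = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, phys))
		return -ENOMEM;

	desc = dmaengine_prep_slave_single(chan, phys, len, DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT);
	if (!desc)
		goto unmap;

	desc->callback = done;			/* e.g. a TX-complete handler */
	desc->callback_param = arg;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		goto unmap;

	dma_async_issue_pending(chan);		/* start (or queue) the transfer */
	return 0;

unmap:
	dma_unmap_single(dev, phys, len, DMA_TO_DEVICE);
	return -EIO;
}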
535 struct uart_port *port = &msm_port->uart; in msm_complete_rx_dma()
536 struct tty_port *tport = &port->state->port; in msm_complete_rx_dma()
537 struct msm_dma *dma = &msm_port->rx_dma; in msm_complete_rx_dma()
542 spin_lock_irqsave(&port->lock, flags); in msm_complete_rx_dma()
545 if (!dma->count) in msm_complete_rx_dma()
549 val &= ~dma->enable_bit; in msm_complete_rx_dma()
553 port->icount.overrun++; in msm_complete_rx_dma()
560 port->icount.rx += count; in msm_complete_rx_dma()
562 dma->count = 0; in msm_complete_rx_dma()
564 dma_unmap_single(port->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_complete_rx_dma()
569 if (msm_port->break_detected && dma->virt[i] == 0) { in msm_complete_rx_dma()
570 port->icount.brk++; in msm_complete_rx_dma()
572 msm_port->break_detected = false; in msm_complete_rx_dma()
577 if (!(port->read_status_mask & UART_SR_RX_BREAK)) in msm_complete_rx_dma()
580 spin_unlock_irqrestore(&port->lock, flags); in msm_complete_rx_dma()
581 sysrq = uart_handle_sysrq_char(port, dma->virt[i]); in msm_complete_rx_dma()
582 spin_lock_irqsave(&port->lock, flags); in msm_complete_rx_dma()
584 tty_insert_flip_char(tport, dma->virt[i], flag); in msm_complete_rx_dma()
589 spin_unlock_irqrestore(&port->lock, flags); in msm_complete_rx_dma()
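Received bytes leave the completion handler through the TTY flip buffer, as in msm_complete_rx_dma() above. A minimal hedged sketch of that hand-off (the example_ name is illustrative):

#include <linux/tty.h>
#include <linux/tty_flip.h>

static void example_push_rx(struct tty_port *tport, const unsigned char *buf,
			    size_t count)
{
	size_t i;

	/* Queue each byte with a NORMAL flag (break/error bytes would differ). */
	for (i = 0; i < count; i++)
		tty_insert_flip_char(tport, buf[i], TTY_NORMAL);

	/* Hand the accumulated bytes to the line discipline. */
	tty_flip_buffer_push(tport);
}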
597 struct msm_dma *dma = &msm_port->rx_dma; in msm_start_rx_dma()
598 struct uart_port *uart = &msm_port->uart; in msm_start_rx_dma() local
602 if (!dma->chan) in msm_start_rx_dma()
605 dma->phys = dma_map_single(uart->dev, dma->virt, in msm_start_rx_dma()
606 UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
607 ret = dma_mapping_error(uart->dev, dma->phys); in msm_start_rx_dma()
611 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_start_rx_dma()
614 if (!dma->desc) in msm_start_rx_dma()
617 dma->desc->callback = msm_complete_rx_dma; in msm_start_rx_dma()
618 dma->desc->callback_param = msm_port; in msm_start_rx_dma()
620 dma->cookie = dmaengine_submit(dma->desc); in msm_start_rx_dma()
621 ret = dma_submit_error(dma->cookie); in msm_start_rx_dma()
625 * Using DMA for FIFO off-load, no need for "Rx FIFO over in msm_start_rx_dma()
628 msm_port->imr &= ~(UART_IMR_RXLEV | UART_IMR_RXSTALE); in msm_start_rx_dma()
634 if (msm_port->is_uartdm < UARTDM_1P4) in msm_start_rx_dma()
635 msm_port->imr |= UART_IMR_RXSTALE; in msm_start_rx_dma()
637 msm_write(uart, msm_port->imr, UART_IMR); in msm_start_rx_dma()
639 dma->count = UARTDM_RX_SIZE; in msm_start_rx_dma()
641 dma_async_issue_pending(dma->chan); in msm_start_rx_dma()
643 msm_write(uart, UART_CR_CMD_RESET_STALE_INT, UART_CR); in msm_start_rx_dma()
644 msm_write(uart, UART_CR_CMD_STALE_EVENT_ENABLE, UART_CR); in msm_start_rx_dma()
646 val = msm_read(uart, UARTDM_DMEN); in msm_start_rx_dma()
647 val |= dma->enable_bit; in msm_start_rx_dma()
649 if (msm_port->is_uartdm < UARTDM_1P4) in msm_start_rx_dma()
650 msm_write(uart, val, UARTDM_DMEN); in msm_start_rx_dma()
652 msm_write(uart, UARTDM_RX_SIZE, UARTDM_DMRX); in msm_start_rx_dma()
654 if (msm_port->is_uartdm > UARTDM_1P3) in msm_start_rx_dma()
655 msm_write(uart, val, UARTDM_DMEN); in msm_start_rx_dma()
659 dma_unmap_single(uart->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
666 msm_write(uart, UART_CR_CMD_RESET_RX, UART_CR); in msm_start_rx_dma()
667 msm_write(uart, UART_CR_RX_ENABLE, UART_CR); in msm_start_rx_dma()
669 msm_write(uart, UART_CR_CMD_RESET_STALE_INT, UART_CR); in msm_start_rx_dma()
670 msm_write(uart, 0xFFFFFF, UARTDM_DMRX); in msm_start_rx_dma()
671 msm_write(uart, UART_CR_CMD_STALE_EVENT_ENABLE, UART_CR); in msm_start_rx_dma()
673 /* Re-enable RX interrupts */ in msm_start_rx_dma()
674 msm_port->imr |= (UART_IMR_RXLEV | UART_IMR_RXSTALE); in msm_start_rx_dma()
675 msm_write(uart, msm_port->imr, UART_IMR); in msm_start_rx_dma()
681 struct msm_dma *dma = &msm_port->rx_dma; in msm_stop_rx()
683 msm_port->imr &= ~(UART_IMR_RXLEV | UART_IMR_RXSTALE); in msm_stop_rx()
684 msm_write(port, msm_port->imr, UART_IMR); in msm_stop_rx()
686 if (dma->chan) in msm_stop_rx()
694 msm_port->imr |= UART_IMR_DELTA_CTS; in msm_enable_ms()
695 msm_write(port, msm_port->imr, UART_IMR); in msm_enable_ms()
699 __must_hold(&port->lock) in msm_handle_rx_dm()
701 struct tty_port *tport = &port->state->port; in msm_handle_rx_dm()
707 port->icount.overrun++; in msm_handle_rx_dm()
713 count = msm_read(port, UARTDM_RX_TOTAL_SNAP) - in msm_handle_rx_dm()
714 msm_port->old_snap_state; in msm_handle_rx_dm()
715 msm_port->old_snap_state = 0; in msm_handle_rx_dm()
718 msm_port->old_snap_state += count; in msm_handle_rx_dm()
723 port->icount.rx += count; in msm_handle_rx_dm()
731 msm_port->old_snap_state -= count; in msm_handle_rx_dm()
735 ioread32_rep(port->membase + UARTDM_RF, buf, 1); in msm_handle_rx_dm()
741 if (msm_port->break_detected && buf[i] == 0) { in msm_handle_rx_dm()
742 port->icount.brk++; in msm_handle_rx_dm()
744 msm_port->break_detected = false; in msm_handle_rx_dm()
749 if (!(port->read_status_mask & UART_SR_RX_BREAK)) in msm_handle_rx_dm()
752 spin_unlock(&port->lock); in msm_handle_rx_dm()
754 spin_lock(&port->lock); in msm_handle_rx_dm()
758 count -= r_count; in msm_handle_rx_dm()
761 spin_unlock(&port->lock); in msm_handle_rx_dm()
763 spin_lock(&port->lock); in msm_handle_rx_dm()
775 __must_hold(&port->lock) in msm_handle_rx()
777 struct tty_port *tport = &port->state->port; in msm_handle_rx()
785 port->icount.overrun++; in msm_handle_rx()
799 port->icount.brk++; in msm_handle_rx()
803 port->icount.frame++; in msm_handle_rx()
805 port->icount.rx++; in msm_handle_rx()
809 sr &= port->read_status_mask; in msm_handle_rx()
816 spin_unlock(&port->lock); in msm_handle_rx()
818 spin_lock(&port->lock); in msm_handle_rx()
823 spin_unlock(&port->lock); in msm_handle_rx()
825 spin_lock(&port->lock); in msm_handle_rx()
830 struct circ_buf *xmit = &port->state->xmit; in msm_handle_tx_pio()
836 if (msm_port->is_uartdm) in msm_handle_tx_pio()
837 tf = port->membase + UARTDM_TF; in msm_handle_tx_pio()
839 tf = port->membase + UART_TF; in msm_handle_tx_pio()
841 if (tx_count && msm_port->is_uartdm) in msm_handle_tx_pio()
851 if (msm_port->is_uartdm) in msm_handle_tx_pio()
852 num_chars = min(tx_count - tf_pointer, in msm_handle_tx_pio()
858 buf[i] = xmit->buf[xmit->tail + i]; in msm_handle_tx_pio()
859 port->icount.tx++; in msm_handle_tx_pio()
863 xmit->tail = (xmit->tail + num_chars) & (UART_XMIT_SIZE - 1); in msm_handle_tx_pio()
878 struct circ_buf *xmit = &msm_port->uart.state->xmit; in msm_handle_tx()
879 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx()
885 if (port->x_char) { in msm_handle_tx()
886 if (msm_port->is_uartdm) in msm_handle_tx()
887 tf = port->membase + UARTDM_TF; in msm_handle_tx()
889 tf = port->membase + UART_TF; in msm_handle_tx()
891 buf[0] = port->x_char; in msm_handle_tx()
893 if (msm_port->is_uartdm) in msm_handle_tx()
897 port->icount.tx++; in msm_handle_tx()
898 port->x_char = 0; in msm_handle_tx()
907 pio_count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); in msm_handle_tx()
908 dma_count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); in msm_handle_tx()
911 if (msm_port->is_uartdm > UARTDM_1P3) { in msm_handle_tx()
919 if (pio_count > port->fifosize) in msm_handle_tx()
920 pio_count = port->fifosize; in msm_handle_tx()
922 if (!dma->chan || dma_count < dma_min) in msm_handle_tx()
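msm_handle_tx() above sizes both the PIO and DMA paths with CIRC_CNT_TO_END(), i.e. only the bytes that are contiguous up to the end of the circular buffer, so a wrapped buffer goes out in two pieces. A worked example with made-up indices:

#include <linux/circ_buf.h>

#define EXAMPLE_SIZE 16			/* illustrative power-of-two size */

static void example_circ_counts(void)
{
	int head = 3, tail = 12;	/* 7 bytes pending, wrapping at 16 */

	/* Total pending bytes: (head - tail) & (size - 1) == 7 */
	int total = CIRC_CNT(head, tail, EXAMPLE_SIZE);

	/* Contiguous bytes before the wrap: size - tail == 4 */
	int contiguous = CIRC_CNT_TO_END(head, tail, EXAMPLE_SIZE);

	(void)total;
	(void)contiguous;
}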
934 port->icount.cts++; in msm_handle_delta_cts()
935 wake_up_interruptible(&port->state->port.delta_msr_wait); in msm_handle_delta_cts()
942 struct msm_dma *dma = &msm_port->rx_dma; in msm_uart_irq()
947 spin_lock_irqsave(&port->lock, flags); in msm_uart_irq()
952 msm_port->break_detected = true; in msm_uart_irq()
957 if (dma->count) { in msm_uart_irq()
966 dmaengine_terminate_all(dma->chan); in msm_uart_irq()
967 } else if (msm_port->is_uartdm) { in msm_uart_irq()
978 msm_write(port, msm_port->imr, UART_IMR); /* restore interrupt */ in msm_uart_irq()
979 spin_unlock_irqrestore(&port->lock, flags); in msm_uart_irq()
1011 if (msm_port->is_uartdm) in msm_reset()
1073 target = clk_round_rate(msm_port->clk, 16 * baud); in msm_find_best_baud()
1079 if (entry->divisor <= divisor) { in msm_find_best_baud()
1080 result = target / entry->divisor / 16; in msm_find_best_baud()
1081 diff = abs(result - baud); in msm_find_best_baud()
1092 } else if (entry->divisor > divisor) { in msm_find_best_baud()
1094 target = clk_round_rate(msm_port->clk, old + 1); in msm_find_best_baud()
1123 spin_unlock_irqrestore(&port->lock, flags); in msm_set_baud_rate()
1126 clk_set_rate(msm_port->clk, rate); in msm_set_baud_rate()
1127 baud = rate / 16 / entry->divisor; in msm_set_baud_rate()
1129 spin_lock_irqsave(&port->lock, flags); in msm_set_baud_rate()
1131 port->uartclk = rate; in msm_set_baud_rate()
1133 msm_write(port, entry->code, UART_CSR); in msm_set_baud_rate()
1136 rxstale = entry->rxstale; in msm_set_baud_rate()
1138 if (msm_port->is_uartdm) { in msm_set_baud_rate()
1150 watermark = (port->fifosize * 3) / 4; in msm_set_baud_rate()
1163 msm_port->imr = UART_IMR_RXLEV | UART_IMR_RXSTALE | in msm_set_baud_rate()
1166 msm_write(port, msm_port->imr, UART_IMR); in msm_set_baud_rate()
1168 if (msm_port->is_uartdm) { in msm_set_baud_rate()
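For the rate / 16 / divisor computation in msm_set_baud_rate() above, a worked example assuming the 1.8432 MHz uartclk seen earlier in this file; the divisor is illustrative of one CSR table entry, not a quote of the driver's table:

static unsigned int example_baud(void)
{
	unsigned int rate = 1843200;	/* port->uartclk */
	unsigned int divisor = 12;	/* illustrative entry->divisor */

	return rate / 16 / divisor;	/* 1843200 / 16 / 12 == 9600 */
}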
1181 clk_prepare_enable(msm_port->clk); in msm_init_clock()
1182 clk_prepare_enable(msm_port->pclk); in msm_init_clock()
1192 snprintf(msm_port->name, sizeof(msm_port->name), in msm_startup()
1193 "msm_serial%d", port->line); in msm_startup()
1197 if (likely(port->fifosize > 12)) in msm_startup()
1198 rfr_level = port->fifosize - 12; in msm_startup()
1200 rfr_level = port->fifosize; in msm_startup()
1205 if (msm_port->is_uartdm) in msm_startup()
1216 if (msm_port->is_uartdm) { in msm_startup()
1217 msm_request_tx_dma(msm_port, msm_port->uart.mapbase); in msm_startup()
1218 msm_request_rx_dma(msm_port, msm_port->uart.mapbase); in msm_startup()
1221 ret = request_irq(port->irq, msm_uart_irq, IRQF_TRIGGER_HIGH, in msm_startup()
1222 msm_port->name, port); in msm_startup()
1229 if (msm_port->is_uartdm) in msm_startup()
1232 clk_disable_unprepare(msm_port->pclk); in msm_startup()
1233 clk_disable_unprepare(msm_port->clk); in msm_startup()
1242 msm_port->imr = 0; in msm_shutdown()
1245 if (msm_port->is_uartdm) in msm_shutdown()
1248 clk_disable_unprepare(msm_port->clk); in msm_shutdown()
1250 free_irq(port->irq, port); in msm_shutdown()
1257 struct msm_dma *dma = &msm_port->rx_dma; in msm_set_termios()
1261 spin_lock_irqsave(&port->lock, flags); in msm_set_termios()
1263 if (dma->chan) /* Terminate if any */ in msm_set_termios()
1275 if (termios->c_cflag & PARENB) { in msm_set_termios()
1276 if (termios->c_cflag & PARODD) in msm_set_termios()
1278 else if (termios->c_cflag & CMSPAR) in msm_set_termios()
1286 switch (termios->c_cflag & CSIZE) { in msm_set_termios()
1304 if (termios->c_cflag & CSTOPB) in msm_set_termios()
1315 if (termios->c_cflag & CRTSCTS) { in msm_set_termios()
1322 port->read_status_mask = 0; in msm_set_termios()
1323 if (termios->c_iflag & INPCK) in msm_set_termios()
1324 port->read_status_mask |= UART_SR_PAR_FRAME_ERR; in msm_set_termios()
1325 if (termios->c_iflag & (IGNBRK | BRKINT | PARMRK)) in msm_set_termios()
1326 port->read_status_mask |= UART_SR_RX_BREAK; in msm_set_termios()
1328 uart_update_timeout(port, termios->c_cflag, baud); in msm_set_termios()
1333 spin_unlock_irqrestore(&port->lock, flags); in msm_set_termios()
1343 struct platform_device *pdev = to_platform_device(port->dev); in msm_release_port()
1352 release_mem_region(port->mapbase, size); in msm_release_port()
1353 iounmap(port->membase); in msm_release_port()
1354 port->membase = NULL; in msm_release_port()
1359 struct platform_device *pdev = to_platform_device(port->dev); in msm_request_port()
1366 return -ENXIO; in msm_request_port()
1370 if (!request_mem_region(port->mapbase, size, "msm_serial")) in msm_request_port()
1371 return -EBUSY; in msm_request_port()
1373 port->membase = ioremap(port->mapbase, size); in msm_request_port()
1374 if (!port->membase) { in msm_request_port()
1375 ret = -EBUSY; in msm_request_port()
1382 release_mem_region(port->mapbase, size); in msm_request_port()
1391 port->type = PORT_MSM; in msm_config_port()
1400 if (unlikely(ser->type != PORT_UNKNOWN && ser->type != PORT_MSM)) in msm_verify_port()
1401 return -EINVAL; in msm_verify_port()
1402 if (unlikely(port->irq != ser->irq)) in msm_verify_port()
1403 return -EINVAL; in msm_verify_port()
1414 clk_prepare_enable(msm_port->clk); in msm_power()
1415 clk_prepare_enable(msm_port->pclk); in msm_power()
1418 clk_disable_unprepare(msm_port->clk); in msm_power()
1419 clk_disable_unprepare(msm_port->pclk); in msm_power()
1430 unsigned int rf_reg = msm_port->is_uartdm ? UARTDM_RF : UART_RF; in msm_poll_get_char_single()
1447 c = sp[sizeof(slop) - count]; in msm_poll_get_char_dm()
1448 count--; in msm_poll_get_char_dm()
1461 count--; in msm_poll_get_char_dm()
1473 count = sizeof(slop) - 1; in msm_poll_get_char_dm()
1489 if (msm_port->is_uartdm) in msm_poll_get_char()
1509 if (msm_port->is_uartdm) in msm_poll_put_char()
1517 msm_write(port, c, msm_port->is_uartdm ? UARTDM_TF : UART_TF); in msm_poll_put_char()
1554 .uart = {
1558 .fifosize = 64,
1563 .uart = {
1567 .fifosize = 64,
1572 .uart = {
1576 .fifosize = 64,
1586 return &msm_uart_ports[line].uart; in msm_get_port_from_line()
1600 tf = port->membase + UARTDM_TF; in __msm_console_write()
1602 tf = port->membase + UART_TF; in __msm_console_write()
1610 if (port->sysrq) in __msm_console_write()
1613 locked = spin_trylock(&port->lock); in __msm_console_write()
1615 spin_lock(&port->lock); in __msm_console_write()
1627 num_chars = min(count - i, (unsigned int)sizeof(buf)); in __msm_console_write()
1654 spin_unlock(&port->lock); in __msm_console_write()
1663 BUG_ON(co->index < 0 || co->index >= UART_NR); in msm_console_write()
1665 port = msm_get_port_from_line(co->index); in msm_console_write()
1668 __msm_console_write(port, s, count, msm_port->is_uartdm); in msm_console_write()
1679 if (unlikely(co->index >= UART_NR || co->index < 0)) in msm_console_setup()
1680 return -ENXIO; in msm_console_setup()
1682 port = msm_get_port_from_line(co->index); in msm_console_setup()
1684 if (unlikely(!port->membase)) in msm_console_setup()
1685 return -ENXIO; in msm_console_setup()
1692 pr_info("msm_serial: console setup on port #%d\n", port->line); in msm_console_setup()
1700 struct earlycon_device *dev = con->data; in msm_serial_early_write()
1702 __msm_console_write(&dev->port, s, n, false); in msm_serial_early_write()
1708 if (!device->port.membase) in msm_serial_early_console_setup()
1709 return -ENODEV; in msm_serial_early_console_setup()
1711 device->con->write = msm_serial_early_write; in msm_serial_early_console_setup()
1714 OF_EARLYCON_DECLARE(msm_serial, "qcom,msm-uart",
1720 struct earlycon_device *dev = con->data; in msm_serial_early_write_dm()
1722 __msm_console_write(&dev->port, s, n, true); in msm_serial_early_write_dm()
1729 if (!device->port.membase) in msm_serial_early_console_setup_dm()
1730 return -ENODEV; in msm_serial_early_console_setup_dm()
1732 device->con->write = msm_serial_early_write_dm; in msm_serial_early_console_setup_dm()
1735 OF_EARLYCON_DECLARE(msm_serial_dm, "qcom,msm-uartdm",
1746 .index = -1,
1767 { .compatible = "qcom,msm-uartdm-v1.1", .data = (void *)UARTDM_1P1 },
1768 { .compatible = "qcom,msm-uartdm-v1.2", .data = (void *)UARTDM_1P2 },
1769 { .compatible = "qcom,msm-uartdm-v1.3", .data = (void *)UARTDM_1P3 },
1770 { .compatible = "qcom,msm-uartdm-v1.4", .data = (void *)UARTDM_1P4 },
1782 if (pdev->dev.of_node) in msm_serial_probe()
1783 line = of_alias_get_id(pdev->dev.of_node, "serial"); in msm_serial_probe()
1785 line = pdev->id; in msm_serial_probe()
1788 line = atomic_inc_return(&msm_uart_next_id) - 1; in msm_serial_probe()
1791 return -ENXIO; in msm_serial_probe()
1793 dev_info(&pdev->dev, "msm_serial: detected port #%d\n", line); in msm_serial_probe()
1796 port->dev = &pdev->dev; in msm_serial_probe()
1799 id = of_match_device(msm_uartdm_table, &pdev->dev); in msm_serial_probe()
1801 msm_port->is_uartdm = (unsigned long)id->data; in msm_serial_probe()
1803 msm_port->is_uartdm = 0; in msm_serial_probe()
1805 msm_port->clk = devm_clk_get(&pdev->dev, "core"); in msm_serial_probe()
1806 if (IS_ERR(msm_port->clk)) in msm_serial_probe()
1807 return PTR_ERR(msm_port->clk); in msm_serial_probe()
1809 if (msm_port->is_uartdm) { in msm_serial_probe()
1810 msm_port->pclk = devm_clk_get(&pdev->dev, "iface"); in msm_serial_probe()
1811 if (IS_ERR(msm_port->pclk)) in msm_serial_probe()
1812 return PTR_ERR(msm_port->pclk); in msm_serial_probe()
1815 port->uartclk = clk_get_rate(msm_port->clk); in msm_serial_probe()
1816 dev_info(&pdev->dev, "uartclk = %d\n", port->uartclk); in msm_serial_probe()
1820 return -ENXIO; in msm_serial_probe()
1821 port->mapbase = resource->start; in msm_serial_probe()
1825 return -ENXIO; in msm_serial_probe()
1826 port->irq = irq; in msm_serial_probe()
1827 port->has_sysrq = IS_ENABLED(CONFIG_SERIAL_MSM_CONSOLE); in msm_serial_probe()
1844 { .compatible = "qcom,msm-uart" },
1845 { .compatible = "qcom,msm-uartdm" },
1854 uart_suspend_port(&msm_uart_driver, &port->uart); in msm_serial_suspend()
1863 uart_resume_port(&msm_uart_driver, &port->uart); in msm_serial_resume()