Lines Matching +full:inactive +full:- +full:delay
1 // SPDX-License-Identifier: GPL-2.0-only
18 #include <linux/platform_data/spi-mt65xx.h>
21 #include <linux/spi/spi-mem.h>
22 #include <linux/dma-mapping.h>
116 * struct mtk_spi_compatible - device data structure
134 * struct mtk_spi - SPI driver instance
153 * @spimem_done: SPI-MEM operation completion
154 * @use_spimem: Enables SPI-MEM
156 * @tx_dma: DMA start for SPI-MEM TX
157 * @rx_dma: DMA start for SPI-MEM RX
233 { .compatible = "mediatek,spi-ipm",
236 { .compatible = "mediatek,mt2701-spi",
239 { .compatible = "mediatek,mt2712-spi",
242 { .compatible = "mediatek,mt6589-spi",
245 { .compatible = "mediatek,mt6765-spi",
248 { .compatible = "mediatek,mt7622-spi",
251 { .compatible = "mediatek,mt7629-spi",
254 { .compatible = "mediatek,mt8135-spi",
257 { .compatible = "mediatek,mt8173-spi",
260 { .compatible = "mediatek,mt8183-spi",
263 { .compatible = "mediatek,mt8192-spi",
266 { .compatible = "mediatek,mt6893-spi",
278 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
280 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
282 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_reset()
284 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_reset()
289 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller); in mtk_spi_set_hw_cs_timing()
290 struct spi_delay *cs_setup = &spi->cs_setup; in mtk_spi_set_hw_cs_timing()
291 struct spi_delay *cs_hold = &spi->cs_hold; in mtk_spi_set_hw_cs_timing()
292 struct spi_delay *cs_inactive = &spi->cs_inactive; in mtk_spi_set_hw_cs_timing()
293 u32 setup, hold, inactive; in mtk_spi_set_hw_cs_timing() local
295 int delay; in mtk_spi_set_hw_cs_timing() local
297 delay = spi_delay_to_ns(cs_setup, NULL); in mtk_spi_set_hw_cs_timing()
298 if (delay < 0) in mtk_spi_set_hw_cs_timing()
299 return delay; in mtk_spi_set_hw_cs_timing()
300 setup = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000; in mtk_spi_set_hw_cs_timing()
302 delay = spi_delay_to_ns(cs_hold, NULL); in mtk_spi_set_hw_cs_timing()
303 if (delay < 0) in mtk_spi_set_hw_cs_timing()
304 return delay; in mtk_spi_set_hw_cs_timing()
305 hold = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000; in mtk_spi_set_hw_cs_timing()
307 delay = spi_delay_to_ns(cs_inactive, NULL); in mtk_spi_set_hw_cs_timing()
308 if (delay < 0) in mtk_spi_set_hw_cs_timing()
309 return delay; in mtk_spi_set_hw_cs_timing()
310 inactive = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000; in mtk_spi_set_hw_cs_timing()
313 reg_val = readl(mdata->base + SPI_CFG0_REG); in mtk_spi_set_hw_cs_timing()
314 if (mdata->dev_comp->enhance_timing) { in mtk_spi_set_hw_cs_timing()
318 reg_val |= (((hold - 1) & 0xffff) in mtk_spi_set_hw_cs_timing()
324 reg_val |= (((setup - 1) & 0xffff) in mtk_spi_set_hw_cs_timing()
331 reg_val |= (((hold - 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET); in mtk_spi_set_hw_cs_timing()
336 reg_val |= (((setup - 1) & 0xff) in mtk_spi_set_hw_cs_timing()
340 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_set_hw_cs_timing()
343 if (inactive) { in mtk_spi_set_hw_cs_timing()
344 inactive = min_t(u32, inactive, 0x100); in mtk_spi_set_hw_cs_timing()
345 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_set_hw_cs_timing()
347 reg_val |= (((inactive - 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET); in mtk_spi_set_hw_cs_timing()
348 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_set_hw_cs_timing()
359 struct mtk_chip_config *chip_config = spi->controller_data; in mtk_spi_hw_init()
362 cpu_latency_qos_update_request(&mdata->qos_request, 500); in mtk_spi_hw_init()
363 cpha = spi->mode & SPI_CPHA ? 1 : 0; in mtk_spi_hw_init()
364 cpol = spi->mode & SPI_CPOL ? 1 : 0; in mtk_spi_hw_init()
366 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_hw_init()
367 if (mdata->dev_comp->ipm_design) { in mtk_spi_hw_init()
370 if (spi->mode & SPI_LOOP) in mtk_spi_hw_init()
386 if (spi->mode & SPI_LSB_FIRST) { in mtk_spi_hw_init()
403 if (mdata->dev_comp->enhance_timing) { in mtk_spi_hw_init()
405 if (spi->mode & SPI_CS_HIGH) in mtk_spi_hw_init()
410 if (chip_config->sample_sel) in mtk_spi_hw_init()
425 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_hw_init()
428 if (mdata->dev_comp->need_pad_sel) in mtk_spi_hw_init()
429 writel(mdata->pad_sel[spi_get_chipselect(spi, 0)], in mtk_spi_hw_init()
430 mdata->base + SPI_PAD_SEL_REG); in mtk_spi_hw_init()
432 /* tick delay */ in mtk_spi_hw_init()
433 if (mdata->dev_comp->enhance_timing) { in mtk_spi_hw_init()
434 if (mdata->dev_comp->ipm_design) { in mtk_spi_hw_init()
435 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_hw_init()
437 reg_val |= ((chip_config->tick_delay & 0x7) in mtk_spi_hw_init()
439 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_hw_init()
441 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_hw_init()
443 reg_val |= ((chip_config->tick_delay & 0x7) in mtk_spi_hw_init()
445 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_hw_init()
448 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_hw_init()
450 reg_val |= ((chip_config->tick_delay & 0x3) in mtk_spi_hw_init()
452 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_hw_init()
463 return mtk_spi_hw_init(host, msg->spi); in mtk_spi_prepare_message()
471 cpu_latency_qos_update_request(&mdata->qos_request, PM_QOS_DEFAULT_VALUE); in mtk_spi_unprepare_message()
478 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller); in mtk_spi_set_cs()
480 if (spi->mode & SPI_CS_HIGH) in mtk_spi_set_cs()
483 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
486 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
489 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_set_cs()
490 mdata->state = MTK_SPI_IDLE; in mtk_spi_set_cs()
501 if (speed_hz < mdata->spi_clk_hz / 2) in mtk_spi_prepare_transfer()
502 div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz); in mtk_spi_prepare_transfer()
508 if (mdata->dev_comp->enhance_timing) { in mtk_spi_prepare_transfer()
509 reg_val = readl(mdata->base + SPI_CFG2_REG); in mtk_spi_prepare_transfer()
511 reg_val |= (((sck_time - 1) & 0xffff) in mtk_spi_prepare_transfer()
514 reg_val |= (((sck_time - 1) & 0xffff) in mtk_spi_prepare_transfer()
516 writel(reg_val, mdata->base + SPI_CFG2_REG); in mtk_spi_prepare_transfer()
518 reg_val = readl(mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
520 reg_val |= (((sck_time - 1) & 0xff) in mtk_spi_prepare_transfer()
523 reg_val |= (((sck_time - 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET); in mtk_spi_prepare_transfer()
524 writel(reg_val, mdata->base + SPI_CFG0_REG); in mtk_spi_prepare_transfer()
533 if (mdata->dev_comp->ipm_design) in mtk_spi_setup_packet()
535 mdata->xfer_len, in mtk_spi_setup_packet()
539 mdata->xfer_len, in mtk_spi_setup_packet()
542 packet_loop = mdata->xfer_len / packet_size; in mtk_spi_setup_packet()
544 reg_val = readl(mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
545 if (mdata->dev_comp->ipm_design) in mtk_spi_setup_packet()
549 reg_val |= (packet_size - 1) << SPI_CFG1_PACKET_LENGTH_OFFSET; in mtk_spi_setup_packet()
551 reg_val |= (packet_loop - 1) << SPI_CFG1_PACKET_LOOP_OFFSET; in mtk_spi_setup_packet()
552 writel(reg_val, mdata->base + SPI_CFG1_REG); in mtk_spi_setup_packet()
560 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
561 if (mdata->state == MTK_SPI_IDLE) in mtk_spi_enable_transfer()
565 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_enable_transfer()
572 if (mdata->dev_comp->ipm_design) { in mtk_spi_get_mult_delta()
588 if (mdata->tx_sgl_len && mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
589 if (mdata->tx_sgl_len > mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
590 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
591 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
592 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
593 mdata->tx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
595 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
596 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
597 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
598 mdata->rx_sgl_len -= mdata->xfer_len; in mtk_spi_update_mdata_len()
600 } else if (mdata->tx_sgl_len) { in mtk_spi_update_mdata_len()
601 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len); in mtk_spi_update_mdata_len()
602 mdata->xfer_len = mdata->tx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
603 mdata->tx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
604 } else if (mdata->rx_sgl_len) { in mtk_spi_update_mdata_len()
605 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len); in mtk_spi_update_mdata_len()
606 mdata->xfer_len = mdata->rx_sgl_len - mult_delta; in mtk_spi_update_mdata_len()
607 mdata->rx_sgl_len = mult_delta; in mtk_spi_update_mdata_len()
616 if (mdata->tx_sgl) { in mtk_spi_setup_dma_addr()
617 writel((u32)(xfer->tx_dma & MTK_SPI_32BITS_MASK), in mtk_spi_setup_dma_addr()
618 mdata->base + SPI_TX_SRC_REG); in mtk_spi_setup_dma_addr()
620 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
621 writel((u32)(xfer->tx_dma >> 32), in mtk_spi_setup_dma_addr()
622 mdata->base + SPI_TX_SRC_REG_64); in mtk_spi_setup_dma_addr()
626 if (mdata->rx_sgl) { in mtk_spi_setup_dma_addr()
627 writel((u32)(xfer->rx_dma & MTK_SPI_32BITS_MASK), in mtk_spi_setup_dma_addr()
628 mdata->base + SPI_RX_DST_REG); in mtk_spi_setup_dma_addr()
630 if (mdata->dev_comp->dma_ext) in mtk_spi_setup_dma_addr()
631 writel((u32)(xfer->rx_dma >> 32), in mtk_spi_setup_dma_addr()
632 mdata->base + SPI_RX_DST_REG_64); in mtk_spi_setup_dma_addr()
645 mdata->cur_transfer = xfer; in mtk_spi_fifo_transfer()
646 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len); in mtk_spi_fifo_transfer()
647 mdata->num_xfered = 0; in mtk_spi_fifo_transfer()
648 mtk_spi_prepare_transfer(host, xfer->speed_hz); in mtk_spi_fifo_transfer()
651 if (xfer->tx_buf) { in mtk_spi_fifo_transfer()
652 cnt = xfer->len / 4; in mtk_spi_fifo_transfer()
653 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt); in mtk_spi_fifo_transfer()
654 remainder = xfer->len % 4; in mtk_spi_fifo_transfer()
657 memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder); in mtk_spi_fifo_transfer()
658 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_fifo_transfer()
674 mdata->tx_sgl = NULL; in mtk_spi_dma_transfer()
675 mdata->rx_sgl = NULL; in mtk_spi_dma_transfer()
676 mdata->tx_sgl_len = 0; in mtk_spi_dma_transfer()
677 mdata->rx_sgl_len = 0; in mtk_spi_dma_transfer()
678 mdata->cur_transfer = xfer; in mtk_spi_dma_transfer()
679 mdata->num_xfered = 0; in mtk_spi_dma_transfer()
681 mtk_spi_prepare_transfer(host, xfer->speed_hz); in mtk_spi_dma_transfer()
683 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
684 if (xfer->tx_buf) in mtk_spi_dma_transfer()
686 if (xfer->rx_buf) in mtk_spi_dma_transfer()
688 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_dma_transfer()
690 if (xfer->tx_buf) in mtk_spi_dma_transfer()
691 mdata->tx_sgl = xfer->tx_sg.sgl; in mtk_spi_dma_transfer()
692 if (xfer->rx_buf) in mtk_spi_dma_transfer()
693 mdata->rx_sgl = xfer->rx_sg.sgl; in mtk_spi_dma_transfer()
695 if (mdata->tx_sgl) { in mtk_spi_dma_transfer()
696 xfer->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_dma_transfer()
697 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_dma_transfer()
699 if (mdata->rx_sgl) { in mtk_spi_dma_transfer()
700 xfer->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_dma_transfer()
701 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_dma_transfer()
716 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller); in mtk_spi_transfer_one()
720 if (mdata->dev_comp->ipm_design) { in mtk_spi_transfer_one()
721 if (!xfer->tx_buf || !xfer->rx_buf) { in mtk_spi_transfer_one()
723 if (xfer->rx_buf) in mtk_spi_transfer_one()
726 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG); in mtk_spi_transfer_one()
729 if (host->can_dma(host, spi, xfer)) in mtk_spi_transfer_one()
739 /* Buffers for DMA transactions must be 4-byte aligned */ in mtk_spi_can_dma()
740 return (xfer->len > MTK_SPI_MAX_FIFO_SIZE && in mtk_spi_can_dma()
741 (unsigned long)xfer->tx_buf % 4 == 0 && in mtk_spi_can_dma()
742 (unsigned long)xfer->rx_buf % 4 == 0); in mtk_spi_can_dma()
747 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller); in mtk_spi_setup()
749 if (!spi->controller_data) in mtk_spi_setup()
750 spi->controller_data = (void *)&mtk_default_chip_info; in mtk_spi_setup()
752 if (mdata->dev_comp->need_pad_sel && spi_get_csgpiod(spi, 0)) in mtk_spi_setup()
753 /* CS de-asserted, gpiolib will handle inversion */ in mtk_spi_setup()
764 struct spi_transfer *xfer = mdata->cur_transfer; in mtk_spi_interrupt_thread()
766 if (!host->can_dma(host, NULL, xfer)) { in mtk_spi_interrupt_thread()
767 if (xfer->rx_buf) { in mtk_spi_interrupt_thread()
768 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt_thread()
769 ioread32_rep(mdata->base + SPI_RX_DATA_REG, in mtk_spi_interrupt_thread()
770 xfer->rx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt_thread()
771 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt_thread()
773 reg_val = readl(mdata->base + SPI_RX_DATA_REG); in mtk_spi_interrupt_thread()
774 memcpy(xfer->rx_buf + (cnt * 4) + mdata->num_xfered, in mtk_spi_interrupt_thread()
780 mdata->num_xfered += mdata->xfer_len; in mtk_spi_interrupt_thread()
781 if (mdata->num_xfered == xfer->len) { in mtk_spi_interrupt_thread()
786 len = xfer->len - mdata->num_xfered; in mtk_spi_interrupt_thread()
787 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len); in mtk_spi_interrupt_thread()
790 if (xfer->tx_buf) { in mtk_spi_interrupt_thread()
791 cnt = mdata->xfer_len / 4; in mtk_spi_interrupt_thread()
792 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, in mtk_spi_interrupt_thread()
793 xfer->tx_buf + mdata->num_xfered, cnt); in mtk_spi_interrupt_thread()
795 remainder = mdata->xfer_len % 4; in mtk_spi_interrupt_thread()
799 xfer->tx_buf + (cnt * 4) + mdata->num_xfered, in mtk_spi_interrupt_thread()
801 writel(reg_val, mdata->base + SPI_TX_DATA_REG); in mtk_spi_interrupt_thread()
810 if (mdata->tx_sgl) in mtk_spi_interrupt_thread()
811 xfer->tx_dma += mdata->xfer_len; in mtk_spi_interrupt_thread()
812 if (mdata->rx_sgl) in mtk_spi_interrupt_thread()
813 xfer->rx_dma += mdata->xfer_len; in mtk_spi_interrupt_thread()
815 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) { in mtk_spi_interrupt_thread()
816 mdata->tx_sgl = sg_next(mdata->tx_sgl); in mtk_spi_interrupt_thread()
817 if (mdata->tx_sgl) { in mtk_spi_interrupt_thread()
818 xfer->tx_dma = sg_dma_address(mdata->tx_sgl); in mtk_spi_interrupt_thread()
819 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl); in mtk_spi_interrupt_thread()
822 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) { in mtk_spi_interrupt_thread()
823 mdata->rx_sgl = sg_next(mdata->rx_sgl); in mtk_spi_interrupt_thread()
824 if (mdata->rx_sgl) { in mtk_spi_interrupt_thread()
825 xfer->rx_dma = sg_dma_address(mdata->rx_sgl); in mtk_spi_interrupt_thread()
826 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl); in mtk_spi_interrupt_thread()
830 if (!mdata->tx_sgl && !mdata->rx_sgl) { in mtk_spi_interrupt_thread()
832 cmd = readl(mdata->base + SPI_CMD_REG); in mtk_spi_interrupt_thread()
835 writel(cmd, mdata->base + SPI_CMD_REG); in mtk_spi_interrupt_thread()
855 reg_val = readl(mdata->base + SPI_STATUS0_REG); in mtk_spi_interrupt()
857 mdata->state = MTK_SPI_PAUSED; in mtk_spi_interrupt()
859 mdata->state = MTK_SPI_IDLE; in mtk_spi_interrupt()
861 /* SPI-MEM ops */ in mtk_spi_interrupt()
862 if (mdata->use_spimem) { in mtk_spi_interrupt()
863 complete(&mdata->spimem_done); in mtk_spi_interrupt()
875 if (op->data.dir != SPI_MEM_NO_DATA) { in mtk_spi_mem_adjust_op_size()
876 opcode_len = 1 + op->addr.nbytes + op->dummy.nbytes; in mtk_spi_mem_adjust_op_size()
877 if (opcode_len + op->data.nbytes > MTK_SPI_IPM_PACKET_SIZE) { in mtk_spi_mem_adjust_op_size()
878 op->data.nbytes = MTK_SPI_IPM_PACKET_SIZE - opcode_len; in mtk_spi_mem_adjust_op_size()
879 /* force data buffer dma-aligned. */ in mtk_spi_mem_adjust_op_size()
880 op->data.nbytes -= op->data.nbytes % 4; in mtk_spi_mem_adjust_op_size()
893 if (op->addr.nbytes && op->dummy.nbytes && in mtk_spi_mem_supports_op()
894 op->addr.buswidth != op->dummy.buswidth) in mtk_spi_mem_supports_op()
897 if (op->addr.nbytes + op->dummy.nbytes > 16) in mtk_spi_mem_supports_op()
900 if (op->data.nbytes > MTK_SPI_IPM_PACKET_SIZE) { in mtk_spi_mem_supports_op()
901 if (op->data.nbytes / MTK_SPI_IPM_PACKET_SIZE > in mtk_spi_mem_supports_op()
903 op->data.nbytes % MTK_SPI_IPM_PACKET_SIZE != 0) in mtk_spi_mem_supports_op()
915 writel((u32)(mdata->tx_dma & MTK_SPI_32BITS_MASK), in mtk_spi_mem_setup_dma_xfer()
916 mdata->base + SPI_TX_SRC_REG); in mtk_spi_mem_setup_dma_xfer()
918 if (mdata->dev_comp->dma_ext) in mtk_spi_mem_setup_dma_xfer()
919 writel((u32)(mdata->tx_dma >> 32), in mtk_spi_mem_setup_dma_xfer()
920 mdata->base + SPI_TX_SRC_REG_64); in mtk_spi_mem_setup_dma_xfer()
923 if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_spi_mem_setup_dma_xfer()
924 writel((u32)(mdata->rx_dma & MTK_SPI_32BITS_MASK), in mtk_spi_mem_setup_dma_xfer()
925 mdata->base + SPI_RX_DST_REG); in mtk_spi_mem_setup_dma_xfer()
927 if (mdata->dev_comp->dma_ext) in mtk_spi_mem_setup_dma_xfer()
928 writel((u32)(mdata->rx_dma >> 32), in mtk_spi_mem_setup_dma_xfer()
929 mdata->base + SPI_RX_DST_REG_64); in mtk_spi_mem_setup_dma_xfer()
937 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller); in mtk_spi_transfer_wait()
945 if (op->data.dir == SPI_MEM_NO_DATA) in mtk_spi_transfer_wait()
948 ms *= op->data.nbytes; in mtk_spi_transfer_wait()
949 ms = div_u64(ms, mem->spi->max_speed_hz); in mtk_spi_transfer_wait()
955 if (!wait_for_completion_timeout(&mdata->spimem_done, in mtk_spi_transfer_wait()
957 dev_err(mdata->dev, "spi-mem transfer timeout\n"); in mtk_spi_transfer_wait()
958 return -ETIMEDOUT; in mtk_spi_transfer_wait()
967 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller); in mtk_spi_mem_exec_op()
972 mdata->use_spimem = true; in mtk_spi_mem_exec_op()
973 reinit_completion(&mdata->spimem_done); in mtk_spi_mem_exec_op()
976 mtk_spi_hw_init(mem->spi->controller, mem->spi); in mtk_spi_mem_exec_op()
977 mtk_spi_prepare_transfer(mem->spi->controller, op->max_freq); in mtk_spi_mem_exec_op()
979 reg_val = readl(mdata->base + SPI_CFG3_IPM_REG); in mtk_spi_mem_exec_op()
986 if (op->addr.nbytes || op->dummy.nbytes) in mtk_spi_mem_exec_op()
987 reg_val |= (op->addr.nbytes + op->dummy.nbytes) << in mtk_spi_mem_exec_op()
991 if (op->data.dir == SPI_MEM_NO_DATA) { in mtk_spi_mem_exec_op()
993 writel(0, mdata->base + SPI_CFG1_REG); in mtk_spi_mem_exec_op()
996 mdata->xfer_len = op->data.nbytes; in mtk_spi_mem_exec_op()
997 mtk_spi_setup_packet(mem->spi->controller); in mtk_spi_mem_exec_op()
1000 if (op->addr.nbytes || op->dummy.nbytes) { in mtk_spi_mem_exec_op()
1001 if (op->addr.buswidth == 1 || op->dummy.buswidth == 1) in mtk_spi_mem_exec_op()
1007 if (op->addr.buswidth == 2 || in mtk_spi_mem_exec_op()
1008 op->dummy.buswidth == 2 || in mtk_spi_mem_exec_op()
1009 op->data.buswidth == 2) in mtk_spi_mem_exec_op()
1011 else if (op->addr.buswidth == 4 || in mtk_spi_mem_exec_op()
1012 op->dummy.buswidth == 4 || in mtk_spi_mem_exec_op()
1013 op->data.buswidth == 4) in mtk_spi_mem_exec_op()
1022 if (op->data.dir == SPI_MEM_DATA_IN) in mtk_spi_mem_exec_op()
1026 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG); in mtk_spi_mem_exec_op()
1028 tx_size = 1 + op->addr.nbytes + op->dummy.nbytes; in mtk_spi_mem_exec_op()
1029 if (op->data.dir == SPI_MEM_DATA_OUT) in mtk_spi_mem_exec_op()
1030 tx_size += op->data.nbytes; in mtk_spi_mem_exec_op()
1036 mdata->use_spimem = false; in mtk_spi_mem_exec_op()
1037 return -ENOMEM; in mtk_spi_mem_exec_op()
1040 tx_tmp_buf[0] = op->cmd.opcode; in mtk_spi_mem_exec_op()
1042 if (op->addr.nbytes) { in mtk_spi_mem_exec_op()
1045 for (i = 0; i < op->addr.nbytes; i++) in mtk_spi_mem_exec_op()
1046 tx_tmp_buf[i + 1] = op->addr.val >> in mtk_spi_mem_exec_op()
1047 (8 * (op->addr.nbytes - i - 1)); in mtk_spi_mem_exec_op()
1050 if (op->dummy.nbytes) in mtk_spi_mem_exec_op()
1051 memset(tx_tmp_buf + op->addr.nbytes + 1, in mtk_spi_mem_exec_op()
1053 op->dummy.nbytes); in mtk_spi_mem_exec_op()
1055 if (op->data.nbytes && op->data.dir == SPI_MEM_DATA_OUT) in mtk_spi_mem_exec_op()
1056 memcpy(tx_tmp_buf + op->dummy.nbytes + op->addr.nbytes + 1, in mtk_spi_mem_exec_op()
1057 op->data.buf.out, in mtk_spi_mem_exec_op()
1058 op->data.nbytes); in mtk_spi_mem_exec_op()
1060 mdata->tx_dma = dma_map_single(mdata->dev, tx_tmp_buf, in mtk_spi_mem_exec_op()
1062 if (dma_mapping_error(mdata->dev, mdata->tx_dma)) { in mtk_spi_mem_exec_op()
1063 ret = -ENOMEM; in mtk_spi_mem_exec_op()
1067 if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_spi_mem_exec_op()
1068 if (!IS_ALIGNED((size_t)op->data.buf.in, 4)) { in mtk_spi_mem_exec_op()
1069 rx_tmp_buf = kzalloc(op->data.nbytes, in mtk_spi_mem_exec_op()
1072 ret = -ENOMEM; in mtk_spi_mem_exec_op()
1076 rx_tmp_buf = op->data.buf.in; in mtk_spi_mem_exec_op()
1079 mdata->rx_dma = dma_map_single(mdata->dev, in mtk_spi_mem_exec_op()
1081 op->data.nbytes, in mtk_spi_mem_exec_op()
1083 if (dma_mapping_error(mdata->dev, mdata->rx_dma)) { in mtk_spi_mem_exec_op()
1084 ret = -ENOMEM; in mtk_spi_mem_exec_op()
1089 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_mem_exec_op()
1091 if (op->data.dir == SPI_MEM_DATA_IN) in mtk_spi_mem_exec_op()
1093 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_mem_exec_op()
1095 mtk_spi_mem_setup_dma_xfer(mem->spi->controller, op); in mtk_spi_mem_exec_op()
1097 mtk_spi_enable_transfer(mem->spi->controller); in mtk_spi_mem_exec_op()
1105 reg_val = readl(mdata->base + SPI_CMD_REG); in mtk_spi_mem_exec_op()
1107 if (op->data.dir == SPI_MEM_DATA_IN) in mtk_spi_mem_exec_op()
1109 writel(reg_val, mdata->base + SPI_CMD_REG); in mtk_spi_mem_exec_op()
1112 if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_spi_mem_exec_op()
1113 dma_unmap_single(mdata->dev, mdata->rx_dma, in mtk_spi_mem_exec_op()
1114 op->data.nbytes, DMA_FROM_DEVICE); in mtk_spi_mem_exec_op()
1115 if (!IS_ALIGNED((size_t)op->data.buf.in, 4)) in mtk_spi_mem_exec_op()
1116 memcpy(op->data.buf.in, rx_tmp_buf, op->data.nbytes); in mtk_spi_mem_exec_op()
1119 if (op->data.dir == SPI_MEM_DATA_IN && in mtk_spi_mem_exec_op()
1120 !IS_ALIGNED((size_t)op->data.buf.in, 4)) in mtk_spi_mem_exec_op()
1123 dma_unmap_single(mdata->dev, mdata->tx_dma, in mtk_spi_mem_exec_op()
1127 mdata->use_spimem = false; in mtk_spi_mem_exec_op()
1144 struct device *dev = &pdev->dev; in mtk_spi_probe()
1151 return dev_err_probe(dev, -ENOMEM, "failed to alloc spi host\n"); in mtk_spi_probe()
1153 host->auto_runtime_pm = true; in mtk_spi_probe()
1154 host->dev.of_node = dev->of_node; in mtk_spi_probe()
1155 host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST; in mtk_spi_probe()
1157 host->set_cs = mtk_spi_set_cs; in mtk_spi_probe()
1158 host->prepare_message = mtk_spi_prepare_message; in mtk_spi_probe()
1159 host->unprepare_message = mtk_spi_unprepare_message; in mtk_spi_probe()
1160 host->transfer_one = mtk_spi_transfer_one; in mtk_spi_probe()
1161 host->can_dma = mtk_spi_can_dma; in mtk_spi_probe()
1162 host->setup = mtk_spi_setup; in mtk_spi_probe()
1163 host->set_cs_timing = mtk_spi_set_hw_cs_timing; in mtk_spi_probe()
1164 host->use_gpio_descriptors = true; in mtk_spi_probe()
1167 mdata->dev_comp = device_get_match_data(dev); in mtk_spi_probe()
1169 if (mdata->dev_comp->enhance_timing) in mtk_spi_probe()
1170 host->mode_bits |= SPI_CS_HIGH; in mtk_spi_probe()
1172 if (mdata->dev_comp->must_tx) in mtk_spi_probe()
1173 host->flags = SPI_CONTROLLER_MUST_TX; in mtk_spi_probe()
1174 if (mdata->dev_comp->ipm_design) in mtk_spi_probe()
1175 host->mode_bits |= SPI_LOOP | SPI_RX_DUAL | SPI_TX_DUAL | in mtk_spi_probe()
1178 if (mdata->dev_comp->ipm_design) { in mtk_spi_probe()
1179 mdata->dev = dev; in mtk_spi_probe()
1180 host->mem_ops = &mtk_spi_mem_ops; in mtk_spi_probe()
1181 host->mem_caps = &mtk_spi_mem_caps; in mtk_spi_probe()
1182 init_completion(&mdata->spimem_done); in mtk_spi_probe()
1185 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
1186 mdata->pad_num = of_property_count_u32_elems(dev->of_node, in mtk_spi_probe()
1187 "mediatek,pad-select"); in mtk_spi_probe()
1188 if (mdata->pad_num < 0) in mtk_spi_probe()
1189 return dev_err_probe(dev, -EINVAL, in mtk_spi_probe()
1190 "No 'mediatek,pad-select' property\n"); in mtk_spi_probe()
1192 mdata->pad_sel = devm_kmalloc_array(dev, mdata->pad_num, in mtk_spi_probe()
1194 if (!mdata->pad_sel) in mtk_spi_probe()
1195 return -ENOMEM; in mtk_spi_probe()
1197 for (i = 0; i < mdata->pad_num; i++) { in mtk_spi_probe()
1198 of_property_read_u32_index(dev->of_node, in mtk_spi_probe()
1199 "mediatek,pad-select", in mtk_spi_probe()
1200 i, &mdata->pad_sel[i]); in mtk_spi_probe()
1201 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) in mtk_spi_probe()
1202 return dev_err_probe(dev, -EINVAL, in mtk_spi_probe()
1203 "wrong pad-sel[%d]: %u\n", in mtk_spi_probe()
1204 i, mdata->pad_sel[i]); in mtk_spi_probe()
1209 mdata->base = devm_platform_ioremap_resource(pdev, 0); in mtk_spi_probe()
1210 if (IS_ERR(mdata->base)) in mtk_spi_probe()
1211 return PTR_ERR(mdata->base); in mtk_spi_probe()
1217 if (!dev->dma_mask) in mtk_spi_probe()
1218 dev->dma_mask = &dev->coherent_dma_mask; in mtk_spi_probe()
1220 if (mdata->dev_comp->ipm_design) in mtk_spi_probe()
1225 mdata->parent_clk = devm_clk_get(dev, "parent-clk"); in mtk_spi_probe()
1226 if (IS_ERR(mdata->parent_clk)) in mtk_spi_probe()
1227 return dev_err_probe(dev, PTR_ERR(mdata->parent_clk), in mtk_spi_probe()
1228 "failed to get parent-clk\n"); in mtk_spi_probe()
1230 mdata->sel_clk = devm_clk_get(dev, "sel-clk"); in mtk_spi_probe()
1231 if (IS_ERR(mdata->sel_clk)) in mtk_spi_probe()
1232 return dev_err_probe(dev, PTR_ERR(mdata->sel_clk), "failed to get sel-clk\n"); in mtk_spi_probe()
1234 mdata->spi_clk = devm_clk_get(dev, "spi-clk"); in mtk_spi_probe()
1235 if (IS_ERR(mdata->spi_clk)) in mtk_spi_probe()
1236 return dev_err_probe(dev, PTR_ERR(mdata->spi_clk), "failed to get spi-clk\n"); in mtk_spi_probe()
1238 mdata->spi_hclk = devm_clk_get_optional(dev, "hclk"); in mtk_spi_probe()
1239 if (IS_ERR(mdata->spi_hclk)) in mtk_spi_probe()
1240 return dev_err_probe(dev, PTR_ERR(mdata->spi_hclk), "failed to get hclk\n"); in mtk_spi_probe()
1242 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk); in mtk_spi_probe()
1246 ret = clk_prepare_enable(mdata->spi_hclk); in mtk_spi_probe()
1250 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_probe()
1252 clk_disable_unprepare(mdata->spi_hclk); in mtk_spi_probe()
1256 mdata->spi_clk_hz = clk_get_rate(mdata->spi_clk); in mtk_spi_probe()
1258 if (mdata->dev_comp->no_need_unprepare) { in mtk_spi_probe()
1259 clk_disable(mdata->spi_clk); in mtk_spi_probe()
1260 clk_disable(mdata->spi_hclk); in mtk_spi_probe()
1262 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_probe()
1263 clk_disable_unprepare(mdata->spi_hclk); in mtk_spi_probe()
1266 cpu_latency_qos_add_request(&mdata->qos_request, PM_QOS_DEFAULT_VALUE); in mtk_spi_probe()
1268 if (mdata->dev_comp->need_pad_sel) { in mtk_spi_probe()
1269 if (mdata->pad_num != host->num_chipselect) in mtk_spi_probe()
1270 return dev_err_probe(dev, -EINVAL, in mtk_spi_probe()
1272 mdata->pad_num, host->num_chipselect); in mtk_spi_probe()
1274 if (!host->cs_gpiods && host->num_chipselect > 1) in mtk_spi_probe()
1275 return dev_err_probe(dev, -EINVAL, in mtk_spi_probe()
1279 if (mdata->dev_comp->dma_ext) in mtk_spi_probe()
1311 cpu_latency_qos_remove_request(&mdata->qos_request); in mtk_spi_remove()
1312 if (mdata->use_spimem && !completion_done(&mdata->spimem_done)) in mtk_spi_remove()
1313 complete(&mdata->spimem_done); in mtk_spi_remove()
1315 ret = pm_runtime_get_sync(&pdev->dev); in mtk_spi_remove()
1317 dev_warn(&pdev->dev, "Failed to resume hardware (%pe)\n", ERR_PTR(ret)); in mtk_spi_remove()
1326 if (mdata->dev_comp->no_need_unprepare) { in mtk_spi_remove()
1327 clk_unprepare(mdata->spi_clk); in mtk_spi_remove()
1328 clk_unprepare(mdata->spi_hclk); in mtk_spi_remove()
1332 pm_runtime_put_noidle(&pdev->dev); in mtk_spi_remove()
1333 pm_runtime_disable(&pdev->dev); in mtk_spi_remove()
1348 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_suspend()
1349 clk_disable_unprepare(mdata->spi_hclk); in mtk_spi_suspend()
1366 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_resume()
1372 ret = clk_prepare_enable(mdata->spi_hclk); in mtk_spi_resume()
1375 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_resume()
1382 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_resume()
1383 clk_disable_unprepare(mdata->spi_hclk); in mtk_spi_resume()
1396 if (mdata->dev_comp->no_need_unprepare) { in mtk_spi_runtime_suspend()
1397 clk_disable(mdata->spi_clk); in mtk_spi_runtime_suspend()
1398 clk_disable(mdata->spi_hclk); in mtk_spi_runtime_suspend()
1400 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_runtime_suspend()
1401 clk_disable_unprepare(mdata->spi_hclk); in mtk_spi_runtime_suspend()
1413 if (mdata->dev_comp->no_need_unprepare) { in mtk_spi_runtime_resume()
1414 ret = clk_enable(mdata->spi_clk); in mtk_spi_runtime_resume()
1419 ret = clk_enable(mdata->spi_hclk); in mtk_spi_runtime_resume()
1422 clk_disable(mdata->spi_clk); in mtk_spi_runtime_resume()
1426 ret = clk_prepare_enable(mdata->spi_clk); in mtk_spi_runtime_resume()
1432 ret = clk_prepare_enable(mdata->spi_hclk); in mtk_spi_runtime_resume()
1435 clk_disable_unprepare(mdata->spi_clk); in mtk_spi_runtime_resume()
1452 .name = "mtk-spi",
1465 MODULE_ALIAS("platform:mtk-spi");