Lines Matching +full:axi +full:- +full:config

1 // SPDX-License-Identifier: GPL-2.0-or-later
5 * Copyright (C) 2010-2014 Xilinx, Inc. All rights reserved.
10 * The AXI Video Direct Memory Access (AXI VDMA) core is a soft Xilinx IP
11 * core that provides high-bandwidth direct memory access between memory
12 * and AXI4-Stream type video target peripherals. The core provides efficient
18 * registers are accessed through an AXI4-Lite slave interface.
20 * The AXI Direct Memory Access (AXI DMA) core is a soft Xilinx IP core that
21 * provides high-bandwidth one dimensional direct memory access between memory
22 * and AXI4-Stream target peripherals. It supports one receive and one
25 * The AXI CDMA, is a soft IP, which provides high-bandwidth Direct Memory
26 * Access (DMA) between a memory-mapped source address and a memory-mapped
29 * The AXI Multichannel Direct Memory Access (AXI MCDMA) core is a soft
30 * Xilinx IP that provides high-bandwidth direct memory access between
31 * memory and AXI4-Stream target peripherals. It provides scatter gather
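For orientation: all four engines described above are exposed to the rest of the kernel through the standard dmaengine client API. A minimal, hypothetical consumer sketch follows; the channel name "rx" and the wrapper function are illustrative only and not part of this file.

/* Hypothetical client sketch: a peripheral driver requesting one of
 * these engines through the generic dmaengine API. "rx" is an assumed
 * channel name taken from the client's device tree node.
 */
#include <linux/dmaengine.h>
#include <linux/err.h>

static int example_start_rx(struct device *dev, dma_addr_t buf, size_t len)
{
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	chan = dma_request_chan(dev, "rx");	/* resolved via of_dma_xilinx_xlate() */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_slave_single(chan, buf, len,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
	if (!tx) {
		dma_release_channel(chan);
		return -EIO;
	}

	cookie = dmaengine_submit(tx);		/* descriptor joins pending_list */
	dma_async_issue_pending(chan);		/* reaches xilinx_dma_issue_pending() */
	return dma_submit_error(cookie);
}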
50 #include <linux/io-64-nonatomic-lo-hi.h>
 159 /* AXI VDMA Flush on Fsync bits */
167 /* AXI DMA Specific Registers/Offsets */
171 /* AXI DMA Specific Masks/Bit fields */
184 /* AXI CDMA Specific Registers/Offsets */
188 /* AXI CDMA Specific Masks */
194 /* AXI MCDMA Specific Registers/Offsets */
206 /* AXI MCDMA Specific Masks/Shifts */
219 * struct xilinx_vdma_desc_hw - Hardware Descriptor
240 * struct xilinx_axidma_desc_hw - Hardware Descriptor for AXI DMA
249 * @app: APP Fields @0x20 - 0x30
264 * struct xilinx_aximcdma_desc_hw - Hardware Descriptor for AXI MCDMA
273 * @app: APP Fields @0x20 - 0x30
288 * struct xilinx_cdma_desc_hw - Hardware Descriptor
310 * struct xilinx_vdma_tx_segment - Descriptor segment
322 * struct xilinx_axidma_tx_segment - Descriptor segment
334 * struct xilinx_aximcdma_tx_segment - Descriptor segment
346 * struct xilinx_cdma_tx_segment - Descriptor segment
358 * struct xilinx_dma_tx_descriptor - Per Transaction structure
376 * struct xilinx_dma_chan - Driver specific DMA channel structure
398 * @config: Device configuration info
435 struct xilinx_vdma_config config; member
452 * enum xdma_ip_type - DMA IP type.
 454 * @XDMA_TYPE_AXIDMA: AXI DMA IP.
 455 * @XDMA_TYPE_CDMA: AXI CDMA IP.
 456 * @XDMA_TYPE_VDMA: AXI VDMA IP.
 457 * @XDMA_TYPE_AXIMCDMA: AXI MCDMA IP.
477 * struct xilinx_dma_device - DMA device structure
485 * @dma_config: DMA config structure
 486 * @axi_clk: DMA AXI4-Lite interface clock
520 readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
526 return ioread32(chan->xdev->regs + reg); in dma_read()
531 iowrite32(value, chan->xdev->regs + reg); in dma_write()
537 dma_write(chan, chan->desc_offset + reg, value); in vdma_desc_write()
542 return dma_read(chan, chan->ctrl_offset + reg); in dma_ctrl_read()
548 dma_write(chan, chan->ctrl_offset + reg, value); in dma_ctrl_write()
564 * vdma_desc_write_64 - 64-bit descriptor write
578 writel(value_lsb, chan->xdev->regs + chan->desc_offset + reg); in vdma_desc_write_64()
581 writel(value_msb, chan->xdev->regs + chan->desc_offset + reg + 4); in vdma_desc_write_64()
586 lo_hi_writeq(value, chan->xdev->regs + chan->ctrl_offset + reg); in dma_writeq()
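The helpers above layer a channel's control and descriptor register offsets on top of the shared register base. A standalone model of that layering, with a plain array standing in for the MMIO region and invented offsets, so the sketch runs anywhere:

/* Standalone model of the layered register accessors above. The real
 * code uses ioread32()/iowrite32(); a plain array stands in for the
 * memory-mapped register space here, and the offsets are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

struct chan {
	uint32_t *regs;        /* base of the whole IP's register space */
	uint32_t ctrl_offset;  /* per-direction control block (bytes) */
	uint32_t desc_offset;  /* per-direction descriptor block (bytes) */
};

static void dma_write(struct chan *c, uint32_t reg, uint32_t val)
{
	c->regs[reg / 4] = val;                  /* iowrite32() in the driver */
}

static void dma_ctrl_write(struct chan *c, uint32_t reg, uint32_t val)
{
	dma_write(c, c->ctrl_offset + reg, val); /* control regs are base-relative */
}

static void vdma_desc_write(struct chan *c, uint32_t reg, uint32_t val)
{
	dma_write(c, c->desc_offset + reg, val);
}

int main(void)
{
	uint32_t space[1024] = { 0 };
	struct chan c = { space, 0x30, 0x50 };   /* assumed offsets */

	dma_ctrl_write(&c, 0x0, 0x1);            /* lands at byte 0x30 */
	vdma_desc_write(&c, 0x4, 0xabcd);        /* lands at byte 0x54 */
	printf("%x %x\n", space[0x30 / 4], space[0x54 / 4]);
	return 0;
}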
592 if (chan->ext_addr) in xilinx_write()
603 if (chan->ext_addr) { in xilinx_axidma_buf()
604 hw->buf_addr = lower_32_bits(buf_addr + sg_used + period_len); in xilinx_axidma_buf()
605 hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used + in xilinx_axidma_buf()
608 hw->buf_addr = buf_addr + sg_used + period_len; in xilinx_axidma_buf()
616 if (chan->ext_addr) { in xilinx_aximcdma_buf()
617 hw->buf_addr = lower_32_bits(buf_addr + sg_used); in xilinx_aximcdma_buf()
618 hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used); in xilinx_aximcdma_buf()
620 hw->buf_addr = buf_addr + sg_used; in xilinx_aximcdma_buf()
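xilinx_write() and the *_buf() helpers above split a dma_addr_t into two 32-bit register writes when the core was synthesized with extended (>32-bit) addressing. A self-contained demonstration of the split, with an illustrative address:

/* Standalone demo of the ext_addr split used above: a 64-bit DMA
 * address is written as two 32-bit halves (buf_addr / buf_addr_msb).
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t buf = 0x800000000ULL + 0x100;  /* above the 4 GiB line */

	printf("buf_addr     = 0x%08x\n", lower_32_bits(buf)); /* 0x00000100 */
	printf("buf_addr_msb = 0x%08x\n", upper_32_bits(buf)); /* 0x00000008 */
	return 0;
}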
624 /* -----------------------------------------------------------------------------
629 * xilinx_vdma_alloc_tx_segment - Allocate transaction segment
640 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_vdma_alloc_tx_segment()
644 segment->phys = phys; in xilinx_vdma_alloc_tx_segment()
650 * xilinx_cdma_alloc_tx_segment - Allocate transaction segment
661 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_cdma_alloc_tx_segment()
665 segment->phys = phys; in xilinx_cdma_alloc_tx_segment()
671 * xilinx_axidma_alloc_tx_segment - Allocate transaction segment
682 spin_lock_irqsave(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
683 if (!list_empty(&chan->free_seg_list)) { in xilinx_axidma_alloc_tx_segment()
684 segment = list_first_entry(&chan->free_seg_list, in xilinx_axidma_alloc_tx_segment()
687 list_del(&segment->node); in xilinx_axidma_alloc_tx_segment()
689 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
692 dev_dbg(chan->dev, "Could not find free tx segment\n"); in xilinx_axidma_alloc_tx_segment()
698 * xilinx_aximcdma_alloc_tx_segment - Allocate transaction segment
709 spin_lock_irqsave(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
710 if (!list_empty(&chan->free_seg_list)) { in xilinx_aximcdma_alloc_tx_segment()
711 segment = list_first_entry(&chan->free_seg_list, in xilinx_aximcdma_alloc_tx_segment()
714 list_del(&segment->node); in xilinx_aximcdma_alloc_tx_segment()
716 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
723 u32 next_desc = hw->next_desc; in xilinx_dma_clean_hw_desc()
724 u32 next_desc_msb = hw->next_desc_msb; in xilinx_dma_clean_hw_desc()
728 hw->next_desc = next_desc; in xilinx_dma_clean_hw_desc()
729 hw->next_desc_msb = next_desc_msb; in xilinx_dma_clean_hw_desc()
734 u32 next_desc = hw->next_desc; in xilinx_mcdma_clean_hw_desc()
735 u32 next_desc_msb = hw->next_desc_msb; in xilinx_mcdma_clean_hw_desc()
739 hw->next_desc = next_desc; in xilinx_mcdma_clean_hw_desc()
740 hw->next_desc_msb = next_desc_msb; in xilinx_mcdma_clean_hw_desc()
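The two clean_hw_desc helpers above save and restore the next-descriptor words around a wipe of the rest of the hardware descriptor; the wipe itself is elided from this listing. A sketch of the full pattern on a simplified, assumed descriptor layout:

/* Sketch of the save/zero/restore pattern above. The elided middle of
 * the helpers presumably zeroes the whole descriptor; the next-pointer
 * fields are preserved so the free-list chain stays intact for reuse.
 */
#include <stdint.h>
#include <string.h>

struct hw_desc {
	uint32_t next_desc;
	uint32_t next_desc_msb;
	uint32_t buf_addr;
	uint32_t control;
	uint32_t status;
};

static void clean_hw_desc(struct hw_desc *hw)
{
	uint32_t next_desc = hw->next_desc;         /* save the chain links */
	uint32_t next_desc_msb = hw->next_desc_msb;

	memset(hw, 0, sizeof(*hw));                 /* wipe stale transfer state */

	hw->next_desc = next_desc;                  /* restore the links */
	hw->next_desc_msb = next_desc_msb;
}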
744 * xilinx_dma_free_tx_segment - Free transaction segment
751 xilinx_dma_clean_hw_desc(&segment->hw); in xilinx_dma_free_tx_segment()
753 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_dma_free_tx_segment()
757 * xilinx_mcdma_free_tx_segment - Free transaction segment
765 xilinx_mcdma_clean_hw_desc(&segment->hw); in xilinx_mcdma_free_tx_segment()
767 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_mcdma_free_tx_segment()
771 * xilinx_cdma_free_tx_segment - Free transaction segment
778 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_cdma_free_tx_segment()
782 * xilinx_vdma_free_tx_segment - Free transaction segment
789 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_vdma_free_tx_segment()
 793 * xilinx_dma_alloc_tx_descriptor - Allocate transaction descriptor
807 INIT_LIST_HEAD(&desc->segments); in xilinx_dma_alloc_tx_descriptor()
813 * xilinx_dma_free_tx_descriptor - Free transaction descriptor
829 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_free_tx_descriptor()
830 list_for_each_entry_safe(segment, next, &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
831 list_del(&segment->node); in xilinx_dma_free_tx_descriptor()
834 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_free_tx_descriptor()
836 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
837 list_del(&cdma_segment->node); in xilinx_dma_free_tx_descriptor()
840 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_tx_descriptor()
842 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
843 list_del(&axidma_segment->node); in xilinx_dma_free_tx_descriptor()
848 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
849 list_del(&aximcdma_segment->node); in xilinx_dma_free_tx_descriptor()
860 * xilinx_dma_free_desc_list - Free descriptors list
870 list_del(&desc->node); in xilinx_dma_free_desc_list()
876 * xilinx_dma_free_descriptors - Free channel descriptors
883 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_descriptors()
885 xilinx_dma_free_desc_list(chan, &chan->pending_list); in xilinx_dma_free_descriptors()
886 xilinx_dma_free_desc_list(chan, &chan->done_list); in xilinx_dma_free_descriptors()
887 xilinx_dma_free_desc_list(chan, &chan->active_list); in xilinx_dma_free_descriptors()
889 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_descriptors()
893 * xilinx_dma_free_chan_resources - Free channel resources
901 dev_dbg(chan->dev, "Free all channel resources.\n"); in xilinx_dma_free_chan_resources()
905 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_chan_resources()
906 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
907 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
908 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
911 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_free_chan_resources()
912 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_free_chan_resources()
913 chan->seg_p); in xilinx_dma_free_chan_resources()
916 dma_free_coherent(chan->dev, sizeof(*chan->cyclic_seg_v), in xilinx_dma_free_chan_resources()
917 chan->cyclic_seg_v, chan->cyclic_seg_p); in xilinx_dma_free_chan_resources()
920 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
921 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
922 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
923 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
926 dma_free_coherent(chan->dev, sizeof(*chan->seg_mv) * in xilinx_dma_free_chan_resources()
927 XILINX_DMA_NUM_DESCS, chan->seg_mv, in xilinx_dma_free_chan_resources()
928 chan->seg_p); in xilinx_dma_free_chan_resources()
931 if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA && in xilinx_dma_free_chan_resources()
932 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
933 dma_pool_destroy(chan->desc_pool); in xilinx_dma_free_chan_resources()
934 chan->desc_pool = NULL; in xilinx_dma_free_chan_resources()
940 * xilinx_dma_get_residue - Compute residue for a given descriptor
958 list_for_each(entry, &desc->segments) { in xilinx_dma_get_residue()
959 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_get_residue()
963 cdma_hw = &cdma_seg->hw; in xilinx_dma_get_residue()
964 residue += (cdma_hw->control - cdma_hw->status) & in xilinx_dma_get_residue()
965 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
966 } else if (chan->xdev->dma_config->dmatype == in xilinx_dma_get_residue()
971 axidma_hw = &axidma_seg->hw; in xilinx_dma_get_residue()
972 residue += (axidma_hw->control - axidma_hw->status) & in xilinx_dma_get_residue()
973 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
979 aximcdma_hw = &aximcdma_seg->hw; in xilinx_dma_get_residue()
981 (aximcdma_hw->control - aximcdma_hw->status) & in xilinx_dma_get_residue()
982 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
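The residue loop above sums, per buffer descriptor, the difference between the programmed length (control) and the transferred length (status), masked to the IP's length-field width. A standalone worked example:

/* Standalone model of the residue computation above: control carries
 * the programmed length, status the bytes actually transferred, and
 * max_buffer_len masks both to the length-field width.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t max_buffer_len = (1u << 26) - 1; /* e.g. 26-bit length field */
	struct { uint32_t control, status; } segs[] = {
		{ 4096, 4096 },  /* fully transferred */
		{ 4096, 1024 },  /* interrupted mid-segment */
		{ 4096, 0 },     /* not started */
	};
	uint32_t residue = 0;

	for (unsigned i = 0; i < 3; i++)
		residue += (segs[i].control - segs[i].status) & max_buffer_len;

	printf("residue = %u\n", (unsigned)residue);  /* 3072 + 4096 = 7168 */
	return 0;
}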
990 * xilinx_dma_chan_handle_cyclic - Cyclic dma callback
1002 callback = desc->async_tx.callback; in xilinx_dma_chan_handle_cyclic()
1003 callback_param = desc->async_tx.callback_param; in xilinx_dma_chan_handle_cyclic()
1005 spin_unlock_irqrestore(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1007 spin_lock_irqsave(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1012 * xilinx_dma_chan_desc_cleanup - Clean channel descriptors
1020 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1022 list_for_each_entry_safe(desc, next, &chan->done_list, node) { in xilinx_dma_chan_desc_cleanup()
1025 if (desc->cyclic) { in xilinx_dma_chan_desc_cleanup()
1031 list_del(&desc->node); in xilinx_dma_chan_desc_cleanup()
1033 if (unlikely(desc->err)) { in xilinx_dma_chan_desc_cleanup()
1034 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_dma_chan_desc_cleanup()
1042 result.residue = desc->residue; in xilinx_dma_chan_desc_cleanup()
1045 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1046 dmaengine_desc_get_callback_invoke(&desc->async_tx, &result); in xilinx_dma_chan_desc_cleanup()
1047 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1050 dma_run_dependencies(&desc->async_tx); in xilinx_dma_chan_desc_cleanup()
1054 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1058 * xilinx_dma_do_tasklet - Schedule completion tasklet
1069 * xilinx_dma_alloc_chan_resources - Allocate channel resources
1080 if (chan->desc_pool) in xilinx_dma_alloc_chan_resources()
1087 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
1089 chan->seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1090 sizeof(*chan->seg_v) * XILINX_DMA_NUM_DESCS, in xilinx_dma_alloc_chan_resources()
1091 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1092 if (!chan->seg_v) { in xilinx_dma_alloc_chan_resources()
1093 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1095 chan->id); in xilinx_dma_alloc_chan_resources()
1096 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1104 chan->cyclic_seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1105 sizeof(*chan->cyclic_seg_v), in xilinx_dma_alloc_chan_resources()
1106 &chan->cyclic_seg_p, in xilinx_dma_alloc_chan_resources()
1108 if (!chan->cyclic_seg_v) { in xilinx_dma_alloc_chan_resources()
1109 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1111 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1112 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_alloc_chan_resources()
1113 chan->seg_p); in xilinx_dma_alloc_chan_resources()
1114 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1116 chan->cyclic_seg_v->phys = chan->cyclic_seg_p; in xilinx_dma_alloc_chan_resources()
1119 chan->seg_v[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1120 lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1122 chan->seg_v[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1123 upper_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1125 chan->seg_v[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1126 sizeof(*chan->seg_v) * i; in xilinx_dma_alloc_chan_resources()
1127 list_add_tail(&chan->seg_v[i].node, in xilinx_dma_alloc_chan_resources()
1128 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1130 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_alloc_chan_resources()
1132 chan->seg_mv = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1133 sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1135 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1136 if (!chan->seg_mv) { in xilinx_dma_alloc_chan_resources()
1137 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1139 chan->id); in xilinx_dma_alloc_chan_resources()
1140 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1143 chan->seg_mv[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1144 lower_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1146 chan->seg_mv[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1147 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1149 chan->seg_mv[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1150 sizeof(*chan->seg_mv) * i; in xilinx_dma_alloc_chan_resources()
1151 list_add_tail(&chan->seg_mv[i].node, in xilinx_dma_alloc_chan_resources()
1152 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1154 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_alloc_chan_resources()
1155 chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1156 chan->dev, in xilinx_dma_alloc_chan_resources()
1161 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1162 chan->dev, in xilinx_dma_alloc_chan_resources()
1168 if (!chan->desc_pool && in xilinx_dma_alloc_chan_resources()
1169 ((chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA) && in xilinx_dma_alloc_chan_resources()
1170 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA)) { in xilinx_dma_alloc_chan_resources()
1171 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1173 chan->id); in xilinx_dma_alloc_chan_resources()
1174 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1179 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
 1180 /* For AXI DMA, resetting one channel will reset the in xilinx_dma_alloc_chan_resources()
1187 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_alloc_chan_resources()
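xilinx_dma_alloc_chan_resources() above pre-links every segment's hardware next_desc to the bus address of the following segment inside one coherent allocation, so later chains can be built by list splicing alone. The wraparound term is elided in this listing and assumed below from the (i + 1) arithmetic. A standalone model:

/* Standalone model of the free-segment pre-linking above. The modulo
 * wraparound is an assumption; the listing only shows the (i + 1)
 * arithmetic.
 */
#include <stdint.h>
#include <stdio.h>

#define NUM_DESCS 4

struct seg {
	struct { uint32_t next_desc, next_desc_msb; } hw;
	uint64_t phys;
};

int main(void)
{
	struct seg seg_v[NUM_DESCS];
	uint64_t seg_p = 0x800001000ULL;   /* illustrative bus address */

	for (int i = 0; i < NUM_DESCS; i++) {
		uint64_t next = seg_p + sizeof(*seg_v) * ((i + 1) % NUM_DESCS);

		seg_v[i].hw.next_desc = (uint32_t)next;
		seg_v[i].hw.next_desc_msb = (uint32_t)(next >> 32);
		seg_v[i].phys = seg_p + sizeof(*seg_v) * i;
	}
	printf("seg 0 -> 0x%llx\n",
	       ((unsigned long long)seg_v[0].hw.next_desc_msb << 32) |
	       seg_v[0].hw.next_desc);
	return 0;
}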
1195 * xilinx_dma_calc_copysize - Calculate the amount of data to copy
1207 copy = min_t(size_t, size - done, in xilinx_dma_calc_copysize()
1208 chan->xdev->max_buffer_len); in xilinx_dma_calc_copysize()
1211 chan->xdev->common.copy_align) { in xilinx_dma_calc_copysize()
1217 (1 << chan->xdev->common.copy_align)); in xilinx_dma_calc_copysize()
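xilinx_dma_calc_copysize() above clamps each chunk to the IP's maximum transfer length and, when more data still follows, rounds the chunk down to the copy alignment so only the final chunk may be unaligned. A standalone model with illustrative numbers:

/* Standalone model of the copy-size clamp above: take what remains,
 * cap it at the length-field limit, and round non-final chunks down to
 * the DRE alignment.
 */
#include <stdio.h>
#include <stddef.h>

static size_t calc_copysize(size_t size, size_t done,
			    size_t max_buffer_len, unsigned copy_align)
{
	size_t copy = size - done;

	if (copy > max_buffer_len)
		copy = max_buffer_len;                    /* min_t() in the driver */

	if (copy + done < size && copy_align)
		copy &= ~(((size_t)1 << copy_align) - 1); /* rounddown() */

	return copy;
}

int main(void)
{
	/* 100 KiB transfer, 64 KiB-limited IP, 64-byte alignment */
	printf("%zu\n", calc_copysize(102400, 0, 65536, 6));      /* 65536 */
	printf("%zu\n", calc_copysize(102400, 65536, 65536, 6));  /* 36864: last chunk */
	return 0;
}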
1223 * xilinx_dma_tx_status - Get DMA transaction status
1244 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_status()
1245 if (!list_empty(&chan->active_list)) { in xilinx_dma_tx_status()
1246 desc = list_last_entry(&chan->active_list, in xilinx_dma_tx_status()
1252 if (chan->has_sg && chan->xdev->dma_config->dmatype != XDMA_TYPE_VDMA) in xilinx_dma_tx_status()
1255 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_status()
1263 * xilinx_dma_stop_transfer - Halt DMA channel
1281 * xilinx_cdma_stop_transfer - Wait for the current transfer to complete
1296 * xilinx_dma_start - Start DMA channel
1312 dev_err(chan->dev, "Cannot start channel %p: %x\n", in xilinx_dma_start()
1315 chan->err = true; in xilinx_dma_start()
1320 * xilinx_vdma_start_transfer - Starts VDMA transfer
1325 struct xilinx_vdma_config *config = &chan->config; in xilinx_vdma_start_transfer() local
1332 if (chan->err) in xilinx_vdma_start_transfer()
1335 if (!chan->idle) in xilinx_vdma_start_transfer()
1338 if (list_empty(&chan->pending_list)) in xilinx_vdma_start_transfer()
1341 desc = list_first_entry(&chan->pending_list, in xilinx_vdma_start_transfer()
1344 /* Configure the hardware using info in the config structure */ in xilinx_vdma_start_transfer()
1345 if (chan->has_vflip) { in xilinx_vdma_start_transfer()
1348 reg |= config->vflip_en; in xilinx_vdma_start_transfer()
1355 if (config->frm_cnt_en) in xilinx_vdma_start_transfer()
1361 if (config->park) in xilinx_vdma_start_transfer()
1368 j = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1370 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_vdma_start_transfer()
1382 if (chan->err) in xilinx_vdma_start_transfer()
1386 if (chan->desc_submitcount < chan->num_frms) in xilinx_vdma_start_transfer()
1387 i = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1389 list_for_each_entry(segment, &desc->segments, node) { in xilinx_vdma_start_transfer()
1390 if (chan->ext_addr) in xilinx_vdma_start_transfer()
1393 segment->hw.buf_addr, in xilinx_vdma_start_transfer()
1394 segment->hw.buf_addr_msb); in xilinx_vdma_start_transfer()
1398 segment->hw.buf_addr); in xilinx_vdma_start_transfer()
1407 vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize); in xilinx_vdma_start_transfer()
1409 last->hw.stride); in xilinx_vdma_start_transfer()
1410 vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize); in xilinx_vdma_start_transfer()
1412 chan->desc_submitcount++; in xilinx_vdma_start_transfer()
1413 chan->desc_pendingcount--; in xilinx_vdma_start_transfer()
1414 list_del(&desc->node); in xilinx_vdma_start_transfer()
1415 list_add_tail(&desc->node, &chan->active_list); in xilinx_vdma_start_transfer()
1416 if (chan->desc_submitcount == chan->num_frms) in xilinx_vdma_start_transfer()
1417 chan->desc_submitcount = 0; in xilinx_vdma_start_transfer()
1419 chan->idle = false; in xilinx_vdma_start_transfer()
 1423 * xilinx_cdma_start_transfer - Starts CDMA transfer
1432 if (chan->err) in xilinx_cdma_start_transfer()
1435 if (!chan->idle) in xilinx_cdma_start_transfer()
1438 if (list_empty(&chan->pending_list)) in xilinx_cdma_start_transfer()
1441 head_desc = list_first_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1443 tail_desc = list_last_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1445 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_cdma_start_transfer()
1448 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_cdma_start_transfer()
1450 ctrl_reg |= chan->desc_pendingcount << in xilinx_cdma_start_transfer()
1455 if (chan->has_sg) { in xilinx_cdma_start_transfer()
1463 head_desc->async_tx.phys); in xilinx_cdma_start_transfer()
1467 tail_segment->phys); in xilinx_cdma_start_transfer()
1473 segment = list_first_entry(&head_desc->segments, in xilinx_cdma_start_transfer()
1477 hw = &segment->hw; in xilinx_cdma_start_transfer()
1480 xilinx_prep_dma_addr_t(hw->src_addr)); in xilinx_cdma_start_transfer()
1482 xilinx_prep_dma_addr_t(hw->dest_addr)); in xilinx_cdma_start_transfer()
1486 hw->control & chan->xdev->max_buffer_len); in xilinx_cdma_start_transfer()
1489 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_cdma_start_transfer()
1490 chan->desc_pendingcount = 0; in xilinx_cdma_start_transfer()
1491 chan->idle = false; in xilinx_cdma_start_transfer()
1495 * xilinx_dma_start_transfer - Starts DMA transfer
1504 if (chan->err) in xilinx_dma_start_transfer()
1507 if (list_empty(&chan->pending_list)) in xilinx_dma_start_transfer()
1510 if (!chan->idle) in xilinx_dma_start_transfer()
1513 head_desc = list_first_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1515 tail_desc = list_last_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1517 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_dma_start_transfer()
1522 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_dma_start_transfer()
1524 reg |= chan->desc_pendingcount << in xilinx_dma_start_transfer()
1529 if (chan->has_sg) in xilinx_dma_start_transfer()
1531 head_desc->async_tx.phys); in xilinx_dma_start_transfer()
1535 if (chan->err) in xilinx_dma_start_transfer()
1539 if (chan->has_sg) { in xilinx_dma_start_transfer()
1540 if (chan->cyclic) in xilinx_dma_start_transfer()
1542 chan->cyclic_seg_v->phys); in xilinx_dma_start_transfer()
1545 tail_segment->phys); in xilinx_dma_start_transfer()
1550 segment = list_first_entry(&head_desc->segments, in xilinx_dma_start_transfer()
1553 hw = &segment->hw; in xilinx_dma_start_transfer()
1556 xilinx_prep_dma_addr_t(hw->buf_addr)); in xilinx_dma_start_transfer()
1560 hw->control & chan->xdev->max_buffer_len); in xilinx_dma_start_transfer()
1563 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_dma_start_transfer()
1564 chan->desc_pendingcount = 0; in xilinx_dma_start_transfer()
1565 chan->idle = false; in xilinx_dma_start_transfer()
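In scatter-gather mode the start sequence above is ordering-sensitive: the current-descriptor pointer is programmed first, the channel is started, and writing the tail descriptor is what actually triggers descriptor fetch. A condensed sketch of that shape, reusing the driver's own helpers and register names, not a standalone program:

/* Condensed sketch of the SG start ordering above; xilinx_write(),
 * xilinx_dma_start() and the register names are the driver's own.
 */
static void sg_mode_start(struct xilinx_dma_chan *chan,
			  dma_addr_t head_phys, dma_addr_t tail_phys)
{
	xilinx_write(chan, XILINX_DMA_REG_CURDESC, head_phys);  /* 1: fetch point */
	xilinx_dma_start(chan);                                 /* 2: run, wait !halted */
	xilinx_write(chan, XILINX_DMA_REG_TAILDESC, tail_phys); /* 3: kick the engine */
}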
1569 * xilinx_mcdma_start_transfer - Starts MCDMA transfer
1583 if (chan->err) in xilinx_mcdma_start_transfer()
1586 if (!chan->idle) in xilinx_mcdma_start_transfer()
1589 if (list_empty(&chan->pending_list)) in xilinx_mcdma_start_transfer()
1592 head_desc = list_first_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1594 tail_desc = list_last_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1596 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_mcdma_start_transfer()
1599 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1601 if (chan->desc_pendingcount <= XILINX_MCDMA_COALESCE_MAX) { in xilinx_mcdma_start_transfer()
1603 reg |= chan->desc_pendingcount << in xilinx_mcdma_start_transfer()
1608 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1611 xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1612 head_desc->async_tx.phys); in xilinx_mcdma_start_transfer()
1616 reg |= BIT(chan->tdest); in xilinx_mcdma_start_transfer()
1620 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1622 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1626 if (chan->err) in xilinx_mcdma_start_transfer()
1630 xilinx_write(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1631 tail_segment->phys); in xilinx_mcdma_start_transfer()
1633 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_mcdma_start_transfer()
1634 chan->desc_pendingcount = 0; in xilinx_mcdma_start_transfer()
1635 chan->idle = false; in xilinx_mcdma_start_transfer()
1639 * xilinx_dma_issue_pending - Issue pending transactions
1647 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_issue_pending()
1648 chan->start_transfer(chan); in xilinx_dma_issue_pending()
1649 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_issue_pending()
1653 * xilinx_dma_complete_descriptor - Mark the active descriptor as complete
1663 if (list_empty(&chan->active_list)) in xilinx_dma_complete_descriptor()
1666 list_for_each_entry_safe(desc, next, &chan->active_list, node) { in xilinx_dma_complete_descriptor()
1667 if (chan->has_sg && chan->xdev->dma_config->dmatype != in xilinx_dma_complete_descriptor()
1669 desc->residue = xilinx_dma_get_residue(chan, desc); in xilinx_dma_complete_descriptor()
1671 desc->residue = 0; in xilinx_dma_complete_descriptor()
1672 desc->err = chan->err; in xilinx_dma_complete_descriptor()
1674 list_del(&desc->node); in xilinx_dma_complete_descriptor()
1675 if (!desc->cyclic) in xilinx_dma_complete_descriptor()
1676 dma_cookie_complete(&desc->async_tx); in xilinx_dma_complete_descriptor()
1677 list_add_tail(&desc->node, &chan->done_list); in xilinx_dma_complete_descriptor()
1682 * xilinx_dma_reset - Reset DMA channel
1700 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n", in xilinx_dma_reset()
1703 return -ETIMEDOUT; in xilinx_dma_reset()
1706 chan->err = false; in xilinx_dma_reset()
1707 chan->idle = true; in xilinx_dma_reset()
1708 chan->desc_pendingcount = 0; in xilinx_dma_reset()
1709 chan->desc_submitcount = 0; in xilinx_dma_reset()
1715 * xilinx_dma_chan_reset - Reset DMA channel and enable interrupts
1737 * xilinx_mcdma_irq_handler - MCDMA Interrupt handler
1748 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1760 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1761 chan_offset = chan->xdev->dma_config->max_channels / 2; in xilinx_mcdma_irq_handler()
1763 chan_offset = chan_offset + (chan_id - 1); in xilinx_mcdma_irq_handler()
1764 chan = chan->xdev->chan[chan_offset]; in xilinx_mcdma_irq_handler()
1766 status = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest)); in xilinx_mcdma_irq_handler()
1770 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest), in xilinx_mcdma_irq_handler()
1774 dev_err(chan->dev, "Channel %p has errors %x cdr %x tdr %x\n", in xilinx_mcdma_irq_handler()
1778 (chan->tdest)), in xilinx_mcdma_irq_handler()
1780 (chan->tdest))); in xilinx_mcdma_irq_handler()
1781 chan->err = true; in xilinx_mcdma_irq_handler()
1789 dev_dbg(chan->dev, "Inter-packet latency too long\n"); in xilinx_mcdma_irq_handler()
1793 spin_lock(&chan->lock); in xilinx_mcdma_irq_handler()
1795 chan->idle = true; in xilinx_mcdma_irq_handler()
1796 chan->start_transfer(chan); in xilinx_mcdma_irq_handler()
1797 spin_unlock(&chan->lock); in xilinx_mcdma_irq_handler()
1800 tasklet_schedule(&chan->tasklet); in xilinx_mcdma_irq_handler()
1805 * xilinx_dma_irq_handler - DMA Interrupt handler
1837 if (!chan->flush_on_fsync || in xilinx_dma_irq_handler()
1839 dev_err(chan->dev, in xilinx_dma_irq_handler()
1844 chan->err = true; in xilinx_dma_irq_handler()
1853 dev_dbg(chan->dev, "Inter-packet latency too long\n"); in xilinx_dma_irq_handler()
1857 spin_lock(&chan->lock); in xilinx_dma_irq_handler()
1859 chan->idle = true; in xilinx_dma_irq_handler()
1860 chan->start_transfer(chan); in xilinx_dma_irq_handler()
1861 spin_unlock(&chan->lock); in xilinx_dma_irq_handler()
1864 tasklet_schedule(&chan->tasklet); in xilinx_dma_irq_handler()
 1869 * append_desc_queue - Queue a descriptor
1882 if (list_empty(&chan->pending_list)) in append_desc_queue()
1889 tail_desc = list_last_entry(&chan->pending_list, in append_desc_queue()
1891 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in append_desc_queue()
1892 tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1895 tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1896 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in append_desc_queue()
1897 cdma_tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1900 cdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1901 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in append_desc_queue()
1902 axidma_tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1905 axidma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1908 list_last_entry(&tail_desc->segments, in append_desc_queue()
1911 aximcdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1919 list_add_tail(&desc->node, &chan->pending_list); in append_desc_queue()
1920 chan->desc_pendingcount++; in append_desc_queue()
1922 if (chan->has_sg && (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) in append_desc_queue()
1923 && unlikely(chan->desc_pendingcount > chan->num_frms)) { in append_desc_queue()
1924 dev_dbg(chan->dev, "desc pendingcount is too high\n"); in append_desc_queue()
1925 chan->desc_pendingcount = chan->num_frms; in append_desc_queue()
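append_desc_queue() above patches the previous tail segment's next_desc with the new transaction's first descriptor address, so successive transactions form one hardware-visible chain. A minimal standalone model:

/* Standalone model of the tail-linking above: appending a transaction
 * also updates the previous tail BD so the hardware sees a continuous
 * chain across transactions. Addresses are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

struct segment {
	struct { uint32_t next_desc; } hw;
	uint32_t phys;
};

int main(void)
{
	struct segment tail = { { 0 }, 0x1000 };    /* last BD of queued work */
	uint32_t new_desc_phys = 0x2000;            /* first BD of new tx */

	tail.hw.next_desc = new_desc_phys;          /* hardware-visible link */
	printf("BD @0x%x now chains to 0x%x\n",
	       (unsigned)tail.phys, (unsigned)tail.hw.next_desc);
	return 0;
}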
1930 * xilinx_dma_tx_submit - Submit DMA transaction
1938 struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan); in xilinx_dma_tx_submit()
1943 if (chan->cyclic) { in xilinx_dma_tx_submit()
1945 return -EBUSY; in xilinx_dma_tx_submit()
1948 if (chan->err) { in xilinx_dma_tx_submit()
1958 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_submit()
1965 if (desc->cyclic) in xilinx_dma_tx_submit()
1966 chan->cyclic = true; in xilinx_dma_tx_submit()
1968 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_submit()
1974 * xilinx_vdma_dma_prep_interleaved - prepare a descriptor for a
1992 if (!is_slave_direction(xt->dir)) in xilinx_vdma_dma_prep_interleaved()
1995 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
1998 if (xt->frame_size != 1) in xilinx_vdma_dma_prep_interleaved()
2006 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_vdma_dma_prep_interleaved()
2007 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_vdma_dma_prep_interleaved()
2008 async_tx_ack(&desc->async_tx); in xilinx_vdma_dma_prep_interleaved()
2016 hw = &segment->hw; in xilinx_vdma_dma_prep_interleaved()
2017 hw->vsize = xt->numf; in xilinx_vdma_dma_prep_interleaved()
2018 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
2019 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
2021 hw->stride |= chan->config.frm_dly << in xilinx_vdma_dma_prep_interleaved()
2024 if (xt->dir != DMA_MEM_TO_DEV) { in xilinx_vdma_dma_prep_interleaved()
2025 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2026 hw->buf_addr = lower_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2027 hw->buf_addr_msb = upper_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2029 hw->buf_addr = xt->dst_start; in xilinx_vdma_dma_prep_interleaved()
2032 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2033 hw->buf_addr = lower_32_bits(xt->src_start); in xilinx_vdma_dma_prep_interleaved()
2034 hw->buf_addr_msb = upper_32_bits(xt->src_start); in xilinx_vdma_dma_prep_interleaved()
2036 hw->buf_addr = xt->src_start; in xilinx_vdma_dma_prep_interleaved()
2041 list_add_tail(&segment->node, &desc->segments); in xilinx_vdma_dma_prep_interleaved()
2044 segment = list_first_entry(&desc->segments, in xilinx_vdma_dma_prep_interleaved()
2046 desc->async_tx.phys = segment->phys; in xilinx_vdma_dma_prep_interleaved()
2048 return &desc->async_tx; in xilinx_vdma_dma_prep_interleaved()
2056 * xilinx_cdma_prep_memcpy - prepare descriptors for a memcpy transaction
2074 if (!len || len > chan->xdev->max_buffer_len) in xilinx_cdma_prep_memcpy()
2081 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_cdma_prep_memcpy()
2082 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_cdma_prep_memcpy()
2089 hw = &segment->hw; in xilinx_cdma_prep_memcpy()
2090 hw->control = len; in xilinx_cdma_prep_memcpy()
2091 hw->src_addr = dma_src; in xilinx_cdma_prep_memcpy()
2092 hw->dest_addr = dma_dst; in xilinx_cdma_prep_memcpy()
2093 if (chan->ext_addr) { in xilinx_cdma_prep_memcpy()
2094 hw->src_addr_msb = upper_32_bits(dma_src); in xilinx_cdma_prep_memcpy()
2095 hw->dest_addr_msb = upper_32_bits(dma_dst); in xilinx_cdma_prep_memcpy()
2099 list_add_tail(&segment->node, &desc->segments); in xilinx_cdma_prep_memcpy()
2101 desc->async_tx.phys = segment->phys; in xilinx_cdma_prep_memcpy()
2102 hw->next_desc = segment->phys; in xilinx_cdma_prep_memcpy()
2104 return &desc->async_tx; in xilinx_cdma_prep_memcpy()
2112 * xilinx_dma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
2144 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_slave_sg()
2145 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_slave_sg()
2166 hw = &segment->hw; in xilinx_dma_prep_slave_sg()
2172 hw->control = copy; in xilinx_dma_prep_slave_sg()
2174 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2176 memcpy(hw->app, app_w, sizeof(u32) * in xilinx_dma_prep_slave_sg()
2186 list_add_tail(&segment->node, &desc->segments); in xilinx_dma_prep_slave_sg()
2190 segment = list_first_entry(&desc->segments, in xilinx_dma_prep_slave_sg()
2192 desc->async_tx.phys = segment->phys; in xilinx_dma_prep_slave_sg()
2195 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2196 segment->hw.control |= XILINX_DMA_BD_SOP; in xilinx_dma_prep_slave_sg()
2197 segment = list_last_entry(&desc->segments, in xilinx_dma_prep_slave_sg()
2200 segment->hw.control |= XILINX_DMA_BD_EOP; in xilinx_dma_prep_slave_sg()
2203 return &desc->async_tx; in xilinx_dma_prep_slave_sg()
 2211 * xilinx_dma_prep_dma_cyclic - prepare descriptors for a cyclic DMA transaction
2250 chan->direction = direction; in xilinx_dma_prep_dma_cyclic()
2251 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_dma_cyclic()
2252 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_dma_cyclic()
2271 hw = &segment->hw; in xilinx_dma_prep_dma_cyclic()
2274 hw->control = copy; in xilinx_dma_prep_dma_cyclic()
2277 prev->hw.next_desc = segment->phys; in xilinx_dma_prep_dma_cyclic()
2286 list_add_tail(&segment->node, &desc->segments); in xilinx_dma_prep_dma_cyclic()
2290 head_segment = list_first_entry(&desc->segments, in xilinx_dma_prep_dma_cyclic()
2292 desc->async_tx.phys = head_segment->phys; in xilinx_dma_prep_dma_cyclic()
2294 desc->cyclic = true; in xilinx_dma_prep_dma_cyclic()
2299 segment = list_last_entry(&desc->segments, in xilinx_dma_prep_dma_cyclic()
2302 segment->hw.next_desc = (u32) head_segment->phys; in xilinx_dma_prep_dma_cyclic()
2306 head_segment->hw.control |= XILINX_DMA_BD_SOP; in xilinx_dma_prep_dma_cyclic()
2307 segment->hw.control |= XILINX_DMA_BD_EOP; in xilinx_dma_prep_dma_cyclic()
2310 return &desc->async_tx; in xilinx_dma_prep_dma_cyclic()
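For cyclic transfers the code above points the last segment's next_desc back at the head segment, turning the BD chain into a ring the engine traverses until the channel is terminated. A standalone model:

/* Standalone model of the ring closure above: after forward-linking
 * one BD per period, the last BD is pointed back at the first.
 */
#include <stdint.h>
#include <stdio.h>

#define NUM_PERIODS 4

struct segment {
	uint32_t next_desc;
	uint32_t phys;
};

int main(void)
{
	struct segment seg[NUM_PERIODS];

	for (int i = 0; i < NUM_PERIODS; i++) {
		seg[i].phys = 0x1000 + 0x40 * i;
		if (i > 0)
			seg[i - 1].next_desc = seg[i].phys; /* prev->hw.next_desc */
	}
	seg[NUM_PERIODS - 1].next_desc = seg[0].phys;       /* close the loop */

	for (int i = 0; i < NUM_PERIODS; i++)
		printf("BD 0x%x -> 0x%x\n",
		       (unsigned)seg[i].phys, (unsigned)seg[i].next_desc);
	return 0;
}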
2318 * xilinx_mcdma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
2351 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_mcdma_prep_slave_sg()
2352 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_mcdma_prep_slave_sg()
2371 copy = min_t(size_t, sg_dma_len(sg) - sg_used, in xilinx_mcdma_prep_slave_sg()
2372 chan->xdev->max_buffer_len); in xilinx_mcdma_prep_slave_sg()
2373 hw = &segment->hw; in xilinx_mcdma_prep_slave_sg()
2378 hw->control = copy; in xilinx_mcdma_prep_slave_sg()
2380 if (chan->direction == DMA_MEM_TO_DEV && app_w) { in xilinx_mcdma_prep_slave_sg()
2381 memcpy(hw->app, app_w, sizeof(u32) * in xilinx_mcdma_prep_slave_sg()
2390 list_add_tail(&segment->node, &desc->segments); in xilinx_mcdma_prep_slave_sg()
2394 segment = list_first_entry(&desc->segments, in xilinx_mcdma_prep_slave_sg()
2396 desc->async_tx.phys = segment->phys; in xilinx_mcdma_prep_slave_sg()
2399 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_mcdma_prep_slave_sg()
2400 segment->hw.control |= XILINX_MCDMA_BD_SOP; in xilinx_mcdma_prep_slave_sg()
2401 segment = list_last_entry(&desc->segments, in xilinx_mcdma_prep_slave_sg()
2404 segment->hw.control |= XILINX_MCDMA_BD_EOP; in xilinx_mcdma_prep_slave_sg()
2407 return &desc->async_tx; in xilinx_mcdma_prep_slave_sg()
2416 * xilinx_dma_terminate_all - Halt the channel and free descriptors
2427 if (!chan->cyclic) { in xilinx_dma_terminate_all()
2428 err = chan->stop_transfer(chan); in xilinx_dma_terminate_all()
2430 dev_err(chan->dev, "Cannot stop channel %p: %x\n", in xilinx_dma_terminate_all()
2433 chan->err = true; in xilinx_dma_terminate_all()
2440 chan->idle = true; in xilinx_dma_terminate_all()
2442 if (chan->cyclic) { in xilinx_dma_terminate_all()
2446 chan->cyclic = false; in xilinx_dma_terminate_all()
2449 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_terminate_all()
 2457 * xilinx_vdma_channel_set_config - Configure VDMA channel
 2458 * Run-time configuration for AXI VDMA, supports:
2460 * . configure interrupt coalescing and inter-packet delay threshold
2475 if (cfg->reset) in xilinx_vdma_channel_set_config()
2480 chan->config.frm_dly = cfg->frm_dly; in xilinx_vdma_channel_set_config()
2481 chan->config.park = cfg->park; in xilinx_vdma_channel_set_config()
2484 chan->config.gen_lock = cfg->gen_lock; in xilinx_vdma_channel_set_config()
2485 chan->config.master = cfg->master; in xilinx_vdma_channel_set_config()
2488 if (cfg->gen_lock && chan->genlock) { in xilinx_vdma_channel_set_config()
2491 dmacr |= cfg->master << XILINX_DMA_DMACR_MASTER_SHIFT; in xilinx_vdma_channel_set_config()
2494 chan->config.frm_cnt_en = cfg->frm_cnt_en; in xilinx_vdma_channel_set_config()
2495 chan->config.vflip_en = cfg->vflip_en; in xilinx_vdma_channel_set_config()
2497 if (cfg->park) in xilinx_vdma_channel_set_config()
2498 chan->config.park_frm = cfg->park_frm; in xilinx_vdma_channel_set_config()
2500 chan->config.park_frm = -1; in xilinx_vdma_channel_set_config()
2502 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2503 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2505 if (cfg->coalesc <= XILINX_DMA_DMACR_FRAME_COUNT_MAX) { in xilinx_vdma_channel_set_config()
2507 dmacr |= cfg->coalesc << XILINX_DMA_DMACR_FRAME_COUNT_SHIFT; in xilinx_vdma_channel_set_config()
2508 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2511 if (cfg->delay <= XILINX_DMA_DMACR_DELAY_MAX) { in xilinx_vdma_channel_set_config()
2513 dmacr |= cfg->delay << XILINX_DMA_DMACR_DELAY_SHIFT; in xilinx_vdma_channel_set_config()
2514 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2519 dmacr |= cfg->ext_fsync << XILINX_DMA_DMACR_FSYNCSRC_SHIFT; in xilinx_vdma_channel_set_config()
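xilinx_vdma_channel_set_config() above updates DMACR with read-modify-write field writes: each knob (frame-count coalescing, delay timer, frame sync source) occupies a masked field, so updates clear the field first and then OR in the shifted value. A standalone model of that pattern; the masks and shifts here are invented for illustration, not the IP's real layout:

/* Standalone model of the DMACR read-modify-write pattern above.
 * Field positions are assumptions for demonstration only.
 */
#include <stdint.h>
#include <stdio.h>

#define FRAME_COUNT_MASK  (0xffu << 16)
#define FRAME_COUNT_SHIFT 16
#define DELAY_MASK        (0xffu << 24)
#define DELAY_SHIFT       24

int main(void)
{
	uint32_t dmacr = 0x00011003;           /* pretend current register value */

	dmacr &= ~FRAME_COUNT_MASK;            /* clear old coalesce count */
	dmacr |= 8u << FRAME_COUNT_SHIFT;      /* interrupt every 8 frames */
	dmacr &= ~DELAY_MASK;
	dmacr |= 100u << DELAY_SHIFT;          /* inter-frame delay timeout */

	printf("dmacr = 0x%08x\n", dmacr);
	return 0;
}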
2527 /* -----------------------------------------------------------------------------
2532 * xilinx_dma_chan_remove - Per Channel remove function
2541 if (chan->irq > 0) in xilinx_dma_chan_remove()
2542 free_irq(chan->irq, chan); in xilinx_dma_chan_remove()
2544 tasklet_kill(&chan->tasklet); in xilinx_dma_chan_remove()
2546 list_del(&chan->common.device_node); in xilinx_dma_chan_remove()
2557 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axidma_clk_init()
2559 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axidma_clk_init()
2561 *tx_clk = devm_clk_get(&pdev->dev, "m_axi_mm2s_aclk"); in axidma_clk_init()
2565 *rx_clk = devm_clk_get(&pdev->dev, "m_axi_s2mm_aclk"); in axidma_clk_init()
2569 *sg_clk = devm_clk_get(&pdev->dev, "m_axi_sg_aclk"); in axidma_clk_init()
2575 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", err); in axidma_clk_init()
2581 dev_err(&pdev->dev, "failed to enable tx_clk (%d)\n", err); in axidma_clk_init()
2587 dev_err(&pdev->dev, "failed to enable rx_clk (%d)\n", err); in axidma_clk_init()
2593 dev_err(&pdev->dev, "failed to enable sg_clk (%d)\n", err); in axidma_clk_init()
2619 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axicdma_clk_init()
2621 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axicdma_clk_init()
2623 *dev_clk = devm_clk_get(&pdev->dev, "m_axi_aclk"); in axicdma_clk_init()
2625 return dev_err_probe(&pdev->dev, PTR_ERR(*dev_clk), "failed to get dev_clk\n"); in axicdma_clk_init()
2629 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", err); in axicdma_clk_init()
2635 dev_err(&pdev->dev, "failed to enable dev_clk (%d)\n", err); in axicdma_clk_init()
2653 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axivdma_clk_init()
2655 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axivdma_clk_init()
2657 *tx_clk = devm_clk_get(&pdev->dev, "m_axi_mm2s_aclk"); in axivdma_clk_init()
2661 *txs_clk = devm_clk_get(&pdev->dev, "m_axis_mm2s_aclk"); in axivdma_clk_init()
2665 *rx_clk = devm_clk_get(&pdev->dev, "m_axi_s2mm_aclk"); in axivdma_clk_init()
2669 *rxs_clk = devm_clk_get(&pdev->dev, "s_axis_s2mm_aclk"); in axivdma_clk_init()
2675 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", in axivdma_clk_init()
2682 dev_err(&pdev->dev, "failed to enable tx_clk (%d)\n", err); in axivdma_clk_init()
2688 dev_err(&pdev->dev, "failed to enable txs_clk (%d)\n", err); in axivdma_clk_init()
2694 dev_err(&pdev->dev, "failed to enable rx_clk (%d)\n", err); in axivdma_clk_init()
2700 dev_err(&pdev->dev, "failed to enable rxs_clk (%d)\n", err); in axivdma_clk_init()
2720 clk_disable_unprepare(xdev->rxs_clk); in xdma_disable_allclks()
2721 clk_disable_unprepare(xdev->rx_clk); in xdma_disable_allclks()
2722 clk_disable_unprepare(xdev->txs_clk); in xdma_disable_allclks()
2723 clk_disable_unprepare(xdev->tx_clk); in xdma_disable_allclks()
2724 clk_disable_unprepare(xdev->axi_clk); in xdma_disable_allclks()
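The clock-init helpers above follow the usual devm_clk_get() plus clk_prepare_enable() pattern: lookups are device-managed, but enables must still be unwound by hand on failure, which is why xdma_disable_allclks() mirrors the enable order in reverse. A condensed two-clock sketch under the same assumptions; the wrapper name is illustrative:

/* Condensed sketch of the clock bring-up pattern above, reduced to two
 * clocks. devm_clk_get() ties lookup lifetime to the device; the
 * enable must be unwound explicitly, newest first.
 */
#include <linux/clk.h>
#include <linux/platform_device.h>

static int example_clk_init(struct platform_device *pdev,
			    struct clk **axi_clk, struct clk **dev_clk)
{
	int err;

	*axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk");
	if (IS_ERR(*axi_clk))
		return PTR_ERR(*axi_clk);

	*dev_clk = devm_clk_get(&pdev->dev, "m_axi_aclk");
	if (IS_ERR(*dev_clk))
		return PTR_ERR(*dev_clk);

	err = clk_prepare_enable(*axi_clk);
	if (err)
		return err;

	err = clk_prepare_enable(*dev_clk);
	if (err)
		clk_disable_unprepare(*axi_clk);  /* unwind the earlier enable */

	return err;
}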
2728 * xilinx_dma_chan_probe - Per Channel Probing
2746 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); in xilinx_dma_chan_probe()
2748 return -ENOMEM; in xilinx_dma_chan_probe()
2750 chan->dev = xdev->dev; in xilinx_dma_chan_probe()
2751 chan->xdev = xdev; in xilinx_dma_chan_probe()
2752 chan->desc_pendingcount = 0x0; in xilinx_dma_chan_probe()
2753 chan->ext_addr = xdev->ext_addr; in xilinx_dma_chan_probe()
2759 chan->idle = true; in xilinx_dma_chan_probe()
2761 spin_lock_init(&chan->lock); in xilinx_dma_chan_probe()
2762 INIT_LIST_HEAD(&chan->pending_list); in xilinx_dma_chan_probe()
2763 INIT_LIST_HEAD(&chan->done_list); in xilinx_dma_chan_probe()
2764 INIT_LIST_HEAD(&chan->active_list); in xilinx_dma_chan_probe()
2765 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_chan_probe()
2768 has_dre = of_property_read_bool(node, "xlnx,include-dre"); in xilinx_dma_chan_probe()
2770 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode"); in xilinx_dma_chan_probe()
2774 dev_err(xdev->dev, "missing xlnx,datawidth property\n"); in xilinx_dma_chan_probe()
2784 xdev->common.copy_align = fls(width - 1); in xilinx_dma_chan_probe()
2786 if (of_device_is_compatible(node, "xlnx,axi-vdma-mm2s-channel") || in xilinx_dma_chan_probe()
2787 of_device_is_compatible(node, "xlnx,axi-dma-mm2s-channel") || in xilinx_dma_chan_probe()
2788 of_device_is_compatible(node, "xlnx,axi-cdma-channel")) { in xilinx_dma_chan_probe()
2789 chan->direction = DMA_MEM_TO_DEV; in xilinx_dma_chan_probe()
2790 chan->id = xdev->mm2s_chan_id++; in xilinx_dma_chan_probe()
2791 chan->tdest = chan->id; in xilinx_dma_chan_probe()
2793 chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET; in xilinx_dma_chan_probe()
2794 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2795 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET; in xilinx_dma_chan_probe()
2796 chan->config.park = 1; in xilinx_dma_chan_probe()
2798 if (xdev->flush_on_fsync == XILINX_DMA_FLUSH_BOTH || in xilinx_dma_chan_probe()
2799 xdev->flush_on_fsync == XILINX_DMA_FLUSH_MM2S) in xilinx_dma_chan_probe()
2800 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2803 "xlnx,axi-vdma-s2mm-channel") || in xilinx_dma_chan_probe()
2805 "xlnx,axi-dma-s2mm-channel")) { in xilinx_dma_chan_probe()
2806 chan->direction = DMA_DEV_TO_MEM; in xilinx_dma_chan_probe()
2807 chan->id = xdev->s2mm_chan_id++; in xilinx_dma_chan_probe()
2808 chan->tdest = chan->id - xdev->dma_config->max_channels / 2; in xilinx_dma_chan_probe()
2809 chan->has_vflip = of_property_read_bool(node, in xilinx_dma_chan_probe()
2810 "xlnx,enable-vert-flip"); in xilinx_dma_chan_probe()
2811 if (chan->has_vflip) { in xilinx_dma_chan_probe()
2812 chan->config.vflip_en = dma_read(chan, in xilinx_dma_chan_probe()
2817 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) in xilinx_dma_chan_probe()
2818 chan->ctrl_offset = XILINX_MCDMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2820 chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2822 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2823 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET; in xilinx_dma_chan_probe()
2824 chan->config.park = 1; in xilinx_dma_chan_probe()
2826 if (xdev->flush_on_fsync == XILINX_DMA_FLUSH_BOTH || in xilinx_dma_chan_probe()
2827 xdev->flush_on_fsync == XILINX_DMA_FLUSH_S2MM) in xilinx_dma_chan_probe()
2828 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2831 dev_err(xdev->dev, "Invalid channel compatible node\n"); in xilinx_dma_chan_probe()
2832 return -EINVAL; in xilinx_dma_chan_probe()
2836 chan->irq = irq_of_parse_and_map(node, chan->tdest); in xilinx_dma_chan_probe()
2837 err = request_irq(chan->irq, xdev->dma_config->irq_handler, in xilinx_dma_chan_probe()
2838 IRQF_SHARED, "xilinx-dma-controller", chan); in xilinx_dma_chan_probe()
2840 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq); in xilinx_dma_chan_probe()
2844 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_chan_probe()
2845 chan->start_transfer = xilinx_dma_start_transfer; in xilinx_dma_chan_probe()
2846 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2847 } else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_chan_probe()
2848 chan->start_transfer = xilinx_mcdma_start_transfer; in xilinx_dma_chan_probe()
2849 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2850 } else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_chan_probe()
2851 chan->start_transfer = xilinx_cdma_start_transfer; in xilinx_dma_chan_probe()
2852 chan->stop_transfer = xilinx_cdma_stop_transfer; in xilinx_dma_chan_probe()
2854 chan->start_transfer = xilinx_vdma_start_transfer; in xilinx_dma_chan_probe()
2855 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2859 if (xdev->dma_config->dmatype != XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2860 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA || in xilinx_dma_chan_probe()
2863 chan->has_sg = true; in xilinx_dma_chan_probe()
2864 dev_dbg(chan->dev, "ch %d: SG %s\n", chan->id, in xilinx_dma_chan_probe()
2865 chan->has_sg ? "enabled" : "disabled"); in xilinx_dma_chan_probe()
2869 tasklet_setup(&chan->tasklet, xilinx_dma_do_tasklet); in xilinx_dma_chan_probe()
2875 chan->common.device = &xdev->common; in xilinx_dma_chan_probe()
2877 list_add_tail(&chan->common.device_node, &xdev->common.channels); in xilinx_dma_chan_probe()
2878 xdev->chan[chan->id] = chan; in xilinx_dma_chan_probe()
2883 dev_err(xdev->dev, "Reset channel failed\n"); in xilinx_dma_chan_probe()
2891 * xilinx_dma_child_probe - Per child node probe
 2892 * It gets the number of dma-channels per child node from the
 2893 * device tree and initializes all the channels.
2905 ret = of_property_read_u32(node, "dma-channels", &nr_channels); in xilinx_dma_child_probe()
2906 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA && ret < 0) in xilinx_dma_child_probe()
2907 dev_warn(xdev->dev, "missing dma-channels property\n"); in xilinx_dma_child_probe()
2916 * of_dma_xilinx_xlate - Translation function
2925 struct xilinx_dma_device *xdev = ofdma->of_dma_data; in of_dma_xilinx_xlate()
2926 int chan_id = dma_spec->args[0]; in of_dma_xilinx_xlate()
2928 if (chan_id >= xdev->dma_config->max_channels || !xdev->chan[chan_id]) in of_dma_xilinx_xlate()
2931 return dma_get_slave_channel(&xdev->chan[chan_id]->common); in of_dma_xilinx_xlate()
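The translation function above only takes effect once it is registered for the controller's OF node; after that, a client node's dmas = <&dma 3> resolves through of_dma_xilinx_xlate() with dma_spec->args[0] == 3. A sketch of that wiring, mirroring the probe path excerpted further down; the wrapper is illustrative:

/* Sketch of how the xlate above is wired up during probe. */
#include <linux/of_dma.h>
#include <linux/platform_device.h>

static int example_register_xlate(struct platform_device *pdev,
				  struct xilinx_dma_device *xdev)
{
	return of_dma_controller_register(pdev->dev.of_node,
					  of_dma_xilinx_xlate, xdev);
}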
2962 { .compatible = "xlnx,axi-dma-1.00.a", .data = &axidma_config },
2963 { .compatible = "xlnx,axi-cdma-1.00.a", .data = &axicdma_config },
2964 { .compatible = "xlnx,axi-vdma-1.00.a", .data = &axivdma_config },
2965 { .compatible = "xlnx,axi-mcdma-1.00.a", .data = &aximcdma_config },
2971 * xilinx_dma_probe - Driver probe function
2981 struct device_node *node = pdev->dev.of_node; in xilinx_dma_probe()
2983 struct device_node *child, *np = pdev->dev.of_node; in xilinx_dma_probe()
2988 xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL); in xilinx_dma_probe()
2990 return -ENOMEM; in xilinx_dma_probe()
2992 xdev->dev = &pdev->dev; in xilinx_dma_probe()
2997 if (match && match->data) { in xilinx_dma_probe()
2998 xdev->dma_config = match->data; in xilinx_dma_probe()
2999 clk_init = xdev->dma_config->clk_init; in xilinx_dma_probe()
3003 err = clk_init(pdev, &xdev->axi_clk, &xdev->tx_clk, &xdev->txs_clk, in xilinx_dma_probe()
3004 &xdev->rx_clk, &xdev->rxs_clk); in xilinx_dma_probe()
3009 xdev->regs = devm_platform_ioremap_resource(pdev, 0); in xilinx_dma_probe()
3010 if (IS_ERR(xdev->regs)) in xilinx_dma_probe()
3011 return PTR_ERR(xdev->regs); in xilinx_dma_probe()
3014 xdev->max_buffer_len = GENMASK(XILINX_DMA_MAX_TRANS_LEN_MAX - 1, 0); in xilinx_dma_probe()
3015 xdev->s2mm_chan_id = xdev->dma_config->max_channels / 2; in xilinx_dma_probe()
3017 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA || in xilinx_dma_probe()
3018 xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_probe()
3019 if (!of_property_read_u32(node, "xlnx,sg-length-width", in xilinx_dma_probe()
3023 dev_warn(xdev->dev, in xilinx_dma_probe()
3024 "invalid xlnx,sg-length-width property value. Using default width\n"); in xilinx_dma_probe()
3027 dev_warn(xdev->dev, "Please ensure that IP supports buffer length > 23 bits\n"); in xilinx_dma_probe()
3028 xdev->max_buffer_len = in xilinx_dma_probe()
3029 GENMASK(len_width - 1, 0); in xilinx_dma_probe()
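The xlnx,sg-length-width handling above derives max_buffer_len as a GENMASK over the configured length-field width. A standalone worked example for a 26-bit field, with a local macro standing in for the kernel's GENMASK():

/* Worked example of the max_buffer_len computation above: a length
 * width of 26 yields GENMASK(25, 0), i.e. up to 2^26 - 1 bytes per
 * descriptor.
 */
#include <stdint.h>
#include <stdio.h>

#define GENMASK32(h, l) (((~0u) >> (31 - (h))) & ~((1u << (l)) - 1))

int main(void)
{
	unsigned len_width = 26;                     /* from the device tree */
	uint32_t max_buffer_len = GENMASK32(len_width - 1, 0);

	printf("max_buffer_len = 0x%08x (%u bytes)\n",
	       max_buffer_len, max_buffer_len);      /* 0x03ffffff, 67108863 */
	return 0;
}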
3034 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_probe()
3035 err = of_property_read_u32(node, "xlnx,num-fstores", in xilinx_dma_probe()
3038 dev_err(xdev->dev, in xilinx_dma_probe()
3039 "missing xlnx,num-fstores property\n"); in xilinx_dma_probe()
3043 err = of_property_read_u32(node, "xlnx,flush-fsync", in xilinx_dma_probe()
3044 &xdev->flush_on_fsync); in xilinx_dma_probe()
3046 dev_warn(xdev->dev, in xilinx_dma_probe()
3047 "missing xlnx,flush-fsync property\n"); in xilinx_dma_probe()
3052 dev_warn(xdev->dev, "missing xlnx,addrwidth property\n"); in xilinx_dma_probe()
3055 xdev->ext_addr = true; in xilinx_dma_probe()
3057 xdev->ext_addr = false; in xilinx_dma_probe()
3060 dma_set_mask(xdev->dev, DMA_BIT_MASK(addr_width)); in xilinx_dma_probe()
3063 xdev->common.dev = &pdev->dev; in xilinx_dma_probe()
3065 INIT_LIST_HEAD(&xdev->common.channels); in xilinx_dma_probe()
3066 if (!(xdev->dma_config->dmatype == XDMA_TYPE_CDMA)) { in xilinx_dma_probe()
3067 dma_cap_set(DMA_SLAVE, xdev->common.cap_mask); in xilinx_dma_probe()
3068 dma_cap_set(DMA_PRIVATE, xdev->common.cap_mask); in xilinx_dma_probe()
3071 xdev->common.device_alloc_chan_resources = in xilinx_dma_probe()
3073 xdev->common.device_free_chan_resources = in xilinx_dma_probe()
3075 xdev->common.device_terminate_all = xilinx_dma_terminate_all; in xilinx_dma_probe()
3076 xdev->common.device_tx_status = xilinx_dma_tx_status; in xilinx_dma_probe()
3077 xdev->common.device_issue_pending = xilinx_dma_issue_pending; in xilinx_dma_probe()
3078 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_probe()
3079 dma_cap_set(DMA_CYCLIC, xdev->common.cap_mask); in xilinx_dma_probe()
3080 xdev->common.device_prep_slave_sg = xilinx_dma_prep_slave_sg; in xilinx_dma_probe()
3081 xdev->common.device_prep_dma_cyclic = in xilinx_dma_probe()
 3083 /* Residue calculation is supported only by AXI DMA and CDMA */ in xilinx_dma_probe()
3084 xdev->common.residue_granularity = in xilinx_dma_probe()
3086 } else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_probe()
3087 dma_cap_set(DMA_MEMCPY, xdev->common.cap_mask); in xilinx_dma_probe()
3088 xdev->common.device_prep_dma_memcpy = xilinx_cdma_prep_memcpy; in xilinx_dma_probe()
 3089 /* Residue calculation is supported only by AXI DMA and CDMA */ in xilinx_dma_probe()
3090 xdev->common.residue_granularity = in xilinx_dma_probe()
3092 } else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_probe()
3093 xdev->common.device_prep_slave_sg = xilinx_mcdma_prep_slave_sg; in xilinx_dma_probe()
3095 xdev->common.device_prep_interleaved_dma = in xilinx_dma_probe()
3108 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_probe()
3109 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_probe()
3110 if (xdev->chan[i]) in xilinx_dma_probe()
3111 xdev->chan[i]->num_frms = num_frames; in xilinx_dma_probe()
3115 dma_async_device_register(&xdev->common); in xilinx_dma_probe()
3120 dev_err(&pdev->dev, "Unable to register DMA to DT\n"); in xilinx_dma_probe()
3121 dma_async_device_unregister(&xdev->common); in xilinx_dma_probe()
3125 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) in xilinx_dma_probe()
3126 dev_info(&pdev->dev, "Xilinx AXI DMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3127 else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) in xilinx_dma_probe()
3128 dev_info(&pdev->dev, "Xilinx AXI CDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3129 else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) in xilinx_dma_probe()
3130 dev_info(&pdev->dev, "Xilinx AXI MCDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3132 dev_info(&pdev->dev, "Xilinx AXI VDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3139 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_probe()
3140 if (xdev->chan[i]) in xilinx_dma_probe()
3141 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_probe()
3147 * xilinx_dma_remove - Driver remove function
3157 of_dma_controller_free(pdev->dev.of_node); in xilinx_dma_remove()
3159 dma_async_device_unregister(&xdev->common); in xilinx_dma_remove()
3161 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_remove()
3162 if (xdev->chan[i]) in xilinx_dma_remove()
3163 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_remove()
3172 .name = "xilinx-vdma",