Lines Matching +full:axi +full:- +full:config
1 // SPDX-License-Identifier: GPL-2.0-or-later
5 * Copyright (C) 2010-2014 Xilinx, Inc. All rights reserved.
10 * The AXI Video Direct Memory Access (AXI VDMA) core is a soft Xilinx IP
11 * core that provides high-bandwidth direct memory access between memory
12 * and AXI4-Stream type video target peripherals. The core provides efficient
18 * registers are accessed through an AXI4-Lite slave interface.
20 * The AXI Direct Memory Access (AXI DMA) core is a soft Xilinx IP core that
21 * provides high-bandwidth one-dimensional direct memory access between memory
22 * and AXI4-Stream target peripherals. It supports one receive and one
25 * The AXI CDMA is a soft IP that provides high-bandwidth Direct Memory
26 * Access (DMA) between a memory-mapped source address and a memory-mapped
29 * The AXI Multichannel Direct Memory Access (AXI MCDMA) core is a soft
30 * Xilinx IP that provides high-bandwidth direct memory access between
31 * memory and AXI4-Stream target peripherals. It provides scatter-gather
51 #include <linux/io-64-nonatomic-lo-hi.h>
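
From a client driver's point of view, all four IPs sit behind the generic dmaengine API. The following is a minimal client-side sketch (not part of this file), assuming a consumer device with dmas/dma-names properties, an already DMA-mapped scatterlist sgl/sg_len, and a hypothetical completion callback:

#include <linux/dmaengine.h>

static int example_start_rx(struct device *dev, struct scatterlist *sgl,
			    unsigned int sg_len, dma_async_tx_callback done_cb)
{
	struct dma_async_tx_descriptor *tx;
	struct dma_chan *chan;
	dma_cookie_t cookie;

	chan = dma_request_chan(dev, "rx");	/* matches a dma-names entry */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT);
	if (!tx) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	tx->callback = done_cb;
	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie)) {
		dma_release_channel(chan);
		return -EINVAL;
	}

	dma_async_issue_pending(chan);	/* nothing runs until this call */
	return 0;
}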
162 /* AXI VDMA Flush on Fsync bits */
170 /* AXI DMA Specific Registers/Offsets */
174 /* AXI DMA Specific Masks/Bit fields */
190 /* AXI CDMA Specific Registers/Offsets */
194 /* AXI CDMA Specific Masks */
200 /* AXI MCDMA Specific Registers/Offsets */
212 /* AXI MCDMA Specific Masks/Shifts */
225 * struct xilinx_vdma_desc_hw - Hardware Descriptor
246 * struct xilinx_axidma_desc_hw - Hardware Descriptor for AXI DMA
255 * @app: APP Fields @0x20 - 0x30
270 * struct xilinx_aximcdma_desc_hw - Hardware Descriptor for AXI MCDMA
279 * @app: APP Fields @0x20 - 0x30
294 * struct xilinx_cdma_desc_hw - Hardware Descriptor
316 * struct xilinx_vdma_tx_segment - Descriptor segment
328 * struct xilinx_axidma_tx_segment - Descriptor segment
340 * struct xilinx_aximcdma_tx_segment - Descriptor segment
352 * struct xilinx_cdma_tx_segment - Descriptor segment
364 * struct xilinx_dma_tx_descriptor - Per Transaction structure
382 * struct xilinx_dma_chan - Driver specific DMA channel structure
405 * @config: Device configuration info
444 struct xilinx_vdma_config config; member
462 * enum xdma_ip_type - DMA IP type.
464 * @XDMA_TYPE_AXIDMA: AXI DMA IP.
465 * @XDMA_TYPE_CDMA: AXI CDMA IP.
466 * @XDMA_TYPE_VDMA: AXI VDMA IP.
467 * @XDMA_TYPE_AXIMCDMA: AXI MCDMA IP.
487 * struct xilinx_dma_device - DMA device structure
495 * @dma_config: DMA config structure
496 * @axi_clk: DMA AXI4-Lite interface clock
504 * @has_axistream_connected: AXI DMA connected to AXI Stream IP
532 readl_poll_timeout_atomic(chan->xdev->regs + chan->ctrl_offset + reg, \
538 return ioread32(chan->xdev->regs + reg); in dma_read()
543 iowrite32(value, chan->xdev->regs + reg); in dma_write()
549 dma_write(chan, chan->desc_offset + reg, value); in vdma_desc_write()
554 return dma_read(chan, chan->ctrl_offset + reg); in dma_ctrl_read()
560 dma_write(chan, chan->ctrl_offset + reg, value); in dma_ctrl_write()
576 * vdma_desc_write_64 - 64-bit descriptor write
590 writel(value_lsb, chan->xdev->regs + chan->desc_offset + reg); in vdma_desc_write_64()
593 writel(value_msb, chan->xdev->regs + chan->desc_offset + reg + 4); in vdma_desc_write_64()
598 lo_hi_writeq(value, chan->xdev->regs + chan->ctrl_offset + reg); in dma_writeq()
604 if (chan->ext_addr) in xilinx_write()
615 if (chan->ext_addr) { in xilinx_axidma_buf()
616 hw->buf_addr = lower_32_bits(buf_addr + sg_used + period_len); in xilinx_axidma_buf()
617 hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used + in xilinx_axidma_buf()
620 hw->buf_addr = buf_addr + sg_used + period_len; in xilinx_axidma_buf()
628 if (chan->ext_addr) { in xilinx_aximcdma_buf()
629 hw->buf_addr = lower_32_bits(buf_addr + sg_used); in xilinx_aximcdma_buf()
630 hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used); in xilinx_aximcdma_buf()
632 hw->buf_addr = buf_addr + sg_used; in xilinx_aximcdma_buf()
637 * xilinx_dma_get_metadata_ptr - Populate metadata pointer and payload length
650 seg = list_first_entry(&desc->segments, in xilinx_dma_get_metadata_ptr()
652 return seg->hw.app; in xilinx_dma_get_metadata_ptr()
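
The helper above backs the DESC_METADATA_ENGINE mode advertised when the core reports an AXI-Stream connection (see has_axistream_connected below). A client-side sketch for the DMA_MEM_TO_DEV direction, where tx is a descriptor returned by a prep call and the APP word value is purely illustrative:

static int example_set_app_words(struct dma_async_tx_descriptor *tx)
{
	size_t payload_len, max_len;
	u32 *app;

	app = dmaengine_desc_get_metadata_ptr(tx, &payload_len, &max_len);
	if (IS_ERR(app))
		return PTR_ERR(app);

	app[0] = 0x12345678;		/* illustrative APP word */
	return dmaengine_desc_set_metadata_len(tx, sizeof(u32));
}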
659 /* -----------------------------------------------------------------------------
664 * xilinx_vdma_alloc_tx_segment - Allocate transaction segment
675 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_vdma_alloc_tx_segment()
679 segment->phys = phys; in xilinx_vdma_alloc_tx_segment()
685 * xilinx_cdma_alloc_tx_segment - Allocate transaction segment
696 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys); in xilinx_cdma_alloc_tx_segment()
700 segment->phys = phys; in xilinx_cdma_alloc_tx_segment()
706 * xilinx_axidma_alloc_tx_segment - Allocate transaction segment
717 spin_lock_irqsave(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
718 if (!list_empty(&chan->free_seg_list)) { in xilinx_axidma_alloc_tx_segment()
719 segment = list_first_entry(&chan->free_seg_list, in xilinx_axidma_alloc_tx_segment()
722 list_del(&segment->node); in xilinx_axidma_alloc_tx_segment()
724 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_axidma_alloc_tx_segment()
727 dev_dbg(chan->dev, "Could not find free tx segment\n"); in xilinx_axidma_alloc_tx_segment()
733 * xilinx_aximcdma_alloc_tx_segment - Allocate transaction segment
744 spin_lock_irqsave(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
745 if (!list_empty(&chan->free_seg_list)) { in xilinx_aximcdma_alloc_tx_segment()
746 segment = list_first_entry(&chan->free_seg_list, in xilinx_aximcdma_alloc_tx_segment()
749 list_del(&segment->node); in xilinx_aximcdma_alloc_tx_segment()
751 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_aximcdma_alloc_tx_segment()
758 u32 next_desc = hw->next_desc; in xilinx_dma_clean_hw_desc()
759 u32 next_desc_msb = hw->next_desc_msb; in xilinx_dma_clean_hw_desc()
763 hw->next_desc = next_desc; in xilinx_dma_clean_hw_desc()
764 hw->next_desc_msb = next_desc_msb; in xilinx_dma_clean_hw_desc()
769 u32 next_desc = hw->next_desc; in xilinx_mcdma_clean_hw_desc()
770 u32 next_desc_msb = hw->next_desc_msb; in xilinx_mcdma_clean_hw_desc()
774 hw->next_desc = next_desc; in xilinx_mcdma_clean_hw_desc()
775 hw->next_desc_msb = next_desc_msb; in xilinx_mcdma_clean_hw_desc()
779 * xilinx_dma_free_tx_segment - Free transaction segment
786 xilinx_dma_clean_hw_desc(&segment->hw); in xilinx_dma_free_tx_segment()
788 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_dma_free_tx_segment()
792 * xilinx_mcdma_free_tx_segment - Free transaction segment
800 xilinx_mcdma_clean_hw_desc(&segment->hw); in xilinx_mcdma_free_tx_segment()
802 list_add_tail(&segment->node, &chan->free_seg_list); in xilinx_mcdma_free_tx_segment()
806 * xilinx_cdma_free_tx_segment - Free transaction segment
813 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_cdma_free_tx_segment()
817 * xilinx_vdma_free_tx_segment - Free transaction segment
824 dma_pool_free(chan->desc_pool, segment, segment->phys); in xilinx_vdma_free_tx_segment()
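
The VDMA and CDMA segment helpers above follow the standard dma_pool life cycle: create once per channel, zalloc per segment, free per segment, destroy on teardown. A condensed sketch of that pattern, with struct example_hw standing in for the hardware descriptor type:

struct example_hw { u32 words[8]; };	/* stand-in descriptor layout */

static int example_pool_cycle(struct device *dev)
{
	struct dma_pool *pool;
	dma_addr_t phys;
	void *seg;

	pool = dma_pool_create("example_pool", dev, sizeof(struct example_hw),
			       __alignof__(struct example_hw), 0);
	if (!pool)
		return -ENOMEM;

	seg = dma_pool_zalloc(pool, GFP_ATOMIC, &phys);
	if (seg) {
		/* hardware is pointed at "phys"; the CPU fills in "seg" */
		dma_pool_free(pool, seg, phys);
	}

	dma_pool_destroy(pool);
	return 0;
}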
828 * xilinx_dma_alloc_tx_descriptor - Allocate transaction descriptor
842 INIT_LIST_HEAD(&desc->segments); in xilinx_dma_alloc_tx_descriptor()
848 * xilinx_dma_free_tx_descriptor - Free transaction descriptor
864 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_free_tx_descriptor()
865 list_for_each_entry_safe(segment, next, &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
866 list_del(&segment->node); in xilinx_dma_free_tx_descriptor()
869 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_free_tx_descriptor()
871 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
872 list_del(&cdma_segment->node); in xilinx_dma_free_tx_descriptor()
875 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_tx_descriptor()
877 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
878 list_del(&axidma_segment->node); in xilinx_dma_free_tx_descriptor()
883 &desc->segments, node) { in xilinx_dma_free_tx_descriptor()
884 list_del(&aximcdma_segment->node); in xilinx_dma_free_tx_descriptor()
895 * xilinx_dma_free_desc_list - Free descriptors list
905 list_del(&desc->node); in xilinx_dma_free_desc_list()
911 * xilinx_dma_free_descriptors - Free channel descriptors
918 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_descriptors()
920 xilinx_dma_free_desc_list(chan, &chan->pending_list); in xilinx_dma_free_descriptors()
921 xilinx_dma_free_desc_list(chan, &chan->done_list); in xilinx_dma_free_descriptors()
922 xilinx_dma_free_desc_list(chan, &chan->active_list); in xilinx_dma_free_descriptors()
924 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_descriptors()
928 * xilinx_dma_free_chan_resources - Free channel resources
936 dev_dbg(chan->dev, "Free all channel resources.\n"); in xilinx_dma_free_chan_resources()
940 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_free_chan_resources()
941 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
942 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
943 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
946 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_free_chan_resources()
947 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_free_chan_resources()
948 chan->seg_p); in xilinx_dma_free_chan_resources()
951 dma_free_coherent(chan->dev, sizeof(*chan->cyclic_seg_v), in xilinx_dma_free_chan_resources()
952 chan->cyclic_seg_v, chan->cyclic_seg_p); in xilinx_dma_free_chan_resources()
955 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
956 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_free_chan_resources()
957 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_free_chan_resources()
958 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_free_chan_resources()
961 dma_free_coherent(chan->dev, sizeof(*chan->seg_mv) * in xilinx_dma_free_chan_resources()
962 XILINX_DMA_NUM_DESCS, chan->seg_mv, in xilinx_dma_free_chan_resources()
963 chan->seg_p); in xilinx_dma_free_chan_resources()
966 if (chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA && in xilinx_dma_free_chan_resources()
967 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA) { in xilinx_dma_free_chan_resources()
968 dma_pool_destroy(chan->desc_pool); in xilinx_dma_free_chan_resources()
969 chan->desc_pool = NULL; in xilinx_dma_free_chan_resources()
975 * xilinx_dma_get_residue - Compute residue for a given descriptor
993 list_for_each(entry, &desc->segments) { in xilinx_dma_get_residue()
994 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_get_residue()
998 cdma_hw = &cdma_seg->hw; in xilinx_dma_get_residue()
999 residue += (cdma_hw->control - cdma_hw->status) & in xilinx_dma_get_residue()
1000 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
1001 } else if (chan->xdev->dma_config->dmatype == in xilinx_dma_get_residue()
1006 axidma_hw = &axidma_seg->hw; in xilinx_dma_get_residue()
1007 residue += (axidma_hw->control - axidma_hw->status) & in xilinx_dma_get_residue()
1008 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
1014 aximcdma_hw = &aximcdma_seg->hw; in xilinx_dma_get_residue()
1016 (aximcdma_hw->control - aximcdma_hw->status) & in xilinx_dma_get_residue()
1017 chan->xdev->max_buffer_len; in xilinx_dma_get_residue()
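
All three branches above apply the same per-segment rule: the control word carries the programmed byte count in its low bits, the hardware writes the transferred count into the matching bits of the status word, and the masked difference is what remains. With a 26-bit length field, a segment programmed for 4096 bytes whose status reports 1024 transferred leaves (4096 - 1024) & GENMASK(25, 0) = 3072 bytes:

static u32 example_segment_residue(u32 control, u32 status, u32 max_buffer_len)
{
	/* max_buffer_len is the all-ones length-field mask, e.g. GENMASK(25, 0) */
	return (control - status) & max_buffer_len;
}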
1025 * xilinx_dma_chan_handle_cyclic - Cyclic dma callback
1036 dmaengine_desc_get_callback(&desc->async_tx, &cb); in xilinx_dma_chan_handle_cyclic()
1038 spin_unlock_irqrestore(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1040 spin_lock_irqsave(&chan->lock, *flags); in xilinx_dma_chan_handle_cyclic()
1045 * xilinx_dma_chan_desc_cleanup - Clean channel descriptors
1053 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1055 list_for_each_entry_safe(desc, next, &chan->done_list, node) { in xilinx_dma_chan_desc_cleanup()
1058 if (desc->cyclic) { in xilinx_dma_chan_desc_cleanup()
1064 list_del(&desc->node); in xilinx_dma_chan_desc_cleanup()
1066 if (unlikely(desc->err)) { in xilinx_dma_chan_desc_cleanup()
1067 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_dma_chan_desc_cleanup()
1075 result.residue = desc->residue; in xilinx_dma_chan_desc_cleanup()
1078 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1079 dmaengine_desc_get_callback_invoke(&desc->async_tx, &result); in xilinx_dma_chan_desc_cleanup()
1080 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1083 dma_run_dependencies(&desc->async_tx); in xilinx_dma_chan_desc_cleanup()
1090 if (chan->terminating) in xilinx_dma_chan_desc_cleanup()
1094 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_chan_desc_cleanup()
1098 * xilinx_dma_do_tasklet - Schedule completion tasklet
1109 * xilinx_dma_alloc_chan_resources - Allocate channel resources
1120 if (chan->desc_pool) in xilinx_dma_alloc_chan_resources()
1127 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
1129 chan->seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1130 sizeof(*chan->seg_v) * XILINX_DMA_NUM_DESCS, in xilinx_dma_alloc_chan_resources()
1131 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1132 if (!chan->seg_v) { in xilinx_dma_alloc_chan_resources()
1133 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1135 chan->id); in xilinx_dma_alloc_chan_resources()
1136 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1144 chan->cyclic_seg_v = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1145 sizeof(*chan->cyclic_seg_v), in xilinx_dma_alloc_chan_resources()
1146 &chan->cyclic_seg_p, in xilinx_dma_alloc_chan_resources()
1148 if (!chan->cyclic_seg_v) { in xilinx_dma_alloc_chan_resources()
1149 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1151 dma_free_coherent(chan->dev, sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1152 XILINX_DMA_NUM_DESCS, chan->seg_v, in xilinx_dma_alloc_chan_resources()
1153 chan->seg_p); in xilinx_dma_alloc_chan_resources()
1154 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1156 chan->cyclic_seg_v->phys = chan->cyclic_seg_p; in xilinx_dma_alloc_chan_resources()
1159 chan->seg_v[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1160 lower_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1162 chan->seg_v[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1163 upper_32_bits(chan->seg_p + sizeof(*chan->seg_v) * in xilinx_dma_alloc_chan_resources()
1165 chan->seg_v[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1166 sizeof(*chan->seg_v) * i; in xilinx_dma_alloc_chan_resources()
1167 list_add_tail(&chan->seg_v[i].node, in xilinx_dma_alloc_chan_resources()
1168 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1170 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_alloc_chan_resources()
1172 chan->seg_mv = dma_alloc_coherent(chan->dev, in xilinx_dma_alloc_chan_resources()
1173 sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1175 &chan->seg_p, GFP_KERNEL); in xilinx_dma_alloc_chan_resources()
1176 if (!chan->seg_mv) { in xilinx_dma_alloc_chan_resources()
1177 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1179 chan->id); in xilinx_dma_alloc_chan_resources()
1180 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1183 chan->seg_mv[i].hw.next_desc = in xilinx_dma_alloc_chan_resources()
1184 lower_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1186 chan->seg_mv[i].hw.next_desc_msb = in xilinx_dma_alloc_chan_resources()
1187 upper_32_bits(chan->seg_p + sizeof(*chan->seg_mv) * in xilinx_dma_alloc_chan_resources()
1189 chan->seg_mv[i].phys = chan->seg_p + in xilinx_dma_alloc_chan_resources()
1190 sizeof(*chan->seg_mv) * i; in xilinx_dma_alloc_chan_resources()
1191 list_add_tail(&chan->seg_mv[i].node, in xilinx_dma_alloc_chan_resources()
1192 &chan->free_seg_list); in xilinx_dma_alloc_chan_resources()
1194 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_alloc_chan_resources()
1195 chan->desc_pool = dma_pool_create("xilinx_cdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1196 chan->dev, in xilinx_dma_alloc_chan_resources()
1201 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool", in xilinx_dma_alloc_chan_resources()
1202 chan->dev, in xilinx_dma_alloc_chan_resources()
1208 if (!chan->desc_pool && in xilinx_dma_alloc_chan_resources()
1209 ((chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIDMA) && in xilinx_dma_alloc_chan_resources()
1210 chan->xdev->dma_config->dmatype != XDMA_TYPE_AXIMCDMA)) { in xilinx_dma_alloc_chan_resources()
1211 dev_err(chan->dev, in xilinx_dma_alloc_chan_resources()
1213 chan->id); in xilinx_dma_alloc_chan_resources()
1214 return -ENOMEM; in xilinx_dma_alloc_chan_resources()
1219 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_alloc_chan_resources()
1220 /* For AXI DMA, resetting one channel will reset the in xilinx_dma_alloc_chan_resources()
1227 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_alloc_chan_resources()
1235 * xilinx_dma_calc_copysize - Calculate the amount of data to copy
1247 copy = min_t(size_t, size - done, in xilinx_dma_calc_copysize()
1248 chan->xdev->max_buffer_len); in xilinx_dma_calc_copysize()
1251 chan->xdev->common.copy_align) { in xilinx_dma_calc_copysize()
1257 (1 << chan->xdev->common.copy_align)); in xilinx_dma_calc_copysize()
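
A condensed, self-contained restatement of the clamp above, with the channel fields lifted into parameters: the chunk is capped at the IP's transfer-length limit and, when more data follows on a core without data realignment (copy_align != 0), rounded down so the next chunk starts aligned.

static size_t example_calc_copysize(size_t size, size_t done,
				    size_t max_buffer_len,
				    unsigned int copy_align)
{
	size_t copy = min(size - done, max_buffer_len);

	/* Not the last chunk: keep the next chunk's start aligned */
	if (copy + done < size && copy_align)
		copy = rounddown(copy, 1UL << copy_align);
	return copy;
}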
1263 * xilinx_dma_tx_status - Get DMA transaction status
1284 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_status()
1285 if (!list_empty(&chan->active_list)) { in xilinx_dma_tx_status()
1286 desc = list_last_entry(&chan->active_list, in xilinx_dma_tx_status()
1292 if (chan->has_sg && chan->xdev->dma_config->dmatype != XDMA_TYPE_VDMA) in xilinx_dma_tx_status()
1295 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_status()
1303 * xilinx_dma_stop_transfer - Halt DMA channel
1321 * xilinx_cdma_stop_transfer - Wait for the current transfer to complete
1336 * xilinx_dma_start - Start DMA channel
1352 dev_err(chan->dev, "Cannot start channel %p: %x\n", in xilinx_dma_start()
1355 chan->err = true; in xilinx_dma_start()
1360 * xilinx_vdma_start_transfer - Starts VDMA transfer
1365 struct xilinx_vdma_config *config = &chan->config; in xilinx_vdma_start_transfer() local
1372 if (chan->err) in xilinx_vdma_start_transfer()
1375 if (!chan->idle) in xilinx_vdma_start_transfer()
1378 if (list_empty(&chan->pending_list)) in xilinx_vdma_start_transfer()
1381 desc = list_first_entry(&chan->pending_list, in xilinx_vdma_start_transfer()
1384 /* Configure the hardware using info in the config structure */ in xilinx_vdma_start_transfer()
1385 if (chan->has_vflip) { in xilinx_vdma_start_transfer()
1388 reg |= config->vflip_en; in xilinx_vdma_start_transfer()
1395 if (config->frm_cnt_en) in xilinx_vdma_start_transfer()
1401 if (config->park) in xilinx_vdma_start_transfer()
1408 if (config->park) { in xilinx_vdma_start_transfer()
1409 j = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1411 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_vdma_start_transfer()
1424 if (chan->err) in xilinx_vdma_start_transfer()
1428 if (chan->desc_submitcount < chan->num_frms) in xilinx_vdma_start_transfer()
1429 i = chan->desc_submitcount; in xilinx_vdma_start_transfer()
1431 list_for_each_entry(segment, &desc->segments, node) { in xilinx_vdma_start_transfer()
1432 if (chan->ext_addr) in xilinx_vdma_start_transfer()
1435 segment->hw.buf_addr, in xilinx_vdma_start_transfer()
1436 segment->hw.buf_addr_msb); in xilinx_vdma_start_transfer()
1440 segment->hw.buf_addr); in xilinx_vdma_start_transfer()
1449 vdma_desc_write(chan, XILINX_DMA_REG_HSIZE, last->hw.hsize); in xilinx_vdma_start_transfer()
1451 last->hw.stride); in xilinx_vdma_start_transfer()
1452 vdma_desc_write(chan, XILINX_DMA_REG_VSIZE, last->hw.vsize); in xilinx_vdma_start_transfer()
1454 chan->desc_submitcount++; in xilinx_vdma_start_transfer()
1455 chan->desc_pendingcount--; in xilinx_vdma_start_transfer()
1456 list_move_tail(&desc->node, &chan->active_list); in xilinx_vdma_start_transfer()
1457 if (chan->desc_submitcount == chan->num_frms) in xilinx_vdma_start_transfer()
1458 chan->desc_submitcount = 0; in xilinx_vdma_start_transfer()
1460 chan->idle = false; in xilinx_vdma_start_transfer()
1464 * xilinx_cdma_start_transfer - Starts CDMA transfer in xilinx_cdma_start_transfer()
1473 if (chan->err) in xilinx_cdma_start_transfer()
1476 if (!chan->idle) in xilinx_cdma_start_transfer()
1479 if (list_empty(&chan->pending_list)) in xilinx_cdma_start_transfer()
1482 head_desc = list_first_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1484 tail_desc = list_last_entry(&chan->pending_list, in xilinx_cdma_start_transfer()
1486 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_cdma_start_transfer()
1489 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_cdma_start_transfer()
1491 ctrl_reg |= chan->desc_pendingcount << in xilinx_cdma_start_transfer()
1496 if (chan->has_sg) { in xilinx_cdma_start_transfer()
1504 head_desc->async_tx.phys); in xilinx_cdma_start_transfer()
1508 tail_segment->phys); in xilinx_cdma_start_transfer()
1514 segment = list_first_entry(&head_desc->segments, in xilinx_cdma_start_transfer()
1518 hw = &segment->hw; in xilinx_cdma_start_transfer()
1521 xilinx_prep_dma_addr_t(hw->src_addr)); in xilinx_cdma_start_transfer()
1523 xilinx_prep_dma_addr_t(hw->dest_addr)); in xilinx_cdma_start_transfer()
1527 hw->control & chan->xdev->max_buffer_len); in xilinx_cdma_start_transfer()
1530 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_cdma_start_transfer()
1531 chan->desc_pendingcount = 0; in xilinx_cdma_start_transfer()
1532 chan->idle = false; in xilinx_cdma_start_transfer()
1536 * xilinx_dma_start_transfer - Starts DMA transfer
1545 if (chan->err) in xilinx_dma_start_transfer()
1548 if (list_empty(&chan->pending_list)) in xilinx_dma_start_transfer()
1551 if (!chan->idle) in xilinx_dma_start_transfer()
1554 head_desc = list_first_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1556 tail_desc = list_last_entry(&chan->pending_list, in xilinx_dma_start_transfer()
1558 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_dma_start_transfer()
1563 if (chan->desc_pendingcount <= XILINX_DMA_COALESCE_MAX) { in xilinx_dma_start_transfer()
1565 reg |= chan->desc_pendingcount << in xilinx_dma_start_transfer()
1570 if (chan->has_sg) in xilinx_dma_start_transfer()
1572 head_desc->async_tx.phys); in xilinx_dma_start_transfer()
1574 reg |= chan->irq_delay << XILINX_DMA_CR_DELAY_SHIFT; in xilinx_dma_start_transfer()
1579 if (chan->err) in xilinx_dma_start_transfer()
1583 if (chan->has_sg) { in xilinx_dma_start_transfer()
1584 if (chan->cyclic) in xilinx_dma_start_transfer()
1586 chan->cyclic_seg_v->phys); in xilinx_dma_start_transfer()
1589 tail_segment->phys); in xilinx_dma_start_transfer()
1594 segment = list_first_entry(&head_desc->segments, in xilinx_dma_start_transfer()
1597 hw = &segment->hw; in xilinx_dma_start_transfer()
1600 xilinx_prep_dma_addr_t(hw->buf_addr)); in xilinx_dma_start_transfer()
1604 hw->control & chan->xdev->max_buffer_len); in xilinx_dma_start_transfer()
1607 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_dma_start_transfer()
1608 chan->desc_pendingcount = 0; in xilinx_dma_start_transfer()
1609 chan->idle = false; in xilinx_dma_start_transfer()
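
The ordering above is the important part: in SG mode the current-descriptor register must be written while the channel is halted, starting the channel sets CR.RS, and the tail-descriptor write is what actually kicks descriptor fetch; in simple (no-SG) mode the address is programmed first and writing the byte count starts the transfer. A condensed sketch using the register names from the full source, with head_phys/tail_phys/buf_addr/len as stand-in locals:

	if (chan->has_sg) {
		xilinx_write(chan, XILINX_DMA_REG_CURDESC, head_phys);	/* while halted */
		xilinx_dma_start(chan);					/* sets CR.RS */
		xilinx_write(chan, XILINX_DMA_REG_TAILDESC, tail_phys);	/* kicks fetch */
	} else {
		xilinx_dma_start(chan);
		xilinx_write(chan, XILINX_DMA_REG_SRCDSTADDR, buf_addr);
		dma_ctrl_write(chan, XILINX_DMA_REG_BTT, len);		/* starts xfer */
	}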
1613 * xilinx_mcdma_start_transfer - Starts MCDMA transfer
1627 if (chan->err) in xilinx_mcdma_start_transfer()
1630 if (!chan->idle) in xilinx_mcdma_start_transfer()
1633 if (list_empty(&chan->pending_list)) in xilinx_mcdma_start_transfer()
1636 head_desc = list_first_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1638 tail_desc = list_last_entry(&chan->pending_list, in xilinx_mcdma_start_transfer()
1640 tail_segment = list_last_entry(&tail_desc->segments, in xilinx_mcdma_start_transfer()
1643 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1645 if (chan->desc_pendingcount <= XILINX_MCDMA_COALESCE_MAX) { in xilinx_mcdma_start_transfer()
1647 reg |= chan->desc_pendingcount << in xilinx_mcdma_start_transfer()
1652 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1655 xilinx_write(chan, XILINX_MCDMA_CHAN_CDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1656 head_desc->async_tx.phys); in xilinx_mcdma_start_transfer()
1660 reg |= BIT(chan->tdest); in xilinx_mcdma_start_transfer()
1664 reg = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest)); in xilinx_mcdma_start_transfer()
1666 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_CR_OFFSET(chan->tdest), reg); in xilinx_mcdma_start_transfer()
1670 if (chan->err) in xilinx_mcdma_start_transfer()
1674 xilinx_write(chan, XILINX_MCDMA_CHAN_TDESC_OFFSET(chan->tdest), in xilinx_mcdma_start_transfer()
1675 tail_segment->phys); in xilinx_mcdma_start_transfer()
1677 list_splice_tail_init(&chan->pending_list, &chan->active_list); in xilinx_mcdma_start_transfer()
1678 chan->desc_pendingcount = 0; in xilinx_mcdma_start_transfer()
1679 chan->idle = false; in xilinx_mcdma_start_transfer()
1683 * xilinx_dma_issue_pending - Issue pending transactions
1691 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_issue_pending()
1692 chan->start_transfer(chan); in xilinx_dma_issue_pending()
1693 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_issue_pending()
1697 * xilinx_dma_device_config - Configure the DMA channel
1699 * @config: channel configuration
1704 struct dma_slave_config *config) in xilinx_dma_device_config() argument
1710 * xilinx_dma_complete_descriptor - Mark the active descriptor as complete
1720 if (list_empty(&chan->active_list)) in xilinx_dma_complete_descriptor()
1723 list_for_each_entry_safe(desc, next, &chan->active_list, node) { in xilinx_dma_complete_descriptor()
1724 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_complete_descriptor()
1727 seg = list_last_entry(&desc->segments, in xilinx_dma_complete_descriptor()
1729 if (!(seg->hw.status & XILINX_DMA_BD_COMP_MASK) && chan->has_sg) in xilinx_dma_complete_descriptor()
1732 if (chan->has_sg && chan->xdev->dma_config->dmatype != in xilinx_dma_complete_descriptor()
1734 desc->residue = xilinx_dma_get_residue(chan, desc); in xilinx_dma_complete_descriptor()
1736 desc->residue = 0; in xilinx_dma_complete_descriptor()
1737 desc->err = chan->err; in xilinx_dma_complete_descriptor()
1739 list_del(&desc->node); in xilinx_dma_complete_descriptor()
1740 if (!desc->cyclic) in xilinx_dma_complete_descriptor()
1741 dma_cookie_complete(&desc->async_tx); in xilinx_dma_complete_descriptor()
1742 list_add_tail(&desc->node, &chan->done_list); in xilinx_dma_complete_descriptor()
1747 * xilinx_dma_reset - Reset DMA channel
1765 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n", in xilinx_dma_reset()
1768 return -ETIMEDOUT; in xilinx_dma_reset()
1771 chan->err = false; in xilinx_dma_reset()
1772 chan->idle = true; in xilinx_dma_reset()
1773 chan->desc_pendingcount = 0; in xilinx_dma_reset()
1774 chan->desc_submitcount = 0; in xilinx_dma_reset()
1780 * xilinx_dma_chan_reset - Reset DMA channel and enable interrupts
1802 * xilinx_mcdma_irq_handler - MCDMA Interrupt handler
1813 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1825 if (chan->direction == DMA_DEV_TO_MEM) in xilinx_mcdma_irq_handler()
1826 chan_offset = chan->xdev->dma_config->max_channels / 2; in xilinx_mcdma_irq_handler()
1828 chan_offset = chan_offset + (chan_id - 1); in xilinx_mcdma_irq_handler()
1829 chan = chan->xdev->chan[chan_offset]; in xilinx_mcdma_irq_handler()
1831 status = dma_ctrl_read(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest)); in xilinx_mcdma_irq_handler()
1835 dma_ctrl_write(chan, XILINX_MCDMA_CHAN_SR_OFFSET(chan->tdest), in xilinx_mcdma_irq_handler()
1839 dev_err(chan->dev, "Channel %p has errors %x cdr %x tdr %x\n", in xilinx_mcdma_irq_handler()
1843 (chan->tdest)), in xilinx_mcdma_irq_handler()
1845 (chan->tdest))); in xilinx_mcdma_irq_handler()
1846 chan->err = true; in xilinx_mcdma_irq_handler()
1854 dev_dbg(chan->dev, "Inter-packet latency too long\n"); in xilinx_mcdma_irq_handler()
1858 spin_lock(&chan->lock); in xilinx_mcdma_irq_handler()
1860 chan->idle = true; in xilinx_mcdma_irq_handler()
1861 chan->start_transfer(chan); in xilinx_mcdma_irq_handler()
1862 spin_unlock(&chan->lock); in xilinx_mcdma_irq_handler()
1865 tasklet_hi_schedule(&chan->tasklet); in xilinx_mcdma_irq_handler()
1870 * xilinx_dma_irq_handler - DMA Interrupt handler
1902 if (!chan->flush_on_fsync || in xilinx_dma_irq_handler()
1904 dev_err(chan->dev, in xilinx_dma_irq_handler()
1909 chan->err = true; in xilinx_dma_irq_handler()
1915 spin_lock(&chan->lock); in xilinx_dma_irq_handler()
1917 chan->idle = true; in xilinx_dma_irq_handler()
1918 chan->start_transfer(chan); in xilinx_dma_irq_handler()
1919 spin_unlock(&chan->lock); in xilinx_dma_irq_handler()
1922 tasklet_schedule(&chan->tasklet); in xilinx_dma_irq_handler()
1927 * append_desc_queue - Queue a descriptor on the pending list in append_desc_queue()
1940 if (list_empty(&chan->pending_list)) in append_desc_queue()
1947 tail_desc = list_last_entry(&chan->pending_list, in append_desc_queue()
1949 if (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in append_desc_queue()
1950 tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1953 tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1954 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in append_desc_queue()
1955 cdma_tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1958 cdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1959 } else if (chan->xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in append_desc_queue()
1960 axidma_tail_segment = list_last_entry(&tail_desc->segments, in append_desc_queue()
1963 axidma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1966 list_last_entry(&tail_desc->segments, in append_desc_queue()
1969 aximcdma_tail_segment->hw.next_desc = (u32)desc->async_tx.phys; in append_desc_queue()
1977 list_add_tail(&desc->node, &chan->pending_list); in append_desc_queue()
1978 chan->desc_pendingcount++; in append_desc_queue()
1980 if (chan->has_sg && (chan->xdev->dma_config->dmatype == XDMA_TYPE_VDMA) in append_desc_queue()
1981 && unlikely(chan->desc_pendingcount > chan->num_frms)) { in append_desc_queue()
1982 dev_dbg(chan->dev, "desc pendingcount is too high\n"); in append_desc_queue()
1983 chan->desc_pendingcount = chan->num_frms; in append_desc_queue()
1988 * xilinx_dma_tx_submit - Submit DMA transaction
1996 struct xilinx_dma_chan *chan = to_xilinx_chan(tx->chan); in xilinx_dma_tx_submit()
2001 if (chan->cyclic) { in xilinx_dma_tx_submit()
2003 return -EBUSY; in xilinx_dma_tx_submit()
2006 if (chan->err) { in xilinx_dma_tx_submit()
2016 spin_lock_irqsave(&chan->lock, flags); in xilinx_dma_tx_submit()
2023 if (desc->cyclic) in xilinx_dma_tx_submit()
2024 chan->cyclic = true; in xilinx_dma_tx_submit()
2026 chan->terminating = false; in xilinx_dma_tx_submit()
2028 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dma_tx_submit()
2034 * xilinx_vdma_dma_prep_interleaved - prepare a descriptor for a
2052 if (!is_slave_direction(xt->dir)) in xilinx_vdma_dma_prep_interleaved()
2055 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
2058 if (xt->numf & ~XILINX_DMA_VSIZE_MASK || in xilinx_vdma_dma_prep_interleaved()
2059 xt->sgl[0].size & ~XILINX_DMA_HSIZE_MASK) in xilinx_vdma_dma_prep_interleaved()
2062 if (xt->frame_size != 1) in xilinx_vdma_dma_prep_interleaved()
2070 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_vdma_dma_prep_interleaved()
2071 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_vdma_dma_prep_interleaved()
2072 async_tx_ack(&desc->async_tx); in xilinx_vdma_dma_prep_interleaved()
2080 hw = &segment->hw; in xilinx_vdma_dma_prep_interleaved()
2081 hw->vsize = xt->numf; in xilinx_vdma_dma_prep_interleaved()
2082 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
2083 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
2085 hw->stride |= chan->config.frm_dly << in xilinx_vdma_dma_prep_interleaved()
2088 if (xt->dir != DMA_MEM_TO_DEV) { in xilinx_vdma_dma_prep_interleaved()
2089 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2090 hw->buf_addr = lower_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2091 hw->buf_addr_msb = upper_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2093 hw->buf_addr = xt->dst_start; in xilinx_vdma_dma_prep_interleaved()
2096 if (chan->ext_addr) { in xilinx_vdma_dma_prep_interleaved()
2097 hw->buf_addr = lower_32_bits(xt->src_start); in xilinx_vdma_dma_prep_interleaved()
2098 hw->buf_addr_msb = upper_32_bits(xt->src_start); in xilinx_vdma_dma_prep_interleaved()
2100 hw->buf_addr = xt->src_start; in xilinx_vdma_dma_prep_interleaved()
2105 list_add_tail(&segment->node, &desc->segments); in xilinx_vdma_dma_prep_interleaved()
2108 segment = list_first_entry(&desc->segments, in xilinx_vdma_dma_prep_interleaved()
2110 desc->async_tx.phys = segment->phys; in xilinx_vdma_dma_prep_interleaved()
2112 return &desc->async_tx; in xilinx_vdma_dma_prep_interleaved()
2120 * xilinx_cdma_prep_memcpy - prepare descriptors for a memcpy transaction
2138 if (!len || len > chan->xdev->max_buffer_len) in xilinx_cdma_prep_memcpy()
2145 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_cdma_prep_memcpy()
2146 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_cdma_prep_memcpy()
2153 hw = &segment->hw; in xilinx_cdma_prep_memcpy()
2154 hw->control = len; in xilinx_cdma_prep_memcpy()
2155 hw->src_addr = dma_src; in xilinx_cdma_prep_memcpy()
2156 hw->dest_addr = dma_dst; in xilinx_cdma_prep_memcpy()
2157 if (chan->ext_addr) { in xilinx_cdma_prep_memcpy()
2158 hw->src_addr_msb = upper_32_bits(dma_src); in xilinx_cdma_prep_memcpy()
2159 hw->dest_addr_msb = upper_32_bits(dma_dst); in xilinx_cdma_prep_memcpy()
2163 list_add_tail(&segment->node, &desc->segments); in xilinx_cdma_prep_memcpy()
2165 desc->async_tx.phys = segment->phys; in xilinx_cdma_prep_memcpy()
2166 hw->next_desc = segment->phys; in xilinx_cdma_prep_memcpy()
2168 return &desc->async_tx; in xilinx_cdma_prep_memcpy()
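
Client-side sketch of exercising this memcpy path, assuming chan is a channel on an xlnx,axi-cdma device and dst/src are already DMA-mapped:

static int example_cdma_copy(struct dma_chan *chan, dma_addr_t dst,
			     dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENOMEM;

	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie))
		return -EINVAL;

	dma_async_issue_pending(chan);
	return 0;
}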
2176 * xilinx_dma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
2208 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_slave_sg()
2209 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_slave_sg()
2230 hw = &segment->hw; in xilinx_dma_prep_slave_sg()
2236 hw->control = copy; in xilinx_dma_prep_slave_sg()
2238 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2240 memcpy(hw->app, app_w, sizeof(u32) * in xilinx_dma_prep_slave_sg()
2250 list_add_tail(&segment->node, &desc->segments); in xilinx_dma_prep_slave_sg()
2254 segment = list_first_entry(&desc->segments, in xilinx_dma_prep_slave_sg()
2256 desc->async_tx.phys = segment->phys; in xilinx_dma_prep_slave_sg()
2259 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_dma_prep_slave_sg()
2260 segment->hw.control |= XILINX_DMA_BD_SOP; in xilinx_dma_prep_slave_sg()
2261 segment = list_last_entry(&desc->segments, in xilinx_dma_prep_slave_sg()
2264 segment->hw.control |= XILINX_DMA_BD_EOP; in xilinx_dma_prep_slave_sg()
2267 if (chan->xdev->has_axistream_connected) in xilinx_dma_prep_slave_sg()
2268 desc->async_tx.metadata_ops = &xilinx_dma_metadata_ops; in xilinx_dma_prep_slave_sg()
2270 return &desc->async_tx; in xilinx_dma_prep_slave_sg()
2278 * xilinx_dma_prep_dma_cyclic - prepare descriptors for a cyclic DMA transaction
2317 chan->direction = direction; in xilinx_dma_prep_dma_cyclic()
2318 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_dma_prep_dma_cyclic()
2319 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_dma_prep_dma_cyclic()
2338 hw = &segment->hw; in xilinx_dma_prep_dma_cyclic()
2341 hw->control = copy; in xilinx_dma_prep_dma_cyclic()
2344 prev->hw.next_desc = segment->phys; in xilinx_dma_prep_dma_cyclic()
2353 list_add_tail(&segment->node, &desc->segments); in xilinx_dma_prep_dma_cyclic()
2357 head_segment = list_first_entry(&desc->segments, in xilinx_dma_prep_dma_cyclic()
2359 desc->async_tx.phys = head_segment->phys; in xilinx_dma_prep_dma_cyclic()
2361 desc->cyclic = true; in xilinx_dma_prep_dma_cyclic()
2366 segment = list_last_entry(&desc->segments, in xilinx_dma_prep_dma_cyclic()
2369 segment->hw.next_desc = (u32) head_segment->phys; in xilinx_dma_prep_dma_cyclic()
2373 head_segment->hw.control |= XILINX_DMA_BD_SOP; in xilinx_dma_prep_dma_cyclic()
2374 segment->hw.control |= XILINX_DMA_BD_EOP; in xilinx_dma_prep_dma_cyclic()
2377 return &desc->async_tx; in xilinx_dma_prep_dma_cyclic()
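
Client-side sketch of the cyclic path, typical for audio-style ring buffers: one descriptor covers buf_len bytes split into buf_len / period_len periods, and the callback fires once per period until the channel is terminated. period_done and ctx are stand-ins:

static int example_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
				size_t buf_len, size_t period_len,
				dma_async_tx_callback period_done, void *ctx)
{
	struct dma_async_tx_descriptor *tx;

	tx = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
				       DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENOMEM;

	tx->callback = period_done;
	tx->callback_param = ctx;
	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}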
2385 * xilinx_mcdma_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
2418 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); in xilinx_mcdma_prep_slave_sg()
2419 desc->async_tx.tx_submit = xilinx_dma_tx_submit; in xilinx_mcdma_prep_slave_sg()
2438 copy = min_t(size_t, sg_dma_len(sg) - sg_used, in xilinx_mcdma_prep_slave_sg()
2439 chan->xdev->max_buffer_len); in xilinx_mcdma_prep_slave_sg()
2440 hw = &segment->hw; in xilinx_mcdma_prep_slave_sg()
2445 hw->control = copy; in xilinx_mcdma_prep_slave_sg()
2447 if (chan->direction == DMA_MEM_TO_DEV && app_w) { in xilinx_mcdma_prep_slave_sg()
2448 memcpy(hw->app, app_w, sizeof(u32) * in xilinx_mcdma_prep_slave_sg()
2457 list_add_tail(&segment->node, &desc->segments); in xilinx_mcdma_prep_slave_sg()
2461 segment = list_first_entry(&desc->segments, in xilinx_mcdma_prep_slave_sg()
2463 desc->async_tx.phys = segment->phys; in xilinx_mcdma_prep_slave_sg()
2466 if (chan->direction == DMA_MEM_TO_DEV) { in xilinx_mcdma_prep_slave_sg()
2467 segment->hw.control |= XILINX_MCDMA_BD_SOP; in xilinx_mcdma_prep_slave_sg()
2468 segment = list_last_entry(&desc->segments, in xilinx_mcdma_prep_slave_sg()
2471 segment->hw.control |= XILINX_MCDMA_BD_EOP; in xilinx_mcdma_prep_slave_sg()
2474 return &desc->async_tx; in xilinx_mcdma_prep_slave_sg()
2483 * xilinx_dma_terminate_all - Halt the channel and free descriptors
2494 if (!chan->cyclic) { in xilinx_dma_terminate_all()
2495 err = chan->stop_transfer(chan); in xilinx_dma_terminate_all()
2497 dev_err(chan->dev, "Cannot stop channel %p: %x\n", in xilinx_dma_terminate_all()
2500 chan->err = true; in xilinx_dma_terminate_all()
2506 chan->terminating = true; in xilinx_dma_terminate_all()
2508 chan->idle = true; in xilinx_dma_terminate_all()
2510 if (chan->cyclic) { in xilinx_dma_terminate_all()
2514 chan->cyclic = false; in xilinx_dma_terminate_all()
2517 if ((chan->xdev->dma_config->dmatype == XDMA_TYPE_CDMA) && chan->has_sg) in xilinx_dma_terminate_all()
2528 tasklet_kill(&chan->tasklet); in xilinx_dma_synchronize()
2532 * xilinx_vdma_channel_set_config - Configure VDMA channel
2533 * Run-time configuration for AXI VDMA, which supports:
2535 * . configure interrupt coalescing and inter-packet delay threshold
2550 if (cfg->reset) in xilinx_vdma_channel_set_config()
2555 chan->config.frm_dly = cfg->frm_dly; in xilinx_vdma_channel_set_config()
2556 chan->config.park = cfg->park; in xilinx_vdma_channel_set_config()
2559 chan->config.gen_lock = cfg->gen_lock; in xilinx_vdma_channel_set_config()
2560 chan->config.master = cfg->master; in xilinx_vdma_channel_set_config()
2563 if (cfg->gen_lock && chan->genlock) { in xilinx_vdma_channel_set_config()
2566 dmacr |= cfg->master << XILINX_DMA_DMACR_MASTER_SHIFT; in xilinx_vdma_channel_set_config()
2569 chan->config.frm_cnt_en = cfg->frm_cnt_en; in xilinx_vdma_channel_set_config()
2570 chan->config.vflip_en = cfg->vflip_en; in xilinx_vdma_channel_set_config()
2572 if (cfg->park) in xilinx_vdma_channel_set_config()
2573 chan->config.park_frm = cfg->park_frm; in xilinx_vdma_channel_set_config()
2575 chan->config.park_frm = -1; in xilinx_vdma_channel_set_config()
2577 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2578 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2580 if (cfg->coalesc <= XILINX_DMA_DMACR_FRAME_COUNT_MAX) { in xilinx_vdma_channel_set_config()
2582 dmacr |= cfg->coalesc << XILINX_DMA_DMACR_FRAME_COUNT_SHIFT; in xilinx_vdma_channel_set_config()
2583 chan->config.coalesc = cfg->coalesc; in xilinx_vdma_channel_set_config()
2586 if (cfg->delay <= XILINX_DMA_DMACR_DELAY_MAX) { in xilinx_vdma_channel_set_config()
2588 dmacr |= cfg->delay << XILINX_DMA_DMACR_DELAY_SHIFT; in xilinx_vdma_channel_set_config()
2589 chan->config.delay = cfg->delay; in xilinx_vdma_channel_set_config()
2594 dmacr |= cfg->ext_fsync << XILINX_DMA_DMACR_FSYNCSRC_SHIFT; in xilinx_vdma_channel_set_config()
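
This helper is exported through include/linux/dma/xilinx_dma.h for video clients. A sketch of a caller, with field values that are purely illustrative:

#include <linux/dma/xilinx_dma.h>

static int example_configure_vdma(struct dma_chan *chan)
{
	struct xilinx_vdma_config cfg = {
		.frm_cnt_en	= 1,
		.coalesc	= 1,	/* interrupt every frame */
		.park		= 0,	/* circular frame-buffer mode */
	};

	return xilinx_vdma_channel_set_config(chan, &cfg);
}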
2602 /* -----------------------------------------------------------------------------
2607 * xilinx_dma_chan_remove - Per Channel remove function
2616 if (chan->irq > 0) in xilinx_dma_chan_remove()
2617 free_irq(chan->irq, chan); in xilinx_dma_chan_remove()
2619 tasklet_kill(&chan->tasklet); in xilinx_dma_chan_remove()
2621 list_del(&chan->common.device_node); in xilinx_dma_chan_remove()
2632 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axidma_clk_init()
2634 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axidma_clk_init()
2636 *tx_clk = devm_clk_get(&pdev->dev, "m_axi_mm2s_aclk"); in axidma_clk_init()
2640 *rx_clk = devm_clk_get(&pdev->dev, "m_axi_s2mm_aclk"); in axidma_clk_init()
2644 *sg_clk = devm_clk_get(&pdev->dev, "m_axi_sg_aclk"); in axidma_clk_init()
2650 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", err); in axidma_clk_init()
2656 dev_err(&pdev->dev, "failed to enable tx_clk (%d)\n", err); in axidma_clk_init()
2662 dev_err(&pdev->dev, "failed to enable rx_clk (%d)\n", err); in axidma_clk_init()
2668 dev_err(&pdev->dev, "failed to enable sg_clk (%d)\n", err); in axidma_clk_init()
2694 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axicdma_clk_init()
2696 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axicdma_clk_init()
2698 *dev_clk = devm_clk_get(&pdev->dev, "m_axi_aclk"); in axicdma_clk_init()
2700 return dev_err_probe(&pdev->dev, PTR_ERR(*dev_clk), "failed to get dev_clk\n"); in axicdma_clk_init()
2704 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", err); in axicdma_clk_init()
2710 dev_err(&pdev->dev, "failed to enable dev_clk (%d)\n", err); in axicdma_clk_init()
2728 *axi_clk = devm_clk_get(&pdev->dev, "s_axi_lite_aclk"); in axivdma_clk_init()
2730 return dev_err_probe(&pdev->dev, PTR_ERR(*axi_clk), "failed to get axi_aclk\n"); in axivdma_clk_init()
2732 *tx_clk = devm_clk_get(&pdev->dev, "m_axi_mm2s_aclk"); in axivdma_clk_init()
2736 *txs_clk = devm_clk_get(&pdev->dev, "m_axis_mm2s_aclk"); in axivdma_clk_init()
2740 *rx_clk = devm_clk_get(&pdev->dev, "m_axi_s2mm_aclk"); in axivdma_clk_init()
2744 *rxs_clk = devm_clk_get(&pdev->dev, "s_axis_s2mm_aclk"); in axivdma_clk_init()
2750 dev_err(&pdev->dev, "failed to enable axi_clk (%d)\n", in axivdma_clk_init()
2757 dev_err(&pdev->dev, "failed to enable tx_clk (%d)\n", err); in axivdma_clk_init()
2763 dev_err(&pdev->dev, "failed to enable txs_clk (%d)\n", err); in axivdma_clk_init()
2769 dev_err(&pdev->dev, "failed to enable rx_clk (%d)\n", err); in axivdma_clk_init()
2775 dev_err(&pdev->dev, "failed to enable rxs_clk (%d)\n", err); in axivdma_clk_init()
2795 clk_disable_unprepare(xdev->rxs_clk); in xdma_disable_allclks()
2796 clk_disable_unprepare(xdev->rx_clk); in xdma_disable_allclks()
2797 clk_disable_unprepare(xdev->txs_clk); in xdma_disable_allclks()
2798 clk_disable_unprepare(xdev->tx_clk); in xdma_disable_allclks()
2799 clk_disable_unprepare(xdev->axi_clk); in xdma_disable_allclks()
2803 * xilinx_dma_chan_probe - Per Channel Probing
2821 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); in xilinx_dma_chan_probe()
2823 return -ENOMEM; in xilinx_dma_chan_probe()
2825 chan->dev = xdev->dev; in xilinx_dma_chan_probe()
2826 chan->xdev = xdev; in xilinx_dma_chan_probe()
2827 chan->desc_pendingcount = 0x0; in xilinx_dma_chan_probe()
2828 chan->ext_addr = xdev->ext_addr; in xilinx_dma_chan_probe()
2834 chan->idle = true; in xilinx_dma_chan_probe()
2836 spin_lock_init(&chan->lock); in xilinx_dma_chan_probe()
2837 INIT_LIST_HEAD(&chan->pending_list); in xilinx_dma_chan_probe()
2838 INIT_LIST_HEAD(&chan->done_list); in xilinx_dma_chan_probe()
2839 INIT_LIST_HEAD(&chan->active_list); in xilinx_dma_chan_probe()
2840 INIT_LIST_HEAD(&chan->free_seg_list); in xilinx_dma_chan_probe()
2843 has_dre = of_property_read_bool(node, "xlnx,include-dre"); in xilinx_dma_chan_probe()
2845 of_property_read_u8(node, "xlnx,irq-delay", &chan->irq_delay); in xilinx_dma_chan_probe()
2847 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode"); in xilinx_dma_chan_probe()
2851 dev_err(xdev->dev, "missing xlnx,datawidth property\n"); in xilinx_dma_chan_probe()
2861 xdev->common.copy_align = (enum dmaengine_alignment)fls(width - 1); in xilinx_dma_chan_probe()
2863 if (of_device_is_compatible(node, "xlnx,axi-vdma-mm2s-channel") || in xilinx_dma_chan_probe()
2864 of_device_is_compatible(node, "xlnx,axi-dma-mm2s-channel") || in xilinx_dma_chan_probe()
2865 of_device_is_compatible(node, "xlnx,axi-cdma-channel")) { in xilinx_dma_chan_probe()
2866 chan->direction = DMA_MEM_TO_DEV; in xilinx_dma_chan_probe()
2867 chan->id = xdev->mm2s_chan_id++; in xilinx_dma_chan_probe()
2868 chan->tdest = chan->id; in xilinx_dma_chan_probe()
2870 chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET; in xilinx_dma_chan_probe()
2871 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2872 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET; in xilinx_dma_chan_probe()
2873 chan->config.park = 1; in xilinx_dma_chan_probe()
2875 if (xdev->flush_on_fsync == XILINX_DMA_FLUSH_BOTH || in xilinx_dma_chan_probe()
2876 xdev->flush_on_fsync == XILINX_DMA_FLUSH_MM2S) in xilinx_dma_chan_probe()
2877 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2880 "xlnx,axi-vdma-s2mm-channel") || in xilinx_dma_chan_probe()
2882 "xlnx,axi-dma-s2mm-channel")) { in xilinx_dma_chan_probe()
2883 chan->direction = DMA_DEV_TO_MEM; in xilinx_dma_chan_probe()
2884 chan->id = xdev->s2mm_chan_id++; in xilinx_dma_chan_probe()
2885 chan->tdest = chan->id - xdev->dma_config->max_channels / 2; in xilinx_dma_chan_probe()
2886 chan->has_vflip = of_property_read_bool(node, in xilinx_dma_chan_probe()
2887 "xlnx,enable-vert-flip"); in xilinx_dma_chan_probe()
2888 if (chan->has_vflip) { in xilinx_dma_chan_probe()
2889 chan->config.vflip_en = dma_read(chan, in xilinx_dma_chan_probe()
2894 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) in xilinx_dma_chan_probe()
2895 chan->ctrl_offset = XILINX_MCDMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2897 chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET; in xilinx_dma_chan_probe()
2899 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2900 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET; in xilinx_dma_chan_probe()
2901 chan->config.park = 1; in xilinx_dma_chan_probe()
2903 if (xdev->flush_on_fsync == XILINX_DMA_FLUSH_BOTH || in xilinx_dma_chan_probe()
2904 xdev->flush_on_fsync == XILINX_DMA_FLUSH_S2MM) in xilinx_dma_chan_probe()
2905 chan->flush_on_fsync = true; in xilinx_dma_chan_probe()
2908 dev_err(xdev->dev, "Invalid channel compatible node\n"); in xilinx_dma_chan_probe()
2909 return -EINVAL; in xilinx_dma_chan_probe()
2913 chan->irq = of_irq_get(node, chan->tdest); in xilinx_dma_chan_probe()
2914 if (chan->irq < 0) in xilinx_dma_chan_probe()
2915 return dev_err_probe(xdev->dev, chan->irq, "failed to get irq\n"); in xilinx_dma_chan_probe()
2916 err = request_irq(chan->irq, xdev->dma_config->irq_handler, in xilinx_dma_chan_probe()
2917 IRQF_SHARED, "xilinx-dma-controller", chan); in xilinx_dma_chan_probe()
2919 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq); in xilinx_dma_chan_probe()
2923 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_chan_probe()
2924 chan->start_transfer = xilinx_dma_start_transfer; in xilinx_dma_chan_probe()
2925 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2926 } else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_chan_probe()
2927 chan->start_transfer = xilinx_mcdma_start_transfer; in xilinx_dma_chan_probe()
2928 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2929 } else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_chan_probe()
2930 chan->start_transfer = xilinx_cdma_start_transfer; in xilinx_dma_chan_probe()
2931 chan->stop_transfer = xilinx_cdma_stop_transfer; in xilinx_dma_chan_probe()
2933 chan->start_transfer = xilinx_vdma_start_transfer; in xilinx_dma_chan_probe()
2934 chan->stop_transfer = xilinx_dma_stop_transfer; in xilinx_dma_chan_probe()
2938 if (xdev->dma_config->dmatype != XDMA_TYPE_VDMA) { in xilinx_dma_chan_probe()
2939 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA || in xilinx_dma_chan_probe()
2942 chan->has_sg = true; in xilinx_dma_chan_probe()
2943 dev_dbg(chan->dev, "ch %d: SG %s\n", chan->id, in xilinx_dma_chan_probe()
2944 str_enabled_disabled(chan->has_sg)); in xilinx_dma_chan_probe()
2948 tasklet_setup(&chan->tasklet, xilinx_dma_do_tasklet); in xilinx_dma_chan_probe()
2954 chan->common.device = &xdev->common; in xilinx_dma_chan_probe()
2956 list_add_tail(&chan->common.device_node, &xdev->common.channels); in xilinx_dma_chan_probe()
2957 xdev->chan[chan->id] = chan; in xilinx_dma_chan_probe()
2962 dev_err(xdev->dev, "Reset channel failed\n"); in xilinx_dma_chan_probe()
2970 * xilinx_dma_child_probe - Per child node probe
2971 * It gets the number of dma-channels per child node from
2972 * the device tree and initializes all the channels.
2985 ret = of_property_read_u32(node, "dma-channels", &nr_channels); in xilinx_dma_child_probe()
2986 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA && ret < 0) in xilinx_dma_child_probe()
2987 dev_warn(xdev->dev, "missing dma-channels property\n"); in xilinx_dma_child_probe()
2999 * of_dma_xilinx_xlate - Translation function
3008 struct xilinx_dma_device *xdev = ofdma->of_dma_data; in of_dma_xilinx_xlate()
3009 int chan_id = dma_spec->args[0]; in of_dma_xilinx_xlate()
3011 if (chan_id >= xdev->dma_config->max_channels || !xdev->chan[chan_id]) in of_dma_xilinx_xlate()
3014 return dma_get_slave_channel(&xdev->chan[chan_id]->common); in of_dma_xilinx_xlate()
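
The translation is deliberately thin: cell 0 of the consumer's dmas specifier is used directly as an index into xdev->chan[]. Assuming a client node with dmas = <&axi_dma_0 0> and dma-names = "tx", the lookup reduces to:

	/* cell 0 of the specifier selects xdev->chan[0] via the xlate above */
	struct dma_chan *chan = dma_request_chan(&pdev->dev, "tx");

	if (IS_ERR(chan))
		return PTR_ERR(chan);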
3045 { .compatible = "xlnx,axi-dma-1.00.a", .data = &axidma_config },
3046 { .compatible = "xlnx,axi-cdma-1.00.a", .data = &axicdma_config },
3047 { .compatible = "xlnx,axi-vdma-1.00.a", .data = &axivdma_config },
3048 { .compatible = "xlnx,axi-mcdma-1.00.a", .data = &aximcdma_config },
3054 * xilinx_dma_probe - Driver probe function
3064 struct device_node *node = pdev->dev.of_node; in xilinx_dma_probe()
3066 struct device_node *child, *np = pdev->dev.of_node; in xilinx_dma_probe()
3071 xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL); in xilinx_dma_probe()
3073 return -ENOMEM; in xilinx_dma_probe()
3075 xdev->dev = &pdev->dev; in xilinx_dma_probe()
3080 if (match && match->data) { in xilinx_dma_probe()
3081 xdev->dma_config = match->data; in xilinx_dma_probe()
3082 clk_init = xdev->dma_config->clk_init; in xilinx_dma_probe()
3086 err = clk_init(pdev, &xdev->axi_clk, &xdev->tx_clk, &xdev->txs_clk, in xilinx_dma_probe()
3087 &xdev->rx_clk, &xdev->rxs_clk); in xilinx_dma_probe()
3092 xdev->regs = devm_platform_ioremap_resource(pdev, 0); in xilinx_dma_probe()
3093 if (IS_ERR(xdev->regs)) { in xilinx_dma_probe()
3094 err = PTR_ERR(xdev->regs); in xilinx_dma_probe()
3098 xdev->max_buffer_len = GENMASK(XILINX_DMA_MAX_TRANS_LEN_MAX - 1, 0); in xilinx_dma_probe()
3099 xdev->s2mm_chan_id = xdev->dma_config->max_channels / 2; in xilinx_dma_probe()
3101 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA || in xilinx_dma_probe()
3102 xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_probe()
3103 if (!of_property_read_u32(node, "xlnx,sg-length-width", in xilinx_dma_probe()
3107 dev_warn(xdev->dev, in xilinx_dma_probe()
3108 "invalid xlnx,sg-length-width property value. Using default width\n"); in xilinx_dma_probe()
3111 dev_warn(xdev->dev, "Please ensure that IP supports buffer length > 23 bits\n"); in xilinx_dma_probe()
3112 xdev->max_buffer_len = in xilinx_dma_probe()
3113 GENMASK(len_width - 1, 0); in xilinx_dma_probe()
3118 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_probe()
3119 xdev->has_axistream_connected = in xilinx_dma_probe()
3120 of_property_read_bool(node, "xlnx,axistream-connected"); in xilinx_dma_probe()
3123 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_probe()
3124 err = of_property_read_u32(node, "xlnx,num-fstores", in xilinx_dma_probe()
3127 dev_err(xdev->dev, in xilinx_dma_probe()
3128 "missing xlnx,num-fstores property\n"); in xilinx_dma_probe()
3132 err = of_property_read_u32(node, "xlnx,flush-fsync", in xilinx_dma_probe()
3133 &xdev->flush_on_fsync); in xilinx_dma_probe()
3135 dev_warn(xdev->dev, in xilinx_dma_probe()
3136 "missing xlnx,flush-fsync property\n"); in xilinx_dma_probe()
3141 dev_warn(xdev->dev, "missing xlnx,addrwidth property\n"); in xilinx_dma_probe()
3144 xdev->ext_addr = true; in xilinx_dma_probe()
3146 xdev->ext_addr = false; in xilinx_dma_probe()
3149 if (xdev->has_axistream_connected) in xilinx_dma_probe()
3150 xdev->common.desc_metadata_modes = DESC_METADATA_ENGINE; in xilinx_dma_probe()
3153 err = dma_set_mask_and_coherent(xdev->dev, DMA_BIT_MASK(addr_width)); in xilinx_dma_probe()
3155 dev_err(xdev->dev, "DMA mask error %d\n", err); in xilinx_dma_probe()
3160 xdev->common.dev = &pdev->dev; in xilinx_dma_probe()
3162 INIT_LIST_HEAD(&xdev->common.channels); in xilinx_dma_probe()
3163 if (!(xdev->dma_config->dmatype == XDMA_TYPE_CDMA)) { in xilinx_dma_probe()
3164 dma_cap_set(DMA_SLAVE, xdev->common.cap_mask); in xilinx_dma_probe()
3165 dma_cap_set(DMA_PRIVATE, xdev->common.cap_mask); in xilinx_dma_probe()
3168 xdev->common.device_alloc_chan_resources = in xilinx_dma_probe()
3170 xdev->common.device_free_chan_resources = in xilinx_dma_probe()
3172 xdev->common.device_terminate_all = xilinx_dma_terminate_all; in xilinx_dma_probe()
3173 xdev->common.device_synchronize = xilinx_dma_synchronize; in xilinx_dma_probe()
3174 xdev->common.device_tx_status = xilinx_dma_tx_status; in xilinx_dma_probe()
3175 xdev->common.device_issue_pending = xilinx_dma_issue_pending; in xilinx_dma_probe()
3176 xdev->common.device_config = xilinx_dma_device_config; in xilinx_dma_probe()
3177 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) { in xilinx_dma_probe()
3178 dma_cap_set(DMA_CYCLIC, xdev->common.cap_mask); in xilinx_dma_probe()
3179 xdev->common.device_prep_slave_sg = xilinx_dma_prep_slave_sg; in xilinx_dma_probe()
3180 xdev->common.device_prep_dma_cyclic = in xilinx_dma_probe()
3182 /* Residue calculation is supported by only AXI DMA and CDMA */ in xilinx_dma_probe()
3183 xdev->common.residue_granularity = in xilinx_dma_probe()
3185 } else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) { in xilinx_dma_probe()
3186 dma_cap_set(DMA_MEMCPY, xdev->common.cap_mask); in xilinx_dma_probe()
3187 xdev->common.device_prep_dma_memcpy = xilinx_cdma_prep_memcpy; in xilinx_dma_probe()
3188 /* Residue calculation is supported by only AXI DMA and CDMA */ in xilinx_dma_probe()
3189 xdev->common.residue_granularity = in xilinx_dma_probe()
3191 } else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) { in xilinx_dma_probe()
3192 xdev->common.device_prep_slave_sg = xilinx_mcdma_prep_slave_sg; in xilinx_dma_probe()
3194 xdev->common.device_prep_interleaved_dma = in xilinx_dma_probe()
3209 if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) { in xilinx_dma_probe()
3210 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_probe()
3211 if (xdev->chan[i]) in xilinx_dma_probe()
3212 xdev->chan[i]->num_frms = num_frames; in xilinx_dma_probe()
3216 err = dma_async_device_register(&xdev->common); in xilinx_dma_probe()
3218 dev_err(xdev->dev, "failed to register the dma device\n"); in xilinx_dma_probe()
3225 dev_err(&pdev->dev, "Unable to register DMA to DT\n"); in xilinx_dma_probe()
3226 dma_async_device_unregister(&xdev->common); in xilinx_dma_probe()
3230 if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA) in xilinx_dma_probe()
3231 dev_info(&pdev->dev, "Xilinx AXI DMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3232 else if (xdev->dma_config->dmatype == XDMA_TYPE_CDMA) in xilinx_dma_probe()
3233 dev_info(&pdev->dev, "Xilinx AXI CDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3234 else if (xdev->dma_config->dmatype == XDMA_TYPE_AXIMCDMA) in xilinx_dma_probe()
3235 dev_info(&pdev->dev, "Xilinx AXI MCDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3237 dev_info(&pdev->dev, "Xilinx AXI VDMA Engine Driver Probed!!\n"); in xilinx_dma_probe()
3242 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_probe()
3243 if (xdev->chan[i]) in xilinx_dma_probe()
3244 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_probe()
3252 * xilinx_dma_remove - Driver remove function
3260 of_dma_controller_free(pdev->dev.of_node); in xilinx_dma_remove()
3262 dma_async_device_unregister(&xdev->common); in xilinx_dma_remove()
3264 for (i = 0; i < xdev->dma_config->max_channels; i++) in xilinx_dma_remove()
3265 if (xdev->chan[i]) in xilinx_dma_remove()
3266 xilinx_dma_chan_remove(xdev->chan[i]); in xilinx_dma_remove()
3273 .name = "xilinx-vdma",