Lines matching refs:segment (Xilinx DMA driver)
316 * struct xilinx_vdma_tx_segment - Descriptor segment
319 * @phys: Physical address of segment
328 * struct xilinx_axidma_tx_segment - Descriptor segment
331 * @phys: Physical address of segment
340 * struct xilinx_aximcdma_tx_segment - Descriptor segment
343 * @phys: Physical address of segment
352 * struct xilinx_cdma_tx_segment - Descriptor segment
355 * @phys: Physical address of segment
413 * @cyclic_seg_v: Statically allocated segment base for cyclic transfers
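
The four *_tx_segment structs documented at 316-355 all follow the same shape: a hardware buffer descriptor, a list node, and the bus address of the descriptor itself. A minimal sketch of that layout, using the AXI DMA variant as the example (the hw type name and the alignment are assumptions; phys, node and hw are taken from the matches below):

    /* Sketch only: one in-memory buffer descriptor plus driver bookkeeping.
     * The hw member mirrors what the engine fetches over the bus, so the
     * structure is assumed to be descriptor-aligned. */
    struct xilinx_axidma_tx_segment {
            struct xilinx_axidma_desc_hw hw;  /* hardware-visible descriptor (assumed type name) */
            struct list_head node;            /* links into desc->segments or chan->free_seg_list */
            dma_addr_t phys;                  /* bus address of this descriptor */
    } __aligned(64);                          /* assumed alignment */
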
664 * xilinx_vdma_alloc_tx_segment - Allocate transaction segment
667 * Return: The allocated segment on success and NULL on failure.
672 struct xilinx_vdma_tx_segment *segment;
675 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
676 if (!segment)
679 segment->phys = phys;
681 return segment;
685 * xilinx_cdma_alloc_tx_segment - Allocate transaction segment
688 * Return: The allocated segment on success and NULL on failure.
693 struct xilinx_cdma_tx_segment *segment;
696 segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
697 if (!segment)
700 segment->phys = phys;
702 return segment;
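
The VDMA and CDMA allocators shown at 664-702 are plain dma_pool users: pop a zeroed descriptor from the channel's pool and stash its bus address in the segment. Assembled from the fragments above, the CDMA variant reads roughly as follows (GFP_ATOMIC is what the matches show; the early-return error path is an assumption):

    static struct xilinx_cdma_tx_segment *
    xilinx_cdma_alloc_tx_segment(struct xilinx_dma_chan *chan)
    {
            struct xilinx_cdma_tx_segment *segment;
            dma_addr_t phys;

            /* atomic allocation: prep callbacks may not sleep */
            segment = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &phys);
            if (!segment)
                    return NULL;

            /* record the bus address so hw next_desc links can be built later */
            segment->phys = phys;

            return segment;
    }
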
706 * xilinx_axidma_alloc_tx_segment - Allocate transaction segment
709 * Return: The allocated segment on success and NULL on failure.
714 struct xilinx_axidma_tx_segment *segment = NULL;
719 segment = list_first_entry(&chan->free_seg_list,
722 list_del(&segment->node);
726 if (!segment)
727 dev_dbg(chan->dev, "Could not find free tx segment\n");
729 return segment;
733 * xilinx_aximcdma_alloc_tx_segment - Allocate transaction segment
736 * Return: The allocated segment on success and NULL on failure.
741 struct xilinx_aximcdma_tx_segment *segment = NULL;
746 segment = list_first_entry(&chan->free_seg_list,
749 list_del(&segment->node);
753 return segment;
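
The AXI DMA and MCDMA allocators at 706-753 never touch a pool at prep time; they pop a pre-allocated segment off chan->free_seg_list. Assembled from the fragments, with the lock name and flags variable as assumptions:

    static struct xilinx_axidma_tx_segment *
    xilinx_axidma_alloc_tx_segment(struct xilinx_dma_chan *chan)
    {
            struct xilinx_axidma_tx_segment *segment = NULL;
            unsigned long flags;

            spin_lock_irqsave(&chan->lock, flags);          /* assumed lock */
            if (!list_empty(&chan->free_seg_list)) {
                    segment = list_first_entry(&chan->free_seg_list,
                                               struct xilinx_axidma_tx_segment,
                                               node);
                    list_del(&segment->node);
            }
            spin_unlock_irqrestore(&chan->lock, flags);

            if (!segment)
                    dev_dbg(chan->dev, "Could not find free tx segment\n");

            return segment;
    }
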
779 * xilinx_dma_free_tx_segment - Free transaction segment
781 * @segment: DMA transaction segment
784 struct xilinx_axidma_tx_segment *segment)
786 xilinx_dma_clean_hw_desc(&segment->hw);
788 list_add_tail(&segment->node, &chan->free_seg_list);
792 * xilinx_mcdma_free_tx_segment - Free transaction segment
794 * @segment: DMA transaction segment
798 segment)
800 xilinx_mcdma_clean_hw_desc(&segment->hw);
802 list_add_tail(&segment->node, &chan->free_seg_list);
806 * xilinx_cdma_free_tx_segment - Free transaction segment
808 * @segment: DMA transaction segment
811 struct xilinx_cdma_tx_segment *segment)
813 dma_pool_free(chan->desc_pool, segment, segment->phys);
817 * xilinx_vdma_free_tx_segment - Free transaction segment
819 * @segment: DMA transaction segment
822 struct xilinx_vdma_tx_segment *segment)
824 dma_pool_free(chan->desc_pool, segment, segment->phys);
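
The free helpers at 779-824 mirror the two allocation schemes: list-backed segments have their hardware descriptor scrubbed and are pushed back onto free_seg_list, while pool-backed segments go straight back to the dma_pool. Condensed from the fragments:

    /* AXI DMA / MCDMA: recycle onto the per-channel free list */
    static void xilinx_dma_free_tx_segment(struct xilinx_dma_chan *chan,
                                           struct xilinx_axidma_tx_segment *segment)
    {
            xilinx_dma_clean_hw_desc(&segment->hw);   /* wipe stale hw fields */
            list_add_tail(&segment->node, &chan->free_seg_list);
    }

    /* VDMA / CDMA: hand the descriptor back to the dma_pool */
    static void xilinx_cdma_free_tx_segment(struct xilinx_dma_chan *chan,
                                            struct xilinx_cdma_tx_segment *segment)
    {
            dma_pool_free(chan->desc_pool, segment, segment->phys);
    }
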
856 struct xilinx_vdma_tx_segment *segment, *next;
865 list_for_each_entry_safe(segment, next, &desc->segments, node) {
866 list_del(&segment->node);
867 xilinx_vdma_free_tx_segment(chan, segment);
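
The descriptor teardown at 856-867 walks the segment list with the _safe iterator because each segment is unlinked and freed inside the loop:

    struct xilinx_vdma_tx_segment *segment, *next;

    /* _safe variant: segment is released in the loop body */
    list_for_each_entry_safe(segment, next, &desc->segments, node) {
            list_del(&segment->node);
            xilinx_vdma_free_tx_segment(chan, segment);
    }
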
1141 * so allocating a desc segment during channel allocation for
1150 "unable to allocate desc segment for cyclic DMA\n");
1368 struct xilinx_vdma_tx_segment *segment, *last = NULL;
1431 list_for_each_entry(segment, &desc->segments, node) {
1435 segment->hw.buf_addr,
1436 segment->hw.buf_addr_msb);
1440 segment->hw.buf_addr);
1442 last = segment;
1511 struct xilinx_cdma_tx_segment *segment;
1514 segment = list_first_entry(&head_desc->segments,
1518 hw = &segment->hw;
1591 struct xilinx_axidma_tx_segment *segment;
1594 segment = list_first_entry(&head_desc->segments,
1597 hw = &segment->hw;
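
The start-transfer paths consume segments in two different ways. VDMA (1368-1442) walks the segment list and writes each segment's buf_addr into a frame-buffer address slot, keeping the last segment for the closing geometry writes; CDMA and AXI DMA (1511-1597) only need the head descriptor's first segment, whose hw fields or phys address get programmed into the engine. A sketch of the VDMA side, with write_fb_addr()/write_fb_addr64() as hypothetical stand-ins for the driver's register accessors:

    struct xilinx_vdma_tx_segment *segment, *last = NULL;
    int i = 0;

    list_for_each_entry(segment, &desc->segments, node) {
            if (chan->ext_addr)                      /* assumed 64-bit address flag */
                    write_fb_addr64(chan, i++, segment->hw.buf_addr,
                                    segment->hw.buf_addr_msb);
            else
                    write_fb_addr(chan, i++, segment->hw.buf_addr);
            last = segment;
    }
    /* "last" then supplies the geometry for the final register writes */
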
2049 struct xilinx_vdma_tx_segment *segment;
2075 segment = xilinx_vdma_alloc_tx_segment(chan);
2076 if (!segment)
2080 hw = &segment->hw;
2104 /* Insert the segment into the descriptor segments list. */
2105 list_add_tail(&segment->node, &desc->segments);
2108 segment = list_first_entry(&desc->segments,
2110 desc->async_tx.phys = segment->phys;
2135 struct xilinx_cdma_tx_segment *segment;
2149 segment = xilinx_cdma_alloc_tx_segment(chan);
2150 if (!segment)
2153 hw = &segment->hw;
2162 /* Insert the segment into the descriptor segments list. */
2163 list_add_tail(&segment->node, &desc->segments);
2165 desc->async_tx.phys = segment->phys;
2166 hw->next_desc = segment->phys;
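
The interleaved (VDMA, 2049-2110) and memcpy (CDMA, 2135-2166) preps share one pattern: allocate a segment, fill its hw descriptor, append it to desc->segments, and publish a segment phys through async_tx.phys. The CDMA case is the simplest, using a single segment whose next_desc points back at itself so the chain terminates. Sketch of that body (the length/address field names are assumptions; the list handling and self-link follow the matches):

    segment = xilinx_cdma_alloc_tx_segment(chan);
    if (!segment)
            goto error;

    hw = &segment->hw;
    hw->control = len;              /* assumed field: transfer length      */
    hw->src_addr = dma_src;         /* assumed field: source bus address   */
    hw->dest_addr = dma_dst;        /* assumed field: destination address  */

    /* Insert the segment into the descriptor segments list. */
    list_add_tail(&segment->node, &desc->segments);

    desc->async_tx.phys = segment->phys;
    hw->next_desc = segment->phys;  /* single BD: chain terminates on itself */
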
2193 struct xilinx_axidma_tx_segment *segment = NULL;
2219 /* Get a free segment */
2220 segment = xilinx_axidma_alloc_tx_segment(chan);
2221 if (!segment)
2230 hw = &segment->hw;
2247 * Insert the segment into the descriptor segments
2250 list_add_tail(&segment->node, &desc->segments);
2254 segment = list_first_entry(&desc->segments,
2256 desc->async_tx.phys = segment->phys;
2260 segment->hw.control |= XILINX_DMA_BD_SOP;
2261 segment = list_last_entry(&desc->segments,
2264 segment->hw.control |= XILINX_DMA_BD_EOP;
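
The AXI DMA slave_sg prep at 2193-2264 converts each scatterlist entry into one or more segments, strings them onto desc->segments, and then marks the first buffer descriptor start-of-packet and the last one end-of-packet. A condensed sketch (per-segment length clamping and extended-address handling are elided; how buf_addr and control are filled is simplified beyond what the matches show):

    struct scatterlist *sg;
    int i;

    for_each_sg(sgl, sg, sg_len, i) {
            /* Get a free segment */
            segment = xilinx_axidma_alloc_tx_segment(chan);
            if (!segment)
                    goto error;

            hw = &segment->hw;
            hw->buf_addr = sg_dma_address(sg);   /* simplified: no msb split    */
            hw->control = sg_dma_len(sg);        /* simplified: no length clamp */

            /* Insert the segment into the descriptor segments list. */
            list_add_tail(&segment->node, &desc->segments);
    }

    /* cookie address = first segment; packet boundaries via SOP/EOP */
    segment = list_first_entry(&desc->segments,
                               struct xilinx_axidma_tx_segment, node);
    desc->async_tx.phys = segment->phys;
    segment->hw.control |= XILINX_DMA_BD_SOP;

    segment = list_last_entry(&desc->segments,
                              struct xilinx_axidma_tx_segment, node);
    segment->hw.control |= XILINX_DMA_BD_EOP;
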
2295 struct xilinx_axidma_tx_segment *segment, *head_segment, *prev = NULL;
2327 /* Get a free segment */
2328 segment = xilinx_axidma_alloc_tx_segment(chan);
2329 if (!segment)
2338 hw = &segment->hw;
2344 prev->hw.next_desc = segment->phys;
2346 prev = segment;
2350 * Insert the segment into the descriptor segments
2353 list_add_tail(&segment->node, &desc->segments);
2366 segment = list_last_entry(&desc->segments,
2369 segment->hw.next_desc = (u32) head_segment->phys;
2374 segment->hw.control |= XILINX_DMA_BD_EOP;
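
The cyclic prep at 2295-2374 builds the same kind of chain, one stretch per period, but links each segment's next_desc to the one after it and finally points the last segment back at the head so the engine loops. Sketch of the linking (period sizing and buffer arithmetic omitted; num_periods is an assumed loop bound):

    struct xilinx_axidma_tx_segment *segment, *head_segment = NULL, *prev = NULL;
    int i;

    for (i = 0; i < num_periods; i++) {              /* assumed bound */
            /* Get a free segment */
            segment = xilinx_axidma_alloc_tx_segment(chan);
            if (!segment)
                    goto error;

            if (!head_segment)
                    head_segment = segment;
            if (prev)
                    prev->hw.next_desc = segment->phys;  /* forward link */
            prev = segment;

            list_add_tail(&segment->node, &desc->segments);
    }

    /* close the ring: the tail points back at the head, EOP marks the wrap */
    segment = list_last_entry(&desc->segments,
                              struct xilinx_axidma_tx_segment, node);
    segment->hw.next_desc = (u32)head_segment->phys;
    segment->hw.control |= XILINX_DMA_BD_EOP;
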
2403 struct xilinx_aximcdma_tx_segment *segment = NULL;
2429 /* Get a free segment */
2430 segment = xilinx_aximcdma_alloc_tx_segment(chan);
2431 if (!segment)
2440 hw = &segment->hw;
2454 * Insert the segment into the descriptor segments
2457 list_add_tail(&segment->node, &desc->segments);
2461 segment = list_first_entry(&desc->segments,
2463 desc->async_tx.phys = segment->phys;
2467 segment->hw.control |= XILINX_MCDMA_BD_SOP;
2468 segment = list_last_entry(&desc->segments,
2471 segment->hw.control |= XILINX_MCDMA_BD_EOP;
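
The MCDMA slave_sg prep at 2403-2471 reuses the AXI DMA skeleton above; only the segment type and the buffer-descriptor flag names differ. The tail end, per the matches:

    segment = list_first_entry(&desc->segments,
                               struct xilinx_aximcdma_tx_segment, node);
    desc->async_tx.phys = segment->phys;
    segment->hw.control |= XILINX_MCDMA_BD_SOP;

    segment = list_last_entry(&desc->segments,
                              struct xilinx_aximcdma_tx_segment, node);
    segment->hw.control |= XILINX_MCDMA_BD_EOP;
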