Lines matching "+full:0 +full:xd" (search hits from the UniPhier XDMAC DMA engine driver, grouped by function below)

Register and bit-field definitions (offsets are within a channel's register window):

#define XDMAC_CH_WIDTH		0x100	/* register stride between channels */

#define XDMAC_TFA		0x08
#define XDMAC_TFA_MASK		GENMASK(5, 0)
#define XDMAC_SADM		0x10	/* source address mode */
#define XDMAC_SADM_SAM_INC	0
#define XDMAC_DADM		0x14	/* destination address mode */
#define XDMAC_EXSAD		0x18	/* extended source address */
#define XDMAC_EXDAD		0x1c	/* extended destination address */
#define XDMAC_SAD		0x20	/* source address */
#define XDMAC_DAD		0x24	/* destination address */
#define XDMAC_ITS		0x28	/* transfer (burst) size in bytes */
#define XDMAC_ITS_MASK		GENMASK(25, 0)
#define XDMAC_TNUM		0x2c	/* number of bursts */
#define XDMAC_TNUM_MASK		GENMASK(15, 0)
#define XDMAC_TSS		0x30	/* software transfer start */
#define XDMAC_TSS_REQ		BIT(0)
#define XDMAC_IEN		0x34	/* interrupt enable */
#define XDMAC_IEN_ENDIEN	BIT(0)
#define XDMAC_STAT		0x40	/* channel status */
#define XDMAC_STAT_TENF		BIT(0)
#define XDMAC_IR		0x44	/* interrupt status */
#define XDMAC_IR_ENDF		BIT(0)
#define XDMAC_ID		0x48	/* interrupt detect status */
#define XDMAC_ID_ENDIDF		BIT(0)

#define XDMAC_MAX_WORD_SIZE	(XDMAC_ITS_MASK & ~GENMASK(3, 0))
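Given XDMAC_CH_WIDTH and the xc->reg_ch_base arithmetic visible in the fragments below, each channel appears to own a 0x100-byte register window. A minimal sketch of that addressing, assuming a reg_base mapping like the one obtained in probe (the helper name is made up):

/* Illustrative only: locate channel chan_id's register window. */
static void __iomem *xdmac_chan_window(void __iomem *reg_base,
				       unsigned int chan_id)
{
	return reg_base + chan_id * XDMAC_CH_WIDTH;
}

All the offsets above (XDMAC_TFA through XDMAC_ID) would then be relative to this per-channel base, which matches the xc->reg_ch_base + XDMAC_* accesses in the fragments that follow.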
Channel state member referenced as xc->xd below (the descriptor currently on the hardware):
	struct uniphier_xdmac_desc *xd;
In uniphier_xdmac_chan_start(), which takes the channel and a descriptor (struct uniphier_xdmac_desc *xd), the current node's addresses and burst parameters are loaded and the transfer direction is checked:

	src_addr = xd->nodes[xd->cur_node].src;
	dst_addr = xd->nodes[xd->cur_node].dst;
	its = xd->nodes[xd->cur_node].burst_size;
	tnum = xd->nodes[xd->cur_node].nr_burst;
	...
	if (xd->dir == DMA_DEV_TO_MEM) {
	...
	if (xd->dir == DMA_MEM_TO_DEV) {
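Read together with the register map, the per-node programming step plausibly looks like the sketch below. This is not the verbatim driver code: the write order, the address-mode handling, and the use of EXSAD/EXDAD for upper address bits are assumptions.

	/* Sketch: program one node of the descriptor and kick the channel. */
	writel(lower_32_bits(src_addr), xc->reg_ch_base + XDMAC_SAD);
	writel(upper_32_bits(src_addr), xc->reg_ch_base + XDMAC_EXSAD);	/* assumption: upper bits */
	writel(lower_32_bits(dst_addr), xc->reg_ch_base + XDMAC_DAD);
	writel(upper_32_bits(dst_addr), xc->reg_ch_base + XDMAC_EXDAD);	/* assumption: upper bits */
	writel(its & XDMAC_ITS_MASK, xc->reg_ch_base + XDMAC_ITS);	/* bytes per burst */
	writel(tnum & XDMAC_TNUM_MASK, xc->reg_ch_base + XDMAC_TNUM);	/* number of bursts */
	writel(XDMAC_IEN_ENDIEN, xc->reg_ch_base + XDMAC_IEN);		/* end-of-transfer interrupt */
	writel(XDMAC_TSS_REQ, xc->reg_ch_base + XDMAC_TSS);		/* software start request */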
In uniphier_xdmac_chan_stop():

	writel(0, xc->reg_ch_base + XDMAC_TSS);
In uniphier_xdmac_start():

	struct uniphier_xdmac_desc *xd;

	xd = uniphier_xdmac_next_desc(xc);
	if (xd)
		uniphier_xdmac_chan_start(xc, xd);

	/* set desc to chan regardless of whether xd is NULL */
	xc->xd = xd;
In uniphier_xdmac_chan_irq(), an end-of-transfer interrupt (XDMAC_ID_ENDIDF) advances the active descriptor to its next node; once all nodes are done the cookie is completed, otherwise the channel is restarted on the same descriptor:

	} else if ((stat & XDMAC_ID_ENDIDF) && xc->xd) {
		xc->xd->cur_node++;
		if (xc->xd->cur_node >= xc->xd->nr_node) {
			vchan_cookie_complete(&xc->xd->vd);
		...
			uniphier_xdmac_chan_start(xc, xc->xd);
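A compact paraphrase of that END-interrupt path, written as a standalone helper for readability (illustrative only; the struct name is inferred from the xc variable, and it assumes, based on the fragments, that uniphier_xdmac_start() is what picks up the next queued descriptor after completion):

/* Illustrative paraphrase of the node-advance logic shown above. */
static void xdmac_advance_or_complete(struct uniphier_xdmac_chan *xc)
{
	xc->xd->cur_node++;
	if (xc->xd->cur_node >= xc->xd->nr_node) {
		/* last node finished: report completion, move on to the next descriptor */
		vchan_cookie_complete(&xc->xd->vd);
		uniphier_xdmac_start(xc);
	} else {
		/* more nodes to go: reprogram the channel with the next one */
		uniphier_xdmac_chan_start(xc, xc->xd);
	}
}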
In uniphier_xdmac_irq_handler(), every channel is checked:

	for (i = 0; i < xdev->nr_chans; i++)
In uniphier_xdmac_prep_dma_memcpy(), a descriptor with nr nodes is allocated and each node records the source, destination, burst size and burst count of one chunk of the copy:

	struct uniphier_xdmac_desc *xd;
	...
	xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);
	if (!xd)
	...
	for (i = 0; i < nr; i++) {
		...
		xd->nodes[i].src = src;
		xd->nodes[i].dst = dst;
		xd->nodes[i].burst_size = burst_size;
		xd->nodes[i].nr_burst = len / burst_size;
	...
	xd->dir = DMA_MEM_TO_MEM;
	xd->nr_node = nr;
	xd->cur_node = 0;

	return vchan_tx_prep(vc, &xd->vd, flags);
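For context, a kernel client would reach uniphier_xdmac_prep_dma_memcpy() through the generic dmaengine helpers rather than calling it directly. A minimal sketch, with chan, dst, src and len supplied by the caller and error handling trimmed:

	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (desc) {
		dmaengine_submit(desc);
		dma_async_issue_pending(chan);	/* ends up in uniphier_xdmac_issue_pending() */
	}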
In uniphier_xdmac_prep_slave_sg(), one node is built per scatterlist entry; the device-side address is chosen by direction, and entries whose DMA length is not a multiple of the burst size, or that would need more than XDMAC_MAX_WORDS bursts, cause the descriptor to be freed and the prep to fail:

	struct uniphier_xdmac_desc *xd;
	...
	xd = kzalloc(struct_size(xd, nodes, sg_len), GFP_NOWAIT);
	if (!xd)
	...
		xd->nodes[i].src = (direction == DMA_DEV_TO_MEM) ...
		xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV) ...
		xd->nodes[i].burst_size = maxburst * buswidth;
		xd->nodes[i].nr_burst =
			sg_dma_len(sg) / xd->nodes[i].burst_size;
	...
		if (sg_dma_len(sg) % xd->nodes[i].burst_size) {
			kfree(xd);
	...
		if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) {
			kfree(xd);
	...
	xd->dir = direction;
	xd->nr_node = sg_len;
	xd->cur_node = 0;

	return vchan_tx_prep(vc, &xd->vd, flags);
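A slave transfer would be driven the same way from a peripheral driver: configure the channel, then prep and submit a scatterlist. The sketch below is illustrative; the FIFO address, bus width and burst values are placeholders, and per the checks above each scatterlist entry's length must be a multiple of maxburst * buswidth:

	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_dma_addr,	/* placeholder device FIFO address */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst	= 8,			/* 8 * 4 bytes = 32-byte bursts */
	};
	struct dma_async_tx_descriptor *desc;

	dmaengine_slave_config(chan, &cfg);
	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT);
	if (desc) {
		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
	}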
In uniphier_xdmac_slave_config():

	return 0;

In uniphier_xdmac_terminate_all(), the in-flight descriptor is dropped:

	int ret = 0;
	...
	if (xc->xd) {
		vchan_terminate_vdesc(&xc->xd->vd);
		xc->xd = NULL;
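Cancellation also goes through the generic API; dmaengine_terminate_sync() (or the _async variant) reaches uniphier_xdmac_terminate_all() via the driver's terminate_all callback and drops xc->xd as shown above:

	/* Abort whatever is queued or in flight on this channel. */
	dmaengine_terminate_sync(chan);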
In uniphier_xdmac_issue_pending(), the hardware is only kicked when no descriptor is already active:

	if (vchan_issue_pending(vc) && !xc->xd)
In of_dma_uniphier_xlate():

	int chan_id = dma_spec->args[0];

In uniphier_xdmac_probe():

	xdev->reg_base = devm_platform_ioremap_resource(pdev, 0);
	...
	for (i = 0; i < nr_chans; i++)
	...
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
	...
	return 0;

In uniphier_xdmac_remove():

	return 0;