Lines Matching +full:big-endian-regs

// SPDX-License-Identifier: GPL-2.0+
//
// Copyright (c) 2013-2014 Freescale Semiconductor, Inc
...
#include <linux/dma-mapping.h>
...
#include "fsl-edma-common.h"

fsl_edma_tx_chan_handler():
    spin_lock(&fsl_chan->vchan.lock);
    ...
    if (!fsl_chan->edesc) {
        ...
        spin_unlock(&fsl_chan->vchan.lock);
        ...
    }
    ...
    if (!fsl_chan->edesc->iscyclic) {
        list_del(&fsl_chan->edesc->vdesc.node);
        vchan_cookie_complete(&fsl_chan->edesc->vdesc);
        fsl_chan->edesc = NULL;
        fsl_chan->status = DMA_COMPLETE;
        fsl_chan->idle = true;
    } else {
        vchan_cyclic_callback(&fsl_chan->edesc->vdesc);
    }
    ...
    if (!fsl_chan->edesc)
        ...
    spin_unlock(&fsl_chan->vchan.lock);

fsl_edma3_enable_request():
    if (!fsl_chan->is_rxchan)
        ...
    if (fsl_chan->is_rxchan)
        ...
    if (fsl_chan->is_remote)
        ...
    edma_writel_chreg(fsl_chan, fsl_chan->srcid, ch_mux);

fsl_edma_enable_request():
    struct edma_regs *regs = &fsl_chan->edma->regs;
    u32 ch = fsl_chan->vchan.chan.chan_id;
    ...
    if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) {
        edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei);
        edma_writeb(fsl_chan->edma, ch, regs->serq);
    } else {
        /* ColdFire is big endian and accesses big-endian
         * I/O peripherals natively
         */
        iowrite8(EDMA_SEEI_SEEI(ch), regs->seei);
        iowrite8(ch, regs->serq);
    }
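
Both branches hit the same registers; what differs is who handles byte-lane placement. edma_writeb() is an endianness-aware wrapper, while the ColdFire branch can call iowrite8() directly because the CPU and its peripherals are both big endian. A minimal sketch of what such a byte-write wrapper can look like (the 0x3 address XOR is an assumption modeled on the common byte-lane-swap trick for 8-bit registers behind a big-endian 32-bit bus, not a quote of the driver):

    #include <stdint.h>

    /* Hypothetical engine handle; only the endianness flag matters here. */
    struct engine {
        int big_endian;
    };

    /*
     * Endian-aware 8-bit register write: a single byte never needs a
     * value swap, but on a big-endian 32-bit bus the byte lanes within
     * each word are mirrored, so the *address* is adjusted instead.
     */
    static void engine_writeb(struct engine *e, uint8_t val,
                              volatile uint8_t *addr)
    {
        if (e->big_endian)
            addr = (volatile uint8_t *)((uintptr_t)addr ^ 0x3);
        *addr = val;    /* stands in for iowrite8() */
    }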

fsl_edma_disable_request():
    struct edma_regs *regs = &fsl_chan->edma->regs;
    u32 ch = fsl_chan->vchan.chan.chan_id;
    ...
    if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) {
        edma_writeb(fsl_chan->edma, ch, regs->cerq);
        edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei);
    } else {
        /* ColdFire is big endian and accesses big-endian
         * I/O peripherals natively
         */
        iowrite8(ch, regs->cerq);
        iowrite8(EDMA_CEEI_CEEI(ch), regs->ceei);
    }

fsl_edma_chan_mux():
    u32 ch = fsl_chan->vchan.chan.chan_id;
    ...
    int endian_diff[4] = {3, 1, -1, -3};
    u32 dmamux_nr = fsl_chan->edma->drvdata->dmamuxs;
    ...
    chans_per_mux = fsl_chan->edma->n_chans / dmamux_nr;
    ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux;
    ...
    if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_MUX_SWAP)
        ...
    muxaddr = fsl_chan->edma->muxbase[ch / chans_per_mux];
    ...
    if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_CONFIG32)
        ...
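
The endian_diff[] table expresses the same byte-lane mirroring additively: adding endian_diff[off % 4] to a byte offset maps lanes {0, 1, 2, 3} of each 32-bit mux word to {3, 2, 1, 0}. Presumably it is applied to ch_off when FSL_EDMA_DRV_MUX_SWAP is set (the exact use site is elided above). A standalone check of the arithmetic:

    #include <assert.h>

    int main(void)
    {
        int endian_diff[4] = {3, 1, -1, -3};
        int off;

        /* off + endian_diff[off % 4] mirrors byte lanes in each word */
        for (off = 0; off < 16; off++) {
            int swapped = off + endian_diff[off % 4];

            assert(swapped == (off & ~3) + (3 - (off % 4)));
        }
        return 0;
    }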

fsl_edma_get_tcd_attr():
    val = ffs(addr_width) - 1;
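
ffs(addr_width) - 1 is simply log2 for the power-of-two bus widths the TCD ATTR field accepts: widths of 1, 2, 4 and 8 bytes encode as 0, 1, 2 and 3. A quick check, with GCC's __builtin_ffs standing in for the kernel's ffs():

    #include <assert.h>

    int main(void)
    {
        /* 1-, 2-, 4-, 8-byte widths -> SSIZE/DSIZE encodings 0..3 */
        assert(__builtin_ffs(1) - 1 == 0);
        assert(__builtin_ffs(2) - 1 == 1);
        assert(__builtin_ffs(4) - 1 == 2);
        assert(__builtin_ffs(8) - 1 == 3);
        return 0;
    }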

fsl_edma_free_desc():
    for (i = 0; i < fsl_desc->n_tcds; i++)
        dma_pool_free(fsl_desc->echan->tcd_pool, fsl_desc->tcd[i].vtcd,
                      fsl_desc->tcd[i].ptcd);

fsl_edma_terminate_all():
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    ...
    fsl_chan->edesc = NULL;
    fsl_chan->idle = true;
    vchan_get_all_descriptors(&fsl_chan->vchan, &head);
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
    vchan_dma_desc_free_list(&fsl_chan->vchan, &head);
    ...
    pm_runtime_allow(fsl_chan->pd_dev);

fsl_edma_pause():
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    if (fsl_chan->edesc) {
        ...
        fsl_chan->status = DMA_PAUSED;
        fsl_chan->idle = true;
    }
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);

fsl_edma_resume():
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    if (fsl_chan->edesc) {
        ...
        fsl_chan->status = DMA_IN_PROGRESS;
        fsl_chan->idle = false;
    }
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);

fsl_edma_unprep_slave_dma():
    if (fsl_chan->dma_dir != DMA_NONE)
        dma_unmap_resource(fsl_chan->vchan.chan.device->dev,
                           fsl_chan->dma_dev_addr,
                           fsl_chan->dma_dev_size,
                           fsl_chan->dma_dir, 0);
    fsl_chan->dma_dir = DMA_NONE;

fsl_edma_prep_slave_dma():
    struct device *dev = fsl_chan->vchan.chan.device->dev;
    ...
        addr = fsl_chan->cfg.dst_addr;
        size = fsl_chan->cfg.dst_maxburst;
    ...
        addr = fsl_chan->cfg.src_addr;
        size = fsl_chan->cfg.src_maxburst;
    ...
    if (fsl_chan->dma_dir == dma_dir)
        ...
    fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0);
    if (dma_mapping_error(dev, fsl_chan->dma_dev_addr))
        ...
    fsl_chan->dma_dev_size = size;
    fsl_chan->dma_dir = dma_dir;

fsl_edma_slave_config():
    memcpy(&fsl_chan->cfg, cfg, sizeof(*cfg));
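
The copied struct dma_slave_config is everything the prep paths below consume: addr_width becomes the TCD attr via fsl_edma_get_tcd_attr(), and addr_width * maxburst becomes nbytes. A hedged consumer-side sketch (the FIFO address is a made-up placeholder):

    #include <linux/dmaengine.h>

    /* Hedged consumer sketch; dst_addr is a hypothetical device FIFO. */
    static int example_config(struct dma_chan *chan)
    {
        struct dma_slave_config cfg = {
            .direction      = DMA_MEM_TO_DEV,
            .dst_addr       = 0x40001000,   /* hypothetical FIFO address */
            .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
            .dst_maxburst   = 4,            /* cached as cfg.dst_maxburst */
        };

        /* ends up memcpy()ed into fsl_chan->cfg by the callback above */
        return dmaengine_slave_config(chan, &cfg);
    }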

fsl_edma_desc_residue():
    struct fsl_edma_desc *edesc = fsl_chan->edesc;
    enum dma_transfer_direction dir = edesc->dirn;
    ...
    for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++) {
        nbytes = le32_to_cpu(edesc->tcd[i].vtcd->nbytes);
        ...
        len += nbytes * le16_to_cpu(edesc->tcd[i].vtcd->biter);
    }
    ...
    for (i = 0; i < fsl_chan->edesc->n_tcds; i++) {
        nbytes = le32_to_cpu(edesc->tcd[i].vtcd->nbytes);
        ...
        size = nbytes * le16_to_cpu(edesc->tcd[i].vtcd->biter);
        ...
        if (dir == DMA_MEM_TO_DEV)
            dma_addr = le32_to_cpu(edesc->tcd[i].vtcd->saddr);
        else
            dma_addr = le32_to_cpu(edesc->tcd[i].vtcd->daddr);
        ...
        len -= size;
        ...
        len += dma_addr + size - cur_addr;
        ...
    }
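
The residue walk is two passes: first the total descriptor length is accumulated as nbytes * biter per TCD, then TCDs are scanned in transfer order, subtracting each finished TCD and, for the TCD containing the engine's current address, adding back only the untransferred tail. A self-contained model of that arithmetic (segment layout and cur_addr are invented for the demo):

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* One TCD modeled as a contiguous segment: start address and size. */
    struct seg { uint32_t addr; uint32_t size; };

    static uint32_t residue(const struct seg *s, size_t n, uint32_t cur_addr)
    {
        uint32_t len = 0;
        size_t i;

        for (i = 0; i < n; i++)         /* pass 1: total length */
            len += s[i].size;

        for (i = 0; i < n; i++) {       /* pass 2: subtract completed work */
            len -= s[i].size;
            if (cur_addr >= s[i].addr &&
                cur_addr < s[i].addr + s[i].size) {
                /* tail of the in-progress segment is still pending */
                len += s[i].addr + s[i].size - cur_addr;
                break;  /* later segments stay counted from pass 1 */
            }
        }
        return len;
    }

    int main(void)
    {
        struct seg s[] = { {0x1000, 0x100}, {0x2000, 0x100} };

        /* halfway through the second segment: 0x80 bytes remain */
        assert(residue(s, 2, 0x2080) == 0x80);
        return 0;
    }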

fsl_edma_tx_status():
    ...
        return fsl_chan->status;
    ...
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    vdesc = vchan_find_desc(&fsl_chan->vchan, cookie);
    if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie)
        txstate->residue =
            ...;
    else if (vdesc)
        txstate->residue =
            ...;
    else
        txstate->residue = 0;
    ...
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
    ...
    return fsl_chan->status;

fsl_edma_set_tcd_regs():
    /*
     * TCD parameters are stored in memory in little endian format.
     * However, we need to load the TCD registers in big- or
     * little-endian order, obeying the eDMA engine's endian model,
     * ...
     */
    ...
    edma_write_tcdreg(fsl_chan, tcd->saddr, saddr);
    edma_write_tcdreg(fsl_chan, tcd->daddr, daddr);
    ...
    edma_write_tcdreg(fsl_chan, tcd->attr, attr);
    edma_write_tcdreg(fsl_chan, tcd->soff, soff);
    ...
    edma_write_tcdreg(fsl_chan, tcd->nbytes, nbytes);
    edma_write_tcdreg(fsl_chan, tcd->slast, slast);
    ...
    edma_write_tcdreg(fsl_chan, tcd->citer, citer);
    edma_write_tcdreg(fsl_chan, tcd->biter, biter);
    edma_write_tcdreg(fsl_chan, tcd->doff, doff);
    ...
    edma_write_tcdreg(fsl_chan, tcd->dlast_sga, dlast_sga);
    ...
    csr = le16_to_cpu(tcd->csr);
    ...
    if (fsl_chan->is_sw) {
        ...
        tcd->csr = cpu_to_le16(csr);
    }
    ...
    edma_write_tcdreg(fsl_chan, tcd->csr, csr);
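
Because the in-memory TCD is fixed little endian, the read-modify-write of csr above goes through an le16_to_cpu()/cpu_to_le16() round trip. A portable illustration of why that round trip is byte-order safe, with open-coded accessors standing in for the kernel helpers:

    #include <assert.h>
    #include <stdint.h>

    /* Decode/encode a little-endian u16 regardless of host byte order. */
    static uint16_t le16_get(const uint8_t *p)
    {
        return (uint16_t)(p[0] | (p[1] << 8));
    }

    static void le16_put(uint8_t *p, uint16_t v)
    {
        p[0] = v & 0xff;
        p[1] = v >> 8;
    }

    int main(void)
    {
        uint8_t csr_mem[2];
        uint16_t csr;

        le16_put(csr_mem, 0x0102);  /* TCD field as stored in memory */

        csr = le16_get(csr_mem);    /* le16_to_cpu() */
        csr |= 0x0080;              /* e.g. set a control bit */
        le16_put(csr_mem, csr);     /* cpu_to_le16() */

        assert(csr_mem[0] == 0x82 && csr_mem[1] == 0x01);
        return 0;
    }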

fsl_edma_fill_tcd():
    struct dma_slave_config *cfg = &fsl_chan->cfg;
    ...
    /*
     * ... TCD fields are stored in little endian format irrespective
     * of the register endian model. So we put the value in little
     * endian in memory, waiting for fsl_edma_set_tcd_regs() to do
     * the swap.
     */
    tcd->saddr = cpu_to_le32(src);
    tcd->daddr = cpu_to_le32(dst);
    ...
    tcd->attr = cpu_to_le16(attr);
    ...
    tcd->soff = cpu_to_le16(soff);
    ...
    if (fsl_chan->is_multi_fifo) {
        ...
        burst = cfg->direction == DMA_DEV_TO_MEM ?
                cfg->src_maxburst : cfg->dst_maxburst;
        nbytes |= EDMA_V3_TCD_NBYTES_MLOFF(-(burst * 4));
        ...
        if (cfg->direction == DMA_MEM_TO_DEV)
            ...
    }
    ...
    tcd->nbytes = cpu_to_le32(nbytes);
    tcd->slast = cpu_to_le32(slast);
    ...
    tcd->citer = cpu_to_le16(EDMA_TCD_CITER_CITER(citer));
    tcd->doff = cpu_to_le16(doff);
    ...
    tcd->dlast_sga = cpu_to_le32(dlast_sga);
    ...
    tcd->biter = cpu_to_le16(EDMA_TCD_BITER_BITER(biter));
    ...
    if (fsl_chan->is_rxchan)
        ...
    if (fsl_chan->is_sw)
        ...
    tcd->csr = cpu_to_le16(csr);
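
For multi-FIFO peripherals the minor loop walks across adjacent FIFO registers (soff/doff = 4) and must snap back before the next minor loop; that is what the signed minor-loop offset of -(burst * 4) encodes. A worked check of the rewind arithmetic (the walk is a model of the engine's address sequencing, not driver code; the FIFO base is invented):

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t fifo_base = 0x5000;    /* hypothetical multi-FIFO window */
        int burst = 4;                  /* beats per minor loop */
        int32_t mloff = -(burst * 4);   /* signed rewind, as in the TCD */
        uint32_t addr = fifo_base;
        int beat, loop;

        for (loop = 0; loop < 3; loop++) {
            for (beat = 0; beat < burst; beat++)
                addr += 4;      /* soff/doff = 4 walks the FIFOs */
            addr += mloff;      /* minor-loop offset snaps back */
            assert(addr == fifo_base);
        }
        return 0;
    }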

fsl_edma_alloc_desc():
    fsl_desc->echan = fsl_chan;
    fsl_desc->n_tcds = sg_len;
    for (i = 0; i < sg_len; i++) {
        fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool,
                GFP_NOWAIT, &fsl_desc->tcd[i].ptcd);
        if (!fsl_desc->tcd[i].vtcd)
            ...
    }
    ...
    while (--i >= 0)
        dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd,
                fsl_desc->tcd[i].ptcd);
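
The while (--i >= 0) tail is the standard partial-allocation unwind: when the (i+1)-th dma_pool_alloc() fails, exactly the i allocations that succeeded are freed, in reverse order. The same shape in miniature, with malloc()/free() standing in for the DMA pool calls:

    #include <stdlib.h>

    /* Allocate n buffers or none: unwind earlier allocations on failure. */
    static void **alloc_all(int n, size_t size)
    {
        void **v = calloc(n, sizeof(*v));
        int i;

        if (!v)
            return NULL;
        for (i = 0; i < n; i++) {
            v[i] = malloc(size);    /* stands in for dma_pool_alloc() */
            if (!v[i])
                goto err;
        }
        return v;
    err:
        while (--i >= 0)            /* frees only the 0..i-1 successes */
            free(v[i]);             /* stands in for dma_pool_free() */
        free(v);
        return NULL;
    }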

fsl_edma_prep_dma_cyclic():
    fsl_desc->iscyclic = true;
    fsl_desc->dirn = direction;
    ...
    if (direction == DMA_MEM_TO_DEV) {
        fsl_chan->attr =
            fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width);
        nbytes = fsl_chan->cfg.dst_addr_width *
            fsl_chan->cfg.dst_maxburst;
    } else {
        fsl_chan->attr =
            fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width);
        nbytes = fsl_chan->cfg.src_addr_width *
            fsl_chan->cfg.src_maxburst;
    }
    ...
        last_sg = fsl_desc->tcd[(i + 1) % sg_len].ptcd;
        ...
        if (direction == DMA_MEM_TO_DEV) {
            ...
            dst_addr = fsl_chan->dma_dev_addr;
            soff = fsl_chan->cfg.dst_addr_width;
            doff = fsl_chan->is_multi_fifo ? 4 : 0;
        } else if (direction == DMA_DEV_TO_MEM) {
            src_addr = fsl_chan->dma_dev_addr;
            ...
            soff = fsl_chan->is_multi_fifo ? 4 : 0;
            doff = fsl_chan->cfg.src_addr_width;
        } else {
            src_addr = fsl_chan->cfg.src_addr;
            dst_addr = fsl_chan->cfg.dst_addr;
            ...
        }
        ...
        fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, dst_addr,
                fsl_chan->attr, soff, nbytes, 0, iter,
                ...);
    ...
    return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);
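
The (i + 1) % sg_len link is what makes the descriptor cyclic: each TCD's dlast_sga points at the DMA address of the next TCD, and the last wraps back to the first, so the engine loops in hardware with no CPU intervention. The index arithmetic in isolation:

    #include <assert.h>

    int main(void)
    {
        int sg_len = 4, i;

        for (i = 0; i < sg_len; i++) {
            int next = (i + 1) % sg_len;

            /* 0->1, 1->2, 2->3, and the tail wraps: 3->0 */
            assert(next == (i == sg_len - 1 ? 0 : i + 1));
        }
        return 0;
    }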

fsl_edma_prep_slave_sg():
    fsl_desc->iscyclic = false;
    fsl_desc->dirn = direction;
    ...
    if (direction == DMA_MEM_TO_DEV) {
        fsl_chan->attr =
            fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width);
        nbytes = fsl_chan->cfg.dst_addr_width *
            fsl_chan->cfg.dst_maxburst;
    } else {
        fsl_chan->attr =
            fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width);
        nbytes = fsl_chan->cfg.src_addr_width *
            fsl_chan->cfg.src_maxburst;
    }
    ...
        if (direction == DMA_MEM_TO_DEV) {
            ...
            dst_addr = fsl_chan->dma_dev_addr;
            soff = fsl_chan->cfg.dst_addr_width;
            ...
        } else if (direction == DMA_DEV_TO_MEM) {
            src_addr = fsl_chan->dma_dev_addr;
            ...
            doff = fsl_chan->cfg.src_addr_width;
        } else {
            src_addr = fsl_chan->cfg.src_addr;
            dst_addr = fsl_chan->cfg.dst_addr;
            ...
        }
        ...
                fsl_chan->cfg.src_maxburst :
                fsl_chan->cfg.dst_maxburst;
        ...
        for (j = burst; j > 1; j--) {
            ...
        }
        ...
        if (i < sg_len - 1) {
            last_sg = fsl_desc->tcd[(i + 1)].ptcd;
            fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr,
                    dst_addr, fsl_chan->attr, soff,
                    ...);
        } else {
            ...
            fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr,
                    dst_addr, fsl_chan->attr, soff,
                    ...);
        }
    ...
    return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);

fsl_edma_prep_memcpy():
    fsl_desc->iscyclic = false;
    ...
    fsl_chan->is_sw = true;
    ...
    fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[0].vtcd, dma_src, dma_dst,
            ...);
    ...
    return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);

fsl_edma_xfer_desc():
    lockdep_assert_held(&fsl_chan->vchan.lock);
    ...
    vdesc = vchan_next_desc(&fsl_chan->vchan);
    ...
    fsl_chan->edesc = to_fsl_edma_desc(vdesc);
    fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd);
    ...
    fsl_chan->status = DMA_IN_PROGRESS;
    fsl_chan->idle = false;

fsl_edma_issue_pending():
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    ...
    if (unlikely(fsl_chan->pm_state != RUNNING)) {
        spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
        ...
    }
    ...
    if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc)
        ...
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
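
On the consumer side this is the tail of the usual dmaengine sequence: configure, prepare, submit, then dma_async_issue_pending(), which lands in fsl_edma_issue_pending() above and starts the channel if it is idle. A hedged end-to-end sketch (buffer and length are placeholders; chan is assumed to come from dma_request_chan()):

    #include <linux/dmaengine.h>
    #include <linux/errno.h>

    /* Hedged consumer sketch of submit + kick. */
    static int example_tx(struct dma_chan *chan, dma_addr_t buf, size_t len)
    {
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        desc = dmaengine_prep_slave_single(chan, buf, len,
                                           DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT);
        if (!desc)
            return -EINVAL;

        cookie = dmaengine_submit(desc);    /* queues on the vchan */
        if (dma_submit_error(cookie))
            return -EINVAL;

        dma_async_issue_pending(chan);      /* -> fsl_edma_issue_pending() */
        return 0;
    }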

fsl_edma_alloc_chan_resources():
    fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev,
            ...);

fsl_edma_free_chan_resources():
    struct fsl_edma_engine *edma = fsl_chan->edma;
    ...
    spin_lock_irqsave(&fsl_chan->vchan.lock, flags);
    ...
    if (edma->drvdata->dmamuxs)
        ...
    fsl_chan->edesc = NULL;
    vchan_get_all_descriptors(&fsl_chan->vchan, &head);
    ...
    spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);
    ...
    vchan_dma_desc_free_list(&fsl_chan->vchan, &head);
    dma_pool_destroy(fsl_chan->tcd_pool);
    fsl_chan->tcd_pool = NULL;
    fsl_chan->is_sw = false;
    fsl_chan->srcid = 0;

fsl_edma_cleanup_vchan():
    list_for_each_entry_safe(chan, _chan,
            &dmadev->channels, vchan.chan.device_node) {
        list_del(&chan->vchan.chan.device_node);
        tasklet_kill(&chan->vchan.task);
    }

fsl_edma_setup_regs():
    bool is64 = !!(edma->drvdata->flags & FSL_EDMA_DRV_EDMA64);
    ...
    edma->regs.cr = edma->membase + EDMA_CR;
    edma->regs.es = edma->membase + EDMA_ES;
    edma->regs.erql = edma->membase + EDMA_ERQ;
    edma->regs.eeil = edma->membase + EDMA_EEI;
    ...
    edma->regs.serq = edma->membase + (is64 ? EDMA64_SERQ : EDMA_SERQ);
    edma->regs.cerq = edma->membase + (is64 ? EDMA64_CERQ : EDMA_CERQ);
    edma->regs.seei = edma->membase + (is64 ? EDMA64_SEEI : EDMA_SEEI);
    edma->regs.ceei = edma->membase + (is64 ? EDMA64_CEEI : EDMA_CEEI);
    edma->regs.cint = edma->membase + (is64 ? EDMA64_CINT : EDMA_CINT);
    edma->regs.cerr = edma->membase + (is64 ? EDMA64_CERR : EDMA_CERR);
    edma->regs.ssrt = edma->membase + (is64 ? EDMA64_SSRT : EDMA_SSRT);
    edma->regs.cdne = edma->membase + (is64 ? EDMA64_CDNE : EDMA_CDNE);
    edma->regs.intl = edma->membase + (is64 ? EDMA64_INTL : EDMA_INTR);
    edma->regs.errl = edma->membase + (is64 ? EDMA64_ERRL : EDMA_ERR);
    ...
    edma->regs.erqh = edma->membase + EDMA64_ERQH;
    edma->regs.eeih = edma->membase + EDMA64_EEIH;
    edma->regs.errh = edma->membase + EDMA64_ERRH;
    edma->regs.inth = edma->membase + EDMA64_INTH;
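
The whole setup reduces to one base pointer plus per-register offsets picked by a ternary on the 64-channel flag; the *h (high-half) registers exist only on the EDMA64 layout. A compact model of the pattern with invented offsets (not the real EDMA/EDMA64 values):

    #include <stddef.h>
    #include <stdint.h>

    /* Invented offsets, standing in for EDMA_SERQ/EDMA64_SERQ etc. */
    #define REG_SERQ    0x1b
    #define REG64_SERQ  0x08
    #define REG64_ERQH  0x20

    struct engine_regs {
        uint8_t *serq;
        uint8_t *erqh;  /* only mapped on the 64-channel layout */
    };

    static void setup_regs(struct engine_regs *r, uint8_t *membase, int is64)
    {
        r->serq = membase + (is64 ? REG64_SERQ : REG_SERQ);
        r->erqh = is64 ? membase + REG64_ERQH : NULL;   /* high half */
    }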