Lines matching the full-text search full:ecc

228 	struct edma_cc			*ecc;  member
309 static inline unsigned int edma_read(struct edma_cc *ecc, int offset) in edma_read() argument
311 return (unsigned int)__raw_readl(ecc->base + offset); in edma_read()
314 static inline void edma_write(struct edma_cc *ecc, int offset, int val) in edma_write() argument
316 __raw_writel(val, ecc->base + offset); in edma_write()
319 static inline void edma_modify(struct edma_cc *ecc, int offset, unsigned and, in edma_modify() argument
322 unsigned val = edma_read(ecc, offset); in edma_modify()
326 edma_write(ecc, offset, val); in edma_modify()
329 static inline void edma_and(struct edma_cc *ecc, int offset, unsigned and) in edma_and() argument
331 unsigned val = edma_read(ecc, offset); in edma_and()
334 edma_write(ecc, offset, val); in edma_and()
337 static inline void edma_or(struct edma_cc *ecc, int offset, unsigned or) in edma_or() argument
339 unsigned val = edma_read(ecc, offset); in edma_or()
342 edma_write(ecc, offset, val); in edma_or()
345 static inline unsigned int edma_read_array(struct edma_cc *ecc, int offset, in edma_read_array() argument
348 return edma_read(ecc, offset + (i << 2)); in edma_read_array()
351 static inline void edma_write_array(struct edma_cc *ecc, int offset, int i, in edma_write_array() argument
354 edma_write(ecc, offset + (i << 2), val); in edma_write_array()
357 static inline void edma_modify_array(struct edma_cc *ecc, int offset, int i, in edma_modify_array() argument
360 edma_modify(ecc, offset + (i << 2), and, or); in edma_modify_array()
363 static inline void edma_or_array(struct edma_cc *ecc, int offset, int i, in edma_or_array() argument
366 edma_or(ecc, offset + (i << 2), or); in edma_or_array()
369 static inline void edma_or_array2(struct edma_cc *ecc, int offset, int i, int j, in edma_or_array2() argument
372 edma_or(ecc, offset + ((i * 2 + j) << 2), or); in edma_or_array2()
375 static inline void edma_write_array2(struct edma_cc *ecc, int offset, int i, in edma_write_array2() argument
378 edma_write(ecc, offset + ((i * 2 + j) << 2), val); in edma_write_array2()
381 static inline unsigned int edma_shadow0_read(struct edma_cc *ecc, int offset) in edma_shadow0_read() argument
383 return edma_read(ecc, EDMA_SHADOW0 + offset); in edma_shadow0_read()
386 static inline unsigned int edma_shadow0_read_array(struct edma_cc *ecc, in edma_shadow0_read_array() argument
389 return edma_read(ecc, EDMA_SHADOW0 + offset + (i << 2)); in edma_shadow0_read_array()
392 static inline void edma_shadow0_write(struct edma_cc *ecc, int offset, in edma_shadow0_write() argument
395 edma_write(ecc, EDMA_SHADOW0 + offset, val); in edma_shadow0_write()
398 static inline void edma_shadow0_write_array(struct edma_cc *ecc, int offset, in edma_shadow0_write_array() argument
401 edma_write(ecc, EDMA_SHADOW0 + offset + (i << 2), val); in edma_shadow0_write_array()
404 static inline unsigned int edma_param_read(struct edma_cc *ecc, int offset, in edma_param_read() argument
407 return edma_read(ecc, EDMA_PARM + offset + (param_no << 5)); in edma_param_read()
410 static inline void edma_param_write(struct edma_cc *ecc, int offset, in edma_param_write() argument
413 edma_write(ecc, EDMA_PARM + offset + (param_no << 5), val); in edma_param_write()
416 static inline void edma_param_modify(struct edma_cc *ecc, int offset, in edma_param_modify() argument
419 edma_modify(ecc, EDMA_PARM + offset + (param_no << 5), and, or); in edma_param_modify()
422 static inline void edma_param_and(struct edma_cc *ecc, int offset, int param_no, in edma_param_and() argument
425 edma_and(ecc, EDMA_PARM + offset + (param_no << 5), and); in edma_param_and()
428 static inline void edma_param_or(struct edma_cc *ecc, int offset, int param_no, in edma_param_or() argument
431 edma_or(ecc, EDMA_PARM + offset + (param_no << 5), or); in edma_param_or()
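All of the accessors above funnel through edma_read()/edma_write() and differ only in how they compute the register offset: elements of a 32-bit register array are 4 bytes apart (i << 2), the two-wide per-region arrays step by (i * 2 + j) << 2, shadow-region 0 accesses add a fixed EDMA_SHADOW0 base, and PaRAM accesses step by param_no << 5 because each PaRAM set is 32 bytes. A minimal user-space sketch of that offset arithmetic, with made-up region bases and a plain buffer standing in for the memory-mapped controller:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative region bases; the real EDMA_SHADOW0 and EDMA_PARM offsets
 * differ, and the real accessors use __raw_readl()/__raw_writel() on the
 * ioremapped ecc->base instead of a plain buffer. */
#define DEMO_SHADOW0	0x2000
#define DEMO_PARM	0x4000

static uint8_t regs[0x8000];		/* stand-in for the ecc->base MMIO window */

static uint32_t demo_read(int offset)
{
	uint32_t v;

	memcpy(&v, &regs[offset], sizeof(v));
	return v;
}

static void demo_write(int offset, uint32_t val)
{
	memcpy(&regs[offset], &val, sizeof(val));
}

/* i-th element of a 32-bit register array: 4 bytes per element, hence i << 2 */
static void demo_write_array(int offset, int i, uint32_t val)
{
	demo_write(offset + (i << 2), val);
}

/* [i][j] element of a two-wide register array (DRAE-style): (i * 2 + j) << 2 */
static void demo_write_array2(int offset, int i, int j, uint32_t val)
{
	demo_write(offset + ((i * 2 + j) << 2), val);
}

/* register array inside shadow region 0: fixed region base plus offset */
static uint32_t demo_shadow0_read_array(int offset, int i)
{
	return demo_read(DEMO_SHADOW0 + offset + (i << 2));
}

/* field inside PaRAM set param_no: sets are 32 bytes apart, hence << 5 */
static void demo_param_write(int offset, int param_no, uint32_t val)
{
	demo_write(DEMO_PARM + offset + (param_no << 5), val);
}

int main(void)
{
	demo_write_array(0x100, 3, 0xdeadbeef);		/* lands at 0x10c */
	demo_write_array2(0x200, 1, 1, 0x1);		/* lands at 0x20c */
	demo_param_write(0x04, 2, 0xffff);		/* lands at DEMO_PARM + 0x44 */
	printf("shadow0[0] = 0x%08x\n", demo_shadow0_read_array(0x0, 0));
	return 0;
}
```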
434 static void edma_assign_priority_to_queue(struct edma_cc *ecc, int queue_no, in edma_assign_priority_to_queue() argument
439 edma_modify(ecc, EDMA_QUEPRI, ~(0x7 << bit), ((priority & 0x7) << bit)); in edma_assign_priority_to_queue()
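edma_modify() is an AND-then-OR read-modify-write, so a caller clears a field by passing the complement of its mask and ORs in the new value; edma_assign_priority_to_queue() uses it that way to update a single 3-bit priority field in QUEPRI. A small sketch of the pattern on a plain variable (the 4-bits-per-queue spacing is an assumption for illustration):

```c
#include <stdint.h>
#include <stdio.h>

/* Generic AND-then-OR update, mirroring edma_modify()'s argument order. */
static uint32_t modify(uint32_t reg, uint32_t and_mask, uint32_t or_mask)
{
	reg &= and_mask;
	reg |= or_mask;
	return reg;
}

int main(void)
{
	uint32_t quepri = 0x00000321;	/* pretend queues 0..2 hold priorities 1, 2, 3 */
	int queue_no = 1;
	int priority = 7;
	int bit = queue_no * 4;		/* assumed: one 4-bit field per queue, 3 bits used */

	quepri = modify(quepri, ~(0x7u << bit), (priority & 0x7) << bit);
	printf("QUEPRI = 0x%08x\n", quepri);	/* 0x00000371 */
	return 0;
}
```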
444 struct edma_cc *ecc = echan->ecc; in edma_set_chmap() local
447 if (ecc->chmap_exist) { in edma_set_chmap()
449 edma_write_array(ecc, EDMA_DCHMAP, channel, (slot << 5)); in edma_set_chmap()
455 struct edma_cc *ecc = echan->ecc; in edma_setup_interrupt() local
461 edma_shadow0_write_array(ecc, SH_ICR, idx, ch_bit); in edma_setup_interrupt()
462 edma_shadow0_write_array(ecc, SH_IESR, idx, ch_bit); in edma_setup_interrupt()
464 edma_shadow0_write_array(ecc, SH_IECR, idx, ch_bit); in edma_setup_interrupt()
471 static void edma_write_slot(struct edma_cc *ecc, unsigned slot, in edma_write_slot() argument
475 if (slot >= ecc->num_slots) in edma_write_slot()
477 memcpy_toio(ecc->base + PARM_OFFSET(slot), param, PARM_SIZE); in edma_write_slot()
480 static int edma_read_slot(struct edma_cc *ecc, unsigned slot, in edma_read_slot() argument
484 if (slot >= ecc->num_slots) in edma_read_slot()
486 memcpy_fromio(param, ecc->base + PARM_OFFSET(slot), PARM_SIZE); in edma_read_slot()
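edma_write_slot() and edma_read_slot() copy an entire PaRAM set to or from the controller with memcpy_toio()/memcpy_fromio() after bounds-checking the slot number against num_slots. A hedged sketch with an assumed 32-byte parameter layout and an ordinary buffer in place of the I/O window:

```c
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define DEMO_NUM_SLOTS		128
#define DEMO_PARM_BASE		0x4000
#define DEMO_PARM_OFFSET(s)	(DEMO_PARM_BASE + ((s) << 5))	/* 32 bytes per set */

struct demo_param {
	uint32_t word[8];		/* eight 32-bit PaRAM words = 32 bytes */
};

static uint8_t iomem[0x8000];		/* stand-in for the ecc->base I/O window */

static void demo_write_slot(unsigned slot, const struct demo_param *param)
{
	if (slot >= DEMO_NUM_SLOTS)	/* out-of-range slots are silently ignored */
		return;
	memcpy(&iomem[DEMO_PARM_OFFSET(slot)], param, sizeof(*param));
}

static int demo_read_slot(unsigned slot, struct demo_param *param)
{
	if (slot >= DEMO_NUM_SLOTS)
		return -1;		/* the driver returns -EINVAL here */
	memcpy(param, &iomem[DEMO_PARM_OFFSET(slot)], sizeof(*param));
	return 0;
}

int main(void)
{
	struct demo_param set = { .word = { 0x00100004 } }, back;

	demo_write_slot(5, &set);
	if (demo_read_slot(5, &back) == 0)
		printf("first PaRAM word read back: 0x%08x\n", back.word[0]);
	return 0;
}
```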
493 * @ecc: pointer to edma_cc struct
506 static int edma_alloc_slot(struct edma_cc *ecc, int slot) in edma_alloc_slot() argument
511 if (ecc->chmap_exist && slot < ecc->num_channels) in edma_alloc_slot()
516 if (ecc->chmap_exist) in edma_alloc_slot()
519 slot = ecc->num_channels; in edma_alloc_slot()
521 slot = find_next_zero_bit(ecc->slot_inuse, in edma_alloc_slot()
522 ecc->num_slots, in edma_alloc_slot()
524 if (slot == ecc->num_slots) in edma_alloc_slot()
526 if (!test_and_set_bit(slot, ecc->slot_inuse)) in edma_alloc_slot()
529 } else if (slot >= ecc->num_slots) { in edma_alloc_slot()
531 } else if (test_and_set_bit(slot, ecc->slot_inuse)) { in edma_alloc_slot()
535 edma_write_slot(ecc, slot, &dummy_paramset); in edma_alloc_slot()
537 return EDMA_CTLR_CHAN(ecc->id, slot); in edma_alloc_slot()
540 static void edma_free_slot(struct edma_cc *ecc, unsigned slot) in edma_free_slot() argument
543 if (slot >= ecc->num_slots) in edma_free_slot()
546 edma_write_slot(ecc, slot, &dummy_paramset); in edma_free_slot()
547 clear_bit(slot, ecc->slot_inuse); in edma_free_slot()
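edma_alloc_slot() treats ecc->slot_inuse as a bitmap: for EDMA_SLOT_ANY it scans for a free bit starting past the channel-mapped slots, claims it with test_and_set_bit(), and writes a dummy parameter set into the new slot; an explicit request just checks range and availability, and edma_free_slot() clears the bit again. A simplified single-threaded sketch of that allocate/free scheme (names and constants are placeholders, and the atomic bit operations are reduced to plain flags):

```c
#include <stdbool.h>
#include <stdio.h>

#define DEMO_NUM_SLOTS		64
#define DEMO_NUM_CHANNELS	16
#define DEMO_SLOT_ANY		(-1)

static bool slot_inuse[DEMO_NUM_SLOTS];

/* Allocate a specific slot, or any free slot past the channel-mapped ones. */
static int demo_alloc_slot(int slot)
{
	if (slot == DEMO_SLOT_ANY) {
		for (slot = DEMO_NUM_CHANNELS; slot < DEMO_NUM_SLOTS; slot++) {
			if (!slot_inuse[slot])
				break;			/* first free slot */
		}
		if (slot == DEMO_NUM_SLOTS)
			return -1;			/* bitmap exhausted */
	} else if (slot >= DEMO_NUM_SLOTS || slot_inuse[slot]) {
		return -1;				/* out of range or busy */
	}
	slot_inuse[slot] = true;
	/* the real driver also writes a dummy PaRAM set into the slot here */
	return slot;
}

static void demo_free_slot(unsigned slot)
{
	if (slot >= DEMO_NUM_SLOTS)
		return;
	slot_inuse[slot] = false;	/* the real driver clears the PaRAM set too */
}

int main(void)
{
	int a = demo_alloc_slot(DEMO_SLOT_ANY);		/* 16: first non-channel slot */
	int b = demo_alloc_slot(40);			/* explicit request */

	printf("a=%d b=%d\n", a, b);
	demo_free_slot(a);
	demo_free_slot(b);
	return 0;
}
```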
552 * @ecc: pointer to edma_cc struct
558 static void edma_link(struct edma_cc *ecc, unsigned from, unsigned to) in edma_link() argument
561 dev_warn(ecc->dev, "Ignoring eDMA instance for linking\n"); in edma_link()
565 if (from >= ecc->num_slots || to >= ecc->num_slots) in edma_link()
568 edma_param_modify(ecc, PARM_LINK_BCNTRLD, from, 0xffff0000, in edma_link()
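edma_link() chains two PaRAM slots by rewriting only the low half of the 'from' slot's LINK_BCNTRLD word: the 0xffff0000 AND mask preserves the reload count in the upper 16 bits while the byte offset of the 'to' slot is OR-ed into the 16-bit link field. A sketch of that field update on a plain 32-bit word (the PaRAM base value here is made up):

```c
#include <stdint.h>
#include <stdio.h>

#define DEMO_PARM_BASE		0x4000
#define DEMO_PARM_OFFSET(s)	(DEMO_PARM_BASE + ((s) << 5))	/* byte offset of slot s */

/* LINK_BCNTRLD word: upper 16 bits = BCNTRLD, lower 16 bits = link address */
static uint32_t demo_link(uint32_t link_bcntrld, unsigned to)
{
	link_bcntrld &= 0xffff0000;				/* keep BCNTRLD */
	link_bcntrld |= DEMO_PARM_OFFSET(to) & 0xffff;		/* point at next slot */
	return link_bcntrld;
}

int main(void)
{
	uint32_t word = 0x00200000 | 0xffff;	/* BCNTRLD = 0x20, link = 0xffff (end) */

	word = demo_link(word, 65);		/* chain to slot 65 */
	printf("LINK_BCNTRLD = 0x%08x\n", word);	/* 0x00204820 */
	return 0;
}
```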
574 * @ecc: pointer to edma_cc struct
580 static dma_addr_t edma_get_position(struct edma_cc *ecc, unsigned slot, in edma_get_position() argument
589 return edma_read(ecc, offs); in edma_get_position()
600 struct edma_cc *ecc = echan->ecc; in edma_start() local
607 dev_dbg(ecc->dev, "ESR%d %08x\n", idx, in edma_start()
608 edma_shadow0_read_array(ecc, SH_ESR, idx)); in edma_start()
609 edma_shadow0_write_array(ecc, SH_ESR, idx, ch_bit); in edma_start()
612 dev_dbg(ecc->dev, "ER%d %08x\n", idx, in edma_start()
613 edma_shadow0_read_array(ecc, SH_ER, idx)); in edma_start()
615 edma_write_array(ecc, EDMA_ECR, idx, ch_bit); in edma_start()
616 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit); in edma_start()
618 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit); in edma_start()
619 edma_shadow0_write_array(ecc, SH_EESR, idx, ch_bit); in edma_start()
620 dev_dbg(ecc->dev, "EER%d %08x\n", idx, in edma_start()
621 edma_shadow0_read_array(ecc, SH_EER, idx)); in edma_start()
627 struct edma_cc *ecc = echan->ecc; in edma_stop() local
632 edma_shadow0_write_array(ecc, SH_EECR, idx, ch_bit); in edma_stop()
633 edma_shadow0_write_array(ecc, SH_ECR, idx, ch_bit); in edma_stop()
634 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit); in edma_stop()
635 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit); in edma_stop()
638 edma_shadow0_write_array(ecc, SH_ICR, idx, ch_bit); in edma_stop()
640 dev_dbg(ecc->dev, "EER%d %08x\n", idx, in edma_stop()
641 edma_shadow0_read_array(ecc, SH_EER, idx)); in edma_stop()
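edma_start() and edma_stop() address the event, event-enable and interrupt registers in 32-channel banks: idx picks the 32-bit register of the array and ch_bit is the channel's bit within it. Assuming EDMA_REG_ARRAY_INDEX() and EDMA_CHANNEL_BIT() are the usual divide- and modulo-by-32 helpers (an assumption inferred from the call pattern above), the math looks like this:

```c
#include <stdio.h>
#include <stdint.h>

/* Assumed equivalents of EDMA_REG_ARRAY_INDEX() / EDMA_CHANNEL_BIT(). */
#define DEMO_REG_ARRAY_INDEX(ch)	((ch) >> 5)			/* which 32-bit bank   */
#define DEMO_CHANNEL_BIT(ch)		(1u << ((ch) & 0x1f))		/* bit inside the bank */

int main(void)
{
	for (int channel = 0; channel < 64; channel += 31) {
		int idx = DEMO_REG_ARRAY_INDEX(channel);
		uint32_t ch_bit = DEMO_CHANNEL_BIT(channel);

		/* e.g. edma_shadow0_write_array(ecc, SH_EESR, idx, ch_bit) */
		printf("channel %2d -> bank %d, bit mask 0x%08x\n",
		       channel, idx, ch_bit);
	}
	return 0;
}
```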
656 edma_shadow0_write_array(echan->ecc, SH_EECR, in edma_pause()
666 edma_shadow0_write_array(echan->ecc, SH_EESR, in edma_resume()
673 struct edma_cc *ecc = echan->ecc; in edma_trigger_channel() local
678 edma_shadow0_write_array(ecc, SH_ESR, idx, ch_bit); in edma_trigger_channel()
680 dev_dbg(ecc->dev, "ESR%d %08x\n", idx, in edma_trigger_channel()
681 edma_shadow0_read_array(ecc, SH_ESR, idx)); in edma_trigger_channel()
686 struct edma_cc *ecc = echan->ecc; in edma_clean_channel() local
691 dev_dbg(ecc->dev, "EMR%d %08x\n", idx, in edma_clean_channel()
692 edma_read_array(ecc, EDMA_EMR, idx)); in edma_clean_channel()
693 edma_shadow0_write_array(ecc, SH_ECR, idx, ch_bit); in edma_clean_channel()
695 edma_write_array(ecc, EDMA_EMCR, idx, ch_bit); in edma_clean_channel()
697 edma_shadow0_write_array(ecc, SH_SECR, idx, ch_bit); in edma_clean_channel()
698 edma_write(ecc, EDMA_CCERRCLR, BIT(16) | BIT(1) | BIT(0)); in edma_clean_channel()
705 struct edma_cc *ecc = echan->ecc; in edma_assign_channel_eventq() local
711 eventq_no = ecc->default_queue; in edma_assign_channel_eventq()
712 if (eventq_no >= ecc->num_tc) in edma_assign_channel_eventq()
716 edma_modify_array(ecc, EDMA_DMAQNUM, (channel >> 3), ~(0x7 << bit), in edma_assign_channel_eventq()
723 struct edma_cc *ecc = echan->ecc; in edma_alloc_channel() local
726 if (!test_bit(echan->ch_num, ecc->channels_mask)) { in edma_alloc_channel()
727 dev_err(ecc->dev, "Channel%d is reserved, can not be used!\n", in edma_alloc_channel()
733 edma_or_array2(ecc, EDMA_DRAE, 0, EDMA_REG_ARRAY_INDEX(channel), in edma_alloc_channel()
777 struct edma_cc *ecc = echan->ecc; in edma_execute() local
802 edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param); in edma_execute()
827 edma_link(ecc, echan->slot[i], echan->slot[i + 1]); in edma_execute()
839 edma_link(ecc, echan->slot[nslots - 1], echan->slot[1]); in edma_execute()
841 edma_link(ecc, echan->slot[nslots - 1], in edma_execute()
842 echan->ecc->dummy_slot); in edma_execute()
1116 edma_alloc_slot(echan->ecc, EDMA_SLOT_ANY); in edma_prep_slave_sg()
1246 echan->slot[1] = edma_alloc_slot(echan->ecc, in edma_prep_dma_memcpy()
1438 edma_alloc_slot(echan->ecc, EDMA_SLOT_ANY); in edma_prep_dma_cyclic()
1550 struct edma_cc *ecc = data; in dma_irq_handler() local
1556 ctlr = ecc->id; in dma_irq_handler()
1560 dev_vdbg(ecc->dev, "dma_irq_handler\n"); in dma_irq_handler()
1562 sh_ipr = edma_shadow0_read_array(ecc, SH_IPR, 0); in dma_irq_handler()
1564 sh_ipr = edma_shadow0_read_array(ecc, SH_IPR, 1); in dma_irq_handler()
1567 sh_ier = edma_shadow0_read_array(ecc, SH_IER, 1); in dma_irq_handler()
1570 sh_ier = edma_shadow0_read_array(ecc, SH_IER, 0); in dma_irq_handler()
1584 edma_shadow0_write_array(ecc, SH_ICR, bank, BIT(slot)); in dma_irq_handler()
1585 edma_completion_handler(&ecc->slave_chans[channel]); in dma_irq_handler()
1589 edma_shadow0_write(ecc, SH_IEVAL, 1); in dma_irq_handler()
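dma_irq_handler() reads the shadow-region pending register (SH_IPR, one 32-bit bank per 32 channels), masks it with the enabled interrupts from SH_IER, turns each set bit into a channel number, acknowledges it through SH_ICR and dispatches the completion handler, and finally writes SH_IEVAL so any event that arrived in the meantime is re-evaluated. A simplified sketch of that scan over plain variables (helper names here are hypothetical):

```c
#include <stdint.h>
#include <stdio.h>

static void demo_completion_handler(int channel)
{
	printf("transfer complete on channel %d\n", channel);
}

/* Scan one 32-bit pending bank, acknowledging each serviced bit. */
static void demo_service_bank(int bank, uint32_t *pending, uint32_t enabled)
{
	uint32_t work = *pending & enabled;

	while (work) {
		int slot = __builtin_ctz(work);		/* lowest pending bit   */
		int channel = (bank << 5) + slot;	/* bank * 32 + bit pos  */

		*pending &= ~(1u << slot);		/* models the SH_ICR ack */
		work &= ~(1u << slot);
		demo_completion_handler(channel);
	}
}

int main(void)
{
	uint32_t sh_ipr[2] = { 0x00000005, 0x80000000 };	/* channels 0, 2, 63 */
	uint32_t sh_ier[2] = { 0xffffffff, 0xffffffff };

	for (int bank = 0; bank < 2; bank++)
		demo_service_bank(bank, &sh_ipr[bank], sh_ier[bank]);
	return 0;
}
```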
1595 struct edma_cc *ecc = echan->ecc; in edma_error_handler() local
1605 err = edma_read_slot(ecc, echan->slot[0], &p); in edma_error_handler()
1636 static inline bool edma_error_pending(struct edma_cc *ecc) in edma_error_pending() argument
1638 if (edma_read_array(ecc, EDMA_EMR, 0) || in edma_error_pending()
1639 edma_read_array(ecc, EDMA_EMR, 1) || in edma_error_pending()
1640 edma_read(ecc, EDMA_QEMR) || edma_read(ecc, EDMA_CCERR)) in edma_error_pending()
1649 struct edma_cc *ecc = data; in dma_ccerr_handler() local
1655 ctlr = ecc->id; in dma_ccerr_handler()
1659 dev_vdbg(ecc->dev, "dma_ccerr_handler\n"); in dma_ccerr_handler()
1661 if (!edma_error_pending(ecc)) { in dma_ccerr_handler()
1667 dev_err(ecc->dev, "%s: Error interrupt without error event!\n", in dma_ccerr_handler()
1669 edma_write(ecc, EDMA_EEVAL, 1); in dma_ccerr_handler()
1678 val = edma_read_array(ecc, EDMA_EMR, j); in dma_ccerr_handler()
1682 dev_dbg(ecc->dev, "EMR%d 0x%08x\n", j, val); in dma_ccerr_handler()
1689 edma_write_array(ecc, EDMA_EMCR, j, BIT(i)); in dma_ccerr_handler()
1691 edma_shadow0_write_array(ecc, SH_SECR, j, in dma_ccerr_handler()
1693 edma_error_handler(&ecc->slave_chans[k]); in dma_ccerr_handler()
1697 val = edma_read(ecc, EDMA_QEMR); in dma_ccerr_handler()
1699 dev_dbg(ecc->dev, "QEMR 0x%02x\n", val); in dma_ccerr_handler()
1701 edma_write(ecc, EDMA_QEMCR, val); in dma_ccerr_handler()
1702 edma_shadow0_write(ecc, SH_QSECR, val); in dma_ccerr_handler()
1705 val = edma_read(ecc, EDMA_CCERR); in dma_ccerr_handler()
1707 dev_warn(ecc->dev, "CCERR 0x%08x\n", val); in dma_ccerr_handler()
1709 edma_write(ecc, EDMA_CCERRCLR, val); in dma_ccerr_handler()
1712 if (!edma_error_pending(ecc)) in dma_ccerr_handler()
1718 edma_write(ecc, EDMA_EEVAL, 1); in dma_ccerr_handler()
1726 struct edma_cc *ecc = echan->ecc; in edma_alloc_chan_resources() local
1727 struct device *dev = ecc->dev; in edma_alloc_chan_resources()
1733 } else if (ecc->tc_list) { in edma_alloc_chan_resources()
1735 echan->tc = &ecc->tc_list[ecc->info->default_queue]; in edma_alloc_chan_resources()
1743 echan->slot[0] = edma_alloc_slot(ecc, echan->ch_num); in edma_alloc_chan_resources()
1770 struct device *dev = echan->ecc->dev; in edma_free_chan_resources()
1781 edma_free_slot(echan->ecc, echan->slot[i]); in edma_free_chan_resources()
1787 edma_set_chmap(echan, echan->ecc->dummy_slot); in edma_free_chan_resources()
1840 pos = edma_get_position(echan->ecc, echan->slot[0], dst); in edma_residue()
1856 while (edma_shadow0_read_array(echan->ecc, event_reg, idx) & ch_bit) { in edma_residue()
1857 pos = edma_get_position(echan->ecc, echan->slot[0], dst); in edma_residue()
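The loop in edma_residue() re-reads the slot's current source or destination address from edma_get_position() for as long as the channel's event bit is still set, and only the final value is used, so the residue is not computed from a position the hardware is still about to move. A very rough sketch of that retry pattern with a fake event flag and position register (the exact race the driver guards against is not shown in these lines):

```c
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Fake hardware state: an event bit that clears after a few polls and a
 * position register that keeps advancing while the event is pending. */
static int polls_left = 3;
static uint32_t hw_pos = 0x1000;

static bool demo_event_pending(void)
{
	if (polls_left > 0) {
		polls_left--;
		hw_pos += 0x100;	/* position still moving */
		return true;
	}
	return false;
}

static uint32_t demo_get_position(void)
{
	return hw_pos;
}

int main(void)
{
	uint32_t pos = demo_get_position();

	/* Mirror of the visible edma_residue() loop: keep re-reading the
	 * position while the channel's event bit is set, use the last value. */
	while (demo_event_pending())
		pos = demo_get_position();

	printf("stable position: 0x%x\n", pos);
	return 0;
}
```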
1980 static void edma_dma_init(struct edma_cc *ecc, bool legacy_mode) in edma_dma_init() argument
1982 struct dma_device *s_ddev = &ecc->dma_slave; in edma_dma_init()
1984 s32 *memcpy_channels = ecc->info->memcpy_channels; in edma_dma_init()
1990 if (ecc->legacy_mode && !memcpy_channels) { in edma_dma_init()
1991 dev_warn(ecc->dev, in edma_dma_init()
2019 s_ddev->dev = ecc->dev; in edma_dma_init()
2023 m_ddev = devm_kzalloc(ecc->dev, sizeof(*m_ddev), GFP_KERNEL); in edma_dma_init()
2025 dev_warn(ecc->dev, "memcpy is disabled due to OoM\n"); in edma_dma_init()
2029 ecc->dma_memcpy = m_ddev; in edma_dma_init()
2052 m_ddev->dev = ecc->dev; in edma_dma_init()
2054 } else if (!ecc->legacy_mode) { in edma_dma_init()
2055 dev_info(ecc->dev, "memcpy is disabled\n"); in edma_dma_init()
2059 for (i = 0; i < ecc->num_channels; i++) { in edma_dma_init()
2060 struct edma_chan *echan = &ecc->slave_chans[i]; in edma_dma_init()
2061 echan->ch_num = EDMA_CTLR_CHAN(ecc->id, i); in edma_dma_init()
2062 echan->ecc = ecc; in edma_dma_init()
2077 struct edma_cc *ecc) in edma_setup_from_hw() argument
2084 cccfg = edma_read(ecc, EDMA_CCCFG); in edma_setup_from_hw()
2087 ecc->num_region = BIT(value); in edma_setup_from_hw()
2090 ecc->num_channels = BIT(value + 1); in edma_setup_from_hw()
2093 ecc->num_qchannels = value * 2; in edma_setup_from_hw()
2096 ecc->num_slots = BIT(value + 4); in edma_setup_from_hw()
2099 ecc->num_tc = value + 1; in edma_setup_from_hw()
2101 ecc->chmap_exist = (cccfg & CHMAP_EXIST) ? true : false; in edma_setup_from_hw()
2104 dev_dbg(dev, "num_region: %u\n", ecc->num_region); in edma_setup_from_hw()
2105 dev_dbg(dev, "num_channels: %u\n", ecc->num_channels); in edma_setup_from_hw()
2106 dev_dbg(dev, "num_qchannels: %u\n", ecc->num_qchannels); in edma_setup_from_hw()
2107 dev_dbg(dev, "num_slots: %u\n", ecc->num_slots); in edma_setup_from_hw()
2108 dev_dbg(dev, "num_tc: %u\n", ecc->num_tc); in edma_setup_from_hw()
2109 dev_dbg(dev, "chmap_exist: %s\n", ecc->chmap_exist ? "yes" : "no"); in edma_setup_from_hw()
2125 queue_priority_map = devm_kcalloc(dev, ecc->num_tc + 1, sizeof(s8), in edma_setup_from_hw()
2130 for (i = 0; i < ecc->num_tc; i++) { in edma_setup_from_hw()
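edma_setup_from_hw() sizes the driver from the CCCFG register instead of platform data: the encoded fields expand as powers of two (BIT(value), BIT(value + 1), BIT(value + 4)), the QDMA channel count is value * 2, num_tc is value + 1, and CHMAP_EXIST reports whether the channel-map registers are present. A sketch of just that expansion arithmetic (which CCCFG bits each field occupies is omitted, and the sample input values are invented):

```c
#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

#define DEMO_BIT(n) (1u << (n))

struct demo_cc_sizes {
	unsigned num_region, num_channels, num_qchannels, num_slots, num_tc;
	bool chmap_exist;
};

/* Expand already-extracted CCCFG field values the way the lines above do;
 * the real driver pulls them out with GET_NUM_*() macros first. */
static struct demo_cc_sizes demo_decode(unsigned numregn, unsigned numdmach,
					unsigned numqdmach, unsigned numpaentry,
					unsigned numevque, bool chmap)
{
	struct demo_cc_sizes s = {
		.num_region    = DEMO_BIT(numregn),		/* 2^value       */
		.num_channels  = DEMO_BIT(numdmach + 1),	/* 2^(value + 1) */
		.num_qchannels = numqdmach * 2,			/* value * 2     */
		.num_slots     = DEMO_BIT(numpaentry + 4),	/* 16 * 2^value  */
		.num_tc        = numevque + 1,			/* value + 1     */
		.chmap_exist   = chmap,
	};
	return s;
}

int main(void)
{
	/* invented encoded values: 4 regions, 64 channels, 8 QDMA channels,
	 * 256 PaRAM slots, 3 TCs */
	struct demo_cc_sizes s = demo_decode(2, 5, 4, 4, 2, true);

	printf("regions=%u channels=%u qchannels=%u slots=%u tc=%u chmap=%s\n",
	       s.num_region, s.num_channels, s.num_qchannels, s.num_slots,
	       s.num_tc, s.chmap_exist ? "yes" : "no");
	return 0;
}
```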
2290 struct edma_cc *ecc = ofdma->of_dma_data; in of_edma_xlate() local
2295 if (!ecc || dma_spec->args_count < 1) in of_edma_xlate()
2298 for (i = 0; i < ecc->num_channels; i++) { in of_edma_xlate()
2299 echan = &ecc->slave_chans[i]; in of_edma_xlate()
2309 if (echan->ecc->legacy_mode && dma_spec->args_count == 1) in of_edma_xlate()
2312 if (!echan->ecc->legacy_mode && dma_spec->args_count == 2 && in of_edma_xlate()
2313 dma_spec->args[1] < echan->ecc->num_tc) { in of_edma_xlate()
2314 echan->tc = &echan->ecc->tc_list[dma_spec->args[1]]; in of_edma_xlate()
2350 struct edma_cc *ecc; in edma_probe() local
2375 ecc = devm_kzalloc(dev, sizeof(*ecc), GFP_KERNEL); in edma_probe()
2376 if (!ecc) in edma_probe()
2379 ecc->dev = dev; in edma_probe()
2380 ecc->id = pdev->id; in edma_probe()
2381 ecc->legacy_mode = legacy_mode; in edma_probe()
2383 if (ecc->id < 0) in edma_probe()
2384 ecc->id = 0; in edma_probe()
2395 ecc->base = devm_ioremap_resource(dev, mem); in edma_probe()
2396 if (IS_ERR(ecc->base)) in edma_probe()
2397 return PTR_ERR(ecc->base); in edma_probe()
2399 platform_set_drvdata(pdev, ecc); in edma_probe()
2410 ret = edma_setup_from_hw(dev, info, ecc); in edma_probe()
2415 ecc->slave_chans = devm_kcalloc(dev, ecc->num_channels, in edma_probe()
2416 sizeof(*ecc->slave_chans), GFP_KERNEL); in edma_probe()
2418 ecc->slot_inuse = devm_kcalloc(dev, BITS_TO_LONGS(ecc->num_slots), in edma_probe()
2421 ecc->channels_mask = devm_kcalloc(dev, in edma_probe()
2422 BITS_TO_LONGS(ecc->num_channels), in edma_probe()
2424 if (!ecc->slave_chans || !ecc->slot_inuse || !ecc->channels_mask) { in edma_probe()
2430 bitmap_fill(ecc->channels_mask, ecc->num_channels); in edma_probe()
2432 ecc->default_queue = info->default_queue; in edma_probe()
2439 bitmap_set(ecc->slot_inuse, reserved[i][0], in edma_probe()
2447 bitmap_clear(ecc->channels_mask, reserved[i][0], in edma_probe()
2452 for (i = 0; i < ecc->num_slots; i++) { in edma_probe()
2454 if (!test_bit(i, ecc->slot_inuse)) in edma_probe()
2455 edma_write_slot(ecc, i, &dummy_paramset); in edma_probe()
2466 ecc); in edma_probe()
2471 ecc->ccint = irq; in edma_probe()
2482 ecc); in edma_probe()
2487 ecc->ccerrint = irq; in edma_probe()
2490 ecc->dummy_slot = edma_alloc_slot(ecc, EDMA_SLOT_ANY); in edma_probe()
2491 if (ecc->dummy_slot < 0) { in edma_probe()
2493 ret = ecc->dummy_slot; in edma_probe()
2499 if (!ecc->legacy_mode) { in edma_probe()
2504 ecc->tc_list = devm_kcalloc(dev, ecc->num_tc, in edma_probe()
2505 sizeof(*ecc->tc_list), GFP_KERNEL); in edma_probe()
2506 if (!ecc->tc_list) { in edma_probe()
2514 if (ret || i == ecc->num_tc) in edma_probe()
2517 ecc->tc_list[i].node = tc_args.np; in edma_probe()
2518 ecc->tc_list[i].id = i; in edma_probe()
2527 array_max = DIV_ROUND_UP(ecc->num_channels, BITS_PER_TYPE(u32)); in edma_probe()
2530 (u32 *)ecc->channels_mask, in edma_probe()
2541 edma_assign_priority_to_queue(ecc, queue_priority_mapping[i][0], in edma_probe()
2544 edma_write_array2(ecc, EDMA_DRAE, 0, 0, 0x0); in edma_probe()
2545 edma_write_array2(ecc, EDMA_DRAE, 0, 1, 0x0); in edma_probe()
2546 edma_write_array(ecc, EDMA_QRAE, 0, 0x0); in edma_probe()
2548 ecc->info = info; in edma_probe()
2551 edma_dma_init(ecc, legacy_mode); in edma_probe()
2553 for (i = 0; i < ecc->num_channels; i++) { in edma_probe()
2555 if (!test_bit(i, ecc->channels_mask)) in edma_probe()
2559 edma_assign_channel_eventq(&ecc->slave_chans[i], in edma_probe()
2562 edma_set_chmap(&ecc->slave_chans[i], ecc->dummy_slot); in edma_probe()
2565 ecc->dma_slave.filter.map = info->slave_map; in edma_probe()
2566 ecc->dma_slave.filter.mapcnt = info->slavecnt; in edma_probe()
2567 ecc->dma_slave.filter.fn = edma_filter_fn; in edma_probe()
2569 ret = dma_async_device_register(&ecc->dma_slave); in edma_probe()
2575 if (ecc->dma_memcpy) { in edma_probe()
2576 ret = dma_async_device_register(ecc->dma_memcpy); in edma_probe()
2580 dma_async_device_unregister(&ecc->dma_slave); in edma_probe()
2586 of_dma_controller_register(node, of_edma_xlate, ecc); in edma_probe()
2593 edma_free_slot(ecc, ecc->dummy_slot); in edma_probe()
2614 struct edma_cc *ecc = dev_get_drvdata(dev); in edma_remove() local
2616 devm_free_irq(dev, ecc->ccint, ecc); in edma_remove()
2617 devm_free_irq(dev, ecc->ccerrint, ecc); in edma_remove()
2619 edma_cleanupp_vchan(&ecc->dma_slave); in edma_remove()
2623 dma_async_device_unregister(&ecc->dma_slave); in edma_remove()
2624 if (ecc->dma_memcpy) in edma_remove()
2625 dma_async_device_unregister(ecc->dma_memcpy); in edma_remove()
2626 edma_free_slot(ecc, ecc->dummy_slot); in edma_remove()
2636 struct edma_cc *ecc = dev_get_drvdata(dev); in edma_pm_suspend() local
2637 struct edma_chan *echan = ecc->slave_chans; in edma_pm_suspend()
2640 for (i = 0; i < ecc->num_channels; i++) { in edma_pm_suspend()
2650 struct edma_cc *ecc = dev_get_drvdata(dev); in edma_pm_resume() local
2651 struct edma_chan *echan = ecc->slave_chans; in edma_pm_resume()
2656 edma_write_slot(ecc, ecc->dummy_slot, &dummy_paramset); in edma_pm_resume()
2658 queue_priority_mapping = ecc->info->queue_priority_mapping; in edma_pm_resume()
2662 edma_assign_priority_to_queue(ecc, queue_priority_mapping[i][0], in edma_pm_resume()
2665 for (i = 0; i < ecc->num_channels; i++) { in edma_pm_resume()
2668 edma_or_array2(ecc, EDMA_DRAE, 0, in edma_pm_resume()