Excerpts matching +full:chip +full:- +full:id from the Synopsys DesignWare AXI
DMA controller platform driver (dw-axi-dmac-platform.c). Fragments are grouped
by function; elided source lines are marked with "...".

// SPDX-License-Identifier: GPL-2.0
// (C) 2017-2018 Synopsys, Inc. (www.synopsys.com)

#include <linux/dma-mapping.h>
#include <linux/io-64-nonatomic-lo-hi.h>

#include "dw-axi-dmac.h"
#include "../virt-dma.h"

static inline void
axi_dma_iowrite32(struct axi_dma_chip *chip, u32 reg, u32 val)
{
	iowrite32(val, chip->regs + reg);
}

static inline u32 axi_dma_ioread32(struct axi_dma_chip *chip, u32 reg)
{
	return ioread32(chip->regs + reg);
}

static inline void
axi_dma_iowrite64(struct axi_dma_chip *chip, u32 reg, u64 val)
{
	iowrite64(val, chip->regs + reg);
}

static inline u64 axi_dma_ioread64(struct axi_dma_chip *chip, u32 reg)
{
	return ioread64(chip->regs + reg);
}

static inline void
axi_chan_iowrite32(struct axi_dma_chan *chan, u32 reg, u32 val)
{
	iowrite32(val, chan->chan_regs + reg);
}

static inline u32 axi_chan_ioread32(struct axi_dma_chan *chan, u32 reg)
{
	return ioread32(chan->chan_regs + reg);
}

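/*
 * Note: the 64-bit channel-register write below is deliberately split into
 * two 32-bit writes; per the upstream driver, some hardware configurations
 * cannot handle a single 64-bit MMIO access.
 */
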
static inline void
axi_chan_iowrite64(struct axi_dma_chan *chan, u32 reg, u64 val)
{
	iowrite32(lower_32_bits(val), chan->chan_regs + reg);
	iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
}

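/*
 * Channel configuration uses one of two layouts: the legacy 8-channel
 * register map keeps the handshake interface numbers and priority in
 * CH_CFG_H, while configurations flagged use_cfg2 move src_per/dst_per
 * into CH_CFG_L and use the CH_CFG2_* field positions instead.
 */
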
static inline void axi_chan_config_write(struct axi_dma_chan *chan,
					 struct axi_dma_chan_config *config)
{
	u32 cfg_lo, cfg_hi;

	cfg_lo = (config->dst_multblk_type << CH_CFG_L_DST_MULTBLK_TYPE_POS |
		  config->src_multblk_type << CH_CFG_L_SRC_MULTBLK_TYPE_POS);
	if (chan->chip->dw->hdata->reg_map_8_channels &&
	    !chan->chip->dw->hdata->use_cfg2) {
		cfg_hi = config->tt_fc << CH_CFG_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG_H_HS_SEL_DST_POS |
			 config->src_per << CH_CFG_H_SRC_PER_POS |
			 config->dst_per << CH_CFG_H_DST_PER_POS |
			 config->prior << CH_CFG_H_PRIORITY_POS;
	} else {
		cfg_lo |= config->src_per << CH_CFG2_L_SRC_PER_POS |
			  config->dst_per << CH_CFG2_L_DST_PER_POS;
		cfg_hi = config->tt_fc << CH_CFG2_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG2_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG2_H_HS_SEL_DST_POS |
			 config->prior << CH_CFG2_H_PRIORITY_POS;
	}
	axi_chan_iowrite32(chan, CH_CFG_L, cfg_lo);
	axi_chan_iowrite32(chan, CH_CFG_H, cfg_hi);
}

static inline void axi_dma_disable(struct axi_dma_chip *chip)
{
	u32 val = axi_dma_ioread32(chip, DMAC_CFG);

	val &= ~DMAC_EN_MASK;
	axi_dma_iowrite32(chip, DMAC_CFG, val);
}

static inline void axi_dma_enable(struct axi_dma_chip *chip)
{
	u32 val = axi_dma_ioread32(chip, DMAC_CFG);

	val |= DMAC_EN_MASK;
	axi_dma_iowrite32(chip, DMAC_CFG, val);
}

static inline void axi_dma_irq_disable(struct axi_dma_chip *chip)
{
	u32 val = axi_dma_ioread32(chip, DMAC_CFG);

	val &= ~DMAC_INT_EN_MASK;
	axi_dma_iowrite32(chip, DMAC_CFG, val);
}

static inline void axi_dma_irq_enable(struct axi_dma_chip *chip)
{
	u32 val = axi_dma_ioread32(chip, DMAC_CFG);

	val |= DMAC_INT_EN_MASK;
	axi_dma_iowrite32(chip, DMAC_CFG, val);
}

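/*
 * The enable fields in CHEN are paired with write-enable (WE) bits: a
 * written enable bit only takes effect if its WE companion is set in the
 * same write, so a single channel can be enabled or disabled without a
 * read-modify-write race against the other channels. The legacy 8-channel
 * map uses DMAC_CHAN_EN_WE_SHIFT, newer maps DMAC_CHAN_EN2_WE_SHIFT.
 */
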
static inline void axi_chan_disable(struct axi_dma_chan *chan)
{
	u64 val;

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
		if (chan->id >= DMAC_CHAN_16) {
			val &= ~((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		} else {
			val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
			val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
		if (chan->chip->dw->hdata->reg_map_8_channels)
			val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
		else
			val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
	}
}

static inline void axi_chan_enable(struct axi_dma_chan *chan)
{
	u64 val;

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
		if (chan->id >= DMAC_CHAN_16)
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN_SHIFT + DMAC_CHAN_BLOCK_SHIFT) |
				(u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		else
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		axi_dma_iowrite64(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
		} else {
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		}
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
	}
}

static inline bool axi_chan_is_hw_enable(struct axi_dma_chan *chan)
{
	u64 val;

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16)
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
	else
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);

	if (chan->id >= DMAC_CHAN_16)
		return !!(val & ((u64)(BIT(chan->id) >> DMAC_CHAN_16) << DMAC_CHAN_BLOCK_SHIFT));
	else
		return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT));
}

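/*
 * Note on the >= 16 channel layout used above: CHEN grows to 64 bits and
 * the fields for channels 16..31 live one register "block" higher, which
 * is why the code rebases the bit with BIT(chan->id) >> DMAC_CHAN_16 and
 * then adds DMAC_CHAN_BLOCK_SHIFT on top of the usual field offset.
 */
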
static void axi_dma_hw_init(struct axi_dma_chip *chip)
{
	int ret;
	u32 i;

	for (i = 0; i < chip->dw->hdata->nr_channels; i++) {
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
		axi_chan_disable(&chip->dw->chan[i]);
	}
	ret = dma_set_mask_and_coherent(chip->dev, DMA_BIT_MASK(64));
	if (ret)
		dev_warn(chip->dev, "Unable to set coherent mask\n");
}

/* in axi_chan_get_xfer_width() */
	u32 max_width = chan->chip->dw->hdata->m_data_width;

/* in axi_chan_name() */
	return dma_chan_name(&chan->vc.chan);

/* in axi_desc_alloc() */
	desc->hw_desc = kcalloc(num, sizeof(*desc->hw_desc), GFP_NOWAIT);
	if (!desc->hw_desc) {
		kfree(desc);
		return NULL;
	}

/* in axi_desc_get() */
	lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys);
	...
	atomic_inc(&chan->descs_allocated);

/* in axi_desc_put() */
	struct axi_dma_chan *chan = desc->chan;
	int count = atomic_read(&chan->descs_allocated);
	...
	for (descs_put = 0; descs_put < count; descs_put++) {
		hw_desc = &desc->hw_desc[descs_put];
		dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp);
	}

	kfree(desc->hw_desc);
	...
	atomic_sub(descs_put, &chan->descs_allocated);
	...
		 atomic_read(&chan->descs_allocated));

/* in dma_chan_tx_status() */
	spin_lock_irqsave(&chan->vc.lock, flags);

	vdesc = vchan_find_desc(&chan->vc, cookie);
	if (vdesc) {
		length = vd_to_axi_desc(vdesc)->length;
		completed_blocks = vd_to_axi_desc(vdesc)->completed_blocks;
		len = vd_to_axi_desc(vdesc)->hw_desc[0].len;
		completed_length = completed_blocks * len;
		bytes = length - completed_length;
	}

	spin_unlock_irqrestore(&chan->vc.lock, flags);

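/*
 * Residue bookkeeping: completed_blocks counts finished hw descriptors,
 * and the driver approximates the completed byte count as
 * completed_blocks * hw_desc[0].len, i.e. residue is reported at block
 * granularity rather than per byte.
 */
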
/* in write_desc_llp() */
	desc->lli->llp = cpu_to_le64(adr);

static void dw_axi_dma_set_byte_halfword(struct axi_dma_chan *chan, bool set)
{
	u32 offset = DMAC_APB_BYTE_WR_CH_EN;
	u32 reg_width, val;

	if (!chan->chip->apb_regs) {
		dev_dbg(chan->chip->dev, "apb_regs not initialized\n");
		return;
	}

	reg_width = __ffs(chan->config.dst_addr_width);
	if (reg_width == DWAXIDMAC_TRANS_WIDTH_16)
		offset = DMAC_APB_HALFWORD_WR_CH_EN;

	val = ioread32(chan->chip->apb_regs + offset);
	if (set)
		val |= BIT(chan->id);
	else
		val &= ~BIT(chan->id);

	iowrite32(val, chan->chip->apb_regs + offset);
}

/* in axi_chan_block_xfer_start() */
	u32 priority = chan->chip->dw->hdata->priority[chan->id];
	...
	if (unlikely(axi_chan_is_hw_enable(chan))) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return;
	}

	axi_dma_enable(chan->chip);
	...
	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		dw_axi_dma_set_byte_halfword(chan, true);
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_MEM_TO_PER_DST :
				DWAXIDMAC_TT_FC_MEM_TO_PER_DMAC;
		if (chan->chip->apb_regs)
			config.dst_per = chan->id;
		else
			config.dst_per = chan->hw_handshake_num;
		break;
	case DMA_DEV_TO_MEM:
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_PER_TO_MEM_SRC :
				DWAXIDMAC_TT_FC_PER_TO_MEM_DMAC;
		if (chan->chip->apb_regs)
			config.src_per = chan->id;
		else
			config.src_per = chan->hw_handshake_num;
		break;
	default:
		break;
	}
	...
	write_chan_llp(chan, first->hw_desc[0].llp | lms);

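/*
 * Handshake interface selection: on platforms with the extra APB region
 * (e.g. Intel KeemBay) the handshake is remuxed so it always equals the
 * channel id (see dw_axi_dma_set_hw_channel() below); otherwise the
 * number comes straight from the devicetree via dma_spec->args[0].
 */
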
/* in axi_chan_start_first_queued() */
	vd = vchan_next_desc(&chan->vc);
	...
	dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan),
		 vd->tx.cookie);

/* in dma_chan_issue_pending() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	if (vchan_issue_pending(&chan->vc))
		axi_chan_start_first_queued(chan);
	spin_unlock_irqrestore(&chan->vc.lock, flags);

/* in dw_axi_dma_synchronize() */
	vchan_synchronize(&chan->vc);

/* in dma_chan_alloc_chan_resources() */
	if (axi_chan_is_hw_enable(chan)) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return -EBUSY;
	}

	/* LLI address must be aligned to a 64-byte boundary */
	chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)),
					  chan->chip->dev,
					  sizeof(struct axi_dma_lli),
					  64, 0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "No memory for descriptors\n");
		return -ENOMEM;
	}
	...
	pm_runtime_get(chan->chip->dev);

/* in dma_chan_free_chan_resources() */
	if (axi_chan_is_hw_enable(chan))
		dev_err(dchan2dev(dchan), "%s is non-idle!\n",
			axi_chan_name(chan));
	...
	vchan_free_chan_resources(&chan->vc);

	dma_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
	...
		 axi_chan_name(chan), atomic_read(&chan->descs_allocated));

	pm_runtime_put(chan->chip->dev);

static int dw_axi_dma_set_hw_channel(struct axi_dma_chan *chan, bool set)
{
	struct axi_dma_chip *chip = chan->chip;
	unsigned long reg_value, val;

	if (!chip->apb_regs) {
		dev_err(chip->dev, "apb_regs not initialized\n");
		return -ENODEV;
	}

	if (set)
		val = chan->hw_handshake_num;
	else
		val = UNUSED_CHANNEL;

	reg_value = lo_hi_readq(chip->apb_regs + DMAC_APB_HW_HS_SEL_0);

	/* Channel is already allocated, set handshake as per channel ID */
	reg_value &= ~(DMA_APB_HS_SEL_MASK <<
			(chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	lo_hi_writeq(reg_value, chip->apb_regs + DMAC_APB_HW_HS_SEL_0);

	return 0;
}

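/*
 * DMAC_APB_HW_HS_SEL_0 packs one DMA_APB_HS_SEL_BIT_SIZE-bit field per
 * channel; writing the channel's field routes the chosen hardware
 * handshake to it, and writing UNUSED_CHANNEL parks the field again when
 * the channel is released.
 */
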
/* in set_desc_last() */
	val = le32_to_cpu(desc->lli->ctl_hi);
	val |= CH_CTL_H_LLI_LAST;
	desc->lli->ctl_hi = cpu_to_le32(val);

/* in write_desc_sar() */
	desc->lli->sar = cpu_to_le64(adr);

/* in write_desc_dar() */
	desc->lli->dar = cpu_to_le64(adr);

/* in set_desc_src_master() */
	val = le32_to_cpu(desc->lli->ctl_lo);
	val &= ~CH_CTL_L_SRC_MAST;
	desc->lli->ctl_lo = cpu_to_le32(val);

/* in set_desc_dest_master() */
	val = le32_to_cpu(hw_desc->lli->ctl_lo);
	if (desc->chan->chip->dw->hdata->nr_masters > 1)
		val |= CH_CTL_L_DST_MAST;
	else
		val &= ~CH_CTL_L_DST_MAST;
	hw_desc->lli->ctl_lo = cpu_to_le32(val);

/* in dw_axi_dma_set_hw_desc() */
	unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width);
	...
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	...
		dev_err(chan->chip->dev, "invalid buffer alignment\n");
		return -EINVAL;
	}

	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		reg_width = __ffs(chan->config.dst_addr_width);
		device_addr = chan->config.dst_addr;
		...
		break;
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		device_addr = chan->config.src_addr;
		...
		break;
	default:
		return -EINVAL;
	}

	if (block_ts > axi_block_ts)
		return -EINVAL;

	hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
	if (unlikely(!hw_desc->lli))
		return -ENOMEM;

	ctlhi = CH_CTL_H_LLI_VALID;
	if (chan->chip->dw->hdata->restrict_axi_burst_len) {
		burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
		...
	}
	hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);

	if (chan->direction == DMA_MEM_TO_DEV) {
		write_desc_sar(hw_desc, mem_addr);
		write_desc_dar(hw_desc, device_addr);
	} else {
		write_desc_sar(hw_desc, device_addr);
		write_desc_dar(hw_desc, mem_addr);
	}
	hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
	...
	hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);
	...
	hw_desc->len = len;

/* in calculate_block_len() */
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];

	switch (direction) {
	case DMA_MEM_TO_DEV:
		data_width = BIT(chan->chip->dw->hdata->m_data_width);
		...
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		...
	}

/* in dw_axi_dma_chan_prep_cyclic() */
	chan->direction = direction;
	desc->chan = chan;
	chan->cyclic = true;
	desc->length = 0;
	desc->period_len = period_len;

	for (i = 0; i < total_segments; i++) {
		hw_desc = &desc->hw_desc[i];
		...
		desc->length += hw_desc->len;
		/*
		 * Set end-of-link to the linked descriptor, so that cyclic
		 * callback function can be triggered during interrupt.
		 */
		set_desc_last(hw_desc);
		...
	}

	llp = desc->hw_desc[0].llp;
	do {
		hw_desc = &desc->hw_desc[--total_segments];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (total_segments);
	...
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);

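/*
 * The llp chain is linked in reverse: starting from the first
 * descriptor's address, each descriptor is pointed at its successor and
 * the last one wraps back to the first, so in cyclic mode the hardware
 * loops around the ring without software intervention.
 */
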
/* in dw_axi_dma_chan_prep_slave_sg() */
	desc->chan = chan;
	desc->length = 0;
	chan->direction = direction;

	for_each_sg(sgl, sg, sg_len, i) {
		...
		do {
			hw_desc = &desc->hw_desc[loop++];
			if (dw_axi_dma_set_hw_desc(chan, hw_desc, mem, segment_len))
				goto err_desc_get;

			desc->length += hw_desc->len;
			len -= segment_len;
			mem += segment_len;
		} while (len >= segment_len);
	}

	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num_sgs - 1]);

	llp = desc->hw_desc[0].llp;
	do {
		hw_desc = &desc->hw_desc[--num_sgs];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num_sgs);

	return vchan_tx_prep(&chan->vc, &desc->vd, flags);

/* in dma_chan_prep_dma_memcpy() */
	max_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	...
	desc->chan = chan;
	num = 0;
	desc->length = 0;
	while (len) {
		xfer_len = len;

		hw_desc = &desc->hw_desc[num];
		...
		/*
		 * block_ts is the number of data items of xfer_width moved in
		 * one hardware block; the BLOCK_TS register should be set to
		 * block_ts - 1 for the DMA transfer.
		 */
		block_ts = xfer_len >> xfer_width;
		...
		hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
		if (unlikely(!hw_desc->lli))
			goto err_desc_get;
		...
		hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);

		reg = CH_CTL_H_LLI_VALID;
		if (chan->chip->dw->hdata->restrict_axi_burst_len) {
			u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
			...
		}
		hw_desc->lli->ctl_hi = cpu_to_le32(reg);
		...
		hw_desc->lli->ctl_lo = cpu_to_le32(reg);

		hw_desc->len = xfer_len;
		desc->length += hw_desc->len;
		len -= xfer_len;
		...
		num++;
	}

	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num - 1]);
	do {
		hw_desc = &desc->hw_desc[--num];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num);

	return vchan_tx_prep(&chan->vc, &desc->vd, flags);

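/*
 * Memcpy transfers are split into hardware blocks of at most
 * max_block_ts data items; each chunk gets its own LLI, and the chain is
 * again linked back-to-front before submission, with the final LLI
 * flagged as last instead of wrapping around.
 */
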
/* in dw_axi_dma_chan_slave_config() */
	memcpy(&chan->config, config, sizeof(*config));

/* in axi_chan_dump_lli() */
	if (!desc->lli) {
		dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n");
		return;
	}

	dev_err(dchan2dev(&chan->vc.chan),
		"SAR: 0x%llx DAR: 0x%llx LLP: 0x%llx BTS 0x%x CTL: 0x%x:%08x",
		le64_to_cpu(desc->lli->sar),
		le64_to_cpu(desc->lli->dar),
		le64_to_cpu(desc->lli->llp),
		le32_to_cpu(desc->lli->block_ts_lo),
		le32_to_cpu(desc->lli->ctl_hi),
		le32_to_cpu(desc->lli->ctl_lo));

/* in axi_chan_list_dump_lli() */
	int count = atomic_read(&chan->descs_allocated);
	...
	for (i = 0; i < count; i++)
		axi_chan_dump_lli(chan, &desc_head->hw_desc[i]);

/* in axi_chan_handle_err() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	...
	/* The bad descriptor currently is in the head of the vc list */
	vd = vchan_next_desc(&chan->vc);
	...
	/* Remove the corrupted descriptor from the issued list */
	list_del(&vd->node);
	...
	dev_err(chan2dev(chan),
		"Bad descriptor submitted for %s, cookie: %d, irq: 0x%08x\n",
		axi_chan_name(chan), vd->tx.cookie, status);
	...
	spin_unlock_irqrestore(&chan->vc.lock, flags);

/* in axi_chan_block_xfer_complete() */
	int count = atomic_read(&chan->descs_allocated);
	...
	spin_lock_irqsave(&chan->vc.lock, flags);
	...
	/* The completed descriptor currently is in the head of the vc list */
	vd = vchan_next_desc(&chan->vc);
	...
	if (chan->cyclic) {
		desc = vd_to_axi_desc(vd);
		...
		llp = lo_hi_readq(chan->chan_regs + CH_LLP);
		for (i = 0; i < count; i++) {
			hw_desc = &desc->hw_desc[i];
			if (hw_desc->llp == llp) {
				axi_chan_irq_clear(chan, hw_desc->lli->status_lo);
				hw_desc->lli->ctl_hi |= CH_CTL_H_LLI_VALID;
				desc->completed_blocks = i;

				if (((hw_desc->len * (i + 1)) % desc->period_len) == 0)
					vchan_cyclic_callback(vd);
				break;
			}
		}
	} else {
		...
		/* Remove the completed descriptor from the issued list */
		list_del(&vd->node);
		vchan_cookie_complete(vd);
		...
	}

	spin_unlock_irqrestore(&chan->vc.lock, flags);

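/*
 * Cyclic completion: the hardware clears CH_CTL_H_LLI_VALID as it
 * consumes each LLI, so the ISR re-arms the just-finished descriptor by
 * setting the bit again and fires the period callback whenever an
 * integral number of period_len bytes has completed.
 */
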
/* in dw_axi_dma_interrupt() */
	struct axi_dma_chip *chip = dev_id;
	struct dw_axi_dma *dw = chip->dw;
	...
	/* Disable DMAC interrupts; re-enabled once all channels are serviced */
	axi_dma_irq_disable(chip);

	/* Poll, clear and process every channel interrupt status */
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		chan = &dw->chan[i];
		status = axi_chan_irq_read(chan);
		axi_chan_irq_clear(chan, status);

		dev_vdbg(chip->dev, "%s %u IRQ status: 0x%08x\n",
			 axi_chan_name(chan), i, status);

		if (status & DWAXIDMAC_IRQ_ALL_ERR)
			axi_chan_handle_err(chan, status);
		else if (status & DWAXIDMAC_IRQ_DMA_TRF)
			axi_chan_block_xfer_complete(chan);
	}

	/* Re-enable interrupts */
	axi_dma_irq_enable(chip);

/* in dma_chan_terminate_all() */
	u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT;
	...
	axi_chan_disable(chan);

	ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val,
					!(val & chan_active), ...);
	if (ret == -ETIMEDOUT)
		dev_warn(dchan2dev(dchan),
			 "%s failed to stop\n", axi_chan_name(chan));

	if (chan->direction != DMA_MEM_TO_MEM)
		dw_axi_dma_set_hw_channel(chan, false);
	if (chan->direction == DMA_MEM_TO_DEV)
		dw_axi_dma_set_byte_halfword(chan, false);

	spin_lock_irqsave(&chan->vc.lock, flags);

	vchan_get_all_descriptors(&chan->vc, &head);

	chan->cyclic = false;
	spin_unlock_irqrestore(&chan->vc.lock, flags);

	vchan_dma_desc_free_list(&chan->vc, &head);

static int dma_chan_pause(struct dma_chan *dchan)
{
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	unsigned long flags;
	unsigned int timeout = 20; /* timeout iterations */
	u64 val;

	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHSUSPREG);
		if (chan->id >= DMAC_CHAN_16) {
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_SHIFT + DMAC_CHAN_BLOCK_SHIFT) |
				(u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		} else {
			val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHSUSPREG, val);
	} else {
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
			val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
			axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
		} else {
			val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
			val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
			axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, (u32)val);
		}
	}

	do {
		if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED)
			break;

		udelay(2);
	} while (--timeout);

	axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED);

	chan->is_paused = true;

	spin_unlock_irqrestore(&chan->vc.lock, flags);

	return timeout ? 0 : -EAGAIN;
}

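/*
 * Pause is best-effort: after setting the suspend (and matching WE) bits
 * the code polls briefly for DWAXIDMAC_IRQ_SUSPENDED and reports -EAGAIN
 * if the channel did not reach the suspended state in time.
 */
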
static void axi_chan_resume(struct axi_dma_chan *chan)
{
	u64 val;

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHSUSPREG);
		if (chan->id >= DMAC_CHAN_16) {
			val &= ~((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
			val |= ((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
		} else {
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHSUSPREG, val);
	} else {
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT);
			axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
		} else {
			val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
			axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, (u32)val);
		}
	}

	chan->is_paused = false;
}

/* in dma_chan_resume() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	if (chan->is_paused)
		axi_chan_resume(chan);
	spin_unlock_irqrestore(&chan->vc.lock, flags);

static int axi_dma_suspend(struct axi_dma_chip *chip)
{
	axi_dma_irq_disable(chip);
	axi_dma_disable(chip);

	clk_disable_unprepare(chip->core_clk);
	clk_disable_unprepare(chip->cfgr_clk);

	return 0;
}

static int axi_dma_resume(struct axi_dma_chip *chip)
{
	int ret;

	ret = clk_prepare_enable(chip->cfgr_clk);
	if (ret < 0)
		return ret;

	ret = clk_prepare_enable(chip->core_clk);
	if (ret < 0)
		return ret;

	axi_dma_enable(chip);
	axi_dma_irq_enable(chip);

	return 0;
}

static int axi_dma_runtime_suspend(struct device *dev)
{
	struct axi_dma_chip *chip = dev_get_drvdata(dev);

	return axi_dma_suspend(chip);
}

static int axi_dma_runtime_resume(struct device *dev)
{
	struct axi_dma_chip *chip = dev_get_drvdata(dev);

	return axi_dma_resume(chip);
}

/* in dw_axi_dma_of_xlate() */
	struct dw_axi_dma *dw = ofdma->of_dma_data;
	...
	dchan = dma_get_any_slave_channel(&dw->dma);
	if (!dchan)
		return NULL;

	chan = dchan_to_axi_dma_chan(dchan);
	chan->hw_handshake_num = dma_spec->args[0];
	return dchan;

static int parse_device_properties(struct axi_dma_chip *chip)
{
	struct device *dev = chip->dev;
	u32 tmp, carr[DMAC_MAX_CHANNELS];
	int ret;

	ret = device_property_read_u32(dev, "dma-channels", &tmp);
	if (ret)
		return ret;
	if (tmp == 0 || tmp > DMAC_MAX_CHANNELS)
		return -EINVAL;

	chip->dw->hdata->nr_channels = tmp;
	if (tmp <= DMA_REG_MAP_CH_REF)
		chip->dw->hdata->reg_map_8_channels = true;

	ret = device_property_read_u32(dev, "snps,dma-masters", &tmp);
	...
		return -EINVAL;

	chip->dw->hdata->nr_masters = tmp;

	ret = device_property_read_u32(dev, "snps,data-width", &tmp);
	...
		return -EINVAL;

	chip->dw->hdata->m_data_width = tmp;

	ret = device_property_read_u32_array(dev, "snps,block-size", carr,
					     chip->dw->hdata->nr_channels);
	...
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] == 0 || carr[tmp] > DMAC_MAX_BLK_SIZE)
			return -EINVAL;

		chip->dw->hdata->block_size[tmp] = carr[tmp];
	}

	ret = device_property_read_u32_array(dev, "snps,priority", carr,
					     chip->dw->hdata->nr_channels);
	...
	/* Priority value must be programmed within [0:nr_channels-1] range */
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] >= chip->dw->hdata->nr_channels)
			return -EINVAL;

		chip->dw->hdata->priority[tmp] = carr[tmp];
	}

	/* axi-max-burst-len is an optional property */
	ret = device_property_read_u32(dev, "snps,axi-max-burst-len", &tmp);
	if (!ret) {
		if (tmp > DWAXIDMAC_ARWLEN_MAX + 1)
			return -EINVAL;
		if (tmp < DWAXIDMAC_ARWLEN_MIN + 1)
			return -EINVAL;

		chip->dw->hdata->restrict_axi_burst_len = true;
		chip->dw->hdata->axi_rw_burst_len = tmp;
	}

	return 0;
}

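/*
 * Example of the devicetree properties parsed above (values are
 * illustrative only, not taken from a real board):
 *
 *	dma-channels = <4>;
 *	snps,dma-masters = <2>;
 *	snps,data-width = <3>;			// encoded bus width, 3 = 64 bits
 *	snps,block-size = <4096 4096 4096 4096>;
 *	snps,priority = <0 1 2 3>;
 *	snps,axi-max-burst-len = <16>;		// optional
 */
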
static int dw_probe(struct platform_device *pdev)
{
	struct axi_dma_chip *chip;
	struct dw_axi_dma *dw;
	struct dw_axi_dma_hcfg *hdata;
	struct reset_control *resets;
	u32 i, flags;
	int ret;

	chip = devm_kzalloc(&pdev->dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	dw = devm_kzalloc(&pdev->dev, sizeof(*dw), GFP_KERNEL);
	if (!dw)
		return -ENOMEM;

	hdata = devm_kzalloc(&pdev->dev, sizeof(*hdata), GFP_KERNEL);
	if (!hdata)
		return -ENOMEM;

	chip->dw = dw;
	chip->dev = &pdev->dev;
	chip->dw->hdata = hdata;

	chip->irq = platform_get_irq(pdev, 0);
	if (chip->irq < 0)
		return chip->irq;

	chip->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(chip->regs))
		return PTR_ERR(chip->regs);

	flags = (uintptr_t)of_device_get_match_data(&pdev->dev);
	if (flags & AXI_DMA_FLAG_HAS_APB_REGS) {
		chip->apb_regs = devm_platform_ioremap_resource(pdev, 1);
		if (IS_ERR(chip->apb_regs))
			return PTR_ERR(chip->apb_regs);
	}

	if (flags & AXI_DMA_FLAG_HAS_RESETS) {
		resets = devm_reset_control_array_get_exclusive(&pdev->dev);
		if (IS_ERR(resets))
			return PTR_ERR(resets);

		ret = reset_control_deassert(resets);
		if (ret)
			return ret;
	}

	chip->dw->hdata->use_cfg2 = !!(flags & AXI_DMA_FLAG_USE_CFG2);

	chip->core_clk = devm_clk_get(chip->dev, "core-clk");
	if (IS_ERR(chip->core_clk))
		return PTR_ERR(chip->core_clk);

	chip->cfgr_clk = devm_clk_get(chip->dev, "cfgr-clk");
	if (IS_ERR(chip->cfgr_clk))
		return PTR_ERR(chip->cfgr_clk);

	ret = parse_device_properties(chip);
	if (ret)
		return ret;

	dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels,
				sizeof(*dw->chan), GFP_KERNEL);
	if (!dw->chan)
		return -ENOMEM;

	ret = devm_request_irq(chip->dev, chip->irq, dw_axi_dma_interrupt,
			       IRQF_SHARED, KBUILD_MODNAME, chip);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dw->dma.channels);
	for (i = 0; i < hdata->nr_channels; i++) {
		struct axi_dma_chan *chan = &dw->chan[i];

		chan->chip = chip;
		chan->id = i;
		chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
		atomic_set(&chan->descs_allocated, 0);

		chan->vc.desc_free = vchan_desc_put;
		vchan_init(&chan->vc, &dw->dma);
	}

	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
	dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
	dma_cap_set(DMA_CYCLIC, dw->dma.cap_mask);

	dw->dma.max_burst = hdata->axi_rw_burst_len;
	dw->dma.src_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.dst_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.directions = BIT(DMA_MEM_TO_MEM);
	dw->dma.directions |= BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
	dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;

	dw->dma.dev = chip->dev;
	dw->dma.device_tx_status = dma_chan_tx_status;
	dw->dma.device_issue_pending = dma_chan_issue_pending;
	dw->dma.device_terminate_all = dma_chan_terminate_all;
	dw->dma.device_pause = dma_chan_pause;
	dw->dma.device_resume = dma_chan_resume;

	dw->dma.device_alloc_chan_resources = dma_chan_alloc_chan_resources;
	dw->dma.device_free_chan_resources = dma_chan_free_chan_resources;

	dw->dma.device_prep_dma_memcpy = dma_chan_prep_dma_memcpy;
	dw->dma.device_synchronize = dw_axi_dma_synchronize;
	dw->dma.device_config = dw_axi_dma_chan_slave_config;
	dw->dma.device_prep_slave_sg = dw_axi_dma_chan_prep_slave_sg;
	dw->dma.device_prep_dma_cyclic = dw_axi_dma_chan_prep_cyclic;

	dw->dma.dev->dma_parms = &dw->dma_parms;
	dma_set_max_seg_size(&pdev->dev, MAX_BLOCK_SIZE);
	platform_set_drvdata(pdev, chip);

	pm_runtime_enable(chip->dev);
	...
	pm_runtime_get_noresume(chip->dev);
	ret = axi_dma_resume(chip);
	if (ret < 0)
		goto err_pm_disable;

	axi_dma_hw_init(chip);

	pm_runtime_put(chip->dev);

	ret = dmaenginem_async_device_register(&dw->dma);
	if (ret)
		goto err_pm_disable;

	/* Register with OF helpers for DMA lookups */
	ret = of_dma_controller_register(pdev->dev.of_node,
					 dw_axi_dma_of_xlate, dw);
	if (ret < 0)
		dev_warn(&pdev->dev,
			 "Failed to register OF DMA controller, fallback to MEM_TO_MEM mode\n");

	dev_info(chip->dev, "DesignWare AXI DMA Controller, %d channels\n",
		 dw->hdata->nr_channels);

	return 0;

err_pm_disable:
	pm_runtime_disable(chip->dev);

	return ret;
}

/* in dw_remove() */
	struct axi_dma_chip *chip = platform_get_drvdata(pdev);
	struct dw_axi_dma *dw = chip->dw;
	struct axi_dma_chan *chan, *_chan;
	u32 i;

	/* Enable clk before accessing the registers */
	clk_prepare_enable(chip->cfgr_clk);
	clk_prepare_enable(chip->core_clk);
	axi_dma_irq_disable(chip);
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		axi_chan_disable(&chip->dw->chan[i]);
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
	}
	axi_dma_disable(chip);

	pm_runtime_disable(chip->dev);
	axi_dma_suspend(chip);

	devm_free_irq(chip->dev, chip->irq, chip);

	of_dma_controller_free(chip->dev->of_node);

	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
				 vc.chan.device_node) {
		list_del(&chan->vc.chan.device_node);
		tasklet_kill(&chan->vc.task);
	}

1649 .compatible = "snps,axi-dma-1.01a"
1651 .compatible = "intel,kmb-axi-dma",
1654 .compatible = "starfive,jh7110-axi-dma",