Lines matching refs: hsuc (cross-reference listing for the Intel High Speed UART (HSU) DMA engine driver; the number at the start of each entry is the line number in the source file)
45 static inline void hsu_chan_disable(struct hsu_dma_chan *hsuc)
47 hsu_chan_writel(hsuc, HSU_CH_CR, 0);
50 static inline void hsu_chan_enable(struct hsu_dma_chan *hsuc)
54 if (hsuc->direction == DMA_MEM_TO_DEV)
56 else if (hsuc->direction == DMA_DEV_TO_MEM)
59 hsu_chan_writel(hsuc, HSU_CH_CR, cr);
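
The entries at source lines 45-59 cover the channel enable/disable helpers; only the lines that mention hsuc are shown, so the value written to HSU_CH_CR is elided. A minimal sketch of the enable path, assuming HSU_CH_CR_CHA and HSU_CH_CR_CHD bit macros (names not confirmed by this listing); hsu_chan_disable() is the degenerate case that simply writes 0 to HSU_CH_CR (source line 47):

    static inline void hsu_chan_enable(struct hsu_dma_chan *hsuc)
    {
        u32 cr = HSU_CH_CR_CHA;            /* assumed: channel-activate bit */

        if (hsuc->direction == DMA_MEM_TO_DEV)
            cr &= ~HSU_CH_CR_CHD;          /* assumed: direction bit cleared for TX */
        else if (hsuc->direction == DMA_DEV_TO_MEM)
            cr |= HSU_CH_CR_CHD;           /* assumed: direction bit set for RX */

        hsu_chan_writel(hsuc, HSU_CH_CR, cr);
    }
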
62 static void hsu_dma_chan_start(struct hsu_dma_chan *hsuc)
64 struct dma_slave_config *config = &hsuc->config;
65 struct hsu_dma_desc *desc = hsuc->desc;
70 if (hsuc->direction == DMA_MEM_TO_DEV) {
73 } else if (hsuc->direction == DMA_DEV_TO_MEM) {
78 hsu_chan_disable(hsuc);
80 hsu_chan_writel(hsuc, HSU_CH_DCR, 0);
81 hsu_chan_writel(hsuc, HSU_CH_BSR, bsr);
82 hsu_chan_writel(hsuc, HSU_CH_MTSR, mtsr);
87 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr);
88 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len);
100 hsu_chan_writel(hsuc, HSU_CH_DCR, dcr);
102 hsu_chan_enable(hsuc);
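
Source lines 62-102 are the channel (re)start sequence: disable the channel, clear HSU_CH_DCR, program the burst size (BSR) and transfer width (MTSR) from the slave config, load the per-block address/size registers DxSAR/DxTSR from the software descriptor, write the final DCR value, and re-enable the channel. A hedged sketch of that sequence; HSU_CH_DCR_DESCA(), HSU_DMA_CHAN_NR_DESC and the desc->nents field are assumptions, and the multi-batch bookkeeping plus the interrupt-enable bits in DCR are elided:

    static void hsu_dma_chan_start(struct hsu_dma_chan *hsuc)
    {
        struct dma_slave_config *config = &hsuc->config;
        struct hsu_dma_desc *desc = hsuc->desc;
        u32 bsr = 0, mtsr = 0, dcr = 0;
        unsigned int i;

        /* Burst size and transfer width follow the transfer direction */
        if (hsuc->direction == DMA_MEM_TO_DEV) {
            bsr = config->dst_maxburst;
            mtsr = config->dst_addr_width;
        } else if (hsuc->direction == DMA_DEV_TO_MEM) {
            bsr = config->src_maxburst;
            mtsr = config->src_addr_width;
        }

        hsu_chan_disable(hsuc);

        hsu_chan_writel(hsuc, HSU_CH_DCR, 0);
        hsu_chan_writel(hsuc, HSU_CH_BSR, bsr);
        hsu_chan_writel(hsuc, HSU_CH_MTSR, mtsr);

        /* Load up to HSU_DMA_CHAN_NR_DESC (assumed limit) hardware descriptor slots */
        for (i = 0; i < desc->nents && i < HSU_DMA_CHAN_NR_DESC; i++) {
            hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr);
            hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len);
            dcr |= HSU_CH_DCR_DESCA(i);    /* assumed: arm descriptor slot i */
        }
        /* Interrupt-enable bits for the last programmed descriptor are elided here */

        hsu_chan_writel(hsuc, HSU_CH_DCR, dcr);

        hsu_chan_enable(hsuc);
    }
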
105 static void hsu_dma_stop_channel(struct hsu_dma_chan *hsuc)
107 hsu_chan_disable(hsuc);
108 hsu_chan_writel(hsuc, HSU_CH_DCR, 0);
111 static void hsu_dma_start_channel(struct hsu_dma_chan *hsuc)
113 hsu_dma_chan_start(hsuc);
116 static void hsu_dma_start_transfer(struct hsu_dma_chan *hsuc)
121 vdesc = vchan_next_desc(&hsuc->vchan);
123 hsuc->desc = NULL;
128 hsuc->desc = to_hsu_dma_desc(vdesc);
131 hsu_dma_start_channel(hsuc);
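
Source lines 116-131 are the standard virt-dma handoff: pull the next issued descriptor, or record that the channel is idle. The list_del() of the virtual descriptor does not appear above because that line does not mention hsuc; a sketch of the whole helper for context (hsu_dma_start_channel() at lines 111-113 and hsu_dma_stop_channel() at lines 105-108 are thin wrappers around hsu_dma_chan_start() and hsu_chan_disable() plus a DCR clear):

    static void hsu_dma_start_transfer(struct hsu_dma_chan *hsuc)
    {
        struct virt_dma_desc *vdesc;

        /* Get the next descriptor queued by issue_pending() */
        vdesc = vchan_next_desc(&hsuc->vchan);
        if (!vdesc) {
            hsuc->desc = NULL;    /* nothing queued, the channel goes idle */
            return;
        }

        /* Detach it from the issued list and make it the active descriptor */
        list_del(&vdesc->node);
        hsuc->desc = to_hsu_dma_desc(vdesc);

        /* Program and enable the hardware channel */
        hsu_dma_start_channel(hsuc);
    }
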
154 struct hsu_dma_chan *hsuc;
162 hsuc = &chip->hsu->chan[nr];
168 spin_lock_irqsave(&hsuc->vchan.lock, flags);
169 sr = hsu_chan_readl(hsuc, HSU_CH_SR);
170 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
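
Source lines 154-170 take a chip pointer and a channel index, so this appears to be hsu_dma_get_status(), the helper the UART driver calls from its interrupt path; the listing shows only the locked status-register read. A simplified skeleton, with the name inferred, the nr_channels field assumed, and the real decoding of the status bits elided:

    int hsu_dma_get_status(struct hsu_dma_chip *chip, unsigned short nr,
                           u32 *status)
    {
        struct hsu_dma_chan *hsuc;
        unsigned long flags;
        u32 sr;

        if (nr >= chip->hsu->nr_channels)    /* assumed field name */
            return -EINVAL;

        hsuc = &chip->hsu->chan[nr];

        /* Snapshot the channel status register under the vchan lock */
        spin_lock_irqsave(&hsuc->vchan.lock, flags);
        sr = hsu_chan_readl(hsuc, HSU_CH_SR);
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);

        /* Decoding of sr into *status and error codes is elided in this sketch */
        *status = sr;
        return 0;
    }
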
213 struct hsu_dma_chan *hsuc;
221 hsuc = &chip->hsu->chan[nr];
222 stat = this_cpu_ptr(hsuc->vchan.chan.local);
224 spin_lock_irqsave(&hsuc->vchan.lock, flags);
225 desc = hsuc->desc;
230 hsu_dma_start_channel(hsuc);
235 hsu_dma_start_transfer(hsuc);
238 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
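
Source lines 213-238 are the per-channel interrupt handler (hsu_dma_do_irq() in recent kernels): under the vchan lock it either restarts the channel when the active descriptor still has unsubmitted blocks (line 230), or completes the cookie and moves on to the next queued descriptor (line 235). A sketch assuming HSU_CH_SR_CHE for the error bit and nents/active/length bookkeeping fields in hsu_dma_desc:

    int hsu_dma_do_irq(struct hsu_dma_chip *chip, unsigned short nr, u32 status)
    {
        struct dma_chan_percpu *stat;
        struct hsu_dma_chan *hsuc;
        struct hsu_dma_desc *desc;
        unsigned long flags;

        hsuc = &chip->hsu->chan[nr];
        stat = this_cpu_ptr(hsuc->vchan.chan.local);

        spin_lock_irqsave(&hsuc->vchan.lock, flags);
        desc = hsuc->desc;
        if (desc) {
            if (status & HSU_CH_SR_CHE) {            /* assumed: channel error bit */
                desc->status = DMA_ERROR;
            } else if (desc->active < desc->nents) {
                /* More sg entries than hardware slots: reload and continue */
                hsu_dma_start_channel(hsuc);
            } else {
                /* Whole descriptor done: complete it and start the next one */
                vchan_cookie_complete(&desc->vdesc);
                desc->status = DMA_COMPLETE;
                stat->bytes_transferred += desc->length;
                hsu_dma_start_transfer(hsuc);
            }
        }
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);

        return 1;
    }
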
274 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);
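
Source lines 274-295 belong to the device_prep_slave_sg callback: it copies the DMA-mapped scatterlist into the driver's own descriptor and hands that descriptor to the virt-dma core with vchan_tx_prep(). A sketch; hsu_dma_alloc_desc() and the nents/length/direction fields are assumed names not shown in the listing:

    static struct dma_async_tx_descriptor *
    hsu_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                          unsigned int sg_len,
                          enum dma_transfer_direction direction,
                          unsigned long flags, void *context)
    {
        struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
        struct hsu_dma_desc *desc;
        struct scatterlist *sg;
        unsigned int i;

        desc = hsu_dma_alloc_desc(sg_len);    /* assumed helper name */
        if (!desc)
            return NULL;

        for_each_sg(sgl, sg, sg_len, i) {
            desc->sg[i].addr = sg_dma_address(sg);
            desc->sg[i].len = sg_dma_len(sg);
            desc->length += sg_dma_len(sg);    /* assumed field, used for residue */
        }

        desc->nents = sg_len;
        desc->direction = direction;
        desc->status = DMA_IN_PROGRESS;

        return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);
    }
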
300 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
303 spin_lock_irqsave(&hsuc->vchan.lock, flags);
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc)
305 hsu_dma_start_transfer(hsuc);
306 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
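
Source lines 300-306 are the canonical virt-dma issue_pending pattern, and the listing shows essentially the whole function: move submitted cookies to the issued list and kick the hardware only if no descriptor is currently active:

    static void hsu_dma_issue_pending(struct dma_chan *chan)
    {
        struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&hsuc->vchan.lock, flags);
        /* Start the hardware only when the channel is idle */
        if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc)
            hsu_dma_start_transfer(hsuc);
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
    }
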
309 static size_t hsu_dma_active_desc_size(struct hsu_dma_chan *hsuc)
311 struct hsu_dma_desc *desc = hsuc->desc;
320 bytes += hsu_chan_readl(hsuc, HSU_CH_DxTSR(i));
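
Source lines 309-320 compute the residue of the active descriptor: the lengths of the sg entries not yet handed to the hardware plus whatever is still outstanding in the hardware DxTSR counters. A rough sketch, assuming the active/nents fields and the HSU_DMA_CHAN_NR_DESC slot count:

    static size_t hsu_dma_active_desc_size(struct hsu_dma_chan *hsuc)
    {
        struct hsu_dma_desc *desc = hsuc->desc;
        size_t bytes = 0;
        int i;

        /* Blocks not yet programmed into the hardware */
        for (i = desc->active; i < desc->nents; i++)
            bytes += desc->sg[i].len;

        /* Blocks in flight: DxTSR reflects the remaining transfer size */
        for (i = HSU_DMA_CHAN_NR_DESC - 1; i >= 0; i--)
            bytes += hsu_chan_readl(hsuc, HSU_CH_DxTSR(i));

        return bytes;
    }
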
329 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
339 spin_lock_irqsave(&hsuc->vchan.lock, flags);
340 vdesc = vchan_find_desc(&hsuc->vchan, cookie);
341 if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) {
342 bytes = hsu_dma_active_desc_size(hsuc);
344 status = hsuc->desc->status;
349 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
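
Source lines 329-349 are device_tx_status: dma_cookie_status() handles the already-completed case; otherwise the residue is taken either from the live hardware state (when the cookie belongs to the active descriptor) or from the descriptor's total length (when it is still queued on the virtual channel). Sketch, with desc->length assumed:

    static enum dma_status hsu_dma_tx_status(struct dma_chan *chan,
                                             dma_cookie_t cookie,
                                             struct dma_tx_state *state)
    {
        struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
        struct virt_dma_desc *vdesc;
        enum dma_status status;
        unsigned long flags;

        status = dma_cookie_status(chan, cookie, state);
        if (status == DMA_COMPLETE)
            return status;

        spin_lock_irqsave(&hsuc->vchan.lock, flags);
        vdesc = vchan_find_desc(&hsuc->vchan, cookie);
        if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) {
            /* Active descriptor: ask the hardware how much is left */
            dma_set_residue(state, hsu_dma_active_desc_size(hsuc));
            status = hsuc->desc->status;
        } else if (vdesc) {
            /* Still queued: nothing transferred yet */
            dma_set_residue(state, to_hsu_dma_desc(vdesc)->length);
        }
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);

        return status;
    }
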
357 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
359 memcpy(&hsuc->config, config, sizeof(hsuc->config));
366 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
369 spin_lock_irqsave(&hsuc->vchan.lock, flags);
370 if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) {
371 hsu_chan_disable(hsuc);
372 hsuc->desc->status = DMA_PAUSED;
374 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
381 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
384 spin_lock_irqsave(&hsuc->vchan.lock, flags);
385 if (hsuc->desc && hsuc->desc->status == DMA_PAUSED) {
386 hsuc->desc->status = DMA_IN_PROGRESS;
387 hsu_chan_enable(hsuc);
389 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
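
Source lines 366-389 implement device_pause and device_resume purely by toggling the hardware channel and the descriptor's status under the vchan lock; nothing is reprogrammed. The pause side, which the listing shows almost completely (resume is the mirror image, flipping DMA_PAUSED back to DMA_IN_PROGRESS and calling hsu_chan_enable()):

    static int hsu_dma_pause(struct dma_chan *chan)
    {
        struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&hsuc->vchan.lock, flags);
        if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) {
            hsu_chan_disable(hsuc);        /* stop the hardware, keep its state */
            hsuc->desc->status = DMA_PAUSED;
        }
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);

        return 0;
    }
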
396 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
400 spin_lock_irqsave(&hsuc->vchan.lock, flags);
402 hsu_dma_stop_channel(hsuc);
403 if (hsuc->desc) {
404 hsu_dma_desc_free(&hsuc->desc->vdesc);
405 hsuc->desc = NULL;
408 vchan_get_all_descriptors(&hsuc->vchan, &head);
409 spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
410 vchan_dma_desc_free_list(&hsuc->vchan, &head);
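
Source lines 396-410 are the usual virt-dma terminate_all sequence: stop the hardware, drop the active descriptor, collect every queued virtual descriptor under the lock, and free the collected list only after the lock is released. Sketch for context (the LIST_HEAD declaration does not appear in the listing):

    static int hsu_dma_terminate_all(struct dma_chan *chan)
    {
        struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&hsuc->vchan.lock, flags);

        hsu_dma_stop_channel(hsuc);
        if (hsuc->desc) {
            hsu_dma_desc_free(&hsuc->desc->vdesc);
            hsuc->desc = NULL;
        }

        /* Collect queued descriptors under the lock, free them outside of it */
        vchan_get_all_descriptors(&hsuc->vchan, &head);
        spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
        vchan_dma_desc_free_list(&hsuc->vchan, &head);

        return 0;
    }
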
422 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
424 vchan_synchronize(&hsuc->vchan);
450 struct hsu_dma_chan *hsuc = &hsu->chan[i];
452 hsuc->vchan.desc_free = hsu_dma_desc_free;
453 vchan_init(&hsuc->vchan, &hsu->dma);
455 hsuc->direction = (i & 0x1) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV;
456 hsuc->reg = addr + i * HSU_DMA_CHAN_LENGTH;
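
Source lines 450-456 are the per-channel setup in probe: each channel's virtual channel gets the descriptor free callback and is registered with the dma_device, even-numbered channels are TX (DMA_MEM_TO_DEV) and odd-numbered ones RX (DMA_DEV_TO_MEM), and the per-channel MMIO base is offset by HSU_DMA_CHAN_LENGTH. The surrounding loop, reconstructed with an assumed nr_channels bound:

    for (i = 0; i < hsu->nr_channels; i++) {    /* nr_channels: assumed field name */
        struct hsu_dma_chan *hsuc = &hsu->chan[i];

        hsuc->vchan.desc_free = hsu_dma_desc_free;
        vchan_init(&hsuc->vchan, &hsu->dma);

        /* Channels come in pairs per UART port: even = TX, odd = RX */
        hsuc->direction = (i & 0x1) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV;
        hsuc->reg = addr + i * HSU_DMA_CHAN_LENGTH;
    }
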
501 struct hsu_dma_chan *hsuc = &hsu->chan[i];
503 tasklet_kill(&hsuc->vchan.task);