Lines Matching defs:priv
22 static int sgdma_async_write(struct altera_tse_private *priv,
25 static int sgdma_async_read(struct altera_tse_private *priv);
28 sgdma_txphysaddr(struct altera_tse_private *priv,
32 sgdma_rxphysaddr(struct altera_tse_private *priv,
35 static int sgdma_txbusy(struct altera_tse_private *priv);
37 static int sgdma_rxbusy(struct altera_tse_private *priv);
40 queue_tx(struct altera_tse_private *priv, struct tse_buffer *buffer);
43 queue_rx(struct altera_tse_private *priv, struct tse_buffer *buffer);
46 dequeue_tx(struct altera_tse_private *priv);
49 dequeue_rx(struct altera_tse_private *priv);
52 queue_rx_peekhead(struct altera_tse_private *priv);
54 int sgdma_initialize(struct altera_tse_private *priv)
56 priv->txctrlreg = SGDMA_CTRLREG_ILASTD |
59 priv->rxctrlreg = SGDMA_CTRLREG_IDESCRIP |
63 INIT_LIST_HEAD(&priv->txlisthd);
64 INIT_LIST_HEAD(&priv->rxlisthd);
66 priv->rxdescphys = (dma_addr_t) 0;
67 priv->txdescphys = (dma_addr_t) 0;
69 priv->rxdescphys = dma_map_single(priv->device,
70 (void __force *)priv->rx_dma_desc,
71 priv->rxdescmem, DMA_BIDIRECTIONAL);
73 if (dma_mapping_error(priv->device, priv->rxdescphys)) {
74 sgdma_uninitialize(priv);
75 netdev_err(priv->dev, "error mapping rx descriptor memory\n");
79 priv->txdescphys = dma_map_single(priv->device,
80 (void __force *)priv->tx_dma_desc,
81 priv->txdescmem, DMA_TO_DEVICE);
83 if (dma_mapping_error(priv->device, priv->txdescphys)) {
84 sgdma_uninitialize(priv);
85 netdev_err(priv->dev, "error mapping tx descriptor memory\n");
90 memset_io(priv->tx_dma_desc, 0, priv->txdescmem);
91 memset_io(priv->rx_dma_desc, 0, priv->rxdescmem);
93 dma_sync_single_for_device(priv->device, priv->txdescphys,
94 priv->txdescmem, DMA_TO_DEVICE);
96 dma_sync_single_for_device(priv->device, priv->rxdescphys,
97 priv->rxdescmem, DMA_TO_DEVICE);
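The matches above cover the forward declarations and sgdma_initialize() from the Altera TSE SGDMA backend (drivers/net/ethernet/altera/altera_sgdma.c): the TX and RX descriptor regions are streaming-mapped with dma_map_single(), each mapping is checked with dma_mapping_error(), and the zeroed descriptors are pushed out to the engine with dma_sync_single_for_device(). A minimal sketch of that pattern, using illustrative names (dev, desc, desc_len, handle) rather than the driver's own:

#include <linux/device.h>
#include <linux/dma-mapping.h>

static int map_desc_region(struct device *dev, void *desc, size_t desc_len,
                           dma_addr_t *handle)
{
        /* Streaming mapping of the descriptor area, as sgdma_initialize()
         * does for priv->tx_dma_desc / priv->rx_dma_desc.
         */
        *handle = dma_map_single(dev, desc, desc_len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, *handle)) {
                dev_err(dev, "error mapping descriptor memory\n");
                return -EINVAL;
        }

        /* Make the (zeroed) descriptors visible to the SGDMA engine
         * before it is started.
         */
        dma_sync_single_for_device(dev, *handle, desc_len, DMA_TO_DEVICE);
        return 0;
}

The unmap in sgdma_uninitialize() (lines 104-110) must mirror the mapping with the same size and direction: DMA_BIDIRECTIONAL for the RX region, DMA_TO_DEVICE for TX.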
102 void sgdma_uninitialize(struct altera_tse_private *priv)
104 if (priv->rxdescphys)
105 dma_unmap_single(priv->device, priv->rxdescphys,
106 priv->rxdescmem, DMA_BIDIRECTIONAL);
108 if (priv->txdescphys)
109 dma_unmap_single(priv->device, priv->txdescphys,
110 priv->txdescmem, DMA_TO_DEVICE);
116 void sgdma_reset(struct altera_tse_private *priv)
119 memset_io(priv->tx_dma_desc, 0, priv->txdescmem);
120 memset_io(priv->rx_dma_desc, 0, priv->rxdescmem);
122 csrwr32(SGDMA_CTRLREG_RESET, priv->tx_dma_csr, sgdma_csroffs(control));
123 csrwr32(0, priv->tx_dma_csr, sgdma_csroffs(control));
125 csrwr32(SGDMA_CTRLREG_RESET, priv->rx_dma_csr, sgdma_csroffs(control));
126 csrwr32(0, priv->rx_dma_csr, sgdma_csroffs(control));
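sgdma_reset() (lines 116-126) zeroes both descriptor areas and then pulses the RESET bit in each engine's control CSR before writing it back to zero. A sketch of the pulse for one engine, where csr stands in for priv->tx_dma_csr or priv->rx_dma_csr and csrwr32()/sgdma_csroffs() are the driver's own CSR accessors; the helper name sgdma_pulse_reset() is illustrative, the driver open-codes the two writes per engine:

static void sgdma_pulse_reset(void __iomem *csr)
{
        /* Assert the soft reset, then drop it so the engine can be
         * restarted from a clean state.
         */
        csrwr32(SGDMA_CTRLREG_RESET, csr, sgdma_csroffs(control));
        csrwr32(0, csr, sgdma_csroffs(control));
}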
134 void sgdma_enable_rxirq(struct altera_tse_private *priv)
138 void sgdma_enable_txirq(struct altera_tse_private *priv)
142 void sgdma_disable_rxirq(struct altera_tse_private *priv)
146 void sgdma_disable_txirq(struct altera_tse_private *priv)
150 void sgdma_clear_rxirq(struct altera_tse_private *priv)
152 tse_set_bit(priv->rx_dma_csr, sgdma_csroffs(control),
156 void sgdma_clear_txirq(struct altera_tse_private *priv)
158 tse_set_bit(priv->tx_dma_csr, sgdma_csroffs(control),
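The clear-interrupt paths (lines 150-158) OR the clear bit into the control register through tse_set_bit(). That helper is, roughly, a read-modify-write over the same csrrd32()/csrwr32() accessors; a sketch of the idea (csr_set_bits is an illustrative name, not the driver's):

static void csr_set_bits(void __iomem *csr, size_t offs, u32 mask)
{
        u32 value = csrrd32(csr, offs);

        csrwr32(value | mask, csr, offs);
}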
167 int sgdma_tx_buffer(struct altera_tse_private *priv, struct tse_buffer *buffer)
170 (struct sgdma_descrip __iomem *)priv->tx_dma_desc;
176 if (sgdma_txbusy(priv))
181 sgdma_txphysaddr(priv, ndesc),
189 sgdma_async_write(priv, cdesc);
192 queue_tx(priv, buffer);
200 u32 sgdma_tx_completions(struct altera_tse_private *priv)
204 if (!sgdma_txbusy(priv) &&
205 ((csrrd8(priv->tx_dma_desc, sgdma_descroffs(control))
207 (dequeue_tx(priv))) {
214 void sgdma_start_rxdma(struct altera_tse_private *priv)
216 sgdma_async_read(priv);
219 void sgdma_add_rx_desc(struct altera_tse_private *priv,
222 queue_rx(priv, rxbuffer);
228 u32 sgdma_rx_status(struct altera_tse_private *priv)
231 (struct sgdma_descrip __iomem *)priv->rx_dma_desc;
236 u32 sts = csrrd32(priv->rx_dma_csr, sgdma_csroffs(status));
242 dma_sync_single_for_cpu(priv->device,
243 priv->rxdescphys,
256 rxbuffer = dequeue_rx(priv);
258 netdev_info(priv->dev,
262 csrwr32(0, priv->rx_dma_csr, sgdma_csroffs(control));
264 csrwr32(0xf, priv->rx_dma_csr, sgdma_csroffs(status));
267 sgdma_async_read(priv);
279 netdev_err(priv->dev,
285 sgdma_async_read(priv);
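sgdma_rx_status() (lines 228-285) reads the engine status CSR and, before inspecting fields the hardware wrote into the descriptor, hands ownership of the descriptor memory back to the CPU with dma_sync_single_for_cpu() (line 242); sgdma_async_read() later syncs the rewritten descriptor back to the device (line 362) before restarting the engine. Illustrative helpers showing that ownership hand-off (the driver open-codes both calls; dev, handle and len stand in for priv->device, the descriptor DMA handle and the synced length):

static void desc_claim_for_cpu(struct device *dev, dma_addr_t handle,
                               size_t len)
{
        /* Give the CPU a coherent view of status/byte-count fields the
         * SGDMA engine wrote before the driver reads them.
         */
        dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
}

static void desc_give_to_device(struct device *dev, dma_addr_t handle,
                                size_t len)
{
        /* Push the CPU's descriptor updates out before re-arming. */
        dma_sync_single_for_device(dev, handle, len, DMA_TO_DEVICE);
}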
336 static int sgdma_async_read(struct altera_tse_private *priv)
339 (struct sgdma_descrip __iomem *)priv->rx_dma_desc;
345 if (!sgdma_rxbusy(priv)) {
346 rxbuffer = queue_rx_peekhead(priv);
348 netdev_err(priv->dev, "no rx buffers available\n");
354 sgdma_rxphysaddr(priv, ndesc),
362 dma_sync_single_for_device(priv->device,
363 priv->rxdescphys,
367 csrwr32(lower_32_bits(sgdma_rxphysaddr(priv, cdesc)),
368 priv->rx_dma_csr,
371 csrwr32((priv->rxctrlreg | SGDMA_CTRLREG_START),
372 priv->rx_dma_csr,
381 static int sgdma_async_write(struct altera_tse_private *priv,
384 if (sgdma_txbusy(priv))
388 csrwr32(0, priv->tx_dma_csr, sgdma_csroffs(control));
389 csrwr32(0x1f, priv->tx_dma_csr, sgdma_csroffs(status));
391 dma_sync_single_for_device(priv->device, priv->txdescphys,
394 csrwr32(lower_32_bits(sgdma_txphysaddr(priv, desc)),
395 priv->tx_dma_csr,
398 csrwr32((priv->txctrlreg | SGDMA_CTRLREG_START),
399 priv->tx_dma_csr,
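Both kick-off paths, sgdma_async_read() (lines 336-372) and sgdma_async_write() (lines 381-399), follow the same sequence: clear the control register, acknowledge pending status bits, sync the freshly written descriptor to the device, program the bus address of the first descriptor, then set START together with the cached control flags. A condensed sketch; the helper name sgdma_kick() and its parameters are illustrative, the next_descrip CSR field name is assumed from the driver's CSR layout (the offset argument is on continuation lines not shown above), the descriptor sync at lines 362/391 is left to the caller, and the status-clear value is 0xf on the RX path and 0x1f on TX in the listing:

static void sgdma_kick(void __iomem *csr, u32 ctrlreg, u32 status_mask,
                       dma_addr_t first_desc_busaddr)
{
        /* Stop the engine and acknowledge stale status bits. */
        csrwr32(0, csr, sgdma_csroffs(control));
        csrwr32(status_mask, csr, sgdma_csroffs(status));

        /* Point the engine at the first descriptor and start it with
         * the control flags cached in sgdma_initialize().
         */
        csrwr32(lower_32_bits(first_desc_busaddr), csr,
                sgdma_csroffs(next_descrip));
        csrwr32(ctrlreg | SGDMA_CTRLREG_START, csr, sgdma_csroffs(control));
}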
406 sgdma_txphysaddr(struct altera_tse_private *priv,
409 dma_addr_t paddr = priv->txdescmem_busaddr;
410 uintptr_t offs = (uintptr_t)desc - (uintptr_t)priv->tx_dma_desc;
415 sgdma_rxphysaddr(struct altera_tse_private *priv,
418 dma_addr_t paddr = priv->rxdescmem_busaddr;
419 uintptr_t offs = (uintptr_t)desc - (uintptr_t)priv->rx_dma_desc;
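The address-translation helpers (lines 406-419) convert a CPU pointer into the descriptor window into the bus address the SGDMA engine must be given: the byte offset of the descriptor within the ioremapped region is added to that region's bus base. A generic sketch of the same computation, with illustrative names:

static dma_addr_t desc_to_busaddr(void __iomem *desc_base,
                                  dma_addr_t desc_busaddr,
                                  void __iomem *desc)
{
        /* Offset of this descriptor inside the mapped window ... */
        uintptr_t offs = (uintptr_t)desc - (uintptr_t)desc_base;

        /* ... applied to the bus-side base of the same window. */
        return desc_busaddr + (dma_addr_t)offs;
}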
445 queue_tx(struct altera_tse_private *priv, struct tse_buffer *buffer)
447 list_add_tail(&buffer->lh, &priv->txlisthd);
456 queue_rx(struct altera_tse_private *priv, struct tse_buffer *buffer)
458 list_add_tail(&buffer->lh, &priv->rxlisthd);
467 dequeue_tx(struct altera_tse_private *priv)
470 list_remove_head(&priv->txlisthd, buffer, struct tse_buffer, lh);
480 dequeue_rx(struct altera_tse_private *priv)
483 list_remove_head(&priv->rxlisthd, buffer, struct tse_buffer, lh);
494 queue_rx_peekhead(struct altera_tse_private *priv)
497 list_peek_head(&priv->rxlisthd, buffer, struct tse_buffer, lh);
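The queue helpers (lines 445-497) keep in-flight tse_buffer entries on per-direction lists anchored in priv->txlisthd and priv->rxlisthd; list_remove_head() and list_peek_head() are driver-local helpers. Roughly the same dequeue can be written with the standard list API, assuming struct tse_buffer embeds its struct list_head as lh (as the list_add_tail() calls above show); dequeue_buffer() is an illustrative name:

#include <linux/list.h>

static struct tse_buffer *dequeue_buffer(struct list_head *head)
{
        /* NULL when the list is empty, otherwise detach and return the
         * oldest queued buffer.
         */
        struct tse_buffer *buf =
                list_first_entry_or_null(head, struct tse_buffer, lh);

        if (buf)
                list_del_init(&buf->lh);
        return buf;
}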
503 static int sgdma_rxbusy(struct altera_tse_private *priv)
505 return csrrd32(priv->rx_dma_csr, sgdma_csroffs(status))
512 static int sgdma_txbusy(struct altera_tse_private *priv)
517 while ((csrrd32(priv->tx_dma_csr, sgdma_csroffs(status))
521 if (csrrd32(priv->tx_dma_csr, sgdma_csroffs(status))
523 netdev_err(priv->dev, "timeout waiting for tx dma\n");
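sgdma_rxbusy() and sgdma_txbusy() (lines 503-523) test the engine's busy status; the TX variant additionally busy-waits for a bounded time before giving up and logging a timeout. A sketch of that bounded poll, assuming the busy flag is the SGDMA_STSREG_BUSY status bit and using an illustrative 100-iteration budget (the mask and limit sit on continuation lines not shown in the listing):

#include <linux/delay.h>

static int wait_tx_not_busy(void __iomem *csr)
{
        int tries = 100;

        /* Poll the status CSR until the engine reports idle or the
         * budget is exhausted.
         */
        while ((csrrd32(csr, sgdma_csroffs(status)) & SGDMA_STSREG_BUSY) &&
               tries--)
                udelay(1);

        /* Non-zero means the engine is still busy, i.e. a timeout. */
        return !!(csrrd32(csr, sgdma_csroffs(status)) & SGDMA_STSREG_BUSY);
}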