/linux/drivers/spi/

spi-fsl-cpm.c
     82  if (mspi->rx_dma == mspi->dma_dummy_rx)  in fsl_spi_cpm_bufs_start()
     83  iowrite32be(mspi->rx_dma, &rx_bd->cbd_bufaddr);  in fsl_spi_cpm_bufs_start()
     85  iowrite32be(mspi->rx_dma + xfer_ofs, &rx_bd->cbd_bufaddr);  in fsl_spi_cpm_bufs_start()
    115  mspi->rx_dma = mspi->dma_dummy_rx;  in fsl_spi_cpm_bufs()
    148  mspi->rx_dma = dma_map_single(dev, mspi->rx, t->len,  in fsl_spi_cpm_bufs()
    150  if (dma_mapping_error(dev, mspi->rx_dma)) {  in fsl_spi_cpm_bufs()
    155  mspi->rx_dma = t->rx_dma;  in fsl_spi_cpm_bufs()
    184  dma_unmap_single(dev, mspi->rx_dma, t->len, DMA_FROM_DEVICE);  in fsl_spi_cpm_bufs_complete()
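The spi-fsl-cpm.c hits show the streaming-DMA pattern this driver uses for receive buffers: map with dma_map_single(), verify with dma_mapping_error(), fall back to a preallocated dummy mapping when the transfer has no receive buffer, and unmap with dma_unmap_single() on completion. A minimal sketch of that pattern follows; the struct and function names (demo_spi, demo_map_rx, demo_unmap_rx) are illustrative placeholders, not taken from the driver.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/spi/spi.h>

/* Illustrative driver context only, not the fsl-cpm structures. */
struct demo_spi {
    struct device *dev;
    void *rx;                 /* CPU-visible receive buffer */
    dma_addr_t rx_dma;        /* bus address handed to the controller */
    dma_addr_t dma_dummy_rx;  /* preallocated scratch mapping */
};

/* Map the receive buffer (or fall back to the dummy mapping) before a transfer. */
static int demo_map_rx(struct demo_spi *ds, struct spi_transfer *t)
{
    if (!t->rx_buf) {
        /* Nothing to receive into: point the controller at scratch memory. */
        ds->rx_dma = ds->dma_dummy_rx;
        return 0;
    }

    ds->rx = t->rx_buf;
    ds->rx_dma = dma_map_single(ds->dev, ds->rx, t->len, DMA_FROM_DEVICE);
    if (dma_mapping_error(ds->dev, ds->rx_dma))
        return -ENOMEM;

    return 0;
}

/* Tear the mapping down once the controller has signalled completion. */
static void demo_unmap_rx(struct demo_spi *ds, struct spi_transfer *t)
{
    if (t->rx_buf)
        dma_unmap_single(ds->dev, ds->rx_dma, t->len, DMA_FROM_DEVICE);
}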
spi-s3c64xx.c
    196  * @rx_dma: Local receive DMA data (e.g. chan and direction)
    220  struct s3c64xx_spi_dma_data rx_dma;  member
    285  struct s3c64xx_spi_driver_data, rx_dma);  in s3c64xx_spi_dmacb()
    317  struct s3c64xx_spi_driver_data, rx_dma);  in s3c64xx_prepare_dma()
    388  sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx");  in s3c64xx_spi_prepare_transfer()
    389  if (IS_ERR(sdd->rx_dma.ch)) {  in s3c64xx_spi_prepare_transfer()
    391  sdd->rx_dma.ch = NULL;  in s3c64xx_spi_prepare_transfer()
    398  dma_release_channel(sdd->rx_dma.ch);  in s3c64xx_spi_prepare_transfer()
    400  sdd->rx_dma.ch = NULL;  in s3c64xx_spi_prepare_transfer()
    404  spi->dma_rx = sdd->rx_dma  in s3c64xx_spi_prepare_transfer()
    [all...]
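spi-s3c64xx.c keeps its receive channel in a per-controller rx_dma field, requests it by name with dma_request_chan(..., "rx"), and NULLs the pointer and releases the channel on error paths. A hedged sketch of that request/release flow, with placeholder names:

#include <linux/dmaengine.h>
#include <linux/err.h>

/*
 * Request the DMA channel named "rx" for this device; the caller hands it
 * back later with dma_release_channel(). Function and parameter names are
 * placeholders, not the driver's.
 */
static int demo_request_rx_chan(struct device *dev, struct dma_chan **chan_out)
{
    struct dma_chan *chan = dma_request_chan(dev, "rx");

    if (IS_ERR(chan)) {
        *chan_out = NULL;   /* mirror the driver: no channel on error */
        return PTR_ERR(chan);
    }

    *chan_out = chan;
    return 0;
}

static void demo_release_rx_chan(struct dma_chan **chan)
{
    if (*chan) {
        dma_release_channel(*chan);
        *chan = NULL;
    }
}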
spi-atmel.c
    833  xfer->rx_buf, (unsigned long long)xfer->rx_dma);  in atmel_spi_next_xfer_dma_submit()
    864  dma_addr_t *rx_dma,  in atmel_spi_next_xfer_data()  argument
    867  *rx_dma = xfer->rx_dma + xfer->len - *plen;  in atmel_spi_next_xfer_data()
    930  dma_addr_t tx_dma, rx_dma;  in atmel_spi_pdc_next_xfer()  local
    935  atmel_spi_next_xfer_data(host, xfer, &tx_dma, &rx_dma, &len);  in atmel_spi_pdc_next_xfer()
    938  spi_writel(as, RPR, rx_dma);  in atmel_spi_pdc_next_xfer()
    950  (unsigned long long)xfer->rx_dma);  in atmel_spi_pdc_next_xfer()
    954  atmel_spi_next_xfer_data(host, xfer, &tx_dma, &rx_dma, &len);  in atmel_spi_pdc_next_xfer()
    957  spi_writel(as, RNPR, rx_dma);  in atmel_spi_pdc_next_xfer()
    [all...]

spi-slave-mt27xx.c
    224  xfer->rx_dma = dma_map_single(dev, xfer->rx_buf,  in mtk_spi_slave_dma_transfer()
    226  if (dma_mapping_error(dev, xfer->rx_dma)) {  in mtk_spi_slave_dma_transfer()
    233  writel(xfer->rx_dma, mdata->base + SPIS_RX_DST_REG);  in mtk_spi_slave_dma_transfer()
    266  dma_unmap_single(dev, xfer->rx_dma,  in mtk_spi_slave_dma_transfer()
    349  dma_unmap_single(mdata->dev, trans->rx_dma,  in mtk_spi_slave_interrupt()
spi-mt65xx.c
    157  * @rx_dma: DMA start for SPI-MEM RX
    177  dma_addr_t rx_dma;  member
    638  writel((u32)(xfer->rx_dma & MTK_SPI_32BITS_MASK),  in mtk_spi_setup_dma_addr()
    642  writel((u32)(xfer->rx_dma >> 32),  in mtk_spi_setup_dma_addr()
    711  xfer->rx_dma = sg_dma_address(mdata->rx_sgl);  in mtk_spi_dma_transfer()
    824  xfer->rx_dma += mdata->xfer_len;  in mtk_spi_interrupt_thread()
    836  xfer->rx_dma = sg_dma_address(mdata->rx_sgl);  in mtk_spi_interrupt_thread()
    935  writel((u32)(mdata->rx_dma & MTK_SPI_32BITS_MASK),  in mtk_spi_mem_setup_dma_xfer()
    939  writel((u32)(mdata->rx_dma >> 32),  in mtk_spi_mem_setup_dma_xfer()
   1090  mdata->rx_dma  in mtk_spi_mem_exec_op()
    [all...]
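spi-mt65xx.c programs the low 32 bits of the receive DMA address (masked with MTK_SPI_32BITS_MASK) and the high 32 bits (shifted right by 32) into separate registers. A small sketch of that split, assuming invented register offsets (DEMO_RX_DST_LO_REG, DEMO_RX_DST_HI_REG):

#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Invented register offsets; the real ones are controller-specific. */
#define DEMO_RX_DST_LO_REG  0x20
#define DEMO_RX_DST_HI_REG  0x24

/*
 * Program a (possibly 64-bit) receive DMA address into a pair of 32-bit
 * registers. lower_32_bits()/upper_32_bits() are equivalent to the explicit
 * mask and ">> 32" seen in the spi-mt65xx.c hits.
 */
static void demo_setup_rx_dma_addr(void __iomem *base, dma_addr_t rx_dma)
{
    writel(lower_32_bits(rx_dma), base + DEMO_RX_DST_LO_REG);
    writel(upper_32_bits(rx_dma), base + DEMO_RX_DST_HI_REG);
}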
spi-sunplus-sp7021.c
    149  writel(xfer->rx_dma, pspim->s_base + SP7021_SLAVE_DMA_ADDR_REG);  in sp7021_spi_target_rx()
    377  xfer->rx_dma = dma_map_single(dev, xfer->rx_buf, xfer->len,  in sp7021_spi_target_transfer_one()
    379  if (dma_mapping_error(dev, xfer->rx_dma))  in sp7021_spi_target_transfer_one()
    382  dma_unmap_single(dev, xfer->rx_dma, xfer->len, DMA_FROM_DEVICE);  in sp7021_spi_target_transfer_one()

spi-fsl-lib.h
     39  dma_addr_t rx_dma;  member

spi-amlogic-spisg.c
    280  if (xfer->rx_buf || xfer->rx_dma) {  in aml_spisg_setup_transfer()
    357  } else if (xfer->rx_buf || xfer->rx_dma) {  in aml_spisg_setup_transfer()
    358  paddr = xfer->rx_dma;  in aml_spisg_setup_transfer()
    403  } else if (!xfer->rx_dma) {  in aml_spisg_cleanup_transfer()
/linux/drivers/net/ethernet/amd/

au1000_eth.h
     46  struct rx_dma {  struct
     47  status  member of struct rx_dma
     48  buff_stat  member of struct rx_dma
     49  pad  member of struct rx_dma
     73  struct rx_dma *rx_dma_ring[NUM_RX_DMA];
au1000_eth.c
    636  aup->rx_dma_ring[i] = (struct rx_dma *)  in au1000_setup_hw_rings()
    637  (tx_base + 0x100 + sizeof(struct rx_dma) * i);  in au1000_setup_hw_rings()
    770  struct rx_dma *prxd;  in au1000_rx()
/linux/drivers/iio/adc/

ad7606_spi.c
    279  struct dma_chan *rx_dma;  in ad7606_spi_offload_probe()  local
    313  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev,  in ad7606_spi_offload_probe()
    315  if (IS_ERR(rx_dma))  in ad7606_spi_offload_probe()
    316  return dev_err_probe(dev, PTR_ERR(rx_dma),  in ad7606_spi_offload_probe()
    320  rx_dma, IIO_BUFFER_DIRECTION_IN);  in ad7606_spi_offload_probe()
ad_sigma_delta.c
    812  struct dma_chan *rx_dma;  in devm_ad_sd_setup_buffer_and_trigger()  local
    814  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev,  in devm_ad_sd_setup_buffer_and_trigger()
    816  if (IS_ERR(rx_dma))  in devm_ad_sd_setup_buffer_and_trigger()
    817  return dev_err_probe(dev, PTR_ERR(rx_dma),  in devm_ad_sd_setup_buffer_and_trigger()
    821  rx_dma, IIO_BUFFER_DIRECTION_IN);  in devm_ad_sd_setup_buffer_and_trigger()

ad7944.c
    810  struct dma_chan *rx_dma;  in ad7944_probe()  local
    831  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev,  in ad7944_probe()
    833  if (IS_ERR(rx_dma))  in ad7944_probe()
    834  return dev_err_probe(dev, PTR_ERR(rx_dma),  in ad7944_probe()
    846  indio_dev, rx_dma, IIO_BUFFER_DIRECTION_IN);  in ad7944_probe()

ad4000.c
    854  struct dma_chan *rx_dma;  in ad4000_spi_offload_setup()  local
    868  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev, st->offload);  in ad4000_spi_offload_setup()
    869  if (IS_ERR(rx_dma))  in ad4000_spi_offload_setup()
    870  return dev_err_probe(dev, PTR_ERR(rx_dma),  in ad4000_spi_offload_setup()
    873  ret = devm_iio_dmaengine_buffer_setup_with_handle(dev, indio_dev, rx_dma,  in ad4000_spi_offload_setup()

ad4695.c
   1696  struct dma_chan *rx_dma;  in ad4695_probe_spi_offload()  local
   1743  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev, st->offload);  in ad4695_probe_spi_offload()
   1744  if (IS_ERR(rx_dma))  in ad4695_probe_spi_offload()
   1745  return dev_err_probe(dev, PTR_ERR(rx_dma),  in ad4695_probe_spi_offload()
   1789  rx_dma, IIO_BUFFER_DIRECTION_IN);  in ad4695_probe_spi_offload()

ad7380.c
   1821  struct dma_chan *rx_dma;  in ad7380_probe_spi_offload()  local
   1850  rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev, st->offload);  in ad7380_probe_spi_offload()
   1851  if (IS_ERR(rx_dma))  in ad7380_probe_spi_offload()
   1852  return dev_err_probe(dev, PTR_ERR(rx_dma),  in ad7380_probe_spi_offload()
   1856  rx_dma, IIO_BUFFER_DIRECTION_IN);  in ad7380_probe_spi_offload()
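All six IIO ADC drivers above share the same setup step from the SPI offload framework: request the offload engine's RX data stream as a DMA channel and attach it to the device's IIO buffer as an input stream. A hedged sketch of that step follows; the three API calls appear verbatim in the hits, while the helper name (demo_adc_setup_offload_rx) and the header paths are assumptions.

/*
 * Header paths are assumptions based on the mainline SPI-offload and IIO
 * dmaengine-buffer APIs; adjust to the tree being built against.
 */
#include <linux/dev_printk.h>
#include <linux/err.h>
#include <linux/iio/buffer.h>
#include <linux/iio/buffer-dmaengine.h>
#include <linux/iio/iio.h>
#include <linux/spi/offload/consumer.h>

static int demo_adc_setup_offload_rx(struct device *dev,
                                     struct iio_dev *indio_dev,
                                     struct spi_offload *offload)
{
    struct dma_chan *rx_dma;

    /* The offload engine's RX data stream is exposed as a DMA channel. */
    rx_dma = devm_spi_offload_rx_stream_request_dma_chan(dev, offload);
    if (IS_ERR(rx_dma))
        return dev_err_probe(dev, PTR_ERR(rx_dma),
                             "failed to get offload RX DMA channel\n");

    /* Attach it to the IIO buffer so captured samples stream straight in. */
    return devm_iio_dmaengine_buffer_setup_with_handle(dev, indio_dev, rx_dma,
                                                       IIO_BUFFER_DIRECTION_IN);
}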
/linux/drivers/net/ethernet/apple/

bmac.c
     62  volatile struct dbdma_regs __iomem *rx_dma;  member
    224  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_enable_and_reset_chip()
    401  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_start_chip()
    472  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_suspend()
    603  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_init_rx_ring()
    673  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_rxdma_intr()
   1251  bp->rx_dma = ioremap(macio_resource_start(mdev, 2), macio_resource_len(mdev, 2));  in bmac_probe()
   1252  if (!bp->rx_dma)  in bmac_probe()
   1304  iounmap(bp->rx_dma);  in bmac_probe()
   1332  volatile struct dbdma_regs __iomem *rd = bp->rx_dma;  in bmac_close()
    [all...]

mace.c
     48  volatile struct dbdma_regs __iomem *rx_dma;  member
    188  mp->rx_dma = ioremap(macio_resource_start(mdev, 2), 0x1000);  in mace_probe()
    189  if (mp->rx_dma == NULL) {  in mace_probe()
    262  iounmap(mp->rx_dma);  in mace_probe()
    292  iounmap(mp->rx_dma);  in mace_remove()
    435  volatile struct dbdma_regs __iomem *rd = mp->rx_dma;  in mace_open()
    505  volatile struct dbdma_regs __iomem *rd = mp->rx_dma;  in mace_close()
    812  volatile struct dbdma_regs __iomem *rd = mp->rx_dma;  in mace_tx_timeout()
    881  volatile struct dbdma_regs __iomem *rd = mp->rx_dma;  in mace_rxdma_intr()
/linux/drivers/net/ethernet/agere/

et131x.c
   1538  struct rxdma_regs __iomem *rx_dma = &adapter->regs->rxdma;  in et131x_config_rx_dma_regs()  local
   1549  writel(upper_32_bits(rx_local->rx_status_bus), &rx_dma->dma_wb_base_hi);  in et131x_config_rx_dma_regs()
   1550  writel(lower_32_bits(rx_local->rx_status_bus), &rx_dma->dma_wb_base_lo);  in et131x_config_rx_dma_regs()
   1555  writel(upper_32_bits(rx_local->ps_ring_physaddr), &rx_dma->psr_base_hi);  in et131x_config_rx_dma_regs()
   1556  writel(lower_32_bits(rx_local->ps_ring_physaddr), &rx_dma->psr_base_lo);  in et131x_config_rx_dma_regs()
   1557  writel(rx_local->psr_entries - 1, &rx_dma->psr_num_des);  in et131x_config_rx_dma_regs()
   1558  writel(0, &rx_dma->psr_full_offset);  in et131x_config_rx_dma_regs()
   1560  psr_num_des = readl(&rx_dma->psr_num_des) & ET_RXDMA_PSR_NUM_DES_MASK;  in et131x_config_rx_dma_regs()
   1562  &rx_dma->psr_min_des);  in et131x_config_rx_dma_regs()
   1578  num_des = &rx_dma  in et131x_config_rx_dma_regs()
   2126  struct rxdma_regs __iomem *rx_dma = &adapter->regs->rxdma;  in nic_return_rfd()  local
    [all...]
/linux/drivers/net/fddi/

defza.c
    425  writel_o((fp->rx_dma[i] + 0x1000) >> 9,  in fza_rx_init()
    427  writel_o(fp->rx_dma[i] >> 9 | FZA_RING_OWN_FZA,  in fza_rx_init()
    693  dma = fp->rx_dma[i];  in fza_rx()
    767  fp->rx_dma[i] = dma;  in fza_rx()
   1173  dma_unmap_single(fp->bdev, fp->rx_dma[i],  in fza_open()
   1177  fp->rx_dma[i] = 0;  in fza_open()
   1183  fp->rx_dma[i] = dma;  in fza_open()
   1256  dma_unmap_single(fp->bdev, fp->rx_dma[i],  in fza_close()
   1259  fp->rx_dma[i] = 0;  in fza_close()
/linux/drivers/atm/

eni.h
     81  void __iomem *rx_dma;  /* RX DMA queue */  member
/linux/drivers/tty/serial/

msm_serial.c
    187  struct msm_dma rx_dma;  member
    298  dma = &msm_port->rx_dma;  in msm_release_dma()
    365  dma = &msm_port->rx_dma;  in msm_request_rx_dma()
    564  struct msm_dma *dma = &msm_port->rx_dma;  in msm_complete_rx_dma()
    622  struct msm_dma *dma = &msm_port->rx_dma;  in msm_start_rx_dma()
    709  struct msm_dma *dma = &msm_port->rx_dma;  in msm_stop_rx()
    956  struct msm_dma *dma = &msm_port->rx_dma;  in msm_uart_irq()
   1274  struct msm_dma *dma = &msm_port->rx_dma;  in msm_set_termios()
/linux/drivers/net/can/peak_canfd/

peak_pciefd_main.c
    315  struct pciefd_rx_dma *rx_dma = priv->rx_dma_vaddr;  in pciefd_irq_handler()  local
    322  priv->irq_status = le32_to_cpu(rx_dma->irq_status);  in pciefd_irq_handler()
    330  rx_dma->msg,  in pciefd_irq_handler()
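peak_pciefd_main.c has the board DMA its interrupt status and RX messages into a host-visible area (rx_dma_vaddr) and reads the little-endian status back with le32_to_cpu() in the IRQ handler. A rough sketch of that idea; the struct layout and every name here are invented for illustration and do not match the PEAK hardware.

#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Invented layout of a device-written RX DMA area, not the PEAK one. */
struct demo_rx_dma {
    __le32 irq_status;   /* written by the board via DMA */
    __le32 sys_time;
    u8 msg[];            /* message stream follows */
};

struct demo_priv {
    struct demo_rx_dma *rx_dma_vaddr;   /* coherent host mapping */
    u32 irq_status;
};

static irqreturn_t demo_irq_handler(int irq, void *arg)
{
    struct demo_priv *priv = arg;
    struct demo_rx_dma *rx_dma = priv->rx_dma_vaddr;

    /* Snapshot the little-endian status the device DMA'd into host memory. */
    priv->irq_status = le32_to_cpu(rx_dma->irq_status);
    if (!priv->irq_status)
        return IRQ_NONE;

    /* ... walk the messages in rx_dma->msg here ... */

    return IRQ_HANDLED;
}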
/linux/drivers/net/ethernet/sis/

sis190.c
    275  dma_addr_t rx_dma;  member
    891  SIS_W32(RxDescStartAddr, tp->rx_dma);  in sis190_hw_start()
   1081  &tp->rx_dma, GFP_KERNEL);  in sis190_open()
   1104  tp->rx_dma);  in sis190_open()
   1172  tp->rx_dma);  in sis190_close()
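The sis190.c hits at lines 1081, 1104 and 1172 are the tails of dma_alloc_coherent()/dma_free_coherent() calls: the receive descriptor ring lives in coherent DMA memory, and the bus address returned in tp->rx_dma is programmed into the chip's RxDescStartAddr register at start-up. A sketch of that allocation pattern, with an invented descriptor layout and register offset:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/io.h>
#include <linux/kernel.h>

#define DEMO_NUM_RX_DESC  64
#define DEMO_RX_DESC_REG  0x04   /* invented register offset */

/* Invented descriptor layout, not the SiS one. */
struct demo_rx_desc {
    __le32 status;
    __le32 size;
    __le32 addr;
    __le32 reserved;
};

struct demo_priv {
    struct device *dev;
    void __iomem *mmio;
    struct demo_rx_desc *rx_ring;   /* CPU view of the descriptor ring */
    dma_addr_t rx_dma;              /* bus address programmed into the NIC */
};

static int demo_alloc_rx_ring(struct demo_priv *tp)
{
    size_t size = DEMO_NUM_RX_DESC * sizeof(struct demo_rx_desc);

    tp->rx_ring = dma_alloc_coherent(tp->dev, size, &tp->rx_dma, GFP_KERNEL);
    if (!tp->rx_ring)
        return -ENOMEM;

    /* Tell the hardware where the ring starts (32-bit register assumed). */
    writel(lower_32_bits(tp->rx_dma), tp->mmio + DEMO_RX_DESC_REG);
    return 0;
}

static void demo_free_rx_ring(struct demo_priv *tp)
{
    size_t size = DEMO_NUM_RX_DESC * sizeof(struct demo_rx_desc);

    dma_free_coherent(tp->dev, size, tp->rx_ring, tp->rx_dma);
    tp->rx_ring = NULL;
}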
/linux/drivers/net/ethernet/natsemi/

natsemi.c
    545  dma_addr_t rx_dma[RX_RING_SIZE];  member
   1939  np->rx_dma[entry] = dma_map_single(&np->pci_dev->dev,  in refill_rx()
   1942  if (dma_mapping_error(&np->pci_dev->dev, np->rx_dma[entry])) {  in refill_rx()
   1947  np->rx_ring[entry].addr = cpu_to_le32(np->rx_dma[entry]);  in refill_rx()
   2033  dma_unmap_single(&np->pci_dev->dev, np->rx_dma[i],  in drain_rx()
   2358  np->rx_dma[entry],  in netdev_rx()
   2365  np->rx_dma[entry],  in netdev_rx()
   2370  np->rx_dma[entry],  in netdev_rx()
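natsemi.c keeps one dma_addr_t per receive ring slot: refill_rx() maps a fresh skb, stores the handle in rx_dma[entry], and writes the little-endian bus address into the descriptor, while drain_rx() and netdev_rx() reuse the same handle for unmapping and syncing. A sketch of that refill step, with an invented ring layout (demo_rx_desc, demo_nic) and buffer size:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

#define DEMO_RX_RING_SIZE  32
#define DEMO_RX_BUF_SZ     1536

/* Invented descriptor layout; only the addr field matters for the sketch. */
struct demo_rx_desc {
    __le32 next;
    __le32 cmd_status;
    __le32 addr;   /* bus address of the receive buffer */
};

struct demo_nic {
    struct net_device *ndev;
    struct device *dmadev;                        /* &pci_dev->dev in natsemi */
    struct demo_rx_desc *rx_ring;
    struct sk_buff *rx_skbuff[DEMO_RX_RING_SIZE]; /* CPU-side buffers */
    dma_addr_t rx_dma[DEMO_RX_RING_SIZE];         /* one DMA handle per slot */
};

/* Give one empty ring slot a freshly mapped receive buffer. */
static int demo_refill_rx_slot(struct demo_nic *np, unsigned int entry)
{
    struct sk_buff *skb = netdev_alloc_skb(np->ndev, DEMO_RX_BUF_SZ);

    if (!skb)
        return -ENOMEM;

    np->rx_dma[entry] = dma_map_single(np->dmadev, skb->data,
                                       DEMO_RX_BUF_SZ, DMA_FROM_DEVICE);
    if (dma_mapping_error(np->dmadev, np->rx_dma[entry])) {
        dev_kfree_skb_any(skb);
        return -ENOMEM;
    }

    np->rx_skbuff[entry] = skb;
    /* The descriptor expects the bus address in little-endian form. */
    np->rx_ring[entry].addr = cpu_to_le32(np->rx_dma[entry]);
    return 0;
}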