Lines Matching defs:tdma
130 void (*set_global_pg_config)(struct tegra_adma *tdma);
164 struct tegra_adma *tdma;
207 static inline void tdma_write(struct tegra_adma *tdma, u32 reg, u32 val)
209 writel(val, tdma->base_addr + tdma->cdata->global_reg_offset + reg);
212 static inline u32 tdma_read(struct tegra_adma *tdma, u32 reg)
214 return readl(tdma->base_addr + tdma->cdata->global_reg_offset + reg);
217 static inline void tdma_ch_global_write(struct tegra_adma *tdma, u32 reg, u32 val)
219 writel(val, tdma->ch_base_addr + tdma->cdata->global_reg_offset + reg);
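
The three accessors above fold the per-chip global_reg_offset from the chip data into every global-register access at a single point, so one driver body serves SoCs that place the global block at different offsets inside the mapped region. A minimal userspace mimic of the pattern (all names and the 0xc00 offset here are illustrative, not taken from the driver):

	#include <stdint.h>
	#include <stdio.h>

	struct chip_data { uint32_t global_reg_offset; };
	struct adma {
		uint32_t *base;                 /* stands in for the ioremapped base_addr */
		const struct chip_data *cdata;
	};

	/* mirrors tdma_write(): the per-chip offset is applied in one place only */
	static inline void global_write(struct adma *a, uint32_t reg, uint32_t val)
	{
		a->base[(a->cdata->global_reg_offset + reg) / 4] = val;
	}

	int main(void)
	{
		static uint32_t regs[1024];
		struct chip_data cd = { .global_reg_offset = 0xc00 }; /* illustrative */
		struct adma a = { .base = regs, .cdata = &cd };

		global_write(&a, 0x0, 1);  /* e.g. a global command register */
		printf("word written at byte offset 0x%x\n", cd.global_reg_offset);
		return 0;
	}
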
245 return tdc->tdma->dev;
263 static void tegra186_adma_global_page_config(struct tegra_adma *tdma)
269 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_CHGRP, 0);
270 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_RX_REQ, 0);
271 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_TX_REQ, 0);
272 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_CHGRP + (tdma->ch_page_no * 0x4), 0xff);
273 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_RX_REQ + (tdma->ch_page_no * 0x4), 0x1ffffff);
274 tdma_write(tdma, TEGRA186_ADMA_GLOBAL_PAGE_TX_REQ + (tdma->ch_page_no * 0x4), 0xffffff);
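
tegra186_adma_global_page_config() hands the ADMA resources to the page this instance owns: the page-0 defaults are cleared, then the channel-group mask (0xff, 8 groups), RX request mask (0x1ffffff, 25 lines) and TX request mask (0xffffff, 24 lines) are written at a 4-byte stride per page. A userspace sketch of the same arithmetic, with placeholder register offsets rather than the real TEGRA186_ADMA_GLOBAL_PAGE_* values:

	#include <stdint.h>
	#include <stdio.h>

	#define PAGE_CHGRP   0x00  /* placeholder, not the real offset */
	#define PAGE_RX_REQ  0x10  /* placeholder */
	#define PAGE_TX_REQ  0x20  /* placeholder */

	static uint32_t regs[64];

	static void wr(uint32_t off, uint32_t val) { regs[off / 4] = val; }

	static void page_config(uint32_t page)
	{
		wr(PAGE_CHGRP, 0);                        /* drop page-0 ownership */
		wr(PAGE_RX_REQ, 0);
		wr(PAGE_TX_REQ, 0);
		wr(PAGE_CHGRP + page * 0x4, 0xff);        /* 8 channel groups */
		wr(PAGE_RX_REQ + page * 0x4, 0x1ffffff);  /* 25 RX request lines */
		wr(PAGE_TX_REQ + page * 0x4, 0xffffff);   /* 24 TX request lines */
	}

	int main(void)
	{
		page_config(2);
		printf("CHGRP for page 2 at byte 0x%x: 0x%x\n",
		       PAGE_CHGRP + 2 * 0x4, regs[(PAGE_CHGRP + 2 * 0x4) / 4]);
		return 0;
	}
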
277 static void tegra264_adma_global_page_config(struct tegra_adma *tdma)
279 u32 global_page_offset = tdma->ch_page_no * TEGRA264_ADMA_GLOBAL_PAGE_OFFSET;
282 if (tdma->ch_page_no) {
283 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_CHGRP_0, 0);
284 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_CHGRP_1, 0);
285 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_RX_REQ_0, 0);
286 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_RX_REQ_1, 0);
287 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_TX_REQ_0, 0);
288 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_TX_REQ_1, 0);
292 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_CHGRP_0 + global_page_offset, 0xffffffff);
293 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_CHGRP_1 + global_page_offset, 0xffffffff);
294 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_RX_REQ_0 + global_page_offset, 0xffffffff);
295 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_RX_REQ_1 + global_page_offset, 0x1);
296 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_TX_REQ_0 + global_page_offset, 0xffffffff);
297 tdma_write(tdma, TEGRA264_ADMA_GLOBAL_PAGE_TX_REQ_1 + global_page_offset, 0x1);
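
The Tegra264 variant widens each mask to a _0/_1 register pair (0xffffffff plus 0x1, i.e. 33 RX and 33 TX request bits) and spaces pages by TEGRA264_ADMA_GLOBAL_PAGE_OFFSET instead of 4 bytes; note that the page-0 defaults are cleared only when this instance claims a non-zero page. A compressed sketch under the same caveat, with placeholder offsets and stride:

	#include <stdint.h>
	#include <stdio.h>

	#define PAGE_STRIDE 0x100  /* stand-in for TEGRA264_ADMA_GLOBAL_PAGE_OFFSET */

	static uint32_t regs[256];

	static void wr(uint32_t off, uint32_t val) { regs[off / 4] = val; }

	static void page_config_264(uint32_t page)
	{
		uint32_t off = page * PAGE_STRIDE;

		if (page) {                     /* strip page-0 defaults first */
			wr(0x00, 0); wr(0x04, 0);   /* CHGRP_0 / CHGRP_1 */
			wr(0x10, 0); wr(0x14, 0);   /* RX_REQ_0 / RX_REQ_1 */
			wr(0x20, 0); wr(0x24, 0);   /* TX_REQ_0 / TX_REQ_1 */
		}
		wr(0x00 + off, 0xffffffff); wr(0x04 + off, 0xffffffff);
		wr(0x10 + off, 0xffffffff); wr(0x14 + off, 0x1);  /* 33 RX bits */
		wr(0x20 + off, 0xffffffff); wr(0x24 + off, 0x1);  /* 33 TX bits */
	}

	int main(void)
	{
		page_config_264(1);
		printf("RX_REQ_1 for page 1: 0x%x\n", regs[(0x14 + PAGE_STRIDE) / 4]);
		return 0;
	}
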
300 static int tegra_adma_init(struct tegra_adma *tdma)
306 tdma_ch_global_write(tdma, tdma->cdata->global_int_clear, 0x1);
308 if (!tdma->base_addr)
312 tdma_write(tdma, ADMA_GLOBAL_SOFT_RESET, 0x1);
316 tdma->base_addr +
317 tdma->cdata->global_reg_offset +
323 if (tdma->cdata->set_global_pg_config)
324 tdma->cdata->set_global_pg_config(tdma);
327 tdma_write(tdma, ADMA_GLOBAL_CMD, 1);
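
tegra_adma_init() runs a fixed sequence: clear the pending global channel interrupt; then, only when the global region is mapped, issue the self-clearing ADMA_GLOBAL_SOFT_RESET, poll it back to zero (the driver uses a readl-based poll helper over the base + global_reg_offset address visible in the fragments above), apply the set_global_pg_config hook when the chip data provides one, and finally raise ADMA_GLOBAL_CMD. A bounded-poll mimic of that ordering, with fake hardware standing in for the MMIO side:

	#include <stdint.h>
	#include <stdio.h>

	#define SOFT_RESET 0x04  /* placeholder offsets */
	#define GLOBAL_CMD 0x00

	static unsigned int reset_reads;

	static uint32_t rd(uint32_t reg)
	{
		/* fake hardware: the reset bit self-clears after a few reads */
		return (reg == SOFT_RESET && reset_reads++ < 3) ? 1 : 0;
	}

	static void wr(uint32_t reg, uint32_t val) { (void)reg; (void)val; }

	static int adma_init(void)
	{
		int tries = 1000;

		wr(SOFT_RESET, 0x1);            /* kick the self-clearing reset */
		while (rd(SOFT_RESET) && --tries)
			;                       /* driver: a poll-with-timeout helper */
		if (!tries)
			return -1;              /* reset never completed */

		/* set_global_pg_config() hook would run here on multi-page parts */
		wr(GLOBAL_CMD, 1);              /* enable the controller */
		return 0;
	}

	int main(void)
	{
		printf("init: %d\n", adma_init());
		return 0;
	}
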
335 struct tegra_adma *tdma = tdc->tdma;
341 if (sreq_index > tdma->cdata->ch_req_max) {
342 dev_err(tdma->dev, "invalid DMA request\n");
348 if (test_and_set_bit(sreq_index, &tdma->tx_requests_reserved)) {
349 dev_err(tdma->dev, "DMA request reserved\n");
355 if (test_and_set_bit(sreq_index, &tdma->rx_requests_reserved)) {
356 dev_err(tdma->dev, "DMA request reserved\n");
362 dev_WARN(tdma->dev, "channel %s has invalid transfer type\n",
375 struct tegra_adma *tdma = tdc->tdma;
382 clear_bit(tdc->sreq_index, &tdma->tx_requests_reserved);
386 clear_bit(tdc->sreq_index, &tdma->rx_requests_reserved);
390 dev_WARN(tdma->dev, "channel %s has invalid transfer type\n",
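
The two groups of fragments above are the request-line lifecycle: a per-direction bitmap (tx_requests_reserved / rx_requests_reserved) guards each slave request number after the range check against ch_req_max, test_and_set_bit() claims a line atomically and reports a prior owner, and clear_bit() releases it on channel teardown. A single-threaded userspace mimic of the bookkeeping (the kernel primitives are atomic; this sketch is not):

	#include <stdio.h>
	#include <stdbool.h>

	static unsigned long tx_requests_reserved;

	static bool claim(unsigned int sreq)
	{
		unsigned long mask = 1UL << sreq;

		if (tx_requests_reserved & mask)
			return false;            /* already reserved elsewhere */
		tx_requests_reserved |= mask;    /* test_and_set_bit() does both steps */
		return true;
	}

	static void release(unsigned int sreq)
	{
		tx_requests_reserved &= ~(1UL << sreq);  /* clear_bit() equivalent */
	}

	int main(void)
	{
		printf("first claim of 4:  %d\n", claim(4));   /* 1 */
		printf("second claim of 4: %d\n", claim(4));   /* 0: reserved */
		release(4);
		printf("claim after free:  %d\n", claim(4));   /* 1 */
		return 0;
	}
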
458 tdma_ch_write(tdc, ADMA_CH_TC - tdc->tdma->cdata->ch_tc_offset_diff, ch_regs->tc);
460 tdma_ch_write(tdc, ADMA_CH_LOWER_SRC_ADDR - tdc->tdma->cdata->ch_tc_offset_diff,
462 tdma_ch_write(tdc, ADMA_CH_LOWER_TRG_ADDR - tdc->tdma->cdata->ch_tc_offset_diff,
465 if (!tdc->tdma->cdata->global_ch_fifo_base)
468 tdma_write(tdc->tdma, tdc->global_ch_fifo_offset, ch_regs->fifo_ctrl);
471 tdma_write(tdc->tdma, tdc->global_ch_config_offset, ch_regs->global_config);
486 tdc->tdma->cdata->ch_tc_offset_diff);
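
The ADMA_CH_TC / ADMA_CH_LOWER_SRC_ADDR / ADMA_CH_LOWER_TRG_ADDR accesses all subtract cdata->ch_tc_offset_diff: the register defines keep their original-layout offsets, and chips whose channel blocks were repacked supply a per-chip diff instead of a second set of defines. Illustrative arithmetic only; the values below are made up, not the driver's:

	#include <stdio.h>

	#define ADMA_CH_TC 0x44  /* hypothetical original-layout offset */

	int main(void)
	{
		unsigned int ch_tc_offset_diff = 0x4;  /* hypothetical per-chip diff */

		/* older parts set the diff to 0, so the define is used as-is */
		printf("TC lands at 0x%x (diff 0) or 0x%x (diff 0x4)\n",
		       ADMA_CH_TC - 0, ADMA_CH_TC - ch_tc_offset_diff);
		return 0;
	}
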
658 const struct tegra_adma_chip_data *cdata = tdc->tdma->cdata;
807 struct tegra_adma *tdma = ofdma->of_dma_data;
818 dev_err(tdma->dev, "DMA request must not be 0\n");
822 chan = dma_get_any_slave_channel(&tdma->dma_dev);
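
The fragments around line 807 show the #dma-cells contract: the single DT cell carries the slave request number, zero is rejected, and dma_get_any_slave_channel() hands out any free channel on this controller with the request recorded for later hardware setup. A kernel-style sketch assembled from those fragments; the args_count check and the to_tegra_adma_chan() container_of() helper follow the driver's usual structure but are part of the sketch, not the listing:

	static struct dma_chan *xlate_sketch(struct of_phandle_args *dma_spec,
					     struct of_dma *ofdma)
	{
		struct tegra_adma *tdma = ofdma->of_dma_data;
		struct tegra_adma_chan *tdc;
		struct dma_chan *chan;
		unsigned int sreq_index = dma_spec->args[0];

		if (dma_spec->args_count != 1)
			return NULL;

		if (!sreq_index) {
			dev_err(tdma->dev, "DMA request must not be 0\n");
			return NULL;
		}

		chan = dma_get_any_slave_channel(&tdma->dma_dev);
		if (!chan)
			return NULL;

		tdc = to_tegra_adma_chan(chan);   /* container_of() wrapper */
		tdc->sreq_index = sreq_index;

		return chan;
	}
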
834 struct tegra_adma *tdma = dev_get_drvdata(dev);
839 if (tdma->base_addr)
840 tdma->global_cmd = tdma_read(tdma, ADMA_GLOBAL_CMD);
842 if (!tdma->global_cmd)
845 for (i = 0; i < tdma->nr_channels; i++) {
846 tdc = &tdma->channels[i];
848 if (!tdc->tdma)
856 ch_reg->tc = tdma_ch_read(tdc, ADMA_CH_TC - tdma->cdata->ch_tc_offset_diff);
858 tdma->cdata->ch_tc_offset_diff);
860 tdma->cdata->ch_tc_offset_diff);
864 ch_reg->global_config = tdma_read(tdc->tdma, tdc->global_ch_config_offset);
866 if (!tdc->tdma->cdata->global_ch_fifo_base)
869 ch_reg->fifo_ctrl = tdma_read(tdc->tdma, tdc->global_ch_fifo_offset);
876 clk_disable_unprepare(tdma->ahub_clk);
883 struct tegra_adma *tdma = dev_get_drvdata(dev);
888 ret = clk_prepare_enable(tdma->ahub_clk);
893 if (tdma->base_addr) {
894 tdma_write(tdma, ADMA_GLOBAL_CMD, tdma->global_cmd);
895 if (tdma->cdata->set_global_pg_config)
896 tdma->cdata->set_global_pg_config(tdma);
899 if (!tdma->global_cmd)
902 for (i = 0; i < tdma->nr_channels; i++) {
903 tdc = &tdma->channels[i];
905 if (!tdc->tdma)
911 tdma_ch_write(tdc, ADMA_CH_TC - tdma->cdata->ch_tc_offset_diff, ch_reg->tc);
912 tdma_ch_write(tdc, ADMA_CH_LOWER_SRC_ADDR - tdma->cdata->ch_tc_offset_diff,
914 tdma_ch_write(tdc, ADMA_CH_LOWER_TRG_ADDR - tdma->cdata->ch_tc_offset_diff,
918 if (!tdc->tdma->cdata->global_ch_fifo_base)
921 tdma_write(tdc->tdma, tdc->global_ch_fifo_offset, ch_reg->fifo_ctrl);
924 tdma_write(tdc->tdma, tdc->global_ch_config_offset, ch_reg->global_config);
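
Taken together, the suspend and resume fragments implement a full context save/restore around clock gating: suspend snapshots ADMA_GLOBAL_CMD plus every live channel's transfer registers (channels without a tdc->tdma backpointer are masked-off and skipped) and returns early when the controller was already disabled; resume re-enables the clock, restores the global command and page routing first, then reloads each channel. A compressed userspace mimic of that ordering:

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	struct ch_regs { uint32_t cmd, tc, src, trg, fifo_ctrl, config; };

	struct ctx {
		uint32_t global_cmd;
		struct ch_regs ch[4];
	};

	static struct ctx hw;      /* stands in for the live registers */
	static struct ctx saved;   /* stands in for the driver's shadow copies */

	static void adma_suspend(void)
	{
		saved = hw;                       /* per-field tdma_ch_read()s in the driver */
		memset(&hw, 0, sizeof(hw));       /* clock gated: hardware state is lost */
	}

	static void adma_resume(void)
	{
		hw.global_cmd = saved.global_cmd; /* restore the global enable first */
		/* page routing would be re-programmed here on multi-page parts */
		for (int i = 0; i < 4; i++)
			hw.ch[i] = saved.ch[i];   /* then each channel's registers */
	}

	int main(void)
	{
		hw.global_cmd = 1;
		hw.ch[0].tc = 0x1000;
		adma_suspend();
		adma_resume();
		printf("restored tc: 0x%x\n", hw.ch[0].tc);
		return 0;
	}
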
1019 struct tegra_adma *tdma;
1029 tdma = devm_kzalloc(&pdev->dev,
1030 struct_size(tdma, channels, cdata->nr_channels),
1032 if (!tdma)
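
struct tegra_adma ends in a flexible array of channels, so probe makes one devm allocation sized with struct_size(), which multiplies and adds with overflow checking. The same layout in plain C (overflow checks elided; struct_size() is the kernel macro, mimicked here with sizeof arithmetic):

	#include <stdio.h>
	#include <stdlib.h>

	struct chan { int irq; };
	struct controller {
		unsigned int nr_channels;
		struct chan channels[];   /* flexible array member */
	};

	int main(void)
	{
		unsigned int n = 32;
		struct controller *c = calloc(1, sizeof(*c) + n * sizeof(c->channels[0]));

		if (!c)
			return 1;
		c->nr_channels = n;
		printf("one block, %u embedded channels\n", c->nr_channels);
		free(c);
		return 0;
	}
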
1035 tdma->dev = &pdev->dev;
1036 tdma->cdata = cdata;
1037 tdma->nr_channels = cdata->nr_channels;
1038 platform_set_drvdata(pdev, tdma);
1042 tdma->ch_base_addr = devm_ioremap_resource(&pdev->dev, res_page);
1043 if (IS_ERR(tdma->ch_base_addr))
1044 return PTR_ERR(tdma->ch_base_addr);
1062 tdma->ch_page_no = page_no - 1;
1063 tdma->base_addr = devm_ioremap_resource(&pdev->dev, res_base);
1064 if (IS_ERR(tdma->base_addr))
1065 return PTR_ERR(tdma->base_addr);
1071 tdma->base_addr = devm_ioremap_resource(&pdev->dev, res_base);
1072 if (IS_ERR(tdma->base_addr))
1073 return PTR_ERR(tdma->base_addr);
1078 tdma->ch_base_addr = tdma->base_addr + cdata->ch_base_offset;
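
Probe thus supports two address layouts: multi-page parts map the page-scoped channel region and the global region as separate resources, deriving ch_page_no from which page resource matched, while single-region parts map once and compute ch_base_addr from cdata->ch_base_offset. The single-region pointer arithmetic, with a made-up offset:

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		static uint8_t region[0x20000];    /* stands in for the ioremap */
		uint8_t *base_addr = region;
		uint32_t ch_base_offset = 0x10000; /* hypothetical cdata value */
		uint8_t *ch_base_addr = base_addr + ch_base_offset;

		printf("channels start at base + 0x%lx\n",
		       (unsigned long)(ch_base_addr - base_addr));
		return 0;
	}
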
1081 tdma->ahub_clk = devm_clk_get(&pdev->dev, "d_audio");
1082 if (IS_ERR(tdma->ahub_clk)) {
1084 return PTR_ERR(tdma->ahub_clk);
1087 tdma->dma_chan_mask = devm_kzalloc(&pdev->dev,
1088 BITS_TO_LONGS(tdma->nr_channels) * sizeof(unsigned long),
1090 if (!tdma->dma_chan_mask)
1094 bitmap_fill(tdma->dma_chan_mask, tdma->nr_channels);
1097 (u32 *)tdma->dma_chan_mask,
1098 BITS_TO_U32(tdma->nr_channels));
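
The channel mask is a long-granular bitmap sized with BITS_TO_LONGS(): bitmap_fill() defaults every channel to usable, and an optional DT mask property can overwrite it, read as 32-bit words straight into the bitmap storage (hence the (u32 *) cast above, which assumes the kernel's little-endian-compatible bitmap layout). A userspace mimic, with the DT read replaced by a constant:

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	#define BITS_PER_LONG (8 * sizeof(unsigned long))
	#define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

	int main(void)
	{
		unsigned int nr_channels = 32;
		unsigned long mask[BITS_TO_LONGS(32)];

		memset(mask, 0xff, sizeof(mask));        /* bitmap_fill(): all usable */

		uint32_t dt_mask = 0x0000ffff;           /* pretend property value */
		memcpy(mask, &dt_mask, sizeof(dt_mask)); /* u32 words into the bitmap */

		for (unsigned int i = 0; i < nr_channels; i++)
			if (!(mask[i / BITS_PER_LONG] & (1UL << (i % BITS_PER_LONG))))
				printf("channel %u masked off\n", i);
		return 0;
	}
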
1104 INIT_LIST_HEAD(&tdma->dma_dev.channels);
1105 for (i = 0; i < tdma->nr_channels; i++) {
1106 struct tegra_adma_chan *tdc = &tdma->channels[i];
1109 if (!test_bit(i, tdma->dma_chan_mask))
1112 tdc->chan_addr = tdma->ch_base_addr + (cdata->ch_reg_size * i);
1114 if (tdma->base_addr) {
1129 vchan_init(&tdc->vc, &tdma->dma_dev);
1131 tdc->tdma = tdma;
1140 ret = tegra_adma_init(tdma);
1144 dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask);
1145 dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask);
1146 dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask);
1148 tdma->dma_dev.dev = &pdev->dev;
1149 tdma->dma_dev.device_alloc_chan_resources =
1151 tdma->dma_dev.device_free_chan_resources =
1153 tdma->dma_dev.device_issue_pending = tegra_adma_issue_pending;
1154 tdma->dma_dev.device_prep_dma_cyclic = tegra_adma_prep_dma_cyclic;
1155 tdma->dma_dev.device_config = tegra_adma_slave_config;
1156 tdma->dma_dev.device_tx_status = tegra_adma_tx_status;
1157 tdma->dma_dev.device_terminate_all = tegra_adma_terminate_all;
1158 tdma->dma_dev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
1159 tdma->dma_dev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
1160 tdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
1161 tdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT;
1162 tdma->dma_dev.device_pause = tegra_adma_pause;
1163 tdma->dma_dev.device_resume = tegra_adma_resume;
1165 ret = dma_async_device_register(&tdma->dma_dev);
1172 tegra_dma_of_xlate, tdma);
1181 tdma->nr_channels);
1186 dma_async_device_unregister(&tdma->dma_dev);
1193 irq_dispose_mapping(tdma->channels[i].irq);
1200 struct tegra_adma *tdma = platform_get_drvdata(pdev);
1204 dma_async_device_unregister(&tdma->dma_dev);
1206 for (i = 0; i < tdma->nr_channels; ++i) {
1207 if (tdma->channels[i].irq)
1208 irq_dispose_mapping(tdma->channels[i].irq);