Lines matching +full:prefetch +full:-dma in drivers/net/wireless/mediatek/mt76/mt7915/dma.c
// SPDX-License-Identifier: ISC
...
#include "../dma.h"
In mt7915_init_tx_queues():

        struct mt7915_dev *dev = phy->dev;
        ...
        if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
                if (is_mt798x(&dev->mt76))
                ...
                idx -= MT_TXQ_ID(0);
                wed = &dev->mt76.mmio.wed;
        }
        ...
        return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, ring_base,
                                          wed, MT_WED_Q_TX(idx));
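/* Editor's note: when MediaTek's Wireless Ethernet Dispatch (WED) block is
 * active, the ring base is rebased and the queue index normalized
 * (idx -= MT_TXQ_ID(0)) before being handed, together with the wed pointer,
 * to the shared connac helper that actually allocates the TX ring. */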
In mt7915_poll_tx():
        ...
        mt76_connac_tx_cleanup(&dev->mt76);
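/* Editor's note: mt7915_poll_tx() is the TX NAPI poll callback (registered
 * with netif_napi_add_tx() in mt7915_dma_init() below); completed frames are
 * reaped through the common mt76_connac_tx_cleanup() helper. */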
In mt7915_dma_config():

#define Q_CONFIG(q, wfdma, int, id) do {                \
                if (wfdma)                              \
                        dev->wfdma_mask |= (1 << (q));  \
                dev->q_int_mask[(q)] = int;             \
                dev->q_id[(q)] = id;                    \
        } while (0)
        ...
        if (is_mt7915(&dev->mt76)) {
        ...
        if (is_mt7916(&dev->mt76) && mtk_wed_device_active(&dev->mt76.mmio.wed)) {
        ...
        if (dev->hif2)
        ...
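/* Editor's note: this helper fills the per-queue lookup tables used by the
 * IRQ and DMA code: a bit in dev->wfdma_mask, the interrupt mask in
 * dev->q_int_mask[], and the hardware queue id in dev->q_id[]. Distinct
 * mappings are installed for mt7915, for mt7916 with WED active, and for the
 * second host interface when dev->hif2 is present. */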
In __mt7915_dma_prefetch():

#define PREFETCH(_base, _depth) ((_base) << 16 | (_depth))
        ...
        /* prefetch SRAM wrapping boundary for tx/rx ring. */
        mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x0, 0x4));
        mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x40, 0x4));
        mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x80, 0x4));
        mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0xc0, 0x4));
        mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x100, 0x4));
        mt76_wr(dev, ... + ofs,
                PREFETCH(0x140, 0x4));
        mt76_wr(dev, ... + ofs,
                PREFETCH(0x180, 0x4));
        if (!is_mt7915(&dev->mt76)) {
                mt76_wr(dev, ... + ofs,
                        PREFETCH(0x1c0, 0x4));
                ...
        }
        mt76_wr(dev, ... + ofs,
                PREFETCH(0x1c0 + base, 0x4));
        mt76_wr(dev, ... + ofs,
                PREFETCH(0x200 + base, 0x4));
        mt76_wr(dev, ... + ofs,
                PREFETCH(0x240 + base, 0x4));
        ...
        if (is_mt7915(&dev->mt76)) {
                mt76_wr(dev, ... + ofs,
                        PREFETCH(0x140, 0x0));
                mt76_wr(dev, ... + ofs,
                        PREFETCH(0x200 + base, 0x0));
                mt76_wr(dev, ... + ofs,
                        PREFETCH(0x280 + base, 0x0));
        }
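/* Editor's note: PREFETCH(_base, _depth) packs an SRAM base address into
 * bits 31:16 and a prefetch depth into the low word; for example,
 * PREFETCH(0x1c0, 0x4) evaluates to 0x01c00004. The bases advance in 0x40
 * steps so each MCU/TX/RX ring gets a disjoint prefetch window, and on
 * non-mt7915 chips the later RX windows are shifted up by `base` to make
 * room for the extra write done in the !is_mt7915() branch. */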
In mt7915_dma_prefetch():
        ...
        if (dev->hif2)
                __mt7915_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0));
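/* Editor's note: on dual-PCIe (hif2) boards the identical prefetch layout is
 * programmed a second time, offset by the distance between the PCIe1 and
 * PCIe0 WFDMA register blocks: MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0). */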
In mt7915_dma_disable():

        struct mt76_dev *mdev = &dev->mt76;
        ...
        if (dev->hif2)
                hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
        ...
        if (dev->hif2) {
        ...
        if (dev->hif2) {
        ...
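/* Editor's note: the same hif1_ofs idiom recurs in mt7915_dma_disable(),
 * mt7915_dma_start(), mt7915_dma_enable() and mt7915_dma_init() below: every
 * WFDMA register access is repeated at the PCIe1 offset whenever dev->hif2
 * is set. */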
In mt7915_dma_start():

        struct mt76_dev *mdev = &dev->mt76;
        ...
        if (dev->hif2)
                hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
        ...
        if (dev->hif2) {
        ...
        if (!dev->phy.mt76->band_idx)
        ...
        if (dev->dbdc_support || dev->phy.mt76->band_idx)
        ...
        if (mtk_wed_device_active(&dev->mt76.mmio.wed) && wed_reset) {
                ...
                if (!is_mt798x(&dev->mt76))
                ...
                mtk_wed_device_start(&dev->mt76.mmio.wed, wed_irq_mask);
        }
        ...
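/* Editor's note: interrupt enabling here is per band: band 0 bits are set
 * unless the PHY lives on band 1, and band 1 bits are set for DBDC devices
 * or band-1-only PHYs. When WED is active and has just been reset, the
 * resulting IRQ mask is also handed to the WED block via
 * mtk_wed_device_start(); the is_mt798x() check selects a chip-specific path
 * for programming that mask. */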
In mt7915_dma_enable():

        struct mt76_dev *mdev = &dev->mt76;
        ...
        if (dev->hif2)
                hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

        /* reset dma idx */
        ...
        if (dev->hif2) {
        ...
        if (dev->hif2) {
        ...
        if (dev->hif2) {
        ...
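/* Editor's note: before the engines are re-enabled, the "reset dma idx" step
 * resets the ring index registers so hardware and software agree on the ring
 * position; as elsewhere, each write is mirrored at hif1_ofs for the second
 * PCIe interface. */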
In mt7915_dma_init():

        struct mt76_dev *mdev = &dev->mt76;
        ...
        mt76_dma_attach(&dev->mt76);

        if (dev->hif2)
                hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
        ...
        if (mtk_wed_device_active(&mdev->mmio.wed)) {
        ...
        ret = mt7915_init_tx_queues(&dev->phy,
                                    MT_TXQ_ID(dev->phy.mt76->band_idx),
                                    ...);
        ...
        ret = mt7915_init_tx_queues(phy2,
                                    MT_TXQ_ID(phy2->mt76->band_idx),
                                    ...);
        ...
        ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, ...);
        ...
        ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, ...);
        ...
        ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, ...);
        ...
        ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], ...);
        ...
        if (mtk_wed_device_active(&mdev->mmio.wed) && is_mt7915(mdev)) {
                ...
                mdev->q_rx[MT_RXQ_MCU_WA].flags = MT_WED_Q_TXFREE;
                mdev->q_rx[MT_RXQ_MCU_WA].wed = &mdev->mmio.wed;
        }
        ...
        ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], ...);
        ...
        if (!dev->phy.mt76->band_idx) {
                if (mtk_wed_device_active(&mdev->mmio.wed) &&
                    mtk_wed_get_rx_capa(&mdev->mmio.wed)) {
                        mdev->q_rx[MT_RXQ_MAIN].flags =
                                MT_WED_Q_RX(MT7915_RXQ_BAND0);
                        dev->mt76.rx_token_size += MT7915_RX_RING_SIZE;
                        mdev->q_rx[MT_RXQ_MAIN].wed = &mdev->mmio.wed;
                }

                ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], ...);
        ...
        }
        ...
        if (mtk_wed_device_active(&mdev->mmio.wed)) {
                mdev->q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE;
                mdev->q_rx[MT_RXQ_MAIN_WA].wed = &mdev->mmio.wed;
        }
        ...
        ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], ...);
        ...
        if (dev->dbdc_support || dev->phy.mt76->band_idx) {
                if (mtk_wed_device_active(&mdev->mmio.wed) &&
                    mtk_wed_get_rx_capa(&mdev->mmio.wed)) {
                        mdev->q_rx[MT_RXQ_BAND1].flags =
                                MT_WED_Q_RX(MT7915_RXQ_BAND1);
                        dev->mt76.rx_token_size += MT7915_RX_RING_SIZE;
                        mdev->q_rx[MT_RXQ_BAND1].wed = &mdev->mmio.wed;
                }
        ...
                ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], ...);
        ...
                ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], ...);
        ...
        }
        ...
        netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
                          mt7915_poll_tx);
        napi_enable(&dev->mt76.tx_napi);
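/* Editor's note: the allocation order in mt7915_dma_init() is: attach the
 * generic mt76 DMA ops, create the per-band TX data queues, the three MCU TX
 * queues (WM command, WA command, firmware download), then the RX queues.
 * With WED active, the WA event rings (MT_RXQ_MCU_WA / MT_RXQ_MAIN_WA) are
 * flagged MT_WED_Q_TXFREE and, when WED RX is supported, the data rings get
 * MT_WED_Q_RX flags plus an enlarged rx_token_size so their buffers can be
 * tracked by the WED block. The TX NAPI instance is registered and enabled
 * last. */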
In mt7915_dma_reset():

        struct mt76_phy *mphy_ext = dev->mt76.phys[MT_BAND1];
        struct mtk_wed_device *wed = &dev->mt76.mmio.wed;
        ...
        for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++) {
                mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);
                if (mphy_ext)
                        mt76_queue_tx_cleanup(dev, mphy_ext->q_tx[i], true);
        }

        for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
                mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

        mt76_for_each_q_rx(&dev->mt76, i)
                mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);
        ...
        mt76_dma_wed_reset(&dev->mt76);
        ...
                mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]);
                if (mphy_ext)
                        mt76_dma_reset_tx_queue(&dev->mt76, mphy_ext->q_tx[i]);
        ...
                mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

        mt76_for_each_q_rx(&dev->mt76, i) {
                if (mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i]))
                        continue;

                mt76_queue_reset(dev, &dev->mt76.q_rx[i]);
        }

        mt76_tx_status_check(&dev->mt76, true);

        mt76_for_each_q_rx(&dev->mt76, i)
        ...
        if (mtk_wed_device_active(wed) && is_mt7915(&dev->mt76))
        ...
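/* Editor's note: mt7915_dma_reset() drains first and resets second: all TX
 * data, MCU and RX queues are cleaned up, WED state is reset via
 * mt76_dma_wed_reset(), and only then are the hardware queues reset. RX
 * rings operating in WED TX-free mode are skipped by the reset loop
 * (mt76_queue_is_wed_tx_free()), and pending TX status is flushed with
 * mt76_tx_status_check(). */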
In mt7915_dma_cleanup():
        ...
        mt76_dma_cleanup(&dev->mt76);