Lines Matching +full:prefetch +full:- +full:dma

1 // SPDX-License-Identifier: ISC
7 #include "../dma.h"
13 struct mt7996_dev *dev = phy->dev; in mt7996_init_tx_queues()
18 idx -= MT_TXQ_ID(0); in mt7996_init_tx_queues()
20 if (phy->mt76->band_idx == MT_BAND2) in mt7996_init_tx_queues()
26 return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, in mt7996_init_tx_queues()
36 mt76_connac_tx_cleanup(&dev->mt76); in mt7996_poll_tx()
47 dev->q_wfdma_mask |= (1 << (q)); \ in mt7996_dma_config()
48 dev->q_int_mask[(q)] = int; \ in mt7996_dma_config()
49 dev->q_id[(q)] = id; \ in mt7996_dma_config()
64 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
74 if (dev->has_rro) { in mt7996_dma_config()
99 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
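
The q_wfdma_mask / q_int_mask / q_id assignments above are the body of a queue-configuration macro inside mt7996_dma_config(). Below is a minimal user-space sketch of that pattern, with struct mt7996_dev reduced to the three fields the fragment touches; the array size, the example values, and the exact macro signature are assumptions, and the driver's mask parameter (named `int` in the fragment) is renamed `int_mask` here for clarity.

#include <stdio.h>
#include <stdint.h>

#define NUM_Q 32

/* Reduced model of the device state touched by the fragment. */
struct mt7996_dev {
	uint32_t q_wfdma_mask;
	uint32_t q_int_mask[NUM_Q];
	uint32_t q_id[NUM_Q];
};

/* Record whether queue q belongs to the WFDMA engine, and map the
 * logical queue to its interrupt mask and hardware ring id. */
#define Q_CONFIG(dev, q, wfdma, int_mask, id) do {		\
		if (wfdma)					\
			(dev)->q_wfdma_mask |= (1 << (q));	\
		(dev)->q_int_mask[(q)] = (int_mask);		\
		(dev)->q_id[(q)] = (id);			\
	} while (0)

int main(void)
{
	struct mt7996_dev dev = { 0 };

	Q_CONFIG(&dev, 0, 1, 1u << 4, 18);	/* example values only */
	printf("wfdma_mask=%#x int_mask[0]=%#x id[0]=%u\n",
	       (unsigned)dev.q_wfdma_mask,
	       (unsigned)dev.q_int_mask[0],
	       (unsigned)dev.q_id[0]);
	return 0;
}
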
126 #define PREFETCH(_depth) (__mt7996_dma_prefetch_base(&base, (_depth))) in __mt7996_dma_prefetch() macro
127 /* prefetch SRAM wrapping boundary for tx/rx ring. */ in __mt7996_dma_prefetch()
128 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
129 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
130 mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
131 mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
132 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
133 mt76_wr(dev, MT_TXQ_EXT_CTRL(2) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
134 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
135 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
136 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
138 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2_WA : MT_RXQ_BAND1_WA; in __mt7996_dma_prefetch()
139 mt76_wr(dev, MT_RXQ_BAND1_CTRL(queue) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
141 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN) + ofs, PREFETCH(0x10)); in __mt7996_dma_prefetch()
143 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2 : MT_RXQ_BAND1; in __mt7996_dma_prefetch()
144 mt76_wr(dev, MT_RXQ_BAND1_CTRL(queue) + ofs, PREFETCH(0x10)); in __mt7996_dma_prefetch()
146 if (dev->has_rro) { in __mt7996_dma_prefetch()
148 PREFETCH(0x10)); in __mt7996_dma_prefetch()
150 PREFETCH(0x10)); in __mt7996_dma_prefetch()
152 PREFETCH(0x4)); in __mt7996_dma_prefetch()
154 PREFETCH(0x4)); in __mt7996_dma_prefetch()
156 PREFETCH(0x4)); in __mt7996_dma_prefetch()
158 PREFETCH(0x4)); in __mt7996_dma_prefetch()
160 PREFETCH(0x4)); in __mt7996_dma_prefetch()
162 #undef PREFETCH in __mt7996_dma_prefetch()
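
Each mt76_wr() above programs one ring's prefetch control register through PREFETCH(), which delegates to __mt7996_dma_prefetch_base() with a running `base` so consecutive rings get non-overlapping SRAM prefetch windows. Here is a sketch of how such a helper could work, assuming the register packs the base in the upper 16 bits and the depth in the lower bits, and that the base advances by depth * 16 SRAM units; both the packing and the scale factor are assumptions, not taken from the fragment.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical model of __mt7996_dma_prefetch_base(): return the register
 * value for the current ring (base in the high half, depth in the low
 * half) and advance the shared base so the next ring's prefetch window
 * starts right after this one.  The <<4 scaling is assumed. */
static uint32_t prefetch_base(uint16_t *base, uint8_t depth)
{
	uint32_t val = ((uint32_t)*base << 16) | depth;

	*base += (uint16_t)(depth << 4);
	return val;
}

int main(void)
{
	uint16_t base = 0;

	/* Mirrors the FWDL/WM/TXQ sequence above: two rings of depth 0x2,
	 * then one of depth 0x8. */
	printf("FWDL: %#010x\n", (unsigned)prefetch_base(&base, 0x2));
	printf("WM:   %#010x\n", (unsigned)prefetch_base(&base, 0x2));
	printf("TXQ0: %#010x\n", (unsigned)prefetch_base(&base, 0x8));
	return 0;
}
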
170 if (dev->hif2) in mt7996_dma_prefetch()
171 __mt7996_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0)); in mt7996_dma_prefetch()
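
When a second PCIe interface is present (dev->hif2), the whole prefetch sequence is replayed once more at a fixed offset, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0): the distance between the two identical WFDMA register banks. A toy model of that mirroring follows; the bank addresses and register/depth values are made up for illustration.

#include <stdio.h>
#include <stdint.h>

/* Bank bases are illustrative; in the driver they come from
 * MT_WFDMA0(0) and MT_WFDMA0_PCIE1(0). */
#define WFDMA0_BASE		0xd4000u
#define WFDMA0_PCIE1_BASE	0xd8000u

static void reg_write(uint32_t addr, uint32_t val)
{
	/* Stand-in for mt76_wr(). */
	printf("wr %#07x = %#x\n", (unsigned)addr, (unsigned)val);
}

/* Replay the same programming sequence against a register bank that
 * starts `ofs` bytes above the primary one, the way
 * __mt7996_dma_prefetch() takes an `ofs` argument. */
static void program_prefetch(uint32_t ofs)
{
	reg_write(0xd4600 + ofs, 0x2);	/* illustrative register/depth */
	reg_write(0xd4604 + ofs, 0x8);
}

int main(void)
{
	int has_hif2 = 1;

	program_prefetch(0);
	if (has_hif2)
		program_prefetch(WFDMA0_PCIE1_BASE - WFDMA0_BASE);
	return 0;
}
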
178 if (dev->hif2) in mt7996_dma_disable()
179 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_disable()
190 if (dev->hif2) { in mt7996_dma_disable()
209 if (dev->hif2) { in mt7996_dma_disable()
221 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_start()
225 if (dev->hif2) in mt7996_dma_start()
226 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_start()
243 if (dev->hif2) in mt7996_dma_start()
282 if (dev->hif2) in mt7996_dma_enable()
283 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_enable()
285 /* reset dma idx */ in mt7996_dma_enable()
287 if (dev->hif2) in mt7996_dma_enable()
295 if (dev->hif2) { in mt7996_dma_enable()
310 if (dev->hif2) in mt7996_dma_enable()
334 if (dev->hif2) { in mt7996_dma_enable()
359 if (dev->hif2) { in mt7996_dma_enable()
363 if (mtk_wed_device_active(&dev->mt76.mmio.wed) && in mt7996_dma_enable()
364 dev->has_rro) in mt7996_dma_enable()
378 struct mt76_dev *mdev = &dev->mt76; in mt7996_dma_rro_init()
383 mdev->q_rx[MT_RXQ_RRO_IND].flags = MT_WED_RRO_Q_IND; in mt7996_dma_rro_init()
384 mdev->q_rx[MT_RXQ_RRO_IND].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
385 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_RRO_IND], in mt7996_dma_rro_init()
393 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].flags = in mt7996_dma_rro_init()
395 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
396 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0], in mt7996_dma_rro_init()
406 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].flags = in mt7996_dma_rro_init()
408 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
409 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1], in mt7996_dma_rro_init()
420 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].flags = in mt7996_dma_rro_init()
422 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
423 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2], in mt7996_dma_rro_init()
432 irq_mask = mdev->mmio.irqmask | MT_INT_RRO_RX_DONE | in mt7996_dma_rro_init()
435 mtk_wed_device_start_hw_rro(&mdev->mmio.wed, irq_mask, false); in mt7996_dma_rro_init()
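
The rx-queue bring-up in mt7996_dma_rro_init() follows one pattern per queue: tag the queue with its WED role via `flags`, point it at the WED device, allocate the ring, then OR the new interrupt bits into the mask handed to mtk_wed_device_start_hw_rro(). A reduced user-space model of that pattern is sketched below; all types, flag values, and helper functions are stand-ins, not the mt76/mtk_wed API.

#include <stdio.h>
#include <stdint.h>

struct wed_device { uint32_t started_irq_mask; };

struct rx_queue {
	uint32_t flags;
	struct wed_device *wed;
	int ndesc;
};

#define WED_RRO_Q_IND	(1u << 0)	/* hypothetical flag value */
#define INT_RRO_RX_DONE	(1u << 8)	/* hypothetical irq bit */

static int queue_alloc(struct rx_queue *q, int ndesc)
{
	q->ndesc = ndesc;	/* the real helper also maps DMA memory */
	return 0;
}

static void wed_start_hw_rro(struct wed_device *wed, uint32_t irq_mask)
{
	wed->started_irq_mask = irq_mask;
	printf("hw rro started, irq mask %#x\n", (unsigned)irq_mask);
}

int main(void)
{
	struct wed_device wed = { 0 };
	struct rx_queue ind_q = { 0 };
	uint32_t irq_mask = 1u << 0;	/* pretend existing mmio.irqmask */

	/* Tag, attach, allocate: the per-queue pattern in the fragment. */
	ind_q.flags = WED_RRO_Q_IND;
	ind_q.wed = &wed;
	if (queue_alloc(&ind_q, 128))
		return 1;

	wed_start_hw_rro(&wed, irq_mask | INT_RRO_RX_DONE);
	return 0;
}
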
444 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_init()
445 struct mtk_wed_device *wed_hif2 = &dev->mt76.mmio.wed_hif2; in mt7996_dma_init()
452 mt76_dma_attach(&dev->mt76); in mt7996_dma_init()
454 if (dev->hif2) in mt7996_dma_init()
455 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_init()
460 ret = mt7996_init_tx_queues(&dev->phy, in mt7996_dma_init()
461 MT_TXQ_ID(dev->mphy.band_idx), in mt7996_dma_init()
469 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, in mt7996_dma_init()
477 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, in mt7996_dma_init()
485 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, in mt7996_dma_init()
493 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], in mt7996_dma_init()
502 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], in mt7996_dma_init()
512 dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0); in mt7996_dma_init()
513 dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed; in mt7996_dma_init()
516 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], in mt7996_dma_init()
525 if (mtk_wed_device_active(wed) && !dev->has_rro) { in mt7996_dma_init()
526 dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
527 dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed; in mt7996_dma_init()
530 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], in mt7996_dma_init()
541 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2], in mt7996_dma_init()
552 if (mtk_wed_device_active(wed_hif2) && !dev->has_rro) { in mt7996_dma_init()
553 dev->mt76.q_rx[MT_RXQ_BAND2_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
554 dev->mt76.q_rx[MT_RXQ_BAND2_WA].wed = wed_hif2; in mt7996_dma_init()
557 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2_WA], in mt7996_dma_init()
567 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], in mt7996_dma_init()
577 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], in mt7996_dma_init()
587 dev->has_rro) { in mt7996_dma_init()
589 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags = in mt7996_dma_init()
591 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed; in mt7996_dma_init()
592 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0], in mt7996_dma_init()
601 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
602 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed; in mt7996_dma_init()
604 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0], in mt7996_dma_init()
614 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].flags = in mt7996_dma_init()
616 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].wed = wed; in mt7996_dma_init()
617 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND2], in mt7996_dma_init()
627 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
628 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2; in mt7996_dma_init()
630 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2], in mt7996_dma_init()
644 netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi, in mt7996_dma_init()
646 napi_enable(&dev->mt76.tx_napi); in mt7996_dma_init()
655 struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1]; in mt7996_dma_reset()
656 struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2]; in mt7996_dma_reset()
657 u32 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_reset()
664 if (dev->hif2) in mt7996_dma_reset()
672 mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true); in mt7996_dma_reset()
674 mt76_queue_tx_cleanup(dev, phy2->q_tx[i], true); in mt7996_dma_reset()
676 mt76_queue_tx_cleanup(dev, phy3->q_tx[i], true); in mt7996_dma_reset()
680 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt7996_dma_reset()
682 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
683 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
685 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
691 if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2)) in mt7996_dma_reset()
692 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2); in mt7996_dma_reset()
694 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
695 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed); in mt7996_dma_reset()
698 mt76_wed_dma_reset(&dev->mt76); in mt7996_dma_reset()
702 mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]); in mt7996_dma_reset()
704 mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]); in mt7996_dma_reset()
706 mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]); in mt7996_dma_reset()
710 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt7996_dma_reset()
712 mt76_for_each_q_rx(&dev->mt76, i) { in mt7996_dma_reset()
713 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
714 if (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) || in mt7996_dma_reset()
715 mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i])) in mt7996_dma_reset()
718 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
721 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
723 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
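
The mt7996_dma_reset() fragment shows a three-phase ordering: drain every tx/mcu/rx queue, let the WED block reset the DMA engines it owns, then re-initialize the rings while skipping queues WED still controls (the mt76_queue_is_wed_rro()/mt76_queue_is_wed_tx_free() checks). The skeleton below captures that ordering with printf stubs; queue counts, the WED-active flag, and the ownership test are illustrative only.

#include <stdio.h>

#define NUM_TXQ	4
#define NUM_RXQ	3

static int wed_active = 1;

static void tx_cleanup(int q)     { printf("drain tx queue %d\n", q); }
static void rx_cleanup(int q)     { printf("drain rx queue %d\n", q); }
static void wed_dma_reset(void)   { printf("hand DMA reset to WED\n"); }
static void tx_reset(int q)       { printf("re-init tx queue %d\n", q); }
static void rx_reset(int q)       { printf("re-init rx queue %d\n", q); }
static int rx_is_wed_owned(int q) { return q == 0; }	/* e.g. an RRO ring */

int main(void)
{
	int i;

	/* Phase 1: drain all pending descriptors before touching rings. */
	for (i = 0; i < NUM_TXQ; i++)
		tx_cleanup(i);
	for (i = 0; i < NUM_RXQ; i++)
		rx_cleanup(i);

	/* Phase 2: let WED reset the DMA engines it owns. */
	if (wed_active)
		wed_dma_reset();

	/* Phase 3: re-initialize rings, skipping queues WED still owns. */
	for (i = 0; i < NUM_TXQ; i++)
		tx_reset(i);
	for (i = 0; i < NUM_RXQ; i++) {
		if (wed_active && rx_is_wed_owned(i))
			continue;
		rx_reset(i);
	}
	return 0;
}
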
733 mt76_dma_cleanup(&dev->mt76); in mt7996_dma_cleanup()