Lines Matching +full:prefetch +full:-dma
All hits below come from the mt7996 wireless driver's DMA setup code; each entry gives the file line number, the matched source line, and the enclosing function.

1 // SPDX-License-Identifier: ISC
7 #include "../dma.h"
13 struct mt7996_dev *dev = phy->dev; in mt7996_init_tx_queues()
18 idx -= MT_TXQ_ID(0); in mt7996_init_tx_queues()
20 if (phy->mt76->band_idx == MT_BAND2) in mt7996_init_tx_queues()
26 return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, in mt7996_init_tx_queues()
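The subtraction at line 18 rebases an absolute hardware ring id to a zero-based array index before the BAND2 adjustment at line 20. A minimal runnable sketch of that normalization; the TXQ_ID() mapping here is made up (the real MT_TXQ_ID() values come from the chip's register map):

    #include <stdio.h>

    /* hypothetical absolute ring ids; the driver derives them per band */
    #define TXQ_ID(band)    (18 + (band))

    int main(void)
    {
        int idx = TXQ_ID(2);    /* absolute id of the band-2 tx ring */

        idx -= TXQ_ID(0);       /* as at line 18: rebase to index 0 */
        printf("queue array index: %d\n", idx);    /* prints 2 */
        return 0;
    }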
36 mt76_connac_tx_cleanup(&dev->mt76); in mt7996_poll_tx()
47 dev->q_wfdma_mask |= (1 << (q)); \ in mt7996_dma_config()
48 dev->q_int_mask[(q)] = int; \ in mt7996_dma_config()
49 dev->q_id[(q)] = id; \ in mt7996_dma_config()
64 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
74 if (dev->has_rro) { in mt7996_dma_config()
99 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
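Lines 47-49 are the body of the driver's queue-config macro (Q_CONFIG in the upstream source): for each logical queue it records whether the queue rides WFDMA0, which interrupt bits signal it, and its hardware ring id, so later code can turn a queue index into doorbell and IRQ bits with plain array lookups. Note that upstream names one macro parameter int, which is legal because keywords do not exist at preprocessing time. A runnable userspace sketch of the same table-driven idea; the queue numbers, masks, and ring ids are hypothetical:

    #include <stdint.h>
    #include <stdio.h>

    #define NQ 8

    struct qcfg {
        uint32_t wfdma_mask;    /* which queues belong to WFDMA0 */
        uint32_t int_mask[NQ];  /* per-queue interrupt bit(s) */
        uint8_t id[NQ];         /* hardware ring id */
    };

    /* mirrors the Q_CONFIG(q, wfdma, int, id) assignments at lines 47-49 */
    #define Q_CONFIG(c, q, wfdma, intr, hwid) do {  \
            if (wfdma)                              \
                (c)->wfdma_mask |= 1u << (q);       \
            (c)->int_mask[q] = (intr);              \
            (c)->id[q] = (hwid);                    \
        } while (0)

    int main(void)
    {
        struct qcfg c = { 0 };

        /* hypothetical queue 0: on WFDMA0, IRQ bit 3, hw ring 18 */
        Q_CONFIG(&c, 0, 1, 1u << 3, 18);
        printf("wfdma=%#x int[0]=%#x id[0]=%u\n",
               (unsigned)c.wfdma_mask, (unsigned)c.int_mask[0],
               (unsigned)c.id[0]);
        return 0;
    }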
126 #define PREFETCH(_depth) (__mt7996_dma_prefetch_base(&base, (_depth))) in __mt7996_dma_prefetch()
127 /* prefetch SRAM wrapping boundary for tx/rx ring. */ in __mt7996_dma_prefetch()
128 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
129 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
130 mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
131 mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
132 mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
133 mt76_wr(dev, MT_TXQ_EXT_CTRL(2) + ofs, PREFETCH(0x8)); in __mt7996_dma_prefetch()
134 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
135 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MCU_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
136 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN_WA) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
138 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2_WA : MT_RXQ_BAND1_WA; in __mt7996_dma_prefetch()
139 mt76_wr(dev, MT_RXQ_BAND1_CTRL(queue) + ofs, PREFETCH(0x2)); in __mt7996_dma_prefetch()
141 mt76_wr(dev, MT_RXQ_BAND1_CTRL(MT_RXQ_MAIN) + ofs, PREFETCH(0x10)); in __mt7996_dma_prefetch()
143 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2 : MT_RXQ_BAND1; in __mt7996_dma_prefetch()
144 mt76_wr(dev, MT_RXQ_BAND1_CTRL(queue) + ofs, PREFETCH(0x10)); in __mt7996_dma_prefetch()
146 if (dev->has_rro) { in __mt7996_dma_prefetch()
148 PREFETCH(0x10)); in __mt7996_dma_prefetch()
150 PREFETCH(0x10)); in __mt7996_dma_prefetch()
152 PREFETCH(0x4)); in __mt7996_dma_prefetch()
154 PREFETCH(0x4)); in __mt7996_dma_prefetch()
156 PREFETCH(0x4)); in __mt7996_dma_prefetch()
158 PREFETCH(0x4)); in __mt7996_dma_prefetch()
160 PREFETCH(0x4)); in __mt7996_dma_prefetch()
162 #undef PREFETCH in __mt7996_dma_prefetch()
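PREFETCH(_depth) doles out the prefetch SRAM mentioned in the comment at line 127: each call yields the control word for one ring and advances a running base, so consecutive rings get disjoint slices. The continuation-only hits at lines 148-160 sit in the has_rro branch; their unmatched opening lines program the RRO, MSDU-page, and TXFREE ring control registers. A standalone sketch of the helper's arithmetic, under the assumption (from the packing mt76 uses elsewhere) that the base lands in the upper 16 bits, the depth in the lower bits, and the base advances by depth * 16 descriptor units:

    #include <stdint.h>
    #include <stdio.h>

    /* assumed layout: bits 31..16 = SRAM base, bits 15..0 = depth;
     * each call hands the next ring a back-to-back SRAM slice */
    static uint32_t prefetch_base(uint16_t *base, uint16_t depth)
    {
        uint32_t val = ((uint32_t)*base << 16) | depth;

        *base += depth << 4;
        return val;
    }

    int main(void)
    {
        uint16_t base = 0;

        /* same depths as the FWDL/WM/TX0 writes above: 0x2, 0x2, 0x8 */
        printf("%#010x\n", (unsigned)prefetch_base(&base, 0x2)); /* 0x00000002 */
        printf("%#010x\n", (unsigned)prefetch_base(&base, 0x2)); /* 0x00200002 */
        printf("%#010x\n", (unsigned)prefetch_base(&base, 0x8)); /* 0x00400008 */
        return 0;
    }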
170 if (dev->hif2) in mt7996_dma_prefetch()
171 __mt7996_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0)); in mt7996_dma_prefetch()
178 if (dev->hif2) in mt7996_dma_disable()
179 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_disable()
190 if (dev->hif2) { in mt7996_dma_disable()
209 if (dev->hif2) { in mt7996_dma_disable()
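Each function above computes the same hif1_ofs: on boards with a second PCIe host interface (dev->hif2), the whole WFDMA0 register block is mirrored at MT_WFDMA0_PCIE1, so subtracting the two bases at index 0 gives a constant byte offset to add to every register address when programming the second interface. A minimal sketch; the base addresses here are hypothetical stand-ins for the real register map:

    #include <stdint.h>
    #include <stdio.h>

    #define WFDMA0(ofs)         (0xd4000u + (ofs))    /* hypothetical base */
    #define WFDMA0_PCIE1(ofs)   (0xd8000u + (ofs))    /* hypothetical mirror */

    static void mmio_wr(uint32_t addr, uint32_t val)
    {
        /* stand-in for mt76_wr() */
        printf("wr %#07x = %#x\n", (unsigned)addr, (unsigned)val);
    }

    int main(void)
    {
        int have_hif2 = 1;
        uint32_t hif1_ofs = 0;

        if (have_hif2)
            hif1_ofs = WFDMA0_PCIE1(0) - WFDMA0(0);    /* 0x4000 */

        /* program hif1, then the hif2 mirror of the same register */
        mmio_wr(WFDMA0(0x100), 0x1);
        if (have_hif2)
            mmio_wr(WFDMA0(0x100) + hif1_ofs, 0x1);
        return 0;
    }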
221 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_start()
225 if (dev->hif2) in mt7996_dma_start()
226 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_start()
242 if (dev->hif2) in mt7996_dma_start()
281 if (dev->hif2) in mt7996_dma_enable()
282 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_enable()
284 /* reset dma idx */ in mt7996_dma_enable()
286 if (dev->hif2) in mt7996_dma_enable()
294 if (dev->hif2) { in mt7996_dma_enable()
309 if (dev->hif2) in mt7996_dma_enable()
333 if (dev->hif2) { in mt7996_dma_enable()
358 if (dev->hif2) { in mt7996_dma_enable()
362 if (mtk_wed_device_active(&dev->mt76.mmio.wed) && in mt7996_dma_enable()
363 dev->has_rro) in mt7996_dma_enable()
377 struct mt76_dev *mdev = &dev->mt76; in mt7996_dma_rro_init()
382 mdev->q_rx[MT_RXQ_RRO_IND].flags = MT_WED_RRO_Q_IND; in mt7996_dma_rro_init()
383 mdev->q_rx[MT_RXQ_RRO_IND].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
384 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_RRO_IND], in mt7996_dma_rro_init()
392 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].flags = in mt7996_dma_rro_init()
394 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
395 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0], in mt7996_dma_rro_init()
405 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].flags = in mt7996_dma_rro_init()
407 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
408 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1], in mt7996_dma_rro_init()
419 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].flags = in mt7996_dma_rro_init()
421 mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].wed = &mdev->mmio.wed; in mt7996_dma_rro_init()
422 ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2], in mt7996_dma_rro_init()
431 irq_mask = mdev->mmio.irqmask | MT_INT_RRO_RX_DONE | in mt7996_dma_rro_init()
434 mtk_wed_device_start_hw_rro(&mdev->mmio.wed, irq_mask, false); in mt7996_dma_rro_init()
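mt7996_dma_rro_init() applies one recipe per hardware-RRO rx ring: tag the queue with its WED flavour in .flags, point .wed at the WED device, then allocate it with mt76_queue_alloc(); once the indication and MSDU-page rings exist, the saved irqmask is widened with the RRO-done bits (line 431) and handed to mtk_wed_device_start_hw_rro(). A condensed, runnable sketch of that recipe with simplified stand-in types; the flag values and queue_alloc() signature are invented for the demo:

    #include <stdint.h>
    #include <stdio.h>

    struct wed_dev { int id; };

    struct rx_queue {
        uint32_t flags;         /* WED queue flavour */
        struct wed_dev *wed;    /* owning WED device */
    };

    #define WED_RRO_Q_IND   0x1 /* hypothetical flag values */
    #define WED_RRO_Q_MSDU  0x2

    static int queue_alloc(struct rx_queue *q, int ndesc)
    {
        /* stand-in for mt76_queue_alloc(dev, q, idx, n_desc, bufsize, base) */
        printf("alloc flags=%#x ndesc=%d\n", (unsigned)q->flags, ndesc);
        return 0;
    }

    int main(void)
    {
        struct wed_dev wed = { 0 };
        struct rx_queue ind = { 0 }, page0 = { 0 };

        /* 1) tag the ring, 2) bind it to WED, 3) allocate it */
        ind.flags = WED_RRO_Q_IND;
        ind.wed = &wed;
        if (queue_alloc(&ind, 512))
            return 1;

        page0.flags = WED_RRO_Q_MSDU;
        page0.wed = &wed;
        return queue_alloc(&page0, 512);
    }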
443 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_init()
444 struct mtk_wed_device *wed_hif2 = &dev->mt76.mmio.wed_hif2; in mt7996_dma_init()
451 mt76_dma_attach(&dev->mt76); in mt7996_dma_init()
453 if (dev->hif2) in mt7996_dma_init()
454 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_init()
459 ret = mt7996_init_tx_queues(&dev->phy, in mt7996_dma_init()
460 MT_TXQ_ID(dev->mphy.band_idx), in mt7996_dma_init()
468 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, in mt7996_dma_init()
476 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, in mt7996_dma_init()
484 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, in mt7996_dma_init()
492 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], in mt7996_dma_init()
501 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], in mt7996_dma_init()
511 dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0); in mt7996_dma_init()
512 dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed; in mt7996_dma_init()
515 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], in mt7996_dma_init()
524 if (mtk_wed_device_active(wed) && !dev->has_rro) { in mt7996_dma_init()
525 dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
526 dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed; in mt7996_dma_init()
529 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], in mt7996_dma_init()
540 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2], in mt7996_dma_init()
551 if (mtk_wed_device_active(wed_hif2) && !dev->has_rro) { in mt7996_dma_init()
552 dev->mt76.q_rx[MT_RXQ_BAND2_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
553 dev->mt76.q_rx[MT_RXQ_BAND2_WA].wed = wed_hif2; in mt7996_dma_init()
556 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2_WA], in mt7996_dma_init()
566 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], in mt7996_dma_init()
576 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], in mt7996_dma_init()
586 dev->has_rro) { in mt7996_dma_init()
588 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags = in mt7996_dma_init()
590 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed; in mt7996_dma_init()
591 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0], in mt7996_dma_init()
600 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
601 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed; in mt7996_dma_init()
603 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0], in mt7996_dma_init()
613 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].flags = in mt7996_dma_init()
615 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].wed = wed; in mt7996_dma_init()
616 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND2], in mt7996_dma_init()
626 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
627 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2; in mt7996_dma_init()
629 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2], in mt7996_dma_init()
643 netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi, in mt7996_dma_init()
645 napi_enable(&dev->mt76.tx_napi); in mt7996_dma_init()
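netif_napi_add_tx() at line 643 registers mt7996_poll_tx() (which, per line 36, forwards to mt76_connac_tx_cleanup()) on the dedicated tx napi_struct, and napi_enable() arms it. The poll callback receives only the napi_struct and recovers the device with container_of(), so no private pointer has to be threaded through. A runnable demonstration of that recovery trick; the struct names are invented for the demo:

    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct napi { int weight; };
    struct dev { int id; struct napi tx_napi; };

    static void poll_tx(struct napi *napi)
    {
        /* recover the device that embeds this napi struct */
        struct dev *d = container_of(napi, struct dev, tx_napi);

        /* stands in for mt76_connac_tx_cleanup() */
        printf("tx cleanup for dev %d\n", d->id);
    }

    int main(void)
    {
        struct dev d = { .id = 7 };

        poll_tx(&d.tx_napi);    /* only the napi pointer crosses the API */
        return 0;
    }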
654 struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1]; in mt7996_dma_reset()
655 struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2]; in mt7996_dma_reset()
656 u32 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0); in mt7996_dma_reset()
663 if (dev->hif2) in mt7996_dma_reset()
671 mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true); in mt7996_dma_reset()
673 mt76_queue_tx_cleanup(dev, phy2->q_tx[i], true); in mt7996_dma_reset()
675 mt76_queue_tx_cleanup(dev, phy3->q_tx[i], true); in mt7996_dma_reset()
679 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt7996_dma_reset()
681 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
682 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
684 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
690 if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2)) in mt7996_dma_reset()
691 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2); in mt7996_dma_reset()
693 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
694 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed); in mt7996_dma_reset()
697 mt76_dma_wed_reset(&dev->mt76); in mt7996_dma_reset()
701 mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]); in mt7996_dma_reset()
703 mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]); in mt7996_dma_reset()
705 mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]); in mt7996_dma_reset()
709 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt7996_dma_reset()
711 mt76_for_each_q_rx(&dev->mt76, i) { in mt7996_dma_reset()
712 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
713 if (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) || in mt7996_dma_reset()
714 mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i])) in mt7996_dma_reset()
717 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
720 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
722 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
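The reset path (lines 654-722) is strictly ordered: drain what software owns (tx cleanup, rx cleanup, tx status check), reset the WED devices before touching rings they own, then re-initialize the rings; the loop at lines 711-717 skips any rx queue owned by WED's RRO or TXFREE offload. A small runnable sketch of that skip test:

    #include <stdbool.h>
    #include <stdio.h>

    struct rxq { bool wed_rro, wed_txfree; };

    /* mirrors lines 711-717: with WED active, rings owned by the
     * RRO or TXFREE offload must not be reset by the host */
    static void reset_rx_queues(struct rxq *q, int n, bool wed_active)
    {
        for (int i = 0; i < n; i++) {
            if (wed_active && (q[i].wed_rro || q[i].wed_txfree))
                continue;    /* WED owns this ring; leave it alone */
            printf("reset rx queue %d\n", i);
        }
    }

    int main(void)
    {
        struct rxq q[3] = { { false, false }, { true, false }, { false, true } };

        reset_rx_queues(q, 3, true);    /* only queue 0 is reset */
        return 0;
    }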
732 mt76_dma_cleanup(&dev->mt76); in mt7996_dma_cleanup()