Lines matching full:dev
13 struct mt792x_dev *dev = dev_instance; in mt792x_irq_handler() local
15 mt76_wr(dev, dev->irq_map->host_irq_enable, 0); in mt792x_irq_handler()
17 if (!test_bit(MT76_STATE_INITIALIZED, &dev->mphy.state)) in mt792x_irq_handler()
20 tasklet_schedule(&dev->mt76.irq_tasklet); in mt792x_irq_handler()
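
A minimal sketch of how the mt792x_irq_handler() fragments above fit together; the irqreturn_t plumbing (IRQ_NONE/IRQ_HANDLED) is the standard hard-IRQ handler convention and is not part of the matched lines:

irqreturn_t mt792x_irq_handler(int irq, void *dev_instance)
{
        struct mt792x_dev *dev = dev_instance;

        /* mask every WFDMA host interrupt before deferring to the tasklet */
        mt76_wr(dev, dev->irq_map->host_irq_enable, 0);

        /* nothing to do until the hardware has been brought up */
        if (!test_bit(MT76_STATE_INITIALIZED, &dev->mphy.state))
                return IRQ_NONE;

        tasklet_schedule(&dev->mt76.irq_tasklet);

        return IRQ_HANDLED;
}
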
28 struct mt792x_dev *dev = (struct mt792x_dev *)data; in mt792x_irq_tasklet() local
29 const struct mt792x_irq_map *irq_map = dev->irq_map; in mt792x_irq_tasklet()
32 mt76_wr(dev, irq_map->host_irq_enable, 0); in mt792x_irq_tasklet()
34 intr = mt76_rr(dev, MT_WFDMA0_HOST_INT_STA); in mt792x_irq_tasklet()
35 intr &= dev->mt76.mmio.irqmask; in mt792x_irq_tasklet()
36 mt76_wr(dev, MT_WFDMA0_HOST_INT_STA, intr); in mt792x_irq_tasklet()
38 trace_dev_irq(&dev->mt76, intr, dev->mt76.mmio.irqmask); in mt792x_irq_tasklet()
43 if (intr & dev->irq_map->tx.mcu_complete_mask) in mt792x_irq_tasklet()
44 mask |= dev->irq_map->tx.mcu_complete_mask; in mt792x_irq_tasklet()
49 intr_sw = mt76_rr(dev, MT_MCU_CMD); in mt792x_irq_tasklet()
51 mt76_wr(dev, MT_MCU_CMD, intr_sw); in mt792x_irq_tasklet()
58 mt76_set_irq_mask(&dev->mt76, irq_map->host_irq_enable, mask, 0); in mt792x_irq_tasklet()
60 if (intr & dev->irq_map->tx.all_complete_mask) in mt792x_irq_tasklet()
61 napi_schedule(&dev->mt76.tx_napi); in mt792x_irq_tasklet()
64 napi_schedule(&dev->mt76.napi[MT_RXQ_MCU]); in mt792x_irq_tasklet()
67 napi_schedule(&dev->mt76.napi[MT_RXQ_MCU_WA]); in mt792x_irq_tasklet()
70 napi_schedule(&dev->mt76.napi[MT_RXQ_MAIN]); in mt792x_irq_tasklet()
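
The mt792x_irq_tasklet() fragments follow the usual mt76 bottom-half pattern: keep sources masked, read and acknowledge MT_WFDMA0_HOST_INT_STA, trace, rebuild the mask of sources to re-enable, acknowledge the MCU doorbell via MT_MCU_CMD, then hand completed work to NAPI. A hedged reconstruction; the RX-mask accumulation and the MT_MCU_CMD branch condition sit on lines that did not match the search, so they appear only as comments:

void mt792x_irq_tasklet(unsigned long data)
{
        struct mt792x_dev *dev = (struct mt792x_dev *)data;
        const struct mt792x_irq_map *irq_map = dev->irq_map;
        u32 intr, mask = 0;

        /* keep everything masked while the bottom half runs */
        mt76_wr(dev, irq_map->host_irq_enable, 0);

        /* read pending sources, filter by the software mask, then ack them */
        intr = mt76_rr(dev, MT_WFDMA0_HOST_INT_STA);
        intr &= dev->mt76.mmio.irqmask;
        mt76_wr(dev, MT_WFDMA0_HOST_INT_STA, intr);

        trace_dev_irq(&dev->mt76, intr, dev->mt76.mmio.irqmask);

        /* the RX completion bits are folded into @mask here (unmatched lines) */
        if (intr & dev->irq_map->tx.mcu_complete_mask)
                mask |= dev->irq_map->tx.mcu_complete_mask;

        /* MCU doorbell: the matched mt76_rr()/mt76_wr() pair on MT_MCU_CMD
         * reads the software interrupt status and writes it back to ack it;
         * its surrounding condition is on unmatched lines
         */

        /* re-enable only the sources that NAPI / tx cleanup will service */
        mt76_set_irq_mask(&dev->mt76, irq_map->host_irq_enable, mask, 0);

        if (intr & dev->irq_map->tx.all_complete_mask)
                napi_schedule(&dev->mt76.tx_napi);
        /* the matched napi_schedule() calls for MT_RXQ_MCU, MT_RXQ_MCU_WA and
         * MT_RXQ_MAIN are each guarded by the corresponding RX completion bit
         */
}
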
76 struct mt792x_dev *dev = container_of(mdev, struct mt792x_dev, mt76); in mt792x_rx_poll_complete() local
77 const struct mt792x_irq_map *irq_map = dev->irq_map; in mt792x_rx_poll_complete()
89 static void mt792x_dma_prefetch(struct mt792x_dev *dev) in mt792x_dma_prefetch() argument
91 if (is_mt7925(&dev->mt76)) { in mt792x_dma_prefetch()
93 mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0000, 0x4)); in mt792x_dma_prefetch()
94 mt76_wr(dev, MT_WFDMA0_RX_RING1_EXT_CTRL, PREFETCH(0x0040, 0x4)); in mt792x_dma_prefetch()
95 mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x0080, 0x4)); in mt792x_dma_prefetch()
96 mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x00c0, 0x4)); in mt792x_dma_prefetch()
98 mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x0100, 0x10)); in mt792x_dma_prefetch()
99 mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x0200, 0x10)); in mt792x_dma_prefetch()
100 mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x0300, 0x10)); in mt792x_dma_prefetch()
101 mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x0400, 0x10)); in mt792x_dma_prefetch()
102 mt76_wr(dev, MT_WFDMA0_TX_RING15_EXT_CTRL, PREFETCH(0x0500, 0x4)); in mt792x_dma_prefetch()
103 mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x0540, 0x4)); in mt792x_dma_prefetch()
106 mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0, 0x4)); in mt792x_dma_prefetch()
107 mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x40, 0x4)); in mt792x_dma_prefetch()
108 mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x80, 0x4)); in mt792x_dma_prefetch()
109 mt76_wr(dev, MT_WFDMA0_RX_RING4_EXT_CTRL, PREFETCH(0xc0, 0x4)); in mt792x_dma_prefetch()
110 mt76_wr(dev, MT_WFDMA0_RX_RING5_EXT_CTRL, PREFETCH(0x100, 0x4)); in mt792x_dma_prefetch()
112 mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x140, 0x4)); in mt792x_dma_prefetch()
113 mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x180, 0x4)); in mt792x_dma_prefetch()
114 mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x1c0, 0x4)); in mt792x_dma_prefetch()
115 mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x200, 0x4)); in mt792x_dma_prefetch()
116 mt76_wr(dev, MT_WFDMA0_TX_RING4_EXT_CTRL, PREFETCH(0x240, 0x4)); in mt792x_dma_prefetch()
117 mt76_wr(dev, MT_WFDMA0_TX_RING5_EXT_CTRL, PREFETCH(0x280, 0x4)); in mt792x_dma_prefetch()
118 mt76_wr(dev, MT_WFDMA0_TX_RING6_EXT_CTRL, PREFETCH(0x2c0, 0x4)); in mt792x_dma_prefetch()
119 mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x340, 0x4)); in mt792x_dma_prefetch()
120 mt76_wr(dev, MT_WFDMA0_TX_RING17_EXT_CTRL, PREFETCH(0x380, 0x4)); in mt792x_dma_prefetch()
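
The two branches above program the WFDMA prefetch window of each ring: every PREFETCH(base, depth) write hands a ring a slice of prefetch SRAM. Judging purely from the matched values, consecutive bases step by 0x10 bytes per unit of depth (depth 0x4 rings sit 0x40 apart, the mt7925 depth 0x10 TX rings sit 0x100 apart; the 0x2c0 to 0x340 jump in the mt7921 branch leaves an apparently unused slot). A hypothetical helper, not driver code, just to show the arithmetic implied by those values:

/* illustration only: derive the next ring's prefetch base from the previous
 * one, assuming one unit of depth covers 0x10 bytes of prefetch SRAM
 */
static u32 next_prefetch_base(u32 base, u32 depth)
{
        return base + depth * 0x10;     /* e.g. 0x0100 + 0x10 * 0x10 == 0x0200 */
}
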
124 int mt792x_dma_enable(struct mt792x_dev *dev) in mt792x_dma_enable() argument
126 if (is_mt7925(&dev->mt76)) in mt792x_dma_enable()
127 mt76_rmw(dev, MT_UWFDMA0_GLO_CFG_EXT1, BIT(28), BIT(28)); in mt792x_dma_enable()
130 mt792x_dma_prefetch(dev); in mt792x_dma_enable()
133 mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0); in mt792x_dma_enable()
136 mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0); in mt792x_dma_enable()
138 mt76_set(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_enable()
146 mt76_set(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_enable()
149 mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT); in mt792x_dma_enable()
152 mt76_connac_irq_enable(&dev->mt76, in mt792x_dma_enable()
153 dev->irq_map->tx.all_complete_mask | in mt792x_dma_enable()
154 dev->irq_map->rx.data_complete_mask | in mt792x_dma_enable()
155 dev->irq_map->rx.wm2_complete_mask | in mt792x_dma_enable()
156 dev->irq_map->rx.wm_complete_mask | in mt792x_dma_enable()
158 mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE); in mt792x_dma_enable()
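
mt792x_dma_enable() rebuilds the DMA state: an extra GLO_CFG_EXT1 bit for mt7925, per-ring prefetch programming, a reset of the descriptor TX pointers, the delayed-interrupt config cleared, the global TX/RX enable bits set, a "need reinit" marker left in the dummy CR for the deep-sleep path, and finally the host interrupt sources unmasked. A sketch of that flow; the GLO_CFG bit names and the last OR'd interrupt source fall on unmatched lines and are only described in comments:

int mt792x_dma_enable(struct mt792x_dev *dev)
{
        /* mt7925 needs an extra WFDMA extension bit before enabling DMA */
        if (is_mt7925(&dev->mt76))
                mt76_rmw(dev, MT_UWFDMA0_GLO_CFG_EXT1, BIT(28), BIT(28));

        /* program the per-ring prefetch windows */
        mt792x_dma_prefetch(dev);

        /* reset descriptor TX pointers and disable delayed interrupts */
        mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);
        mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);

        /* the two matched mt76_set(dev, MT_WFDMA0_GLO_CFG, ...) writes turn
         * the TX/RX DMA engines back on; the bit names are on unmatched lines
         */

        /* flag that a reinit is needed if the firmware drops the DMA state */
        mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT);

        /* unmask the TX/RX completion sources (one more source follows
         * rx.wm_complete_mask on an unmatched line) and the MCU wake doorbell
         */
        mt76_connac_irq_enable(&dev->mt76,
                               dev->irq_map->tx.all_complete_mask |
                               dev->irq_map->rx.data_complete_mask |
                               dev->irq_map->rx.wm2_complete_mask |
                               dev->irq_map->rx.wm_complete_mask);
        mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE);

        return 0;
}
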
165 mt792x_dma_reset(struct mt792x_dev *dev, bool force) in mt792x_dma_reset() argument
169 err = mt792x_dma_disable(dev, force); in mt792x_dma_reset()
175 mt76_queue_reset(dev, dev->mphy.q_tx[i]); in mt792x_dma_reset()
178 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt792x_dma_reset()
180 mt76_for_each_q_rx(&dev->mt76, i) in mt792x_dma_reset()
181 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt792x_dma_reset()
183 mt76_tx_status_check(&dev->mt76, true); in mt792x_dma_reset()
185 return mt792x_dma_enable(dev); in mt792x_dma_reset()
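
mt792x_dma_reset() is the inner reset: stop the DMA engines, put every TX, MCU and RX queue back into its initial state, flush pending TX status, then run mt792x_dma_enable() again. A sketch of the matched flow; the loop bounds are assumed to be the driver's usual __MT_TXQ_MAX / __MT_MCUQ_MAX iteration, since those lines did not match:

static int mt792x_dma_reset(struct mt792x_dev *dev, bool force)
{
        int i, err;

        err = mt792x_dma_disable(dev, force);
        if (err)
                return err;

        /* put every hardware queue back into its post-alloc state */
        for (i = 0; i < __MT_TXQ_MAX; i++)
                mt76_queue_reset(dev, dev->mphy.q_tx[i]);

        for (i = 0; i < __MT_MCUQ_MAX; i++)
                mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

        mt76_for_each_q_rx(&dev->mt76, i)
                mt76_queue_reset(dev, &dev->mt76.q_rx[i]);

        /* drop any TX status still waiting to be reported */
        mt76_tx_status_check(&dev->mt76, true);

        return mt792x_dma_enable(dev);
}
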
188 int mt792x_wpdma_reset(struct mt792x_dev *dev, bool force) in mt792x_wpdma_reset() argument
193 for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++) in mt792x_wpdma_reset()
194 mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true); in mt792x_wpdma_reset()
196 for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++) in mt792x_wpdma_reset()
197 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt792x_wpdma_reset()
199 mt76_for_each_q_rx(&dev->mt76, i) in mt792x_wpdma_reset()
200 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt792x_wpdma_reset()
203 err = mt792x_wfsys_reset(dev); in mt792x_wpdma_reset()
207 err = mt792x_dma_reset(dev, force); in mt792x_wpdma_reset()
211 mt76_for_each_q_rx(&dev->mt76, i) in mt792x_wpdma_reset()
212 mt76_queue_rx_reset(dev, i); in mt792x_wpdma_reset()
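
mt792x_wpdma_reset() is the outer reset used by the recovery and deep-sleep paths: drain every queue in software, optionally reset the whole WiFi subsystem when force is set (the guard itself is on an unmatched line), run the DMA reset above, then re-initialize the RX queues. A sketch:

int mt792x_wpdma_reset(struct mt792x_dev *dev, bool force)
{
        int i, err;

        /* drop everything still queued before touching the hardware */
        for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++)
                mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);

        for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
                mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

        mt76_for_each_q_rx(&dev->mt76, i)
                mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);

        /* a forced reset also pulses the WiFi subsystem reset first */
        if (force) {
                err = mt792x_wfsys_reset(dev);
                if (err)
                        return err;
        }

        err = mt792x_dma_reset(dev, force);
        if (err)
                return err;

        /* refill and re-arm every RX queue */
        mt76_for_each_q_rx(&dev->mt76, i)
                mt76_queue_rx_reset(dev, i);

        return 0;
}
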
218 int mt792x_wpdma_reinit_cond(struct mt792x_dev *dev) in mt792x_wpdma_reinit_cond() argument
220 struct mt76_connac_pm *pm = &dev->pm; in mt792x_wpdma_reinit_cond()
224 if (mt792x_dma_need_reinit(dev)) { in mt792x_wpdma_reinit_cond()
226 mt76_wr(dev, dev->irq_map->host_irq_enable, 0); in mt792x_wpdma_reinit_cond()
227 mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0); in mt792x_wpdma_reinit_cond()
229 err = mt792x_wpdma_reset(dev, false); in mt792x_wpdma_reinit_cond()
231 dev_err(dev->mt76.dev, "wpdma reset failed\n"); in mt792x_wpdma_reinit_cond()
236 mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff); in mt792x_wpdma_reinit_cond()
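
mt792x_wpdma_reinit_cond() is the resume-time check: when mt792x_dma_need_reinit() reports that the DMA state was lost (the MT_WFDMA_NEED_REINIT marker planted by mt792x_dma_enable() is evidently gone), it masks the host and PCIe MAC interrupts, performs a non-forced wpdma reset, and re-arms MT_PCIE_MAC_INT_ENABLE. A sketch; the matched pm pointer is used on unmatched lines (wake accounting, as far as I can tell) and is left out here:

int mt792x_wpdma_reinit_cond(struct mt792x_dev *dev)
{
        int err;

        if (mt792x_dma_need_reinit(dev)) {
                /* keep host and PCIe MAC interrupts off during the reset */
                mt76_wr(dev, dev->irq_map->host_irq_enable, 0);
                mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0);

                err = mt792x_wpdma_reset(dev, false);
                if (err) {
                        dev_err(dev->mt76.dev, "wpdma reset failed\n");
                        return err;
                }

                /* re-arm the PCIe MAC interrupt sources */
                mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff);
        }

        return 0;
}
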
244 int mt792x_dma_disable(struct mt792x_dev *dev, bool force) in mt792x_dma_disable() argument
247 mt76_clear(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_disable()
254 if (!mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_disable()
260 mt76_clear(dev, MT_WFDMA0_GLO_CFG_EXT0, in mt792x_dma_disable()
262 mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS); in mt792x_dma_disable()
266 mt76_clear(dev, MT_WFDMA0_RST, in mt792x_dma_disable()
270 mt76_set(dev, MT_WFDMA0_RST, in mt792x_dma_disable()
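
mt792x_dma_disable() stops both DMA engines and waits for them to go idle; the matched lines also clear a MT_WFDMA0_GLO_CFG_EXT0 field, switch the DMA scheduler into bypass mode, and, for a forced disable, pulse the reset bits in MT_WFDMA0_RST (the matched clear-then-set pair). A sketch of the common part; the GLO_CFG enable/busy bit names and the poll arguments are my reading of mt792x_regs.h, not matched lines, so treat them as assumptions:

int mt792x_dma_disable(struct mt792x_dev *dev, bool force)
{
        /* stop both engines (bit names assumed, see note above) */
        mt76_clear(dev, MT_WFDMA0_GLO_CFG,
                   MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);

        /* wait for the busy flags to drop; give up after the timeout */
        if (!mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG,
                                 MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
                                 MT_WFDMA0_GLO_CFG_RX_DMA_BUSY,
                                 0, 100, 1))
                return -ETIMEDOUT;

        /* matched lines: clear a MT_WFDMA0_GLO_CFG_EXT0 field, then put the
         * DMA scheduler into bypass mode
         */
        mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS);

        if (force) {
                /* the matched clear/set pair on MT_WFDMA0_RST pulses the
                 * WFDMA logic reset; the exact bits are on unmatched lines
                 */
        }

        return 0;
}
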
279 void mt792x_dma_cleanup(struct mt792x_dev *dev) in mt792x_dma_cleanup() argument
282 mt76_clear(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_cleanup()
290 mt76_poll_msec_tick(dev, MT_WFDMA0_GLO_CFG, in mt792x_dma_cleanup()
295 mt76_clear(dev, MT_WFDMA0_RST, in mt792x_dma_cleanup()
299 mt76_set(dev, MT_WFDMA0_RST, in mt792x_dma_cleanup()
303 mt76_dma_cleanup(&dev->mt76); in mt792x_dma_cleanup()
309 struct mt792x_dev *dev; in mt792x_poll_tx() local
311 dev = container_of(napi, struct mt792x_dev, mt76.tx_napi); in mt792x_poll_tx()
313 if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) { in mt792x_poll_tx()
315 queue_work(dev->mt76.wq, &dev->pm.wake_work); in mt792x_poll_tx()
319 mt76_connac_tx_cleanup(&dev->mt76); in mt792x_poll_tx()
321 mt76_connac_irq_enable(&dev->mt76, in mt792x_poll_tx()
322 dev->irq_map->tx.all_complete_mask); in mt792x_poll_tx()
323 mt76_connac_pm_unref(&dev->mphy, &dev->pm); in mt792x_poll_tx()
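
mt792x_poll_tx() is the TX NAPI handler: it first takes a runtime-PM reference; if the device is asleep it kicks the wake worker and returns, otherwise it reaps completed TX descriptors, re-enables the TX completion interrupt once NAPI is done, and drops the PM reference. The napi_complete() calls and return values are on unmatched lines, so they are inferred from the usual NAPI shape. The mt792x_poll_rx() fragments further down wrap their budgeted RX poll in the same pm_ref/pm_unref pattern.

static int mt792x_poll_tx(struct napi_struct *napi, int budget)
{
        struct mt792x_dev *dev;

        dev = container_of(napi, struct mt792x_dev, mt76.tx_napi);

        /* device is asleep: let the wake worker bring it back first */
        if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
                napi_complete(napi);
                queue_work(dev->mt76.wq, &dev->pm.wake_work);
                return 0;
        }

        /* reap completed TX descriptors, then re-arm the TX interrupt */
        mt76_connac_tx_cleanup(&dev->mt76);
        if (napi_complete(napi))
                mt76_connac_irq_enable(&dev->mt76,
                                       dev->irq_map->tx.all_complete_mask);
        mt76_connac_pm_unref(&dev->mphy, &dev->pm);

        return 0;
}
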
331 struct mt792x_dev *dev; in mt792x_poll_rx() local
334 dev = container_of(napi->dev, struct mt792x_dev, mt76.napi_dev); in mt792x_poll_rx()
336 if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) { in mt792x_poll_rx()
338 queue_work(dev->mt76.wq, &dev->pm.wake_work); in mt792x_poll_rx()
342 mt76_connac_pm_unref(&dev->mphy, &dev->pm); in mt792x_poll_rx()
348 int mt792x_wfsys_reset(struct mt792x_dev *dev) in mt792x_wfsys_reset() argument
350 u32 addr = is_mt7921(&dev->mt76) ? 0x18000140 : 0x7c000140; in mt792x_wfsys_reset()
352 mt76_clear(dev, addr, WFSYS_SW_RST_B); in mt792x_wfsys_reset()
354 mt76_set(dev, addr, WFSYS_SW_RST_B); in mt792x_wfsys_reset()
356 if (!__mt76_poll_msec(&dev->mt76, addr, WFSYS_SW_INIT_DONE, in mt792x_wfsys_reset()
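
mt792x_wfsys_reset() resets the WiFi subsystem through a chip-specific register (0x18000140 on mt7921, 0x7c000140 otherwise): drop WFSYS_SW_RST_B, release it again, then poll for WFSYS_SW_INIT_DONE. The delay between the clear and the set, and the poll arguments, are not in the matched lines; the values below are assumptions for illustration:

int mt792x_wfsys_reset(struct mt792x_dev *dev)
{
        u32 addr = is_mt7921(&dev->mt76) ? 0x18000140 : 0x7c000140;

        /* assert the subsystem reset, give it time, then release it */
        mt76_clear(dev, addr, WFSYS_SW_RST_B);
        msleep(50);                             /* assumed delay */
        mt76_set(dev, addr, WFSYS_SW_RST_B);

        /* wait for the firmware side to report init done (timeout assumed) */
        if (!__mt76_poll_msec(&dev->mt76, addr, WFSYS_SW_INIT_DONE,
                              WFSYS_SW_INIT_DONE, 500))
                return -ETIMEDOUT;

        return 0;
}
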