Lines matching full:dev — identifier search results over the MediaTek WED driver (drivers/net/ethernet/mediatek/mtk_wed.c); each entry gives the source line number, the matching line, and its enclosing function.
56 struct net_device *dev; member
93 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wed_m32() argument
95 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
99 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_set() argument
101 return wed_m32(dev, reg, 0, mask); in wed_set()
105 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_clr() argument
107 return wed_m32(dev, reg, mask, 0); in wed_clr()
111 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wdma_m32() argument
113 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val); in wdma_m32()
117 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_set() argument
119 wdma_m32(dev, reg, 0, mask); in wdma_set()
123 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_clr() argument
125 wdma_m32(dev, reg, mask, 0); in wdma_clr()
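All six helpers above reduce to one masked read-modify-write primitive: wed_m32() delegates to regmap_update_bits(), widening the mask to mask | val so the set case still works when the caller passes mask == 0, while wdma_m32() open-codes the same update on a direct register read. A minimal user-space sketch of the pattern; the names and the volatile-pointer register model are illustrative, not the kernel's:

#include <stdint.h>

/* open-coded masked update, as in wdma_m32(): clear the mask bits,
 * then OR in the new value */
static void rmw32(volatile uint32_t *reg, uint32_t mask, uint32_t val)
{
        *reg = (*reg & ~mask) | val;
}

static void rmw_set(volatile uint32_t *reg, uint32_t bits)
{
        rmw32(reg, 0, bits);    /* cf. wdma_set(): clear nothing, OR in bits */
}

static void rmw_clr(volatile uint32_t *reg, uint32_t bits)
{
        rmw32(reg, bits, 0);    /* cf. wdma_clr(): clear bits, set nothing */
}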
129 wifi_r32(struct mtk_wed_device *dev, u32 reg) in wifi_r32() argument
131 return readl(dev->wlan.base + reg); in wifi_r32()
135 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val) in wifi_w32() argument
137 writel(val, dev->wlan.base + reg); in wifi_w32()
141 mtk_wed_read_reset(struct mtk_wed_device *dev) in mtk_wed_read_reset() argument
143 return wed_r32(dev, MTK_WED_RESET); in mtk_wed_read_reset()
147 mtk_wdma_read_reset(struct mtk_wed_device *dev) in mtk_wdma_read_reset() argument
149 return wdma_r32(dev, MTK_WDMA_GLO_CFG); in mtk_wdma_read_reset()
153 mtk_wdma_v3_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_rx_reset() argument
157 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_rx_reset()
160 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
161 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
165 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_rx_reset()
166 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
170 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_rx_reset()
171 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
173 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
174 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
178 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_rx_reset()
179 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
183 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_rx_reset()
184 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
187 wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
190 wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
195 wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
203 wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
213 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
215 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
218 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
220 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
224 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
226 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
230 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
232 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
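The v3 RX reset above has a recurring shape: disable prefetch and write-back, poll each engine until it reports idle, then pulse the FIFO and start-index resets by writing the reset bits and immediately clearing them again (the wdma_w32()/wdma_clr() pairs on the *_FIFO_CFG and *_SIDX_CFG registers). A sketch of that assert-then-release pulse; reg and bits are illustrative:

#include <stdint.h>

/* pulse a reset: assert the bits, then release them, as the
 * wdma_w32()/wdma_clr() pairs above do for each FIFO and index */
static void reset_pulse(volatile uint32_t *reg, uint32_t bits)
{
        *reg |= bits;           /* assert reset */
        *reg &= ~bits;          /* release reset */
}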
237 mtk_wdma_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_rx_reset() argument
242 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN); in mtk_wdma_rx_reset()
243 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_rx_reset()
246 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
248 mtk_wdma_v3_rx_reset(dev); in mtk_wdma_rx_reset()
249 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX); in mtk_wdma_rx_reset()
250 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_rx_reset()
252 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
253 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
256 wdma_w32(dev, in mtk_wdma_rx_reset()
264 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_check_busy() argument
266 return !!(wed_r32(dev, reg) & mask); in mtk_wed_check_busy()
270 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_poll_busy() argument
277 timeout, false, dev, reg, mask); in mtk_wed_poll_busy()
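mtk_wed_check_busy() collapses a masked register read into a boolean, and mtk_wed_poll_busy() spins on it through read_poll_timeout() until the busy bits drop or the deadline passes — the same shape as the readx_poll_timeout() calls in the reset paths above. A self-contained user-space sketch of poll-until-clear with a timeout (function name and parameters are illustrative):

#include <stdbool.h>
#include <stdint.h>
#include <time.h>

static uint64_t now_us(void)
{
        struct timespec ts;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000ull + ts.tv_nsec / 1000;
}

/* spin until (*reg & mask) == 0; false means the timeout expired */
static bool poll_busy_clear(volatile uint32_t *reg, uint32_t mask,
                            uint64_t timeout_us)
{
        uint64_t deadline = now_us() + timeout_us;

        while (*reg & mask) {
                if (now_us() > deadline)
                        return false;
        }
        return true;
}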
281 mtk_wdma_v3_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_tx_reset() argument
285 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_tx_reset()
288 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
289 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
293 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_tx_reset()
294 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
298 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_tx_reset()
299 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
301 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
302 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
306 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_tx_reset()
307 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
311 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_tx_reset()
312 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
315 wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
318 wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
323 wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
328 wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
335 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
337 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
340 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
342 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
346 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
348 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
352 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
354 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
359 mtk_wdma_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_tx_reset() argument
364 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wdma_tx_reset()
365 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_tx_reset()
367 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
369 mtk_wdma_v3_tx_reset(dev); in mtk_wdma_tx_reset()
370 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX); in mtk_wdma_tx_reset()
371 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_tx_reset()
373 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
374 wdma_w32(dev, in mtk_wdma_tx_reset()
379 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask) in mtk_wed_reset() argument
383 wed_w32(dev, MTK_WED_RESET, mask); in mtk_wed_reset()
384 if (readx_poll_timeout(mtk_wed_read_reset, dev, status, in mtk_wed_reset()
390 mtk_wed_wo_read_status(struct mtk_wed_device *dev) in mtk_wed_wo_read_status() argument
392 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS); in mtk_wed_wo_read_status()
396 mtk_wed_wo_reset(struct mtk_wed_device *dev) in mtk_wed_wo_reset() argument
398 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
403 mtk_wdma_tx_reset(dev); in mtk_wed_wo_reset()
404 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_wo_reset()
411 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val, in mtk_wed_wo_reset()
414 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
419 switch (dev->hw->index) { in mtk_wed_wo_reset()
446 struct mtk_wed_device *dev; in mtk_wed_fe_reset() local
452 dev = hw->wed_dev; in mtk_wed_fe_reset()
453 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
457 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
459 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
473 struct mtk_wed_device *dev; in mtk_wed_fe_reset_complete() local
478 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
479 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
482 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
489 mtk_wed_assign(struct mtk_wed_device *dev) in mtk_wed_assign() argument
494 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
495 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
517 hw->wed_dev = dev; in mtk_wed_assign()
522 mtk_wed_amsdu_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_amsdu_buffer_alloc() argument
524 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_amsdu_buffer_alloc()
531 wed_amsdu = devm_kcalloc(hw->dev, MTK_WED_AMSDU_NPAGES, in mtk_wed_amsdu_buffer_alloc()
548 wed_amsdu[i].txd_phy = dma_map_single(hw->dev, ptr, in mtk_wed_amsdu_buffer_alloc()
551 if (dma_mapping_error(hw->dev, wed_amsdu[i].txd_phy)) in mtk_wed_amsdu_buffer_alloc()
554 dev->hw->wed_amsdu = wed_amsdu; in mtk_wed_amsdu_buffer_alloc()
560 dma_unmap_single(hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_buffer_alloc()
566 mtk_wed_amsdu_free_buffer(struct mtk_wed_device *dev) in mtk_wed_amsdu_free_buffer() argument
568 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_free_buffer()
575 dma_unmap_single(dev->hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_free_buffer()
583 mtk_wed_amsdu_init(struct mtk_wed_device *dev) in mtk_wed_amsdu_init() argument
585 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_init()
592 wed_w32(dev, MTK_WED_AMSDU_HIFTXD_BASE_L(i), in mtk_wed_amsdu_init()
596 wed_w32(dev, MTK_WED_AMSDU_STA_INFO_INIT, MTK_WED_AMSDU_STA_RMVL | in mtk_wed_amsdu_init()
599 dev->wlan.amsdu_max_len >> 8) | in mtk_wed_amsdu_init()
601 dev->wlan.amsdu_max_subframes)); in mtk_wed_amsdu_init()
603 wed_w32(dev, MTK_WED_AMSDU_STA_INFO, MTK_WED_AMSDU_STA_INFO_DO_INIT); in mtk_wed_amsdu_init()
605 ret = mtk_wed_poll_busy(dev, MTK_WED_AMSDU_STA_INFO, in mtk_wed_amsdu_init()
608 dev_err(dev->hw->dev, "amsdu initialization failed\n"); in mtk_wed_amsdu_init()
613 wed_set(dev, MTK_WED_AMSDU_HIFTXD_CFG, in mtk_wed_amsdu_init()
614 FIELD_PREP(MTK_WED_AMSDU_HIFTXD_SRC, dev->hw->index)); in mtk_wed_amsdu_init()
617 wed_set(dev, MTK_WED_AMSDU_PSE, MTK_WED_AMSDU_PSE_RESET); in mtk_wed_amsdu_init()
618 ret = mtk_wed_poll_busy(dev, MTK_WED_MON_AMSDU_QMEM_STS1, BIT(29)); in mtk_wed_amsdu_init()
625 if (dev->wlan.id == 0x7991) in mtk_wed_amsdu_init()
626 wed_clr(dev, MTK_WED_AMSDU_FIFO, MTK_WED_AMSDU_IS_PRIOR0_RING); in mtk_wed_amsdu_init()
628 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_amsdu_init()
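The FIELD_PREP() calls above pack a value into the field described by a contiguous register mask — for instance amsdu_max_len >> 8 into the STA_INFO_INIT length field. A user-space stand-in for the kernel macro, assuming a contiguous non-zero mask and the GCC/Clang __builtin_ctz():

#include <stdint.h>

/* shift val up to the mask's lowest set bit, then trim to the field;
 * matches FIELD_PREP() behaviour for contiguous masks */
#define FIELD_PREP_U32(mask, val) \
        ((((uint32_t)(val)) << __builtin_ctz(mask)) & (uint32_t)(mask))

/* example: FIELD_PREP_U32(0x00000f00, 0x5) == 0x500 */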
634 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_tx_buffer_alloc() argument
636 u32 desc_size = dev->hw->soc->tx_ring_desc_size; in mtk_wed_tx_buffer_alloc()
638 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
643 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
644 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
645 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
647 dev->tx_buf_ring.size = MTK_WED_TX_BM_DMA_SIZE; in mtk_wed_tx_buffer_alloc()
650 n_pages = dev->tx_buf_ring.size / MTK_WED_BUF_PER_PAGE; in mtk_wed_tx_buffer_alloc()
656 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
658 desc_ptr = dma_alloc_coherent(dev->hw->dev, in mtk_wed_tx_buffer_alloc()
659 dev->tx_buf_ring.size * desc_size, in mtk_wed_tx_buffer_alloc()
664 dev->tx_buf_ring.desc = desc_ptr; in mtk_wed_tx_buffer_alloc()
665 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
677 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
679 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
686 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
697 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
700 txd_size = dev->wlan.init_buf(buf, buf_phys, in mtk_wed_tx_buffer_alloc()
704 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_tx_buffer_alloc()
723 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
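The earlier ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1) rounds the buffer count down to a whole number of per-page groups; the & ~(n - 1) trick is only valid when n is a power of two, which MTK_WED_BUF_PER_PAGE (buffers per PAGE_SIZE) is assumed to be here:

/* round x down to a multiple of n, n a power of two */
#define ROUND_DOWN_POW2(x, n)   ((x) & ~((n) - 1))
/* e.g. ROUND_DOWN_POW2(1000, 128) == 896 */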
731 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_tx_buffer() argument
733 struct mtk_wed_buf *page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
734 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_free_tx_buffer()
740 if (!dev->tx_buf_ring.desc) in mtk_wed_free_tx_buffer()
743 for (i = 0; i < dev->tx_buf_ring.size; i += MTK_WED_BUF_PER_PAGE) { in mtk_wed_free_tx_buffer()
750 dma_unmap_page(dev->hw->dev, page_phy, PAGE_SIZE, in mtk_wed_free_tx_buffer()
755 dma_free_coherent(dev->hw->dev, in mtk_wed_free_tx_buffer()
756 dev->tx_buf_ring.size * hw->soc->tx_ring_desc_size, in mtk_wed_free_tx_buffer()
757 dev->tx_buf_ring.desc, in mtk_wed_free_tx_buffer()
758 dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
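Allocation and teardown mirror each other in these two functions: dma_alloc_coherent() provides the descriptor ring, each payload page is attached with dma_map_page() and checked with dma_mapping_error(), and the free path unmaps every page before dma_free_coherent() releases the ring. A condensed kernel-style sketch of that pairing and its error unwind — struct buf_ring_sketch and the fixed four-page array are hypothetical scaffolding, not the driver's types:

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/mm.h>

/* hypothetical container: just enough state for the ordering shown */
struct buf_ring_sketch {
        struct device *dev;
        void *desc;
        dma_addr_t desc_phys;
        struct page *pages[4];
        dma_addr_t page_phys[4];
};

static int buf_ring_alloc_sketch(struct buf_ring_sketch *br, size_t desc_bytes)
{
        int i;

        br->desc = dma_alloc_coherent(br->dev, desc_bytes, &br->desc_phys,
                                      GFP_KERNEL);
        if (!br->desc)
                return -ENOMEM;

        for (i = 0; i < ARRAY_SIZE(br->pages); i++) {
                br->pages[i] = alloc_page(GFP_KERNEL);
                if (!br->pages[i])
                        goto err_unwind;
                br->page_phys[i] = dma_map_page(br->dev, br->pages[i], 0,
                                                PAGE_SIZE, DMA_BIDIRECTIONAL);
                if (dma_mapping_error(br->dev, br->page_phys[i])) {
                        __free_page(br->pages[i]);
                        goto err_unwind;
                }
        }
        return 0;

err_unwind:
        while (--i >= 0) {      /* undo in reverse: unmap, then free */
                dma_unmap_page(br->dev, br->page_phys[i], PAGE_SIZE,
                               DMA_BIDIRECTIONAL);
                __free_page(br->pages[i]);
        }
        dma_free_coherent(br->dev, desc_bytes, br->desc, br->desc_phys);
        return -ENOMEM;
}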
765 mtk_wed_hwrro_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_hwrro_buffer_alloc() argument
773 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_buffer_alloc()
780 dev->hw_rro.size = dev->wlan.rx_nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_hwrro_buffer_alloc()
781 dev->hw_rro.pages = page_list; in mtk_wed_hwrro_buffer_alloc()
782 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_hwrro_buffer_alloc()
783 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_hwrro_buffer_alloc()
788 dev->hw_rro.desc = desc; in mtk_wed_hwrro_buffer_alloc()
789 dev->hw_rro.desc_phys = desc_phys; in mtk_wed_hwrro_buffer_alloc()
800 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
802 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_hwrro_buffer_alloc()
809 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
820 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
828 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_alloc() argument
833 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
834 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
835 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
840 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
841 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
842 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
844 return mtk_wed_hwrro_buffer_alloc(dev); in mtk_wed_rx_buffer_alloc()
848 mtk_wed_hwrro_free_buffer(struct mtk_wed_device *dev) in mtk_wed_hwrro_free_buffer() argument
850 struct mtk_wed_buf *page_list = dev->hw_rro.pages; in mtk_wed_hwrro_free_buffer()
851 struct mtk_wed_bm_desc *desc = dev->hw_rro.desc; in mtk_wed_hwrro_free_buffer()
854 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_free_buffer()
870 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_hwrro_free_buffer()
875 dma_free_coherent(dev->hw->dev, dev->hw_rro.size * sizeof(*desc), in mtk_wed_hwrro_free_buffer()
876 desc, dev->hw_rro.desc_phys); in mtk_wed_hwrro_free_buffer()
883 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_rx_buffer() argument
885 struct mtk_wed_bm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
890 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
891 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
892 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
894 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_free_rx_buffer()
898 mtk_wed_hwrro_init(struct mtk_wed_device *dev) in mtk_wed_hwrro_init() argument
900 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_hwrro_init()
903 wed_set(dev, MTK_WED_RRO_PG_BM_RX_DMAM, in mtk_wed_hwrro_init()
906 wed_w32(dev, MTK_WED_RRO_PG_BM_BASE, dev->hw_rro.desc_phys); in mtk_wed_hwrro_init()
908 wed_w32(dev, MTK_WED_RRO_PG_BM_INIT_PTR, in mtk_wed_hwrro_init()
914 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_hwrro_init()
918 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_hw_init() argument
920 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD, in mtk_wed_rx_buffer_hw_init()
921 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
922 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
923 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL | in mtk_wed_rx_buffer_hw_init()
924 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
925 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH, in mtk_wed_rx_buffer_hw_init()
927 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_buffer_hw_init()
929 mtk_wed_hwrro_init(dev); in mtk_wed_rx_buffer_hw_init()
933 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring) in mtk_wed_free_ring() argument
938 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
943 mtk_wed_free_rx_rings(struct mtk_wed_device *dev) in mtk_wed_free_rx_rings() argument
945 mtk_wed_free_rx_buffer(dev); in mtk_wed_free_rx_rings()
946 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
950 mtk_wed_free_tx_rings(struct mtk_wed_device *dev) in mtk_wed_free_tx_rings() argument
954 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
955 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
956 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
957 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
961 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en) in mtk_wed_set_ext_int() argument
965 switch (dev->hw->version) { in mtk_wed_set_ext_int()
983 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
986 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0); in mtk_wed_set_ext_int()
987 wed_r32(dev, MTK_WED_EXT_INT_MASK); in mtk_wed_set_ext_int()
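The trailing wed_r32() in mtk_wed_set_ext_int() is a posted-write flush: reading the register back forces the preceding write out of any intermediate write buffer before execution continues. The idiom in miniature (REG_INT_MASK and base are illustrative):

writel(mask, base + REG_INT_MASK);      /* write may be posted/buffered */
readl(base + REG_INT_MASK);             /* read-back forces it to complete */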
991 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable) in mtk_wed_set_512_support() argument
993 if (!mtk_wed_is_v2(dev->hw)) in mtk_wed_set_512_support()
997 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
998 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1001 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1003 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
1008 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, in mtk_wed_check_wfdma_rx_fill() argument
1023 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
1031 mtk_wed_dma_disable(struct mtk_wed_device *dev) in mtk_wed_dma_disable() argument
1033 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1037 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_dma_disable()
1039 wed_clr(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_disable()
1043 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1048 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_disable()
1049 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
1050 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1053 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1057 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_disable()
1059 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1062 if (mtk_wed_is_v3_or_greater(dev->hw) && in mtk_wed_dma_disable()
1063 mtk_wed_get_rx_capa(dev)) { in mtk_wed_dma_disable()
1064 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, in mtk_wed_dma_disable()
1066 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, in mtk_wed_dma_disable()
1071 mtk_wed_set_512_support(dev, false); in mtk_wed_dma_disable()
1075 mtk_wed_stop(struct mtk_wed_device *dev) in mtk_wed_stop() argument
1077 mtk_wed_set_ext_int(dev, false); in mtk_wed_stop()
1079 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1080 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1081 wdma_w32(dev, MTK_WDMA_INT_MASK, 0); in mtk_wed_stop()
1082 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0); in mtk_wed_stop()
1083 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, 0); in mtk_wed_stop()
1085 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_stop()
1088 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0); in mtk_wed_stop()
1089 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0); in mtk_wed_stop()
1093 mtk_wed_deinit(struct mtk_wed_device *dev) in mtk_wed_deinit() argument
1095 mtk_wed_stop(dev); in mtk_wed_deinit()
1096 mtk_wed_dma_disable(dev); in mtk_wed_deinit()
1098 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1104 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_deinit()
1107 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1112 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_deinit()
1113 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_deinit()
1114 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_TX_AMSDU); in mtk_wed_deinit()
1115 wed_clr(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_deinit()
1122 __mtk_wed_detach(struct mtk_wed_device *dev) in __mtk_wed_detach() argument
1124 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
1126 mtk_wed_deinit(dev); in __mtk_wed_detach()
1128 mtk_wdma_rx_reset(dev); in __mtk_wed_detach()
1129 mtk_wed_reset(dev, MTK_WED_RESET_WED); in __mtk_wed_detach()
1130 mtk_wed_amsdu_free_buffer(dev); in __mtk_wed_detach()
1131 mtk_wed_free_tx_buffer(dev); in __mtk_wed_detach()
1132 mtk_wed_free_tx_rings(dev); in __mtk_wed_detach()
1134 if (mtk_wed_get_rx_capa(dev)) { in __mtk_wed_detach()
1136 mtk_wed_wo_reset(dev); in __mtk_wed_detach()
1137 mtk_wed_free_rx_rings(dev); in __mtk_wed_detach()
1142 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
1145 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
1152 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
1153 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
1155 memset(dev, 0, sizeof(*dev)); in __mtk_wed_detach()
1162 mtk_wed_detach(struct mtk_wed_device *dev) in mtk_wed_detach() argument
1165 __mtk_wed_detach(dev); in mtk_wed_detach()
1170 mtk_wed_bus_init(struct mtk_wed_device *dev) in mtk_wed_bus_init() argument
1172 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
1174 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
1176 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_bus_init()
1187 if (dev->wlan.msi) { in mtk_wed_bus_init()
1188 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1189 dev->hw->pcie_base | 0xc08); in mtk_wed_bus_init()
1190 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1191 dev->hw->pcie_base | 0xc04); in mtk_wed_bus_init()
1192 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(8)); in mtk_wed_bus_init()
1194 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1195 dev->hw->pcie_base | 0x180); in mtk_wed_bus_init()
1196 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1197 dev->hw->pcie_base | 0x184); in mtk_wed_bus_init()
1198 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24)); in mtk_wed_bus_init()
1201 wed_w32(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1205 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1209 dev->hw->index)); in mtk_wed_bus_init()
1213 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_bus_init()
1223 mtk_wed_set_wpdma(struct mtk_wed_device *dev) in mtk_wed_set_wpdma() argument
1227 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_set_wpdma()
1228 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
1232 mtk_wed_bus_init(dev); in mtk_wed_set_wpdma()
1234 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
1235 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
1236 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
1237 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
1239 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_set_wpdma()
1242 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
1243 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring0, dev->wlan.wpdma_rx); in mtk_wed_set_wpdma()
1245 if (!dev->wlan.hw_rro) in mtk_wed_set_wpdma()
1248 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(0), dev->wlan.wpdma_rx_rro[0]); in mtk_wed_set_wpdma()
1249 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(1), dev->wlan.wpdma_rx_rro[1]); in mtk_wed_set_wpdma()
1251 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING_CFG(i), in mtk_wed_set_wpdma()
1252 dev->wlan.wpdma_rx_pg + i * 0x10); in mtk_wed_set_wpdma()
1256 mtk_wed_hw_init_early(struct mtk_wed_device *dev) in mtk_wed_hw_init_early() argument
1261 mtk_wed_deinit(dev); in mtk_wed_hw_init_early()
1262 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_hw_init_early()
1263 mtk_wed_set_wpdma(dev); in mtk_wed_hw_init_early()
1265 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init_early()
1271 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set); in mtk_wed_hw_init_early()
1273 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init_early()
1274 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
1276 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_hw_init_early()
1281 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset); in mtk_wed_hw_init_early()
1282 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset); in mtk_wed_hw_init_early()
1283 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_hw_init_early()
1284 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
1286 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
1287 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT); in mtk_wed_hw_init_early()
1288 wed_w32(dev, MTK_WED_WDMA_OFFSET0, in mtk_wed_hw_init_early()
1294 wed_w32(dev, MTK_WED_WDMA_OFFSET1, in mtk_wed_hw_init_early()
1303 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_rro_ring_alloc() argument
1306 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
1320 mtk_wed_rro_alloc(struct mtk_wed_device *dev) in mtk_wed_rro_alloc() argument
1326 index = of_property_match_string(dev->hw->node, "memory-region-names", in mtk_wed_rro_alloc()
1331 np = of_parse_phandle(dev->hw->node, "memory-region", index); in mtk_wed_rro_alloc()
1341 dev->rro.miod_phys = rmem->base; in mtk_wed_rro_alloc()
1342 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
1344 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
1349 mtk_wed_rro_cfg(struct mtk_wed_device *dev) in mtk_wed_rro_cfg() argument
1351 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
1380 mtk_wed_rro_hw_init(struct mtk_wed_device *dev) in mtk_wed_rro_hw_init() argument
1382 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG, in mtk_wed_rro_hw_init()
1388 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
1389 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1, in mtk_wed_rro_hw_init()
1391 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
1392 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1, in mtk_wed_rro_hw_init()
1394 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0); in mtk_wed_rro_hw_init()
1395 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
1397 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rro_hw_init()
1401 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rro_hw_init()
1402 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
1403 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rro_hw_init()
1407 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev) in mtk_wed_route_qm_hw_init() argument
1409 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_route_qm_hw_init()
1413 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM)) in mtk_wed_route_qm_hw_init()
1418 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_route_qm_hw_init()
1419 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1420 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT); in mtk_wed_route_qm_hw_init()
1421 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_route_qm_hw_init()
1423 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1424 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1426 wed_set(dev, MTK_WED_RTQM_ENQ_CFG0, in mtk_wed_route_qm_hw_init()
1428 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1431 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_route_qm_hw_init()
1435 mtk_wed_hw_init(struct mtk_wed_device *dev) in mtk_wed_hw_init() argument
1437 if (dev->init_done) in mtk_wed_hw_init()
1440 dev->init_done = true; in mtk_wed_hw_init()
1441 mtk_wed_set_ext_int(dev, false); in mtk_wed_hw_init()
1443 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
1444 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE); in mtk_wed_hw_init()
1446 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1447 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1450 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1453 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1456 } else if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_hw_init()
1457 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1460 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1463 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR, in mtk_wed_hw_init()
1466 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1469 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1472 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1474 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
1477 wed_w32(dev, dev->hw->soc->regmap.tx_bm_tkid, in mtk_wed_hw_init()
1478 FIELD_PREP(MTK_WED_TX_BM_TKID_START, dev->wlan.token_start) | in mtk_wed_hw_init()
1480 dev->wlan.token_start + dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
1482 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_hw_init()
1484 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init()
1486 wed_clr(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1489 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1492 dev->wlan.nbuf / 128) | in mtk_wed_hw_init()
1494 dev->wlan.nbuf / 128)); in mtk_wed_hw_init()
1496 wed_set(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1499 wed_w32(dev, MTK_WED_TX_BM_INIT_PTR, in mtk_wed_hw_init()
1504 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1505 wed_set(dev, MTK_WED_CTRL, in mtk_wed_hw_init()
1508 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_hw_init()
1510 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_hw_init()
1513 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_hw_init()
1516 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1518 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1521 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1523 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1527 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, in mtk_wed_hw_init()
1530 wed_w32(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, 0); in mtk_wed_hw_init()
1532 mtk_wed_rx_buffer_hw_init(dev); in mtk_wed_hw_init()
1533 mtk_wed_rro_hw_init(dev); in mtk_wed_hw_init()
1534 mtk_wed_route_qm_hw_init(dev); in mtk_wed_hw_init()
1537 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE); in mtk_wed_hw_init()
1538 if (!mtk_wed_is_v1(dev->hw)) in mtk_wed_hw_init()
1539 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE); in mtk_wed_hw_init()
1563 mtk_wed_rx_reset(struct mtk_wed_device *dev) in mtk_wed_rx_reset() argument
1565 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1575 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1576 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_rx_reset()
1577 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS, in mtk_wed_rx_reset()
1579 mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG); in mtk_wed_rx_reset()
1582 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN); in mtk_wed_rx_reset()
1583 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1585 if (!ret && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1586 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1589 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_rx_reset()
1590 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV); in mtk_wed_rx_reset()
1592 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1594 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1596 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1598 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1602 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1606 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1609 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1613 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_rx_reset()
1617 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rx_reset()
1618 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1621 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM); in mtk_wed_rx_reset()
1623 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rx_reset()
1626 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rx_reset()
1629 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1631 wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1635 wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_rx_reset()
1638 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1640 mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1644 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1646 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1651 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_rx_reset()
1652 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1655 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1656 } else if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1657 wed_set(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1658 wed_clr(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1659 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1661 wed_set(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_rx_reset()
1665 mtk_wdma_tx_reset(dev); in mtk_wed_rx_reset()
1668 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN); in mtk_wed_rx_reset()
1669 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1670 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS, in mtk_wed_rx_reset()
1673 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1675 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV); in mtk_wed_rx_reset()
1678 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_rx_reset()
1680 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN); in mtk_wed_rx_reset()
1682 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA); in mtk_wed_rx_reset()
1684 wed_set(dev, MTK_WED_RESET_IDX, in mtk_wed_rx_reset()
1685 dev->hw->soc->regmap.reset_idx_rx_mask); in mtk_wed_rx_reset()
1686 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_rx_reset()
1690 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_reset()
1691 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1693 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM); in mtk_wed_rx_reset()
1695 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1696 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_rx_reset()
1697 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1699 wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1700 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1712 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1713 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1716 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1719 mtk_wed_free_rx_buffer(dev); in mtk_wed_rx_reset()
1720 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_rx_reset()
1726 mtk_wed_reset_dma(struct mtk_wed_device *dev) in mtk_wed_reset_dma() argument
1732 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1733 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1736 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1741 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN); in mtk_wed_reset_dma()
1742 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_reset_dma()
1745 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA); in mtk_wed_reset_dma()
1747 wed_w32(dev, MTK_WED_RESET_IDX, in mtk_wed_reset_dma()
1748 dev->hw->soc->regmap.reset_idx_tx_mask); in mtk_wed_reset_dma()
1749 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1753 busy = !!mtk_wdma_rx_reset(dev); in mtk_wed_reset_dma()
1754 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1756 wed_r32(dev, MTK_WED_WDMA_GLO_CFG); in mtk_wed_reset_dma()
1758 wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val); in mtk_wed_reset_dma()
1760 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1765 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1767 if (!busy && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1768 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1772 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT); in mtk_wed_reset_dma()
1773 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV); in mtk_wed_reset_dma()
1775 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1777 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1779 mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1781 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1785 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1789 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1791 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1793 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1796 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1801 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN); in mtk_wed_reset_dma()
1804 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1806 wed_r32(dev, MTK_WED_TX_BM_INTF)); in mtk_wed_reset_dma()
1809 wed_r32(dev, MTK_WED_TX_TKID_INTF)); in mtk_wed_reset_dma()
1814 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT); in mtk_wed_reset_dma()
1815 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN); in mtk_wed_reset_dma()
1816 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_reset_dma()
1819 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1821 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1825 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1829 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_reset_dma()
1830 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV); in mtk_wed_reset_dma()
1831 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV); in mtk_wed_reset_dma()
1832 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1833 wed_w32(dev, MTK_WED_RX1_CTRL2, 0); in mtk_wed_reset_dma()
1835 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, in mtk_wed_reset_dma()
1838 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1841 dev->init_done = false; in mtk_wed_reset_dma()
1842 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1846 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX); in mtk_wed_reset_dma()
1847 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1850 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1852 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_reset_dma()
1853 mtk_wed_reset(dev, MTK_WED_RESET_TX_AMSDU); in mtk_wed_reset_dma()
1856 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_reset_dma()
1857 mtk_wed_rx_reset(dev); in mtk_wed_reset_dma()
1861 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_ring_alloc() argument
1864 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1877 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_rx_ring_setup() argument
1882 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1885 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1886 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_rx_ring_setup()
1887 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_rx_ring_setup()
1890 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1892 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1894 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_rx_ring_setup()
1896 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1898 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1905 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_tx_ring_setup() argument
1910 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1913 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1914 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_tx_ring_setup()
1915 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_tx_ring_setup()
1918 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_wdma_tx_ring_setup()
1936 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1938 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1940 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1941 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1947 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1949 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1951 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX, in mtk_wed_wdma_tx_ring_setup()
1953 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX, in mtk_wed_wdma_tx_ring_setup()
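Both WDMA ring setup paths program a ring the same way: write the descriptor DMA base, the element count, and zero the CPU index (plus, on the TX side, the DMA index) at fixed offsets from the ring's register block. A sketch of that bring-up, reusing the MTK_WED_RING_OFS_* offsets from the listing; the helper name is illustrative:

static void wdma_ring_hw_init_sketch(struct mtk_wed_device *dev, u32 ring_base,
                                     dma_addr_t desc_phys, u32 count)
{
        wdma_w32(dev, ring_base + MTK_WED_RING_OFS_BASE,
                 lower_32_bits(desc_phys));
        wdma_w32(dev, ring_base + MTK_WED_RING_OFS_COUNT, count);
        wdma_w32(dev, ring_base + MTK_WED_RING_OFS_CPU_IDX, 0);
        wdma_w32(dev, ring_base + MTK_WED_RING_OFS_DMA_IDX, 0);
}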
1961 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb, in mtk_wed_ppe_check() argument
1964 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1976 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1980 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_configure_irq() argument
1985 wed_set(dev, MTK_WED_CTRL, in mtk_wed_configure_irq()
1991 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_configure_irq()
1992 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, in mtk_wed_configure_irq()
1995 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, in mtk_wed_configure_irq()
1999 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask); in mtk_wed_configure_irq()
2001 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_configure_irq()
2002 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_TKID_ALI_EN); in mtk_wed_configure_irq()
2005 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX, in mtk_wed_configure_irq()
2011 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
2013 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
2016 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE, in mtk_wed_configure_irq()
2020 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
2022 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_configure_irq()
2023 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX, in mtk_wed_configure_irq()
2029 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
2031 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
2037 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask); in mtk_wed_configure_irq()
2038 wed_set(dev, MTK_WED_WDMA_INT_CTRL, in mtk_wed_configure_irq()
2040 dev->wdma_idx)); in mtk_wed_configure_irq()
2043 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask); in mtk_wed_configure_irq()
2045 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask); in mtk_wed_configure_irq()
2046 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask); in mtk_wed_configure_irq()
2047 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2048 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2053 mtk_wed_dma_enable(struct mtk_wed_device *dev) in mtk_wed_dma_enable() argument
2057 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2058 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_dma_enable()
2060 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2063 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2067 wed_set(dev, MTK_WED_WPDMA_CTRL, MTK_WED_WPDMA_CTRL_SDL1_FIXED); in mtk_wed_dma_enable()
2069 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2073 wdma_set(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wed_dma_enable()
2076 wed_set(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_enable()
2080 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2083 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_enable()
2084 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2089 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2093 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2094 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2097 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2099 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_EN); in mtk_wed_dma_enable()
2101 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2103 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2108 wdma_set(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wed_dma_enable()
2109 wdma_set(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2112 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2116 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_dma_enable()
2119 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2123 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RXD_READ_LEN); in mtk_wed_dma_enable()
2124 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_enable()
2129 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2130 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_dma_enable()
2135 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_dma_enable()
2136 wdma_set(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wed_dma_enable()
2137 wdma_set(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2141 struct mtk_wed_ring *ring = &dev->rx_ring[i]; in mtk_wed_dma_enable()
2147 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) { in mtk_wed_dma_enable()
2148 dev_err(dev->hw->dev, in mtk_wed_dma_enable()
2153 val = wifi_r32(dev, in mtk_wed_dma_enable()
2154 dev->wlan.wpdma_rx_glo - in mtk_wed_dma_enable()
2155 dev->wlan.phy_base) | MTK_WFMDA_RX_DMA_EN; in mtk_wed_dma_enable()
2156 wifi_w32(dev, in mtk_wed_dma_enable()
2157 dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, in mtk_wed_dma_enable()
2163 mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask, bool reset) in mtk_wed_start_hw_rro() argument
2167 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2168 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2170 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_start_hw_rro()
2174 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2179 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR); in mtk_wed_start_hw_rro()
2180 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2183 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_RX, in mtk_wed_start_hw_rro()
2189 dev->wlan.rro_rx_tbit[0]) | in mtk_wed_start_hw_rro()
2191 dev->wlan.rro_rx_tbit[1])); in mtk_wed_start_hw_rro()
2193 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_MSDU_PG, in mtk_wed_start_hw_rro()
2201 dev->wlan.rx_pg_tbit[0]) | in mtk_wed_start_hw_rro()
2203 dev->wlan.rx_pg_tbit[1]) | in mtk_wed_start_hw_rro()
2205 dev->wlan.rx_pg_tbit[2])); in mtk_wed_start_hw_rro()
2210 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2214 struct mtk_wed_ring *ring = &dev->rx_rro_ring[i]; in mtk_wed_start_hw_rro()
2219 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2220 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2225 struct mtk_wed_ring *ring = &dev->rx_page_ring[i]; in mtk_wed_start_hw_rro()
2230 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2231 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2237 mtk_wed_rro_rx_ring_setup(struct mtk_wed_device *dev, int idx, in mtk_wed_rro_rx_ring_setup() argument
2240 struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx]; in mtk_wed_rro_rx_ring_setup()
2243 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rro_rx_ring_setup()
2245 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rro_rx_ring_setup()
2251 mtk_wed_msdu_pg_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs) in mtk_wed_msdu_pg_rx_ring_setup() argument
2253 struct mtk_wed_ring *ring = &dev->rx_page_ring[idx]; in mtk_wed_msdu_pg_rx_ring_setup()
2256 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_msdu_pg_rx_ring_setup()
2258 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_msdu_pg_rx_ring_setup()
2264 mtk_wed_ind_rx_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_ind_rx_ring_setup() argument
2266 struct mtk_wed_ring *ring = &dev->ind_cmd_ring; in mtk_wed_ind_rx_ring_setup()
2271 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_BASE, in mtk_wed_ind_rx_ring_setup()
2274 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_COUNT, in mtk_wed_ind_rx_ring_setup()
2278 wed_w32(dev, MTK_WED_RRO_CFG0, dev->wlan.phy_base + in mtk_wed_ind_rx_ring_setup()
2279 dev->wlan.ind_cmd.ack_sn_addr); in mtk_wed_ind_rx_ring_setup()
2280 wed_w32(dev, MTK_WED_RRO_CFG1, in mtk_wed_ind_rx_ring_setup()
2282 dev->wlan.ind_cmd.win_size) | in mtk_wed_ind_rx_ring_setup()
2284 dev->wlan.ind_cmd.particular_sid)); in mtk_wed_ind_rx_ring_setup()
2287 wed_w32(dev, MTK_WED_ADDR_ELEM_CFG0, in mtk_wed_ind_rx_ring_setup()
2288 dev->wlan.ind_cmd.particular_se_phys); in mtk_wed_ind_rx_ring_setup()
2290 for (i = 0; i < dev->wlan.ind_cmd.se_group_nums; i++) { in mtk_wed_ind_rx_ring_setup()
2291 wed_w32(dev, MTK_WED_RADDR_ELEM_TBL_WDATA, in mtk_wed_ind_rx_ring_setup()
2292 dev->wlan.ind_cmd.addr_elem_phys[i] >> 4); in mtk_wed_ind_rx_ring_setup()
2293 wed_w32(dev, MTK_WED_ADDR_ELEM_TBL_CFG, in mtk_wed_ind_rx_ring_setup()
2296 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2298 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2300 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2305 for (i = 0; i < dev->wlan.ind_cmd.particular_sid; i++) { in mtk_wed_ind_rx_ring_setup()
2306 wed_w32(dev, MTK_WED_PN_CHECK_WDATA_M, in mtk_wed_ind_rx_ring_setup()
2309 wed_w32(dev, MTK_WED_PN_CHECK_CFG, MTK_WED_PN_CHECK_WR | in mtk_wed_ind_rx_ring_setup()
2313 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2315 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2317 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2321 wed_w32(dev, MTK_WED_RX_IND_CMD_CNT0, MTK_WED_RX_IND_CMD_DBG_CNT_EN); in mtk_wed_ind_rx_ring_setup()
2322 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_ind_rx_ring_setup()
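Programming the address-element and PN-check tables above follows an indirect-access shape: load the write-data register, trigger the write through the CFG register, then re-read CFG until the hardware flags completion, with a bounded retry and a dev_err() on failure. A user-space sketch with illustrative register handles and an assumed done flag:

#include <stdbool.h>
#include <stdint.h>

/* indirect table write: data -> trigger -> poll done, bounded retries */
static bool ind_table_write(volatile uint32_t *wdata, volatile uint32_t *cfg,
                            uint32_t val, uint32_t trigger, uint32_t done)
{
        int retry;

        *wdata = val;
        *cfg = trigger;
        for (retry = 0; retry < 100; retry++) {
                if (*cfg & done)
                        return true;
        }
        return false;
}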
2328 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_start() argument
2332 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev)) in mtk_wed_start()
2335 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
2336 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
2337 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false); in mtk_wed_start()
2339 mtk_wed_hw_init(dev); in mtk_wed_start()
2340 mtk_wed_configure_irq(dev, irq_mask); in mtk_wed_start()
2342 mtk_wed_set_ext_int(dev, true); in mtk_wed_start()
2344 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_start()
2345 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
2347 dev->hw->index); in mtk_wed_start()
2349 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
2350 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
2351 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_start()
2353 wed_w32(dev, MTK_WED_EXT_INT_MASK1, in mtk_wed_start()
2355 wed_w32(dev, MTK_WED_EXT_INT_MASK2, in mtk_wed_start()
2358 wed_r32(dev, MTK_WED_EXT_INT_MASK1); in mtk_wed_start()
2359 wed_r32(dev, MTK_WED_EXT_INT_MASK2); in mtk_wed_start()
2361 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_start()
2362 wed_w32(dev, MTK_WED_EXT_INT_MASK3, in mtk_wed_start()
2364 wed_r32(dev, MTK_WED_EXT_INT_MASK3); in mtk_wed_start()
2367 if (mtk_wed_rro_cfg(dev)) in mtk_wed_start()
2371 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
2372 mtk_wed_amsdu_init(dev); in mtk_wed_start()
2374 mtk_wed_dma_enable(dev); in mtk_wed_start()
2375 dev->running = true; in mtk_wed_start()
2379 mtk_wed_attach(struct mtk_wed_device *dev) in mtk_wed_attach() argument
2389 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
2390 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
2401 hw = mtk_wed_assign(dev); in mtk_wed_attach()
2408 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
2409 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
2410 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
2414 dev->hw = hw; in mtk_wed_attach()
2415 dev->dev = hw->dev; in mtk_wed_attach()
2416 dev->irq = hw->irq; in mtk_wed_attach()
2417 dev->wdma_idx = hw->index; in mtk_wed_attach()
2418 dev->version = hw->version; in mtk_wed_attach()
2419 dev->hw->pcie_base = mtk_wed_get_pcie_base(dev); in mtk_wed_attach()
2421 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
2422 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
2423 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
2425 ret = mtk_wed_tx_buffer_alloc(dev); in mtk_wed_attach()
2429 ret = mtk_wed_amsdu_buffer_alloc(dev); in mtk_wed_attach()
2433 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_attach()
2434 ret = mtk_wed_rro_alloc(dev); in mtk_wed_attach()
2439 mtk_wed_hw_init_early(dev); in mtk_wed_attach()
2444 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
2446 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_attach()
2450 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
2451 __mtk_wed_detach(dev); in mtk_wed_attach()
2460 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_tx_ring_setup() argument
2463 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
2477 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
2480 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE, in mtk_wed_tx_ring_setup()
2484 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_tx_ring_setup()
2491 if (mtk_wed_is_v3_or_greater(dev->hw) && idx == 1) { in mtk_wed_tx_ring_setup()
2493 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2497 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2502 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, in mtk_wed_tx_ring_setup()
2505 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, 0); in mtk_wed_tx_ring_setup()
2509 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
2510 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE); in mtk_wed_tx_ring_setup()
2511 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2513 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_tx_ring_setup()
2515 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_tx_ring_setup()
2517 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2523 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_txfree_ring_setup() argument
2525 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
2526 int i, index = mtk_wed_is_v1(dev->hw); in mtk_wed_txfree_ring_setup()
2539 wed_w32(dev, MTK_WED_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2540 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2547 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_rx_ring_setup() argument
2550 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
2552 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
2555 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE, in mtk_wed_rx_ring_setup()
2559 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_rx_ring_setup()
2568 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
2569 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE); in mtk_wed_rx_ring_setup()
2571 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rx_ring_setup()
2573 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rx_ring_setup()
2580 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_get() argument
2584 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_irq_get()
2590 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS); in mtk_wed_irq_get()
2591 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val); in mtk_wed_irq_get()
2593 if (!dev->hw->num_flows) in mtk_wed_irq_get()
2596 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
2598 val = wed_r32(dev, MTK_WED_INT_STATUS); in mtk_wed_irq_get()
2600 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */ in mtk_wed_irq_get()
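The read-then-write-back in mtk_wed_irq_get() is a write-1-to-clear acknowledge, as the /* ACK */ comment in the source suggests: writing the snapshot of MTK_WED_INT_STATUS back clears exactly the bits that were read, so an interrupt firing between the two accesses stays pending rather than being lost. In miniature:

u32 status = wed_r32(dev, MTK_WED_INT_STATUS);  /* snapshot pending bits */
wed_w32(dev, MTK_WED_INT_STATUS, status);       /* W1C: ack only those bits */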
2606 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_set_mask() argument
2608 if (!dev->running) in mtk_wed_irq_set_mask()
2611 mtk_wed_set_ext_int(dev, !!mask); in mtk_wed_irq_set_mask()
2612 wed_w32(dev, MTK_WED_INT_MASK, mask); in mtk_wed_irq_set_mask()
2675 if (!tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
2685 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev, in mtk_wed_setup_tc_block() argument
2705 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2716 priv->dev = dev; in mtk_wed_setup_tc_block()
2717 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL); in mtk_wed_setup_tc_block()
2728 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2744 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
2755 return mtk_wed_setup_tc_block(hw, dev, type_data); in mtk_wed_setup_tc()
2786 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
2799 get_device(&pdev->dev); in mtk_wed_add_hw()
2822 hw->dev = &pdev->dev; in mtk_wed_add_hw()
2866 put_device(&pdev->dev); in mtk_wed_add_hw()
2888 put_device(hw->dev); in mtk_wed_exit()