Lines matching refs: dev
56 struct net_device *dev;
93 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val)
95 regmap_update_bits(dev->hw->regs, reg, mask | val, val);
99 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask)
101 return wed_m32(dev, reg, 0, mask);
105 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask)
107 return wed_m32(dev, reg, mask, 0);
111 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val)
113 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val);
117 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask)
119 wdma_m32(dev, reg, 0, mask);
123 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask)
125 wdma_m32(dev, reg, mask, 0);
129 wifi_r32(struct mtk_wed_device *dev, u32 reg)
131 return readl(dev->wlan.base + reg);
135 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val)
137 writel(val, dev->wlan.base + reg);
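The three accessor families above (wed_*, wdma_*, wifi_*) expose the same masked read-modify-write idiom over a regmap, raw WDMA MMIO, and the mapped WLAN window respectively. A minimal caller-side sketch, assuming the MTK_WED_CTRL_TX_AMSDU_EN bit seen elsewhere in this listing; FIELD_MASK is a hypothetical multi-bit field:

        /* Set or clear a single bit via the regmap-backed WED helpers. */
        wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); /* reg |= bit  */
        wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); /* reg &= ~bit */

        /* Update a multi-bit field in place. Note wed_m32() widens the
         * update mask to (mask | val), so bits present only in val are
         * written as well.
         */
        wed_m32(dev, MTK_WED_CTRL, FIELD_MASK, FIELD_PREP(FIELD_MASK, 3));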
141 mtk_wed_read_reset(struct mtk_wed_device *dev)
143 return wed_r32(dev, MTK_WED_RESET);
147 mtk_wdma_read_reset(struct mtk_wed_device *dev)
149 return wdma_r32(dev, MTK_WDMA_GLO_CFG);
153 mtk_wdma_v3_rx_reset(struct mtk_wed_device *dev)
157 if (!mtk_wed_is_v3_or_greater(dev->hw))
160 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
161 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
165 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG))
166 dev_err(dev->hw->dev, "rx reset failed\n");
170 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG))
171 dev_err(dev->hw->dev, "rx reset failed\n");
173 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
174 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
178 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG))
179 dev_err(dev->hw->dev, "rx reset failed\n");
183 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG))
184 dev_err(dev->hw->dev, "rx reset failed\n");
187 wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
190 wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
195 wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG,
203 wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG,
213 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0),
215 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1),
218 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0),
220 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1),
224 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG,
226 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG,
230 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG,
232 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG,
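The elided conditions above poll the prefetch/writeback engines until their busy bits drop once the enable bits are cleared. A hedged reconstruction of one such wait, using read_poll_timeout() from linux/iopoll.h (the PREF_BUSY mask name is an assumption):

        u32 status;

        /* Sleep 0us between reads, give up after 10ms; on timeout the
         * function logs and carries on, matching the dev_err() lines above.
         */
        if (read_poll_timeout(wdma_r32, status,
                              !(status & MTK_WDMA_PREF_TX_CFG_PREF_BUSY),
                              0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG))
                dev_err(dev->hw->dev, "rx reset failed\n");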
237 mtk_wdma_rx_reset(struct mtk_wed_device *dev)
242 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN);
243 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status,
246 dev_err(dev->hw->dev, "rx reset failed\n");
248 mtk_wdma_v3_rx_reset(dev);
249 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX);
250 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
252 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) {
253 if (dev->rx_wdma[i].desc)
256 wdma_w32(dev,
264 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
266 return !!(wed_r32(dev, reg) & mask);
270 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
277 timeout, false, dev, reg, mask);
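A plausible shape for the polling helper whose tail is shown above: reuse mtk_wed_check_busy() as the read op and wait for the masked bits to clear (the 15ms step and 100-iteration budget are assumptions for this sketch):

        static int
        mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
        {
                int sleep = 15000;
                int timeout = 100 * sleep;
                u32 val;

                /* 0 once (wed_r32(reg) & mask) == 0, -ETIMEDOUT otherwise. */
                return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
                                         timeout, false, dev, reg, mask);
        }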
281 mtk_wdma_v3_tx_reset(struct mtk_wed_device *dev)
285 if (!mtk_wed_is_v3_or_greater(dev->hw))
288 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
289 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
293 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG))
294 dev_err(dev->hw->dev, "tx reset failed\n");
298 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG))
299 dev_err(dev->hw->dev, "tx reset failed\n");
301 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
302 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
306 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG))
307 dev_err(dev->hw->dev, "tx reset failed\n");
311 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG))
312 dev_err(dev->hw->dev, "tx reset failed\n");
315 wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
318 wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
323 wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG,
328 wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG,
335 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0),
337 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1),
340 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0),
342 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1),
346 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG,
348 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG,
352 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG,
354 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG,
359 mtk_wdma_tx_reset(struct mtk_wed_device *dev)
364 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN);
365 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status,
367 dev_err(dev->hw->dev, "tx reset failed\n");
369 mtk_wdma_v3_tx_reset(dev);
370 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX);
371 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
373 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++)
374 wdma_w32(dev,
379 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask)
383 wed_w32(dev, MTK_WED_RESET, mask);
384 if (readx_poll_timeout(mtk_wed_read_reset, dev, status,
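A likely completion of the two mtk_wed_reset() lines above: assert the requested reset bits, then poll mtk_wed_read_reset() until the hardware clears them (the 1ms budget and the WARN-on-timeout policy are assumptions):

        static void
        mtk_wed_reset(struct mtk_wed_device *dev, u32 mask)
        {
                u32 status;

                wed_w32(dev, MTK_WED_RESET, mask);
                if (readx_poll_timeout(mtk_wed_read_reset, dev, status,
                                       !(status & mask), 0, 1000))
                        WARN_ON_ONCE(1); /* reset bits never self-cleared */
        }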
390 mtk_wed_wo_read_status(struct mtk_wed_device *dev)
392 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS);
396 mtk_wed_wo_reset(struct mtk_wed_device *dev)
398 struct mtk_wed_wo *wo = dev->hw->wed_wo;
403 mtk_wdma_tx_reset(dev);
404 mtk_wed_reset(dev, MTK_WED_RESET_WED);
411 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val,
414 dev_err(dev->hw->dev, "failed to disable wed-wo\n");
419 switch (dev->hw->index) {
446 struct mtk_wed_device *dev;
452 dev = hw->wed_dev;
453 if (!dev || !dev->wlan.reset)
457 err = dev->wlan.reset(dev);
459 dev_err(dev->dev, "wlan reset failed: %d\n", err);
473 struct mtk_wed_device *dev;
478 dev = hw->wed_dev;
479 if (!dev || !dev->wlan.reset_complete)
482 dev->wlan.reset_complete(dev);
489 mtk_wed_assign(struct mtk_wed_device *dev)
494 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) {
495 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)];
517 hw->wed_dev = dev;
522 mtk_wed_amsdu_buffer_alloc(struct mtk_wed_device *dev)
524 struct mtk_wed_hw *hw = dev->hw;
531 wed_amsdu = devm_kcalloc(hw->dev, MTK_WED_AMSDU_NPAGES,
548 wed_amsdu[i].txd_phy = dma_map_single(hw->dev, ptr,
551 if (dma_mapping_error(hw->dev, wed_amsdu[i].txd_phy))
554 dev->hw->wed_amsdu = wed_amsdu;
560 dma_unmap_single(hw->dev, wed_amsdu[i].txd_phy,
566 mtk_wed_amsdu_free_buffer(struct mtk_wed_device *dev)
568 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu;
575 dma_unmap_single(dev->hw->dev, wed_amsdu[i].txd_phy,
583 mtk_wed_amsdu_init(struct mtk_wed_device *dev)
585 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu;
592 wed_w32(dev, MTK_WED_AMSDU_HIFTXD_BASE_L(i),
596 wed_w32(dev, MTK_WED_AMSDU_STA_INFO_INIT, MTK_WED_AMSDU_STA_RMVL |
599 dev->wlan.amsdu_max_len >> 8) |
601 dev->wlan.amsdu_max_subframes));
603 wed_w32(dev, MTK_WED_AMSDU_STA_INFO, MTK_WED_AMSDU_STA_INFO_DO_INIT);
605 ret = mtk_wed_poll_busy(dev, MTK_WED_AMSDU_STA_INFO,
608 dev_err(dev->hw->dev, "amsdu initialization failed\n");
613 wed_set(dev, MTK_WED_AMSDU_HIFTXD_CFG,
614 FIELD_PREP(MTK_WED_AMSDU_HIFTXD_SRC, dev->hw->index));
617 wed_set(dev, MTK_WED_AMSDU_PSE, MTK_WED_AMSDU_PSE_RESET);
618 ret = mtk_wed_poll_busy(dev, MTK_WED_MON_AMSDU_QMEM_STS1, BIT(29));
625 if (dev->wlan.id == 0x7991)
626 wed_clr(dev, MTK_WED_AMSDU_FIFO, MTK_WED_AMSDU_IS_PRIOR0_RING);
628 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN);
634 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev)
636 u32 desc_size = dev->hw->soc->tx_ring_desc_size;
638 int token = dev->wlan.token_start;
643 if (!mtk_wed_is_v3_or_greater(dev->hw)) {
644 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1);
645 dev->tx_buf_ring.size = ring_size;
647 dev->tx_buf_ring.size = MTK_WED_TX_BM_DMA_SIZE;
650 n_pages = dev->tx_buf_ring.size / MTK_WED_BUF_PER_PAGE;
656 dev->tx_buf_ring.pages = page_list;
658 desc_ptr = dma_alloc_coherent(dev->hw->dev,
659 dev->tx_buf_ring.size * desc_size,
664 dev->tx_buf_ring.desc = desc_ptr;
665 dev->tx_buf_ring.desc_phys = desc_phys;
677 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE,
679 if (dma_mapping_error(dev->hw->dev, page_phys)) {
686 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE,
697 if (!mtk_wed_is_v3_or_greater(dev->hw)) {
700 txd_size = dev->wlan.init_buf(buf, buf_phys,
704 if (mtk_wed_is_v1(dev->hw))
723 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE,
731 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev)
733 struct mtk_wed_buf *page_list = dev->tx_buf_ring.pages;
734 struct mtk_wed_hw *hw = dev->hw;
740 if (!dev->tx_buf_ring.desc)
743 for (i = 0; i < dev->tx_buf_ring.size; i += MTK_WED_BUF_PER_PAGE) {
750 dma_unmap_page(dev->hw->dev, page_phy, PAGE_SIZE,
755 dma_free_coherent(dev->hw->dev,
756 dev->tx_buf_ring.size * hw->soc->tx_ring_desc_size,
757 dev->tx_buf_ring.desc,
758 dev->tx_buf_ring.desc_phys);
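The descriptor memory allocated at the top of mtk_wed_tx_buffer_alloc() is returned here with dma_free_coherent(); size and DMA handle must match the allocation exactly. A self-contained sketch of the pairing (names are illustrative stand-ins):

        struct mtk_wdma_desc *desc;
        dma_addr_t desc_phys;
        size_t len = ring_size * desc_size;

        desc = dma_alloc_coherent(hw->dev, len, &desc_phys, GFP_KERNEL);
        if (!desc)
                return -ENOMEM;

        /* Hardware is given desc_phys; the CPU fills entries through desc. */

        dma_free_coherent(hw->dev, len, desc, desc_phys);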
765 mtk_wed_hwrro_buffer_alloc(struct mtk_wed_device *dev)
773 if (!dev->wlan.hw_rro)
780 dev->hw_rro.size = dev->wlan.rx_nbuf & ~(MTK_WED_BUF_PER_PAGE - 1);
781 dev->hw_rro.pages = page_list;
782 desc = dma_alloc_coherent(dev->hw->dev,
783 dev->wlan.rx_nbuf * sizeof(*desc),
788 dev->hw_rro.desc = desc;
789 dev->hw_rro.desc_phys = desc_phys;
800 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE,
802 if (dma_mapping_error(dev->hw->dev, page_phys)) {
809 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE,
820 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE,
828 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev)
833 dev->rx_buf_ring.size = dev->wlan.rx_nbuf;
834 desc = dma_alloc_coherent(dev->hw->dev,
835 dev->wlan.rx_nbuf * sizeof(*desc),
840 dev->rx_buf_ring.desc = desc;
841 dev->rx_buf_ring.desc_phys = desc_phys;
842 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt);
844 return mtk_wed_hwrro_buffer_alloc(dev);
848 mtk_wed_hwrro_free_buffer(struct mtk_wed_device *dev)
850 struct mtk_wed_buf *page_list = dev->hw_rro.pages;
851 struct mtk_wed_bm_desc *desc = dev->hw_rro.desc;
854 if (!dev->wlan.hw_rro)
870 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE,
875 dma_free_coherent(dev->hw->dev, dev->hw_rro.size * sizeof(*desc),
876 desc, dev->hw_rro.desc_phys);
883 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev)
885 struct mtk_wed_bm_desc *desc = dev->rx_buf_ring.desc;
890 dev->wlan.release_rx_buf(dev);
891 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc),
892 desc, dev->rx_buf_ring.desc_phys);
894 mtk_wed_hwrro_free_buffer(dev);
898 mtk_wed_hwrro_init(struct mtk_wed_device *dev)
900 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro)
903 wed_set(dev, MTK_WED_RRO_PG_BM_RX_DMAM,
906 wed_w32(dev, MTK_WED_RRO_PG_BM_BASE, dev->hw_rro.desc_phys);
908 wed_w32(dev, MTK_WED_RRO_PG_BM_INIT_PTR,
914 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN);
918 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev)
920 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD,
921 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size));
922 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys);
923 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL |
924 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt));
925 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH,
927 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN);
929 mtk_wed_hwrro_init(dev);
933 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring)
938 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size,
943 mtk_wed_free_rx_rings(struct mtk_wed_device *dev)
945 mtk_wed_free_rx_buffer(dev);
946 mtk_wed_free_ring(dev, &dev->rro.ring);
950 mtk_wed_free_tx_rings(struct mtk_wed_device *dev)
954 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++)
955 mtk_wed_free_ring(dev, &dev->tx_ring[i]);
956 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++)
957 mtk_wed_free_ring(dev, &dev->rx_wdma[i]);
961 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en)
965 switch (dev->hw->version) {
983 if (!dev->hw->num_flows)
986 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0);
987 wed_r32(dev, MTK_WED_EXT_INT_MASK);
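The wed_r32() immediately after the mask write above is a posted-write flush: reading the register back forces the write to reach the device before mtk_wed_set_ext_int() returns. The same two-line idiom, annotated:

        wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0);
        wed_r32(dev, MTK_WED_EXT_INT_MASK); /* read back: flush posted write */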
991 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable)
993 if (!mtk_wed_is_v2(dev->hw))
997 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR);
998 wed_w32(dev, MTK_WED_TXP_DW1,
1001 wed_w32(dev, MTK_WED_TXP_DW1,
1003 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR);
1008 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev,
1023 dev_err(dev->hw->dev, "rx dma enable failed\n");
1031 mtk_wed_dma_disable(struct mtk_wed_device *dev)
1033 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
1037 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
1039 wed_clr(dev, MTK_WED_GLO_CFG,
1043 wdma_clr(dev, MTK_WDMA_GLO_CFG,
1048 if (mtk_wed_is_v1(dev->hw)) {
1049 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0);
1050 wdma_clr(dev, MTK_WDMA_GLO_CFG,
1053 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
1057 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1059 wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
1062 if (mtk_wed_is_v3_or_greater(dev->hw) &&
1063 mtk_wed_get_rx_capa(dev)) {
1064 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG,
1066 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG,
1071 mtk_wed_set_512_support(dev, false);
1075 mtk_wed_stop(struct mtk_wed_device *dev)
1077 mtk_wed_dma_disable(dev);
1078 mtk_wed_set_ext_int(dev, false);
1080 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0);
1081 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0);
1082 wdma_w32(dev, MTK_WDMA_INT_MASK, 0);
1083 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0);
1085 if (!mtk_wed_get_rx_capa(dev))
1088 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0);
1089 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0);
1093 mtk_wed_deinit(struct mtk_wed_device *dev)
1095 mtk_wed_stop(dev);
1097 wed_clr(dev, MTK_WED_CTRL,
1103 if (mtk_wed_is_v1(dev->hw))
1106 wed_clr(dev, MTK_WED_CTRL,
1111 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1112 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN);
1113 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_TX_AMSDU);
1114 wed_clr(dev, MTK_WED_PCIE_INT_CTRL,
1121 __mtk_wed_detach(struct mtk_wed_device *dev)
1123 struct mtk_wed_hw *hw = dev->hw;
1125 mtk_wed_deinit(dev);
1127 mtk_wdma_rx_reset(dev);
1128 mtk_wed_reset(dev, MTK_WED_RESET_WED);
1129 mtk_wed_amsdu_free_buffer(dev);
1130 mtk_wed_free_tx_buffer(dev);
1131 mtk_wed_free_tx_rings(dev);
1133 if (mtk_wed_get_rx_capa(dev)) {
1135 mtk_wed_wo_reset(dev);
1136 mtk_wed_free_rx_rings(dev);
1141 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) {
1144 wlan_node = dev->wlan.pci_dev->dev.of_node;
1151 hw->eth->dma_dev != hw->eth->dev)
1152 mtk_eth_set_dma_device(hw->eth, hw->eth->dev);
1154 memset(dev, 0, sizeof(*dev));
1161 mtk_wed_detach(struct mtk_wed_device *dev)
1164 __mtk_wed_detach(dev);
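A likely shape for the wrapper around __mtk_wed_detach() shown above, serializing detach against attach with a driver-global mutex (the hw_lock name is an assumption, based on the shared hw_list in this file):

        void mtk_wed_detach(struct mtk_wed_device *dev)
        {
                mutex_lock(&hw_lock);
                __mtk_wed_detach(dev);
                mutex_unlock(&hw_lock);
        }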
1169 mtk_wed_bus_init(struct mtk_wed_device *dev)
1171 switch (dev->wlan.bus_type) {
1173 struct device_node *np = dev->hw->eth->dev->of_node;
1175 if (mtk_wed_is_v2(dev->hw)) {
1186 if (dev->wlan.msi) {
1187 wed_w32(dev, MTK_WED_PCIE_CFG_INTM,
1188 dev->hw->pcie_base | 0xc08);
1189 wed_w32(dev, MTK_WED_PCIE_CFG_BASE,
1190 dev->hw->pcie_base | 0xc04);
1191 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(8));
1193 wed_w32(dev, MTK_WED_PCIE_CFG_INTM,
1194 dev->hw->pcie_base | 0x180);
1195 wed_w32(dev, MTK_WED_PCIE_CFG_BASE,
1196 dev->hw->pcie_base | 0x184);
1197 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24));
1200 wed_w32(dev, MTK_WED_PCIE_INT_CTRL,
1204 wed_set(dev, MTK_WED_PCIE_INT_CTRL,
1208 dev->hw->index));
1212 wed_set(dev, MTK_WED_WPDMA_INT_CTRL,
1222 mtk_wed_set_wpdma(struct mtk_wed_device *dev)
1226 if (mtk_wed_is_v1(dev->hw)) {
1227 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys);
1231 mtk_wed_bus_init(dev);
1233 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int);
1234 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask);
1235 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx);
1236 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree);
1238 if (!mtk_wed_get_rx_capa(dev))
1241 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo);
1242 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring0, dev->wlan.wpdma_rx);
1244 if (!dev->wlan.hw_rro)
1247 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(0), dev->wlan.wpdma_rx_rro[0]);
1248 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(1), dev->wlan.wpdma_rx_rro[1]);
1250 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING_CFG(i),
1251 dev->wlan.wpdma_rx_pg + i * 0x10);
1255 mtk_wed_hw_init_early(struct mtk_wed_device *dev)
1260 mtk_wed_deinit(dev);
1261 mtk_wed_reset(dev, MTK_WED_RESET_WED);
1262 mtk_wed_set_wpdma(dev);
1264 if (!mtk_wed_is_v3_or_greater(dev->hw)) {
1270 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set);
1272 if (mtk_wed_is_v1(dev->hw)) {
1273 u32 offset = dev->hw->index ? 0x04000400 : 0;
1275 wdma_set(dev, MTK_WDMA_GLO_CFG,
1280 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset);
1281 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset);
1282 wed_w32(dev, MTK_WED_PCIE_CFG_BASE,
1283 MTK_PCIE_BASE(dev->hw->index));
1285 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy);
1286 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT);
1287 wed_w32(dev, MTK_WED_WDMA_OFFSET0,
1293 wed_w32(dev, MTK_WED_WDMA_OFFSET1,
1302 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
1305 ring->desc = dma_alloc_coherent(dev->hw->dev,
1319 mtk_wed_rro_alloc(struct mtk_wed_device *dev)
1324 ret = of_reserved_mem_region_to_resource_byname(dev->hw->node, "wo-dlm", &res);
1328 dev->rro.miod_phys = res.start;
1329 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys;
1331 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring,
1336 mtk_wed_rro_cfg(struct mtk_wed_device *dev)
1338 struct mtk_wed_wo *wo = dev->hw->wed_wo;
1367 mtk_wed_rro_hw_init(struct mtk_wed_device *dev)
1369 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG,
1375 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys);
1376 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1,
1378 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys);
1379 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1,
1381 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0);
1382 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys);
1384 wed_set(dev, MTK_WED_RROQM_RST_IDX,
1388 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
1389 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1);
1390 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN);
1394 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev)
1396 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM);
1400 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM))
1405 if (mtk_wed_is_v2(dev->hw)) {
1406 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
1407 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT);
1408 wed_set(dev, MTK_WED_RTQM_GLO_CFG,
1410 0x3 + dev->hw->index));
1411 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
1413 wed_set(dev, MTK_WED_RTQM_ENQ_CFG0,
1415 0x3 + dev->hw->index));
1418 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
1422 mtk_wed_hw_init(struct mtk_wed_device *dev)
1424 if (dev->init_done)
1427 dev->init_done = true;
1428 mtk_wed_set_ext_int(dev, false);
1430 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys);
1431 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE);
1433 if (mtk_wed_is_v1(dev->hw)) {
1434 wed_w32(dev, MTK_WED_TX_BM_CTRL,
1437 dev->tx_buf_ring.size / 128) |
1440 wed_w32(dev, MTK_WED_TX_BM_DYN_THR,
1443 } else if (mtk_wed_is_v2(dev->hw)) {
1444 wed_w32(dev, MTK_WED_TX_BM_CTRL,
1447 dev->tx_buf_ring.size / 128) |
1450 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR,
1453 wed_w32(dev, MTK_WED_TX_BM_DYN_THR,
1456 wed_w32(dev, MTK_WED_TX_TKID_CTRL,
1459 dev->tx_buf_ring.size / 128) |
1461 dev->tx_buf_ring.size / 128));
1464 wed_w32(dev, dev->hw->soc->regmap.tx_bm_tkid,
1465 FIELD_PREP(MTK_WED_TX_BM_TKID_START, dev->wlan.token_start) |
1467 dev->wlan.token_start + dev->wlan.nbuf - 1));
1469 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM);
1471 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1473 wed_clr(dev, MTK_WED_TX_BM_CTRL,
1476 wed_w32(dev, MTK_WED_TX_TKID_CTRL,
1479 dev->wlan.nbuf / 128) |
1481 dev->wlan.nbuf / 128));
1483 wed_set(dev, MTK_WED_TX_TKID_CTRL,
1486 wed_w32(dev, MTK_WED_TX_BM_INIT_PTR,
1491 if (mtk_wed_is_v1(dev->hw)) {
1492 wed_set(dev, MTK_WED_CTRL,
1495 } else if (mtk_wed_get_rx_capa(dev)) {
1497 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
1500 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0);
1503 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX,
1505 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX,
1508 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX,
1510 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX,
1514 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG,
1517 wed_w32(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, 0);
1519 mtk_wed_rx_buffer_hw_init(dev);
1520 mtk_wed_rro_hw_init(dev);
1521 mtk_wed_route_qm_hw_init(dev);
1524 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE);
1525 if (!mtk_wed_is_v1(dev->hw))
1526 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE);
1550 mtk_wed_rx_reset(struct mtk_wed_device *dev)
1552 struct mtk_wed_wo *wo = dev->hw->wed_wo;
1562 if (dev->wlan.hw_rro) {
1563 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN);
1564 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS,
1566 mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG);
1569 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN);
1570 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1572 if (!ret && mtk_wed_is_v3_or_greater(dev->hw))
1573 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
1576 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
1577 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV);
1579 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1581 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
1583 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
1585 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
1589 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
1593 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1596 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
1600 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0);
1604 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN);
1605 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1608 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM);
1610 wed_set(dev, MTK_WED_RROQM_RST_IDX,
1613 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
1616 if (dev->wlan.hw_rro) {
1618 wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
1622 wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN);
1625 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
1627 mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
1631 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2),
1633 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2),
1638 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
1639 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1642 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
1643 } else if (mtk_wed_is_v3_or_greater(dev->hw)) {
1644 wed_set(dev, MTK_WED_RTQM_RST, BIT(0));
1645 wed_clr(dev, MTK_WED_RTQM_RST, BIT(0));
1646 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
1648 wed_set(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
1652 mtk_wdma_tx_reset(dev);
1655 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN);
1656 if (mtk_wed_is_v3_or_greater(dev->hw))
1657 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS,
1660 mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1662 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV);
1665 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG,
1667 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN);
1669 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA);
1671 wed_set(dev, MTK_WED_RESET_IDX,
1672 dev->hw->soc->regmap.reset_idx_rx_mask);
1673 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1677 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN);
1678 mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1680 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM);
1682 if (dev->wlan.hw_rro) {
1683 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN);
1684 mtk_wed_poll_busy(dev, MTK_WED_CTRL,
1686 wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
1687 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
1699 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) {
1700 if (!dev->rx_ring[i].desc)
1703 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE,
1706 mtk_wed_free_rx_buffer(dev);
1707 mtk_wed_hwrro_free_buffer(dev);
1713 mtk_wed_reset_dma(struct mtk_wed_device *dev)
1719 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) {
1720 if (!dev->tx_ring[i].desc)
1723 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE,
1728 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN);
1729 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG,
1732 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA);
1734 wed_w32(dev, MTK_WED_RESET_IDX,
1735 dev->hw->soc->regmap.reset_idx_tx_mask);
1736 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1740 busy = !!mtk_wdma_rx_reset(dev);
1741 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1743 wed_r32(dev, MTK_WED_WDMA_GLO_CFG);
1745 wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val);
1747 wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
1752 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG,
1754 if (!busy && mtk_wed_is_v3_or_greater(dev->hw))
1755 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
1759 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT);
1760 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV);
1762 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1764 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
1766 mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
1768 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
1772 wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
1776 wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
1778 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0);
1780 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
1783 wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
1788 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN);
1791 if (mtk_wed_is_v1(dev->hw))
1793 wed_r32(dev, MTK_WED_TX_BM_INTF));
1796 wed_r32(dev, MTK_WED_TX_TKID_INTF));
1801 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT);
1802 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN);
1803 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM);
1806 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
1808 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
1812 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
1816 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
1817 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV);
1818 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV);
1819 if (mtk_wed_is_v3_or_greater(dev->hw))
1820 wed_w32(dev, MTK_WED_RX1_CTRL2, 0);
1822 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX,
1825 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0);
1828 dev->init_done = false;
1829 if (mtk_wed_is_v1(dev->hw))
1833 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX);
1834 wed_w32(dev, MTK_WED_RESET_IDX, 0);
1837 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1839 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN);
1840 mtk_wed_reset(dev, MTK_WED_RESET_TX_AMSDU);
1843 if (mtk_wed_get_rx_capa(dev))
1844 mtk_wed_rx_reset(dev);
1848 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring,
1851 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size,
1864 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size,
1869 if (idx >= ARRAY_SIZE(dev->rx_wdma))
1872 wdma = &dev->rx_wdma[idx];
1873 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE,
1874 dev->hw->soc->wdma_desc_size, true))
1877 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE,
1879 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT,
1881 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
1883 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE,
1885 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT,
1892 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size,
1897 if (idx >= ARRAY_SIZE(dev->tx_wdma))
1900 wdma = &dev->tx_wdma[idx];
1901 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE,
1902 dev->hw->soc->wdma_desc_size, true))
1905 if (mtk_wed_is_v3_or_greater(dev->hw)) {
1923 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE,
1925 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT,
1927 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
1928 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0);
1934 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE,
1936 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT,
1938 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX,
1940 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX,
1948 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb,
1951 struct mtk_eth *eth = dev->hw->eth;
1963 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash);
1967 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask)
1972 wed_set(dev, MTK_WED_CTRL,
1978 if (mtk_wed_is_v1(dev->hw)) {
1979 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER,
1982 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER,
1986 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask);
1988 if (mtk_wed_is_v3_or_greater(dev->hw))
1989 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_TKID_ALI_EN);
1992 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX,
1998 dev->wlan.tx_tbit[0]) |
2000 dev->wlan.tx_tbit[1]));
2003 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE,
2007 dev->wlan.txfree_tbit));
2009 if (mtk_wed_get_rx_capa(dev)) {
2010 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX,
2016 dev->wlan.rx_tbit[0]) |
2018 dev->wlan.rx_tbit[1]));
2024 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask);
2025 wed_set(dev, MTK_WED_WDMA_INT_CTRL,
2027 dev->wdma_idx));
2030 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask);
2032 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask);
2033 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask);
2034 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask);
2035 wed_w32(dev, MTK_WED_INT_MASK, irq_mask);
2040 mtk_wed_dma_enable(struct mtk_wed_device *dev)
2044 if (!mtk_wed_is_v3_or_greater(dev->hw)) {
2045 wed_set(dev, MTK_WED_WPDMA_INT_CTRL,
2047 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
2050 wdma_set(dev, MTK_WDMA_GLO_CFG,
2054 wed_set(dev, MTK_WED_WPDMA_CTRL, MTK_WED_WPDMA_CTRL_SDL1_FIXED);
2056 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
2060 wdma_set(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN);
2063 wed_set(dev, MTK_WED_GLO_CFG,
2067 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
2070 if (mtk_wed_is_v1(dev->hw)) {
2071 wdma_set(dev, MTK_WDMA_GLO_CFG,
2076 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
2080 if (mtk_wed_is_v3_or_greater(dev->hw)) {
2081 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG,
2084 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
2086 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_EN);
2088 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
2090 wed_set(dev, MTK_WED_WPDMA_GLO_CFG,
2095 wdma_set(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
2096 wdma_set(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
2099 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
2103 if (!mtk_wed_get_rx_capa(dev))
2106 wed_set(dev, MTK_WED_WDMA_GLO_CFG,
2110 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RXD_READ_LEN);
2111 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
2116 if (mtk_wed_is_v3_or_greater(dev->hw)) {
2117 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
2122 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN);
2123 wdma_set(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
2124 wdma_set(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
2128 struct mtk_wed_ring *ring = &dev->rx_ring[i];
2134 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) {
2135 dev_err(dev->hw->dev,
2140 val = wifi_r32(dev,
2141 dev->wlan.wpdma_rx_glo -
2142 dev->wlan.phy_base) | MTK_WFMDA_RX_DMA_EN;
2143 wifi_w32(dev,
2144 dev->wlan.wpdma_rx_glo - dev->wlan.phy_base,
2150 mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask, bool reset)
2154 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask);
2155 wed_w32(dev, MTK_WED_INT_MASK, irq_mask);
2157 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro)
2161 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
2166 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR);
2167 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
2170 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_RX,
2176 dev->wlan.rro_rx_tbit[0]) |
2178 dev->wlan.rro_rx_tbit[1]));
2180 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_MSDU_PG,
2188 dev->wlan.rx_pg_tbit[0]) |
2190 dev->wlan.rx_pg_tbit[1]) |
2192 dev->wlan.rx_pg_tbit[2]));
2197 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
2201 struct mtk_wed_ring *ring = &dev->rx_rro_ring[i];
2206 if (mtk_wed_check_wfdma_rx_fill(dev, ring))
2207 dev_err(dev->hw->dev,
2212 struct mtk_wed_ring *ring = &dev->rx_page_ring[i];
2217 if (mtk_wed_check_wfdma_rx_fill(dev, ring))
2218 dev_err(dev->hw->dev,
2224 mtk_wed_rro_rx_ring_setup(struct mtk_wed_device *dev, int idx,
2227 struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx];
2230 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_BASE,
2232 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_COUNT,
2238 mtk_wed_msdu_pg_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs)
2240 struct mtk_wed_ring *ring = &dev->rx_page_ring[idx];
2243 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_BASE,
2245 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_COUNT,
2251 mtk_wed_ind_rx_ring_setup(struct mtk_wed_device *dev, void __iomem *regs)
2253 struct mtk_wed_ring *ring = &dev->ind_cmd_ring;
2258 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_BASE,
2261 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_COUNT,
2265 wed_w32(dev, MTK_WED_RRO_CFG0, dev->wlan.phy_base +
2266 dev->wlan.ind_cmd.ack_sn_addr);
2267 wed_w32(dev, MTK_WED_RRO_CFG1,
2269 dev->wlan.ind_cmd.win_size) |
2271 dev->wlan.ind_cmd.particular_sid));
2274 wed_w32(dev, MTK_WED_ADDR_ELEM_CFG0,
2275 dev->wlan.ind_cmd.particular_se_phys);
2277 for (i = 0; i < dev->wlan.ind_cmd.se_group_nums; i++) {
2278 wed_w32(dev, MTK_WED_RADDR_ELEM_TBL_WDATA,
2279 dev->wlan.ind_cmd.addr_elem_phys[i] >> 4);
2280 wed_w32(dev, MTK_WED_ADDR_ELEM_TBL_CFG,
2283 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG);
2285 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG);
2287 dev_err(dev->hw->dev,
2292 for (i = 0; i < dev->wlan.ind_cmd.particular_sid; i++) {
2293 wed_w32(dev, MTK_WED_PN_CHECK_WDATA_M,
2296 wed_w32(dev, MTK_WED_PN_CHECK_CFG, MTK_WED_PN_CHECK_WR |
2300 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG);
2302 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG);
2304 dev_err(dev->hw->dev,
2308 wed_w32(dev, MTK_WED_RX_IND_CMD_CNT0, MTK_WED_RX_IND_CMD_DBG_CNT_EN);
2309 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN);
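Both table loads above follow write-then-poll: push one entry through the WDATA register, kick the config register, and re-read it until a ready bit appears. A hedged sketch of the loop (the MTK_WED_ADDR_ELEM_TBL_WR_RDY mask, the retry budget, and the error string are assumptions):

        int count = 100;

        val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG);
        while (!(val & MTK_WED_ADDR_ELEM_TBL_WR_RDY) && count--)
                val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG);
        if (count < 0) /* budget exhausted without seeing the ready bit */
                dev_err(dev->hw->dev, "addr elem table write timed out\n");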
2315 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask)
2319 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev))
2322 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++)
2323 if (!dev->rx_wdma[i].desc)
2324 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false);
2326 mtk_wed_hw_init(dev);
2327 mtk_wed_configure_irq(dev, irq_mask);
2329 mtk_wed_set_ext_int(dev, true);
2331 if (mtk_wed_is_v1(dev->hw)) {
2332 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN |
2334 dev->hw->index);
2336 val |= BIT(0) | (BIT(1) * !!dev->hw->index);
2337 regmap_write(dev->hw->mirror, dev->hw->index * 4, val);
2338 } else if (mtk_wed_get_rx_capa(dev)) {
2340 wed_w32(dev, MTK_WED_EXT_INT_MASK1,
2342 wed_w32(dev, MTK_WED_EXT_INT_MASK2,
2345 wed_r32(dev, MTK_WED_EXT_INT_MASK1);
2346 wed_r32(dev, MTK_WED_EXT_INT_MASK2);
2348 if (mtk_wed_is_v3_or_greater(dev->hw)) {
2349 wed_w32(dev, MTK_WED_EXT_INT_MASK3,
2351 wed_r32(dev, MTK_WED_EXT_INT_MASK3);
2354 if (mtk_wed_rro_cfg(dev))
2358 mtk_wed_set_512_support(dev, dev->wlan.wcid_512);
2359 mtk_wed_amsdu_init(dev);
2361 mtk_wed_dma_enable(dev);
2362 dev->running = true;
2366 mtk_wed_attach(struct mtk_wed_device *dev)
2376 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE &&
2377 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) ||
2388 hw = mtk_wed_assign(dev);
2395 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE
2396 ? &dev->wlan.pci_dev->dev
2397 : &dev->wlan.platform_dev->dev;
2401 dev->hw = hw;
2402 dev->dev = hw->dev;
2403 dev->irq = hw->irq;
2404 dev->wdma_idx = hw->index;
2405 dev->version = hw->version;
2406 dev->hw->pcie_base = mtk_wed_get_pcie_base(dev);
2408 if (hw->eth->dma_dev == hw->eth->dev &&
2409 of_dma_is_coherent(hw->eth->dev->of_node))
2410 mtk_eth_set_dma_device(hw->eth, hw->dev);
2412 ret = mtk_wed_tx_buffer_alloc(dev);
2416 ret = mtk_wed_amsdu_buffer_alloc(dev);
2420 if (mtk_wed_get_rx_capa(dev)) {
2421 ret = mtk_wed_rro_alloc(dev);
2426 mtk_wed_hw_init_early(dev);
2431 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID);
2433 if (mtk_wed_get_rx_capa(dev))
2437 dev_err(dev->hw->dev, "failed to attach wed device\n");
2438 __mtk_wed_detach(dev);
2447 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs,
2450 struct mtk_wed_ring *ring = &dev->tx_ring[idx];
2464 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring)))
2467 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE,
2471 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE,
2478 if (mtk_wed_is_v3_or_greater(dev->hw) && idx == 1) {
2480 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG,
2484 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
2489 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG,
2492 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, 0);
2496 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
2497 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE);
2498 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0);
2500 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE,
2502 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT,
2504 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0);
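mtk_wed_tx_ring_setup() programs the same ring twice: once through the WLAN's WPDMA window (wpdma_tx_w32) and once into WED's own mirror (wed_w32), so both engines agree on base, size, and index. From the WLAN driver's side this is reached through the wrappers in include/linux/soc/mediatek/mtk_wed.h; a consumer sketch, where the ring index, register pointer, and irq mask are placeholders:

        if (mtk_wed_device_attach(wed) == 0) {
                /* Hand ring 0's register window to WED, then start it. */
                mtk_wed_device_tx_ring_setup(wed, 0, tx_ring_regs, false);
                mtk_wed_device_start(wed, irq_mask);
        }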
2510 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs)
2512 struct mtk_wed_ring *ring = &dev->txfree_ring;
2513 int i, index = mtk_wed_is_v1(dev->hw);
2526 wed_w32(dev, MTK_WED_RING_RX(index) + i, val);
2527 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val);
2534 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs,
2537 struct mtk_wed_ring *ring = &dev->rx_ring[idx];
2539 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring)))
2542 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE,
2546 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE,
2555 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys);
2556 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE);
2558 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE,
2560 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT,
2567 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask)
2571 if (mtk_wed_is_v3_or_greater(dev->hw))
2577 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS);
2578 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val);
2580 if (!dev->hw->num_flows)
2583 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val);
2585 val = wed_r32(dev, MTK_WED_INT_STATUS);
2587 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */
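Both status registers above are acknowledged write-1-to-clear: writing back exactly the bits just read clears them without racing against newly latched events. The idiom in isolation:

        val = wed_r32(dev, MTK_WED_INT_STATUS);
        val &= mask;
        wed_w32(dev, MTK_WED_INT_STATUS, val); /* W1C: ack only bits we saw */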
2593 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask)
2595 mtk_wed_set_ext_int(dev, !!mask);
2596 wed_w32(dev, MTK_WED_INT_MASK, mask);
2659 if (!priv || !tc_can_offload(priv->dev))
2670 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev,
2690 block_cb = flow_block_cb_lookup(f->block, cb, dev);
2701 priv->dev = dev;
2702 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL);
2713 block_cb = flow_block_cb_lookup(f->block, cb, dev);
2730 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev,
2741 return mtk_wed_setup_tc_block(hw, dev, type_data);
2772 struct device_node *eth_np = eth->dev->of_node;
2807 hw->dev = &pdev->dev;
2851 put_device(&pdev->dev);
2873 put_device(hw->dev);