Lines Matching +full:dma +full:- +full:info

1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
40 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_dma_recalc() local
43 rp = bd_ring->rp; in rtw89_pci_dma_recalc()
44 wp = bd_ring->wp; in rtw89_pci_dma_recalc()
45 len = bd_ring->len; in rtw89_pci_dma_recalc()
49 cnt = cur_rp >= rp ? cur_rp - rp : len - (rp - cur_rp); in rtw89_pci_dma_recalc()
51 if (info->rx_ring_eq_is_full) in rtw89_pci_dma_recalc()
54 cnt = cur_rp >= wp ? cur_rp - wp : len - (wp - cur_rp); in rtw89_pci_dma_recalc()
57 bd_ring->rp = cur_rp; in rtw89_pci_dma_recalc()
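
The rtw89_pci_dma_recalc() matches above derive how many descriptors the hardware has consumed (TX) or produced (RX) by comparing its current pointer with the software copy, wrapping on the ring length. A minimal standalone sketch of that circular-ring arithmetic (helper name and signature are illustrative, not the driver's):

	/* Entries between software pointer @sw and the hardware's current
	 * pointer @cur on a ring of @len descriptors, wrap-around safe.
	 * Mirrors: cnt = cur_rp >= rp ? cur_rp - rp : len - (rp - cur_rp);
	 */
	static inline unsigned int ring_dist(unsigned int cur, unsigned int sw,
					     unsigned int len)
	{
		return cur >= sw ? cur - sw : len - (sw - cur);
	}
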
65 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in rtw89_pci_txbd_recalc()
66 u32 addr_idx = bd_ring->addr.idx; in rtw89_pci_txbd_recalc()
83 while (cnt--) { in rtw89_pci_release_fwcmd()
84 skb = skb_dequeue(&rtwpci->h2c_queue); in rtw89_pci_release_fwcmd()
86 rtw89_err(rtwdev, "failed to pre-release fwcmd\n"); in rtw89_pci_release_fwcmd()
89 skb_queue_tail(&rtwpci->h2c_release_queue, skb); in rtw89_pci_release_fwcmd()
92 qlen = skb_queue_len(&rtwpci->h2c_release_queue); in rtw89_pci_release_fwcmd()
94 qlen = qlen > RTW89_PCI_MULTITAG ? qlen - RTW89_PCI_MULTITAG : 0; in rtw89_pci_release_fwcmd()
96 while (qlen--) { in rtw89_pci_release_fwcmd()
97 skb = skb_dequeue(&rtwpci->h2c_release_queue); in rtw89_pci_release_fwcmd()
103 dma_unmap_single(&rtwpci->pdev->dev, tx_data->dma, skb->len, in rtw89_pci_release_fwcmd()
112 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[RTW89_TXCH_CH12]; in rtw89_pci_reclaim_tx_fwcmd()
124 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_recalc()
125 u32 addr_idx = bd_ring->addr.idx; in rtw89_pci_rxbd_recalc()
138 dma_addr_t dma; in rtw89_pci_sync_skb_for_cpu() local
141 dma = rx_info->dma; in rtw89_pci_sync_skb_for_cpu()
142 dma_sync_single_for_cpu(rtwdev->dev, dma, RTW89_PCI_RX_BUF_SIZE, in rtw89_pci_sync_skb_for_cpu()
150 dma_addr_t dma; in rtw89_pci_sync_skb_for_device() local
153 dma = rx_info->dma; in rtw89_pci_sync_skb_for_device()
154 dma_sync_single_for_device(rtwdev->dev, dma, RTW89_PCI_RX_BUF_SIZE, in rtw89_pci_sync_skb_for_device()
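
The two sync helpers above hand a streaming-mapped RX buffer back and forth between CPU and device. A hedged sketch of that ownership pattern, assuming a buffer already mapped with dma_map_single() for DMA_FROM_DEVICE (the helper and its parse callback are placeholders, not driver APIs):

	#include <linux/dma-mapping.h>

	/* Give the mapped RX buffer to the CPU before parsing it, then hand
	 * it back to the device so the hardware may refill it.
	 */
	static void demo_rx_buf_access(struct device *dev, dma_addr_t dma,
				       size_t size, void (*parse)(void *priv),
				       void *priv)
	{
		dma_sync_single_for_cpu(dev, dma, size, DMA_FROM_DEVICE);
		parse(priv);
		dma_sync_single_for_device(dev, dma, size, DMA_FROM_DEVICE);
	}
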
164 rxbd_info = (struct rtw89_pci_rxbd_info *)skb->data; in rtw89_pci_rxbd_info_update()
165 rx_info->fs = le32_get_bits(rxbd_info->dword, RTW89_PCI_RXBD_FS); in rtw89_pci_rxbd_info_update()
166 rx_info->ls = le32_get_bits(rxbd_info->dword, RTW89_PCI_RXBD_LS); in rtw89_pci_rxbd_info_update()
167 rx_info->len = le32_get_bits(rxbd_info->dword, RTW89_PCI_RXBD_WRITE_SIZE); in rtw89_pci_rxbd_info_update()
168 rx_info->tag = le32_get_bits(rxbd_info->dword, RTW89_PCI_RXBD_TAG); in rtw89_pci_rxbd_info_update()
175 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_txdma_ch_pcie() local
176 const struct rtw89_reg_def *dma_stop1 = &info->dma_stop1; in rtw89_pci_ctrl_txdma_ch_pcie()
177 const struct rtw89_reg_def *dma_stop2 = &info->dma_stop2; in rtw89_pci_ctrl_txdma_ch_pcie()
180 rtw89_write32_clr(rtwdev, dma_stop1->addr, dma_stop1->mask); in rtw89_pci_ctrl_txdma_ch_pcie()
181 if (dma_stop2->addr) in rtw89_pci_ctrl_txdma_ch_pcie()
182 rtw89_write32_clr(rtwdev, dma_stop2->addr, dma_stop2->mask); in rtw89_pci_ctrl_txdma_ch_pcie()
184 rtw89_write32_set(rtwdev, dma_stop1->addr, dma_stop1->mask); in rtw89_pci_ctrl_txdma_ch_pcie()
185 if (dma_stop2->addr) in rtw89_pci_ctrl_txdma_ch_pcie()
186 rtw89_write32_set(rtwdev, dma_stop2->addr, dma_stop2->mask); in rtw89_pci_ctrl_txdma_ch_pcie()
192 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_txdma_fw_ch_pcie() local
193 const struct rtw89_reg_def *dma_stop1 = &info->dma_stop1; in rtw89_pci_ctrl_txdma_fw_ch_pcie()
196 rtw89_write32_clr(rtwdev, dma_stop1->addr, B_AX_STOP_CH12); in rtw89_pci_ctrl_txdma_fw_ch_pcie()
198 rtw89_write32_set(rtwdev, dma_stop1->addr, B_AX_STOP_CH12); in rtw89_pci_ctrl_txdma_fw_ch_pcie()
208 u32 copy_len = rx_info->len - offset; in rtw89_skb_put_rx_data()
213 rx_info->len, desc_info->pkt_size, offset, fs, ls); in rtw89_skb_put_rx_data()
215 skb->data, rx_info->len); in rtw89_skb_put_rx_data()
216 /* length of a single segment skb is desc_info->pkt_size */ in rtw89_skb_put_rx_data()
218 copy_len = desc_info->pkt_size; in rtw89_skb_put_rx_data()
225 skb_put_data(new, skb->data + offset, copy_len); in rtw89_skb_put_rx_data()
233 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_rx_skb_idx() local
234 u32 wp = bd_ring->wp; in rtw89_pci_get_rx_skb_idx()
236 if (!info->rx_ring_eq_is_full) in rtw89_pci_get_rx_skb_idx()
239 if (++wp >= bd_ring->len) in rtw89_pci_get_rx_skb_idx()
248 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_deliver_skbs()
250 struct rtw89_rx_desc_info *desc_info = &rx_ring->diliver_desc; in rtw89_pci_rxbd_deliver_skbs()
251 struct sk_buff *new = rx_ring->diliver_skb; in rtw89_pci_rxbd_deliver_skbs()
261 skb = rx_ring->buf[skb_idx]; in rtw89_pci_rxbd_deliver_skbs()
266 rtw89_err(rtwdev, "failed to update %d RXBD info: %d\n", in rtw89_pci_rxbd_deliver_skbs()
267 bd_ring->wp, ret); in rtw89_pci_rxbd_deliver_skbs()
272 fs = rx_info->fs; in rtw89_pci_rxbd_deliver_skbs()
273 ls = rx_info->ls; in rtw89_pci_rxbd_deliver_skbs()
281 if (desc_info->ready) { in rtw89_pci_rxbd_deliver_skbs()
282 rtw89_warn(rtwdev, "desc info should not be ready before first segment start\n"); in rtw89_pci_rxbd_deliver_skbs()
286 rtw89_chip_query_rxdesc(rtwdev, desc_info, skb->data, rxinfo_size); in rtw89_pci_rxbd_deliver_skbs()
288 new = rtw89_alloc_skb_for_rx(rtwdev, desc_info->pkt_size); in rtw89_pci_rxbd_deliver_skbs()
292 rx_ring->diliver_skb = new; in rtw89_pci_rxbd_deliver_skbs()
295 offset = desc_info->offset + desc_info->rxd_len; in rtw89_pci_rxbd_deliver_skbs()
308 if (!desc_info->ready) { in rtw89_pci_rxbd_deliver_skbs()
314 rx_ring->diliver_skb = NULL; in rtw89_pci_rxbd_deliver_skbs()
315 desc_info->ready = false; in rtw89_pci_rxbd_deliver_skbs()
326 rx_ring->diliver_skb = NULL; in rtw89_pci_rxbd_deliver_skbs()
327 desc_info->ready = false; in rtw89_pci_rxbd_deliver_skbs()
336 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_deliver()
339 while (cnt && rtwdev->napi_budget_countdown > 0) { in rtw89_pci_rxbd_deliver()
349 cnt -= rx_cnt; in rtw89_pci_rxbd_deliver()
352 rtw89_write16(rtwdev, bd_ring->addr.idx, bd_ring->wp); in rtw89_pci_rxbd_deliver()
359 int countdown = rtwdev->napi_budget_countdown; in rtw89_pci_poll_rxq_dma()
362 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RXQ]; in rtw89_pci_poll_rxq_dma()
373 if (rtwdev->napi_budget_countdown <= 0) in rtw89_pci_poll_rxq_dma()
376 return budget - countdown; in rtw89_pci_poll_rxq_dma()
384 struct ieee80211_tx_info *info; in rtw89_pci_tx_status() local
388 info = IEEE80211_SKB_CB(skb); in rtw89_pci_tx_status()
389 ieee80211_tx_info_clear_status(info); in rtw89_pci_tx_status()
391 if (info->flags & IEEE80211_TX_CTL_NO_ACK) in rtw89_pci_tx_status()
392 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in rtw89_pci_tx_status()
394 info->flags |= IEEE80211_TX_STAT_ACK; in rtw89_pci_tx_status()
395 tx_ring->tx_acked++; in rtw89_pci_tx_status()
397 if (info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) in rtw89_pci_tx_status()
402 tx_ring->tx_retry_lmt++; in rtw89_pci_tx_status()
405 tx_ring->tx_life_time++; in rtw89_pci_tx_status()
408 tx_ring->tx_mac_id_drop++; in rtw89_pci_tx_status()
416 ieee80211_tx_status_ni(rtwdev->hw, skb); in rtw89_pci_tx_status()
425 while (cnt--) { in rtw89_pci_reclaim_txbd()
426 txwd = list_first_entry_or_null(&tx_ring->busy_pages, struct rtw89_pci_tx_wd, list); in rtw89_pci_reclaim_txbd()
432 list_del_init(&txwd->list); in rtw89_pci_reclaim_txbd()
435 if (skb_queue_len(&txwd->queue) == 0) in rtw89_pci_reclaim_txbd()
443 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_busy_txwd()
447 for (i = 0; i < wd_ring->page_num; i++) { in rtw89_pci_release_busy_txwd()
448 txwd = list_first_entry_or_null(&tx_ring->busy_pages, struct rtw89_pci_tx_wd, list); in rtw89_pci_release_busy_txwd()
452 list_del_init(&txwd->list); in rtw89_pci_release_busy_txwd()
461 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_release_txwd_skb()
464 u8 txch = tx_ring->txch; in rtw89_pci_release_txwd_skb()
466 if (!list_empty(&txwd->list)) { in rtw89_pci_release_txwd_skb()
471 if (!rtwpci->low_power && !list_empty(&txwd->list)) in rtw89_pci_release_txwd_skb()
476 skb_queue_walk_safe(&txwd->queue, skb, tmp) { in rtw89_pci_release_txwd_skb()
477 skb_unlink(skb, &txwd->queue); in rtw89_pci_release_txwd_skb()
480 dma_unmap_single(&rtwpci->pdev->dev, tx_data->dma, skb->len, in rtw89_pci_release_txwd_skb()
486 if (list_empty(&txwd->list)) in rtw89_pci_release_txwd_skb()
493 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_release_rpp()
500 seq = le32_get_bits(rpp->dword, RTW89_PCI_RPP_SEQ); in rtw89_pci_release_rpp()
501 qsel = le32_get_bits(rpp->dword, RTW89_PCI_RPP_QSEL); in rtw89_pci_release_rpp()
502 tx_status = le32_get_bits(rpp->dword, RTW89_PCI_RPP_TX_STATUS); in rtw89_pci_release_rpp()
510 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_release_rpp()
511 wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_rpp()
512 txwd = &wd_ring->pages[seq]; in rtw89_pci_release_rpp()
520 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_pending_txwd_skb()
524 for (i = 0; i < wd_ring->page_num; i++) { in rtw89_pci_release_pending_txwd_skb()
525 txwd = &wd_ring->pages[i]; in rtw89_pci_release_pending_txwd_skb()
527 if (!list_empty(&txwd->list)) in rtw89_pci_release_pending_txwd_skb()
538 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_release_tx_skbs()
551 skb = rx_ring->buf[skb_idx]; in rtw89_pci_release_tx_skbs()
556 rtw89_err(rtwdev, "failed to update %d RXBD info: %d\n", in rtw89_pci_release_tx_skbs()
557 bd_ring->wp, ret); in rtw89_pci_release_tx_skbs()
562 if (!rx_info->fs || !rx_info->ls) { in rtw89_pci_release_tx_skbs()
567 rtw89_chip_query_rxdesc(rtwdev, &desc_info, skb->data, rxinfo_size); in rtw89_pci_release_tx_skbs()
571 for (; offset + rpp_size <= rx_info->len; offset += rpp_size) { in rtw89_pci_release_tx_skbs()
572 rpp = (struct rtw89_pci_rpp_fmt *)(skb->data + offset); in rtw89_pci_release_tx_skbs()
591 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_release_tx()
604 cnt -= release_cnt; in rtw89_pci_release_tx()
607 rtw89_write16(rtwdev, bd_ring->addr.idx, bd_ring->wp); in rtw89_pci_release_tx()
617 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RPQ]; in rtw89_pci_poll_rpq_dma()
619 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_poll_rpq_dma()
628 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_poll_rpq_dma()
632 rtwdev->napi_budget_countdown -= work_done; in rtw89_pci_poll_rpq_dma()
647 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_isr_rxd_unavail()
648 bd_ring = &rx_ring->bd_ring; in rtw89_pci_isr_rxd_unavail()
650 reg_idx = rtw89_read32(rtwdev, bd_ring->addr.idx); in rtw89_pci_isr_rxd_unavail()
653 hw_idx_next = (hw_idx + 1) % bd_ring->len; in rtw89_pci_isr_rxd_unavail()
660 i, reg_idx, bd_ring->len); in rtw89_pci_isr_rxd_unavail()
668 isrs->halt_c2h_isrs = rtw89_read32(rtwdev, R_AX_HISR0) & rtwpci->halt_c2h_intrs; in rtw89_pci_recognize_intrs()
669 isrs->isrs[0] = rtw89_read32(rtwdev, R_AX_PCIE_HISR00) & rtwpci->intrs[0]; in rtw89_pci_recognize_intrs()
670 isrs->isrs[1] = rtw89_read32(rtwdev, R_AX_PCIE_HISR10) & rtwpci->intrs[1]; in rtw89_pci_recognize_intrs()
672 rtw89_write32(rtwdev, R_AX_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs()
673 rtw89_write32(rtwdev, R_AX_PCIE_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs()
674 rtw89_write32(rtwdev, R_AX_PCIE_HISR10, isrs->isrs[1]); in rtw89_pci_recognize_intrs()
682 isrs->ind_isrs = rtw89_read32(rtwdev, R_AX_PCIE_HISR00_V1) & rtwpci->ind_intrs; in rtw89_pci_recognize_intrs_v1()
683 isrs->halt_c2h_isrs = isrs->ind_isrs & B_AX_HS0ISR_IND_INT_EN ? in rtw89_pci_recognize_intrs_v1()
684 rtw89_read32(rtwdev, R_AX_HISR0) & rtwpci->halt_c2h_intrs : 0; in rtw89_pci_recognize_intrs_v1()
685 isrs->isrs[0] = isrs->ind_isrs & B_AX_HCI_AXIDMA_INT_EN ? in rtw89_pci_recognize_intrs_v1()
686 rtw89_read32(rtwdev, R_AX_HAXI_HISR00) & rtwpci->intrs[0] : 0; in rtw89_pci_recognize_intrs_v1()
687 isrs->isrs[1] = isrs->ind_isrs & B_AX_HS1ISR_IND_INT_EN ? in rtw89_pci_recognize_intrs_v1()
688 rtw89_read32(rtwdev, R_AX_HISR1) & rtwpci->intrs[1] : 0; in rtw89_pci_recognize_intrs_v1()
690 if (isrs->halt_c2h_isrs) in rtw89_pci_recognize_intrs_v1()
691 rtw89_write32(rtwdev, R_AX_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs_v1()
692 if (isrs->isrs[0]) in rtw89_pci_recognize_intrs_v1()
693 rtw89_write32(rtwdev, R_AX_HAXI_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs_v1()
694 if (isrs->isrs[1]) in rtw89_pci_recognize_intrs_v1()
695 rtw89_write32(rtwdev, R_AX_HISR1, isrs->isrs[1]); in rtw89_pci_recognize_intrs_v1()
703 isrs->ind_isrs = rtw89_read32(rtwdev, R_BE_PCIE_HISR) & rtwpci->ind_intrs; in rtw89_pci_recognize_intrs_v2()
704 isrs->halt_c2h_isrs = isrs->ind_isrs & B_BE_HS0ISR_IND_INT ? in rtw89_pci_recognize_intrs_v2()
705 rtw89_read32(rtwdev, R_BE_HISR0) & rtwpci->halt_c2h_intrs : 0; in rtw89_pci_recognize_intrs_v2()
706 isrs->isrs[0] = isrs->ind_isrs & B_BE_HCI_AXIDMA_INT ? in rtw89_pci_recognize_intrs_v2()
707 rtw89_read32(rtwdev, R_BE_HAXI_HISR00) & rtwpci->intrs[0] : 0; in rtw89_pci_recognize_intrs_v2()
708 isrs->isrs[1] = rtw89_read32(rtwdev, R_BE_PCIE_DMA_ISR); in rtw89_pci_recognize_intrs_v2()
710 if (isrs->halt_c2h_isrs) in rtw89_pci_recognize_intrs_v2()
711 rtw89_write32(rtwdev, R_BE_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs_v2()
712 if (isrs->isrs[0]) in rtw89_pci_recognize_intrs_v2()
713 rtw89_write32(rtwdev, R_BE_HAXI_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs_v2()
714 if (isrs->isrs[1]) in rtw89_pci_recognize_intrs_v2()
715 rtw89_write32(rtwdev, R_BE_PCIE_DMA_ISR, isrs->isrs[1]); in rtw89_pci_recognize_intrs_v2()
716 rtw89_write32(rtwdev, R_BE_PCIE_HISR, isrs->ind_isrs); in rtw89_pci_recognize_intrs_v2()
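
The recognize_intrs variants above share one shape: read a hardware interrupt status register, mask it with the interrupts currently enabled in software, and write the masked value back to the status register to acknowledge it (a write-1-to-clear pattern, as the code above suggests). A simplified sketch of that pattern using the driver's register accessors; the helper itself is illustrative:

	/* Read-mask-acknowledge one interrupt status register. */
	static u32 demo_recognize_and_ack(struct rtw89_dev *rtwdev,
					  u32 hisr_addr, u32 enabled_mask)
	{
		u32 isrs = rtw89_read32(rtwdev, hisr_addr) & enabled_mask;

		if (isrs)
			rtw89_write32(rtwdev, hisr_addr, isrs);

		return isrs;
	}
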
722 rtw89_write32(rtwdev, R_AX_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr()
723 rtw89_write32(rtwdev, R_AX_PCIE_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr()
724 rtw89_write32(rtwdev, R_AX_PCIE_HIMR10, rtwpci->intrs[1]); in rtw89_pci_enable_intr()
738 rtw89_write32(rtwdev, R_AX_PCIE_HIMR00_V1, rtwpci->ind_intrs); in rtw89_pci_enable_intr_v1()
739 rtw89_write32(rtwdev, R_AX_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr_v1()
740 rtw89_write32(rtwdev, R_AX_HAXI_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr_v1()
741 rtw89_write32(rtwdev, R_AX_HIMR1, rtwpci->intrs[1]); in rtw89_pci_enable_intr_v1()
753 rtw89_write32(rtwdev, R_BE_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr_v2()
754 rtw89_write32(rtwdev, R_BE_HAXI_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr_v2()
755 rtw89_write32(rtwdev, R_BE_PCIE_DMA_IMR_0_V1, rtwpci->intrs[1]); in rtw89_pci_enable_intr_v2()
756 rtw89_write32(rtwdev, R_BE_PCIE_HIMR0, rtwpci->ind_intrs); in rtw89_pci_enable_intr_v2()
769 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_recovery_start()
772 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_start()
776 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_start()
781 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_recovery_complete()
784 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_complete()
788 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_complete()
793 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_interrupt_handler()
797 rtwdev->napi_budget_countdown = budget; in rtw89_pci_low_power_interrupt_handler()
806 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_interrupt_threadfn()
807 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_interrupt_threadfn() local
808 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_interrupt_threadfn()
812 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
814 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
816 if (unlikely(isrs.isrs[0] & gen_def->isr_rdu)) in rtw89_pci_interrupt_threadfn()
819 if (unlikely(isrs.halt_c2h_isrs & gen_def->isr_halt_c2h)) in rtw89_pci_interrupt_threadfn()
822 if (unlikely(isrs.halt_c2h_isrs & gen_def->isr_wdt_timeout)) in rtw89_pci_interrupt_threadfn()
825 if (unlikely(rtwpci->under_recovery)) in rtw89_pci_interrupt_threadfn()
828 if (unlikely(rtwpci->low_power)) { in rtw89_pci_interrupt_threadfn()
833 if (likely(rtwpci->running)) { in rtw89_pci_interrupt_threadfn()
835 napi_schedule(&rtwdev->napi); in rtw89_pci_interrupt_threadfn()
842 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
843 if (likely(rtwpci->running)) in rtw89_pci_interrupt_threadfn()
845 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
852 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_interrupt_handler()
856 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_handler()
861 if (unlikely(!rtwpci->running)) { in rtw89_pci_interrupt_handler()
868 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_handler()
882 #define DEF_TXCHADDRS_TYPE1(info, txch, v...) \ argument
891 #define DEF_TXCHADDRS(info, txch, v...) \ argument
910 DEF_TXCHADDRS(info, ACH0),
911 DEF_TXCHADDRS(info, ACH1),
912 DEF_TXCHADDRS(info, ACH2),
913 DEF_TXCHADDRS(info, ACH3),
914 DEF_TXCHADDRS(info, ACH4),
915 DEF_TXCHADDRS(info, ACH5),
916 DEF_TXCHADDRS(info, ACH6),
917 DEF_TXCHADDRS(info, ACH7),
918 DEF_TXCHADDRS(info, CH8),
919 DEF_TXCHADDRS(info, CH9),
920 DEF_TXCHADDRS_TYPE1(info, CH10),
921 DEF_TXCHADDRS_TYPE1(info, CH11),
922 DEF_TXCHADDRS(info, CH12),
933 DEF_TXCHADDRS(info, ACH0, _V1),
934 DEF_TXCHADDRS(info, ACH1, _V1),
935 DEF_TXCHADDRS(info, ACH2, _V1),
936 DEF_TXCHADDRS(info, ACH3, _V1),
937 DEF_TXCHADDRS(info, ACH4, _V1),
938 DEF_TXCHADDRS(info, ACH5, _V1),
939 DEF_TXCHADDRS(info, ACH6, _V1),
940 DEF_TXCHADDRS(info, ACH7, _V1),
941 DEF_TXCHADDRS(info, CH8, _V1),
942 DEF_TXCHADDRS(info, CH9, _V1),
943 DEF_TXCHADDRS_TYPE1(info, CH10, _V1),
944 DEF_TXCHADDRS_TYPE1(info, CH11, _V1),
945 DEF_TXCHADDRS(info, CH12, _V1),
985 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_txch_addrs() local
988 return -EINVAL; in rtw89_pci_get_txch_addrs()
990 *addr = &info->dma_addr_set->tx[txch]; in rtw89_pci_get_txch_addrs()
999 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_rxch_addrs() local
1002 return -EINVAL; in rtw89_pci_get_rxch_addrs()
1004 *addr = &info->dma_addr_set->rx[rxch]; in rtw89_pci_get_rxch_addrs()
1011 struct rtw89_pci_dma_ring *bd_ring = &ring->bd_ring; in rtw89_pci_get_avail_txbd_num()
1014 if (bd_ring->rp > bd_ring->wp) in rtw89_pci_get_avail_txbd_num()
1015 return bd_ring->rp - bd_ring->wp - 1; in rtw89_pci_get_avail_txbd_num()
1017 return bd_ring->len - (bd_ring->wp - bd_ring->rp) - 1; in rtw89_pci_get_avail_txbd_num()
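
rtw89_pci_get_avail_txbd_num() above reserves one slot so that wp == rp can only ever mean "empty", never "full". A standalone sketch of the same free-space computation (names are illustrative):

	/* Free descriptors on a ring that keeps one slot unused to
	 * disambiguate full from empty.
	 */
	static inline unsigned int ring_space(unsigned int wp, unsigned int rp,
					      unsigned int len)
	{
		if (rp > wp)
			return rp - wp - 1;
		return len - (wp - rp) - 1;
	}
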
1023 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1024 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[RTW89_TXCH_CH12]; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1027 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1030 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1039 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1040 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1041 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1044 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1046 cnt = min(cnt, wd_ring->curr_num); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1047 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1055 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_resource()
1056 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __rtw89_pci_check_and_reclaim_tx_resource()
1057 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in __rtw89_pci_check_and_reclaim_tx_resource()
1058 const struct rtw89_chip_info *chip = rtwdev->chip; in __rtw89_pci_check_and_reclaim_tx_resource()
1064 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RPQ]; in __rtw89_pci_check_and_reclaim_tx_resource()
1066 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource()
1068 wd_cnt = wd_ring->curr_num; in __rtw89_pci_check_and_reclaim_tx_resource()
1083 wd_cnt = wd_ring->curr_num; in __rtw89_pci_check_and_reclaim_tx_resource()
1090 if (rtwpci->low_power || chip->small_fifo_size) in __rtw89_pci_check_and_reclaim_tx_resource()
1101 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource()
1109 if (rtwdev->hci.paused) in rtw89_pci_check_and_reclaim_tx_resource()
1120 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_tx_kick_off()
1121 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in __rtw89_pci_tx_kick_off()
1124 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_tx_kick_off()
1126 addr = bd_ring->addr.idx; in __rtw89_pci_tx_kick_off()
1127 host_idx = bd_ring->wp; in __rtw89_pci_tx_kick_off()
1130 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_tx_kick_off()
1136 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in rtw89_pci_tx_bd_ring_update()
1139 len = bd_ring->len; in rtw89_pci_tx_bd_ring_update()
1140 host_idx = bd_ring->wp + n_txbd; in rtw89_pci_tx_bd_ring_update()
1141 host_idx = host_idx < len ? host_idx : host_idx - len; in rtw89_pci_tx_bd_ring_update()
1143 bd_ring->wp = host_idx; in rtw89_pci_tx_bd_ring_update()
1148 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_tx_kick_off()
1149 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_ops_tx_kick_off()
1151 if (rtwdev->hci.paused) { in rtw89_pci_ops_tx_kick_off()
1152 set_bit(txch, rtwpci->kick_map); in rtw89_pci_ops_tx_kick_off()
1161 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_tx_kick_off_pending()
1166 if (!test_and_clear_bit(txch, rtwpci->kick_map)) in rtw89_pci_tx_kick_off_pending()
1169 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_tx_kick_off_pending()
1176 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __pci_flush_txch()
1177 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __pci_flush_txch()
1178 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in __pci_flush_txch()
1188 cur_idx = rtw89_read32(rtwdev, bd_ring->addr.idx); in __pci_flush_txch()
1190 if (cur_rp == bd_ring->wp) in __pci_flush_txch()
1203 const struct rtw89_pci_info *info = rtwdev->pci_info; in __rtw89_pci_ops_flush_txchs() local
1210 if (info->tx_dma_ch_mask & BIT(i)) in __rtw89_pci_ops_flush_txchs()
1221 __rtw89_pci_ops_flush_txchs(rtwdev, BIT(RTW89_TXCH_NUM) - 1, drop); in rtw89_pci_ops_flush_queues()
1226 dma_addr_t dma, u8 *add_info_nr) in rtw89_pci_fill_txaddr_info() argument
1230 txaddr_info->length = cpu_to_le16(total_len); in rtw89_pci_fill_txaddr_info()
1231 txaddr_info->option = cpu_to_le16(RTW89_PCI_ADDR_MSDU_LS | in rtw89_pci_fill_txaddr_info()
1233 txaddr_info->dma = cpu_to_le32(dma); in rtw89_pci_fill_txaddr_info()
1243 dma_addr_t dma, u8 *add_info_nr) in rtw89_pci_fill_txaddr_info_v1() argument
1254 remain -= len; in rtw89_pci_fill_txaddr_info_v1()
1259 txaddr_info->length_opt = cpu_to_le16(length_option); in rtw89_pci_fill_txaddr_info_v1()
1260 txaddr_info->dma_low_lsb = cpu_to_le16(FIELD_GET(GENMASK(15, 0), dma)); in rtw89_pci_fill_txaddr_info_v1()
1261 txaddr_info->dma_low_msb = cpu_to_le16(FIELD_GET(GENMASK(31, 16), dma)); in rtw89_pci_fill_txaddr_info_v1()
1263 dma += len; in rtw89_pci_fill_txaddr_info_v1()
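
The v1 address-info fill above packs the low 32 bits of the DMA address into two little-endian 16-bit fields via FIELD_GET()/GENMASK(), then advances the address for the next segment. A minimal illustration of that split; the struct here is hypothetical, not the driver's actual txaddr_info layout:

	#include <linux/bitfield.h>
	#include <linux/types.h>

	struct demo_addr_halves {
		__le16 dma_low_lsb;	/* bits 15:0 of the bus address  */
		__le16 dma_low_msb;	/* bits 31:16 of the bus address */
	};

	static inline void demo_split_dma_low(struct demo_addr_halves *h,
					      u32 dma_low)
	{
		h->dma_low_lsb = cpu_to_le16(FIELD_GET(GENMASK(15, 0), dma_low));
		h->dma_low_msb = cpu_to_le16(FIELD_GET(GENMASK(31, 16), dma_low));
	}
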
1281 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_txwd_submit()
1282 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_txwd_submit()
1283 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_txwd_submit()
1286 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_txwd_submit()
1287 struct sk_buff *skb = tx_req->skb; in rtw89_pci_txwd_submit()
1290 bool en_wd_info = desc_info->en_wd_info; in rtw89_pci_txwd_submit()
1294 dma_addr_t dma; in rtw89_pci_txwd_submit() local
1297 dma = dma_map_single(&pdev->dev, skb->data, skb->len, DMA_TO_DEVICE); in rtw89_pci_txwd_submit()
1298 if (dma_mapping_error(&pdev->dev, dma)) { in rtw89_pci_txwd_submit()
1299 rtw89_err(rtwdev, "failed to map skb dma data\n"); in rtw89_pci_txwd_submit()
1300 ret = -EBUSY; in rtw89_pci_txwd_submit()
1304 tx_data->dma = dma; in rtw89_pci_txwd_submit()
1305 rcu_assign_pointer(skb_data->wait, NULL); in rtw89_pci_txwd_submit()
1308 txwd_len = chip->txwd_body_size; in rtw89_pci_txwd_submit()
1309 txwd_len += en_wd_info ? chip->txwd_info_size : 0; in rtw89_pci_txwd_submit()
1311 txwp_info = txwd->vaddr + txwd_len; in rtw89_pci_txwd_submit()
1312 txwp_info->seq0 = cpu_to_le16(txwd->seq | RTW89_PCI_TXWP_VALID); in rtw89_pci_txwd_submit()
1313 txwp_info->seq1 = 0; in rtw89_pci_txwd_submit()
1314 txwp_info->seq2 = 0; in rtw89_pci_txwd_submit()
1315 txwp_info->seq3 = 0; in rtw89_pci_txwd_submit()
1317 tx_ring->tx_cnt++; in rtw89_pci_txwd_submit()
1318 txaddr_info_addr = txwd->vaddr + txwd_len + txwp_len; in rtw89_pci_txwd_submit()
1320 rtw89_chip_fill_txaddr_info(rtwdev, txaddr_info_addr, skb->len, in rtw89_pci_txwd_submit()
1321 dma, &desc_info->addr_info_nr); in rtw89_pci_txwd_submit()
1323 txwd->len = txwd_len + txwp_len + txaddr_info_len; in rtw89_pci_txwd_submit()
1325 rtw89_chip_fill_txdesc(rtwdev, desc_info, txwd->vaddr); in rtw89_pci_txwd_submit()
1327 skb_queue_tail(&txwd->queue, skb); in rtw89_pci_txwd_submit()
1340 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_fwcmd_submit()
1341 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_fwcmd_submit()
1342 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_fwcmd_submit()
1344 int txdesc_size = chip->h2c_desc_size; in rtw89_pci_fwcmd_submit()
1345 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_fwcmd_submit()
1346 struct sk_buff *skb = tx_req->skb; in rtw89_pci_fwcmd_submit()
1348 dma_addr_t dma; in rtw89_pci_fwcmd_submit() local
1354 dma = dma_map_single(&pdev->dev, skb->data, skb->len, DMA_TO_DEVICE); in rtw89_pci_fwcmd_submit()
1355 if (dma_mapping_error(&pdev->dev, dma)) { in rtw89_pci_fwcmd_submit()
1356 rtw89_err(rtwdev, "failed to map fwcmd dma data\n"); in rtw89_pci_fwcmd_submit()
1357 return -EBUSY; in rtw89_pci_fwcmd_submit()
1360 tx_data->dma = dma; in rtw89_pci_fwcmd_submit()
1361 txbd->option = cpu_to_le16(RTW89_PCI_TXBD_OPTION_LS); in rtw89_pci_fwcmd_submit()
1362 txbd->length = cpu_to_le16(skb->len); in rtw89_pci_fwcmd_submit()
1363 txbd->dma = cpu_to_le32(tx_data->dma); in rtw89_pci_fwcmd_submit()
1364 skb_queue_tail(&rtwpci->h2c_queue, skb); in rtw89_pci_fwcmd_submit()
1383 if (tx_ring->txch == RTW89_TXCH_CH12) in rtw89_pci_txbd_submit()
1389 ret = -ENOSPC; in rtw89_pci_txbd_submit()
1395 rtw89_err(rtwdev, "failed to submit TXWD %d\n", txwd->seq); in rtw89_pci_txbd_submit()
1399 list_add_tail(&txwd->list, &tx_ring->busy_pages); in rtw89_pci_txbd_submit()
1401 txbd->option = cpu_to_le16(RTW89_PCI_TXBD_OPTION_LS); in rtw89_pci_txbd_submit()
1402 txbd->length = cpu_to_le16(txwd->len); in rtw89_pci_txbd_submit()
1403 txbd->dma = cpu_to_le32(txwd->paddr); in rtw89_pci_txbd_submit()
1418 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_tx_write()
1424 /* check the tx type and dma channel for fw cmd queue */ in rtw89_pci_tx_write()
1426 tx_req->tx_type == RTW89_CORE_TX_TYPE_FWCMD) && in rtw89_pci_tx_write()
1428 tx_req->tx_type != RTW89_CORE_TX_TYPE_FWCMD)) { in rtw89_pci_tx_write()
1429 rtw89_err(rtwdev, "only fw cmd uses dma channel 12\n"); in rtw89_pci_tx_write()
1430 return -EINVAL; in rtw89_pci_tx_write()
1433 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_tx_write()
1434 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1439 ret = -ENOSPC; in rtw89_pci_tx_write()
1450 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1454 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1460 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_ops_tx_write()
1463 ret = rtw89_pci_tx_write(rtwdev, tx_req, desc_info->ch_dma); in rtw89_pci_ops_tx_write()
1465 rtw89_err(rtwdev, "failed to TX Queue %d\n", desc_info->ch_dma); in rtw89_pci_ops_tx_write()
1502 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_reset_trx_rings()
1503 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_reset_trx_rings() local
1504 const struct rtw89_pci_bd_ram *bd_ram_table = *info->bd_ram_table; in rtw89_pci_reset_trx_rings()
1517 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_reset_trx_rings()
1520 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_reset_trx_rings()
1521 bd_ring = &tx_ring->bd_ring; in rtw89_pci_reset_trx_rings()
1523 addr_num = bd_ring->addr.num; in rtw89_pci_reset_trx_rings()
1524 addr_bdram = bd_ring->addr.bdram; in rtw89_pci_reset_trx_rings()
1525 addr_desa_l = bd_ring->addr.desa_l; in rtw89_pci_reset_trx_rings()
1526 bd_ring->wp = 0; in rtw89_pci_reset_trx_rings()
1527 bd_ring->rp = 0; in rtw89_pci_reset_trx_rings()
1529 rtw89_write16(rtwdev, addr_num, bd_ring->len); in rtw89_pci_reset_trx_rings()
1531 val32 = FIELD_PREP(BDRAM_SIDX_MASK, bd_ram->start_idx) | in rtw89_pci_reset_trx_rings()
1532 FIELD_PREP(BDRAM_MAX_MASK, bd_ram->max_num) | in rtw89_pci_reset_trx_rings()
1533 FIELD_PREP(BDRAM_MIN_MASK, bd_ram->min_num); in rtw89_pci_reset_trx_rings()
1537 rtw89_write32(rtwdev, addr_desa_l, bd_ring->dma); in rtw89_pci_reset_trx_rings()
1541 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_reset_trx_rings()
1542 bd_ring = &rx_ring->bd_ring; in rtw89_pci_reset_trx_rings()
1543 addr_num = bd_ring->addr.num; in rtw89_pci_reset_trx_rings()
1544 addr_idx = bd_ring->addr.idx; in rtw89_pci_reset_trx_rings()
1545 addr_desa_l = bd_ring->addr.desa_l; in rtw89_pci_reset_trx_rings()
1546 if (info->rx_ring_eq_is_full) in rtw89_pci_reset_trx_rings()
1547 bd_ring->wp = bd_ring->len - 1; in rtw89_pci_reset_trx_rings()
1549 bd_ring->wp = 0; in rtw89_pci_reset_trx_rings()
1550 bd_ring->rp = 0; in rtw89_pci_reset_trx_rings()
1551 rx_ring->diliver_skb = NULL; in rtw89_pci_reset_trx_rings()
1552 rx_ring->diliver_desc.ready = false; in rtw89_pci_reset_trx_rings()
1554 rtw89_write16(rtwdev, addr_num, bd_ring->len); in rtw89_pci_reset_trx_rings()
1555 rtw89_write32(rtwdev, addr_desa_l, bd_ring->dma); in rtw89_pci_reset_trx_rings()
1557 if (info->rx_ring_eq_is_full) in rtw89_pci_reset_trx_rings()
1558 rtw89_write16(rtwdev, addr_idx, bd_ring->wp); in rtw89_pci_reset_trx_rings()
1571 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_reset()
1572 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_reset() local
1577 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_ops_reset()
1579 if (info->tx_dma_ch_mask & BIT(txch)) in rtw89_pci_ops_reset()
1583 skb_queue_len(&rtwpci->h2c_queue), true); in rtw89_pci_ops_reset()
1586 rtw89_pci_release_tx_ring(rtwdev, &rtwpci->tx_rings[txch]); in rtw89_pci_ops_reset()
1588 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_ops_reset()
1593 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_enable_intr_lock()
1596 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_enable_intr_lock()
1597 rtwpci->running = true; in rtw89_pci_enable_intr_lock()
1599 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_enable_intr_lock()
1604 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_disable_intr_lock()
1607 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_disable_intr_lock()
1608 rtwpci->running = false; in rtw89_pci_disable_intr_lock()
1610 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_disable_intr_lock()
1623 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_stop()
1624 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_ops_stop()
1627 synchronize_irq(pdev->irq); in rtw89_pci_ops_stop()
1633 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_pause()
1634 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_ops_pause()
1638 synchronize_irq(pdev->irq); in rtw89_pci_ops_pause()
1639 if (test_bit(RTW89_FLAG_NAPI_RUNNING, rtwdev->flags)) in rtw89_pci_ops_pause()
1640 napi_synchronize(&rtwdev->napi); in rtw89_pci_ops_pause()
1650 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_switch_bd_idx_addr()
1651 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_switch_bd_idx_addr() local
1652 const struct rtw89_pci_bd_idx_addr *bd_idx_addr = info->bd_idx_addr_low_power; in rtw89_pci_switch_bd_idx_addr()
1653 const struct rtw89_pci_ch_dma_addr_set *dma_addr_set = info->dma_addr_set; in rtw89_pci_switch_bd_idx_addr()
1662 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_switch_bd_idx_addr()
1663 tx_ring->bd_ring.addr.idx = low_power ? in rtw89_pci_switch_bd_idx_addr()
1664 bd_idx_addr->tx_bd_addrs[i] : in rtw89_pci_switch_bd_idx_addr()
1665 dma_addr_set->tx[i].idx; in rtw89_pci_switch_bd_idx_addr()
1669 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_switch_bd_idx_addr()
1670 rx_ring->bd_ring.addr.idx = low_power ? in rtw89_pci_switch_bd_idx_addr()
1671 bd_idx_addr->rx_bd_addrs[i] : in rtw89_pci_switch_bd_idx_addr()
1672 dma_addr_set->rx[i].idx; in rtw89_pci_switch_bd_idx_addr()
1680 WARN(!rtwdev->hci.paused, "HCI isn't paused\n"); in rtw89_pci_ops_switch_mode()
1691 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read32_cmac()
1692 u32 val = readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32_cmac()
1703 val = readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32_cmac()
1711 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read8()
1715 return readb(rtwpci->mmap + addr); in rtw89_pci_ops_read8()
1725 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read16()
1729 return readw(rtwpci->mmap + addr); in rtw89_pci_ops_read16()
1739 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read32()
1742 return readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32()
1749 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write8()
1751 writeb(data, rtwpci->mmap + addr); in rtw89_pci_ops_write8()
1756 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write16()
1758 writew(data, rtwpci->mmap + addr); in rtw89_pci_ops_write16()
1763 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write32()
1765 writel(data, rtwpci->mmap + addr); in rtw89_pci_ops_write32()
1770 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_dma_trx() local
1773 rtw89_write32_set(rtwdev, info->init_cfg_reg, in rtw89_pci_ctrl_dma_trx()
1774 info->rxhci_en_bit | info->txhci_en_bit); in rtw89_pci_ctrl_dma_trx()
1776 rtw89_write32_clr(rtwdev, info->init_cfg_reg, in rtw89_pci_ctrl_dma_trx()
1777 info->rxhci_en_bit | info->txhci_en_bit); in rtw89_pci_ctrl_dma_trx()
1782 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_dma_io() local
1783 const struct rtw89_reg_def *reg = &info->dma_io_stop; in rtw89_pci_ctrl_dma_io()
1786 rtw89_write32_clr(rtwdev, reg->addr, reg->mask); in rtw89_pci_ctrl_dma_io()
1788 rtw89_write32_set(rtwdev, reg->addr, reg->mask); in rtw89_pci_ctrl_dma_io()
1819 return -EINVAL; in rtw89_pci_check_mdio()
1913 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_write_config_byte()
1914 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_write_config_byte()
1922 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_read_config_byte()
1923 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_read_config_byte()
1992 return -EINVAL; in __get_target()
2002 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_autok_x()
2015 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_auto_refclk_cal()
2038 return -EOPNOTSUPP; in rtw89_pci_auto_refclk_cal()
2089 mgn_set = tar * INTF_INTGRA_HOSTREF_V1 / INTF_INTGRA_MINREF_V1 - tar; in rtw89_pci_auto_refclk_cal()
2167 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_deglitch_setting()
2191 if (rtwdev->chip->chip_id != RTL8852A) in rtw89_pci_rxdma_prefth()
2199 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_l1off_pwroff()
2211 if (rtwdev->chip->chip_id != RTL8852A) in rtw89_pci_l2_rxen_lat()
2229 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_aphy_pwrcut()
2239 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_hci_ldo()
2246 } else if (rtwdev->chip->chip_id == RTL8852C) { in rtw89_pci_hci_ldo()
2254 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_dphy_delay()
2273 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_autoload_hang()
2282 if (!(rtwdev->chip->chip_id == RTL8852C && rtwdev->hal.cv == CHIP_CAV)) in rtw89_pci_l12_vmain()
2290 if (!(rtwdev->chip->chip_id == RTL8852C && rtwdev->hal.cv == CHIP_CAV)) in rtw89_pci_gen2_force_ib()
2302 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_l1_ent_lat()
2310 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_wd_exit_l1()
2318 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_sic()
2327 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_set_lbc() local
2330 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_lbc()
2334 if (info->lbc_en == MAC_AX_PCIE_ENABLE) { in rtw89_pci_set_lbc()
2335 lbc = u32_replace_bits(lbc, info->lbc_tmr, B_AX_LBC_TIMER); in rtw89_pci_set_lbc()
2346 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_set_io_rcy() local
2349 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_set_io_rcy()
2352 if (info->io_rcy_en == MAC_AX_PCIE_ENABLE) { in rtw89_pci_set_io_rcy()
2354 info->io_rcy_tmr); in rtw89_pci_set_io_rcy()
2373 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_dbg()
2379 if (rtwdev->chip->chip_id == RTL8852A) in rtw89_pci_set_dbg()
2386 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_keep_reg()
2395 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_clr_idx_all_ax() local
2396 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_clr_idx_all_ax()
2400 u32 rxbd_rwptr_clr = info->rxbd_rwptr_clr_reg; in rtw89_pci_clr_idx_all_ax()
2401 u32 txbd_rwptr_clr2 = info->txbd_rwptr_clr2_reg; in rtw89_pci_clr_idx_all_ax()
2406 /* clear DMA indexes */ in rtw89_pci_clr_idx_all_ax()
2417 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_poll_txdma_ch_idle_pcie() local
2419 u32 dma_busy1 = info->dma_busy1.addr; in rtw89_poll_txdma_ch_idle_pcie()
2420 u32 dma_busy2 = info->dma_busy2_reg; in rtw89_poll_txdma_ch_idle_pcie()
2422 check = info->dma_busy1.mask; in rtw89_poll_txdma_ch_idle_pcie()
2444 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_poll_rxdma_ch_idle_pcie() local
2446 u32 dma_busy3 = info->dma_busy3_reg; in rtw89_poll_rxdma_ch_idle_pcie()
2479 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_mode_op() local
2480 enum mac_ax_bd_trunc_mode txbd_trunc_mode = info->txbd_trunc_mode; in rtw89_pci_mode_op()
2481 enum mac_ax_bd_trunc_mode rxbd_trunc_mode = info->rxbd_trunc_mode; in rtw89_pci_mode_op()
2482 enum mac_ax_rxbd_mode rxbd_mode = info->rxbd_mode; in rtw89_pci_mode_op()
2483 enum mac_ax_tag_mode tag_mode = info->tag_mode; in rtw89_pci_mode_op()
2484 enum mac_ax_wd_dma_intvl wd_dma_idle_intvl = info->wd_dma_idle_intvl; in rtw89_pci_mode_op()
2485 enum mac_ax_wd_dma_intvl wd_dma_act_intvl = info->wd_dma_act_intvl; in rtw89_pci_mode_op()
2486 enum mac_ax_tx_burst tx_burst = info->tx_burst; in rtw89_pci_mode_op()
2487 enum mac_ax_rx_burst rx_burst = info->rx_burst; in rtw89_pci_mode_op()
2488 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_mode_op()
2489 u8 cv = rtwdev->hal.cv; in rtw89_pci_mode_op()
2509 rtw89_write32_clr(rtwdev, info->init_cfg_reg, info->rxbd_mode_bit); in rtw89_pci_mode_op()
2511 rtw89_write32_set(rtwdev, info->init_cfg_reg, info->rxbd_mode_bit); in rtw89_pci_mode_op()
2538 rtw89_write32_mask(rtwdev, info->exp_ctrl_reg, info->max_tag_num_mask, in rtw89_pci_mode_op()
2539 info->multi_tag_num); in rtw89_pci_mode_op()
2568 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_deinit() local
2570 if (rtwdev->chip->chip_id == RTL8852A) { in rtw89_pci_ops_deinit()
2574 info->ltr_set(rtwdev, false); in rtw89_pci_ops_deinit()
2583 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_pre_init_ax() local
2623 rtw89_write32_set(rtwdev, info->dma_stop1.addr, B_AX_STOP_WPDMA); in rtw89_pci_ops_mac_pre_init_ax()
2625 /* stop DMA activities */ in rtw89_pci_ops_mac_pre_init_ax()
2630 rtw89_err(rtwdev, "[ERR] poll pcie dma all idle\n"); in rtw89_pci_ops_mac_pre_init_ax()
2650 /* start DMA activities */ in rtw89_pci_ops_mac_pre_init_ax()
2665 return -EINVAL; in rtw89_pci_ltr_set()
2668 return -EINVAL; in rtw89_pci_ltr_set()
2671 return -EINVAL; in rtw89_pci_ltr_set()
2674 return -EINVAL; in rtw89_pci_ltr_set()
2698 return -EINVAL; in rtw89_pci_ltr_set_v1()
2701 return -EINVAL; in rtw89_pci_ltr_set_v1()
2704 return -EINVAL; in rtw89_pci_ltr_set_v1()
2707 return -EINVAL; in rtw89_pci_ltr_set_v1()
2710 return -EINVAL; in rtw89_pci_ltr_set_v1()
2740 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_post_init_ax() local
2741 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_ops_mac_post_init_ax()
2744 ret = info->ltr_set(rtwdev, true); in rtw89_pci_ops_mac_post_init_ax()
2754 /* ADDR info 8-byte mode */ in rtw89_pci_ops_mac_post_init_ax()
2760 /* enable DMA for all queues */ in rtw89_pci_ops_mac_post_init_ax()
2764 rtw89_write32_clr(rtwdev, info->dma_stop1.addr, in rtw89_pci_ops_mac_post_init_ax()
2773 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_claim_device()
2783 pci_set_drvdata(pdev, rtwdev->hw); in rtw89_pci_claim_device()
2785 rtwpci->pdev = pdev; in rtw89_pci_claim_device()
2799 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_setup_mapping()
2810 ret = dma_set_mask(&pdev->dev, DMA_BIT_MASK(32)); in rtw89_pci_setup_mapping()
2812 rtw89_err(rtwdev, "failed to set dma mask to 32-bit\n"); in rtw89_pci_setup_mapping()
2816 ret = dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32)); in rtw89_pci_setup_mapping()
2818 rtw89_err(rtwdev, "failed to set consistent dma mask to 32-bit\n"); in rtw89_pci_setup_mapping()
2823 rtwpci->mmap = pci_iomap(pdev, bar_id, resource_len); in rtw89_pci_setup_mapping()
2824 if (!rtwpci->mmap) { in rtw89_pci_setup_mapping()
2826 ret = -EIO; in rtw89_pci_setup_mapping()
2841 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_clear_mapping()
2843 if (rtwpci->mmap) { in rtw89_pci_clear_mapping()
2844 pci_iounmap(pdev, rtwpci->mmap); in rtw89_pci_clear_mapping()
2853 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_free_tx_wd_ring()
2854 u8 *head = wd_ring->head; in rtw89_pci_free_tx_wd_ring()
2855 dma_addr_t dma = wd_ring->dma; in rtw89_pci_free_tx_wd_ring() local
2856 u32 page_size = wd_ring->page_size; in rtw89_pci_free_tx_wd_ring()
2857 u32 page_num = wd_ring->page_num; in rtw89_pci_free_tx_wd_ring()
2860 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_tx_wd_ring()
2861 wd_ring->head = NULL; in rtw89_pci_free_tx_wd_ring()
2870 dma_addr_t dma; in rtw89_pci_free_tx_ring() local
2872 head = tx_ring->bd_ring.head; in rtw89_pci_free_tx_ring()
2873 dma = tx_ring->bd_ring.dma; in rtw89_pci_free_tx_ring()
2874 ring_sz = tx_ring->bd_ring.desc_size * tx_ring->bd_ring.len; in rtw89_pci_free_tx_ring()
2875 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_tx_ring()
2877 tx_ring->bd_ring.head = NULL; in rtw89_pci_free_tx_ring()
2883 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_free_tx_rings()
2884 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_free_tx_rings() local
2889 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_free_tx_rings()
2891 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_free_tx_rings()
2903 dma_addr_t dma; in rtw89_pci_free_rx_ring() local
2906 int ring_sz = rx_ring->bd_ring.desc_size * rx_ring->bd_ring.len; in rtw89_pci_free_rx_ring()
2909 buf_sz = rx_ring->buf_sz; in rtw89_pci_free_rx_ring()
2910 for (i = 0; i < rx_ring->bd_ring.len; i++) { in rtw89_pci_free_rx_ring()
2911 skb = rx_ring->buf[i]; in rtw89_pci_free_rx_ring()
2916 dma = rx_info->dma; in rtw89_pci_free_rx_ring()
2917 dma_unmap_single(&pdev->dev, dma, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_free_rx_ring()
2919 rx_ring->buf[i] = NULL; in rtw89_pci_free_rx_ring()
2922 head = rx_ring->bd_ring.head; in rtw89_pci_free_rx_ring()
2923 dma = rx_ring->bd_ring.dma; in rtw89_pci_free_rx_ring()
2924 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_rx_ring()
2926 rx_ring->bd_ring.head = NULL; in rtw89_pci_free_rx_ring()
2932 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_free_rx_rings()
2937 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_free_rx_rings()
2955 dma_addr_t dma; in rtw89_pci_init_rx_bd() local
2958 return -EINVAL; in rtw89_pci_init_rx_bd()
2960 dma = dma_map_single(&pdev->dev, skb->data, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_init_rx_bd()
2961 if (dma_mapping_error(&pdev->dev, dma)) in rtw89_pci_init_rx_bd()
2962 return -EBUSY; in rtw89_pci_init_rx_bd()
2968 rx_bd->buf_size = cpu_to_le16(buf_sz); in rtw89_pci_init_rx_bd()
2969 rx_bd->dma = cpu_to_le32(dma); in rtw89_pci_init_rx_bd()
2970 rx_info->dma = dma; in rtw89_pci_init_rx_bd()
2980 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_alloc_tx_wd_ring()
2982 dma_addr_t dma; in rtw89_pci_alloc_tx_wd_ring() local
2996 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_tx_wd_ring()
2998 return -ENOMEM; in rtw89_pci_alloc_tx_wd_ring()
3000 INIT_LIST_HEAD(&wd_ring->free_pages); in rtw89_pci_alloc_tx_wd_ring()
3001 wd_ring->head = head; in rtw89_pci_alloc_tx_wd_ring()
3002 wd_ring->dma = dma; in rtw89_pci_alloc_tx_wd_ring()
3003 wd_ring->page_size = page_size; in rtw89_pci_alloc_tx_wd_ring()
3004 wd_ring->page_num = page_num; in rtw89_pci_alloc_tx_wd_ring()
3008 txwd = &wd_ring->pages[i]; in rtw89_pci_alloc_tx_wd_ring()
3009 cur_paddr = dma + page_offset; in rtw89_pci_alloc_tx_wd_ring()
3012 skb_queue_head_init(&txwd->queue); in rtw89_pci_alloc_tx_wd_ring()
3013 INIT_LIST_HEAD(&txwd->list); in rtw89_pci_alloc_tx_wd_ring()
3014 txwd->paddr = cur_paddr; in rtw89_pci_alloc_tx_wd_ring()
3015 txwd->vaddr = cur_vaddr; in rtw89_pci_alloc_tx_wd_ring()
3016 txwd->len = page_size; in rtw89_pci_alloc_tx_wd_ring()
3017 txwd->seq = i; in rtw89_pci_alloc_tx_wd_ring()
3035 dma_addr_t dma; in rtw89_pci_alloc_tx_ring() local
3050 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_tx_ring()
3052 ret = -ENOMEM; in rtw89_pci_alloc_tx_ring()
3056 INIT_LIST_HEAD(&tx_ring->busy_pages); in rtw89_pci_alloc_tx_ring()
3057 tx_ring->bd_ring.head = head; in rtw89_pci_alloc_tx_ring()
3058 tx_ring->bd_ring.dma = dma; in rtw89_pci_alloc_tx_ring()
3059 tx_ring->bd_ring.len = len; in rtw89_pci_alloc_tx_ring()
3060 tx_ring->bd_ring.desc_size = desc_size; in rtw89_pci_alloc_tx_ring()
3061 tx_ring->bd_ring.addr = *txch_addr; in rtw89_pci_alloc_tx_ring()
3062 tx_ring->bd_ring.wp = 0; in rtw89_pci_alloc_tx_ring()
3063 tx_ring->bd_ring.rp = 0; in rtw89_pci_alloc_tx_ring()
3064 tx_ring->txch = txch; in rtw89_pci_alloc_tx_ring()
3077 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_alloc_tx_rings()
3078 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_alloc_tx_rings() local
3086 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_alloc_tx_rings()
3088 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_alloc_tx_rings()
3104 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_alloc_tx_rings()
3116 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_alloc_rx_ring() local
3120 dma_addr_t dma; in rtw89_pci_alloc_rx_ring() local
3132 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_rx_ring()
3134 ret = -ENOMEM; in rtw89_pci_alloc_rx_ring()
3138 rx_ring->bd_ring.head = head; in rtw89_pci_alloc_rx_ring()
3139 rx_ring->bd_ring.dma = dma; in rtw89_pci_alloc_rx_ring()
3140 rx_ring->bd_ring.len = len; in rtw89_pci_alloc_rx_ring()
3141 rx_ring->bd_ring.desc_size = desc_size; in rtw89_pci_alloc_rx_ring()
3142 rx_ring->bd_ring.addr = *rxch_addr; in rtw89_pci_alloc_rx_ring()
3143 if (info->rx_ring_eq_is_full) in rtw89_pci_alloc_rx_ring()
3144 rx_ring->bd_ring.wp = len - 1; in rtw89_pci_alloc_rx_ring()
3146 rx_ring->bd_ring.wp = 0; in rtw89_pci_alloc_rx_ring()
3147 rx_ring->bd_ring.rp = 0; in rtw89_pci_alloc_rx_ring()
3148 rx_ring->buf_sz = buf_sz; in rtw89_pci_alloc_rx_ring()
3149 rx_ring->diliver_skb = NULL; in rtw89_pci_alloc_rx_ring()
3150 rx_ring->diliver_desc.ready = false; in rtw89_pci_alloc_rx_ring()
3155 ret = -ENOMEM; in rtw89_pci_alloc_rx_ring()
3159 memset(skb->data, 0, buf_sz); in rtw89_pci_alloc_rx_ring()
3160 rx_ring->buf[i] = skb; in rtw89_pci_alloc_rx_ring()
3166 rx_ring->buf[i] = NULL; in rtw89_pci_alloc_rx_ring()
3176 skb = rx_ring->buf[i]; in rtw89_pci_alloc_rx_ring()
3179 dma = *((dma_addr_t *)skb->cb); in rtw89_pci_alloc_rx_ring()
3180 dma_unmap_single(&pdev->dev, dma, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_alloc_rx_ring()
3182 rx_ring->buf[i] = NULL; in rtw89_pci_alloc_rx_ring()
3185 head = rx_ring->bd_ring.head; in rtw89_pci_alloc_rx_ring()
3186 dma = rx_ring->bd_ring.dma; in rtw89_pci_alloc_rx_ring()
3187 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_alloc_rx_ring()
3189 rx_ring->bd_ring.head = NULL; in rtw89_pci_alloc_rx_ring()
3197 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_alloc_rx_rings()
3205 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_alloc_rx_rings()
3221 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_alloc_rx_rings()
3235 rtw89_err(rtwdev, "failed to alloc dma tx rings\n"); in rtw89_pci_alloc_trx_rings()
3241 rtw89_err(rtwdev, "failed to alloc dma rx rings\n"); in rtw89_pci_alloc_trx_rings()
3256 skb_queue_head_init(&rtwpci->h2c_queue); in rtw89_pci_h2c_init()
3257 skb_queue_head_init(&rtwpci->h2c_release_queue); in rtw89_pci_h2c_init()
3263 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_setup_resource()
3280 spin_lock_init(&rtwpci->irq_lock); in rtw89_pci_setup_resource()
3281 spin_lock_init(&rtwpci->trx_lock); in rtw89_pci_setup_resource()
3294 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_clear_resource()
3299 skb_queue_len(&rtwpci->h2c_queue), true); in rtw89_pci_clear_resource()
3304 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask()
3305 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_config_intr_mask()
3308 if (chip->chip_id == RTL8851B) in rtw89_pci_config_intr_mask()
3311 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | 0; in rtw89_pci_config_intr_mask()
3313 if (rtwpci->under_recovery) { in rtw89_pci_config_intr_mask()
3314 rtwpci->intrs[0] = hs0isr_ind_int_en; in rtw89_pci_config_intr_mask()
3315 rtwpci->intrs[1] = 0; in rtw89_pci_config_intr_mask()
3317 rtwpci->intrs[0] = B_AX_TXDMA_STUCK_INT_EN | in rtw89_pci_config_intr_mask()
3326 rtwpci->intrs[1] = B_AX_HC10ISR_IND_INT_EN; in rtw89_pci_config_intr_mask()
3333 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_recovery_intr_mask_v1()
3335 rtwpci->ind_intrs = B_AX_HS0ISR_IND_INT_EN; in rtw89_pci_recovery_intr_mask_v1()
3336 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_recovery_intr_mask_v1()
3337 rtwpci->intrs[0] = 0; in rtw89_pci_recovery_intr_mask_v1()
3338 rtwpci->intrs[1] = 0; in rtw89_pci_recovery_intr_mask_v1()
3343 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_default_intr_mask_v1()
3345 rtwpci->ind_intrs = B_AX_HCI_AXIDMA_INT_EN | in rtw89_pci_default_intr_mask_v1()
3348 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_default_intr_mask_v1()
3349 rtwpci->intrs[0] = B_AX_TXDMA_STUCK_INT_EN | in rtw89_pci_default_intr_mask_v1()
3356 rtwpci->intrs[1] = B_AX_GPIO18_INT_EN; in rtw89_pci_default_intr_mask_v1()
3361 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_intr_mask_v1()
3363 rtwpci->ind_intrs = B_AX_HS1ISR_IND_INT_EN | in rtw89_pci_low_power_intr_mask_v1()
3365 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_low_power_intr_mask_v1()
3366 rtwpci->intrs[0] = 0; in rtw89_pci_low_power_intr_mask_v1()
3367 rtwpci->intrs[1] = B_AX_GPIO18_INT_EN; in rtw89_pci_low_power_intr_mask_v1()
3372 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask_v1()
3374 if (rtwpci->under_recovery) in rtw89_pci_config_intr_mask_v1()
3376 else if (rtwpci->low_power) in rtw89_pci_config_intr_mask_v1()
3385 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_recovery_intr_mask_v2()
3387 rtwpci->ind_intrs = B_BE_HS0_IND_INT_EN0; in rtw89_pci_recovery_intr_mask_v2()
3388 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_recovery_intr_mask_v2()
3389 rtwpci->intrs[0] = 0; in rtw89_pci_recovery_intr_mask_v2()
3390 rtwpci->intrs[1] = B_BE_PCIE_RX_RX0P2_IMR0_V1 | in rtw89_pci_recovery_intr_mask_v2()
3396 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_default_intr_mask_v2()
3398 rtwpci->ind_intrs = B_BE_HCI_AXIDMA_INT_EN0 | in rtw89_pci_default_intr_mask_v2()
3400 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_default_intr_mask_v2()
3401 rtwpci->intrs[0] = B_BE_RDU_CH1_INT_IMR_V1 | in rtw89_pci_default_intr_mask_v2()
3403 rtwpci->intrs[1] = B_BE_PCIE_RX_RX0P2_IMR0_V1 | in rtw89_pci_default_intr_mask_v2()
3409 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_intr_mask_v2()
3411 rtwpci->ind_intrs = B_BE_HS0_IND_INT_EN0 | in rtw89_pci_low_power_intr_mask_v2()
3413 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_low_power_intr_mask_v2()
3414 rtwpci->intrs[0] = 0; in rtw89_pci_low_power_intr_mask_v2()
3415 rtwpci->intrs[1] = B_BE_PCIE_RX_RX0P2_IMR0_V1 | in rtw89_pci_low_power_intr_mask_v2()
3421 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask_v2()
3423 if (rtwpci->under_recovery) in rtw89_pci_config_intr_mask_v2()
3425 else if (rtwpci->low_power) in rtw89_pci_config_intr_mask_v2()
3445 ret = devm_request_threaded_irq(rtwdev->dev, pdev->irq, in rtw89_pci_request_irq()
3467 devm_free_irq(rtwdev->dev, pdev->irq, rtwdev); in rtw89_pci_free_irq()
3478 if (bit_num - bit_idx > 1) in gray_code_to_bin()
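
gray_code_to_bin() above converts a Gray-coded hardware counter value back to plain binary. The standard XOR-fold formulation of that conversion, shown here as background rather than as the driver's exact implementation:

	/* Classic Gray-code to binary: XOR the value with successively
	 * shifted copies of itself until no bits remain.
	 */
	static u32 demo_gray_to_bin(u32 gray)
	{
		u32 bin = gray;

		while (gray >>= 1)
			bin ^= gray;

		return bin;
	}
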
3488 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_filter_out()
3489 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_filter_out()
3494 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_filter_out()
3533 return -EOPNOTSUPP; in rtw89_pci_filter_out()
3543 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_clkreq_set()
3580 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_aspm_set()
3623 enum rtw89_chip_gen chip_gen = rtwdev->chip->chip_gen; in rtw89_pci_recalc_int_mit()
3624 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_recalc_int_mit() local
3625 struct rtw89_traffic_stats *stats = &rtwdev->stats; in rtw89_pci_recalc_int_mit()
3626 enum rtw89_tfc_lv tx_tfc_lv = stats->tx_tfc_lv; in rtw89_pci_recalc_int_mit()
3627 enum rtw89_tfc_lv rx_tfc_lv = stats->rx_tfc_lv; in rtw89_pci_recalc_int_mit()
3630 if (rtwdev->scanning || in rtw89_pci_recalc_int_mit()
3643 rtw89_write32(rtwdev, info->mit_addr, val); in rtw89_pci_recalc_int_mit()
3648 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_link_cfg()
3649 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_link_cfg()
3662 * settings (ex. CLKREQ# not Bi-Direction), it could lead to device in rtw89_pci_link_cfg()
3684 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_l1ss_set()
3716 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_l1ss_cfg()
3717 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_l1ss_cfg()
3745 return -EINVAL; in rtw89_pci_poll_io_idle_ax()
3755 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_lv1rst_stop_dma_ax()
3784 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_lv1rst_start_dma_ax()
3802 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_lv1_recovery() local
3803 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_ops_mac_lv1_recovery()
3808 ret = gen_def->lv1rst_stop_dma(rtwdev); in rtw89_pci_ops_mac_lv1_recovery()
3810 rtw89_err(rtwdev, "lv1 rcvy pci stop dma fail\n"); in rtw89_pci_ops_mac_lv1_recovery()
3815 ret = gen_def->lv1rst_start_dma(rtwdev); in rtw89_pci_ops_mac_lv1_recovery()
3817 rtw89_err(rtwdev, "lv1 rcvy pci start dma fail\n"); in rtw89_pci_ops_mac_lv1_recovery()
3821 return -EINVAL; in rtw89_pci_ops_mac_lv1_recovery()
3829 if (rtwdev->chip->chip_gen == RTW89_CHIP_BE) in rtw89_pci_ops_dump_err_status()
3832 if (rtwdev->chip->chip_id == RTL8852C) { in rtw89_pci_ops_dump_err_status()
3850 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_napi_poll()
3851 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_napi_poll() local
3852 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_napi_poll()
3856 rtwdev->napi_budget_countdown = budget; in rtw89_pci_napi_poll()
3858 rtw89_write32(rtwdev, gen_def->isr_clear_rpq.addr, gen_def->isr_clear_rpq.data); in rtw89_pci_napi_poll()
3859 work_done = rtw89_pci_poll_rpq_dma(rtwdev, rtwpci, rtwdev->napi_budget_countdown); in rtw89_pci_napi_poll()
3863 rtw89_write32(rtwdev, gen_def->isr_clear_rxq.addr, gen_def->isr_clear_rxq.data); in rtw89_pci_napi_poll()
3864 work_done += rtw89_pci_poll_rxq_dma(rtwdev, rtwpci, rtwdev->napi_budget_countdown); in rtw89_pci_napi_poll()
3866 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_napi_poll()
3867 if (likely(rtwpci->running)) in rtw89_pci_napi_poll()
3869 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_napi_poll()
3878 struct rtw89_dev *rtwdev = hw->priv; in rtw89_pci_suspend()
3879 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_suspend()
3899 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_l2_hci_ldo()
3912 struct rtw89_dev *rtwdev = hw->priv; in rtw89_pci_resume()
3913 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_resume()
4005 const struct rtw89_driver_info *info; in rtw89_pci_probe() local
4009 info = (const struct rtw89_driver_info *)id->driver_data; in rtw89_pci_probe()
4011 rtwdev = rtw89_alloc_ieee80211_hw(&pdev->dev, in rtw89_pci_probe()
4013 info->chip); in rtw89_pci_probe()
4015 dev_err(&pdev->dev, "failed to allocate hw\n"); in rtw89_pci_probe()
4016 return -ENOMEM; in rtw89_pci_probe()
4019 pci_info = info->bus.pci; in rtw89_pci_probe()
4021 rtwdev->pci_info = info->bus.pci; in rtw89_pci_probe()
4022 rtwdev->hci.ops = &rtw89_pci_ops; in rtw89_pci_probe()
4023 rtwdev->hci.type = RTW89_HCI_TYPE_PCIE; in rtw89_pci_probe()
4024 rtwdev->hci.rpwm_addr = pci_info->rpwm_addr; in rtw89_pci_probe()
4025 rtwdev->hci.cpwm_addr = pci_info->cpwm_addr; in rtw89_pci_probe()
4027 SET_IEEE80211_DEV(rtwdev->hw, &pdev->dev); in rtw89_pci_probe()
4095 rtwdev = hw->priv; in rtw89_pci_remove()