Lines matching full:lp

All occurrences of the symbol lp (the driver's struct axienet_local private-data pointer) in what appears to be the Xilinx AXI Ethernet driver, drivers/net/ethernet/xilinx/xilinx_axienet_main.c. Each match shows its source line number, the matched text, and the enclosing function; the trailing "argument"/"local" tags are the indexer's classification of how lp is used there (function parameter vs. local variable).

133 static struct skbuf_dma_descriptor *axienet_get_rx_desc(struct axienet_local *lp, int i)  in axienet_get_rx_desc()  argument
135 return lp->rx_skb_ring[i & (RX_BUF_NUM_DEFAULT - 1)]; in axienet_get_rx_desc()
138 static struct skbuf_dma_descriptor *axienet_get_tx_desc(struct axienet_local *lp, int i) in axienet_get_tx_desc() argument
140 return lp->tx_skb_ring[i & (TX_BD_NUM_MAX - 1)]; in axienet_get_tx_desc()
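Both lookup helpers turn an ever-incrementing ring index into an array slot with a mask rather than a modulo, which is only correct because RX_BUF_NUM_DEFAULT and TX_BD_NUM_MAX are powers of two. A minimal userspace sketch of the idiom (RING_SIZE is a stand-in, not a driver constant):

    #include <assert.h>
    #include <stdio.h>

    #define RING_SIZE 128                   /* must be a power of two */

    static unsigned int ring_index(unsigned int i)
    {
            return i & (RING_SIZE - 1);     /* equivalent to i % RING_SIZE */
    }

    int main(void)
    {
            assert((RING_SIZE & (RING_SIZE - 1)) == 0); /* power-of-two check */
            printf("%u %u\n", ring_index(127), ring_index(128)); /* 127 0 */
            return 0;
    }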
145 * @lp: Pointer to axienet local structure
152 static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg) in axienet_dma_in32() argument
154 return ioread32(lp->dma_regs + reg); in axienet_dma_in32()
157 static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr, in desc_set_phys_addr() argument
161 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_set_phys_addr()
165 static dma_addr_t desc_get_phys_addr(struct axienet_local *lp, in desc_get_phys_addr() argument
170 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_get_phys_addr()
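desc_set_phys_addr() and desc_get_phys_addr() split a dma_addr_t across two 32-bit descriptor words when XAE_FEATURE_DMA_64BIT is set. The split itself, using stand-ins for the kernel's lower_32_bits()/upper_32_bits() helpers:

    #include <stdint.h>
    #include <stdio.h>

    #define lower_32_bits(x) ((uint32_t)((x) & 0xffffffffull))
    #define upper_32_bits(x) ((uint32_t)((x) >> 32))

    int main(void)
    {
            uint64_t dma_addr = 0x123456789abcdef0ull;

            /* With 64-bit DMA the driver stores both halves (phys/phys_msb);
             * without it, only the low word is written. */
            printf("lsb=%#x msb=%#x\n", lower_32_bits(dma_addr),
                   upper_32_bits(dma_addr));
            return 0;
    }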
187 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_release() local
190 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
191 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_release()
192 lp->tx_bd_v, in axienet_dma_bd_release()
193 lp->tx_bd_p); in axienet_dma_bd_release()
195 if (!lp->rx_bd_v) in axienet_dma_bd_release()
198 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_release()
204 if (!lp->rx_bd_v[i].skb) in axienet_dma_bd_release()
207 dev_kfree_skb(lp->rx_bd_v[i].skb); in axienet_dma_bd_release()
213 if (lp->rx_bd_v[i].cntrl) { in axienet_dma_bd_release()
214 phys = desc_get_phys_addr(lp, &lp->rx_bd_v[i]); in axienet_dma_bd_release()
215 dma_unmap_single(lp->dev, phys, in axienet_dma_bd_release()
216 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_release()
220 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
221 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_release()
222 lp->rx_bd_v, in axienet_dma_bd_release()
223 lp->rx_bd_p); in axienet_dma_bd_release()
228 * @lp: Pointer to the axienet_local structure
231 static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec) in axienet_usec_to_timer() argument
236 if (lp->axi_clk) in axienet_usec_to_timer()
237 clk_rate = clk_get_rate(lp->axi_clk); in axienet_usec_to_timer()
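axienet_usec_to_timer() converts an ethtool coalescing delay in microseconds into hardware timer ticks using the AXI clock rate (with a fallback rate when no clock was provided). The exact scale factor and field width are not visible in these matches, so the sketch below assumes a plain usec-to-ticks conversion clamped to an 8-bit field:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t usec_to_timer(uint64_t clk_rate_hz, uint32_t usec)
    {
            uint64_t ticks = (uint64_t)usec * clk_rate_hz / 1000000ull;

            return ticks > 0xff ? 0xff : (uint32_t)ticks; /* clamp to field */
    }

    int main(void)
    {
            /* 50 us at a 125 MHz AXI clock (illustrative numbers). */
            printf("%u\n", usec_to_timer(125000000ull, 50));
            return 0;
    }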
250 * @lp: Pointer to the axienet_local structure
252 static void axienet_dma_start(struct axienet_local *lp) in axienet_dma_start() argument
255 lp->rx_dma_cr = (lp->coalesce_count_rx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
260 if (lp->coalesce_count_rx > 1) in axienet_dma_start()
261 lp->rx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_rx) in axienet_dma_start()
264 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
267 lp->tx_dma_cr = (lp->coalesce_count_tx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
272 if (lp->coalesce_count_tx > 1) in axienet_dma_start()
273 lp->tx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_tx) in axienet_dma_start()
276 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
281 axienet_dma_out_addr(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_start()
282 lp->rx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
283 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
284 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_start()
285 (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1))); in axienet_dma_start()
291 axienet_dma_out_addr(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_start()
292 lp->tx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
293 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
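Both channels follow the same bring-up order: program the coalescing fields into the control register, write the current-descriptor pointer, set RUN/STOP, and (for RX) write the tail pointer, which is what actually arms the channel; TX defers its tail write until axienet_start_xmit(). A condensed RX-side sketch with stubbed MMIO (the shift and mask values are assumptions matching the macro names above):

    #include <stdint.h>

    #define XAXIDMA_COALESCE_SHIFT  16      /* assumed field position */
    #define XAXIDMA_CR_RUNSTOP_MASK 0x1     /* assumed RUN/STOP bit */

    static void mmio_write32(volatile uint32_t *reg, uint32_t val)
    {
            *reg = val;                     /* stand-in for axienet_dma_out32() */
    }

    static void dma_rx_start(volatile uint32_t *cr, volatile uint32_t *cdesc,
                             volatile uint32_t *tdesc, uint32_t ring_pa,
                             uint32_t last_bd_pa, uint32_t coalesce_count)
    {
            uint32_t v = coalesce_count << XAXIDMA_COALESCE_SHIFT;

            mmio_write32(cr, v);            /* coalescing, channel still stopped */
            mmio_write32(cdesc, ring_pa);   /* current descriptor = ring head */
            mmio_write32(cr, v | XAXIDMA_CR_RUNSTOP_MASK);
            mmio_write32(tdesc, last_bd_pa); /* tail write kicks off reception */
    }

    int main(void)
    {
            volatile uint32_t cr = 0, cdesc = 0, tdesc = 0;

            dma_rx_start(&cr, &cdesc, &tdesc, 0x1000, 0x1000 + 15 * 64, 24);
            return 0;
    }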
310 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_init() local
313 lp->tx_bd_ci = 0; in axienet_dma_bd_init()
314 lp->tx_bd_tail = 0; in axienet_dma_bd_init()
315 lp->rx_bd_ci = 0; in axienet_dma_bd_init()
318 lp->tx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
319 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_init()
320 &lp->tx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
321 if (!lp->tx_bd_v) in axienet_dma_bd_init()
324 lp->rx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
325 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_init()
326 &lp->rx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
327 if (!lp->rx_bd_v) in axienet_dma_bd_init()
330 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_bd_init()
331 dma_addr_t addr = lp->tx_bd_p + in axienet_dma_bd_init()
332 sizeof(*lp->tx_bd_v) * in axienet_dma_bd_init()
333 ((i + 1) % lp->tx_bd_num); in axienet_dma_bd_init()
335 lp->tx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
336 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
337 lp->tx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
340 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_init()
343 addr = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * in axienet_dma_bd_init()
344 ((i + 1) % lp->rx_bd_num); in axienet_dma_bd_init()
345 lp->rx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
346 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
347 lp->rx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
349 skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_dma_bd_init()
353 lp->rx_bd_v[i].skb = skb; in axienet_dma_bd_init()
354 addr = dma_map_single(lp->dev, skb->data, in axienet_dma_bd_init()
355 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_init()
356 if (dma_mapping_error(lp->dev, addr)) { in axienet_dma_bd_init()
360 desc_set_phys_addr(lp, addr, &lp->rx_bd_v[i]); in axienet_dma_bd_init()
362 lp->rx_bd_v[i].cntrl = lp->max_frm_size; in axienet_dma_bd_init()
365 axienet_dma_start(lp); in axienet_dma_bd_init()
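axienet_dma_bd_init() allocates both rings with dma_alloc_coherent() and links each descriptor's next/next_msb words to the DMA address of its successor, wrapping modulo the ring size so the hardware walks a circular list. The linking arithmetic in isolation:

    #include <stdint.h>
    #include <stdio.h>

    struct bd {                             /* trimmed descriptor stand-in */
            uint32_t next;
            uint32_t next_msb;
    };

    int main(void)
    {
            enum { N = 4 };
            struct bd ring[N];
            uint64_t ring_pa = 0x10000000ull; /* pretend DMA address of ring */

            for (int i = 0; i < N; i++) {
                    uint64_t next = ring_pa + sizeof(struct bd) * ((i + 1) % N);

                    ring[i].next = (uint32_t)next;
                    ring[i].next_msb = (uint32_t)(next >> 32);
            }
            /* The last descriptor points back at the first: */
            printf("bd[3].next=%#x, ring base=%#llx\n", ring[N - 1].next,
                   (unsigned long long)ring_pa);
            return 0;
    }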
384 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_mac_address() local
392 axienet_iow(lp, XAE_UAW0_OFFSET, in axienet_set_mac_address()
397 axienet_iow(lp, XAE_UAW1_OFFSET, in axienet_set_mac_address()
398 (((axienet_ior(lp, XAE_UAW1_OFFSET)) & in axienet_set_mac_address()
437 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_multicast_list() local
446 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
448 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
466 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
469 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
470 axienet_iow(lp, XAE_AF0_OFFSET, af0reg); in axienet_set_multicast_list()
471 axienet_iow(lp, XAE_AF1_OFFSET, af1reg); in axienet_set_multicast_list()
475 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
478 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
481 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
484 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
485 axienet_iow(lp, XAE_AF0_OFFSET, 0); in axienet_set_multicast_list()
486 axienet_iow(lp, XAE_AF1_OFFSET, 0); in axienet_set_multicast_list()
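The multicast path selects a filter slot through XAE_FMI_OFFSET and loads each entry's MAC address into the two address-filter words XAE_AF0_OFFSET/XAE_AF1_OFFSET, clearing unused slots to zero. The packing of six MAC bytes into those two 32-bit words presumably looks like the following (the byte order is an assumption, not taken from these matches):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            const uint8_t mac[6] = { 0x00, 0x0a, 0x35, 0x01, 0x02, 0x03 };
            uint32_t af0 = mac[0] | mac[1] << 8 | mac[2] << 16 |
                           (uint32_t)mac[3] << 24;
            uint32_t af1 = mac[4] | mac[5] << 8;

            printf("af0=%#010x af1=%#06x\n", af0, af1);
            return 0;
    }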
507 struct axienet_local *lp = netdev_priv(ndev); in axienet_setoptions() local
511 reg = ((axienet_ior(lp, tp->reg)) & ~(tp->m_or)); in axienet_setoptions()
514 axienet_iow(lp, tp->reg, reg); in axienet_setoptions()
518 lp->options |= options; in axienet_setoptions()
521 static int __axienet_device_reset(struct axienet_local *lp) in __axienet_device_reset() argument
533 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, XAXIDMA_CR_RESET_MASK); in __axienet_device_reset()
536 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
539 dev_err(lp->dev, "%s: DMA reset timeout!\n", __func__); in __axienet_device_reset()
546 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
549 dev_err(lp->dev, "%s: timeout waiting for PhyRstCmplt\n", __func__); in __axienet_device_reset()
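__axienet_device_reset() sets the DMA reset bit and then waits, via the kernel's read_poll_timeout() helpers, for the core to clear it and for PhyRstCmplt to assert; the visible arguments (DELAY_OF_ONE_MILLISEC, 50000) are the per-iteration sleep and the timeout budget. A userspace analogue of that wait loop:

    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>

    static int fake_reads;
    static uint32_t read_status(void)
    {
            return ++fake_reads > 3 ? 0x1 : 0;  /* "done" after a few polls */
    }

    static int poll_timeout(uint32_t done_mask, unsigned int sleep_us,
                            unsigned int timeout_us)
    {
            for (unsigned int waited = 0; waited <= timeout_us;
                 waited += sleep_us) {
                    if (read_status() & done_mask)
                            return 0;
                    usleep(sleep_us);
            }
            return -1;                  /* caller logs "DMA reset timeout!" */
    }

    int main(void)
    {
            printf("reset %s\n", poll_timeout(0x1, 1000, 50000) ? "timed out"
                                                                : "complete");
            return 0;
    }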
558 * @lp: Pointer to the axienet_local structure
560 static void axienet_dma_stop(struct axienet_local *lp) in axienet_dma_stop() argument
565 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_stop()
567 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_stop()
568 synchronize_irq(lp->rx_irq); in axienet_dma_stop()
570 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_stop()
572 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_stop()
573 synchronize_irq(lp->tx_irq); in axienet_dma_stop()
576 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
579 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
582 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
585 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
589 axienet_lock_mii(lp); in axienet_dma_stop()
590 __axienet_device_reset(lp); in axienet_dma_stop()
591 axienet_unlock_mii(lp); in axienet_dma_stop()
609 struct axienet_local *lp = netdev_priv(ndev); in axienet_device_reset() local
612 lp->max_frm_size = XAE_MAX_VLAN_FRAME_SIZE; in axienet_device_reset()
613 lp->options |= XAE_OPTION_VLAN; in axienet_device_reset()
614 lp->options &= (~XAE_OPTION_JUMBO); in axienet_device_reset()
618 lp->max_frm_size = ndev->mtu + VLAN_ETH_HLEN + in axienet_device_reset()
621 if (lp->max_frm_size <= lp->rxmem) in axienet_device_reset()
622 lp->options |= XAE_OPTION_JUMBO; in axienet_device_reset()
625 if (!lp->use_dmaengine) { in axienet_device_reset()
626 ret = __axienet_device_reset(lp); in axienet_device_reset()
638 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_device_reset()
640 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_device_reset()
642 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_device_reset()
644 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_device_reset()
645 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_device_reset()
648 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_device_reset()
653 axienet_setoptions(ndev, lp->options & in axienet_device_reset()
657 axienet_setoptions(ndev, lp->options); in axienet_device_reset()
666 * @lp: Pointer to the axienet_local structure
678 static int axienet_free_tx_chain(struct axienet_local *lp, u32 first_bd, in axienet_free_tx_chain() argument
687 cur_p = &lp->tx_bd_v[(first_bd + i) % lp->tx_bd_num]; in axienet_free_tx_chain()
698 phys = desc_get_phys_addr(lp, cur_p); in axienet_free_tx_chain()
699 dma_unmap_single(lp->dev, phys, in axienet_free_tx_chain()
725 * @lp: Pointer to the axienet_local structure
736 static inline int axienet_check_tx_bd_space(struct axienet_local *lp, in axienet_check_tx_bd_space() argument
743 cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) % in axienet_check_tx_bd_space()
744 lp->tx_bd_num]; in axienet_check_tx_bd_space()
760 struct axienet_local *lp = data; in axienet_dma_tx_cb() local
764 skbuf_dma = axienet_get_tx_desc(lp, lp->tx_ring_tail++); in axienet_dma_tx_cb()
766 txq = skb_get_tx_queue(lp->ndev, skbuf_dma->skb); in axienet_dma_tx_cb()
767 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_dma_tx_cb()
768 u64_stats_add(&lp->tx_bytes, len); in axienet_dma_tx_cb()
769 u64_stats_add(&lp->tx_packets, 1); in axienet_dma_tx_cb()
770 u64_stats_update_end(&lp->tx_stat_sync); in axienet_dma_tx_cb()
771 dma_unmap_sg(lp->dev, skbuf_dma->sgl, skbuf_dma->sg_len, DMA_TO_DEVICE); in axienet_dma_tx_cb()
774 CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX), in axienet_dma_tx_cb()
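The dmaengine TX completion callback advances tx_ring_tail, accumulates the u64 byte/packet counters, unmaps the scatterlist, and wakes the queue based on CIRC_SPACE(). CIRC_SPACE() (from <linux/circ_buf.h>) keeps one slot permanently empty so that a full ring is distinguishable from an empty one; re-derived in userspace:

    #include <stdio.h>

    #define CIRC_SPACE(head, tail, size) \
            (((tail) - ((head) + 1)) & ((size) - 1))

    int main(void)
    {
            int size = 8, head = 5, tail = 2;

            /* 8-entry ring, producer at 5, consumer at 2: 4 free slots
             * (one slot stays empty to tell "full" apart from "empty"). */
            printf("space=%d\n", CIRC_SPACE(head, tail, size));
            return 0;
    }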
797 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit_dmaengine() local
807 dma_dev = lp->tx_chan->device; in axienet_start_xmit_dmaengine()
809 if (CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX) <= sg_len) { in axienet_start_xmit_dmaengine()
816 skbuf_dma = axienet_get_tx_desc(lp, lp->tx_ring_head); in axienet_start_xmit_dmaengine()
820 lp->tx_ring_head++; in axienet_start_xmit_dmaengine()
826 ret = dma_map_sg(lp->dev, skbuf_dma->sgl, sg_len, DMA_TO_DEVICE); in axienet_start_xmit_dmaengine()
832 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit_dmaengine()
835 } else if (lp->features & XAE_FEATURE_PARTIAL_TX_CSUM) { in axienet_start_xmit_dmaengine()
846 dma_tx_desc = dma_dev->device_prep_slave_sg(lp->tx_chan, skbuf_dma->sgl, in axienet_start_xmit_dmaengine()
854 dma_tx_desc->callback_param = lp; in axienet_start_xmit_dmaengine()
857 dma_async_issue_pending(lp->tx_chan); in axienet_start_xmit_dmaengine()
858 txq = skb_get_tx_queue(lp->ndev, skb); in axienet_start_xmit_dmaengine()
860 netif_txq_maybe_stop(txq, CIRC_SPACE(lp->tx_ring_head, lp->tx_ring_tail, TX_BD_NUM_MAX), in axienet_start_xmit_dmaengine()
866 dma_unmap_sg(lp->dev, skbuf_dma->sgl, sg_len, DMA_TO_DEVICE); in axienet_start_xmit_dmaengine()
888 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_tx); in axienet_tx_poll() local
889 struct net_device *ndev = lp->ndev; in axienet_tx_poll()
893 packets = axienet_free_tx_chain(lp, lp->tx_bd_ci, budget, false, &size, budget); in axienet_tx_poll()
896 lp->tx_bd_ci += packets; in axienet_tx_poll()
897 if (lp->tx_bd_ci >= lp->tx_bd_num) in axienet_tx_poll()
898 lp->tx_bd_ci %= lp->tx_bd_num; in axienet_tx_poll()
900 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_tx_poll()
901 u64_stats_add(&lp->tx_packets, packets); in axienet_tx_poll()
902 u64_stats_add(&lp->tx_bytes, size); in axienet_tx_poll()
903 u64_stats_update_end(&lp->tx_stat_sync); in axienet_tx_poll()
908 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_tx_poll()
917 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_tx_poll()
945 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit() local
948 orig_tail_ptr = lp->tx_bd_tail; in axienet_start_xmit()
952 cur_p = &lp->tx_bd_v[orig_tail_ptr]; in axienet_start_xmit()
954 if (axienet_check_tx_bd_space(lp, num_frag + 1)) { in axienet_start_xmit()
966 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit()
969 } else if (lp->features & XAE_FEATURE_PARTIAL_TX_CSUM) { in axienet_start_xmit()
980 phys = dma_map_single(lp->dev, skb->data, in axienet_start_xmit()
982 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
988 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
992 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
994 cur_p = &lp->tx_bd_v[new_tail_ptr]; in axienet_start_xmit()
996 phys = dma_map_single(lp->dev, in axienet_start_xmit()
1000 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
1004 axienet_free_tx_chain(lp, orig_tail_ptr, ii + 1, in axienet_start_xmit()
1008 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
1015 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * new_tail_ptr; in axienet_start_xmit()
1016 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
1018 WRITE_ONCE(lp->tx_bd_tail, new_tail_ptr); in axienet_start_xmit()
1021 axienet_dma_out_addr(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p); in axienet_start_xmit()
1024 if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) { in axienet_start_xmit()
1031 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_start_xmit()
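axienet_start_xmit() stops the queue when fewer than MAX_SKB_FRAGS + 1 descriptors remain, then immediately re-checks and wakes: the TX completion path may free descriptors between the check and the stop, and it only wakes a queue it observes as stopped, so the second check closes the window where the queue could stall forever. The shape of that dance:

    #include <stdbool.h>
    #include <stdio.h>

    static int free_descs = 3;
    static bool queue_stopped;

    static bool ring_has_room(int needed) { return free_descs >= needed; }
    static void queue_stop(void) { queue_stopped = true; }
    static void queue_wake(void) { queue_stopped = false; }

    static void tx_maybe_stop(int needed)
    {
            if (ring_has_room(needed))
                    return;
            queue_stop();
            /* Completion may have freed descriptors after the first check;
             * re-check so a racing wake-up is not lost. */
            if (ring_has_room(needed))
                    queue_wake();
    }

    int main(void)
    {
            tx_maybe_stop(18);          /* e.g. MAX_SKB_FRAGS + 1 */
            printf("stopped=%d\n", queue_stopped);
            return 0;
    }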
1049 struct axienet_local *lp = data; in axienet_dma_rx_cb() local
1053 skbuf_dma = axienet_get_rx_desc(lp, lp->rx_ring_tail++); in axienet_dma_rx_cb()
1057 dma_unmap_single(lp->dev, skbuf_dma->dma_address, lp->max_frm_size, in axienet_dma_rx_cb()
1062 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_dma_rx_cb()
1066 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_dma_rx_cb()
1067 u64_stats_add(&lp->rx_packets, 1); in axienet_dma_rx_cb()
1068 u64_stats_add(&lp->rx_bytes, rx_len); in axienet_dma_rx_cb()
1069 u64_stats_update_end(&lp->rx_stat_sync); in axienet_dma_rx_cb()
1070 axienet_rx_submit_desc(lp->ndev); in axienet_dma_rx_cb()
1071 dma_async_issue_pending(lp->rx_chan); in axienet_dma_rx_cb()
1090 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_rx); in axienet_rx_poll() local
1092 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
1111 phys = desc_get_phys_addr(lp, cur_p); in axienet_rx_poll()
1112 dma_unmap_single(lp->dev, phys, lp->max_frm_size, in axienet_rx_poll()
1116 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_rx_poll()
1121 if (lp->features & XAE_FEATURE_FULL_RX_CSUM) { in axienet_rx_poll()
1128 } else if ((lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) != 0 && in axienet_rx_poll()
1141 new_skb = napi_alloc_skb(napi, lp->max_frm_size); in axienet_rx_poll()
1145 phys = dma_map_single(lp->dev, new_skb->data, in axienet_rx_poll()
1146 lp->max_frm_size, in axienet_rx_poll()
1148 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_rx_poll()
1150 netdev_err(lp->ndev, "RX DMA mapping error\n"); in axienet_rx_poll()
1154 desc_set_phys_addr(lp, phys, cur_p); in axienet_rx_poll()
1156 cur_p->cntrl = lp->max_frm_size; in axienet_rx_poll()
1163 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci; in axienet_rx_poll()
1165 if (++lp->rx_bd_ci >= lp->rx_bd_num) in axienet_rx_poll()
1166 lp->rx_bd_ci = 0; in axienet_rx_poll()
1167 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
1170 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_rx_poll()
1171 u64_stats_add(&lp->rx_packets, packets); in axienet_rx_poll()
1172 u64_stats_add(&lp->rx_bytes, size); in axienet_rx_poll()
1173 u64_stats_update_end(&lp->rx_stat_sync); in axienet_rx_poll()
1176 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, tail_p); in axienet_rx_poll()
1183 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_rx_poll()
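axienet_rx_poll() is a standard NAPI loop: consume completed RX descriptors up to the budget, refill and kick the tail pointer, and only when fewer than budget packets were processed leave polling mode (napi_complete_done() plus the XAXIDMA_RX_CR_OFFSET write that re-enables interrupts). Its control flow, reduced to a skeleton:

    #include <stdbool.h>
    #include <stdio.h>

    static int pending = 5;             /* simulated packets in the ring */

    static bool rx_ring_has_packet(void) { return pending > 0; }
    static void process_one_packet(void) { pending--; }
    static void reenable_rx_irq(void) { printf("irq re-enabled\n"); }

    static int rx_poll(int budget)
    {
            int done = 0;

            while (done < budget && rx_ring_has_packet()) {
                    process_one_packet(); /* unmap, csum, napi_gro_receive */
                    done++;
            }
            if (done < budget)          /* ring drained: leave poll mode */
                    reenable_rx_irq();  /* napi_complete_done() + CR write */
            return done;
    }

    int main(void)
    {
            printf("polled %d\n", rx_poll(64));
            return 0;
    }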
1202 struct axienet_local *lp = netdev_priv(ndev); in axienet_tx_irq() local
1204 status = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_tx_irq()
1209 axienet_dma_out32(lp, XAXIDMA_TX_SR_OFFSET, status); in axienet_tx_irq()
1214 (lp->tx_bd_v[lp->tx_bd_ci]).phys_msb, in axienet_tx_irq()
1215 (lp->tx_bd_v[lp->tx_bd_ci]).phys); in axienet_tx_irq()
1216 schedule_work(&lp->dma_err_task); in axienet_tx_irq()
1221 u32 cr = lp->tx_dma_cr; in axienet_tx_irq()
1224 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_tx_irq()
1226 napi_schedule(&lp->napi_tx); in axienet_tx_irq()
1246 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_irq() local
1248 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_rx_irq()
1253 axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status); in axienet_rx_irq()
1258 (lp->rx_bd_v[lp->rx_bd_ci]).phys_msb, in axienet_rx_irq()
1259 (lp->rx_bd_v[lp->rx_bd_ci]).phys); in axienet_rx_irq()
1260 schedule_work(&lp->dma_err_task); in axienet_rx_irq()
1265 u32 cr = lp->rx_dma_cr; in axienet_rx_irq()
1268 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_rx_irq()
1270 napi_schedule(&lp->napi_rx); in axienet_rx_irq()
1288 struct axienet_local *lp = netdev_priv(ndev); in axienet_eth_irq() local
1291 pending = axienet_ior(lp, XAE_IP_OFFSET); in axienet_eth_irq()
1301 axienet_iow(lp, XAE_IS_OFFSET, pending); in axienet_eth_irq()
1318 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_submit_desc() local
1323 skbuf_dma = axienet_get_rx_desc(lp, lp->rx_ring_head); in axienet_rx_submit_desc()
1327 lp->rx_ring_head++; in axienet_rx_submit_desc()
1328 skb = netdev_alloc_skb(ndev, lp->max_frm_size); in axienet_rx_submit_desc()
1333 addr = dma_map_single(lp->dev, skb->data, lp->max_frm_size, DMA_FROM_DEVICE); in axienet_rx_submit_desc()
1334 if (unlikely(dma_mapping_error(lp->dev, addr))) { in axienet_rx_submit_desc()
1340 sg_dma_len(skbuf_dma->sgl) = lp->max_frm_size; in axienet_rx_submit_desc()
1341 dma_rx_desc = dmaengine_prep_slave_sg(lp->rx_chan, skbuf_dma->sgl, in axienet_rx_submit_desc()
1350 dma_rx_desc->callback_param = lp; in axienet_rx_submit_desc()
1357 dma_unmap_single(lp->dev, addr, lp->max_frm_size, DMA_FROM_DEVICE); in axienet_rx_submit_desc()
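axienet_rx_submit_desc() is the refill half of the dmaengine RX path: allocate an skb, DMA-map it, describe it with a one-entry scatterlist, and hand it to the channel with a completion callback before issuing. A trimmed kernel-style sketch of the same sequence (standard dmaengine/netdev APIs, simplified error handling; not the driver's exact code):

    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>
    #include <linux/errno.h>
    #include <linux/netdevice.h>
    #include <linux/scatterlist.h>
    #include <linux/skbuff.h>

    static int rx_submit_one(struct net_device *ndev, struct dma_chan *chan,
                             struct scatterlist *sgl, unsigned int buf_len,
                             dma_async_tx_callback cb, void *cb_param)
    {
            struct dma_async_tx_descriptor *desc;
            struct device *dev = ndev->dev.parent;
            struct sk_buff *skb;
            dma_addr_t addr;

            skb = netdev_alloc_skb(ndev, buf_len);
            if (!skb)
                    return -ENOMEM;

            addr = dma_map_single(dev, skb->data, buf_len, DMA_FROM_DEVICE);
            if (dma_mapping_error(dev, addr))
                    goto free_skb;

            sg_init_table(sgl, 1);
            sg_dma_address(sgl) = addr;
            sg_dma_len(sgl) = buf_len;

            desc = dmaengine_prep_slave_sg(chan, sgl, 1, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT);
            if (!desc)
                    goto unmap;

            desc->callback = cb;        /* e.g. axienet_dma_rx_cb() */
            desc->callback_param = cb_param;
            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;

    unmap:
            dma_unmap_single(dev, addr, buf_len, DMA_FROM_DEVICE);
    free_skb:
            dev_kfree_skb(skb);
            return -EIO;
    }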
1373 struct axienet_local *lp = netdev_priv(ndev); in axienet_init_dmaengine() local
1377 lp->tx_chan = dma_request_chan(lp->dev, "tx_chan0"); in axienet_init_dmaengine()
1378 if (IS_ERR(lp->tx_chan)) { in axienet_init_dmaengine()
1379 dev_err(lp->dev, "No Ethernet DMA (TX) channel found\n"); in axienet_init_dmaengine()
1380 return PTR_ERR(lp->tx_chan); in axienet_init_dmaengine()
1383 lp->rx_chan = dma_request_chan(lp->dev, "rx_chan0"); in axienet_init_dmaengine()
1384 if (IS_ERR(lp->rx_chan)) { in axienet_init_dmaengine()
1385 ret = PTR_ERR(lp->rx_chan); in axienet_init_dmaengine()
1386 dev_err(lp->dev, "No Ethernet DMA (RX) channel found\n"); in axienet_init_dmaengine()
1390 lp->tx_ring_tail = 0; in axienet_init_dmaengine()
1391 lp->tx_ring_head = 0; in axienet_init_dmaengine()
1392 lp->rx_ring_tail = 0; in axienet_init_dmaengine()
1393 lp->rx_ring_head = 0; in axienet_init_dmaengine()
1394 lp->tx_skb_ring = kcalloc(TX_BD_NUM_MAX, sizeof(*lp->tx_skb_ring), in axienet_init_dmaengine()
1396 if (!lp->tx_skb_ring) { in axienet_init_dmaengine()
1406 lp->tx_skb_ring[i] = skbuf_dma; in axienet_init_dmaengine()
1409 lp->rx_skb_ring = kcalloc(RX_BUF_NUM_DEFAULT, sizeof(*lp->rx_skb_ring), in axienet_init_dmaengine()
1411 if (!lp->rx_skb_ring) { in axienet_init_dmaengine()
1421 lp->rx_skb_ring[i] = skbuf_dma; in axienet_init_dmaengine()
1426 dma_async_issue_pending(lp->rx_chan); in axienet_init_dmaengine()
1432 kfree(lp->rx_skb_ring[i]); in axienet_init_dmaengine()
1433 kfree(lp->rx_skb_ring); in axienet_init_dmaengine()
1436 kfree(lp->tx_skb_ring[i]); in axienet_init_dmaengine()
1437 kfree(lp->tx_skb_ring); in axienet_init_dmaengine()
1439 dma_release_channel(lp->rx_chan); in axienet_init_dmaengine()
1441 dma_release_channel(lp->tx_chan); in axienet_init_dmaengine()
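axienet_init_dmaengine() acquires its resources in order (TX channel, RX channel, TX skb ring entries, RX skb ring entries) and the error path above unwinds them in exactly the reverse order through a goto ladder. The idiom, reduced to two resources:

    #include <stdio.h>
    #include <stdlib.h>

    static int setup(void **a, void **b)
    {
            *a = malloc(64);
            if (!*a)
                    return -1;
            *b = malloc(64);
            if (!*b)
                    goto err_free_a;    /* undo in reverse order */
            return 0;

    err_free_a:
            free(*a);
            *a = NULL;
            return -1;
    }

    int main(void)
    {
            void *a = NULL, *b = NULL;

            if (setup(&a, &b) == 0) {
                    puts("acquired");
                    free(b);
                    free(a);
            }
            return 0;
    }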
1459 struct axienet_local *lp = netdev_priv(ndev); in axienet_init_legacy_dma() local
1462 INIT_WORK(&lp->dma_err_task, axienet_dma_err_handler); in axienet_init_legacy_dma()
1464 napi_enable(&lp->napi_rx); in axienet_init_legacy_dma()
1465 napi_enable(&lp->napi_tx); in axienet_init_legacy_dma()
1468 ret = request_irq(lp->tx_irq, axienet_tx_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1473 ret = request_irq(lp->rx_irq, axienet_rx_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1478 if (lp->eth_irq > 0) { in axienet_init_legacy_dma()
1479 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_init_legacy_dma()
1488 free_irq(lp->rx_irq, ndev); in axienet_init_legacy_dma()
1490 free_irq(lp->tx_irq, ndev); in axienet_init_legacy_dma()
1492 napi_disable(&lp->napi_tx); in axienet_init_legacy_dma()
1493 napi_disable(&lp->napi_rx); in axienet_init_legacy_dma()
1494 cancel_work_sync(&lp->dma_err_task); in axienet_init_legacy_dma()
1495 dev_err(lp->dev, "request_irq() failed\n"); in axienet_init_legacy_dma()
1515 struct axienet_local *lp = netdev_priv(ndev); in axienet_open() local
1523 axienet_lock_mii(lp); in axienet_open()
1525 axienet_unlock_mii(lp); in axienet_open()
1527 ret = phylink_of_phy_connect(lp->phylink, lp->dev->of_node, 0); in axienet_open()
1529 dev_err(lp->dev, "phylink_of_phy_connect() failed: %d\n", ret); in axienet_open()
1533 phylink_start(lp->phylink); in axienet_open()
1535 if (lp->use_dmaengine) { in axienet_open()
1537 if (lp->eth_irq > 0) { in axienet_open()
1538 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_open()
1556 if (lp->eth_irq > 0) in axienet_open()
1557 free_irq(lp->eth_irq, ndev); in axienet_open()
1559 phylink_stop(lp->phylink); in axienet_open()
1560 phylink_disconnect_phy(lp->phylink); in axienet_open()
1576 struct axienet_local *lp = netdev_priv(ndev); in axienet_stop() local
1581 if (!lp->use_dmaengine) { in axienet_stop()
1582 napi_disable(&lp->napi_tx); in axienet_stop()
1583 napi_disable(&lp->napi_rx); in axienet_stop()
1586 phylink_stop(lp->phylink); in axienet_stop()
1587 phylink_disconnect_phy(lp->phylink); in axienet_stop()
1589 axienet_setoptions(ndev, lp->options & in axienet_stop()
1592 if (!lp->use_dmaengine) { in axienet_stop()
1593 axienet_dma_stop(lp); in axienet_stop()
1594 cancel_work_sync(&lp->dma_err_task); in axienet_stop()
1595 free_irq(lp->tx_irq, ndev); in axienet_stop()
1596 free_irq(lp->rx_irq, ndev); in axienet_stop()
1599 dmaengine_terminate_sync(lp->tx_chan); in axienet_stop()
1600 dmaengine_synchronize(lp->tx_chan); in axienet_stop()
1601 dmaengine_terminate_sync(lp->rx_chan); in axienet_stop()
1602 dmaengine_synchronize(lp->rx_chan); in axienet_stop()
1605 kfree(lp->tx_skb_ring[i]); in axienet_stop()
1606 kfree(lp->tx_skb_ring); in axienet_stop()
1608 kfree(lp->rx_skb_ring[i]); in axienet_stop()
1609 kfree(lp->rx_skb_ring); in axienet_stop()
1611 dma_release_channel(lp->rx_chan); in axienet_stop()
1612 dma_release_channel(lp->tx_chan); in axienet_stop()
1615 axienet_iow(lp, XAE_IE_OFFSET, 0); in axienet_stop()
1617 if (lp->eth_irq > 0) in axienet_stop()
1618 free_irq(lp->eth_irq, ndev); in axienet_stop()
1635 struct axienet_local *lp = netdev_priv(ndev); in axienet_change_mtu() local
1641 XAE_TRL_SIZE) > lp->rxmem) in axienet_change_mtu()
1659 struct axienet_local *lp = netdev_priv(ndev); in axienet_poll_controller() local
1660 disable_irq(lp->tx_irq); in axienet_poll_controller()
1661 disable_irq(lp->rx_irq); in axienet_poll_controller()
1662 axienet_rx_irq(lp->tx_irq, ndev); in axienet_poll_controller()
1663 axienet_tx_irq(lp->rx_irq, ndev); in axienet_poll_controller()
1664 enable_irq(lp->tx_irq); in axienet_poll_controller()
1665 enable_irq(lp->rx_irq); in axienet_poll_controller()
1671 struct axienet_local *lp = netdev_priv(dev); in axienet_ioctl() local
1676 return phylink_mii_ioctl(lp->phylink, rq, cmd); in axienet_ioctl()
1682 struct axienet_local *lp = netdev_priv(dev); in axienet_get_stats64() local
1688 start = u64_stats_fetch_begin(&lp->rx_stat_sync); in axienet_get_stats64()
1689 stats->rx_packets = u64_stats_read(&lp->rx_packets); in axienet_get_stats64()
1690 stats->rx_bytes = u64_stats_read(&lp->rx_bytes); in axienet_get_stats64()
1691 } while (u64_stats_fetch_retry(&lp->rx_stat_sync, start)); in axienet_get_stats64()
1694 start = u64_stats_fetch_begin(&lp->tx_stat_sync); in axienet_get_stats64()
1695 stats->tx_packets = u64_stats_read(&lp->tx_packets); in axienet_get_stats64()
1696 stats->tx_bytes = u64_stats_read(&lp->tx_bytes); in axienet_get_stats64()
1697 } while (u64_stats_fetch_retry(&lp->tx_stat_sync, start)); in axienet_get_stats64()
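axienet_get_stats64() reads the counters under a u64_stats seqcount: snapshot, then retry if a writer bumped the sequence meanwhile, which keeps 64-bit counters coherent on 32-bit machines without locking the hot path. A simplified userspace analogue (memory-ordering subtleties elided):

    #include <stdatomic.h>
    #include <stdint.h>

    struct stats {
            _Atomic unsigned int seq;   /* odd while a write is in flight */
            uint64_t packets, bytes;
    };

    static void stats_add(struct stats *s, uint64_t pkts, uint64_t len)
    {
            atomic_fetch_add(&s->seq, 1); /* begin: seq becomes odd */
            s->packets += pkts;
            s->bytes += len;
            atomic_fetch_add(&s->seq, 1); /* end: seq even again */
    }

    static void stats_read(struct stats *s, uint64_t *p, uint64_t *b)
    {
            unsigned int start;

            do {
                    while ((start = atomic_load(&s->seq)) & 1)
                            ;           /* writer active: wait */
                    *p = s->packets;
                    *b = s->bytes;
            } while (atomic_load(&s->seq) != start);
    }

    int main(void)
    {
            struct stats s = { 0 };
            uint64_t p, b;

            stats_add(&s, 1, 60);
            stats_read(&s, &p, &b);
            return (int)p - 1;          /* exits 0 when the read saw the add */
    }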
1772 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_regs() local
1778 data[0] = axienet_ior(lp, XAE_RAF_OFFSET); in axienet_ethtools_get_regs()
1779 data[1] = axienet_ior(lp, XAE_TPF_OFFSET); in axienet_ethtools_get_regs()
1780 data[2] = axienet_ior(lp, XAE_IFGP_OFFSET); in axienet_ethtools_get_regs()
1781 data[3] = axienet_ior(lp, XAE_IS_OFFSET); in axienet_ethtools_get_regs()
1782 data[4] = axienet_ior(lp, XAE_IP_OFFSET); in axienet_ethtools_get_regs()
1783 data[5] = axienet_ior(lp, XAE_IE_OFFSET); in axienet_ethtools_get_regs()
1784 data[6] = axienet_ior(lp, XAE_TTAG_OFFSET); in axienet_ethtools_get_regs()
1785 data[7] = axienet_ior(lp, XAE_RTAG_OFFSET); in axienet_ethtools_get_regs()
1786 data[8] = axienet_ior(lp, XAE_UAWL_OFFSET); in axienet_ethtools_get_regs()
1787 data[9] = axienet_ior(lp, XAE_UAWU_OFFSET); in axienet_ethtools_get_regs()
1788 data[10] = axienet_ior(lp, XAE_TPID0_OFFSET); in axienet_ethtools_get_regs()
1789 data[11] = axienet_ior(lp, XAE_TPID1_OFFSET); in axienet_ethtools_get_regs()
1790 data[12] = axienet_ior(lp, XAE_PPST_OFFSET); in axienet_ethtools_get_regs()
1791 data[13] = axienet_ior(lp, XAE_RCW0_OFFSET); in axienet_ethtools_get_regs()
1792 data[14] = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_ethtools_get_regs()
1793 data[15] = axienet_ior(lp, XAE_TC_OFFSET); in axienet_ethtools_get_regs()
1794 data[16] = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_regs()
1795 data[17] = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_ethtools_get_regs()
1796 data[18] = axienet_ior(lp, XAE_PHYC_OFFSET); in axienet_ethtools_get_regs()
1797 data[19] = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_ethtools_get_regs()
1798 data[20] = axienet_ior(lp, XAE_MDIO_MCR_OFFSET); in axienet_ethtools_get_regs()
1799 data[21] = axienet_ior(lp, XAE_MDIO_MWD_OFFSET); in axienet_ethtools_get_regs()
1800 data[22] = axienet_ior(lp, XAE_MDIO_MRD_OFFSET); in axienet_ethtools_get_regs()
1801 data[27] = axienet_ior(lp, XAE_UAW0_OFFSET); in axienet_ethtools_get_regs()
1802 data[28] = axienet_ior(lp, XAE_UAW1_OFFSET); in axienet_ethtools_get_regs()
1803 data[29] = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_ethtools_get_regs()
1804 data[30] = axienet_ior(lp, XAE_AF0_OFFSET); in axienet_ethtools_get_regs()
1805 data[31] = axienet_ior(lp, XAE_AF1_OFFSET); in axienet_ethtools_get_regs()
1806 if (!lp->use_dmaengine) { in axienet_ethtools_get_regs()
1807 data[32] = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_ethtools_get_regs()
1808 data[33] = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_ethtools_get_regs()
1809 data[34] = axienet_dma_in32(lp, XAXIDMA_TX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1810 data[35] = axienet_dma_in32(lp, XAXIDMA_TX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1811 data[36] = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_ethtools_get_regs()
1812 data[37] = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_ethtools_get_regs()
1813 data[38] = axienet_dma_in32(lp, XAXIDMA_RX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1814 data[39] = axienet_dma_in32(lp, XAXIDMA_RX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1824 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_ringparam() local
1830 ering->rx_pending = lp->rx_bd_num; in axienet_ethtools_get_ringparam()
1833 ering->tx_pending = lp->tx_bd_num; in axienet_ethtools_get_ringparam()
1842 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_ringparam() local
1854 lp->rx_bd_num = ering->rx_pending; in axienet_ethtools_set_ringparam()
1855 lp->tx_bd_num = ering->tx_pending; in axienet_ethtools_set_ringparam()
1872 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_pauseparam() local
1874 phylink_ethtool_get_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_get_pauseparam()
1893 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_pauseparam() local
1895 return phylink_ethtool_set_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_set_pauseparam()
1917 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_coalesce() local
1919 ecoalesce->rx_max_coalesced_frames = lp->coalesce_count_rx; in axienet_ethtools_get_coalesce()
1920 ecoalesce->rx_coalesce_usecs = lp->coalesce_usec_rx; in axienet_ethtools_get_coalesce()
1921 ecoalesce->tx_max_coalesced_frames = lp->coalesce_count_tx; in axienet_ethtools_get_coalesce()
1922 ecoalesce->tx_coalesce_usecs = lp->coalesce_usec_tx; in axienet_ethtools_get_coalesce()
1945 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_coalesce() local
1954 lp->coalesce_count_rx = ecoalesce->rx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1956 lp->coalesce_usec_rx = ecoalesce->rx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1958 lp->coalesce_count_tx = ecoalesce->tx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1960 lp->coalesce_usec_tx = ecoalesce->tx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1969 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_link_ksettings() local
1971 return phylink_ethtool_ksettings_get(lp->phylink, cmd); in axienet_ethtools_get_link_ksettings()
1978 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_link_ksettings() local
1980 return phylink_ethtool_ksettings_set(lp->phylink, cmd); in axienet_ethtools_set_link_ksettings()
1985 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_nway_reset() local
1987 return phylink_ethtool_nway_reset(lp->phylink); in axienet_ethtools_nway_reset()
2035 struct axienet_local *lp = netdev_priv(ndev); in axienet_pcs_config() local
2038 if (lp->switch_x_sgmii) { in axienet_pcs_config()
2068 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_select_pcs() local
2072 return &lp->pcs; in axienet_mac_select_pcs()
2097 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_link_up() local
2100 emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_mac_link_up()
2119 axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg); in axienet_mac_link_up()
2121 fcc_reg = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_mac_link_up()
2130 axienet_iow(lp, XAE_FCC_OFFSET, fcc_reg); in axienet_mac_link_up()
2152 struct axienet_local *lp = container_of(work, struct axienet_local, in axienet_dma_err_handler() local
2154 struct net_device *ndev = lp->ndev; in axienet_dma_err_handler()
2156 napi_disable(&lp->napi_tx); in axienet_dma_err_handler()
2157 napi_disable(&lp->napi_rx); in axienet_dma_err_handler()
2159 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
2162 axienet_dma_stop(lp); in axienet_dma_err_handler()
2164 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_err_handler()
2165 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
2167 dma_addr_t addr = desc_get_phys_addr(lp, cur_p); in axienet_dma_err_handler()
2169 dma_unmap_single(lp->dev, addr, in axienet_dma_err_handler()
2188 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_err_handler()
2189 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
2198 lp->tx_bd_ci = 0; in axienet_dma_err_handler()
2199 lp->tx_bd_tail = 0; in axienet_dma_err_handler()
2200 lp->rx_bd_ci = 0; in axienet_dma_err_handler()
2202 axienet_dma_start(lp); in axienet_dma_err_handler()
2204 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_dma_err_handler()
2206 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_dma_err_handler()
2208 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_dma_err_handler()
2210 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_dma_err_handler()
2211 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_dma_err_handler()
2213 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_dma_err_handler()
2218 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
2222 axienet_setoptions(ndev, lp->options); in axienet_dma_err_handler()
2223 napi_enable(&lp->napi_rx); in axienet_dma_err_handler()
2224 napi_enable(&lp->napi_tx); in axienet_dma_err_handler()
2243 struct axienet_local *lp; in axienet_probe() local
2250 ndev = alloc_etherdev(sizeof(*lp)); in axienet_probe()
2265 lp = netdev_priv(ndev); in axienet_probe()
2266 lp->ndev = ndev; in axienet_probe()
2267 lp->dev = &pdev->dev; in axienet_probe()
2268 lp->options = XAE_OPTION_DEFAULTS; in axienet_probe()
2269 lp->rx_bd_num = RX_BD_NUM_DEFAULT; in axienet_probe()
2270 lp->tx_bd_num = TX_BD_NUM_DEFAULT; in axienet_probe()
2272 u64_stats_init(&lp->rx_stat_sync); in axienet_probe()
2273 u64_stats_init(&lp->tx_stat_sync); in axienet_probe()
2275 lp->axi_clk = devm_clk_get_optional(&pdev->dev, "s_axi_lite_clk"); in axienet_probe()
2276 if (!lp->axi_clk) { in axienet_probe()
2280 lp->axi_clk = devm_clk_get_optional(&pdev->dev, NULL); in axienet_probe()
2282 if (IS_ERR(lp->axi_clk)) { in axienet_probe()
2283 ret = PTR_ERR(lp->axi_clk); in axienet_probe()
2286 ret = clk_prepare_enable(lp->axi_clk); in axienet_probe()
2292 lp->misc_clks[0].id = "axis_clk"; in axienet_probe()
2293 lp->misc_clks[1].id = "ref_clk"; in axienet_probe()
2294 lp->misc_clks[2].id = "mgt_clk"; in axienet_probe()
2296 ret = devm_clk_bulk_get_optional(&pdev->dev, XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2300 ret = clk_bulk_prepare_enable(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2305 lp->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &ethres); in axienet_probe()
2306 if (IS_ERR(lp->regs)) { in axienet_probe()
2307 ret = PTR_ERR(lp->regs); in axienet_probe()
2310 lp->regs_start = ethres->start; in axienet_probe()
2313 lp->features = 0; in axienet_probe()
2319 lp->csum_offload_on_tx_path = in axienet_probe()
2321 lp->features |= XAE_FEATURE_PARTIAL_TX_CSUM; in axienet_probe()
2326 lp->csum_offload_on_tx_path = in axienet_probe()
2328 lp->features |= XAE_FEATURE_FULL_TX_CSUM; in axienet_probe()
2333 lp->csum_offload_on_tx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
2340 lp->csum_offload_on_rx_path = in axienet_probe()
2342 lp->features |= XAE_FEATURE_PARTIAL_RX_CSUM; in axienet_probe()
2345 lp->csum_offload_on_rx_path = in axienet_probe()
2347 lp->features |= XAE_FEATURE_FULL_RX_CSUM; in axienet_probe()
2350 lp->csum_offload_on_rx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
2359 of_property_read_u32(pdev->dev.of_node, "xlnx,rxmem", &lp->rxmem); in axienet_probe()
2361 lp->switch_x_sgmii = of_property_read_bool(pdev->dev.of_node, in axienet_probe()
2370 lp->phy_mode = PHY_INTERFACE_MODE_MII; in axienet_probe()
2373 lp->phy_mode = PHY_INTERFACE_MODE_GMII; in axienet_probe()
2376 lp->phy_mode = PHY_INTERFACE_MODE_RGMII_ID; in axienet_probe()
2379 lp->phy_mode = PHY_INTERFACE_MODE_SGMII; in axienet_probe()
2382 lp->phy_mode = PHY_INTERFACE_MODE_1000BASEX; in axienet_probe()
2389 ret = of_get_phy_mode(pdev->dev.of_node, &lp->phy_mode); in axienet_probe()
2393 if (lp->switch_x_sgmii && lp->phy_mode != PHY_INTERFACE_MODE_SGMII && in axienet_probe()
2394 lp->phy_mode != PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2414 lp->dma_regs = devm_ioremap_resource(&pdev->dev, in axienet_probe()
2416 lp->rx_irq = irq_of_parse_and_map(np, 1); in axienet_probe()
2417 lp->tx_irq = irq_of_parse_and_map(np, 0); in axienet_probe()
2419 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2422 lp->dma_regs = devm_platform_get_and_ioremap_resource(pdev, 1, NULL); in axienet_probe()
2423 lp->rx_irq = platform_get_irq(pdev, 1); in axienet_probe()
2424 lp->tx_irq = platform_get_irq(pdev, 0); in axienet_probe()
2425 lp->eth_irq = platform_get_irq_optional(pdev, 2); in axienet_probe()
2427 if (IS_ERR(lp->dma_regs)) { in axienet_probe()
2429 ret = PTR_ERR(lp->dma_regs); in axienet_probe()
2432 if (lp->rx_irq <= 0 || lp->tx_irq <= 0) { in axienet_probe()
2439 ret = __axienet_device_reset(lp); in axienet_probe()
2451 if ((axienet_ior(lp, XAE_ID_OFFSET) >> 24) >= 0x9) { in axienet_probe()
2452 void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4; in axienet_probe()
2458 lp->features |= XAE_FEATURE_DMA_64BIT; in axienet_probe()
2466 if (!IS_ENABLED(CONFIG_64BIT) && lp->features & XAE_FEATURE_DMA_64BIT) { in axienet_probe()
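The probe detects 64-bit DMA capability empirically: on new-enough cores (ID register >= 0x9) it writes to the MSB half of the TX current-descriptor register and checks whether the bits stick, since 32-bit-only hardware reads them back as zero; CONFIG_64BIT kernels are then required to drive such hardware. The trick in miniature, with a fake register:

    #include <stdint.h>
    #include <stdio.h>

    static int hw_is_64bit;             /* flip to 1 to model 64-bit DMA */
    static uint32_t msb_reg;

    static void reg_write(uint32_t v) { msb_reg = hw_is_64bit ? v : 0; }
    static uint32_t reg_read(void) { return msb_reg; }

    int main(void)
    {
            reg_write(0xffffffffu);     /* poke the CDESC MSB half */
            printf("64-bit DMA: %s\n", reg_read() ? "yes" : "no");
            return 0;
    }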
2477 netif_napi_add(ndev, &lp->napi_rx, axienet_rx_poll); in axienet_probe()
2478 netif_napi_add(ndev, &lp->napi_tx, axienet_tx_poll); in axienet_probe()
2483 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2484 if (lp->eth_irq < 0 && lp->eth_irq != -ENXIO) { in axienet_probe()
2485 ret = lp->eth_irq; in axienet_probe()
2488 tx_chan = dma_request_chan(lp->dev, "tx_chan0"); in axienet_probe()
2491 dev_err_probe(lp->dev, ret, "No Ethernet DMA (TX) channel found\n"); in axienet_probe()
2505 lp->use_dmaengine = 1; in axienet_probe()
2508 if (lp->use_dmaengine) in axienet_probe()
2513 if (lp->eth_irq <= 0) in axienet_probe()
2526 lp->coalesce_count_rx = XAXIDMA_DFT_RX_THRESHOLD; in axienet_probe()
2527 lp->coalesce_count_tx = XAXIDMA_DFT_TX_THRESHOLD; in axienet_probe()
2528 lp->coalesce_usec_rx = XAXIDMA_DFT_RX_USEC; in axienet_probe()
2529 lp->coalesce_usec_tx = XAXIDMA_DFT_TX_USEC; in axienet_probe()
2531 ret = axienet_mdio_setup(lp); in axienet_probe()
2536 if (lp->phy_mode == PHY_INTERFACE_MODE_SGMII || in axienet_probe()
2537 lp->phy_mode == PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2551 lp->pcs_phy = of_mdio_find_device(np); in axienet_probe()
2552 if (!lp->pcs_phy) { in axienet_probe()
2558 lp->pcs.ops = &axienet_pcs_ops; in axienet_probe()
2559 lp->pcs.neg_mode = true; in axienet_probe()
2560 lp->pcs.poll = true; in axienet_probe()
2563 lp->phylink_config.dev = &ndev->dev; in axienet_probe()
2564 lp->phylink_config.type = PHYLINK_NETDEV; in axienet_probe()
2565 lp->phylink_config.mac_capabilities = MAC_SYM_PAUSE | MAC_ASYM_PAUSE | in axienet_probe()
2568 __set_bit(lp->phy_mode, lp->phylink_config.supported_interfaces); in axienet_probe()
2569 if (lp->switch_x_sgmii) { in axienet_probe()
2571 lp->phylink_config.supported_interfaces); in axienet_probe()
2573 lp->phylink_config.supported_interfaces); in axienet_probe()
2576 lp->phylink = phylink_create(&lp->phylink_config, pdev->dev.fwnode, in axienet_probe()
2577 lp->phy_mode, in axienet_probe()
2579 if (IS_ERR(lp->phylink)) { in axienet_probe()
2580 ret = PTR_ERR(lp->phylink); in axienet_probe()
2585 ret = register_netdev(lp->ndev); in axienet_probe()
2587 dev_err(lp->dev, "register_netdev() error (%i)\n", ret); in axienet_probe()
2594 phylink_destroy(lp->phylink); in axienet_probe()
2597 if (lp->pcs_phy) in axienet_probe()
2598 put_device(&lp->pcs_phy->dev); in axienet_probe()
2599 if (lp->mii_bus) in axienet_probe()
2600 axienet_mdio_teardown(lp); in axienet_probe()
2602 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2603 clk_disable_unprepare(lp->axi_clk); in axienet_probe()
2614 struct axienet_local *lp = netdev_priv(ndev); in axienet_remove() local
2618 if (lp->phylink) in axienet_remove()
2619 phylink_destroy(lp->phylink); in axienet_remove()
2621 if (lp->pcs_phy) in axienet_remove()
2622 put_device(&lp->pcs_phy->dev); in axienet_remove()
2624 axienet_mdio_teardown(lp); in axienet_remove()
2626 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_remove()
2627 clk_disable_unprepare(lp->axi_clk); in axienet_remove()