Lines Matching full:sg

104 static void otx2_dma_unmap_skb_frags(struct otx2_nic *pfvf, struct sg_list *sg)  in otx2_dma_unmap_skb_frags()  argument
108 for (seg = 0; seg < sg->num_segs; seg++) { in otx2_dma_unmap_skb_frags()
109 otx2_dma_unmap_page(pfvf, sg->dma_addr[seg], in otx2_dma_unmap_skb_frags()
110 sg->size[seg], DMA_TO_DEVICE); in otx2_dma_unmap_skb_frags()
112 sg->num_segs = 0; in otx2_dma_unmap_skb_frags()
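
The four hits above are the whole teardown pattern for one send-queue entry: every DMA mapping recorded in the sg_list is unmapped and the segment count is cleared so the slot can be reused. A minimal sketch, assuming only the fields visible in the matched lines (num_segs, dma_addr[], size[]) and the driver's otx2_dma_unmap_page() wrapper; any other statements in the real function are omitted:

    static void otx2_dma_unmap_skb_frags(struct otx2_nic *pfvf, struct sg_list *sg)
    {
            int seg;

            /* Unmap each segment that was mapped when the SQE was built */
            for (seg = 0; seg < sg->num_segs; seg++)
                    otx2_dma_unmap_page(pfvf, sg->dma_addr[seg],
                                        sg->size[seg], DMA_TO_DEVICE);

            /* Mark the slot empty for the next transmit */
            sg->num_segs = 0;
    }
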
120 struct sg_list *sg; in otx2_xdp_snd_pkt_handler() local
124 sg = &sq->sg[snd_comp->sqe_id]; in otx2_xdp_snd_pkt_handler()
126 pa = otx2_iova_to_phys(pfvf->iommu_domain, sg->dma_addr[0]); in otx2_xdp_snd_pkt_handler()
127 otx2_dma_unmap_page(pfvf, sg->dma_addr[0], in otx2_xdp_snd_pkt_handler()
128 sg->size[0], DMA_TO_DEVICE); in otx2_xdp_snd_pkt_handler()
143 struct sg_list *sg; in otx2_snd_pkt_handler() local
151 sg = &sq->sg[snd_comp->sqe_id]; in otx2_snd_pkt_handler()
152 skb = (struct sk_buff *)sg->skb; in otx2_snd_pkt_handler()
171 otx2_dma_unmap_skb_frags(pfvf, sg); in otx2_snd_pkt_handler()
173 sg->skb = (u64)NULL; in otx2_snd_pkt_handler()
257 struct nix_rx_sg_s *sg = &cqe->sg; in otx2_free_rcv_seg() local
262 start = (void *)sg; in otx2_free_rcv_seg()
265 sg = (struct nix_rx_sg_s *)start; in otx2_free_rcv_seg()
266 seg_addr = &sg->seg_addr; in otx2_free_rcv_seg()
267 for (seg = 0; seg < sg->segs; seg++, seg_addr++) in otx2_free_rcv_seg()
270 start += sizeof(*sg); in otx2_free_rcv_seg()
333 if (cqe->sg.segs) in otx2_check_rcv_errors()
344 struct nix_rx_sg_s *sg = &cqe->sg; in otx2_rcv_pkt_handler() local
364 start = (void *)sg; in otx2_rcv_pkt_handler()
367 sg = (struct nix_rx_sg_s *)start; in otx2_rcv_pkt_handler()
368 seg_addr = &sg->seg_addr; in otx2_rcv_pkt_handler()
369 seg_size = (void *)sg; in otx2_rcv_pkt_handler()
370 for (seg = 0; seg < sg->segs; seg++, seg_addr++) { in otx2_rcv_pkt_handler()
375 start += sizeof(*sg); in otx2_rcv_pkt_handler()
406 !cqe->sg.seg_addr) { in otx2_rx_napi_handler()
417 cqe->sg.seg_addr = 0x00; in otx2_rx_napi_handler()
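
The RX-side hits share one walk pattern: a CQE is followed by one or more nix_rx_sg_s entries, each holding a segment count plus per-segment addresses, and 'start' is advanced entry by entry. A sketch of that walk, assuming 'end' is derived from the CQE's descriptor size (not part of the matched text) and with the per-segment action reduced to a comment:

    void *start = (void *)sg;
    void *end = start + desc_bytes;   /* desc_bytes: assumed, taken from the
                                       * CQE parse header in the driver */
    u64 *seg_addr;
    int seg;

    while (start < end) {
            sg = (struct nix_rx_sg_s *)start;
            seg_addr = &sg->seg_addr;
            for (seg = 0; seg < sg->segs; seg++, seg_addr++)
                    /* free *seg_addr back to the pool (otx2_free_rcv_seg)
                     * or attach it to the skb (otx2_rcv_pkt_handler) */;
            start += sizeof(*sg);
    }
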
617 struct nix_sqe_sg_s *sg = NULL; in otx2_sqe_add_sg() local
622 sq->sg[sq->head].num_segs = 0; in otx2_sqe_add_sg()
626 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_sqe_add_sg()
627 sg->ld_type = NIX_SEND_LDTYPE_LDD; in otx2_sqe_add_sg()
628 sg->subdc = NIX_SUBDC_SG; in otx2_sqe_add_sg()
629 sg->segs = 0; in otx2_sqe_add_sg()
630 sg_lens = (void *)sg; in otx2_sqe_add_sg()
631 iova = (void *)sg + sizeof(*sg); in otx2_sqe_add_sg()
633 * So whether sg->segs is 2 or 3, offset += 16 bytes. in otx2_sqe_add_sg()
636 *offset += sizeof(*sg) + (3 * sizeof(u64)); in otx2_sqe_add_sg()
638 *offset += sizeof(*sg) + sizeof(u64); in otx2_sqe_add_sg()
645 sg->segs++; in otx2_sqe_add_sg()
649 sq->sg[sq->head].dma_addr[seg] = dma_addr; in otx2_sqe_add_sg()
650 sq->sg[sq->head].size[seg] = len; in otx2_sqe_add_sg()
651 sq->sg[sq->head].num_segs++; in otx2_sqe_add_sg()
654 sq->sg[sq->head].skb = (u64)skb; in otx2_sqe_add_sg()
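
Taken together, the otx2_sqe_add_sg() hits build one NIX_SUBDC_SG subdescriptor per group of up to three segments: the header carries the per-segment lengths, the IOVAs follow it, the offset is advanced so the next subdescriptor stays 16-byte aligned, and sq->sg[sq->head] mirrors each mapping for the completion path. A sketch reconstructed from those lines; the mapping call, the dma_addr/len locals, the three-segments-per-header constant and the exact slot indexing are assumptions, not matched text:

    for (seg = 0; seg < num_segs; seg++) {
            if ((seg % 3) == 0) {   /* start a new SG header */
                    sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset);
                    sg->ld_type = NIX_SEND_LDTYPE_LDD;
                    sg->subdc = NIX_SUBDC_SG;
                    sg->segs = 0;
                    sg_lens = (void *)sg;
                    iova = (void *)sg + sizeof(*sg);
                    /* keep the next subdescriptor 16-byte aligned */
                    if ((num_segs - seg) >= 2)
                            *offset += sizeof(*sg) + (3 * sizeof(u64));
                    else
                            *offset += sizeof(*sg) + sizeof(u64);
            }
            /* dma_addr/len: assumed to come from mapping fragment 'seg' */
            sg_lens[seg % 3] = len;
            iova[seg % 3] = dma_addr;
            sg->segs++;

            /* record the mapping so TX completion can unmap it */
            sq->sg[sq->head].dma_addr[seg] = dma_addr;
            sq->sg[sq->head].size[seg] = len;
            sq->sg[sq->head].num_segs++;
    }
    sq->sg[sq->head].skb = (u64)skb;
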
803 struct sg_list *sg = &sq->sg[sqe]; in otx2_dma_map_tso_skb() local
807 sg->num_segs = 0; in otx2_dma_map_tso_skb()
821 sg->dma_addr[sg->num_segs] = dma_addr; in otx2_dma_map_tso_skb()
822 sg->size[sg->num_segs] = len; in otx2_dma_map_tso_skb()
823 sg->num_segs++; in otx2_dma_map_tso_skb()
827 otx2_dma_unmap_skb_frags(pfvf, sg); in otx2_dma_map_tso_skb()
835 struct sg_list *sg = &sq->sg[sqe]; in otx2_tso_frag_dma_addr() local
840 return sg->dma_addr[0] + (seg_addr - (u64)skb->data); in otx2_tso_frag_dma_addr()
846 return sg->dma_addr[seg] + offset; in otx2_tso_frag_dma_addr()
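
The two return statements show the whole idea of otx2_tso_frag_dma_addr(): no new mapping is made for a TSO segment; the device address is the DMA address already recorded for the linear area or for the page fragment, plus the byte offset of seg_addr within it. A sketch, assuming the frag-offset computation via skb_frag_address() (only the two return expressions are matched text):

    if (seg < 0)
            /* seg_addr points into the skb linear area, mapped at dma_addr[0] */
            return sg->dma_addr[0] + (seg_addr - (u64)skb->data);

    /* seg_addr points into page fragment 'seg - 1', mapped at dma_addr[seg] */
    offset = seg_addr - (u64)skb_frag_address(&skb_shinfo(skb)->frags[seg - 1]);
    return sg->dma_addr[seg] + offset;
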
852 struct nix_sqe_sg_s *sg = NULL; in otx2_sqe_tso_add_sg() local
857 /* Add SG descriptors with buffer addresses */ in otx2_sqe_tso_add_sg()
860 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_sqe_tso_add_sg()
861 sg->ld_type = NIX_SEND_LDTYPE_LDD; in otx2_sqe_tso_add_sg()
862 sg->subdc = NIX_SUBDC_SG; in otx2_sqe_tso_add_sg()
863 sg->segs = 0; in otx2_sqe_tso_add_sg()
864 sg_lens = (void *)sg; in otx2_sqe_tso_add_sg()
865 iova = (void *)sg + sizeof(*sg); in otx2_sqe_tso_add_sg()
867 * So whether sg->segs is 2 or 3, offset += 16 bytes. in otx2_sqe_tso_add_sg()
870 *offset += sizeof(*sg) + (3 * sizeof(u64)); in otx2_sqe_tso_add_sg()
872 *offset += sizeof(*sg) + sizeof(u64); in otx2_sqe_tso_add_sg()
876 sg->segs++; in otx2_sqe_tso_add_sg()
953 sq->sg[first_sqe].skb = (u64)skb; in otx2_sq_append_tso()
1191 /* Add SG subdesc with data frags */ in otx2_sq_append_skb()
1193 otx2_dma_unmap_skb_frags(pfvf, &sq->sg[sq->head]); in otx2_sq_append_skb()
1234 if (cqe->sg.segs > 1) { in otx2_cleanup_rx_cqes()
1238 iova = cqe->sg.seg_addr - OTX2_HEAD_ROOM; in otx2_cleanup_rx_cqes()
1256 struct sg_list *sg; in otx2_cleanup_tx_cqes() local
1272 sg = &sq->sg[cqe->comp.sqe_id]; in otx2_cleanup_tx_cqes()
1273 skb = (struct sk_buff *)sg->skb; in otx2_cleanup_tx_cqes()
1277 otx2_dma_unmap_skb_frags(pfvf, sg); in otx2_cleanup_tx_cqes()
1279 sg->skb = (u64)NULL; in otx2_cleanup_tx_cqes()
1321 struct sg_list *sg; in otx2_free_pending_sqe() local
1327 sg = &sq->sg[sqe]; in otx2_free_pending_sqe()
1328 skb = (struct sk_buff *)sg->skb; in otx2_free_pending_sqe()
1332 otx2_dma_unmap_skb_frags(pfvf, sg); in otx2_free_pending_sqe()
1334 sg->skb = (u64)NULL; in otx2_free_pending_sqe()
1350 struct nix_sqe_sg_s *sg = NULL; in otx2_xdp_sqe_add_sg() local
1353 sg = (struct nix_sqe_sg_s *)(sq->sqe_base + *offset); in otx2_xdp_sqe_add_sg()
1354 sg->ld_type = NIX_SEND_LDTYPE_LDD; in otx2_xdp_sqe_add_sg()
1355 sg->subdc = NIX_SUBDC_SG; in otx2_xdp_sqe_add_sg()
1356 sg->segs = 1; in otx2_xdp_sqe_add_sg()
1357 sg->seg1_size = len; in otx2_xdp_sqe_add_sg()
1358 iova = (void *)sg + sizeof(*sg); in otx2_xdp_sqe_add_sg()
1360 *offset += sizeof(*sg) + sizeof(u64); in otx2_xdp_sqe_add_sg()
1362 sq->sg[sq->head].dma_addr[0] = dma_addr; in otx2_xdp_sqe_add_sg()
1363 sq->sg[sq->head].size[0] = len; in otx2_xdp_sqe_add_sg()
1364 sq->sg[sq->head].num_segs = 1; in otx2_xdp_sqe_add_sg()
1414 iova = cqe->sg.seg_addr - OTX2_HEAD_ROOM; in otx2_xdp_rcv_pkt_handler()
1422 cqe->sg.seg_size, false); in otx2_xdp_rcv_pkt_handler()
1433 cqe->sg.seg_size, qidx); in otx2_xdp_rcv_pkt_handler()
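
In the XDP RX hits, cqe->sg always describes a single segment: seg_addr is converted back from an IOVA to a CPU address, an xdp_buff is prepared over seg_size bytes behind OTX2_HEAD_ROOM of headroom, and the same seg_size is passed along if the frame is forwarded to an XDP send queue. A sketch of that preparation, assuming the standard xdp_init_buff()/xdp_prepare_buff() pairing and the driver's otx2_iova_to_phys() helper; the frame size and rxq arguments are assumptions:

    iova = cqe->sg.seg_addr - OTX2_HEAD_ROOM;
    pa = otx2_iova_to_phys(pfvf->iommu_domain, iova);
    hard_start = (unsigned char *)phys_to_virt(pa);

    xdp_init_buff(&xdp, pfvf->rbsize, &cq->xdp_rxq);   /* assumed frame size/rxq */
    xdp_prepare_buff(&xdp, hard_start, OTX2_HEAD_ROOM,
                     cqe->sg.seg_size, false);
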