Lines matching identifier: sg (struct ib_sge usage in the mlx5 UMR XLT update path)
491 struct ib_sge *sg) in mlx5r_umr_unmap_free_xlt() argument
495 dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE); in mlx5r_umr_unmap_free_xlt()
496 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_unmap_free_xlt()
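The two body lines above form the teardown half of the XLT buffer lifecycle: the buffer is unmapped from the device first, then freed, with sg->length acting as the single record of the mapped size. A minimal reconstruction of the whole helper, assuming the DMA device is reached through the mlx5 core PCI device as elsewhere in this driver:

static void mlx5r_umr_unmap_free_xlt(struct mlx5_ib_dev *dev, void *xlt,
				     struct ib_sge *sg)
{
	struct device *ddev = &dev->mdev->pdev->dev;	/* assumed DMA device */

	/* Undo the dma_map_single() performed at creation time ... */
	dma_unmap_single(ddev, sg->addr, sg->length, DMA_TO_DEVICE);
	/* ... then release the translation-table buffer itself. */
	mlx5r_umr_free_xlt(xlt, sg->length);
}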
502 static void *mlx5r_umr_create_xlt(struct mlx5_ib_dev *dev, struct ib_sge *sg, in mlx5r_umr_create_xlt() argument
513 sg->length = nents * ent_size; in mlx5r_umr_create_xlt()
514 dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE); in mlx5r_umr_create_xlt()
517 mlx5r_umr_free_xlt(xlt, sg->length); in mlx5r_umr_create_xlt()
520 sg->addr = dma; in mlx5r_umr_create_xlt()
521 sg->lkey = dev->umrc.pd->local_dma_lkey; in mlx5r_umr_create_xlt()
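Lines 513-521 are the setup half that the teardown above mirrors: the scatter/gather element is sized to nents * ent_size, the buffer is DMA-mapped toward the device, and on success the ib_sge is filled with the bus address and the UMR PD's local DMA lkey, so no memory region has to be registered for the buffer itself. A hedged sketch of that sequence; the error path and return convention are assumptions inferred from the free call on line 517:

	sg->length = nents * ent_size;
	dma = dma_map_single(ddev, xlt, sg->length, DMA_TO_DEVICE);
	if (dma_mapping_error(ddev, dma)) {		/* mapping can fail */
		mlx5r_umr_free_xlt(xlt, sg->length);	/* roll back (line 517) */
		return NULL;			/* assumption: callers check for NULL */
	}
	sg->addr = dma;					/* bus address for the WQE */
	sg->lkey = dev->umrc.pd->local_dma_lkey;	/* local DMA key */
	return xlt;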
528 unsigned int flags, struct ib_sge *sg) in mlx5r_umr_set_update_xlt_ctrl_seg() argument
537 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_set_update_xlt_ctrl_seg()
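The control segment expresses the transfer length in octowords rather than bytes. mlx5r_umr_get_xlt_octo is a small helper in this driver; a sketch of it, assuming the 16-byte octoword unit used by the mlx5 UMR format:

#define MLX5_IB_UMR_OCTOWORD	16	/* assumption: 16-byte octoword */

static inline u64 mlx5r_umr_get_xlt_octo(u64 bytes)
{
	/* Hardware counts XLT length in octowords, rounded up. */
	return ALIGN(bytes, MLX5_IB_UMR_OCTOWORD) / MLX5_IB_UMR_OCTOWORD;
}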
556 struct ib_sge *sg) in mlx5r_umr_set_update_xlt_data_seg() argument
558 data_seg->byte_count = cpu_to_be32(sg->length); in mlx5r_umr_set_update_xlt_data_seg()
559 data_seg->lkey = cpu_to_be32(sg->lkey); in mlx5r_umr_set_update_xlt_data_seg()
560 data_seg->addr = cpu_to_be64(sg->addr); in mlx5r_umr_set_update_xlt_data_seg()
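The data segment is a field-for-field big-endian copy of the ib_sge. For reference, the WQE data segment being filled here (struct mlx5_wqe_data_seg from the mlx5 headers) carries exactly those three fields:

struct mlx5_wqe_data_seg {
	__be32	byte_count;	/* <- sg->length */
	__be32	lkey;		/* <- sg->lkey  */
	__be64	addr;		/* <- sg->addr  */
};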
575 struct mlx5_ib_mr *mr, struct ib_sge *sg, in mlx5r_umr_final_update_xlt() argument
602 cpu_to_be16(mlx5r_umr_get_xlt_octo(sg->length)); in mlx5r_umr_final_update_xlt()
603 wqe->data_seg.byte_count = cpu_to_be32(sg->length); in mlx5r_umr_final_update_xlt()
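For the last chunk of an update, both the control segment's octoword count and the data segment's byte count are re-derived from sg->length, which the callers below first trim to the final chunk's aligned size. Assuming MLX5_UMR_FLEX_ALIGNMENT is 0x40 (64 bytes), the arithmetic for a 1000-byte tail works out as:

	sg->length = ALIGN(1000, MLX5_UMR_FLEX_ALIGNMENT);	/* = 1024 */
	/* 24 pad bytes are zeroed by the caller (the memset on line 671),
	 * and the final WQE advertises 1024 / 16 = 64 octowords. */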
621 struct ib_sge sg; in mlx5r_umr_update_mr_pas() local
629 dev, &sg, ib_umem_num_dma_blocks(mr->umem, 1 << mr->page_shift), in mlx5r_umr_update_mr_pas()
634 orig_sg_length = sg.length; in mlx5r_umr_update_mr_pas()
636 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_mr_pas()
639 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_mr_pas()
643 if (cur_mtt == (void *)mtt + sg.length) { in mlx5r_umr_update_mr_pas()
644 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_mr_pas()
651 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_mr_pas()
653 offset += sg.length; in mlx5r_umr_update_mr_pas()
670 sg.length = ALIGN(final_size, MLX5_UMR_FLEX_ALIGNMENT); in mlx5r_umr_update_mr_pas()
671 memset(cur_mtt, 0, sg.length - final_size); in mlx5r_umr_update_mr_pas()
672 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_mr_pas()
674 dma_sync_single_for_device(ddev, sg.addr, sg.length, DMA_TO_DEVICE); in mlx5r_umr_update_mr_pas()
678 sg.length = orig_sg_length; in mlx5r_umr_update_mr_pas()
679 mlx5r_umr_unmap_free_xlt(dev, mtt, &sg); in mlx5r_umr_update_mr_pas()
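The update_mr_pas loop streams the memory region's page addresses through the single mapped buffer in chunks: ownership bounces between CPU and device via the two dma_sync_single_* calls, and whenever cur_mtt reaches the end of the buffer (line 643) the filled chunk is flushed and posted as a UMR work request before refilling. Note the bookkeeping on lines 670 and 678: sg.length is shrunk to the aligned size of the final partial chunk for the last WQE, then restored to orig_sg_length so that dma_unmap_single() is called with the size that was originally mapped. A hedged sketch of the loop's shape; the post-and-wait helper and the MTT entry format are assumptions modeled on current kernels:

	cur_mtt = mtt;
	rdma_umem_for_each_dma_block(mr->umem, &biter, 1UL << mr->page_shift) {
		if (cur_mtt == (void *)mtt + sg.length) {
			/* Chunk full: hand the buffer to the device ... */
			dma_sync_single_for_device(ddev, sg.addr, sg.length,
						   DMA_TO_DEVICE);
			err = mlx5r_umr_post_send_wait(dev, mr->mmkey.key,
						       &wqe, true);	/* assumed helper */
			if (err)
				goto err;
			/* ... then take it back for the CPU and refill. */
			dma_sync_single_for_cpu(ddev, sg.addr, sg.length,
						DMA_TO_DEVICE);
			offset += sg.length;
			cur_mtt = mtt;
		}
		cur_mtt->ptag = cpu_to_be64(rdma_block_iter_dma_address(&biter) |
					    MLX5_IB_MTT_PRESENT);	/* assumed entry format */
		cur_mtt++;
	}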
704 struct ib_sge sg; in mlx5r_umr_update_xlt() local
724 xlt = mlx5r_umr_create_xlt(dev, &sg, npages, desc_size, flags); in mlx5r_umr_update_xlt()
728 pages_iter = sg.length / desc_size; in mlx5r_umr_update_xlt()
729 orig_sg_length = sg.length; in mlx5r_umr_update_xlt()
738 mlx5r_umr_set_update_xlt_ctrl_seg(&wqe.ctrl_seg, flags, &sg); in mlx5r_umr_update_xlt()
740 mlx5r_umr_set_update_xlt_data_seg(&wqe.data_seg, &sg); in mlx5r_umr_update_xlt()
747 dma_sync_single_for_cpu(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
750 dma_sync_single_for_device(ddev, sg.addr, sg.length, in mlx5r_umr_update_xlt()
752 sg.length = ALIGN(size_to_map, MLX5_UMR_FLEX_ALIGNMENT); in mlx5r_umr_update_xlt()
755 mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags); in mlx5r_umr_update_xlt()
759 sg.length = orig_sg_length; in mlx5r_umr_update_xlt()
760 mlx5r_umr_unmap_free_xlt(dev, xlt, &sg); in mlx5r_umr_update_xlt()
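mlx5r_umr_update_xlt follows the same chunked pattern with variable-size descriptors: pages_iter (line 728) is how many desc_size entries fit in one mapped buffer; each pass syncs the buffer to the CPU, fills up to that many descriptors, syncs it back to the device, and trims sg.length to the flex-aligned size actually filled (line 752); the pass that covers the last pages goes through mlx5r_umr_final_update_xlt before posting. A sketch of the chunking arithmetic, with the descriptor-fill step left abstract because the matched lines do not show it:

	pages_iter = sg.length / desc_size;	/* descriptors per posted chunk */
	for (pages_mapped = 0; pages_mapped < npages;
	     pages_mapped += pages_iter, idx += pages_iter) {
		size_t npages_to_map = min_t(size_t, pages_iter,
					     npages - pages_mapped);
		size_t size_to_map = npages_to_map * desc_size;

		dma_sync_single_for_cpu(ddev, sg.addr, sg.length, DMA_TO_DEVICE);
		/* fill npages_to_map descriptors into xlt here (not shown
		 * in the matched lines; the fill helper is an assumption) */
		dma_sync_single_for_device(ddev, sg.addr, sg.length,
					   DMA_TO_DEVICE);
		sg.length = ALIGN(size_to_map, MLX5_UMR_FLEX_ALIGNMENT);

		if (pages_mapped + pages_iter >= npages)
			mlx5r_umr_final_update_xlt(dev, &wqe, mr, &sg, flags);
		/* post the UMR WQE and wait for completion (assumed) */
	}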