
Searched refs:sgt (Results 1 – 25 of 272) sorted by relevance
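Most of the hits on this page follow the same sg_table lifecycle: build a scatterlist table, map it for DMA, walk or sync the mapped entries, then unmap and free the table. As a rough orientation before the per-file hits, the sketch below strings those calls together; it is a minimal illustration only, not code taken from any of the drivers listed, and the device, page array and size parameters are placeholders.

    /* Minimal sketch of the sg_table pattern recurring in the hits below. */
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int example_map_sgt(struct device *dev, struct page **pages,
                               unsigned int n_pages, size_t size)
    {
            struct sg_table sgt;
            struct scatterlist *sg;
            unsigned int i;
            int ret;

            /* Build a scatterlist covering the pages (cf. the sg_alloc_table_* hits). */
            ret = sg_alloc_table_from_pages(&sgt, pages, n_pages, 0, size, GFP_KERNEL);
            if (ret)
                    return ret;

            /* Map the whole table for device DMA; on success this fills sgt.nents. */
            ret = dma_map_sgtable(dev, &sgt, DMA_TO_DEVICE, 0);
            if (ret)
                    goto free_table;

            /* Walk the DMA-mapped chunks, as the for_each_sgtable_dma_sg() hits do. */
            for_each_sgtable_dma_sg(&sgt, sg, i)
                    pr_debug("chunk %u: %pad + %u\n", i,
                             &sg_dma_address(sg), sg_dma_len(sg));

            dma_unmap_sgtable(dev, &sgt, DMA_TO_DEVICE, 0);
    free_table:
            sg_free_table(&sgt);
            return ret;
    }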


/linux/drivers/media/common/videobuf2/
videobuf2-dma-contig.c
53 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) in vb2_dc_get_contiguous_size() argument
56 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
60 for_each_sgtable_dma_sg(sgt, s, i) { in vb2_dc_get_contiguous_size()
126 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare() local
140 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dc_prepare()
146 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish() local
160 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dc_finish()
314 struct sg_table sgt; member
324 struct sg_table *sgt; in vb2_dc_dmabuf_ops_attach() local
332 sgt = &attach->sgt; in vb2_dc_dmabuf_ops_attach()
[all …]
videobuf2-dma-sg.c
105 struct sg_table *sgt; in vb2_dma_sg_alloc() local
145 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
150 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_alloc()
182 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put() local
188 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_put()
204 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare() local
209 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_prepare()
215 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish() local
220 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_finish()
227 struct sg_table *sgt; in vb2_dma_sg_get_userptr() local
[all …]
videobuf2-vmalloc.c
208 struct sg_table sgt; member
218 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_attach() local
228 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_attach()
229 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach()
234 for_each_sgtable_sg(sgt, sg, i) { in vb2_vmalloc_dmabuf_ops_attach()
238 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_attach()
255 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_detach() local
260 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_detach()
264 dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0); in vb2_vmalloc_dmabuf_ops_detach()
265 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_detach()
[all …]
/linux/drivers/gpu/drm/i915/
i915_mm.c
37 struct sgt_iter sgt; member
46 return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT; in sgt_pfn()
48 return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT); in sgt_pfn()
55 if (GEM_WARN_ON(!r->sgt.sgp)) in remap_sg()
63 r->sgt.curr += PAGE_SIZE; in remap_sg()
64 if (r->sgt.curr >= r->sgt.max) in remap_sg()
65 r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase)); in remap_sg()
138 .sgt = __sgt_iter(sgl, use_dma(iobase)), in remap_io_sg()
146 while (offset >= r.sgt.max >> PAGE_SHIFT) { in remap_io_sg()
147 offset -= r.sgt.max >> PAGE_SHIFT; in remap_io_sg()
[all …]
/linux/drivers/gpu/drm/tegra/
gem.c
47 static inline unsigned int sgt_dma_count_chunks(struct sg_table *sgt) in sgt_dma_count_chunks() argument
49 return sg_dma_count_chunks(sgt->sgl, sgt->nents); in sgt_dma_count_chunks()
88 map->sgt = dma_buf_map_attachment_unlocked(map->attach, direction); in tegra_bo_pin()
89 if (IS_ERR(map->sgt)) { in tegra_bo_pin()
91 err = PTR_ERR(map->sgt); in tegra_bo_pin()
92 map->sgt = NULL; in tegra_bo_pin()
96 err = sgt_dma_count_chunks(map->sgt); in tegra_bo_pin()
106 map->sgt = kzalloc_obj(*map->sgt); in tegra_bo_pin()
107 if (!map->sgt) { in tegra_bo_pin()
117 err = sg_alloc_table_from_pages(map->sgt, obj->pages, obj->num_pages, 0, gem->size, in tegra_bo_pin()
[all …]
/linux/drivers/gpu/drm/tests/
drm_gem_shmem_test.c
75 struct sg_table *sgt; in drm_gem_shmem_test_obj_create_private() local
83 sgt = kzalloc_obj(*sgt); in drm_gem_shmem_test_obj_create_private()
84 KUNIT_ASSERT_NOT_NULL(test, sgt); in drm_gem_shmem_test_obj_create_private()
86 ret = kunit_add_action_or_reset(test, kfree_wrapper, sgt); in drm_gem_shmem_test_obj_create_private()
89 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); in drm_gem_shmem_test_obj_create_private()
92 ret = kunit_add_action_or_reset(test, sg_free_table_wrapper, sgt); in drm_gem_shmem_test_obj_create_private()
95 sg_init_one(sgt->sgl, buf, TEST_SIZE); in drm_gem_shmem_test_obj_create_private()
105 ret = dma_map_sgtable(drm_dev->dev, sgt, DMA_BIDIRECTIONAL, 0); in drm_gem_shmem_test_obj_create_private()
112 gem_obj = drm_gem_shmem_prime_import_sg_table(drm_dev, &attach_mock, sgt); in drm_gem_shmem_test_obj_create_private()
119 kunit_remove_action(test, sg_free_table_wrapper, sgt); in drm_gem_shmem_test_obj_create_private()
[all …]
/linux/drivers/gpu/drm/armada/
armada_gem.c
69 if (dobj->sgt) in armada_gem_free_object()
71 dobj->sgt, DMA_TO_DEVICE); in armada_gem_free_object()
393 struct sg_table *sgt; in armada_gem_prime_map_dma_buf() local
396 sgt = kmalloc_obj(*sgt); in armada_gem_prime_map_dma_buf()
397 if (!sgt) in armada_gem_prime_map_dma_buf()
405 if (sg_alloc_table(sgt, count, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
410 for_each_sgtable_sg(sgt, sg, i) { in armada_gem_prime_map_dma_buf()
420 if (dma_map_sgtable(attach->dev, sgt, dir, 0)) in armada_gem_prime_map_dma_buf()
424 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
427 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); in armada_gem_prime_map_dma_buf()
[all …]
/linux/drivers/accel/ivpu/
ivpu_gem_userptr.c
26 struct sg_table *sgt = attachment->dmabuf->priv; in ivpu_gem_userptr_dmabuf_map() local
29 ret = dma_map_sgtable(attachment->dev, sgt, direction, DMA_ATTR_SKIP_CPU_SYNC); in ivpu_gem_userptr_dmabuf_map()
33 return sgt; in ivpu_gem_userptr_dmabuf_map()
37 struct sg_table *sgt, in ivpu_gem_userptr_dmabuf_unmap() argument
40 dma_unmap_sgtable(attachment->dev, sgt, direction, DMA_ATTR_SKIP_CPU_SYNC); in ivpu_gem_userptr_dmabuf_unmap()
45 struct sg_table *sgt = dma_buf->priv; in ivpu_gem_userptr_dmabuf_release() local
49 for_each_sgtable_page(sgt, &page_iter, 0) { in ivpu_gem_userptr_dmabuf_release()
54 sg_free_table(sgt); in ivpu_gem_userptr_dmabuf_release()
55 kfree(sgt); in ivpu_gem_userptr_dmabuf_release()
70 struct sg_table *sgt; in ivpu_create_userptr_dmabuf() local
[all …]
/linux/drivers/gpu/drm/virtio/
virtgpu_vram.c
78 struct sg_table *sgt; in virtio_gpu_vram_map_dma_buf() local
82 sgt = kzalloc_obj(*sgt); in virtio_gpu_vram_map_dma_buf()
83 if (!sgt) in virtio_gpu_vram_map_dma_buf()
93 return sgt; in virtio_gpu_vram_map_dma_buf()
96 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); in virtio_gpu_vram_map_dma_buf()
107 sg_set_page(sgt->sgl, NULL, vram->vram_node.size, 0); in virtio_gpu_vram_map_dma_buf()
108 sg_dma_address(sgt->sgl) = addr; in virtio_gpu_vram_map_dma_buf()
109 sg_dma_len(sgt->sgl) = vram->vram_node.size; in virtio_gpu_vram_map_dma_buf()
111 return sgt; in virtio_gpu_vram_map_dma_buf()
113 sg_free_table(sgt); in virtio_gpu_vram_map_dma_buf()
[all …]
virtgpu_prime.c
62 struct sg_table *sgt, in virtgpu_gem_unmap_dma_buf() argument
69 virtio_gpu_vram_unmap_dma_buf(attach->dev, sgt, dir); in virtgpu_gem_unmap_dma_buf()
73 drm_gem_unmap_dma_buf(attach, sgt, dir); in virtgpu_gem_unmap_dma_buf()
152 struct sg_table *sgt; in virtgpu_dma_buf_import_sgt() local
163 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); in virtgpu_dma_buf_import_sgt()
164 if (IS_ERR(sgt)) in virtgpu_dma_buf_import_sgt()
165 return PTR_ERR(sgt); in virtgpu_dma_buf_import_sgt()
167 *ents = kvmalloc_objs(struct virtio_gpu_mem_entry, sgt->nents); in virtgpu_dma_buf_import_sgt()
169 dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL); in virtgpu_dma_buf_import_sgt()
173 *nents = sgt->nents; in virtgpu_dma_buf_import_sgt()
[all …]
/linux/drivers/xen/
gntdev-dmabuf.c
46 struct sg_table *sgt; member
65 struct sg_table *sgt; member
198 struct sg_table *sgt; in dmabuf_pages_to_sgt() local
201 sgt = kmalloc_obj(*sgt); in dmabuf_pages_to_sgt()
202 if (!sgt) { in dmabuf_pages_to_sgt()
207 ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0, in dmabuf_pages_to_sgt()
213 return sgt; in dmabuf_pages_to_sgt()
216 kfree(sgt); in dmabuf_pages_to_sgt()
240 struct sg_table *sgt = gntdev_dmabuf_attach->sgt; in dmabuf_exp_ops_detach() local
242 if (sgt) { in dmabuf_exp_ops_detach()
[all …]
/linux/net/ceph/
crypto.c
206 static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg, in setup_sgtable() argument
218 memset(sgt, 0, sizeof(*sgt)); in setup_sgtable()
228 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS); in setup_sgtable()
234 sgt->sgl = prealloc_sg; in setup_sgtable()
235 sgt->nents = sgt->orig_nents = 1; in setup_sgtable()
238 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in setup_sgtable()
258 static void teardown_sgtable(struct sg_table *sgt) in teardown_sgtable() argument
260 if (sgt->orig_nents > 1) in teardown_sgtable()
261 sg_free_table(sgt); in teardown_sgtable()
268 struct sg_table sgt; in ceph_aes_crypt() local
[all …]
/linux/drivers/gpu/drm/mediatek/
mtk_gem.c
28 if (dma_obj->sgt) in mtk_gem_free_object()
29 drm_prime_gem_destroy(obj, dma_obj->sgt); in mtk_gem_free_object()
50 struct sg_table *sgt; in mtk_gem_prime_get_sg_table() local
53 sgt = kzalloc_obj(*sgt); in mtk_gem_prime_get_sg_table()
54 if (!sgt) in mtk_gem_prime_get_sg_table()
57 ret = dma_get_sgtable(priv->dma_dev, sgt, dma_obj->vaddr, in mtk_gem_prime_get_sg_table()
61 kfree(sgt); in mtk_gem_prime_get_sg_table()
65 return sgt; in mtk_gem_prime_get_sg_table()
213 struct dma_buf_attachment *attach, struct sg_table *sgt) in mtk_gem_prime_import_sg_table() argument
218 if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) { in mtk_gem_prime_import_sg_table()
[all …]
/linux/drivers/hwtracing/intel_th/
msu-sink.c
51 static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size) in msu_sink_alloc_window() argument
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window()
68 priv->sgts[priv->nr_sgts++] = *sgt; in msu_sink_alloc_window()
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window()
84 static void msu_sink_free_window(void *data, struct sg_table *sgt) in msu_sink_free_window() argument
90 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window()
95 sg_free_table(sgt); in msu_sink_free_window()
99 static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes) in msu_sink_ready() argument
103 intel_th_msc_window_unlock(priv->dev, sgt); in msu_sink_ready()
/linux/rust/kernel/
scatterlist.rs
185 sgt: NonNull<bindings::sg_table>, field
204 sgt: NonNull<bindings::sg_table>, in new()
213 bindings::dma_map_sgtable(dev.as_raw(), sgt.as_ptr(), dir.into(), 0) in new()
219 sgt, in new()
237 bindings::dma_unmap_sgtable(self.dev.as_raw(), self.sgt.as_ptr(), self.dir.into(), 0) in drop()
273 let sgt = Opaque::zeroed(); in new() localVariable
279 sgt.get(), in new()
289 Ok(Self(sgt)) in new()
322 sgt: RawSGTable, field
367 sgt: unsafe { RawSGTable::new(&mut page_vec, size, max_segment, flags) }?, in new()
[all …]
/linux/kernel/dma/
mapping.c
324 int dma_map_sgtable(struct device *dev, struct sg_table *sgt, in dma_map_sgtable() argument
329 nents = __dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs); in dma_map_sgtable()
332 sgt->nents = nents; in dma_map_sgtable()
512 int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, in dma_get_sgtable_attrs() argument
519 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
522 return iommu_dma_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
526 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
774 struct sg_table *sgt; in alloc_single_sgt() local
777 sgt = kmalloc_obj(*sgt, gfp); in alloc_single_sgt()
778 if (!sgt) in alloc_single_sgt()
[all …]
/linux/drivers/media/platform/nvidia/tegra-vde/
dmabuf-cache.c
27 struct sg_table *sgt; member
41 dma_buf_unmap_attachment_unlocked(entry->a, entry->sgt, entry->dma_dir); in tegra_vde_release_entry()
72 struct sg_table *sgt; in tegra_vde_dmabuf_cache_map() local
93 *addrp = sg_dma_address(entry->sgt->sgl); in tegra_vde_dmabuf_cache_map()
105 sgt = dma_buf_map_attachment_unlocked(attachment, dma_dir); in tegra_vde_dmabuf_cache_map()
106 if (IS_ERR(sgt)) { in tegra_vde_dmabuf_cache_map()
108 err = PTR_ERR(sgt); in tegra_vde_dmabuf_cache_map()
112 if (!vde->domain && sgt->nents > 1) { in tegra_vde_dmabuf_cache_map()
125 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map()
131 *addrp = sg_dma_address(sgt->sgl); in tegra_vde_dmabuf_cache_map()
[all …]
/linux/drivers/gpu/drm/i915/gem/
i915_gem_dmabuf.c
31 struct sg_table *sgt; in i915_gem_map_dma_buf() local
39 sgt = kmalloc_obj(*sgt); in i915_gem_map_dma_buf()
40 if (!sgt) { in i915_gem_map_dma_buf()
45 ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL); in i915_gem_map_dma_buf()
49 dst = sgt->sgl; in i915_gem_map_dma_buf()
55 ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC); in i915_gem_map_dma_buf()
59 return sgt; in i915_gem_map_dma_buf()
62 sg_free_table(sgt); in i915_gem_map_dma_buf()
64 kfree(sgt); in i915_gem_map_dma_buf()
239 struct sg_table *sgt; in i915_gem_object_get_pages_dmabuf() local
[all …]
/linux/include/linux/
scatterlist.h
46 struct sg_table sgt; /* The scatter list table */ member
224 #define for_each_sgtable_sg(sgt, sg, i) \ argument
225 for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
232 #define for_each_sgtable_dma_sg(sgt, sg, i) \ argument
233 for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
461 void sg_free_append_table(struct sg_append_table *sgt);
465 int sg_alloc_append_table_from_pages(struct sg_append_table *sgt,
470 int sg_alloc_table_from_pages_segment(struct sg_table *sgt, struct page **pages,
495 static inline int sg_alloc_table_from_pages(struct sg_table *sgt, in sg_alloc_table_from_pages() argument
501 return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, offset, in sg_alloc_table_from_pages()
[all …]
/linux/drivers/dma-buf/
dma-buf-mapping.c
64 struct sg_table sgt; member
136 ret = sg_alloc_table(&dma->sgt, nents, GFP_KERNEL | __GFP_ZERO); in dma_buf_phys_vec_to_sgt()
140 sgl = dma->sgt.sgl; in dma_buf_phys_vec_to_sgt()
183 dma->sgt.orig_nents = 0; in dma_buf_phys_vec_to_sgt()
191 return &dma->sgt; in dma_buf_phys_vec_to_sgt()
200 for_each_sgtable_dma_sg(&dma->sgt, sgl, i) in dma_buf_phys_vec_to_sgt()
204 sg_free_table(&dma->sgt); in dma_buf_phys_vec_to_sgt()
222 void dma_buf_free_sgt(struct dma_buf_attachment *attach, struct sg_table *sgt, in dma_buf_free_sgt() argument
225 struct dma_buf_dma *dma = container_of(sgt, struct dma_buf_dma, sgt); in dma_buf_free_sgt()
238 for_each_sgtable_dma_sg(sgt, sgl, i) in dma_buf_free_sgt()
[all …]
/linux/drivers/gpu/drm/panthor/
panthor_gem.c
243 struct sg_table *sgt = drm_gem_map_dma_buf(attach, dir); in panthor_gem_prime_map_dma_buf() local
245 if (!IS_ERR(sgt)) in panthor_gem_prime_map_dma_buf()
246 attach->priv = sgt; in panthor_gem_prime_map_dma_buf()
248 return sgt; in panthor_gem_prime_map_dma_buf()
253 struct sg_table *sgt, in panthor_gem_prime_unmap_dma_buf() argument
257 drm_gem_unmap_dma_buf(attach, sgt, dir); in panthor_gem_prime_unmap_dma_buf()
270 if (shmem->sgt) in panthor_gem_prime_begin_cpu_access()
271 dma_sync_sgtable_for_cpu(dev->dev, shmem->sgt, dir); in panthor_gem_prime_begin_cpu_access()
277 struct sg_table *sgt = attach->priv; in panthor_gem_prime_begin_cpu_access() local
279 if (sgt) in panthor_gem_prime_begin_cpu_access()
[all …]
/linux/drivers/gpu/drm/
drm_prime.c
670 struct sg_table *sgt; in drm_gem_map_dma_buf() local
679 sgt = obj->funcs->get_sg_table(obj); in drm_gem_map_dma_buf()
680 if (IS_ERR(sgt)) in drm_gem_map_dma_buf()
681 return sgt; in drm_gem_map_dma_buf()
683 ret = dma_map_sgtable(attach->dev, sgt, dir, in drm_gem_map_dma_buf()
686 sg_free_table(sgt); in drm_gem_map_dma_buf()
687 kfree(sgt); in drm_gem_map_dma_buf()
688 sgt = ERR_PTR(ret); in drm_gem_map_dma_buf()
691 return sgt; in drm_gem_map_dma_buf()
704 struct sg_table *sgt, in drm_gem_unmap_dma_buf() argument
[all …]
drm_gem_shmem_helper.c
173 drm_prime_gem_destroy(obj, shmem->sgt); in drm_gem_shmem_release()
179 if (shmem->sgt) { in drm_gem_shmem_release()
180 dma_unmap_sgtable(obj->dev->dev, shmem->sgt, in drm_gem_shmem_release()
182 sg_free_table(shmem->sgt); in drm_gem_shmem_release()
183 kfree(shmem->sgt); in drm_gem_shmem_release()
500 dma_unmap_sgtable(dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0); in drm_gem_shmem_purge_locked()
501 sg_free_table(shmem->sgt); in drm_gem_shmem_purge_locked()
502 kfree(shmem->sgt); in drm_gem_shmem_purge_locked()
503 shmem->sgt = NULL; in drm_gem_shmem_purge_locked()
756 struct sg_table *sgt; in drm_gem_shmem_get_pages_sgt_locked() local
[all …]
/linux/drivers/gpu/drm/xe/
xe_dma_buf.c
108 struct sg_table *sgt; in xe_dma_buf_map() local
125 sgt = drm_prime_pages_to_sg(obj->dev, in xe_dma_buf_map()
128 if (IS_ERR(sgt)) in xe_dma_buf_map()
129 return sgt; in xe_dma_buf_map()
131 if (dma_map_sgtable(attach->dev, sgt, dir, in xe_dma_buf_map()
141 dir, &sgt); in xe_dma_buf_map()
149 return sgt; in xe_dma_buf_map()
152 sg_free_table(sgt); in xe_dma_buf_map()
153 kfree(sgt); in xe_dma_buf_map()
158 struct sg_table *sgt, in xe_dma_buf_unmap() argument
[all …]
/linux/drivers/infiniband/core/
umem_dmabuf.c
17 struct sg_table *sgt; in ib_umem_dmabuf_map_pages() local
29 if (umem_dmabuf->sgt) in ib_umem_dmabuf_map_pages()
32 sgt = dma_buf_map_attachment(umem_dmabuf->attach, in ib_umem_dmabuf_map_pages()
34 if (IS_ERR(sgt)) in ib_umem_dmabuf_map_pages()
35 return PTR_ERR(sgt); in ib_umem_dmabuf_map_pages()
42 for_each_sgtable_dma_sg(sgt, sg, i) { in ib_umem_dmabuf_map_pages()
65 umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg; in ib_umem_dmabuf_map_pages()
66 umem_dmabuf->umem.sgt_append.sgt.nents = nmap; in ib_umem_dmabuf_map_pages()
67 umem_dmabuf->sgt = sgt; in ib_umem_dmabuf_map_pages()
90 if (!umem_dmabuf->sgt) in ib_umem_dmabuf_unmap_pages()
[all …]
