
Searched full:vmm (Results 1 – 25 of 176) sorted by relevance


/linux-6.8/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
23 #include "vmm.h"
75 struct nvkm_vmm *vmm; member
113 VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \
129 if (it->vmm->func->flush) { in nvkm_vmm_flush()
131 it->vmm->func->flush(it->vmm, it->flush); in nvkm_vmm_flush()
145 struct nvkm_vmm *vmm = it->vmm; in nvkm_vmm_unref_pdes() local
159 func->sparse(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
162 func->unmap(vmm, pgd->pt[0], pdei, 1); in nvkm_vmm_unref_pdes()
170 func->pde(vmm, pgd, pdei); in nvkm_vmm_unref_pdes()
177 func->pde(vmm, pgd, pdei); in nvkm_vmm_unref_pdes()
[all …]
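
The flush and unref paths above dispatch every mutating step through a per-backend function table, calling an op only when the backend provides one. A minimal sketch of that pattern, using hypothetical abbreviated types rather than the real nvkm structs:

/* Hypothetical stand-ins; the real types are struct nvkm_vmm and
 * struct nvkm_vmm_func, declared in vmm.h. */
struct vmm;

struct vmm_func {
	void (*flush)(struct vmm *vmm, int depth);	/* optional, may be NULL */
};

struct vmm {
	const struct vmm_func *func;
};

static void vmm_flush(struct vmm *vmm, int depth)
{
	/* mirrors nvkm_vmm_flush(): only call into the backend if it
	 * actually implements a TLB flush */
	if (vmm->func->flush)
		vmm->func->flush(vmm, depth);
}
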
uvmm.c
42 return nvkm_vmm_ref(nvkm_uvmm(object)->vmm); in nvkm_uvmm_search()
51 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_pfnclr() local
61 if (nvkm_vmm_in_managed_range(vmm, addr, size) && vmm->managed.raw) in nvkm_uvmm_mthd_pfnclr()
65 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnclr()
66 ret = nvkm_vmm_pfn_unmap(vmm, addr, size); in nvkm_uvmm_mthd_pfnclr()
67 mutex_unlock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnclr()
79 struct nvkm_vmm *vmm = uvmm->vmm; in nvkm_uvmm_mthd_pfnmap() local
94 if (nvkm_vmm_in_managed_range(vmm, addr, size) && vmm->managed.raw) in nvkm_uvmm_mthd_pfnmap()
98 mutex_lock(&vmm->mutex.vmm); in nvkm_uvmm_mthd_pfnmap()
99 ret = nvkm_vmm_pfn_map(vmm, page, addr, size, phys); in nvkm_uvmm_mthd_pfnmap()
[all …]
vmmnv44.c
22 #include "vmm.h"
27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_fill() argument
39 u32 addr = (list ? *list++ : vmm->null) >> 12; in nv44_vmm_pgt_fill()
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
69 VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000); in nv44_vmm_pgt_fill()
73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_pte() argument
82 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten); in nv44_vmm_pgt_pte()
90 VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27); in nv44_vmm_pgt_pte()
[all …]
r535.c
22 #include "vmm.h"
30 r535_mmu_promote_vmm(struct nvkm_vmm *vmm) in r535_mmu_promote_vmm() argument
35 ret = nvkm_gsp_client_device_ctor(vmm->mmu->subdev.device->gsp, in r535_mmu_promote_vmm()
36 &vmm->rm.client, &vmm->rm.device); in r535_mmu_promote_vmm()
40 args = nvkm_gsp_rm_alloc_get(&vmm->rm.device.object, 0x90f10000, FERMI_VASPACE_A, in r535_mmu_promote_vmm()
41 sizeof(*args), &vmm->rm.object); in r535_mmu_promote_vmm()
47 ret = nvkm_gsp_rm_alloc_wr(&vmm->rm.object, args); in r535_mmu_promote_vmm()
54 mutex_lock(&vmm->mutex.vmm); in r535_mmu_promote_vmm()
55 ret = nvkm_vmm_get_locked(vmm, true, false, false, 0x1d, 32, 0x20000000, in r535_mmu_promote_vmm()
56 &vmm->rm.rsvd); in r535_mmu_promote_vmm()
[all …]
vmmgp100.c
22 #include "vmm.h"
34 gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, in gp100_vmm_pfn_unmap() argument
37 struct device *dev = vmm->mmu->subdev.device->dev; in gp100_vmm_pfn_unmap()
55 gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, in gp100_vmm_pfn_clear() argument
65 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); in gp100_vmm_pfn_clear()
75 gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gp100_vmm_pgt_pfn() argument
78 struct device *dev = vmm->mmu->subdev.device->dev; in gp100_vmm_pgt_pfn()
109 VMM_WO064(pt, vmm, ptei++ * 8, data); in gp100_vmm_pgt_pfn()
115 gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gp100_vmm_pgt_pte() argument
123 VMM_WO064(pt, vmm, ptei++ * 8, data); in gp100_vmm_pgt_pte()
[all …]
vmmnv50.c
22 #include "vmm.h"
32 nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_pte() argument
53 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_pte()
58 nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl()
65 nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in nv50_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_dma()
84 nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv50_vmm_pgt_mem() argument
[all …]
vmmgf100.c
22 #include "vmm.h"
32 gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_pte() argument
44 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
51 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_pte()
58 gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl()
65 gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in gf100_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma()
73 VMM_WO064(pt, vmm, ptei++ * 8, data); in gf100_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma()
[all …]
vmmnv04.c
22 #include "vmm.h"
28 nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_pte() argument
33 VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); in nv04_vmm_pgt_pte()
39 nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
46 nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv04_vmm_pgt_dma() argument
52 VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003); in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
60 nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
[all …]
vmmnv41.c
22 #include "vmm.h"
27 nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_pte() argument
32 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_pte()
38 nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
45 nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv41_vmm_pgt_dma() argument
52 VMM_WO032(pt, vmm, ptei++ * 4, data); in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
61 nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
[all …]
vmmtu102.c
22 #include "vmm.h"
27 tu102_vmm_flush(struct nvkm_vmm *vmm, int depth) in tu102_vmm_flush() argument
29 struct nvkm_device *device = vmm->mmu->subdev.device; in tu102_vmm_flush()
33 if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR])) in tu102_vmm_flush()
36 mutex_lock(&vmm->mmu->mutex); in tu102_vmm_flush()
38 if (!vmm->rm.bar2_pdb) in tu102_vmm_flush()
39 nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8); in tu102_vmm_flush()
41 nvkm_wr32(device, 0xb830a0, vmm->rm.bar2_pdb >> 8); in tu102_vmm_flush()
50 mutex_unlock(&vmm->mmu->mutex); in tu102_vmm_flush()
vmmgm200.c
22 #include "vmm.h"
28 gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
53 gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm, in gm200_vmm_pgd_sparse() argument
57 VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes); in gm200_vmm_pgd_sparse()
96 gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base) in gm200_vmm_join_() argument
98 if (vmm->func->page[1].shift == 16) in gm200_vmm_join_()
100 return gf100_vmm_join_(vmm, inst, base); in gm200_vmm_join_()
104 gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst) in gm200_vmm_join() argument
106 return gm200_vmm_join_(vmm, inst, 0); in gm200_vmm_join()
vmm.h
177 int nvkm_vmm_raw_get(struct nvkm_vmm *vmm, u64 addr, u64 size, u8 refd);
178 void nvkm_vmm_raw_put(struct nvkm_vmm *vmm, u64 addr, u64 size, u8 refd);
179 void nvkm_vmm_raw_unmap(struct nvkm_vmm *vmm, u64 addr, u64 size,
184 nvkm_vmm_in_managed_range(struct nvkm_vmm *vmm, u64 start, u64 size) in nvkm_vmm_in_managed_range() argument
186 u64 p_start = vmm->managed.p.addr; in nvkm_vmm_in_managed_range()
187 u64 p_end = p_start + vmm->managed.p.size; in nvkm_vmm_in_managed_range()
188 u64 n_start = vmm->managed.n.addr; in nvkm_vmm_in_managed_range()
189 u64 n_end = n_start + vmm->managed.n.size; in nvkm_vmm_in_managed_range()
311 #define VMM_MAP_ITER(VMM,PT,PTEI,PTEN,MAP,FILL,BASE,SIZE,NEXT) do { \ argument
325 VMM_SPAM(VMM, "ITER %08x %08x PTE(s)", PTEI, (u32)_ptes); \
[all …]
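
nvkm_vmm_in_managed_range() is truncated here right after computing the bounds of the two managed windows. A hedged sketch of how the check presumably concludes (the body past those bounds is elided in this result, so the return expression is an assumption): the requested range must sit entirely inside one window or the other.

#include <linux/types.h>

/* Sketch only; bounds named as in the snippet above. */
static inline bool in_managed_range(u64 start, u64 size,
				    u64 p_start, u64 p_end,
				    u64 n_start, u64 n_end)
{
	u64 end = start + size;

	/* entirely inside the "p" window, or entirely inside "n" */
	return (start >= p_start && end <= p_end) ||
	       (start >= n_start && end <= n_end);
}
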
/linux-6.8/drivers/gpu/drm/nouveau/nvif/
vmm.c
22 #include <nvif/vmm.h>
28 nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr) in nvif_vmm_unmap() argument
30 return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP, in nvif_vmm_unmap()
36 nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc, in nvif_vmm_map() argument
57 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP, in nvif_vmm_map()
65 nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma) in nvif_vmm_put() argument
68 WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT, in nvif_vmm_put()
77 nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse, in nvif_vmm_get() argument
98 ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET, in nvif_vmm_get()
108 nvif_vmm_raw_get(struct nvif_vmm *vmm, u64 addr, u64 size, in nvif_vmm_raw_get() argument
[all …]
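
Each of these wrappers marshals its arguments into a small versioned struct and forwards it with nvif_object_mthd() on the VMM object. A sketch of that shape for the unmap path; the argument struct below is hypothetical, since the real nvif layouts are not visible in this result:

/* Hypothetical argument layout, for illustration only. */
struct unmap_args {
	u8  version;
	u8  pad[7];
	u64 addr;
};

static int vmm_unmap_sketch(struct nvif_vmm *vmm, u64 addr)
{
	struct unmap_args args = { .version = 0, .addr = addr };

	/* method call on the underlying object, as nvif_vmm_unmap() does */
	return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,
				&args, sizeof(args));
}
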
/linux-6.8/drivers/gpu/drm/nouveau/
nouveau_vmm.c
32 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr); in nouveau_vma_unmap()
41 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); in nouveau_vma_map()
49 nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm) in nouveau_vma_find() argument
54 if (vma->vmm == vmm) in nouveau_vma_find()
68 nvif_vmm_put(&vma->vmm->vmm, &tmp); in nouveau_vma_del()
77 nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm, in nouveau_vma_new() argument
85 if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) { in nouveau_vma_new()
92 vma->vmm = vmm; in nouveau_vma_new()
101 ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0, in nouveau_vma_new()
109 ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0, in nouveau_vma_new()
[all …]
nouveau_svm.c
29 #include <nvif/vmm.h>
105 NV_DEBUG((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
107 NV_WARN((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
213 mutex_lock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_part()
214 ivmm = nouveau_ivmm_find(svmm->vmm->cli->drm->svm, inst); in nouveau_svmm_part()
219 mutex_unlock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_part()
234 mutex_lock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_join()
235 list_add(&ivmm->head, &svmm->vmm->cli->drm->svm->inst); in nouveau_svmm_join()
236 mutex_unlock(&svmm->vmm->cli->drm->svm->mutex); in nouveau_svmm_join()
246 nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR, in nouveau_svmm_invalidate()
[all …]
nouveau_chan.c
99 nouveau_svmm_part(chan->vmm->svmm, chan->inst); in nouveau_channel_del()
160 chan->vmm = nouveau_cli_vmm(cli); in nouveau_channel_prep()
196 ret = nouveau_vma_new(chan->push.buffer, chan->vmm, in nouveau_channel_prep()
211 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
241 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_prep()
314 args.chan.vmm = 0; in nouveau_channel_ctor()
319 args.chan.vmm = nvif_handle(&chan->vmm->vmm.object); in nouveau_channel_ctor()
402 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
420 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
432 args.limit = chan->vmm->vmm.limit - 1; in nouveau_channel_init()
[all …]
/linux-6.8/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
r535.c
28 #include <subdev/mmu/vmm.h>
77 struct nvkm_vmm *vmm = gf100_bar(bar)->bar[0].vmm; in r535_bar_bar2_init() local
80 WARN_ON(r535_bar_bar2_update_pde(gsp, vmm->pd->pde[0]->pt[0]->addr)); in r535_bar_bar2_init()
81 vmm->rm.bar2_pdb = gsp->bar.rm_bar2_pdb; in r535_bar_bar2_init()
115 struct nvkm_vmm *vmm = gf100_bar(bar)->bar[1].vmm; in r535_bar_bar1_init() local
123 nvkm_memory_unref(&vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
125 ret = nvkm_memory_kmap(pd3, &vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
130 vmm->pd->pt[0]->addr = nvkm_memory_addr(vmm->pd->pt[0]->memory); in r535_bar_bar1_init()
163 rm->bar1.vmm = hw->bar1.vmm; in r535_bar_new_()
167 rm->bar2.vmm = hw->bar2.vmm; in r535_bar_new_()
gf100.c
34 return gf100_bar(base)->bar[1].vmm; in gf100_bar_bar1_vmm()
63 return gf100_bar(base)->bar[0].vmm; in gf100_bar_bar2_vmm()
103 (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm); in gf100_bar_oneinit_bar()
107 atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]); in gf100_bar_oneinit_bar()
108 bar_vm->vmm->debug = bar->base.subdev.debug; in gf100_bar_oneinit_bar()
114 ret = nvkm_vmm_boot(bar_vm->vmm); in gf100_bar_oneinit_bar()
119 return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst); in gf100_bar_oneinit_bar()
153 nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst); in gf100_bar_dtor()
154 nvkm_vmm_unref(&bar->bar[1].vmm); in gf100_bar_dtor()
157 nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst); in gf100_bar_dtor()
[all …]
/linux-6.8/drivers/gpu/drm/nouveau/nvkm/engine/gr/
Dr535.c26 #include <subdev/mmu/vmm.h>
65 struct nvkm_vmm *vmm; member
115 nvkm_vmm_put(grc->vmm, &grc->vma[i]); in r535_gr_chan_dtor()
119 nvkm_vmm_unref(&grc->vmm); in r535_gr_chan_dtor()
129 r535_gr_promote_ctx(struct r535_gr *gr, bool golden, struct nvkm_vmm *vmm, in r535_gr_promote_ctx() argument
137 ctrl = nvkm_gsp_rm_ctrl_get(&vmm->rm.device.subdevice, in r535_gr_promote_ctx()
143 ctrl->hChanClient = vmm->rm.client.object.handle; in r535_gr_promote_ctx()
180 mutex_lock(&vmm->mutex.vmm); in r535_gr_promote_ctx()
181 ret = nvkm_vmm_get_locked(vmm, false, true, false, 0, gr->ctxbuf[i].align, in r535_gr_promote_ctx()
183 mutex_unlock(&vmm->mutex.vmm); in r535_gr_promote_ctx()
[all …]
/linux-6.8/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
chan.c
103 cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm); in nvkm_chan_cctx_get()
288 if (chan->vmm) { in nvkm_chan_del()
289 nvkm_vmm_part(chan->vmm, chan->inst->memory); in nvkm_chan_del()
290 nvkm_vmm_unref(&chan->vmm); in nvkm_chan_del()
350 struct nvkm_cgrp *cgrp, const char *name, bool priv, u32 devm, struct nvkm_vmm *vmm, in nvkm_chan_new_() argument
361 (!func->inst->vmm != !vmm) || in nvkm_chan_new_()
366 RUNL_DEBUG(runl, "args runq:%d:%d vmm:%d:%p userd:%d:%p " in nvkm_chan_new_()
368 runl->func->runqs, runq, func->inst->vmm, vmm, in nvkm_chan_new_()
396 ret = nvkm_cgrp_new(runl, chan->name, vmm, fifo->func->cgrp.force, &chan->cgrp); in nvkm_chan_new_()
404 if (cgrp->runl != runl || cgrp->vmm != vmm) { in nvkm_chan_new_()
[all …]
cgrp.c
102 nvkm_vmm_put(vctx->vmm, &vctx->vma); in nvkm_cgrp_vctx_put()
106 if (vctx->vmm) { in nvkm_cgrp_vctx_put()
107 atomic_dec(&vctx->vmm->engref[engn->engine->subdev.type]); in nvkm_cgrp_vctx_put()
108 nvkm_vmm_unref(&vctx->vmm); in nvkm_cgrp_vctx_put()
128 vctx->ectx->engn == engn && vctx->vmm == chan->vmm); in nvkm_cgrp_vctx_get()
150 vctx->vmm = nvkm_vmm_ref(chan->vmm); in nvkm_cgrp_vctx_get()
155 if (vctx->vmm) in nvkm_cgrp_vctx_get()
156 atomic_inc(&vctx->vmm->engref[engn->engine->subdev.type]); in nvkm_cgrp_vctx_get()
184 nvkm_vmm_unref(&cgrp->vmm); in nvkm_cgrp_del()
222 nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw, in nvkm_cgrp_new() argument
[all …]
ucgrp.c
86 struct nvkm_vmm *vmm; in nvkm_ucgrp_new() local
102 vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm); in nvkm_ucgrp_new()
103 if (IS_ERR(vmm)) in nvkm_ucgrp_new()
104 return PTR_ERR(vmm); in nvkm_ucgrp_new()
115 ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp); in nvkm_ucgrp_new()
123 nvkm_vmm_unref(&vmm); in nvkm_ucgrp_new()
/linux-6.8/drivers/gpu/drm/nouveau/include/nvif/
vmm.h
50 int nvif_vmm_raw_get(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
51 int nvif_vmm_raw_put(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
52 int nvif_vmm_raw_map(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift,
54 int nvif_vmm_raw_unmap(struct nvif_vmm *vmm, u64 addr, u64 size,
56 int nvif_vmm_raw_sparse(struct nvif_vmm *vmm, u64 addr, u64 size, bool ref);
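
The raw interface above hands page-table lifetime to the caller: get references the tables backing a range, put drops them, and sparse toggles sparse mappings. A hedged usage sketch built only from the prototypes whose signatures are fully visible here; the shift value of 12 (4 KiB pages) and the error handling are illustrative assumptions:

static int raw_reserve(struct nvif_vmm *vmm, u64 addr, u64 size)
{
	int ret;

	/* reference page tables covering [addr, addr + size) at an
	 * assumed 4 KiB granularity (shift 12) */
	ret = nvif_vmm_raw_get(vmm, addr, size, 12);
	if (ret)
		return ret;

	/* mark the range sparse so untouched pages do not fault */
	ret = nvif_vmm_raw_sparse(vmm, addr, size, true);
	if (ret)
		nvif_vmm_raw_put(vmm, addr, size, 12);

	return ret;
}
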
/linux-6.8/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv50.c
121 nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm) in nv50_instobj_kmap() argument
138 while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) { in nv50_instobj_kmap()
159 nvkm_vmm_put(vmm, &ebar); in nv50_instobj_kmap()
163 ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0); in nv50_instobj_kmap()
168 nvkm_vmm_put(vmm, &bar); in nv50_instobj_kmap()
179 nvkm_vmm_put(vmm, &iobj->bar); in nv50_instobj_kmap()
184 nv50_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm, in nv50_instobj_map() argument
188 return nvkm_memory_map(memory, offset, vmm, vma, argv, argc); in nv50_instobj_map()
221 struct nvkm_vmm *vmm; in nv50_instobj_acquire() local
238 if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { in nv50_instobj_acquire()
[all …]
/linux-6.8/arch/xtensa/kernel/
syscall.c
60 struct vm_area_struct *vmm; in arch_get_unmapped_area() local
84 for_each_vma(vmi, vmm) { in arch_get_unmapped_area()
85 /* At this point: (addr < vmm->vm_end). */ in arch_get_unmapped_area()
86 if (addr + len <= vm_start_gap(vmm)) in arch_get_unmapped_area()
89 addr = vmm->vm_end; in arch_get_unmapped_area()
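
The snippet ends inside the VMA walk; what is visible is the classic first-fit search over the process address space. A sketch of the complete loop under that assumption (the iterator setup and the break are not shown in this result):

#include <linux/mm.h>

/* Hypothetical wrapper; only the loop body mirrors the snippet. */
static unsigned long first_fit_walk(struct mm_struct *mm,
				    unsigned long addr, unsigned long len)
{
	struct vm_area_struct *vmm;
	VMA_ITERATOR(vmi, mm, addr);

	for_each_vma(vmi, vmm) {
		/* At this point: (addr < vmm->vm_end). */
		if (addr + len <= vm_start_gap(vmm))
			break;		/* the gap before this VMA fits */
		addr = vmm->vm_end;	/* otherwise skip past it */
	}
	return addr;
}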
