Lines Matching full:v0

In nvkm_uvmm_mthd_pfnclr():
    50  struct nvif_vmm_pfnclr_v0 v0;    (member)
    56  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
    57  addr = args->v0.addr;
    58  size = args->v0.size;
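
Every method in this listing follows the same shape: the raw argument buffer is viewed through a union whose v0 member is the versioned layout, nvif_unpack(ret, &argv, &argc, args->v0, ...) accepts it only if the size and version check out, and the handler then lifts the fields it needs into locals. The standalone C sketch below models that flow; the struct layout and the unpack helper are simplified stand-ins written for illustration, not the actual nvif definitions.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Hypothetical stand-in for struct nvif_vmm_pfnclr_v0. */
    struct pfnclr_v0 {
            uint8_t  version;
            uint8_t  pad[7];
            uint64_t addr;
            uint64_t size;
    };

    /* Simplified stand-in for nvif_unpack(): accept the buffer only if it is
     * big enough and carries a known version; "more" permits trailing data. */
    static int unpack_v0(const void *argv, size_t argc, struct pfnclr_v0 *out,
                         int more)
    {
            if (argc < sizeof(*out))
                    return -1;
            memcpy(out, argv, sizeof(*out));
            if (out->version != 0)
                    return -1;
            if (!more && argc != sizeof(*out))
                    return -1;
            return 0;
    }

    int main(void)
    {
            struct pfnclr_v0 req = { .version = 0, .addr = 0x100000, .size = 0x2000 };
            struct pfnclr_v0 args;

            if (!unpack_v0(&req, sizeof(req), &args, 0))
                    printf("pfnclr addr=%#llx size=%#llx\n",
                           (unsigned long long)args.addr,
                           (unsigned long long)args.size);
            return 0;
    }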

In nvkm_uvmm_mthd_pfnmap():
    79  struct nvif_vmm_pfnmap_v0 v0;    (member)
    86  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, true))) {
    87  page = args->v0.page;
    88  addr = args->v0.addr;
    89  size = args->v0.size;
    90  phys = args->v0.phys;
    91  if (argc != (size >> page) * sizeof(args->v0.phys[0]))
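
nvkm_uvmm_mthd_pfnmap() additionally expects a variable-length phys[] array behind the fixed header, one entry per page, which is what the check at line 91 enforces. Worked through with example numbers (not taken from the driver), assuming 8-byte entries: for size = 0x8000 and page = 12 (4 KiB pages), size >> page gives 8 pages, so exactly 8 * 8 = 64 trailing bytes must remain. A tiny self-contained sketch of that length check:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t size = 0x8000;  /* mapping length in bytes (example value) */
            uint8_t  page = 12;      /* page shift, i.e. 4 KiB pages (example value) */
            size_t   argc = 64;      /* bytes left after the fixed header (example value) */

            /* Mirrors the length check at line 91: one uint64_t PFN per page. */
            if (argc != (size >> page) * sizeof(uint64_t))
                    printf("bad argument length\n");
            else
                    printf("%llu pages, %zu trailing bytes: ok\n",
                           (unsigned long long)(size >> page), argc);
            return 0;
    }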

In nvkm_uvmm_mthd_unmap():
   113  struct nvif_vmm_unmap_v0 v0;    (member)
   120  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   121  addr = args->v0.addr;

In nvkm_uvmm_mthd_map():
   156  struct nvif_vmm_map_v0 v0;    (member)
   164  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, true))) {
   165  addr = args->v0.addr;
   166  size = args->v0.size;
   167  handle = args->v0.memory;
   168  offset = args->v0.offset;

In nvkm_uvmm_mthd_put():
   235  struct nvif_vmm_put_v0 v0;    (member)
   242  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   243  addr = args->v0.addr;
   248  vma = nvkm_vmm_node_search(vmm, args->v0.addr);

In nvkm_uvmm_mthd_get():
   273  struct nvif_vmm_get_v0 v0;    (member)
   282  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   283  getref = args->v0.type == NVIF_VMM_GET_V0_PTES;
   284  mapref = args->v0.type == NVIF_VMM_GET_V0_ADDR;
   285  sparse = args->v0.sparse;
   286  page = args->v0.page;
   287  align = args->v0.align;
   288  size = args->v0.size;
   299  args->v0.addr = vma->addr;

In nvkm_uvmm_mthd_page():
   308  struct nvif_vmm_page_v0 v0;    (member)
   317  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, false))) {
   318  if ((index = args->v0.index) >= nr)
   321  args->v0.shift = page[index].shift;
   322  args->v0.sparse = !!(type & NVKM_VMM_PAGE_SPARSE);
   323  args->v0.vram = !!(type & NVKM_VMM_PAGE_VRAM);
   324  args->v0.host = !!(type & NVKM_VMM_PAGE_HOST);
   325  args->v0.comp = !!(type & NVKM_VMM_PAGE_COMP);
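
nvkm_uvmm_mthd_page() answers a page-capability query by folding bits of the page type mask into one-byte booleans with the !! idiom, as lines 322-325 show. A tiny illustration follows; the flag values here are placeholders, not the real NVKM_VMM_PAGE_* definitions.

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder bit values standing in for the NVKM_VMM_PAGE_* masks. */
    #define PAGE_SPARSE 0x01
    #define PAGE_VRAM   0x02
    #define PAGE_HOST   0x04
    #define PAGE_COMP   0x08

    int main(void)
    {
            uint8_t type = PAGE_VRAM | PAGE_COMP;

            /* !! collapses any non-zero mask result to exactly 1. */
            uint8_t sparse = !!(type & PAGE_SPARSE);
            uint8_t vram   = !!(type & PAGE_VRAM);
            uint8_t host   = !!(type & PAGE_HOST);
            uint8_t comp   = !!(type & PAGE_COMP);

            printf("sparse=%d vram=%d host=%d comp=%d\n", sparse, vram, host, comp);
            return 0;
    }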

In nvkm_uvmm_new():
   378  struct nvif_vmm_v0 v0;    (member)
   386  if (!(ret = nvif_unpack(ret, &argv, &argc, args->v0, 0, 0, more))) {
   387  managed = args->v0.managed != 0;
   388  addr = args->v0.addr;
   389  size = args->v0.size;
   413  args->v0.page_nr = 0;
   415  args->v0.page_nr++;
   416  args->v0.addr = uvmm->vmm->start;
   417  args->v0.size = uvmm->vmm->limit;
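
nvkm_uvmm_new() shows the other half of the protocol: after consuming managed/addr/size from the request, it writes the reply (the supported page count and the effective VMM range) back into the very same args->v0 buffer, so the answer travels back to userspace in place. A minimal sketch of that in/out pattern, using a hypothetical struct rather than the real nvif_vmm_v0 layout:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical in/out argument block; field names only loosely follow
     * the real struct nvif_vmm_v0. */
    struct vmm_v0 {
            uint8_t  version;
            uint8_t  managed;
            uint8_t  page_nr;
            uint8_t  pad[5];
            uint64_t addr;
            uint64_t size;
    };

    /* Consume the request fields, then overwrite the same struct with the
     * reply, in the spirit of lines 413-417 (page_nr, addr, size). */
    static void vmm_new(struct vmm_v0 *args)
    {
            int managed = args->managed != 0;

            args->page_nr = 5;                        /* example: five page sizes */
            args->addr    = managed ? args->addr : 0;
            args->size    = managed ? args->size : (1ULL << 40);
    }

    int main(void)
    {
            struct vmm_v0 args = { .version = 0, .managed = 0, .addr = 0, .size = 0 };

            vmm_new(&args);
            printf("page_nr=%d addr=%#llx size=%#llx\n", args.page_nr,
                   (unsigned long long)args.addr, (unsigned long long)args.size);
            return 0;
    }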