Lines matching full:vm
(each hit: <source line number> <matched source> in <enclosing function>(); "argument"/"member" flag hits at declaration sites)

57 struct drm_i915_private *i915 = ggtt->vm.i915; in ggtt_init_hw()
59 i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); in ggtt_init_hw()
61 ggtt->vm.is_ggtt = true; in ggtt_init_hw()
64 ggtt->vm.has_read_only = IS_VALLEYVIEW(i915); in ggtt_init_hw()
67 ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust; in ggtt_init_hw()
73 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
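
These ggtt_init_hw() hits (the listing appears to come from drivers/gpu/drm/i915/gt/intel_ggtt.c) already outline the init path: the address space is initialised as VM_CLASS_GGTT, flagged is_ggtt, read-only GGTT mappings are enabled only on Valleyview, a color_adjust callback is installed conditionally, and the vm.cleanup() call at line 73 sits on an error path. A minimal sketch of that shape; everything between the matched lines, including both guards and the -EIO path, is reconstruction, not verbatim source:

    static int ggtt_init_hw(struct i915_ggtt *ggtt)
    {
    	struct drm_i915_private *i915 = ggtt->vm.i915;

    	i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT);

    	ggtt->vm.is_ggtt = true;

    	/* only Valleyview supports read-only GGTT mappings */
    	ggtt->vm.has_read_only = IS_VALLEYVIEW(i915);

    	if (!HAS_LLC(i915) && !HAS_PPGTT(i915))	/* assumed guard */
    		ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust;

    	/* assumed error path for the line-73 cleanup() hit */
    	if (ggtt->mappable_end &&
    	    !io_mapping_init_wc(&ggtt->iomap, ggtt->gmadr.start,
    				ggtt->mappable_end)) {
    		ggtt->vm.cleanup(&ggtt->vm);
    		return -EIO;
    	}

    	return 0;
    }
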
108 * i915_ggtt_suspend_vm - Suspend the memory mappings for a GGTT or DPT VM
109 * @vm: The VM to suspend the mappings for
114 void i915_ggtt_suspend_vm(struct i915_address_space *vm) in i915_ggtt_suspend_vm() argument
119 drm_WARN_ON(&vm->i915->drm, !vm->is_ggtt && !vm->is_dpt); in i915_ggtt_suspend_vm()
122 i915_gem_drain_freed_objects(vm->i915); in i915_ggtt_suspend_vm()
124 mutex_lock(&vm->mutex); in i915_ggtt_suspend_vm()
130 save_skip_rewrite = vm->skip_pte_rewrite; in i915_ggtt_suspend_vm()
131 vm->skip_pte_rewrite = true; in i915_ggtt_suspend_vm()
133 list_for_each_entry_safe(vma, vn, &vm->bound_list, vm_link) { in i915_ggtt_suspend_vm()
149 mutex_unlock(&vm->mutex); in i915_ggtt_suspend_vm()
156 vm->skip_pte_rewrite = save_skip_rewrite; in i915_ggtt_suspend_vm()
170 vm->clear_range(vm, 0, vm->total); in i915_ggtt_suspend_vm()
172 vm->skip_pte_rewrite = save_skip_rewrite; in i915_ggtt_suspend_vm()
174 mutex_unlock(&vm->mutex); in i915_ggtt_suspend_vm()
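
Read in order, the i915_ggtt_suspend_vm() hits give the whole suspend protocol: drain freed objects, take vm->mutex, save and set skip_pte_rewrite so evicting each VMA does not rewrite PTEs one at a time, evict what sits on bound_list, then scrub the entire range with a single clear_range() before restoring the flag. A condensed sketch assuming that flow; the trylock-failure retry path behind the line 149/156 hits is elided:

    void i915_ggtt_suspend_vm(struct i915_address_space *vm)
    {
    	struct i915_vma *vma, *vn;
    	int save_skip_rewrite;

    	drm_WARN_ON(&vm->i915->drm, !vm->is_ggtt && !vm->is_dpt);

    	i915_gem_drain_freed_objects(vm->i915);

    	mutex_lock(&vm->mutex);

    	/* per-VMA PTE rewrites are pointless: the range is scrubbed below */
    	save_skip_rewrite = vm->skip_pte_rewrite;
    	vm->skip_pte_rewrite = true;

    	list_for_each_entry_safe(vma, vn, &vm->bound_list, vm_link) {
    		/* pinned VMAs are skipped, the rest are evicted (elided) */
    	}

    	vm->clear_range(vm, 0, vm->total);

    	vm->skip_pte_rewrite = save_skip_rewrite;

    	mutex_unlock(&vm->mutex);
    }
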
181 i915_ggtt_suspend_vm(&ggtt->vm); in i915_ggtt_suspend()
190 struct intel_uncore *uncore = ggtt->vm.gt->uncore; in gen6_ggtt_invalidate()
215 struct intel_uncore *uncore = ggtt->vm.gt->uncore; in gen8_ggtt_invalidate()
223 if (needs_wc_ggtt_mapping(ggtt->vm.i915)) in gen8_ggtt_invalidate()
242 struct drm_i915_private *i915 = ggtt->vm.i915; in guc_ggtt_invalidate()
294 struct intel_gt *gt = ggtt->vm.gt; in should_update_ggtt_with_bind()
302 struct intel_gt *gt = ggtt->vm.gt; in gen8_ggtt_bind_get_ce()
336 struct intel_gt *gt = ggtt->vm.gt; in gen8_ggtt_bind_ptes()
337 const gen8_pte_t scratch_pte = ggtt->vm.scratch[0]->encode; in gen8_ggtt_bind_ptes()
438 static void gen8_ggtt_insert_page(struct i915_address_space *vm, in gen8_ggtt_insert_page() argument
444 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_insert_page()
448 gen8_set_pte(pte, ggtt->vm.pte_encode(addr, pat_index, flags)); in gen8_ggtt_insert_page()
453 static void gen8_ggtt_insert_page_bind(struct i915_address_space *vm, in gen8_ggtt_insert_page_bind() argument
457 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_insert_page_bind()
460 pte = ggtt->vm.pte_encode(addr, pat_index, flags); in gen8_ggtt_insert_page_bind()
461 if (should_update_ggtt_with_bind(i915_vm_to_ggtt(vm)) && in gen8_ggtt_insert_page_bind()
465 gen8_ggtt_insert_page(vm, addr, offset, pat_index, flags); in gen8_ggtt_insert_page_bind()
468 static void gen8_ggtt_insert_entries(struct i915_address_space *vm, in gen8_ggtt_insert_entries() argument
473 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_insert_entries()
474 const gen8_pte_t pte_encode = ggtt->vm.pte_encode(0, pat_index, flags); in gen8_ggtt_insert_entries()
489 gen8_set_pte(gte++, vm->scratch[0]->encode); in gen8_ggtt_insert_entries()
498 gen8_set_pte(gte++, vm->scratch[0]->encode); in gen8_ggtt_insert_entries()
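
The two scratch writes in gen8_ggtt_insert_entries() (lines 489 and 498) are the interesting part: the guard pages in front of the mapping and the allocated-but-unused tail both get pointed at the scratch page so no stale PTEs stay readable. A sketch of the loop structure, with the pointer arithmetic inferred from the matched lines rather than copied:

    	gen8_pte_t __iomem *gte, *end;
    	struct sgt_iter iter;
    	dma_addr_t addr;

    	gte = (gen8_pte_t __iomem *)ggtt->gsm +
    	      (vma_res->start - vma_res->guard) / I915_GTT_PAGE_SIZE;

    	/* leading guard pages -> scratch (the line 489 hit) */
    	end = gte + vma_res->guard / I915_GTT_PAGE_SIZE;
    	while (gte < end)
    		gen8_set_pte(gte++, vm->scratch[0]->encode);

    	/* the real backing pages */
    	end += (vma_res->node_size + vma_res->guard) / I915_GTT_PAGE_SIZE;
    	for_each_sgt_daddr(addr, iter, vma_res->bi.pages)
    		gen8_set_pte(gte++, pte_encode | addr);

    	/* allocated-but-unused tail -> scratch (the line 498 hit) */
    	while (gte < end)
    		gen8_set_pte(gte++, vm->scratch[0]->encode);
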
507 static bool __gen8_ggtt_insert_entries_bind(struct i915_address_space *vm, in __gen8_ggtt_insert_entries_bind() argument
511 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in __gen8_ggtt_insert_entries_bind()
512 gen8_pte_t scratch_pte = vm->scratch[0]->encode; in __gen8_ggtt_insert_entries_bind()
516 pte_encode = ggtt->vm.pte_encode(0, pat_index, flags); in __gen8_ggtt_insert_entries_bind()
538 static void gen8_ggtt_insert_entries_bind(struct i915_address_space *vm, in gen8_ggtt_insert_entries_bind() argument
542 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_insert_entries_bind()
544 if (should_update_ggtt_with_bind(i915_vm_to_ggtt(vm)) && in gen8_ggtt_insert_entries_bind()
545 __gen8_ggtt_insert_entries_bind(vm, vma_res, pat_index, flags)) in gen8_ggtt_insert_entries_bind()
548 gen8_ggtt_insert_entries(vm, vma_res, pat_index, flags); in gen8_ggtt_insert_entries_bind()
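
All of the *_bind entry points share one shape, visible almost verbatim in the lines 544-548 hits: if the GGTT must currently be updated through GuC bind requests (should_update_ggtt_with_bind()) and that path succeeds, return; otherwise fall back to direct PTE writes:

    static void gen8_ggtt_insert_entries_bind(struct i915_address_space *vm,
    					      struct i915_vma_resource *vma_res,
    					      unsigned int pat_index, u32 flags)
    {
    	if (should_update_ggtt_with_bind(i915_vm_to_ggtt(vm)) &&
    	    __gen8_ggtt_insert_entries_bind(vm, vma_res, pat_index, flags))
    		return;

    	/* fall back to direct MMIO PTE writes */
    	gen8_ggtt_insert_entries(vm, vma_res, pat_index, flags);
    }
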
551 static void gen8_ggtt_clear_range(struct i915_address_space *vm, in gen8_ggtt_clear_range() argument
554 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_clear_range()
557 const gen8_pte_t scratch_pte = vm->scratch[0]->encode; in gen8_ggtt_clear_range()
572 static void gen8_ggtt_scratch_range_bind(struct i915_address_space *vm, in gen8_ggtt_scratch_range_bind() argument
575 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen8_ggtt_scratch_range_bind()
578 const gen8_pte_t scratch_pte = vm->scratch[0]->encode; in gen8_ggtt_scratch_range_bind()
590 gen8_ggtt_clear_range(vm, start, length); in gen8_ggtt_scratch_range_bind()
593 static void gen6_ggtt_insert_page(struct i915_address_space *vm, in gen6_ggtt_insert_page() argument
599 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen6_ggtt_insert_page()
603 iowrite32(vm->pte_encode(addr, pat_index, flags), pte); in gen6_ggtt_insert_page()
614 static void gen6_ggtt_insert_entries(struct i915_address_space *vm, in gen6_ggtt_insert_entries() argument
619 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen6_ggtt_insert_entries()
630 iowrite32(vm->scratch[0]->encode, gte++); in gen6_ggtt_insert_entries()
633 iowrite32(vm->pte_encode(addr, pat_index, flags), gte++); in gen6_ggtt_insert_entries()
638 iowrite32(vm->scratch[0]->encode, gte++); in gen6_ggtt_insert_entries()
647 static void nop_clear_range(struct i915_address_space *vm, in nop_clear_range() argument
652 static void bxt_vtd_ggtt_wa(struct i915_address_space *vm) in bxt_vtd_ggtt_wa() argument
661 intel_uncore_posting_read_fw(vm->gt->uncore, GFX_FLSH_CNTL_GEN6); in bxt_vtd_ggtt_wa()
665 struct i915_address_space *vm; member
675 gen8_ggtt_insert_page(arg->vm, arg->addr, arg->offset, in bxt_vtd_ggtt_insert_page__cb()
677 bxt_vtd_ggtt_wa(arg->vm); in bxt_vtd_ggtt_insert_page__cb()
682 static void bxt_vtd_ggtt_insert_page__BKL(struct i915_address_space *vm, in bxt_vtd_ggtt_insert_page__BKL() argument
688 struct insert_page arg = { vm, addr, offset, pat_index }; in bxt_vtd_ggtt_insert_page__BKL()
694 struct i915_address_space *vm; member
704 gen8_ggtt_insert_entries(arg->vm, arg->vma_res, in bxt_vtd_ggtt_insert_entries__cb()
706 bxt_vtd_ggtt_wa(arg->vm); in bxt_vtd_ggtt_insert_entries__cb()
711 static void bxt_vtd_ggtt_insert_entries__BKL(struct i915_address_space *vm, in bxt_vtd_ggtt_insert_entries__BKL() argument
716 struct insert_entries arg = { vm, vma_res, pat_index, flags }; in bxt_vtd_ggtt_insert_entries__BKL()
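
The bxt_vtd_* wrappers package the operation into an argument struct (the two "member" hits at lines 665 and 694) and run it under stop_machine(), so the PTE write cannot race with concurrent GGTT access while VT-d is enabled on Broxton; the bxt_vtd_ggtt_wa() posting read at line 661 then drains pending GTT writes. Reconstructed for the insert_page case, with the field order assumed from the initializer at line 688:

    struct insert_page {
    	struct i915_address_space *vm;
    	dma_addr_t addr;
    	u64 offset;
    	unsigned int pat_index;
    };

    static int bxt_vtd_ggtt_insert_page__cb(void *_arg)
    {
    	struct insert_page *arg = _arg;

    	gen8_ggtt_insert_page(arg->vm, arg->addr, arg->offset,
    			      arg->pat_index, 0);
    	bxt_vtd_ggtt_wa(arg->vm);	/* flush pending GTT writes */

    	return 0;
    }

    static void bxt_vtd_ggtt_insert_page__BKL(struct i915_address_space *vm,
    					      dma_addr_t addr, u64 offset,
    					      unsigned int pat_index, u32 unused)
    {
    	struct insert_page arg = { vm, addr, offset, pat_index };

    	/* run the PTE write with every other CPU quiesced */
    	stop_machine(bxt_vtd_ggtt_insert_page__cb, &arg, NULL);
    }
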
721 static void gen6_ggtt_clear_range(struct i915_address_space *vm, in gen6_ggtt_clear_range() argument
724 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen6_ggtt_clear_range()
737 scratch_pte = vm->scratch[0]->encode; in gen6_ggtt_clear_range()
742 void intel_ggtt_bind_vma(struct i915_address_space *vm, in intel_ggtt_bind_vma() argument
762 vm->insert_entries(vm, vma_res, pat_index, pte_flags); in intel_ggtt_bind_vma()
766 void intel_ggtt_unbind_vma(struct i915_address_space *vm, in intel_ggtt_unbind_vma() argument
769 vm->clear_range(vm, vma_res->start, vma_res->vma_size); in intel_ggtt_unbind_vma()
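
intel_ggtt_bind_vma() and intel_ggtt_unbind_vma() are the default vma_ops that the probe functions further down install; they reduce VMA state to PTE flags and dispatch through the per-platform vm->insert_entries / vm->clear_range hooks. A sketch of the bind side; the flag derivation around the line 762 hit is assumed:

    void intel_ggtt_bind_vma(struct i915_address_space *vm,
    			     struct i915_vm_pt_stash *stash,
    			     struct i915_vma_resource *vma_res,
    			     unsigned int pat_index, u32 flags)
    {
    	u32 pte_flags = 0;

    	/* read-only is applicable to VLV only; PTE_LM marks local memory */
    	if (vma_res->bi.readonly)
    		pte_flags |= PTE_READ_ONLY;
    	if (vma_res->bi.lmem)
    		pte_flags |= PTE_LM;

    	vm->insert_entries(vm, vma_res, pat_index, pte_flags);
    }
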
788 if (!intel_uc_uses_guc(&ggtt->vm.gt->uc)) in ggtt_reserve_guc_top()
791 GEM_BUG_ON(ggtt->vm.total <= GUC_TOP_RESERVE_SIZE); in ggtt_reserve_guc_top()
792 offset = ggtt->vm.total - GUC_TOP_RESERVE_SIZE; in ggtt_reserve_guc_top()
794 ret = i915_gem_gtt_reserve(&ggtt->vm, NULL, &ggtt->uc_fw, in ggtt_reserve_guc_top()
798 drm_dbg(&ggtt->vm.i915->drm, in ggtt_reserve_guc_top()
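
ggtt_reserve_guc_top() pins GUC_TOP_RESERVE_SIZE bytes at the very top of the GGTT as unevictable, reportedly because the top of the GuC address space is unusable by the GuC and must be kept free of GEM allocations. The hits give nearly the whole function; only the trailing arguments of the reserve call are filled in from the known i915_gem_gtt_reserve() signature:

    static int ggtt_reserve_guc_top(struct i915_ggtt *ggtt)
    {
    	u64 offset;
    	int ret;

    	if (!intel_uc_uses_guc(&ggtt->vm.gt->uc))
    		return 0;

    	GEM_BUG_ON(ggtt->vm.total <= GUC_TOP_RESERVE_SIZE);
    	offset = ggtt->vm.total - GUC_TOP_RESERVE_SIZE;

    	ret = i915_gem_gtt_reserve(&ggtt->vm, NULL, &ggtt->uc_fw,
    				   GUC_TOP_RESERVE_SIZE, offset,
    				   I915_COLOR_UNEVICTABLE, PIN_NOEVICT);
    	if (ret)
    		drm_dbg(&ggtt->vm.i915->drm,
    			"Failed to reserve top of GGTT for GuC\n");

    	return ret;
    }
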
841 intel_wopcm_guc_size(&ggtt->vm.gt->wopcm)); in init_ggtt()
873 if (drm_mm_reserve_node(&ggtt->vm.mm, &ggtt->error_capture)) in init_ggtt()
874 drm_mm_insert_node_in_range(&ggtt->vm.mm, in init_ggtt()
885 ggtt->vm.scratch_range(&ggtt->vm, start, size); in init_ggtt()
886 drm_dbg(&ggtt->vm.i915->drm, in init_ggtt()
901 drm_mm_for_each_hole(entry, &ggtt->vm.mm, hole_start, hole_end) { in init_ggtt()
902 drm_dbg(&ggtt->vm.i915->drm, in init_ggtt()
905 ggtt->vm.clear_range(&ggtt->vm, hole_start, in init_ggtt()
910 ggtt->vm.clear_range(&ggtt->vm, ggtt->vm.total - PAGE_SIZE, PAGE_SIZE); in init_ggtt()
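
init_ggtt() finishes by scrubbing every hole the drm_mm allocator still has, plus the one guard page reserved at the very end of the address space, so nothing inherits stale PTEs from firmware. Almost verbatim from the lines 901-910 hits (the debug message text is paraphrased):

    	/* clear any non-preallocated blocks */
    	drm_mm_for_each_hole(entry, &ggtt->vm.mm, hole_start, hole_end) {
    		drm_dbg(&ggtt->vm.i915->drm,
    			"clearing unused GTT space: [%lx, %lx]\n",
    			hole_start, hole_end);
    		ggtt->vm.clear_range(&ggtt->vm, hole_start,
    				     hole_end - hole_start);
    	}

    	/* and finally clear the reserved guard page */
    	ggtt->vm.clear_range(&ggtt->vm, ggtt->vm.total - PAGE_SIZE, PAGE_SIZE);
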
919 static void aliasing_gtt_bind_vma(struct i915_address_space *vm, in aliasing_gtt_bind_vma() argument
933 ppgtt_bind_vma(&i915_vm_to_ggtt(vm)->alias->vm, in aliasing_gtt_bind_vma()
937 vm->insert_entries(vm, vma_res, pat_index, pte_flags); in aliasing_gtt_bind_vma()
942 static void aliasing_gtt_unbind_vma(struct i915_address_space *vm, in aliasing_gtt_unbind_vma() argument
946 vm->clear_range(vm, vma_res->start, vma_res->vma_size); in aliasing_gtt_unbind_vma()
949 ppgtt_unbind_vma(&i915_vm_to_ggtt(vm)->alias->vm, vma_res); in aliasing_gtt_unbind_vma()
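
With an aliasing PPGTT, one bind may have to land in two address spaces: local binds go into the alias PPGTT through ppgtt_bind_vma() (line 933), global binds into the GGTT itself (line 937), and unbind mirrors both. A sketch with the flag tests assumed:

    static void aliasing_gtt_bind_vma(struct i915_address_space *vm,
    				      struct i915_vm_pt_stash *stash,
    				      struct i915_vma_resource *vma_res,
    				      unsigned int pat_index, u32 flags)
    {
    	u32 pte_flags = 0;

    	if (flags & I915_VMA_LOCAL_BIND)	/* assumed guard */
    		ppgtt_bind_vma(&i915_vm_to_ggtt(vm)->alias->vm,
    			       stash, vma_res, pat_index, flags);

    	if (flags & I915_VMA_GLOBAL_BIND)	/* assumed guard */
    		vm->insert_entries(vm, vma_res, pat_index, pte_flags);
    }
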
958 ppgtt = i915_ppgtt_create(ggtt->vm.gt, 0); in init_aliasing_ppgtt()
962 if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) { in init_aliasing_ppgtt()
967 err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total); in init_aliasing_ppgtt()
971 i915_gem_object_lock(ppgtt->vm.scratch[0], NULL); in init_aliasing_ppgtt()
972 err = i915_vm_map_pt_stash(&ppgtt->vm, &stash); in init_aliasing_ppgtt()
973 i915_gem_object_unlock(ppgtt->vm.scratch[0]); in init_aliasing_ppgtt()
983 ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total); in init_aliasing_ppgtt()
986 ggtt->vm.bind_async_flags |= ppgtt->vm.bind_async_flags; in init_aliasing_ppgtt()
988 GEM_BUG_ON(ggtt->vm.vma_ops.bind_vma != intel_ggtt_bind_vma); in init_aliasing_ppgtt()
989 ggtt->vm.vma_ops.bind_vma = aliasing_gtt_bind_vma; in init_aliasing_ppgtt()
991 GEM_BUG_ON(ggtt->vm.vma_ops.unbind_vma != intel_ggtt_unbind_vma); in init_aliasing_ppgtt()
992 ggtt->vm.vma_ops.unbind_vma = aliasing_gtt_unbind_vma; in init_aliasing_ppgtt()
994 i915_vm_free_pt_stash(&ppgtt->vm, &stash); in init_aliasing_ppgtt()
998 i915_vm_free_pt_stash(&ppgtt->vm, &stash); in init_aliasing_ppgtt()
1000 i915_vm_put(&ppgtt->vm); in init_aliasing_ppgtt()
1012 i915_vm_put(&ppgtt->vm); in fini_aliasing_ppgtt()
1014 ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma; in fini_aliasing_ppgtt()
1015 ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma; in fini_aliasing_ppgtt()
1039 flush_workqueue(ggtt->vm.i915->wq); in ggtt_cleanup_hw()
1040 i915_gem_drain_freed_objects(ggtt->vm.i915); in ggtt_cleanup_hw()
1042 mutex_lock(&ggtt->vm.mutex); in ggtt_cleanup_hw()
1044 ggtt->vm.skip_pte_rewrite = true; in ggtt_cleanup_hw()
1046 list_for_each_entry_safe(vma, vn, &ggtt->vm.bound_list, vm_link) { in ggtt_cleanup_hw()
1065 ggtt->vm.cleanup(&ggtt->vm); in ggtt_cleanup_hw()
1067 mutex_unlock(&ggtt->vm.mutex); in ggtt_cleanup_hw()
1068 i915_address_space_fini(&ggtt->vm); in ggtt_cleanup_hw()
1099 GEM_WARN_ON(kref_read(&ggtt->vm.resv_ref) != 1); in i915_ggtt_driver_late_release()
1100 dma_resv_fini(&ggtt->vm._resv); in i915_ggtt_driver_late_release()
1154 struct drm_i915_private *i915 = ggtt->vm.i915; in ggtt_probe_common()
1173 kref_init(&ggtt->vm.resv_ref); in ggtt_probe_common()
1174 ret = setup_scratch_page(&ggtt->vm); in ggtt_probe_common()
1183 if (i915_gem_object_is_lmem(ggtt->vm.scratch[0])) in ggtt_probe_common()
1186 ggtt->vm.scratch[0]->encode = in ggtt_probe_common()
1187 ggtt->vm.pte_encode(px_dma(ggtt->vm.scratch[0]), in ggtt_probe_common()
1195 static void gen6_gmch_remove(struct i915_address_space *vm) in gen6_gmch_remove() argument
1197 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in gen6_gmch_remove()
1200 free_scratch(vm); in gen6_gmch_remove()
1211 struct drm_i915_private *i915 = ggtt->vm.i915; in gen8_gmch_probe()
1230 ggtt->vm.alloc_pt_dma = alloc_pt_dma; in gen8_gmch_probe()
1231 ggtt->vm.alloc_scratch_dma = alloc_pt_dma; in gen8_gmch_probe()
1232 ggtt->vm.lmem_pt_obj_flags = I915_BO_ALLOC_PM_EARLY; in gen8_gmch_probe()
1234 ggtt->vm.total = (size / sizeof(gen8_pte_t)) * I915_GTT_PAGE_SIZE; in gen8_gmch_probe()
1235 ggtt->vm.cleanup = gen6_gmch_remove; in gen8_gmch_probe()
1236 ggtt->vm.insert_page = gen8_ggtt_insert_page; in gen8_gmch_probe()
1237 ggtt->vm.clear_range = nop_clear_range; in gen8_gmch_probe()
1238 ggtt->vm.scratch_range = gen8_ggtt_clear_range; in gen8_gmch_probe()
1240 ggtt->vm.insert_entries = gen8_ggtt_insert_entries; in gen8_gmch_probe()
1247 ggtt->vm.insert_entries = bxt_vtd_ggtt_insert_entries__BKL; in gen8_gmch_probe()
1248 ggtt->vm.insert_page = bxt_vtd_ggtt_insert_page__BKL; in gen8_gmch_probe()
1256 ggtt->vm.raw_insert_page = gen8_ggtt_insert_page; in gen8_gmch_probe()
1257 ggtt->vm.raw_insert_entries = gen8_ggtt_insert_entries; in gen8_gmch_probe()
1259 ggtt->vm.bind_async_flags = in gen8_gmch_probe()
1264 ggtt->vm.scratch_range = gen8_ggtt_scratch_range_bind; in gen8_gmch_probe()
1265 ggtt->vm.insert_page = gen8_ggtt_insert_page_bind; in gen8_gmch_probe()
1266 ggtt->vm.insert_entries = gen8_ggtt_insert_entries_bind; in gen8_gmch_probe()
1271 ggtt->vm.raw_insert_page = gen8_ggtt_insert_page; in gen8_gmch_probe()
1274 if (intel_uc_wants_guc_submission(&ggtt->vm.gt->uc)) in gen8_gmch_probe()
1279 ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma; in gen8_gmch_probe()
1280 ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma; in gen8_gmch_probe()
1283 ggtt->vm.pte_encode = mtl_ggtt_pte_encode; in gen8_gmch_probe()
1285 ggtt->vm.pte_encode = gen8_ggtt_pte_encode; in gen8_gmch_probe()
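
gen8_gmch_probe() is where the per-platform vtable gets assembled: clear_range is nop_clear_range by default (ranges are scrubbed via scratch_range instead), Broxton with VT-d swaps in the stop_machine wrappers, GuC-bind-capable parts get the *_bind variants plus raw_insert_* escape hatches, and the PTE encoder is chosen last. For the lines 1283-1285 hits the split looks like this; the exact version check is an assumption:

    	/* assumed: Xe_LPG (Meteor Lake) and newer use the MTL encoder */
    	if (GRAPHICS_VER_FULL(i915) >= IP_VER(12, 70))
    		ggtt->vm.pte_encode = mtl_ggtt_pte_encode;
    	else
    		ggtt->vm.pte_encode = gen8_ggtt_pte_encode;
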
1388 struct drm_i915_private *i915 = ggtt->vm.i915; in gen6_gmch_probe()
1413 ggtt->vm.total = (size / sizeof(gen6_pte_t)) * I915_GTT_PAGE_SIZE; in gen6_gmch_probe()
1415 ggtt->vm.alloc_pt_dma = alloc_pt_dma; in gen6_gmch_probe()
1416 ggtt->vm.alloc_scratch_dma = alloc_pt_dma; in gen6_gmch_probe()
1418 ggtt->vm.clear_range = nop_clear_range; in gen6_gmch_probe()
1420 ggtt->vm.clear_range = gen6_ggtt_clear_range; in gen6_gmch_probe()
1421 ggtt->vm.scratch_range = gen6_ggtt_clear_range; in gen6_gmch_probe()
1422 ggtt->vm.insert_page = gen6_ggtt_insert_page; in gen6_gmch_probe()
1423 ggtt->vm.insert_entries = gen6_ggtt_insert_entries; in gen6_gmch_probe()
1424 ggtt->vm.cleanup = gen6_gmch_remove; in gen6_gmch_probe()
1429 ggtt->vm.pte_encode = iris_pte_encode; in gen6_gmch_probe()
1431 ggtt->vm.pte_encode = hsw_pte_encode; in gen6_gmch_probe()
1433 ggtt->vm.pte_encode = byt_pte_encode; in gen6_gmch_probe()
1435 ggtt->vm.pte_encode = ivb_pte_encode; in gen6_gmch_probe()
1437 ggtt->vm.pte_encode = snb_pte_encode; in gen6_gmch_probe()
1439 ggtt->vm.vma_ops.bind_vma = intel_ggtt_bind_vma; in gen6_gmch_probe()
1440 ggtt->vm.vma_ops.unbind_vma = intel_ggtt_unbind_vma; in gen6_gmch_probe()
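
gen6_gmch_probe() closes with the gen6/gen7 PTE-encoder chain (lines 1429-1437), one encoder per cache-control scheme. Reconstructed with platform guards inferred from the encoder names, so treat the conditions as assumptions:

    	if (HAS_EDRAM(i915))			/* Iris Pro eDRAM */
    		ggtt->vm.pte_encode = iris_pte_encode;
    	else if (IS_HASWELL(i915))
    		ggtt->vm.pte_encode = hsw_pte_encode;
    	else if (IS_VALLEYVIEW(i915))
    		ggtt->vm.pte_encode = byt_pte_encode;
    	else if (GRAPHICS_VER(i915) >= 7)	/* Ivybridge */
    		ggtt->vm.pte_encode = ivb_pte_encode;
    	else
    		ggtt->vm.pte_encode = snb_pte_encode;	/* Sandybridge */
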
1450 ggtt->vm.gt = gt; in ggtt_probe_hw()
1451 ggtt->vm.i915 = i915; in ggtt_probe_hw()
1452 ggtt->vm.dma = i915->drm.dev; in ggtt_probe_hw()
1453 dma_resv_init(&ggtt->vm._resv); in ggtt_probe_hw()
1463 dma_resv_fini(&ggtt->vm._resv); in ggtt_probe_hw()
1467 if ((ggtt->vm.total - 1) >> 32) { in ggtt_probe_hw()
1471 ggtt->vm.total >> 20); in ggtt_probe_hw()
1472 ggtt->vm.total = 1ULL << 32; in ggtt_probe_hw()
1474 min_t(u64, ggtt->mappable_end, ggtt->vm.total); in ggtt_probe_hw()
1477 if (ggtt->mappable_end > ggtt->vm.total) { in ggtt_probe_hw()
1481 &ggtt->mappable_end, ggtt->vm.total); in ggtt_probe_hw()
1482 ggtt->mappable_end = ggtt->vm.total; in ggtt_probe_hw()
1486 drm_dbg(&i915->drm, "GGTT size = %lluM\n", ggtt->vm.total >> 20); in ggtt_probe_hw()
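
ggtt_probe_hw() then sanity-checks the probed sizes: a GGTT claiming more than 32 bits of address space is clamped to 4 GiB (lines 1467-1474) and a mappable aperture extending past the GGTT end is truncated (lines 1477-1482). Sketch, with the error strings paraphrased:

    	/* GGTT addresses must fit in 32 bits */
    	if ((ggtt->vm.total - 1) >> 32) {
    		drm_err(&i915->drm,
    			"GGTT claims more than 32 bits of address space (%lluM), clamping\n",
    			ggtt->vm.total >> 20);
    		ggtt->vm.total = 1ULL << 32;
    		ggtt->mappable_end =
    			min_t(u64, ggtt->mappable_end, ggtt->vm.total);
    	}

    	if (ggtt->mappable_end > ggtt->vm.total) {
    		drm_err(&i915->drm,
    			"mappable aperture extends past end of GGTT\n");
    		ggtt->mappable_end = ggtt->vm.total;
    	}
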
1542 * i915_ggtt_resume_vm - Restore the memory mappings for a GGTT or DPT VM
1543 * @vm: The VM to restore the mappings for
1551 bool i915_ggtt_resume_vm(struct i915_address_space *vm) in i915_ggtt_resume_vm() argument
1556 drm_WARN_ON(&vm->i915->drm, !vm->is_ggtt && !vm->is_dpt); in i915_ggtt_resume_vm()
1559 vm->clear_range(vm, 0, vm->total); in i915_ggtt_resume_vm()
1562 list_for_each_entry(vma, &vm->bound_list, vm_link) { in i915_ggtt_resume_vm()
1574 vma->ops->bind_vma(vm, NULL, vma->resource, in i915_ggtt_resume_vm()
1576 i915_gem_get_pat_index(vm->i915, in i915_ggtt_resume_vm()
1597 flush = i915_ggtt_resume_vm(&ggtt->vm); in i915_ggtt_resume()
1600 ggtt->vm.scratch_range(&ggtt->vm, ggtt->error_capture.start, in i915_ggtt_resume()
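
Resume is the mirror image of suspend: i915_ggtt_resume_vm() scrubs the whole range first (line 1559), then walks bound_list rebinding every VMA through its bind_vma op, using the object's PAT index or the uncached fallback (lines 1562-1576); i915_ggtt_resume() afterwards re-poisons the error-capture range (line 1600). A condensed sketch; the write-domain flushing and return-value bookkeeping are elided:

    bool i915_ggtt_resume_vm(struct i915_address_space *vm)
    {
    	struct i915_vma *vma;
    	bool write_domain_objs = false;

    	drm_WARN_ON(&vm->i915->drm, !vm->is_ggtt && !vm->is_dpt);

    	/* first fill our portion of the GTT with scratch pages */
    	vm->clear_range(vm, 0, vm->total);

    	/* then rebind everything that was bound before suspend */
    	list_for_each_entry(vma, &vm->bound_list, vm_link) {
    		struct drm_i915_gem_object *obj = vma->obj;

    		vma->ops->bind_vma(vm, NULL, vma->resource,
    				   obj ? obj->pat_index :
    				   i915_gem_get_pat_index(vm->i915,
    							  I915_CACHE_NONE),
    				   atomic_read(&vma->flags) &
    				   I915_VMA_BIND_MASK);
    	}

    	return write_domain_objs;
    }
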