
Searched refs:vma (Results 1 – 25 of 1064) sorted by relevance


/linux/drivers/gpu/drm/i915/
i915_vma.h
   51  static inline bool i915_vma_is_active(const struct i915_vma *vma)
   53  	return !i915_active_is_idle(&vma->active);
   60  int __must_check _i915_vma_move_to_active(struct i915_vma *vma,
   65  i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq,
   68  	return _i915_vma_move_to_active(vma, rq, &rq->fence, flags);
   73  static inline bool i915_vma_is_ggtt(const struct i915_vma *vma)
   75  	return test_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma));
   78  static inline bool i915_vma_is_dpt(const struct i915_vma *vma)
   80  	return i915_is_dpt(vma->vm);
   83  static inline bool i915_vma_has_ggtt_write(const struct i915_vma *vma)
[all …]
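These inline predicates are cheap state tests on a bound i915 VMA. As a rough illustration only (the helper below is ours, not driver code), a caller holding a valid vma pointer might branch on them like this:

	/* Hypothetical helper: skip VMAs still in use, then report
	 * where an idle one is bound. Uses only the predicates above. */
	static void describe_idle_vma(const struct i915_vma *vma)
	{
		if (i915_vma_is_active(vma))
			return;		/* still tracked by a request */
		if (i915_vma_is_ggtt(vma))
			pr_debug("idle vma in the global GTT\n");
		else if (i915_vma_is_dpt(vma))
			pr_debug("idle vma in a display page table\n");
	}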
i915_vma.c
   48  static inline void assert_vma_held_evict(const struct i915_vma *vma)
   55  	if (kref_read(&vma->vm->ref))
   56  		assert_object_held_shared(vma->obj);
   66  static void i915_vma_free(struct i915_vma *vma)
   68  	return kmem_cache_free(slab_vmas, vma);
   75  static void vma_print_allocator(struct i915_vma *vma, const char *reason)
   79  	if (!vma->node.stack) {
   80  		drm_dbg(vma->obj->base.dev,
   82  			vma->node.start, vma->node.size, reason);
   86  	stack_depot_snprint(vma->node.stack, buf, sizeof(buf), 0);
[all …]
i915_gem_evict.c
   41  static bool dying_vma(struct i915_vma *vma)
   43  	return !kref_read(&vma->obj->base.refcount);
   67  static bool grab_vma(struct i915_vma *vma, struct i915_gem_ww_ctx *ww)
   73  	if (i915_gem_object_get_rcu(vma->obj)) {
   74  		if (!i915_gem_object_trylock(vma->obj, ww)) {
   75  			i915_gem_object_put(vma->obj);
   80  	atomic_and(~I915_VMA_PIN_MASK, &vma->flags);
   86  static void ungrab_vma(struct i915_vma *vma)
   88  	if (dying_vma(vma))
   91  	i915_gem_object_unlock(vma->obj);
[all …]
/linux/mm/
mmap_lock.c
   48  static inline bool __vma_enter_locked(struct vm_area_struct *vma, bool detaching)
   60  	if (!refcount_add_not_zero(VMA_LOCK_OFFSET, &vma->vm_refcnt))
   63  	rwsem_acquire(&vma->vmlock_dep_map, 0, 0, _RET_IP_);
   64  	rcuwait_wait_event(&vma->vm_mm->vma_writer_wait,
   65  			   refcount_read(&vma->vm_refcnt) == tgt_refcnt,
   67  	lock_acquired(&vma->vmlock_dep_map, _RET_IP_);
   72  static inline void __vma_exit_locked(struct vm_area_struct *vma, bool *detached)
   74  	*detached = refcount_sub_and_test(VMA_LOCK_OFFSET, &vma->vm_refcnt);
   75  	rwsem_release(&vma->vmlock_dep_map, _RET_IP_);
   78  void __vma_start_write(struct vm_area_struct *vma, unsigned int mm_lock_seq)
[all …]
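__vma_start_write() is the slow path behind vma_start_write(), which a writer must call, while already holding mmap_lock exclusively, before modifying a VMA. A minimal sketch of that calling pattern (the flag update is just a placeholder; mm and vma are assumed context):

	mmap_write_lock(mm);			/* exclusive mmap_lock comes first */
	vma_start_write(vma);			/* waits out per-VMA readers */
	vm_flags_set(vma, VM_SOFTDIRTY);	/* placeholder modification */
	mmap_write_unlock(mm);			/* per-VMA write locks lapse here */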
vma.c
   75  static bool vma_had_uncowed_parents(struct vm_area_struct *vma)
   81  	return vma && vma->anon_vma && !list_is_singular(&vma->anon_vma_chain);
   86  	struct vm_area_struct *vma = merge_next ? vmg->next : vmg->prev;
   88  	if (!mpol_equal(vmg->policy, vma_policy(vma)))
   98  	if ((vma->vm_flags ^ vmg->vm_flags) & ~VM_SOFTDIRTY)
  100  	if (vma->vm_file != vmg->file)
  102  	if (!is_mergeable_vm_userfaultfd_ctx(vma, vmg->uffd_ctx))
  104  	if (!anon_vma_name_eq(anon_vma_name(vma), vmg->anon_name))
  141  		struct vm_area_struct *vma,
  148  	vp->vma = vma;
[all …]
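is_mergeable_vma() (lines 86–104 above) is why adjacent anonymous mappings with identical policy, flags, file, userfaultfd context, and anon name show up as a single region in /proc/self/maps. A userspace sketch that typically demonstrates the merge (exact placement behavior can vary by kernel):

	#include <stdio.h>
	#include <sys/mman.h>

	int main(void)
	{
		long pg = 4096;
		char *a = mmap(NULL, 2 * pg, PROT_READ | PROT_WRITE,
			       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

		mprotect(a + pg, pg, PROT_READ);		/* splits into two VMAs */
		mprotect(a + pg, pg, PROT_READ | PROT_WRITE);	/* attributes match again: merge */

		/* /proc/self/maps should now show one 8 KiB region, not two. */
		getchar();
		return 0;
	}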
nommu.c
   92  	struct vm_area_struct *vma;
   94  	vma = find_vma(current->mm, (unsigned long)objp);
   95  	if (vma)
   96  		return vma->vm_end - vma->vm_start;
  148  	struct vm_area_struct *vma;
  151  	vma = find_vma(current->mm, (unsigned long)ret);
  152  	if (vma)
  153  		vm_flags_set(vma, VM_USERMAP);
  345  int vm_insert_page(struct vm_area_struct *vma, unsigned long addr,
  352  int vm_insert_pages(struct vm_area_struct *vma, unsigned long addr,
[all …]
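find_vma() returns the first VMA whose vm_end lies above the address, so on MMU kernels a hit must still be range-checked; nommu's kobjsize() can skip that because the pointer is known to be inside an allocation. A hedged sketch of the usual MMU-side lookup (mm and addr are assumed context):

	struct vm_area_struct *vma;
	unsigned long size = 0;

	mmap_read_lock(mm);			/* find_vma needs mmap_lock held */
	vma = find_vma(mm, addr);
	if (vma && vma->vm_start <= addr)	/* addr really inside the VMA */
		size = vma->vm_end - vma->vm_start;
	mmap_read_unlock(mm);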
mprotect.c
   43  static bool maybe_change_pte_writable(struct vm_area_struct *vma, pte_t pte)
   45  	if (WARN_ON_ONCE(!(vma->vm_flags & VM_WRITE)))
   53  	if (pte_needs_soft_dirty_wp(vma, pte))
   57  	if (userfaultfd_pte_wp(vma, pte))
   63  static bool can_change_private_pte_writable(struct vm_area_struct *vma,
   68  	if (!maybe_change_pte_writable(vma, pte))
   77  	page = vm_normal_page(vma, addr, pte);
   81  static bool can_change_shared_pte_writable(struct vm_area_struct *vma,
   84  	if (!maybe_change_pte_writable(vma, pte))
   99  bool can_change_pte_writable(struct vm_area_struct *vma, unsigned long addr,
[all …]
vma.h
   14  	struct vm_area_struct *vma;
   36  	struct vm_area_struct *vma;	/* The first vma to munmap */
  161  static inline pgoff_t vma_pgoff_offset(struct vm_area_struct *vma,
  164  	return vma->vm_pgoff + PHYS_PFN(addr - vma->vm_start);
  205  		struct vm_area_struct *vma,
  209  		struct vm_area_struct *vma, gfp_t gfp)
  213  	    ((vmi->mas.index > vma->vm_start) || (vmi->mas.last < vma->vm_start)))
  216  	__mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1);
  217  	mas_store_gfp(&vmi->mas, vma, gfp);
  221  	vma_mark_attached(vma);
[all …]
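vma_pgoff_offset() (line 164 above) maps a virtual address to a page offset within the backing object. A worked example under assumed values:

	/*
	 * Assume vma->vm_start = 0x7f0000400000, vma->vm_pgoff = 0x10,
	 * addr = 0x7f0000403000, and 4 KiB pages.
	 *
	 *   PHYS_PFN(addr - vma->vm_start) = 0x3000 >> 12 = 3
	 *   vma_pgoff_offset()             = 0x10 + 3     = 0x13
	 *
	 * i.e. the address corresponds to page 0x13 of the mapped object.
	 */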
mremap.c
   64  	struct vm_area_struct *vma;
  143  static void take_rmap_locks(struct vm_area_struct *vma)
  145  	if (vma->vm_file)
  146  		i_mmap_lock_write(vma->vm_file->f_mapping);
  147  	if (vma->anon_vma)
  148  		anon_vma_lock_write(vma->anon_vma);
  151  static void drop_rmap_locks(struct vm_area_struct *vma)
  153  	if (vma->anon_vma)
  154  		anon_vma_unlock_write(vma->anon_vma);
  155  	if (vma->vm_file)
[all …]
memory.c
  107  	if (!userfaultfd_wp(vmf->vma))
  374  		struct vm_area_struct *vma, unsigned long floor,
  382  	unsigned long addr = vma->vm_start;
  398  		vma_start_write(vma);
  399  		unlink_anon_vmas(vma);
  402  		unlink_file_vma_batch_add(&vb, vma);
  407  		while (next && next->vm_start <= vma->vm_end + PMD_SIZE) {
  408  			vma = next;
  413  			vma_start_write(vma);
  414  			unlink_anon_vmas(vma);
[all …]
madvise.c
   80  	struct vm_area_struct *vma;
  110  struct anon_vma_name *anon_vma_name(struct vm_area_struct *vma)
  112  	if (!rwsem_is_locked(&vma->vm_mm->mmap_lock))
  113  		vma_assert_locked(vma);
  115  	return vma->anon_name;
  119  static int replace_anon_vma_name(struct vm_area_struct *vma,
  122  	struct anon_vma_name *orig_name = anon_vma_name(vma);
  125  	vma->anon_name = NULL;
  133  	vma->anon_name = anon_vma_name_reuse(anon_name);
  139  static int replace_anon_vma_name(struct vm_area_struct *vma,
[all …]
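anon_vma_name() and replace_anon_vma_name() back the PR_SET_VMA_ANON_NAME prctl, which lets a process label its anonymous mappings. A hedged userspace sketch (requires CONFIG_ANON_VMA_NAME; the constants come from <linux/prctl.h>, pulled in by <sys/prctl.h> on glibc, and the label text is arbitrary):

	#include <stdio.h>
	#include <sys/mman.h>
	#include <sys/prctl.h>

	int main(void)
	{
		size_t len = 1 << 20;
		void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
			       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

		if (prctl(PR_SET_VMA, PR_SET_VMA_ANON_NAME,
			  (unsigned long)p, len, "my-region"))
			perror("prctl");
		/* /proc/self/maps now shows the region as [anon:my-region]. */
		return 0;
	}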
vma_exec.c
   19  int relocate_vma_down(struct vm_area_struct *vma, unsigned long shift)
   32  	struct mm_struct *mm = vma->vm_mm;
   33  	unsigned long old_start = vma->vm_start;
   34  	unsigned long old_end = vma->vm_end;
   39  	VMG_STATE(vmg, mm, &vmi, new_start, old_end, 0, vma->vm_pgoff);
   42  	PAGETABLE_MOVE(pmc, vma, vma, old_start, new_start, length);
   50  	if (vma != vma_next(&vmi))
   57  	vmg.target = vma;
   91  	return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff);
  111  	struct vm_area_struct *vma = vm_area_alloc(mm);
[all …]
rmap.c
  149  static void anon_vma_chain_link(struct vm_area_struct *vma,
  153  	avc->vma = vma;
  155  	list_add(&avc->same_vma, &vma->anon_vma_chain);
  185  int __anon_vma_prepare(struct vm_area_struct *vma)
  187  	struct mm_struct *mm = vma->vm_mm;
  198  	anon_vma = find_mergeable_anon_vma(vma);
  211  	if (likely(!vma->anon_vma)) {
  212  		vma->anon_vma = anon_vma;
  213  		anon_vma_chain_link(vma, avc, anon_vma);
  333  int anon_vma_fork(struct vm_area_struct *vma, struct vm_area_struct *pvma)
[all …]
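__anon_vma_prepare() is the slow path of anon_vma_prepare(), which fault handlers call before installing the first anonymous page into a VMA. A minimal sketch of the calling convention (error handling and the surrounding fault context elided):

	/* Typical fault-path preamble before mapping an anonymous page. */
	if (unlikely(anon_vma_prepare(vma)))
		return VM_FAULT_OOM;	/* could not allocate the anon_vma */
	/* vma->anon_vma is now valid; the new page can be added to the
	 * reverse map, e.g. via folio_add_new_anon_rmap(). */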
/linux/tools/testing/vma/
vma.c
   18  #define vma_iter_prealloc(vmi, vma) \
   19  	(fail_prealloc ? -ENOMEM : mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))
   85  static int attach_vma(struct mm_struct *mm, struct vm_area_struct *vma)
   89  	res = vma_link(mm, vma);
   91  	vma_assert_attached(vma);
   95  static void detach_free_vma(struct vm_area_struct *vma)
   97  	vma_mark_detached(vma);
   98  	vm_area_free(vma);
  108  	struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, vm_flags);
  110  	if (vma == NULL)
[all …]
vma_internal.h
  122  #define vma_policy(vma) NULL
  431  	int (*mprotect)(struct vm_area_struct *vma, unsigned long start,
  450  	int (*access)(struct vm_area_struct *vma, unsigned long addr,
  456  	const char *(*name)(struct vm_area_struct *vma);
  466  	int (*set_policy)(struct vm_area_struct *vma, struct mempolicy *new);
  478  	struct mempolicy *(*get_policy)(struct vm_area_struct *vma,
  493  	struct page *(*find_normal_page)(struct vm_area_struct *vma,
  552  static inline bool vma_is_shared_maywrite(struct vm_area_struct *vma)
  554  	return is_shared_maywrite(vma->vm_flags);
  571  static inline void vma_assert_attached(struct vm_area_struct *vma)
[all …]
/linux/include/linux/
userfaultfd_k.h
  131  extern long uffd_wp_range(struct vm_area_struct *vma,
  145  static inline bool is_mergeable_vm_userfaultfd_ctx(struct vm_area_struct *vma,
  148  	return vma->vm_userfaultfd_ctx.ctx == vm_ctx.ctx;
  162  static inline bool uffd_disable_huge_pmd_share(struct vm_area_struct *vma)
  164  	return vma->vm_flags & (VM_UFFD_WP | VM_UFFD_MINOR);
  174  static inline bool uffd_disable_fault_around(struct vm_area_struct *vma)
  176  	return vma->vm_flags & (VM_UFFD_WP | VM_UFFD_MINOR);
  179  static inline bool userfaultfd_missing(struct vm_area_struct *vma)
  181  	return vma->vm_flags & VM_UFFD_MISSING;
  184  static inline bool userfaultfd_wp(struct vm_area_struct *vma)
[all …]
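Fault handlers use these predicates to divert faults to a userfaultfd reader. A sketch of the pattern from the anonymous fault path (handle_userfault() is the real entry point; the surrounding handler context is assumed):

	/* In a fault handler, before installing a fresh anonymous page: */
	if (userfaultfd_missing(vma)) {
		/* Hand the fault to the userfaultfd reader instead. */
		return handle_userfault(vmf, VM_UFFD_MISSING);
	}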
mmap_lock.h
  113  static inline void vma_lock_init(struct vm_area_struct *vma, bool reset_refcnt)
  118  	lockdep_init_map(&vma->vmlock_dep_map, "vm_lock", &lockdep_key, 0);
  121  	refcount_set(&vma->vm_refcnt, 0);
  122  	vma->vm_lock_seq = UINT_MAX;
  136  static inline void vma_refcount_put(struct vm_area_struct *vma)
  139  	struct mm_struct *mm = vma->vm_mm;
  142  	rwsem_release(&vma->vmlock_dep_map, _RET_IP_);
  143  	if (!__refcount_dec_and_test(&vma->vm_refcnt, &oldcnt)) {
  156  static inline bool vma_start_read_locked_nested(struct vm_area_struct *vma, int subclass)
  160  	mmap_assert_locked(vma->vm_mm);
[all …]
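vm_refcnt doubles as the per-VMA read lock that lets page faults proceed without mmap_lock. A hedged sketch of the reader side as used by fault handling (lock_vma_under_rcu() and vma_end_read() are the kernel's entry points; mm and addr are assumed context):

	struct vm_area_struct *vma;

	vma = lock_vma_under_rcu(mm, addr);	/* takes a vm_refcnt reference */
	if (!vma)
		goto fall_back_to_mmap_lock;	/* contended or not found */
	/* ... handle the fault against a stable vma ... */
	vma_end_read(vma);			/* drops it via vma_refcount_put() */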
/linux/drivers/gpu/drm/nouveau/
nouveau_vmm.c
   29  nouveau_vma_unmap(struct nouveau_vma *vma)
   31  	if (vma->mem) {
   32  		nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);
   33  		vma->mem = NULL;
   38  nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)
   40  	struct nvif_vma tmp = { .addr = vma->addr };
   41  	int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
   44  	vma->mem = mem;
   51  	struct nouveau_vma *vma;
   53  	list_for_each_entry(vma, &nvbo->vma_list, head) {
[all …]
/linux/drivers/gpu/drm/i915/display/
intel_fb_pin.c
   36  	struct i915_vma *vma;
   77  	vma = i915_vma_instance(obj, vm, view);
   78  	if (IS_ERR(vma)) {
   79  		ret = PTR_ERR(vma);
   83  	if (i915_vma_misplaced(vma, 0, alignment, 0)) {
   84  		ret = i915_vma_unbind(vma);
   89  	ret = i915_vma_pin_ww(vma, &ww, 0, alignment, PIN_GLOBAL);
   94  		vma = ERR_PTR(ret);
   98  	vma->display_alignment = max(vma->display_alignment, alignment);
  102  	i915_vma_get(vma);
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
  802  	struct nvkm_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL);
  803  	if (vma) {
  804  		vma->addr = addr;
  805  		vma->size = size;
  806  		vma->page = NVKM_VMA_PAGE_NONE;
  807  		vma->refd = NVKM_VMA_PAGE_NONE;
  809  	return vma;
  813  nvkm_vma_tail(struct nvkm_vma *vma, u64 tail)
  817  	BUG_ON(vma->size == tail);
  819  	if (!(new = nvkm_vma_new(vma->addr + (vma->size - tail), tail)))
[all …]
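nvkm_vma_tail() splits the trailing `tail` bytes of a region into a new nvkm_vma. A worked example under assumed values:

	/*
	 * Assume vma->addr = 0x10000 and vma->size = 0x4000 (16 KiB),
	 * and a caller requests tail = 0x1000 (4 KiB).
	 *
	 *   new->addr = 0x10000 + (0x4000 - 0x1000) = 0x13000
	 *   new->size = 0x1000
	 *
	 * The original vma is then trimmed to 12 KiB. tail == vma->size
	 * would be a degenerate split, hence the BUG_ON above.
	 */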
/linux/drivers/gpu/drm/i915/selftests/
i915_gem_gtt.c
  398  	struct i915_vma *vma;
  400  	vma = i915_vma_instance(obj, vm, NULL);
  401  	if (!IS_ERR(vma))
  402  		ignored = i915_vma_unbind_unlocked(vma);
  421  	struct i915_vma *vma;
  463  		vma = i915_vma_instance(obj, vm, NULL);
  464  		if (IS_ERR(vma))
  473  		err = i915_vma_pin(vma, 0, 0, offset | flags);
  480  		if (!drm_mm_node_allocated(&vma->node) ||
  481  		    i915_vma_misplaced(vma, 0, 0, offset | flags)) {
[all …]
i915_vma.c
   37  static bool assert_vma(struct i915_vma *vma,
   43  	if (vma->vm != ctx->vm) {
   48  	if (vma->size != obj->base.size) {
   50  		vma->size, obj->base.size);
   54  	if (vma->gtt_view.type != I915_GTT_VIEW_NORMAL) {
   56  		vma->gtt_view.type);
   68  	struct i915_vma *vma;
   71  	vma = i915_vma_instance(obj, vm, view);
   72  	if (IS_ERR(vma))
   73  		return vma;
[all …]
/linux/drivers/gpu/drm/xe/
xe_trace_bo.h
   21  #define __dev_name_vma(vma) __dev_name_vm(xe_vma_vm(vma))
   89  	TP_PROTO(struct xe_vma *vma),
   90  	TP_ARGS(vma),
   93  	__string(dev, __dev_name_vma(vma))
   94  	__field(struct xe_vma *, vma)
  104  	__entry->vma = vma;
  105  	__entry->vm = xe_vma_vm(vma);
  106  	__entry->asid = xe_vma_vm(vma)->usm.asid;
  107  	__entry->start = xe_vma_start(vma);
  108  	__entry->end = xe_vma_end(vma) - 1;
[all …]
xe_vm.h
   69  bool xe_vma_has_default_mem_attrs(struct xe_vma *vma);
  112  static inline u64 xe_vma_start(struct xe_vma *vma)
  114  	return vma->gpuva.va.addr;
  117  static inline u64 xe_vma_size(struct xe_vma *vma)
  119  	return vma->gpuva.va.range;
  122  static inline u64 xe_vma_end(struct xe_vma *vma)
  124  	return xe_vma_start(vma) + xe_vma_size(vma);
  127  static inline u64 xe_vma_bo_offset(struct xe_vma *vma)
  129  	return vma->gpuva.gem.offset;
  132  static inline struct xe_bo *xe_vma_bo(struct xe_vma *vma)
[all …]
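These accessors wrap the underlying drm_gpuva fields, and xe_vma_end() is exclusive. A small sketch of an address-containment check built from them (the helper name is ours, not a driver API):

	/* Hypothetical helper: does a GPU address fall inside this vma? */
	static bool xe_vma_contains(struct xe_vma *vma, u64 addr)
	{
		return addr >= xe_vma_start(vma) && addr < xe_vma_end(vma);
	}

The tracepoint in xe_trace_bo.h above logs xe_vma_end(vma) - 1 for the same reason: the end address itself lies outside the range.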
/linux/drivers/pci/
mmap.c
   25  	struct vm_area_struct *vma,
   32  	if (vma->vm_pgoff + vma_pages(vma) > size)
   36  		vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);
   38  		vma->vm_page_prot = pgprot_device(vma->vm_page_prot);
   41  	ret = pci_iobar_pfn(pdev, bar, vma);
   45  	vma->vm_pgoff += (pci_resource_start(pdev, bar) >> PAGE_SHIFT);
   47  	vma->vm_ops = &pci_phys_vm_ops;
   49  	return io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff,
   50  				  vma->vm_end - vma->vm_start,
   51  				  vma->vm_page_prot);
[all …]
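The same io_remap_pfn_range() pattern appears in most driver mmap handlers: validate the requested window against the resource, adjust the protection, then map the whole range at once. A minimal, hedged sketch for a hypothetical character device (my_dev_phys and my_dev_size are assumed driver-specific values, not a real API):

	static int my_dev_mmap(struct file *file, struct vm_area_struct *vma)
	{
		unsigned long pfn = my_dev_phys >> PAGE_SHIFT;	/* assumed base */
		unsigned long len = vma->vm_end - vma->vm_start;

		if (vma->vm_pgoff + vma_pages(vma) > (my_dev_size >> PAGE_SHIFT))
			return -EINVAL;		/* window exceeds the resource */

		vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
		return io_remap_pfn_range(vma, vma->vm_start,
					  pfn + vma->vm_pgoff, len,
					  vma->vm_page_prot);
	}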
