
Searched refs:bo (Results 1 – 25 of 380) sorted by relevance


/linux/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
36 static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, in __bo_init() argument
46 memset(bo, 0, sizeof(*bo)); in __bo_init()
47 mutex_init(&bo->mutex); in __bo_init()
49 /* init the bo->list HEAD as an element of entire_bo_list */ in __bo_init()
50 INIT_LIST_HEAD(&bo->list); in __bo_init()
52 bo->bdev = bdev; in __bo_init()
53 bo->vmap_addr = NULL; in __bo_init()
54 bo->status = HMM_BO_FREE; in __bo_init()
55 bo in __bo_init()
119 struct hmm_buffer_object *bo; __bo_search_by_addr() local
144 struct hmm_buffer_object *bo; __bo_search_by_addr_in_range() local
166 __bo_insert_to_free_rbtree(struct rb_root * root,struct hmm_buffer_object * bo) __bo_insert_to_free_rbtree() argument
199 __bo_insert_to_alloc_rbtree(struct rb_root * root,struct hmm_buffer_object * bo) __bo_insert_to_alloc_rbtree() argument
224 __bo_break_up(struct hmm_bo_device * bdev,struct hmm_buffer_object * bo,unsigned int pgnr) __bo_break_up() argument
255 __bo_take_off_handling(struct hmm_buffer_object * bo) __bo_take_off_handling() argument
296 __bo_merge(struct hmm_buffer_object * bo,struct hmm_buffer_object * next_bo) __bo_merge() argument
323 struct hmm_buffer_object *bo; hmm_bo_device_init() local
384 struct hmm_buffer_object *bo, *new_bo; hmm_bo_alloc() local
427 hmm_bo_release(struct hmm_buffer_object * bo) hmm_bo_release() argument
491 struct hmm_buffer_object *bo; hmm_bo_device_exit() local
534 hmm_bo_allocated(struct hmm_buffer_object * bo) hmm_bo_allocated() argument
544 struct hmm_buffer_object *bo; hmm_bo_device_search_start() local
564 struct hmm_buffer_object *bo; hmm_bo_device_search_in_range() local
585 struct hmm_buffer_object *bo; hmm_bo_device_search_vmap_start() local
614 free_private_bo_pages(struct hmm_buffer_object * bo) free_private_bo_pages() argument
621 alloc_private_pages(struct hmm_buffer_object * bo) alloc_private_pages() argument
643 alloc_vmalloc_pages(struct hmm_buffer_object * bo,void * vmalloc_addr) alloc_vmalloc_pages() argument
668 hmm_bo_alloc_pages(struct hmm_buffer_object * bo,enum hmm_bo_type type,void * vmalloc_addr) hmm_bo_alloc_pages() argument
719 hmm_bo_free_pages(struct hmm_buffer_object * bo) hmm_bo_free_pages() argument
748 hmm_bo_page_allocated(struct hmm_buffer_object * bo) hmm_bo_page_allocated() argument
758 hmm_bo_bind(struct hmm_buffer_object * bo) hmm_bo_bind() argument
836 hmm_bo_unbind(struct hmm_buffer_object * bo) hmm_bo_unbind() argument
879 hmm_bo_binded(struct hmm_buffer_object * bo) hmm_bo_binded() argument
894 hmm_bo_vmap(struct hmm_buffer_object * bo,bool cached) hmm_bo_vmap() argument
925 hmm_bo_flush_vmap(struct hmm_buffer_object * bo) hmm_bo_flush_vmap() argument
939 hmm_bo_vunmap(struct hmm_buffer_object * bo) hmm_bo_vunmap() argument
954 hmm_bo_ref(struct hmm_buffer_object * bo) hmm_bo_ref() argument
969 hmm_bo_unref(struct hmm_buffer_object * bo) hmm_bo_unref() argument
978 struct hmm_buffer_object *bo = hmm_bo_vm_open() local
996 struct hmm_buffer_object *bo = hmm_bo_vm_close() local
1023 hmm_bo_mmap(struct vm_area_struct * vma,struct hmm_buffer_object * bo) hmm_bo_mmap() argument
[all...]
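
Taken together, the hmm_bo.c hits sketch an allocate, populate, bind, vmap lifecycle. A minimal consumer might look like the sketch below; only hmm_bo_alloc_pages(), hmm_bo_bind(), hmm_bo_vmap() and the teardown helpers are visible above, so the hmm_bo_alloc() arguments and the HMM_BO_PRIVATE type are assumptions.

/* Sketch, not the driver's own code: back a buffer object with
 * private pages, bind it into the ISP MMU, and map it for the CPU.
 * hmm_bo_alloc()'s signature is assumed. */
static void *hmm_bo_map_example(struct hmm_bo_device *bdev,
				unsigned int pgnr)
{
	struct hmm_buffer_object *bo;
	void *vaddr;

	bo = hmm_bo_alloc(bdev, pgnr);			/* assumed */
	if (!bo)
		return NULL;

	if (hmm_bo_alloc_pages(bo, HMM_BO_PRIVATE, NULL))
		goto err_release;
	if (hmm_bo_bind(bo))
		goto err_free_pages;

	vaddr = hmm_bo_vmap(bo, true);			/* cached */
	if (vaddr)
		return vaddr;

	hmm_bo_unbind(bo);
err_free_pages:
	hmm_bo_free_pages(bo);
err_release:
	hmm_bo_release(bo);
	return NULL;
}
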
/linux/drivers/gpu/drm/ttm/
ttm_bo.c
52 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug() argument
63 man = ttm_manager_type(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
71 * @bo: The buffer object.
77 void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo) in ttm_bo_move_to_lru_tail() argument
79 dma_resv_assert_held(bo->base.resv); in ttm_bo_move_to_lru_tail()
81 if (bo->resource) in ttm_bo_move_to_lru_tail()
82 ttm_resource_move_to_lru_tail(bo->resource); in ttm_bo_move_to_lru_tail()
89 * @bo: The buffer object.
100 void ttm_bo_set_bulk_move(struct ttm_buffer_object *bo, in ttm_bo_set_bulk_move() argument
103 dma_resv_assert_held(bo in ttm_bo_set_bulk_move()
118 ttm_bo_handle_move_mem(struct ttm_buffer_object * bo,struct ttm_resource * mem,bool evict,struct ttm_operation_ctx * ctx,struct ttm_place * hop) ttm_bo_handle_move_mem() argument
180 ttm_bo_cleanup_memtype_use(struct ttm_buffer_object * bo) ttm_bo_cleanup_memtype_use() argument
189 ttm_bo_individualize_resv(struct ttm_buffer_object * bo) ttm_bo_individualize_resv() argument
216 ttm_bo_flush_all_fences(struct ttm_buffer_object * bo) ttm_bo_flush_all_fences() argument
236 struct ttm_buffer_object *bo; ttm_bo_delayed_delete() local
250 struct ttm_buffer_object *bo = ttm_bo_release() local
328 ttm_bo_put(struct ttm_buffer_object * bo) ttm_bo_put() argument
334 ttm_bo_bounce_temp_buffer(struct ttm_buffer_object * bo,struct ttm_operation_ctx * ctx,struct ttm_place * hop) ttm_bo_bounce_temp_buffer() argument
358 ttm_bo_evict(struct ttm_buffer_object * bo,struct ttm_operation_ctx * ctx) ttm_bo_evict() argument
421 ttm_bo_eviction_valuable(struct ttm_buffer_object * bo,const struct ttm_place * place) ttm_bo_eviction_valuable() argument
450 struct ttm_buffer_object *bo; ttm_bo_evict_first() local
515 ttm_bo_evict_cb(struct ttm_lru_walk * walk,struct ttm_buffer_object * bo) ttm_bo_evict_cb() argument
625 ttm_bo_pin(struct ttm_buffer_object * bo) ttm_bo_pin() argument
644 ttm_bo_unpin(struct ttm_buffer_object * bo) ttm_bo_unpin() argument
664 ttm_bo_add_move_fence(struct ttm_buffer_object * bo,struct ttm_resource_manager * man,bool no_wait_gpu) ttm_bo_add_move_fence() argument
710 ttm_bo_alloc_resource(struct ttm_buffer_object * bo,struct ttm_placement * placement,struct ttm_operation_ctx * ctx,bool force_space,struct ttm_resource ** res) ttm_bo_alloc_resource() argument
785 ttm_bo_mem_space(struct ttm_buffer_object * bo,struct ttm_placement * placement,struct ttm_resource ** res,struct ttm_operation_ctx * ctx) ttm_bo_mem_space() argument
818 ttm_bo_validate(struct ttm_buffer_object * bo,struct ttm_placement * placement,struct ttm_operation_ctx * ctx) ttm_bo_validate() argument
928 ttm_bo_init_reserved(struct ttm_device * bdev,struct ttm_buffer_object * bo,enum ttm_bo_type type,struct ttm_placement * placement,uint32_t alignment,struct ttm_operation_ctx * ctx,struct sg_table * sg,struct dma_resv * resv,void (* destroy)(struct ttm_buffer_object *)) ttm_bo_init_reserved() argument
1021 ttm_bo_init_validate(struct ttm_device * bdev,struct ttm_buffer_object * bo,enum ttm_bo_type type,struct ttm_placement * placement,uint32_t alignment,bool interruptible,struct sg_table * sg,struct dma_resv * resv,void (* destroy)(struct ttm_buffer_object *)) ttm_bo_init_validate() argument
1051 ttm_bo_unmap_virtual(struct ttm_buffer_object * bo) ttm_bo_unmap_virtual() argument
1070 ttm_bo_wait_ctx(struct ttm_buffer_object * bo,struct ttm_operation_ctx * ctx) ttm_bo_wait_ctx() argument
1106 ttm_bo_swapout_cb(struct ttm_lru_walk * walk,struct ttm_buffer_object * bo) ttm_bo_swapout_cb() argument
1233 ttm_bo_tt_destroy(struct ttm_buffer_object * bo) ttm_bo_tt_destroy() argument
1258 ttm_bo_populate(struct ttm_buffer_object * bo,struct ttm_operation_ctx * ctx) ttm_bo_populate() argument
[all...]
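
ttm_bo_init_reserved() is the one hit whose full prototype survives, and it anchors the usual creation flow: init returns with the BO reserved, the caller places it, then drops the reservation. A hedged sketch follows; placement setup and error ownership are simplified, and the kerneldoc should be consulted for who destroys the BO when init fails.

/* Sketch: create a kernel BO and hand it back unreserved. The GEM
 * base is initialized first, as the ttm kunit tests further down
 * also do. Destroy callback omitted for brevity. */
static struct ttm_buffer_object *
example_bo_create(struct drm_device *drm, struct ttm_device *bdev,
		  struct ttm_placement *placement, size_t size)
{
	struct ttm_operation_ctx ctx = { .interruptible = true };
	struct ttm_buffer_object *bo;
	int err;

	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
	if (!bo)
		return ERR_PTR(-ENOMEM);

	drm_gem_private_object_init(drm, &bo->base, size);

	err = ttm_bo_init_reserved(bdev, bo, ttm_bo_type_kernel,
				   placement, 0 /* no extra alignment */,
				   &ctx, NULL, NULL, NULL);
	if (err)
		return ERR_PTR(err);	/* TTM owns the cleanup here */

	dma_resv_unlock(bo->base.resv);	/* init returned it reserved */
	return bo;
}
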
ttm_bo_util.c
46 struct ttm_buffer_object *bo; member
133 * @bo: A pointer to a struct ttm_buffer_object.
140 * and update the (@bo)->mem placement flags. If unsuccessful, the old
146 int ttm_bo_move_memcpy(struct ttm_buffer_object *bo, in ttm_bo_move_memcpy() argument
150 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_memcpy()
152 ttm_manager_type(bo->bdev, dst_mem->mem_type); in ttm_bo_move_memcpy()
153 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy()
154 struct ttm_resource *src_mem = bo->resource; in ttm_bo_move_memcpy()
170 ret = ttm_bo_populate(bo, ctx); in ttm_bo_move_memcpy()
177 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo in ttm_bo_move_memcpy()
205 ttm_transfered_destroy(struct ttm_buffer_object * bo) ttm_transfered_destroy() argument
230 ttm_buffer_object_transfer(struct ttm_buffer_object * bo,struct ttm_buffer_object ** new_obj) ttm_buffer_object_transfer() argument
295 ttm_io_prot(struct ttm_buffer_object * bo,struct ttm_resource * res,pgprot_t tmp) ttm_io_prot() argument
314 ttm_bo_ioremap(struct ttm_buffer_object * bo,unsigned long offset,unsigned long size,struct ttm_bo_kmap_obj * map) ttm_bo_ioremap() argument
340 ttm_bo_kmap_ttm(struct ttm_buffer_object * bo,unsigned long start_page,unsigned long num_pages,struct ttm_bo_kmap_obj * map) ttm_bo_kmap_ttm() argument
399 ttm_bo_kmap_try_from_panic(struct ttm_buffer_object * bo,unsigned long page) ttm_bo_kmap_try_from_panic() argument
427 ttm_bo_kmap(struct ttm_buffer_object * bo,unsigned long start_page,unsigned long num_pages,struct ttm_bo_kmap_obj * map) ttm_bo_kmap() argument
500 ttm_bo_vmap(struct ttm_buffer_object * bo,struct iosys_map * map) ttm_bo_vmap() argument
569 ttm_bo_vunmap(struct ttm_buffer_object * bo,struct iosys_map * map) ttm_bo_vunmap() argument
588 ttm_bo_wait_free_node(struct ttm_buffer_object * bo,bool dst_use_tt) ttm_bo_wait_free_node() argument
606 ttm_bo_move_to_ghost(struct ttm_buffer_object * bo,struct dma_fence * fence,bool dst_use_tt) ttm_bo_move_to_ghost() argument
644 ttm_bo_move_pipeline_evict(struct ttm_buffer_object * bo,struct dma_fence * fence) ttm_bo_move_pipeline_evict() argument
682 ttm_bo_move_accel_cleanup(struct ttm_buffer_object * bo,struct dma_fence * fence,bool evict,bool pipeline,struct ttm_resource * new_mem) ttm_bo_move_accel_cleanup() argument
719 ttm_bo_move_sync_cleanup(struct ttm_buffer_object * bo,struct ttm_resource * new_mem) ttm_bo_move_sync_cleanup() argument
745 ttm_bo_pipeline_gutting(struct ttm_buffer_object * bo) ttm_bo_pipeline_gutting() argument
804 ttm_lru_walk_trylock(struct ttm_bo_lru_cursor * curs,struct ttm_buffer_object * bo) ttm_lru_walk_trylock() argument
824 ttm_lru_walk_ticketlock(struct ttm_bo_lru_cursor * curs,struct ttm_buffer_object * bo) ttm_lru_walk_ticketlock() argument
887 struct ttm_buffer_object *bo; ttm_lru_walk_for_evict() local
908 struct ttm_buffer_object *bo = curs->bo; ttm_bo_lru_cursor_cleanup_bo() local
962 struct ttm_buffer_object *bo; __ttm_bo_lru_cursor_next() local
1064 ttm_bo_shrink(struct ttm_operation_ctx * ctx,struct ttm_buffer_object * bo,const struct ttm_bo_shrink_flags flags) ttm_bo_shrink() argument
1132 ttm_bo_shrink_suitable(struct ttm_buffer_object * bo,struct ttm_operation_ctx * ctx) ttm_bo_shrink_suitable() argument
[all...]
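
ttm_bo_vmap()/ttm_bo_vunmap() above work on a struct iosys_map, which hides whether the backing store is system RAM or IO memory. A small sketch, assuming the caller already holds the BO's reservation:

#include <linux/iosys-map.h>

/* Sketch: CPU-map a whole BO, zero it through the iosys_map
 * accessor (which handles iomem vs. vaddr), and unmap again. */
static int example_bo_clear(struct ttm_buffer_object *bo)
{
	struct iosys_map map;
	int ret;

	ret = ttm_bo_vmap(bo, &map);
	if (ret)
		return ret;

	iosys_map_memset(&map, 0, 0, bo->base.size);

	ttm_bo_vunmap(bo, &map);
	return 0;
}
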
ttm_bo_vm.c
43 static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, in ttm_bo_vm_fault_idle() argument
51 if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL)) in ttm_bo_vm_fault_idle()
63 drm_gem_object_get(&bo->base); in ttm_bo_vm_fault_idle()
65 (void)dma_resv_wait_timeout(bo->base.resv, in ttm_bo_vm_fault_idle()
68 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_fault_idle()
69 drm_gem_object_put(&bo->base); in ttm_bo_vm_fault_idle()
76 err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true, in ttm_bo_vm_fault_idle()
86 static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo, in ttm_bo_io_mem_pfn() argument
89 struct ttm_device *bdev = bo->bdev; in ttm_bo_io_mem_pfn()
92 return bdev->funcs->io_mem_pfn(bo, page_offse in ttm_bo_io_mem_pfn()
118 ttm_bo_vm_reserve(struct ttm_buffer_object * bo,struct vm_fault * vmf) ttm_bo_vm_reserve() argument
188 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault_reserved() local
295 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_dummy_page() local
326 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault() local
353 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_open() local
363 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_close() local
370 ttm_bo_vm_access_kmap(struct ttm_buffer_object * bo,unsigned long offset,uint8_t * buf,int len,int write) ttm_bo_vm_access_kmap() argument
425 ttm_bo_access(struct ttm_buffer_object * bo,unsigned long offset,void * buf,int len,int write) ttm_bo_access() argument
460 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_access() local
484 ttm_bo_mmap_obj(struct vm_area_struct * vma,struct ttm_buffer_object * bo) ttm_bo_mmap_obj() argument
[all...]
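
The vm helpers above are meant to be composed by drivers: ttm_bo_vm_reserve() resolves the lock ordering between mmap_lock and the BO reservation, and ttm_bo_vm_fault_reserved() does the actual PTE insertion. A sketch of a driver fault handler in the shape several TTM drivers use; the exact ttm_bo_vm_fault_reserved() argument list is assumed from current mainline.

/* Sketch of a driver .fault handler built from the helpers above. */
static vm_fault_t example_fault(struct vm_fault *vmf)
{
	struct ttm_buffer_object *bo = vmf->vma->vm_private_data;
	vm_fault_t ret;

	ret = ttm_bo_vm_reserve(bo, vmf);
	if (ret)
		return ret;	/* reservation was not taken */

	ret = ttm_bo_vm_fault_reserved(vmf, vmf->vma->vm_page_prot,
				       TTM_BO_VM_NUM_PREFAULT);
	/* On RETRY without NOWAIT the helper already dropped the
	 * reservation for us. */
	if (ret == VM_FAULT_RETRY &&
	    !(vmf->flags & FAULT_FLAG_RETRY_NOWAIT))
		return ret;

	dma_resv_unlock(bo->base.resv);
	return ret;
}
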
/linux/drivers/gpu/drm/qxl/
qxl_object.c
34 struct qxl_bo *bo; in qxl_ttm_bo_destroy() local
37 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
38 qdev = to_qxl(bo->tbo.base.dev); in qxl_ttm_bo_destroy()
40 qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
41 WARN_ON_ONCE(bo->map_count > 0); in qxl_ttm_bo_destroy()
43 list_del_init(&bo->list); in qxl_ttm_bo_destroy()
45 drm_gem_object_release(&bo->tbo.base); in qxl_ttm_bo_destroy()
46 kfree(bo); in qxl_ttm_bo_destroy()
49 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) in qxl_ttm_bo_is_qxl_bo() argument
51 if (bo in qxl_ttm_bo_is_qxl_bo()
110 struct qxl_bo *bo; qxl_bo_create() local
156 qxl_bo_vmap_locked(struct qxl_bo * bo,struct iosys_map * map) qxl_bo_vmap_locked() argument
185 qxl_bo_pin_and_vmap(struct qxl_bo * bo,struct iosys_map * map) qxl_bo_pin_and_vmap() argument
207 qxl_bo_kmap_atomic_page(struct qxl_device * qdev,struct qxl_bo * bo,int page_offset) qxl_bo_kmap_atomic_page() argument
239 qxl_bo_vunmap_locked(struct qxl_bo * bo) qxl_bo_vunmap_locked() argument
252 qxl_bo_vunmap_and_unpin(struct qxl_bo * bo) qxl_bo_vunmap_and_unpin() argument
267 qxl_bo_kunmap_atomic_page(struct qxl_device * qdev,struct qxl_bo * bo,void * pmap) qxl_bo_kunmap_atomic_page() argument
279 qxl_bo_unref(struct qxl_bo ** bo) qxl_bo_unref() argument
288 qxl_bo_ref(struct qxl_bo * bo) qxl_bo_ref() argument
294 qxl_bo_pin_locked(struct qxl_bo * bo) qxl_bo_pin_locked() argument
315 qxl_bo_unpin_locked(struct qxl_bo * bo) qxl_bo_unpin_locked() argument
327 qxl_bo_pin(struct qxl_bo * bo) qxl_bo_pin() argument
345 qxl_bo_unpin(struct qxl_bo * bo) qxl_bo_unpin() argument
360 struct qxl_bo *bo, *n; qxl_bo_force_delete() local
387 qxl_bo_check_id(struct qxl_device * qdev,struct qxl_bo * bo) qxl_bo_check_id() argument
[all...]
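
The qxl helpers pair pinning with mapping, so a CPU access bracket is symmetric. A sketch:

/* Sketch: pin + map, touch the buffer, then undo both in one call. */
static int example_qxl_access(struct qxl_bo *bo)
{
	struct iosys_map map;
	int ret;

	ret = qxl_bo_pin_and_vmap(bo, &map);
	if (ret)
		return ret;

	/* ... read or write through the mapping in 'map' ... */

	qxl_bo_vunmap_and_unpin(bo);
	return 0;
}
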
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo) in qxl_bo_reserve() argument
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL); in qxl_bo_reserve()
37 struct drm_device *ddev = bo->tbo.base.dev; in qxl_bo_reserve()
39 dev_err(ddev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
46 static inline void qxl_bo_unreserve(struct qxl_bo *bo) in qxl_bo_unreserve() argument
48 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
51 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) in qxl_bo_size() argument
53 return bo->tbo.base.size; in qxl_bo_size()
62 int qxl_bo_pin_and_vmap(struct qxl_bo *bo, struct iosys_map *map);
63 int qxl_bo_vmap_locked(struct qxl_bo *bo, struc
[all...]
/linux/drivers/accel/ivpu/
ivpu_gem.c
27 static inline void ivpu_dbg_bo(struct ivpu_device *vdev, struct ivpu_bo *bo, const char *action) in ivpu_dbg_bo() argument
30 "%6s: bo %8p vpu_addr %9llx size %8zu ctx %d has_pages %d dma_mapped %d mmu_mapped %d wc %d imported %d\n", in ivpu_dbg_bo()
31 action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx_id, in ivpu_dbg_bo()
32 (bool)bo->base.pages, (bool)bo->base.sgt, bo->mmu_mapped, bo in ivpu_dbg_bo()
36 ivpu_bo_lock(struct ivpu_bo * bo) ivpu_bo_lock() argument
41 ivpu_bo_unlock(struct ivpu_bo * bo) ivpu_bo_unlock() argument
53 ivpu_bo_pin(struct ivpu_bo * bo) ivpu_bo_pin() argument
88 ivpu_bo_alloc_vpu_addr(struct ivpu_bo * bo,struct ivpu_mmu_context * ctx,const struct ivpu_addr_range * range) ivpu_bo_alloc_vpu_addr() argument
114 ivpu_bo_unbind_locked(struct ivpu_bo * bo) ivpu_bo_unbind_locked() argument
146 struct ivpu_bo *bo; ivpu_bo_unbind_all_bos_from_context() local
165 struct ivpu_bo *bo; ivpu_gem_create_object() local
226 struct ivpu_bo *bo; ivpu_bo_alloc() local
258 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_gem_bo_open() local
280 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_gem_bo_free() local
320 struct ivpu_bo *bo; ivpu_bo_create_ioctl() local
353 struct ivpu_bo *bo; ivpu_bo_create() local
399 ivpu_bo_free(struct ivpu_bo * bo) ivpu_bo_free() argument
416 struct ivpu_bo *bo; ivpu_bo_info_ioctl() local
465 ivpu_bo_print_info(struct ivpu_bo * bo,struct drm_printer * p) ivpu_bo_print_info() argument
490 struct ivpu_bo *bo; ivpu_bo_list() local
[all...]
ivpu_gem.h
27 int ivpu_bo_pin(struct ivpu_bo *bo);
35 void ivpu_bo_free(struct ivpu_bo *bo);
49 static inline void *ivpu_bo_vaddr(struct ivpu_bo *bo) in ivpu_bo_vaddr() argument
51 return bo->base.vaddr; in ivpu_bo_vaddr()
54 static inline size_t ivpu_bo_size(struct ivpu_bo *bo) in ivpu_bo_size() argument
56 return bo->base.base.size; in ivpu_bo_size()
59 static inline u32 ivpu_bo_cache_mode(struct ivpu_bo *bo) in ivpu_bo_cache_mode() argument
61 return bo->flags & DRM_IVPU_BO_CACHE_MASK; in ivpu_bo_cache_mode()
64 static inline struct ivpu_device *ivpu_bo_to_vdev(struct ivpu_bo *bo) in ivpu_bo_to_vdev() argument
66 return to_ivpu_device(bo in ivpu_bo_to_vdev()
69 ivpu_bo_is_snooped(struct ivpu_bo * bo) ivpu_bo_is_snooped() argument
77 ivpu_to_cpu_addr(struct ivpu_bo * bo,u32 vpu_addr) ivpu_to_cpu_addr() argument
88 cpu_to_vpu_addr(struct ivpu_bo * bo,void * cpu_addr) cpu_to_vpu_addr() argument
[all...]
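
ivpu_gem.h carries small inline helpers that translate between the CPU mapping of a BO and its VPU address space. A sketch of a round trip, assuming out-of-range inputs yield NULL/0, as the helper names suggest:

/* Sketch: translate a CPU pointer into the VPU address space and
 * back, checking that we land where we started. */
static bool example_ivpu_roundtrip(struct ivpu_bo *bo)
{
	void *cpu = ivpu_bo_vaddr(bo);
	u32 vpu = cpu_to_vpu_addr(bo, cpu);

	return vpu && ivpu_to_cpu_addr(bo, vpu) == cpu;
}
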
/linux/drivers/gpu/drm/radeon/
radeon_object.c
45 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
54 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
56 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
58 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
59 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
60 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
61 radeon_bo_clear_surface_reg(bo); in radeon_ttm_bo_destroy()
62 WARN_ON_ONCE(!list_empty(&bo->va)); in radeon_ttm_bo_destroy()
63 if (bo->tbo.base.import_attach) in radeon_ttm_bo_destroy()
64 drm_prime_gem_destroy(&bo in radeon_ttm_bo_destroy()
69 radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object * bo) radeon_ttm_bo_is_radeon_bo() argument
134 struct radeon_bo *bo; radeon_bo_create() local
218 radeon_bo_kmap(struct radeon_bo * bo,void ** ptr) radeon_bo_kmap() argument
246 radeon_bo_kunmap(struct radeon_bo * bo) radeon_bo_kunmap() argument
255 radeon_bo_ref(struct radeon_bo * bo) radeon_bo_ref() argument
264 radeon_bo_unref(struct radeon_bo ** bo) radeon_bo_unref() argument
272 radeon_bo_pin_restricted(struct radeon_bo * bo,u32 domain,u64 max_offset,u64 * gpu_addr) radeon_bo_pin_restricted() argument
331 radeon_bo_pin(struct radeon_bo * bo,u32 domain,u64 * gpu_addr) radeon_bo_pin() argument
336 radeon_bo_unpin(struct radeon_bo * bo) radeon_bo_unpin() argument
368 struct radeon_bo *bo, *n; radeon_bo_force_delete() local
487 struct radeon_bo *bo = lobj->robj; radeon_bo_list_validate() local
535 radeon_bo_get_surface_reg(struct radeon_bo * bo) radeon_bo_get_surface_reg() argument
589 radeon_bo_clear_surface_reg(struct radeon_bo * bo) radeon_bo_clear_surface_reg() argument
604 radeon_bo_set_tiling_flags(struct radeon_bo * bo,uint32_t tiling_flags,uint32_t pitch) radeon_bo_set_tiling_flags() argument
664 radeon_bo_get_tiling_flags(struct radeon_bo * bo,uint32_t * tiling_flags,uint32_t * pitch) radeon_bo_get_tiling_flags() argument
676 radeon_bo_check_tiling(struct radeon_bo * bo,bool has_moved,bool force_drop) radeon_bo_check_tiling() argument
705 radeon_bo_move_notify(struct ttm_buffer_object * bo) radeon_bo_move_notify() argument
717 radeon_bo_fault_reserve_notify(struct ttm_buffer_object * bo) radeon_bo_fault_reserve_notify() argument
779 radeon_bo_fence(struct radeon_bo * bo,struct radeon_fence * fence,bool shared) radeon_bo_fence() argument
[all...]
radeon_object.h
56 * radeon_bo_reserve - reserve bo
57 * @bo: bo structure
64 static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr) in radeon_bo_reserve() argument
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL); in radeon_bo_reserve()
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo); in radeon_bo_reserve()
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) in radeon_bo_unreserve() argument
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
83 * radeon_bo_gpu_offset - return GPU offset of bo
91 radeon_bo_gpu_offset(struct radeon_bo * bo) radeon_bo_gpu_offset() argument
110 radeon_bo_size(struct radeon_bo * bo) radeon_bo_size() argument
115 radeon_bo_ngpu_pages(struct radeon_bo * bo) radeon_bo_ngpu_pages() argument
120 radeon_bo_gpu_page_alignment(struct radeon_bo * bo) radeon_bo_gpu_page_alignment() argument
131 radeon_bo_mmap_offset(struct radeon_bo * bo) radeon_bo_mmap_offset() argument
[all...]
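
radeon_object.h documents the reserve/unreserve bracket that the .c hits rely on; radeon_bo_kmap() is only valid while the BO is reserved. A sketch that reads one word out of a BO:

/* Sketch: reserve interruptibly, CPU-map, read, and tear down. */
static int example_radeon_peek(struct radeon_bo *bo, u32 *out)
{
	void *ptr;
	int r;

	r = radeon_bo_reserve(bo, false);	/* false = interruptible */
	if (r)
		return r;

	r = radeon_bo_kmap(bo, &ptr);
	if (!r) {
		*out = *(u32 *)ptr;
		radeon_bo_kunmap(bo);
	}

	radeon_bo_unreserve(bo);
	return r;
}
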
radeon_mn.c
53 struct radeon_bo *bo = container_of(mn, struct radeon_bo, notifier); in radeon_mn_invalidate() local
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
63 r = radeon_bo_reserve(bo, true); in radeon_mn_invalidate()
65 DRM_ERROR("(%ld) failed to reserve user bo\n", r); in radeon_mn_invalidate()
69 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP, in radeon_mn_invalidate()
72 DRM_ERROR("(%ld) failed to wait for user bo\n", r); in radeon_mn_invalidate()
74 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU); in radeon_mn_invalidate()
75 r = ttm_bo_validate(&bo in radeon_mn_invalidate()
96 radeon_mn_register(struct radeon_bo * bo,unsigned long addr) radeon_mn_register() argument
122 radeon_mn_unregister(struct radeon_bo * bo) radeon_mn_unregister() argument
[all...]
/linux/drivers/gpu/drm/xe/
xe_bo.h
87 void xe_bo_free(struct xe_bo *bo);
89 struct xe_bo *___xe_bo_create_locked(struct xe_device *xe, struct xe_bo *bo,
127 int xe_bo_placement_for_flags(struct xe_device *xe, struct xe_bo *bo,
130 static inline struct xe_bo *ttm_to_xe_bo(const struct ttm_buffer_object *bo) in ttm_to_xe_bo() argument
132 return container_of(bo, struct xe_bo, ttm); in ttm_to_xe_bo()
140 #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev) argument
142 static inline struct xe_bo *xe_bo_get(struct xe_bo *bo) in xe_bo_get() argument
144 if (bo) in xe_bo_get()
145 drm_gem_object_get(&bo in xe_bo_get()
166 xe_bo_get_unless_zero(struct xe_bo * bo) xe_bo_get_unless_zero() argument
174 __xe_bo_unset_bulk_move(struct xe_bo * bo) __xe_bo_unset_bulk_move() argument
180 xe_bo_assert_held(struct xe_bo * bo) xe_bo_assert_held() argument
190 xe_bo_unlock_vm_held(struct xe_bo * bo) xe_bo_unlock_vm_held() argument
207 xe_bo_is_pinned(struct xe_bo * bo) xe_bo_is_pinned() argument
212 xe_bo_is_protected(const struct xe_bo * bo) xe_bo_is_protected() argument
217 xe_bo_unpin_map_no_vm(struct xe_bo * bo) xe_bo_unpin_map_no_vm() argument
233 xe_bo_main_addr(struct xe_bo * bo,size_t page_size) xe_bo_main_addr() argument
246 xe_bo_size(struct xe_bo * bo) xe_bo_size() argument
252 __xe_bo_ggtt_addr(struct xe_bo * bo,u8 tile_id) __xe_bo_ggtt_addr() argument
265 xe_bo_ggtt_addr(struct xe_bo * bo) xe_bo_ggtt_addr() argument
311 xe_bo_ccs_pages_start(struct xe_bo * bo) xe_bo_ccs_pages_start() argument
316 xe_bo_has_pages(struct xe_bo * bo) xe_bo_has_pages() argument
351 xe_bo_put_deferred(struct xe_bo * bo,struct llist_head * deferred) xe_bo_put_deferred() argument
373 xe_bo_put_async(struct xe_bo * bo) xe_bo_put_async() argument
433 xe_bo_is_mem_type(struct xe_bo * bo,u32 mem_type) xe_bo_is_mem_type() argument
[all...]
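
xe_bo.h shows a two-phase unreference: xe_bo_put_deferred() parks BOs whose last reference dies in a context that cannot take the locks needed to free them, and xe_bo_put_commit() later performs the actual frees. A sketch; xe_bo_put_commit() taking the llist head is inferred from the hits.

#include <linux/llist.h>

/* Sketch: drop many references from a lock-averse context, then
 * commit the deferred frees in one batch. */
static void example_xe_put_many(struct xe_bo **bos, int n)
{
	LLIST_HEAD(deferred);
	int i;

	for (i = 0; i < n; i++)
		xe_bo_put_deferred(bos[i], &deferred);

	/* Frees every BO whose last reference was dropped above. */
	xe_bo_put_commit(&deferred);
}
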
xe_bo.c
97 bool xe_bo_is_vram(struct xe_bo *bo) in xe_bo_is_vram() argument
99 return resource_is_vram(bo->ttm.resource) || in xe_bo_is_vram()
100 resource_is_stolen_vram(xe_bo_device(bo), bo->ttm.resource); in xe_bo_is_vram()
103 bool xe_bo_is_stolen(struct xe_bo *bo) in xe_bo_is_stolen() argument
105 return bo->ttm.resource->mem_type == XE_PL_STOLEN; in xe_bo_is_stolen()
110 * @bo: The BO
117 bool xe_bo_has_single_placement(struct xe_bo *bo) in xe_bo_has_single_placement() argument
119 return bo->placement.num_placement == 1; in xe_bo_has_single_placement()
124 * @bo
131 xe_bo_is_stolen_devmem(struct xe_bo * bo) xe_bo_is_stolen_devmem() argument
146 xe_bo_is_vm_bound(struct xe_bo * bo) xe_bo_is_vm_bound() argument
153 xe_bo_is_user(struct xe_bo * bo) xe_bo_is_user() argument
181 try_add_system(struct xe_device * xe,struct xe_bo * bo,u32 bo_flags,u32 * c) try_add_system() argument
210 add_vram(struct xe_device * xe,struct xe_bo * bo,struct ttm_place * places,u32 bo_flags,u32 mem_type,u32 * c) add_vram() argument
241 try_add_vram(struct xe_device * xe,struct xe_bo * bo,u32 bo_flags,u32 * c) try_add_vram() argument
250 try_add_stolen(struct xe_device * xe,struct xe_bo * bo,u32 bo_flags,u32 * c) try_add_stolen() argument
265 __xe_bo_placement_for_flags(struct xe_device * xe,struct xe_bo * bo,u32 bo_flags) __xe_bo_placement_for_flags() argument
285 xe_bo_placement_for_flags(struct xe_device * xe,struct xe_bo * bo,u32 bo_flags) xe_bo_placement_for_flags() argument
297 struct xe_bo *bo; xe_evict_flags() local
391 xe_bo_sg(struct xe_bo * bo) xe_bo_sg() argument
439 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_ttm_tt_create() local
614 xe_bo_trigger_rebind(struct xe_device * xe,struct xe_bo * bo,const struct ttm_operation_ctx * ctx) xe_bo_trigger_rebind() argument
745 xe_bo_move_notify(struct xe_bo * bo,const struct ttm_operation_ctx * ctx) xe_bo_move_notify() argument
792 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_bo_move() local
984 xe_bo_shrink_purge(struct ttm_operation_ctx * ctx,struct ttm_buffer_object * bo,unsigned long * scanned) xe_bo_shrink_purge() argument
1019 xe_bo_eviction_valuable(struct ttm_buffer_object * bo,const struct ttm_place * place) xe_bo_eviction_valuable() argument
1055 xe_bo_shrink(struct ttm_operation_ctx * ctx,struct ttm_buffer_object * bo,const struct xe_bo_shrink_flags flags,unsigned long * scanned) xe_bo_shrink() argument
1118 xe_bo_notifier_prepare_pinned(struct xe_bo * bo) xe_bo_notifier_prepare_pinned() argument
1169 xe_bo_notifier_unprepare_pinned(struct xe_bo * bo) xe_bo_notifier_unprepare_pinned() argument
1193 xe_bo_evict_pinned(struct xe_bo * bo) xe_bo_evict_pinned() argument
1303 xe_bo_restore_pinned(struct xe_bo * bo) xe_bo_restore_pinned() argument
1387 xe_bo_dma_unmap_pinned(struct xe_bo * bo) xe_bo_dma_unmap_pinned() argument
1416 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_ttm_io_mem_pfn() local
1460 struct xe_bo *bo; xe_ttm_bo_release_notify() local
1554 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_ttm_access_memory() local
1621 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_ttm_bo_destroy() local
1677 struct xe_bo *bo = gem_to_xe_bo(obj); xe_gem_object_close() local
1693 struct xe_bo *bo = ttm_to_xe_bo(tbo); xe_gem_fault() local
1739 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); xe_bo_vm_access() local
1761 xe_bo_read(struct xe_bo * bo,u64 offset,void * dst,int size) xe_bo_read() argument
1804 struct xe_bo *bo = kzalloc(sizeof(*bo), GFP_KERNEL); xe_bo_alloc() local
1818 xe_bo_free(struct xe_bo * bo) xe_bo_free() argument
1823 ___xe_bo_create_locked(struct xe_device * xe,struct xe_bo * bo,struct xe_tile * tile,struct dma_resv * resv,struct ttm_lru_bulk_move * bulk,size_t size,u16 cpu_caching,enum ttm_bo_type type,u32 flags) ___xe_bo_create_locked() argument
1955 __xe_bo_fixed_placement(struct xe_device * xe,struct xe_bo * bo,u32 flags,u64 start,u64 end,u64 size) __xe_bo_fixed_placement() argument
1999 struct xe_bo *bo = NULL; __xe_bo_create_locked() local
2098 struct xe_bo *bo = __xe_bo_create_locked(xe, tile, vm, size, 0, ~0ULL, xe_bo_create_user() local
2111 struct xe_bo *bo = xe_bo_create_locked(xe, tile, vm, size, type, flags); xe_bo_create() local
2135 struct xe_bo *bo; xe_bo_create_pin_map_at_aligned() local
2185 struct xe_bo *bo; xe_managed_bo_create_pin_map() local
2204 struct xe_bo *bo = xe_managed_bo_create_pin_map(xe, tile, ALIGN(size, PAGE_SIZE), flags); xe_managed_bo_create_from_data() local
2229 struct xe_bo *bo; xe_managed_bo_reinit_in_vram() local
2279 xe_bo_pin_external(struct xe_bo * bo) xe_bo_pin_external() argument
2310 xe_bo_pin(struct xe_bo * bo) xe_bo_pin() argument
2368 xe_bo_unpin_external(struct xe_bo * bo) xe_bo_unpin_external() argument
2392 xe_bo_unpin(struct xe_bo * bo) xe_bo_unpin() argument
2434 xe_bo_validate(struct xe_bo * bo,struct xe_vm * vm,bool allow_res_evict) xe_bo_validate() argument
2460 xe_bo_is_xe_bo(struct ttm_buffer_object * bo) xe_bo_is_xe_bo() argument
2474 __xe_bo_addr(struct xe_bo * bo,u64 offset,size_t page_size) __xe_bo_addr() argument
2499 xe_bo_addr(struct xe_bo * bo,u64 offset,size_t page_size) xe_bo_addr() argument
2506 xe_bo_vmap(struct xe_bo * bo) xe_bo_vmap() argument
2542 __xe_bo_vunmap(struct xe_bo * bo) __xe_bo_vunmap() argument
2550 xe_bo_vunmap(struct xe_bo * bo) xe_bo_vunmap() argument
2556 gem_create_set_pxp_type(struct xe_device * xe,struct xe_bo * bo,u64 value) gem_create_set_pxp_type() argument
2577 gem_create_user_ext_set_property(struct xe_device * xe,struct xe_bo * bo,u64 extension) gem_create_user_ext_set_property() argument
2611 gem_create_user_extensions(struct xe_device * xe,struct xe_bo * bo,u64 extensions,int ext_number) gem_create_user_extensions() argument
2651 struct xe_bo *bo; xe_gem_create_ioctl() local
2826 xe_bo_lock(struct xe_bo * bo,bool intr) xe_bo_lock() argument
2842 xe_bo_unlock(struct xe_bo * bo) xe_bo_unlock() argument
2862 xe_bo_can_migrate(struct xe_bo * bo,u32 mem_type) xe_bo_can_migrate() argument
2902 xe_bo_migrate(struct xe_bo * bo,u32 mem_type) xe_bo_migrate() argument
2952 xe_bo_evict(struct xe_bo * bo) xe_bo_evict() argument
2980 xe_bo_needs_ccs_pages(struct xe_bo * bo) xe_bo_needs_ccs_pages() argument
3030 struct xe_bo *bo, *next; xe_bo_put_commit() local
3068 xe_bo_put(struct xe_bo * bo) xe_bo_put() argument
3101 struct xe_bo *bo; xe_bo_dumb_create() local
3128 xe_bo_runtime_pm_release_mmap_offset(struct xe_bo * bo) xe_bo_runtime_pm_release_mmap_offset() argument
[all...]
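
xe_bo.c's lock/unlock pair brackets most of its operations, migration among them. A hedged sketch; that xe_bo_migrate() wants the BO locked is an assumption drawn from the neighbouring helpers.

/* Sketch: move a BO to a given TTM memory type under the BO lock. */
static int example_xe_move(struct xe_bo *bo, u32 mem_type)
{
	int err;

	err = xe_bo_lock(bo, true);	/* interruptible */
	if (err)
		return err;

	if (xe_bo_can_migrate(bo, mem_type))
		err = xe_bo_migrate(bo, mem_type);
	else
		err = -EINVAL;

	xe_bo_unlock(bo);
	return err;
}
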
/linux/drivers/gpu/drm/tegra/
gem.c
51 static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put() argument
53 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_put()
58 static struct host1x_bo_mapping *tegra_bo_pin(struct device *dev, struct host1x_bo *bo, in tegra_bo_pin() argument
61 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_pin()
71 map->bo = host1x_bo_get(bo); in tegra_bo_pin()
172 host1x_bo_put(map->bo); in tegra_bo_unpin()
176 static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap() argument
178 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_mmap()
202 static void tegra_bo_munmap(struct host1x_bo *bo, voi argument
216 tegra_bo_get(struct host1x_bo * bo) tegra_bo_get() argument
234 tegra_bo_iommu_map(struct tegra_drm * tegra,struct tegra_bo * bo) tegra_bo_iommu_map() argument
277 tegra_bo_iommu_unmap(struct tegra_drm * tegra,struct tegra_bo * bo) tegra_bo_iommu_unmap() argument
301 struct tegra_bo *bo; tegra_bo_alloc_object() local
330 tegra_bo_free(struct drm_device * drm,struct tegra_bo * bo) tegra_bo_free() argument
342 tegra_bo_get_pages(struct drm_device * drm,struct tegra_bo * bo) tegra_bo_get_pages() argument
372 tegra_bo_alloc(struct drm_device * drm,struct tegra_bo * bo) tegra_bo_alloc() argument
406 struct tegra_bo *bo; tegra_bo_create() local
437 struct tegra_bo *bo; tegra_bo_create_with_handle() local
460 struct tegra_bo *bo; tegra_bo_import() local
512 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_free_object() local
547 struct tegra_bo *bo; tegra_bo_dumb_create() local
564 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_fault() local
585 struct tegra_bo *bo = to_tegra_bo(gem); __tegra_gem_mmap() local
637 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_map_dma_buf() local
670 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_unmap_dma_buf() local
688 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_begin_cpu_access() local
701 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_end_cpu_access() local
725 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_vmap() local
740 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_vunmap() local
774 struct tegra_bo *bo; tegra_gem_prime_import() local
795 struct tegra_bo *bo; tegra_gem_lookup() local
[all...]
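
The tegra functions above are the backend of a host1x_bo: get/put for refcounting, pin/unpin for DMA, mmap/munmap for CPU access. They are presumably collected into a host1x_bo_ops vtable along these lines (member names per include/linux/host1x.h):

static const struct host1x_bo_ops example_bo_ops = {
	.get = tegra_bo_get,
	.put = tegra_bo_put,
	.pin = tegra_bo_pin,
	.unpin = tegra_bo_unpin,
	.mmap = tegra_bo_mmap,
	.munmap = tegra_bo_munmap,
};
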
/linux/drivers/gpu/drm/panfrost/
panfrost_gem.c
17 struct panfrost_gem_object *bo) in panfrost_gem_debugfs_bo_add() argument
19 bo->debugfs.creator.tgid = current->group_leader->pid; in panfrost_gem_debugfs_bo_add()
20 get_task_comm(bo->debugfs.creator.process_name, current->group_leader); in panfrost_gem_debugfs_bo_add()
23 list_add_tail(&bo->debugfs.node, &pfdev->debugfs.gems_list); in panfrost_gem_debugfs_bo_add()
27 static void panfrost_gem_debugfs_bo_rm(struct panfrost_gem_object *bo) in panfrost_gem_debugfs_bo_rm() argument
29 struct panfrost_device *pfdev = bo->base.base.dev->dev_private; in panfrost_gem_debugfs_bo_rm()
31 if (list_empty(&bo->debugfs.node)) in panfrost_gem_debugfs_bo_rm()
35 list_del_init(&bo->debugfs.node); in panfrost_gem_debugfs_bo_rm()
40 struct panfrost_gem_object *bo) in panfrost_gem_debugfs_bo_add() argument
42 static void panfrost_gem_debugfs_bo_rm(struct panfrost_gem_object *bo) {} in panfrost_gem_debugfs_bo_rm() argument
[all...]
/linux/drivers/gpu/drm/panthor/
panthor_gem.c
19 static void panthor_gem_debugfs_bo_init(struct panthor_gem_object *bo) in panthor_gem_debugfs_bo_init() argument
21 INIT_LIST_HEAD(&bo->debugfs.node); in panthor_gem_debugfs_bo_init()
24 static void panthor_gem_debugfs_bo_add(struct panthor_gem_object *bo) in panthor_gem_debugfs_bo_add() argument
26 struct panthor_device *ptdev = container_of(bo->base.base.dev, in panthor_gem_debugfs_bo_add()
29 bo->debugfs.creator.tgid = current->group_leader->pid; in panthor_gem_debugfs_bo_add()
30 get_task_comm(bo->debugfs.creator.process_name, current->group_leader); in panthor_gem_debugfs_bo_add()
33 list_add_tail(&bo->debugfs.node, &ptdev->gems.node); in panthor_gem_debugfs_bo_add()
37 static void panthor_gem_debugfs_bo_rm(struct panthor_gem_object *bo) in panthor_gem_debugfs_bo_rm() argument
39 struct panthor_device *ptdev = container_of(bo->base.base.dev, in panthor_gem_debugfs_bo_rm()
42 if (list_empty(&bo in panthor_gem_debugfs_bo_rm()
50 panthor_gem_debugfs_set_usage_flags(struct panthor_gem_object * bo,u32 usage_flags) panthor_gem_debugfs_set_usage_flags() argument
56 panthor_gem_debugfs_bo_rm(struct panthor_gem_object * bo) panthor_gem_debugfs_bo_rm() argument
57 panthor_gem_debugfs_set_usage_flags(struct panthor_gem_object * bo,u32 usage_flags) panthor_gem_debugfs_set_usage_flags() argument
58 panthor_gem_debugfs_bo_init(struct panthor_gem_object * bo) panthor_gem_debugfs_bo_init() argument
63 struct panthor_gem_object *bo = to_panthor_bo(obj); panthor_gem_free_object() local
87 panthor_kernel_bo_destroy(struct panthor_kernel_bo * bo) panthor_kernel_bo_destroy() argument
136 struct panthor_gem_object *bo; panthor_kernel_bo_create() local
207 struct panthor_gem_object *bo = to_panthor_bo(obj); panthor_gem_status() local
277 struct panthor_gem_object *bo; panthor_gem_create_with_handle() local
311 struct panthor_gem_object *bo = to_panthor_bo(obj); panthor_gem_bo_set_label() local
323 panthor_gem_kernel_bo_set_label(struct panthor_kernel_bo * bo,const char * label) panthor_gem_kernel_bo_set_label() argument
383 panthor_gem_debugfs_bo_print(struct panthor_gem_object * bo,struct seq_file * m,struct gem_size_totals * totals) panthor_gem_debugfs_bo_print() argument
430 struct panthor_gem_object *bo; panthor_gem_debugfs_print_bos() local
[all...]
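
Both panfrost and panthor list each debugfs helper twice: once with a body and once as an empty stub (panfrost's hit at line 42 even keeps the {} inline). That is the usual compile-out arrangement; the guard is presumably CONFIG_DEBUG_FS:

#ifdef CONFIG_DEBUG_FS
static void panfrost_gem_debugfs_bo_rm(struct panfrost_gem_object *bo)
{
	/* unlink bo->debugfs.node from the device list, under a lock */
}
#else
static void panfrost_gem_debugfs_bo_rm(struct panfrost_gem_object *bo) {}
#endif
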
/linux/drivers/gpu/drm/vc4/
vc4_bo.c
134 struct vc4_bo *bo = to_vc4_bo(gem_obj); in vc4_bo_set_label() local
144 vc4->bo_labels[bo->label].num_allocated--; in vc4_bo_set_label()
145 vc4->bo_labels[bo->label].size_allocated -= gem_obj->size; in vc4_bo_set_label()
147 if (vc4->bo_labels[bo->label].num_allocated == 0 && in vc4_bo_set_label()
148 is_user_label(bo->label)) { in vc4_bo_set_label()
154 kfree(vc4->bo_labels[bo->label].name); in vc4_bo_set_label()
155 vc4->bo_labels[bo->label].name = NULL; in vc4_bo_set_label()
158 bo->label = label; in vc4_bo_set_label()
166 static void vc4_bo_destroy(struct vc4_bo *bo) in vc4_bo_destroy() argument
168 struct drm_gem_object *obj = &bo in vc4_bo_destroy()
186 vc4_bo_remove_from_cache(struct vc4_bo * bo) vc4_bo_remove_from_cache() argument
242 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_purge() local
250 vc4_bo_add_to_purgeable_pool(struct vc4_bo * bo) vc4_bo_add_to_purgeable_pool() argument
264 vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo * bo) vc4_bo_remove_from_purgeable_pool_locked() argument
288 vc4_bo_remove_from_purgeable_pool(struct vc4_bo * bo) vc4_bo_remove_from_purgeable_pool() argument
299 struct vc4_bo *bo = to_vc4_bo(obj); vc4_bo_purge() local
318 struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list, vc4_bo_userspace_cache_purge() local
365 struct vc4_bo *bo = NULL; vc4_bo_get_from_cache() local
397 struct vc4_bo *bo; vc4_create_object() local
428 struct vc4_bo *bo; vc4_bo_create() local
496 struct vc4_bo *bo = NULL; vc4_bo_dumb_create() local
526 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_free_old() local
547 struct vc4_bo *bo = to_vc4_bo(gem_bo); vc4_free_object() local
620 vc4_bo_inc_usecnt(struct vc4_bo * bo) vc4_bo_inc_usecnt() argument
660 vc4_bo_dec_usecnt(struct vc4_bo * bo) vc4_bo_dec_usecnt() argument
689 struct vc4_bo *bo = to_vc4_bo(obj); vc4_prime_export() local
720 struct vc4_bo *bo = to_vc4_bo(obj); vc4_fault() local
734 struct vc4_bo *bo = to_vc4_bo(obj); vc4_gem_object_mmap() local
783 struct vc4_bo *bo = NULL; vc4_create_bo_ioctl() local
839 struct vc4_bo *bo = NULL; vc4_create_shader_bo_ioctl() local
918 struct vc4_bo *bo; vc4_set_tiling_ioctl() local
965 struct vc4_bo *bo; vc4_get_tiling_ioctl() local
[all...]
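
vc4 BOs can be marked purgeable by userspace, so kernel paths that depend on their contents must hold a use count; vc4_bo_inc_usecnt() fails if the BO was already purged. A sketch of the bracket:

/* Sketch: keep a purgeable BO's contents alive across an access. */
static int example_vc4_access(struct vc4_bo *bo)
{
	int ret = vc4_bo_inc_usecnt(bo);	/* -errno if purged */

	if (ret)
		return ret;

	/* ... contents are now protected from vc4_bo_purge() ... */

	vc4_bo_dec_usecnt(bo);
	return 0;
}
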
/linux/drivers/gpu/drm/lima/
lima_gem.c
21 int lima_heap_alloc(struct lima_bo *bo, struct lima_vm *vm) in lima_heap_alloc() argument
24 struct address_space *mapping = bo->base.base.filp->f_mapping; in lima_heap_alloc()
25 struct device *dev = bo->base.base.dev->dev; in lima_heap_alloc()
26 size_t old_size = bo->heap_size; in lima_heap_alloc()
27 size_t new_size = bo->heap_size ? bo->heap_size * 2 : in lima_heap_alloc()
32 if (bo->heap_size >= bo->base.base.size) in lima_heap_alloc()
35 new_size = min(new_size, bo->base.base.size); in lima_heap_alloc()
37 dma_resv_lock(bo in lima_heap_alloc()
115 struct lima_bo *bo; lima_gem_create_handle() local
155 struct lima_bo *bo = to_lima_bo(obj); lima_gem_free_object() local
165 struct lima_bo *bo = to_lima_bo(obj); lima_gem_object_open() local
174 struct lima_bo *bo = to_lima_bo(obj); lima_gem_object_close() local
183 struct lima_bo *bo = to_lima_bo(obj); lima_gem_pin() local
193 struct lima_bo *bo = to_lima_bo(obj); lima_gem_vmap() local
203 struct lima_bo *bo = to_lima_bo(obj); lima_gem_mmap() local
227 struct lima_bo *bo; lima_gem_create_object() local
244 struct lima_bo *bo; lima_gem_get_info() local
262 lima_gem_sync_bo(struct lima_sched_task * task,struct lima_bo * bo,bool write,bool explicit) lima_gem_sync_bo() argument
315 struct lima_bo *bo; lima_gem_submit() local
[all...]
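
The lima_heap_alloc() hits show the heap's growth policy: double the backing store on each growth step, clamp to the GEM object size, and bail once the heap already fills the object. Isolated as arithmetic (LIMA_HEAP_INIT_SIZE is a stand-in for the driver's real initial size):

/* Sketch of the growth step only; not the driver's code. */
static size_t example_heap_next_size(size_t heap_size, size_t obj_size)
{
	size_t new_size = heap_size ? heap_size * 2
				    : LIMA_HEAP_INIT_SIZE; /* stand-in */

	return min(new_size, obj_size);
}
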
/linux/drivers/gpu/drm/ttm/tests/
ttm_bo_test.c
62 struct ttm_buffer_object *bo; in ttm_bo_reserve_optimistic_no_ticket() local
65 bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL); in ttm_bo_reserve_optimistic_no_ticket()
67 err = ttm_bo_reserve(bo, params->interruptible, params->no_wait, NULL); in ttm_bo_reserve_optimistic_no_ticket()
70 dma_resv_unlock(bo->base.resv); in ttm_bo_reserve_optimistic_no_ticket()
75 struct ttm_buffer_object *bo; in ttm_bo_reserve_locked_no_sleep() local
80 bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL); in ttm_bo_reserve_locked_no_sleep()
83 dma_resv_lock(bo->base.resv, NULL); in ttm_bo_reserve_locked_no_sleep()
85 err = ttm_bo_reserve(bo, interruptible, no_wait, NULL); in ttm_bo_reserve_locked_no_sleep()
86 dma_resv_unlock(bo->base.resv); in ttm_bo_reserve_locked_no_sleep()
93 struct ttm_buffer_object *bo; in ttm_bo_reserve_no_wait_ticket() local
111 struct ttm_buffer_object *bo; ttm_bo_reserve_double_resv() local
186 struct ttm_buffer_object *bo = arg; threaded_ttm_bo_reserve() local
213 struct ttm_buffer_object *bo; ttm_bo_reserve_interrupted() local
240 struct ttm_buffer_object *bo; ttm_bo_unreserve_basic() local
283 struct ttm_buffer_object *bo; ttm_bo_unreserve_pinned() local
385 struct ttm_buffer_object *bo; ttm_bo_put_basic() local
429 struct ttm_buffer_object *bo; ttm_bo_put_shared_resv() local
472 struct ttm_buffer_object *bo; ttm_bo_pin_basic() local
500 struct ttm_buffer_object *bo; ttm_bo_pin_unpin_resource() local
551 struct ttm_buffer_object *bo; ttm_bo_multiple_pin_one_unpin() local
[all...]
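
The KUnit hits show the test-side idiom: ttm_bo_kunit_init() builds a throwaway BO, ttm_bo_reserve() is exercised with different interruptible/no_wait combinations, and the reservation is dropped straight through dma_resv_unlock(). A sketch of one more case in that style:

#include <kunit/test.h>

/* Sketch: an uncontended no_wait reserve must succeed. */
static void example_bo_trylock(struct kunit *test)
{
	struct ttm_buffer_object *bo;
	int err;

	bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL);

	err = ttm_bo_reserve(bo, false, true, NULL);	/* no_wait trylock */
	KUNIT_ASSERT_EQ(test, err, 0);

	dma_resv_unlock(bo->base.resv);
}
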
ttm_tt_test.c
42 struct ttm_buffer_object *bo; in ttm_tt_init_basic() local
53 bo = ttm_bo_kunit_init(test, test->priv, params->size, NULL); in ttm_tt_init_basic()
55 err = ttm_tt_init(tt, bo, page_flags, caching, extra_pages); in ttm_tt_init_basic()
69 struct ttm_buffer_object *bo; in ttm_tt_init_misaligned() local
79 bo = ttm_bo_kunit_init(test, test->priv, size, NULL); in ttm_tt_init_misaligned()
82 bo->base.size += 1; in ttm_tt_init_misaligned()
84 err = ttm_tt_init(tt, bo, 0, caching, 0); in ttm_tt_init_misaligned()
92 struct ttm_buffer_object *bo; in ttm_tt_fini_basic() local
100 bo = ttm_bo_kunit_init(test, test->priv, BO_SIZE, NULL); in ttm_tt_fini_basic()
102 err = ttm_tt_init(tt, bo, in ttm_tt_fini_basic()
112 struct ttm_buffer_object *bo; ttm_tt_fini_sg() local
132 struct ttm_buffer_object *bo; ttm_tt_fini_shmem() local
155 struct ttm_buffer_object *bo; ttm_tt_create_basic() local
174 struct ttm_buffer_object *bo; ttm_tt_create_invalid_bo_type() local
190 struct ttm_buffer_object *bo; ttm_tt_create_ttm_exists() local
213 ttm_tt_null_create(struct ttm_buffer_object * bo,u32 page_flags) ttm_tt_null_create() argument
226 struct ttm_buffer_object *bo; ttm_tt_create_failed() local
244 struct ttm_buffer_object *bo; ttm_tt_destroy_basic() local
273 struct ttm_buffer_object *bo; ttm_tt_populate_populated_ttm() local
298 struct ttm_buffer_object *bo; ttm_tt_unpopulate_basic() local
321 struct ttm_buffer_object *bo; ttm_tt_unpopulate_empty_ttm() local
342 struct ttm_buffer_object *bo; ttm_tt_swapin_basic() local
[all...]
ttm_bo_validate_test.c
45 return "ttm-bo-validate-fence"; in fence_name()
112 struct ttm_buffer_object *bo; in ttm_bo_init_reserved_sys_man() local
116 bo = kunit_kzalloc(test, sizeof(*bo), GFP_KERNEL); in ttm_bo_init_reserved_sys_man()
117 KUNIT_ASSERT_NOT_NULL(test, bo); in ttm_bo_init_reserved_sys_man()
122 drm_gem_private_object_init(priv->drm, &bo->base, size); in ttm_bo_init_reserved_sys_man()
124 err = ttm_bo_init_reserved(priv->ttm_dev, bo, bo_type, placement, in ttm_bo_init_reserved_sys_man()
127 dma_resv_unlock(bo->base.resv); in ttm_bo_init_reserved_sys_man()
130 KUNIT_EXPECT_EQ(test, kref_read(&bo->kref), 1); in ttm_bo_init_reserved_sys_man()
131 KUNIT_EXPECT_PTR_EQ(test, bo in ttm_bo_init_reserved_sys_man()
159 struct ttm_buffer_object *bo; ttm_bo_init_reserved_mock_man() local
200 struct ttm_buffer_object *bo; ttm_bo_init_reserved_resv() local
236 struct ttm_buffer_object *bo; ttm_bo_validate_basic() local
279 struct ttm_buffer_object *bo; ttm_bo_validate_invalid_placement() local
306 struct ttm_buffer_object *bo; ttm_bo_validate_failed_alloc() local
335 struct ttm_buffer_object *bo; ttm_bo_validate_pinned() local
380 struct ttm_buffer_object *bo; ttm_bo_validate_same_placement() local
422 struct ttm_buffer_object *bo; ttm_bo_validate_busy_placement() local
469 struct ttm_buffer_object *bo; ttm_bo_validate_multihop() local
527 struct ttm_buffer_object *bo; ttm_bo_validate_no_placement_signaled() local
575 struct ttm_buffer_object *bo = arg; threaded_dma_resv_signal() local
597 struct ttm_buffer_object *bo; ttm_bo_validate_no_placement_not_signaled() local
650 struct ttm_buffer_object *bo; ttm_bo_validate_move_fence_signaled() local
710 struct ttm_buffer_object *bo; ttm_bo_validate_move_fence_not_signaled() local
870 struct ttm_buffer_object *bo, *bo_evictable, *bo_pinned; ttm_bo_validate_allowed_only_evict() local
1040 struct ttm_buffer_object *bo, *bo_evict; ttm_bo_validate_evict_gutting() local
[all...]
/linux/drivers/gpu/drm/virtio/
virtgpu_prime.c
36 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); in virtgpu_virtio_get_uuid() local
39 wait_event(vgdev->resp_wq, bo->uuid_state != STATE_INITIALIZING); in virtgpu_virtio_get_uuid()
40 if (bo->uuid_state != STATE_OK) in virtgpu_virtio_get_uuid()
43 uuid_copy(uuid, &bo->uuid); in virtgpu_virtio_get_uuid()
53 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); in virtgpu_gem_map_dma_buf() local
55 if (virtio_gpu_is_vram(bo)) in virtgpu_gem_map_dma_buf()
56 return virtio_gpu_vram_map_dma_buf(bo, attach->dev, dir); in virtgpu_gem_map_dma_buf()
66 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); in virtgpu_gem_unmap_dma_buf() local
68 if (virtio_gpu_is_vram(bo)) { in virtgpu_gem_unmap_dma_buf()
92 struct virtio_gpu_object *bo) in virtio_gpu_resource_assign_uuid() argument
111 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); virtgpu_gem_prime_export() local
148 virtgpu_dma_buf_import_sgt(struct virtio_gpu_mem_entry ** ents,unsigned int * nents,struct virtio_gpu_object * bo,struct dma_buf_attachment * attach) virtgpu_dma_buf_import_sgt() argument
186 virtgpu_dma_buf_unmap(struct virtio_gpu_object * bo) virtgpu_dma_buf_unmap() argument
205 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); virtgpu_dma_buf_free_obj() local
229 virtgpu_dma_buf_init_obj(struct drm_device * dev,struct virtio_gpu_object * bo,struct dma_buf_attachment * attach) virtgpu_dma_buf_init_obj() argument
284 struct virtio_gpu_object *bo = gem_to_virtio_gpu_obj(obj); virtgpu_dma_buf_move_notify() local
299 struct virtio_gpu_object *bo; virtgpu_gem_prime_import() local
[all...]
virtgpu_object.c
64 void virtio_gpu_cleanup_object(struct virtio_gpu_object *bo) in virtio_gpu_cleanup_object() argument
66 struct virtio_gpu_device *vgdev = bo->base.base.dev->dev_private; in virtio_gpu_cleanup_object()
68 virtio_gpu_resource_id_put(vgdev, bo->hw_res_handle); in virtio_gpu_cleanup_object()
69 if (virtio_gpu_is_shmem(bo)) { in virtio_gpu_cleanup_object()
70 drm_gem_shmem_free(&bo->base); in virtio_gpu_cleanup_object()
71 } else if (virtio_gpu_is_vram(bo)) { in virtio_gpu_cleanup_object()
72 struct virtio_gpu_object_vram *vram = to_virtio_gpu_vram(bo); in virtio_gpu_cleanup_object()
84 drm_gem_object_release(&bo->base.base); in virtio_gpu_cleanup_object()
85 kfree(bo); in virtio_gpu_cleanup_object()
91 struct virtio_gpu_object *bo in virtio_gpu_free_object() local
103 virtio_gpu_detach_object_fenced(struct virtio_gpu_object * bo) virtio_gpu_detach_object_fenced() argument
139 virtio_gpu_is_shmem(struct virtio_gpu_object * bo) virtio_gpu_is_shmem() argument
160 virtio_gpu_object_shmem_init(struct virtio_gpu_device * vgdev,struct virtio_gpu_object * bo,struct virtio_gpu_mem_entry ** ents,unsigned int * nents) virtio_gpu_object_shmem_init() argument
210 struct virtio_gpu_object *bo; virtio_gpu_object_create() local
[all...]
/linux/drivers/gpu/drm/xe/tests/
xe_bo.c
24 static int ccs_test_migrate(struct xe_tile *tile, struct xe_bo *bo, in ccs_test_migrate() argument
37 /* Move bo to VRAM if not already there. */ in ccs_test_migrate()
38 ret = xe_bo_validate(bo, NULL, false); in ccs_test_migrate()
40 KUNIT_FAIL(test, "Failed to validate bo.\n"); in ccs_test_migrate()
44 /* Optionally clear bo *and* CCS data in VRAM. */ in ccs_test_migrate()
46 fence = xe_migrate_clear(tile->migrate, bo, bo->ttm.resource, in ccs_test_migrate()
49 KUNIT_FAIL(test, "Failed to submit bo clear.\n"); in ccs_test_migrate()
55 KUNIT_FAIL(test, "Timeout while clearing bo.\n"); in ccs_test_migrate()
63 ret = xe_bo_evict(bo); in ccs_test_migrate()
129 struct xe_bo *bo; ccs_test_run_tile() local
210 struct xe_bo *bo, *external; evict_test_run_tile() local
376 struct xe_bo *bo; global() member
382 shrink_test_fill_random(struct xe_bo * bo,struct rnd_state * state,struct xe_bo_link * link) shrink_test_fill_random() argument
404 shrink_test_verify(struct kunit * test,struct xe_bo * bo,unsigned int bo_nr,struct rnd_state * state,struct xe_bo_link * link) shrink_test_verify() argument
484 struct xe_bo *bo; shrink_test_run_device() local
560 struct xe_bo *bo = link->bo; shrink_test_run_device() local
[all...]
/linux/drivers/gpu/drm/xe/display/
intel_fb_bo.c
16 struct xe_bo *bo = gem_to_xe_bo(obj); in intel_fb_bo_framebuffer_fini() local
18 if (bo->flags & XE_BO_FLAG_PINNED) { in intel_fb_bo_framebuffer_fini()
20 xe_bo_lock(bo, false); in intel_fb_bo_framebuffer_fini()
21 xe_bo_unpin(bo); in intel_fb_bo_framebuffer_fini()
22 xe_bo_unlock(bo); in intel_fb_bo_framebuffer_fini()
24 xe_bo_put(bo); in intel_fb_bo_framebuffer_fini()
31 struct xe_bo *bo = gem_to_xe_bo(obj); in intel_fb_bo_framebuffer_init() local
32 struct xe_device *xe = to_xe_device(bo->ttm.base.dev); in intel_fb_bo_framebuffer_init()
40 !(bo->flags & XE_BO_FLAG_NEEDS_64K))) in intel_fb_bo_framebuffer_init()
43 xe_bo_get(bo); in intel_fb_bo_framebuffer_init()
76 struct xe_bo *bo; intel_fb_bo_lookup_valid_bo() local
[all...]
