
Searched refs:mman (Results 1 – 25 of 75) sorted by relevance


/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
78 return ttm_range_man_init(&adev->mman.bdev, type, in amdgpu_ttm_init_on_chip()
130 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_evict_flags()
193 BUG_ON(adev->mman.buffer_funcs->copy_max_bytes < in amdgpu_ttm_map_buffer()
223 num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_ttm_map_buffer()
226 r = amdgpu_job_alloc_with_ib(adev, &adev->mman.high_pr, in amdgpu_ttm_map_buffer()
294 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_copy_mem_to_mem()
301 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_ttm_copy_mem_to_mem()
310 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
365 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
560 if (adev->mman in amdgpu_bo_move()
[all...]
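
The first hit above (amdgpu_ttm_init_on_chip) simply registers a range-managed placement on adev->mman.bdev; the rest of the file's hits drive SDMA copies through mman.buffer_funcs under mman.gtt_window_lock. A minimal sketch of the wrapper only, with an illustrative function name and a caller-supplied page count (not driver code):

/* Register a range-managed placement of size_in_pages pages on the
 * device's TTM instance, as the amdgpu_ttm_init_on_chip() hit does. */
static int example_init_on_chip(struct amdgpu_device *adev,
                                unsigned int type, unsigned long size_in_pages)
{
        /* use_tt = false: this placement is not backed by a TT object */
        return ttm_range_man_init(&adev->mman.bdev, type, false, size_in_pages);
}
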
amdgpu_preempt_mgr.c
42 struct ttm_resource_manager *man = &adev->mman.preempt_mgr; in mem_info_preempt_used_show()
102 struct ttm_resource_manager *man = &adev->mman.preempt_mgr; in amdgpu_preempt_mgr_init()
108 ttm_resource_manager_init(man, &adev->mman.bdev, (1 << 30)); in amdgpu_preempt_mgr_init()
116 ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT, man); in amdgpu_preempt_mgr_init()
131 struct ttm_resource_manager *man = &adev->mman.preempt_mgr; in amdgpu_preempt_mgr_fini()
136 ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man); in amdgpu_preempt_mgr_fini()
144 ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT, NULL); in amdgpu_preempt_mgr_fini()
amdgpu_gtt_mgr.c
51 man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT); in amdgpu_mem_info_gtt_total_show()
69 struct ttm_resource_manager *man = &adev->mman.gtt_mgr.manager; in amdgpu_mem_info_gtt_used_show()
196 adev = container_of(mgr, typeof(*adev), mman.gtt_mgr); in amdgpu_gtt_mgr_recover()
277 struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr; in amdgpu_gtt_mgr_init()
284 ttm_resource_manager_init(man, &adev->mman.bdev, gtt_size); in amdgpu_gtt_mgr_init()
291 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, &mgr->manager); in amdgpu_gtt_mgr_init()
306 struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr; in amdgpu_gtt_mgr_fini()
312 ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man); in amdgpu_gtt_mgr_fini()
321 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, NULL); in amdgpu_gtt_mgr_fini()
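
The preempt and GTT manager hits above share one lifecycle: initialise a ttm_resource_manager against adev->mman.bdev, publish it with ttm_set_driver_manager(), and on teardown evict everything before unregistering. A hedged sketch of that pattern, using hypothetical example_mgr_* helpers and a caller-chosen placement index (not the actual driver functions):

/* Bring up and tear down one resource manager, mirroring the
 * amdgpu_preempt_mgr_init()/_fini() and amdgpu_gtt_mgr_init()/_fini()
 * hits above. */
static void example_mgr_init(struct amdgpu_device *adev,
                             struct ttm_resource_manager *man,
                             int place, uint64_t size)
{
        ttm_resource_manager_init(man, &adev->mman.bdev, size);
        ttm_resource_manager_set_used(man, true);
        ttm_set_driver_manager(&adev->mman.bdev, place, man);
}

static void example_mgr_fini(struct amdgpu_device *adev,
                             struct ttm_resource_manager *man, int place)
{
        ttm_resource_manager_set_used(man, false);
        /* Evict everything still placed in this domain before tearing down. */
        if (ttm_resource_manager_evict_all(&adev->mman.bdev, man))
                return;
        ttm_set_driver_manager(&adev->mman.bdev, place, NULL);
        ttm_resource_manager_cleanup(man);
}
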
amdgpu_discovery.c
257 discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC); in amdgpu_discovery_read_binary_from_sysmem()
259 memcpy(binary, discv_regn, adev->mman.discovery_tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
304 adev->mman.discovery_tmr_size, false); in amdgpu_discovery_read_binary_from_mem()
401 (struct nps_info_header *)(adev->mman.discovery_bin + offset); in amdgpu_discovery_verify_npsinfo()
408 if (!amdgpu_discovery_verify_checksum(adev->mman.discovery_bin + offset, in amdgpu_discovery_verify_npsinfo()
456 adev->mman.discovery_tmr_size = DISCOVERY_TMR_SIZE; in amdgpu_discovery_init()
457 adev->mman.discovery_bin = kzalloc(adev->mman.discovery_tmr_size, GFP_KERNEL); in amdgpu_discovery_init()
458 if (!adev->mman.discovery_bin) in amdgpu_discovery_init()
465 r = amdgpu_discovery_read_binary_from_file(adev, adev->mman in amdgpu_discovery_init()
[all...]
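
The discovery hits read a firmware-reserved system-memory region through memremap() and copy it into the discovery_bin buffer sized by discovery_tmr_size. A small sketch of that read step, with a hypothetical helper name and simplified error handling:

/* Map a reserved system-memory window write-combined and copy it out,
 * as amdgpu_discovery_read_binary_from_sysmem() does above. */
static int example_read_tmr(u64 pos, void *binary, size_t size)
{
        void *vaddr = memremap(pos, size, MEMREMAP_WC);

        if (!vaddr)
                return -ENOMEM;

        memcpy(binary, vaddr, size);
        memunmap(vaddr);
        return 0;
}
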
amdgpu_virt.c
367 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_virt_ras_reserve_bps()
385 amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr, in amdgpu_virt_ras_reserve_bps()
426 if (adev->mman.fw_vram_usage_va) in amdgpu_virt_add_bad_page()
427 vram_usage_va = adev->mman.fw_vram_usage_va; in amdgpu_virt_add_bad_page()
429 vram_usage_va = adev->mman.drv_vram_usage_va; in amdgpu_virt_add_bad_page()
602 ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20; in amdgpu_virt_write_vf2pf_data()
604 amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20; in amdgpu_virt_write_vf2pf_data()
677 if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) { in amdgpu_virt_init_data_exchange()
679 } else if (adev->mman in amdgpu_virt_init_data_exchange()
[all...]
amdgpu_gmc.c
654 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_gmc_flush_gpu_tlb()
661 !adev->mman.buffer_funcs_enabled || !adev->ib_pool_ready || in amdgpu_gmc_flush_gpu_tlb()
689 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_gmc_flush_gpu_tlb()
690 r = amdgpu_job_alloc_with_ib(ring->adev, &adev->mman.high_pr, in amdgpu_gmc_flush_gpu_tlb()
702 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_gmc_flush_gpu_tlb()
710 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_gmc_flush_gpu_tlb()
980 adev->mman.stolen_reserved_offset = 0; in amdgpu_gmc_get_vbios_allocations()
981 adev->mman.stolen_reserved_size = 0; in amdgpu_gmc_get_vbios_allocations()
993 adev->mman.keep_stolen_vga_memory = true; in amdgpu_gmc_get_vbios_allocations()
999 adev->mman in amdgpu_gmc_get_vbios_allocations()
[all...]
amdgpu_vram_mgr.c
52 return container_of(mgr, struct amdgpu_device, mman.vram_mgr); in to_amdgpu_device()
141 struct ttm_resource_manager *man = &adev->mman.vram_mgr.manager; in amdgpu_mem_info_vram_used_show()
162 amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr)); in amdgpu_mem_info_vis_vram_used_show()
793 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_clear_reset_blocks()
923 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_init()
930 ttm_resource_manager_init(man, &adev->mman.bdev, in amdgpu_vram_mgr_init()
949 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager); in amdgpu_vram_mgr_init()
964 struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr; in amdgpu_vram_mgr_fini()
971 ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man); in amdgpu_vram_mgr_fini()
988 ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL); in amdgpu_vram_mgr_fini()
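
Several hits (mem_info_vram_used_show above, the vf2pf data in amdgpu_virt.c) report memory usage by querying the registered manager and shifting to MiB. A one-function sketch, assuming the VRAM manager is already initialised:

/* Report current VRAM usage in MiB, as the sysfs and vf2pf hits do. */
static u64 example_vram_used_mib(struct amdgpu_device *adev)
{
        struct ttm_resource_manager *man = &adev->mman.vram_mgr.manager;

        return ttm_resource_manager_usage(man) >> 20;
}
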
psp_v11_0.c
518 if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) { in psp_v11_0_memory_training()
521 adev->mman.aper_base_kaddr); in psp_v11_0_memory_training()
532 memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz); in psp_v11_0_memory_training()
541 memcpy_toio(adev->mman.aper_base_kaddr, buf, sz); in psp_v11_0_memory_training()
psp_v14_0.c
484 if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) { in psp_v14_0_memory_training()
487 adev->mman.aper_base_kaddr); in psp_v14_0_memory_training()
498 memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz); in psp_v14_0_memory_training()
507 memcpy_toio(adev->mman.aper_base_kaddr, buf, sz); in psp_v14_0_memory_training()
psp_v13_0.c
619 if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) { in psp_v13_0_memory_training()
622 adev->mman.aper_base_kaddr); in psp_v13_0_memory_training()
633 memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz); in psp_v13_0_memory_training()
642 memcpy_toio(adev->mman.aper_base_kaddr, buf, sz); in psp_v13_0_memory_training()
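
All three PSP memory-training hits bracket the training run by copying the start of visible VRAM out through the CPU aperture mapping (mman.aper_base_kaddr) and writing it back afterwards. A hedged sketch of that save/restore pair, with hypothetical helper names and the buffer allocated by the caller:

/* Save and restore the first sz bytes of visible VRAM via the BAR mapping,
 * mirroring the memcpy_fromio()/memcpy_toio() pairs in psp_v1x_0.c above. */
static int example_save_vram(struct amdgpu_device *adev, void *buf, size_t sz)
{
        if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr)
                return -EINVAL;

        memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz);
        return 0;
}

static void example_restore_vram(struct amdgpu_device *adev,
                                 const void *buf, size_t sz)
{
        memcpy_toio(adev->mman.aper_base_kaddr, buf, sz);
}
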
/linux/drivers/gpu/drm/qxl/
qxl_ttm.c
42 struct qxl_mman *mman; in qxl_get_qdev() local
45 mman = container_of(bdev, struct qxl_mman, bdev); in qxl_get_qdev()
46 qdev = container_of(mman, struct qxl_device, mman); in qxl_get_qdev()
187 return ttm_range_man_init(&qdev->mman.bdev, type, false, size); in qxl_ttm_init_mem_type()
196 r = ttm_device_init(&qdev->mman.bdev, &qxl_bo_driver, NULL, in qxl_ttm_init()
228 ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM); in qxl_ttm_fini()
229 ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV); in qxl_ttm_fini()
230 ttm_device_fini(&qdev->mman.bdev); in qxl_ttm_fini()
237 ttm_resource_manager_create_debugfs(ttm_manager_type(&qdev->mman in qxl_ttm_debugfs_init()
[all...]
qxl_object.c
139 r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type, in qxl_bo_create()
408 man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_PRIV); in qxl_surf_evict()
409 return ttm_resource_manager_evict_all(&qdev->mman.bdev, man); in qxl_surf_evict()
416 man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_VRAM); in qxl_vram_evict()
417 return ttm_resource_manager_evict_all(&qdev->mman.bdev, man); in qxl_vram_evict()
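
qxl_surf_evict() and qxl_vram_evict() above are the same two steps with different placements: look up the manager registered on qdev->mman.bdev and evict it. Sketch with a hypothetical helper taking the placement as a parameter:

/* Evict everything from one placement, as qxl_surf_evict()/qxl_vram_evict()
 * do for TTM_PL_PRIV and TTM_PL_VRAM. */
static int example_evict_placement(struct qxl_device *qdev, int type)
{
        struct ttm_resource_manager *man;

        man = ttm_manager_type(&qdev->mman.bdev, type);
        if (!man)
                return -EINVAL;

        return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);
}
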
/linux/tools/perf/trace/beauty/
mmap_flags.sh
15 linux_mman=${linux_header_dir}/mman.h
16 arch_mman=${arch_header_dir}/mman.h
33 grep -E -q '#[[:space:]]*include[[:space:]]+.*uapi/asm-generic/mman.*' ${arch_mman}) &&
34 (grep -E $regex ${header_dir}/mman-common.h | \
39 grep -E -q '#[[:space:]]*include[[:space:]]+.*uapi/asm-generic/mman.h>.*' ${arch_mman}) &&
40 (grep -E $regex ${header_dir}/mman.h | \
mmap_prot.sh
13 common_mman=${asm_header_dir}/mman-common.h
14 arch_mman=${arch_header_dir}/mman.h
21 || grep -E -q '#[[:space:]]*include[[:space:]]+.*uapi/asm-generic/mman.*' ${arch_mman}) &&
pkey_alloc_access_rights.sh
8 grep -E $regex ${header_dir}/mman-common.h | \
madvise_behavior.sh
8 grep -E $regex ${header_dir}/mman-common.h | \
mremap_flags.sh
10 linux_mman=${linux_header_dir}/mman.h
/linux/drivers/gpu/drm/radeon/
radeon_ttm.c
62 struct radeon_mman *mman; in radeon_get_rdev() local
65 mman = container_of(bdev, struct radeon_mman, bdev); in radeon_get_rdev()
66 rdev = container_of(mman, struct radeon_device, mman); in radeon_get_rdev()
72 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_VRAM, in radeon_ttm_init_vram()
78 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT, in radeon_ttm_init_gtt()
550 return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx); in radeon_ttm_tt_populate()
570 return ttm_pool_free(&rdev->mman.bdev.pool, ttm); in radeon_ttm_tt_unpopulate()
683 r = ttm_device_init(&rdev->mman.bdev, &radeon_bo_driver, rdev->dev, in radeon_ttm_init()
692 rdev->mman in radeon_ttm_init()
[all...]
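
radeon routes TT page allocation through the pool embedded in rdev->mman.bdev, as the populate/unpopulate hits show. A trivial sketch of that pairing (hypothetical wrapper names):

/* Back a ttm_tt with pages from the device pool and release them again,
 * mirroring radeon_ttm_tt_populate()/_unpopulate() above. */
static int example_tt_populate(struct radeon_device *rdev, struct ttm_tt *ttm,
                               struct ttm_operation_ctx *ctx)
{
        return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx);
}

static void example_tt_unpopulate(struct radeon_device *rdev, struct ttm_tt *ttm)
{
        ttm_pool_free(&rdev->mman.bdev.pool, ttm);
}
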
/linux/tools/perf/
check-headers.sh
70 "include/uapi/asm-generic/mman-common.h"
189 check include/uapi/asm-generic/mman.h '-I "^#include <\(uapi/\)*asm-generic/mman-common\(-tools\)*.h>"'
190 check include/uapi/linux/mman.h '-I "^#include <\(uapi/\)*asm/mman.h>"'
Makefile.perf
584 $(pkey_alloc_access_rights_array): $(asm_generic_hdr_dir)/mman-common.h $(pkey_alloc_access_rights_tbl)
644 $(madvise_behavior_array): $(madvise_hdr_dir)/mman-common.h $(madvise_behavior_tbl)
650 $(mmap_flags_array): $(linux_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman-common.h $(mmap_flags_tbl)
656 $(mremap_flags_array): $(linux_uapi_dir)/mman.h $(mremap_flags_tbl)
674 $(mmap_prot_array): $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman-common.h $(mmap_prot_tbl)
/linux/drivers/gpu/drm/i915/selftests/
i915_live_selftests.h
33 selftest(mman, i915_gem_mman_live_selftests)
/linux/Documentation/userspace-api/media/dvb/
dmx-munmap.rst
23 #include <sys/mman.h>
/linux/include/uapi/asm-generic/
Kbuild
15 mandatory-y += mman.h
/linux/Documentation/userspace-api/media/v4l/
func-munmap.rst
21 #include <sys/mman.h>
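
Both Documentation hits are the standard userspace include for munmap(). For reference, a minimal self-contained user-space pairing of mmap()/munmap() over an anonymous mapping (illustrative only; the DVB/V4L pages apply munmap() to driver-provided buffers instead):

#include <stdio.h>
#include <sys/mman.h>

int main(void)
{
        size_t len = 4096;
        void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

        if (p == MAP_FAILED) {
                perror("mmap");
                return 1;
        }
        munmap(p, len);
        return 0;
}
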
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_migrate.c
64 num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8); in svm_migrate_gart_map()
67 r = amdgpu_job_alloc_with_ib(adev, &adev->mman.high_pr, in svm_migrate_gart_map()
130 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in svm_migrate_copy_memory_gart()
136 mutex_lock(&adev->mman.gtt_window_lock); in svm_migrate_copy_memory_gart()
172 mutex_unlock(&adev->mman.gtt_window_lock); in svm_migrate_copy_memory_gart()
