/linux/arch/powerpc/mm/book3s64/
  iommu_api.c
      60        mm_iommu_do_alloc()          local:  struct mm_iommu_table_group_mem_t *mem, *mem2;
      73-74     mm_iommu_do_alloc()          mem = kzalloc(sizeof(*mem), GFP_KERNEL); if (!mem) ...
      80-91     mm_iommu_do_alloc()          mem->pageshift = __ffs(...); mem->dev_hpa = dev_hpa / MM_IOMMU_TABLE_INVALID_HPA
      92        mm_iommu_do_alloc()          mem->hpas = vzalloc(array_size(entries, ...))
      202       mm_iommu_unpin()             argument
      227       mm_iommu_do_free()           argument
      237       mm_iommu_free()              local:  mem = container_of(head, ...)
      243       mm_iommu_release()           argument
      249       mm_iommu_put()               argument
      291       mm_iommu_lookup()            local:  *mem, *ret = NULL
      311       mm_iommu_get()               local:  *mem, *ret = NULL
      330       mm_iommu_ua_to_hpa()         argument
      357       mm_iommu_is_devmem()         local
      383       mm_iommu_mapped_inc()        argument
      393       mm_iommu_mapped_dec()        argument
      [all...]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
  mem.c
      23        #include "mem.h"
      35        struct member                struct page **mem;
      57-59     nvkm_mem_addr()              local:  if (mem->pages == 1 && mem->mem) return mem->dma[0];
      73-77     nvkm_mem_map_dma()           local:  .memory = &mem->memory, .dma = mem...
      85        nvkm_mem_dtor()              local
      113       nvkm_mem_map_sgl()           local
      135       nvkm_mem_map_host()          local
      154       nvkm_mem_new_host()          local
      [all...]
/linux/drivers/gpu/drm/nouveau/ |
  nouveau_mem.c
      37        nouveau_mem_map()            argument
      53-67     nouveau_mem_map()            args.nv50.kind/comp = mem->kind/comp; mem->mem.type & NVIF_MEM_VRAM; args.gf100.kind = mem->kind
      75        nouveau_mem_map()            nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem, ...)
      79        nouveau_mem_fini()           argument
      91        nouveau_mem_host()           local
      127       nouveau_mem_vram()           local
      166       nouveau_mem_del()            local
      177       nouveau_mem_new()            local
      [all...]
  nouveau_vmm.c
      31-33     nouveau_vma_unmap()          if (vma->mem) { ... vma->mem = NULL; }
      38-44     nouveau_vma_map()            argument; nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); vma->mem = mem;
      80-100    nouveau_vma_new()            local:  mem = nouveau_mem(nvbo->bo.resource); vma->mem = NULL;
      [all...]
/linux/drivers/staging/media/atomisp/pci/runtime/isp_param/src/ |
  isp_param.c
      18-22     ia_css_isp_param_set_mem_init()        argument; params[pclass][mem].address / .size
      29-33     ia_css_isp_param_set_css_mem_init()    argument
      40-44     ia_css_isp_param_set_isp_mem_init()    argument
      52        ia_css_isp_param_get_mem_init()        argument
      61        ia_css_isp_param_get_css_mem_init()    argument
      70        ia_css_isp_param_get_isp_mem_init()    argument
      81        ia_css_init_memory_interface()         local:  unsigned int pclass, mem;
      102       ia_css_isp_param_allocate_isp_parameters()   local
      145       ia_css_isp_param_destroy_isp_parameters()    local
      179       ia_css_isp_param_copy_isp_mem_if_to_ddr()    local
      [all...]
/linux/drivers/infiniband/sw/siw/ |
  siw_mem.c
      29-35     siw_mem_id2obj()             local:  mem = xa_load(&sdev->mem_xa, stag_index); kref_get_unless_zero(&mem->ref)
      61-69     siw_mr_add_mem()             local:  mem = kzalloc(sizeof(*mem), GFP_KERNEL); mem->mem_obj = mem_obj;
      96        siw_mr_drop_mem()            local:  mem = mr->mem, *found
      110       siw_free_mem()               local:  mem = container_of(ref, struct siw_mem, ref)
      136       siw_check_mem()              argument
      190       siw_check_sge()              argument: struct siw_mem *mem[]
      263       siw_invalidate_stag()        local:  mem = siw_mem_id2obj(sdev, stag >> 8)
      [all...]
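The siw_mem_id2obj() match above uses a common kernel pattern: look up an object in an xarray by index and take it only if its refcount has not already dropped to zero. A minimal sketch of that pattern follows; the type and field names are illustrative, not the real siw structures.

    #include <linux/xarray.h>
    #include <linux/kref.h>
    #include <linux/rcupdate.h>
    #include <linux/types.h>

    /* Hypothetical object indexed by a small integer ID. */
    struct demo_obj {
            struct kref ref;
            u32 id;
    };

    static DEFINE_XARRAY(demo_xa);

    /* Look up an object by ID and take a reference, unless it is already dying. */
    static struct demo_obj *demo_id2obj(u32 id)
    {
            struct demo_obj *obj;

            rcu_read_lock();
            obj = xa_load(&demo_xa, id);
            if (obj && !kref_get_unless_zero(&obj->ref))
                    obj = NULL;     /* found, but being freed concurrently */
            rcu_read_unlock();

            return obj;
    }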
/linux/drivers/pci/endpoint/ |
  pci-epc-mem.c
      17-26     pci_epc_mem_get_order()      argument; kernel-doc "@mem: address space of the endpoint controller";
                                             page_shift = ilog2(mem->window.page_size)
      51-92     pci_epc_multi_mem_init()     local:  mem = kzalloc(sizeof(*mem), GFP_KERNEL); error path kfree(mem)
      150       pci_epc_mem_exit()           local
      182       pci_epc_mem_alloc_addr()     local
      224       pci_epc_get_matching_window() local
      250       pci_epc_mem_free_addr()      local
      [all...]
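pci-epc-mem.c is the window allocator that PCI endpoint function drivers use to carve chunks out of the controller's outbound address space. A hedged sketch of the usual allocate/use/free pairing around the pci_epc_mem_alloc_addr()/pci_epc_mem_free_addr() entry points listed above; the surrounding EPF driver context and the demo_* function are assumptions, not code from this file.

    #include <linux/pci-epc.h>
    #include <linux/io.h>
    #include <linux/errno.h>

    /* Illustrative only: grab "size" bytes of outbound window space, use it, free it. */
    static int demo_epf_use_window(struct pci_epc *epc, size_t size)
    {
            phys_addr_t phys;
            void __iomem *va;

            va = pci_epc_mem_alloc_addr(epc, &phys, size);
            if (!va)
                    return -ENOMEM;

            /* ... program an outbound/ATU mapping at "phys", do MMIO through "va" ... */

            pci_epc_mem_free_addr(epc, phys, va, size);
            return 0;
    }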
/linux/tools/perf/ |
  builtin-mem.c
      16        #include "util/mem-events.h"
      51-67     parse_record_events()        local:  mem = (struct perf_mem *)opt->value; "failed: there is no PMU that supports perf mem"; mem->operation = 0;
      71-121    __cmd_record()               argument; usage "perf mem record [<options>] [<command>]"; mem->cpu_list; mem->operation & MEM_OPERATION_LOAD
      185       dump_raw_samples()           local:  mem = container_of(tool, struct perf_mem, tool)
      263       report_raw_events()          argument
      328       get_sort_order()             argument
      363       __cmd_report()               argument
      474       cmd_mem()                    local:  struct perf_mem mem = { ... }
      [all...]
/linux/tools/testing/selftests/mm/ |
  mkdirty.c
      41-58     do_test_write_sigsegv()      argument; orig = *mem; *mem = orig + 1; ksft_test_result(ret == 1 && *mem == orig, ...)
      65-83     mmap_thp_range()             local:  mem = (char *)(((uintptr_t)mmap_mem + thpsize) & ~(thpsize - 1)); madvise(mem, thpsize, MADV_HUGEPAGE)
      89-94     test_ptrace_write()          local
      124       test_ptrace_write_thp()      local
      160       test_page_migration()        local
      192       test_page_migration_thp()    local
      231       test_pte_mapped_thp()        local
      [all...]
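mmap_thp_range() here (and mmap_aligned() in uffd-wp-mremap.c further down) uses the standard userspace trick for getting a THP-aligned region: over-allocate by one THP size, round the pointer up to the alignment, then request huge pages. A standalone sketch of that trick, assuming thpsize was read from sysfs and is a power of two; the helper name is made up.

    #include <stdint.h>
    #include <stdlib.h>
    #include <sys/mman.h>

    /* Return a THP-aligned region of "thpsize" bytes, or NULL on failure.
     * A real caller would also keep mmap_mem so the whole 2*thpsize range
     * can be munmap()ed later. */
    static char *mmap_thp_aligned(size_t thpsize)
    {
            char *mmap_mem, *mem;

            /* Over-allocate so an aligned block is guaranteed to fit. */
            mmap_mem = mmap(NULL, thpsize * 2, PROT_READ | PROT_WRITE,
                            MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
            if (mmap_mem == MAP_FAILED)
                    return NULL;

            /* Round up to the next multiple of thpsize. */
            mem = (char *)(((uintptr_t)mmap_mem + thpsize) & ~(thpsize - 1));

            if (madvise(mem, thpsize, MADV_HUGEPAGE)) {
                    munmap(mmap_mem, thpsize * 2);
                    return NULL;
            }
            return mem;
    }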
  pagemap_ioctl.c
      164-183   gethugetlb_mem()             local:  mem = shmat(*shmid, 0, 0) or mmap(NULL, size, PROT_READ | PROT_WRITE, ...)
      189-196   userfaultfd_tests()          local:  mem = mmap(NULL, mem_size, PROT_NONE, MAP_PRIVATE | MAP_ANON, -1, 0); wp_init(mem, mem_size)
      244       sanity_tests_sd()            local:  char *mem, *m[2];
      680       base_tests()                 argument
      1057      sanity_tests()               local:  char *mem, *fmem;
      1228      mprotect_tests()             local:  char *mem, *mem2;
      1315      get_dirty_pages_reset()      argument
      1351      thread_proc()                argument
      1396      transact_test()              local
      1489      zeropfn_tests()              local:  char *mmap_mem, *mem;
      1548      main()                       local:  char *mem, *map, *fmem;
      [all...]
  cow.c
      112       child_memcmp_fn()            argument; memcpy(old, mem, size); memcmp(old, mem, size)
      130-159   child_vmsplice_memcmp_fn()   argument; .iov_base = mem; munmap(mem, size)
      177       child_fn                     typedef: int (*child_fn)(char *mem, size_t size, struct comm_pipes *comm_pipes)
      179-198   do_test_cow_in_parent()      argument
      256-272   test_cow_in_parent[_mprotect](), test_vmsplice_in_child[_mprotect]()         arguments
      279       do_test_vmsplice_in_parent() argument
      393-398   test_vmsplice_before_fork(), test_vmsplice_after_fork()                      arguments
      404       do_test_iouring()            argument
      576-581   test_iouring_ro(), test_iouring_fork()                                       arguments
      595       do_test_ro_pin()             argument
      737-765   test_ro[_fast]_pin_on_shared(), ..._on_ro_previously_shared(), ..._on_ro_exclusive()   arguments
      775       do_run_with_base_page()      local
      836       do_run_with_thp()            local:  char *mem, *mmap_mem, *tmp, *mremap_mem = MAP_FAILED;
      1048      run_with_hugetlb()           local:  char *mem, *dummy;
      1269      do_test_anon_thp_collapse()  argument
      1413-1434 test_anon_thp_collapse_{unshared,fully_shared,lower_shared,upper_shared}()   arguments
      1505      test_cow()                   argument
      1525-1530 test_ro_pin(), test_ro_fast_pin()   arguments
      1537      run_with_zeropage()          local:  char *mem, *smem;
      1569      run_with_huge_zeropage()     local:  char *mem, *smem, *mmap_mem, *mmap_smem;
      1637      run_with_memfd()             local
      1685      run_with_tmpfile()           local
      1743      run_with_memfd_hugetlb()     local
      [all...]
  uffd-wp-mremap.c
      55-65     mmap_aligned()               local:  mem = (char *)(((uintptr_t)mmap_mem + size - 1) & ~(size - 1)); trim both ends with munmap()
      73-106    alloc_one_folio()            local:  mem = mmap_aligned(size, prot, flags) or mmap(NULL, size, prot, flags, -1, 0)
      134       check_uffd_wp_state()        argument
      165       test_one_folio()             local:  void *mem = NULL;
      [all...]
  memfd_secret.c
      63-80     test_mlock_limit()           local:  mem = mmap(NULL, len, prot, mode, fd, 0); munmap(mem, len)
      92-100    test_vmsplice()              local:  mem = mmap(NULL, page_size, prot, mode, fd, 0)
      130       try_process_vm_read()        local
      154       try_ptrace()                 local
      206       test_remote_access()         local
      [all...]
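memfd_secret.c exercises secret memory: pages backing a secret memfd are removed from the kernel's direct map, and the listed try_process_vm_read()/try_ptrace()/test_remote_access() cases check that other processes cannot read them. A minimal sketch of creating and mapping such a region; it assumes a kernel with CONFIG_SECRETMEM, libc headers that define SYS_memfd_secret, and the map_secret() helper name is made up.

    #define _GNU_SOURCE
    #include <sys/syscall.h>
    #include <sys/mman.h>
    #include <unistd.h>

    static char *map_secret(size_t len)
    {
            char *mem;
            int fd = syscall(SYS_memfd_secret, 0);

            if (fd < 0)
                    return NULL;
            if (ftruncate(fd, len) < 0) {
                    close(fd);
                    return NULL;
            }

            /* The selftest maps the memfd shared; keep fd open for the mapping's lifetime. */
            mem = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
            return mem == MAP_FAILED ? NULL : mem;
    }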
/linux/drivers/base/ |
  memory.c
      94-97     memory_block_release()       local:  mem = to_memory_block(dev); WARN_ON(mem->altmap); kfree(mem)
      161-163   phys_index_show()            local:  sysfs_emit(buf, "%08lx\n", memory_block_id(mem->start_section_nr))
      182-201   state_show()                 local:  switch (mem->state); "ERROR-UNKNOWN-%ld"
      213-215   memblk_nr_poison()           declaration / inline stub, "mem" argument
      224       memory_block_online()        argument
      295       memory_block_offline()       argument
      347       memory_block_action()        argument
      367       memory_block_change_state()  argument
      387       memory_subsys_online()       local
      408       memory_subsys_offline()      local
      420       state_store()                local
      465       phys_device_show()           local
      490       valid_zones_show()           local
      675       find_memory_block_by_id()    local
      736       early_node_zone_for_memory_block()   argument
      783       memory_block_add_nid()       argument
      816       add_memory_block()           local
      894       create_memory_block_devices()   local
      931       remove_memory_block_devices()   local
      1038      walk_memory_blocks()         local
      1065      for_each_memory_block_cb()   local
      1259-1274 memblk_nr_poison_inc(), memblk_nr_poison_sub(), memblk_nr_poison()   locals / argument
      [all...]
/linux/drivers/gpu/drm/i915/ |
  intel_region_ttm.c
      54-65     intel_region_to_ttm_type()   argument; GEM_BUG_ON(mem->type != INTEL_MEMORY_LOCAL/MOCK/SYSTEM); type = mem->instance + TTM_PL_PRIV
      73-85     intel_region_ttm_init()      argument; kernel-doc "@mem: The region to initialize."; bdev = &mem->i915->bdev
      108       intel_region_ttm_fini()      argument
      163       intel_region_ttm_resource_to_rsgt()   argument
      198       intel_region_ttm_resource_alloc()     argument
      255       intel_region_ttm_resource_free()      argument
      [all...]
/linux/kernel/dma/ |
  swiotlb.c
      231-239   swiotlb_print_info()         local:  mem = &io_tlb_default_mem.defpool; "No low mem"; "mapped [mem %pa-%pa] (%luMB)"
      260-263   swiotlb_update_mem_attributes()   local
      269       swiotlb_init_io_tlb_pool()   argument
      306       add_mem_pool()               argument
      357       swiotlb_init_remap()         local
      433       swiotlb_init_late()          local
      527       swiotlb_exit()               local
      736       swiotlb_dyn_alloc()          local
      779       __swiotlb_find_pool()        local:  mem = dev->dma_io_tlb_mem
      860       swiotlb_bounce()             argument
      937       wrap_area_index()            argument
      951-994   inc_used_and_hiwater(), dec_used(), inc_transient_used(), dec_transient_used()   arguments (both config variants)
      1145      swiotlb_search_area()        local
      1185      swiotlb_find_slots()         local
      1291,1325 mem_used()                   argument (both config variants)
      1373      swiotlb_tbl_map_single()     local
      1444      swiotlb_release_slots()      argument
      1631      is_swiotlb_active()          local
      1665      mem_transient_used()         argument
      1672-1700 io_tlb_transient_used_get(), io_tlb_used_get(), io_tlb_hiwater_get(), io_tlb_hiwater_set()   locals
      1714,1742 swiotlb_create_debugfs_files()   argument (both config variants)
      1753      swiotlb_alloc()              local
      1795      rmem_swiotlb_device_init()   local:  mem = rmem->priv
      [all...]
/linux/drivers/gpu/drm/amd/amdgpu/ |
  amdgpu_amdkfd_gpuvm.c
      78        kfd_mem_is_attached()        argument
      115-125   amdgpu_amdkfd_gpuvm_init_mem_limits()   local:  mem = si.totalram - si.totalhigh; mem *= si.mem_unit; max_system_mem_limit = mem - (mem >> 6)
      259       amdgpu_amdkfd_unreserve_mem_limit()     "adev reference can't be null when alloc mem flags vram is set"
      310-316   create_dmamap_sg_bo()        argument; "@mem: BO of peer device that is being DMA mapped"
      497       get_pte_flags()              argument
      543-781   kfd_mem_dmamap_userptr(), kfd_mem_dmamap_sg_bo(), kfd_mem_dmamap_attachment(),
                kfd_mem_dmaunmap_userptr(), kfd_mem_dmaunmap_sg_bo(), kfd_mem_dmaunmap_attachment()   arguments
      801-854   kfd_mem_export_dmabuf(), kfd_mem_attach_dmabuf(), kfd_mem_attach()    arguments
      1004-1017 add_kgd_mem_to_kfd_bo_list(), remove_kgd_mem_from_kfd_bo_list()       arguments
      1037      init_user_pages()            argument
      1131-1172 reserve_bo_and_vm(), reserve_bo_and_cond_vms()                         arguments
      1235-1279 unmap_bo_from_gpuvm(), update_gpuvm_pte(), map_bo_to_gpuvm()           arguments
      1661      amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()        argument: struct kgd_mem **mem
      1852      amdgpu_amdkfd_gpuvm_free_memory_of_gpu()         argument
      1967      amdgpu_amdkfd_gpuvm_map_memory_to_gpu()          argument
      2072      amdgpu_amdkfd_gpuvm_dmaunmap_mem()               argument
      2104      amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()      argument
      2158      amdgpu_amdkfd_gpuvm_sync_memory()                argument
      2235-2306 amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel(), ..._unmap_gtt_bo_from_kernel(), ..._get_vm_fault_info()   arguments
      2320-2424 import_obj_create(), amdgpu_amdkfd_gpuvm_import_dmabuf_fd(), amdgpu_amdkfd_gpuvm_export_dmabuf()      arguments
      2452      amdgpu_amdkfd_evict_userptr()                    argument
      2494-2702 update_invalid_user_pages(), validate_invalid_user_pages(), confirm_valid_user_pages_locked()   locals: *mem, *tmp_mem
      2861      amdgpu_amdkfd_gpuvm_restore_process_bos()        local
      3055-3119 amdgpu_amdkfd_add_gws_to_process(), amdgpu_amdkfd_remove_gws_from_process()   arguments
      3167      amdgpu_amdkfd_bo_mapped_to_dev()                 argument
      [all...]
/linux/drivers/gpu/drm/ttm/ |
  ttm_agp_backend.c
      47        struct member                struct agp_memory *mem;
      55-75     ttm_agp_bind()               local:  mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);
                                             mem->page_count = 0; mem->pages[mem->page_count++] = page; agp_be->mem = mem;
      [all...]
/linux/drivers/spi/ |
  spi-mem.c
      12        #include <linux/spi/spi-mem.h>
      104-106   spi_check_buswidth_req()     argument; mode = mem->spi->mode
      142-157   spi_mem_check_buswidth()     argument; checks cmd/addr/dummy/data buswidths
      164-167   spi_mem_default_supports_op() argument
      240       spi_mem_internal_supports_op() argument
      266       spi_mem_supports_op()        argument
      275       spi_mem_access_start()       argument
      302       spi_mem_access_end()         argument
      368       spi_mem_exec_op()            argument
      512       spi_mem_get_name()           argument
      533       spi_mem_adjust_op_size()     argument
      569       spi_mem_adjust_op_freq()     argument
      594       spi_mem_calc_op_duration()   argument
      681       spi_mem_dirmap_create()      argument
      760       devm_spi_mem_dirmap_create() argument
      906       spi_mem_read_status()        argument
      941       spi_mem_poll_status()        argument
      995       spi_mem_probe()              local
      1019      spi_mem_remove()             local:  mem = spi_get_drvdata(spi)
      1028      spi_mem_shutdown()           local:  mem = spi_get_drvdata(spi)
      [all...]
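spi-mem.c is the SPI memory operations layer; the entry points listed above (spi_mem_supports_op(), spi_mem_adjust_op_size(), spi_mem_exec_op()) are what SPI NOR/NAND drivers call after describing a flash command as cmd/addr/dummy/data phases. A hedged sketch of issuing a JEDEC Read ID (opcode 0x9f) through that API; the demo_ function and the assumption that a struct spi_mem is already available from the probe path are mine, not this file's code.

    #include <linux/spi/spi-mem.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    /* Illustrative: read the 3-byte JEDEC ID over single-wire SPI. */
    static int demo_read_jedec_id(struct spi_mem *mem, u8 *id /* 3 bytes */)
    {
            struct spi_mem_op op =
                    SPI_MEM_OP(SPI_MEM_OP_CMD(0x9f, 1),
                               SPI_MEM_OP_NO_ADDR,
                               SPI_MEM_OP_NO_DUMMY,
                               SPI_MEM_OP_DATA_IN(3, id, 1));
            int ret;

            if (!spi_mem_supports_op(mem, &op))
                    return -EOPNOTSUPP;

            /* Controllers may shrink op.data.nbytes; real drivers loop on this. */
            ret = spi_mem_adjust_op_size(mem, &op);
            if (ret)
                    return ret;

            return spi_mem_exec_op(mem, &op);
    }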
/linux/drivers/char/hw_random/ |
  intel-rng.c
      155-164   hwstatus_get(), hwstatus_set()   argument: void __iomem *mem; readb()/writeb() at INTEL_RNG_HW_STATUS
      169-173   intel_rng_data_present()     local:  mem = (void __iomem *)rng->priv; readb(mem + INTEL_RNG_STATUS)
      184-186   intel_rng_data_read()        local:  *data = readb(mem + INTEL_RNG_DATA)
      193       intel_rng_init()             local
      212       intel_rng_cleanup()          local
      233       struct member                void __iomem *mem;
      333       intel_rng_mod_init()         local
      408       intel_rng_mod_exit()         local:  mem = (void __iomem *)intel_rng.priv
      [all...]
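intel-rng.c talks to the RNG through a small MMIO window: the helpers above poll a status register and then read the data register with readb(). A sketch of that poll-then-read shape; the DEMO_* offsets and bit are placeholders standing in for the INTEL_RNG_* constants named in the listing, not their real values.

    #include <linux/io.h>
    #include <linux/delay.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    #define DEMO_RNG_STATUS         1       /* placeholder register offsets */
    #define DEMO_RNG_DATA           2
    #define DEMO_RNG_DATA_PRESENT   0x01    /* placeholder "data ready" bit */

    /* Poll until a random byte is available, then read it. */
    static int demo_rng_read_byte(void __iomem *mem, u8 *out)
    {
            int retries = 20;

            while (!(readb(mem + DEMO_RNG_STATUS) & DEMO_RNG_DATA_PRESENT)) {
                    if (!--retries)
                            return -ETIMEDOUT;
                    udelay(10);
            }
            *out = readb(mem + DEMO_RNG_DATA);
            return 0;
    }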
/linux/mm/ |
  cma_debug.c
      69-72     cma_add_to_cma_mem_list()    argument; hlist_add_head(&mem->node, &cma->mem_head)
      78-87     cma_get_entry_from_list()    local:  mem = hlist_entry(cma->mem_head.first, struct cma_mem, node); hlist_del_init(&mem->node)
      92-99     cma_free_mem()               local:  mem = cma_get_entry_from_list(cma); if (mem == NULL) ...
      131       cma_alloc_mem()              local
      [all...]
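cma_debug.c tracks debug-allocated blocks on a per-CMA hlist: cma_add_to_cma_mem_list() pushes with hlist_add_head() and cma_get_entry_from_list() pops with hlist_entry()/hlist_del_init(). A generic sketch of that push/pop pattern; the struct, list, and lock names are made up, and the spinlock here is illustrative rather than the file's actual locking.

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct demo_mem {
            struct hlist_node node;
            unsigned long pfn;
    };

    static HLIST_HEAD(demo_head);
    static DEFINE_SPINLOCK(demo_lock);

    /* Push an entry onto the list head. */
    static void demo_push(struct demo_mem *m)
    {
            spin_lock(&demo_lock);
            hlist_add_head(&m->node, &demo_head);
            spin_unlock(&demo_lock);
    }

    /* Pop the most recently added entry, or return NULL if the list is empty. */
    static struct demo_mem *demo_pop(void)
    {
            struct demo_mem *m = NULL;

            spin_lock(&demo_lock);
            if (!hlist_empty(&demo_head)) {
                    m = hlist_entry(demo_head.first, struct demo_mem, node);
                    hlist_del_init(&m->node);
            }
            spin_unlock(&demo_lock);
            return m;
    }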
/linux/drivers/gpu/drm/xe/ |
  xe_vram.c
      150-159   determine_lmem_bar_size()    xe->mem.vram.io_start = pci_resource_start(pdev, LMEM_BAR);
                                             xe->mem.vram.io_size = pci_resource_len(pdev, LMEM_BAR);
                                             xe->mem.vram.dpa_base = 0;
                                             xe->mem.vram.mapping = ioremap_wc(xe->mem.vram.io_start, xe->mem.vram.io_size)
      281-284   vram_fini()                  if (xe->mem.vram.mapping) iounmap(xe->mem.vram.mapping); xe->mem...
      [all...]
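determine_lmem_bar_size() records the VRAM BAR with pci_resource_start()/pci_resource_len() and maps it write-combined with ioremap_wc(); vram_fini() undoes the mapping with iounmap(). A stripped-down sketch of that map/unmap pairing, with the demo_ helpers, BAR index handling, and error handling simplified relative to the real driver.

    #include <linux/pci.h>
    #include <linux/io.h>

    /* Illustrative: map a device-memory BAR write-combined. */
    static void __iomem *demo_map_vram_bar(struct pci_dev *pdev, int bar,
                                           resource_size_t *io_start,
                                           resource_size_t *io_size)
    {
            *io_start = pci_resource_start(pdev, bar);
            *io_size = pci_resource_len(pdev, bar);
            if (!*io_size)
                    return NULL;

            return ioremap_wc(*io_start, *io_size);
    }

    static void demo_unmap_vram_bar(void __iomem *mapping)
    {
            if (mapping)
                    iounmap(mapping);
    }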
/linux/drivers/gpu/drm/i915/gem/ |
  i915_gem_region.c
      14-20     i915_gem_object_init_memory_region()     argument; obj->mm.region = mem; list_add(&obj->mm.region_link, &mem->objects.list) under mem->objects.lock
      25-29     i915_gem_object_release_memory_region()  local:  mem = obj->mm.region
      33-45     __i915_gem_object_create_region()        argument
      107       i915_gem_object_create_region()          argument
      117       i915_gem_object_create_region_at()       argument
      [all...]
/linux/drivers/media/platform/mediatek/vcodec/common/ |
  mtk_vcodec_util.c
      48-75     mtk_vcodec_mem_alloc()       argument; mem->va = dma_alloc_attrs(&plat_dev->dev, mem->size, &mem->dma_addr, ...);
                                             debug print "[%d] - va = %p dma = 0x%lx size = 0x%zx"
      81        mtk_vcodec_mem_free()        argument
      [all...]
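mtk_vcodec_mem_alloc() is essentially a thin wrapper around dma_alloc_attrs(): it fills a struct mtk_vcodec_mem with the CPU virtual address, DMA address, and size. A hedged sketch of the same wrapper shape using a made-up buffer struct instead of the mediatek one.

    #include <linux/dma-mapping.h>
    #include <linux/device.h>
    #include <linux/errno.h>

    /* Hypothetical DMA buffer descriptor; "size" is filled in by the caller. */
    struct demo_dma_buf {
            void *va;
            dma_addr_t dma_addr;
            size_t size;
    };

    /* Allocate a coherent DMA buffer described by "buf". */
    static int demo_dma_buf_alloc(struct device *dev, struct demo_dma_buf *buf)
    {
            buf->va = dma_alloc_attrs(dev, buf->size, &buf->dma_addr,
                                      GFP_KERNEL, 0);
            return buf->va ? 0 : -ENOMEM;
    }

    static void demo_dma_buf_free(struct device *dev, struct demo_dma_buf *buf)
    {
            dma_free_attrs(dev, buf->size, buf->va, buf->dma_addr, 0);
            buf->va = NULL;
    }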
/linux/drivers/gpu/drm/nouveau/nvif/ |
  mem.c
      22        #include <nvif/mem.h>
      29-36     nvif_mem_ctor_map()          argument; nvif_mem_ctor(mmu, name, mmu->mem, NVIF_MEM_MAPPABLE | type, 0, size, NULL, 0, mem);
                                             nvif_object_map(&mem->object, NULL, 0); nvif_mem_dtor(mem) on failure
      42-44     nvif_mem_dtor()              argument; nvif_object_dtor(&mem->object)
      50-56     nvif_mem_ctor_type()         argument
      89        nvif_mem_ctor()              argument
      [all...]