/linux/drivers/iommu/iommufd/pages.c
    164  static void iopt_pages_add_npinned(struct iopt_pages *pages, size_t npages)  in iopt_pages_add_npinned()
    173  static void iopt_pages_sub_npinned(struct iopt_pages *pages, size_t npages)  in iopt_pages_sub_npinned()
    182  static void iopt_pages_err_unpin(struct iopt_pages *pages,  in iopt_pages_err_unpin()
    250  static struct iopt_area *iopt_pages_find_domain_area(struct iopt_pages *pages,  in iopt_pages_find_domain_area()
    591  unsigned long last_index, struct page **pages)  in pages_to_xarray()
    630  static void batch_from_pages(struct pfn_batch *batch, struct page **pages,  in batch_from_pages()
    675  static void batch_unpin(struct pfn_batch *batch, struct iopt_pages *pages,  in batch_unpin()
    766  struct iopt_pages *pages)  in pfn_reader_user_init()
    785  struct iopt_pages *pages)  in pfn_reader_user_destroy()
    854  struct iopt_pages *pages,  in pfn_reader_user_pin()
    [all …]
/linux/drivers/iommu/iommufd/io_pagetable.c
    24  struct iopt_pages *pages;  member
    208  struct iopt_pages *pages, unsigned long iova,  in iopt_insert_area()
    404  struct iopt_pages *pages, unsigned long *iova,  in iopt_map_common()
    455  struct iopt_pages *pages;  in iopt_map_user_pages() local
    482  struct iopt_pages *pages;  in iopt_map_file_pages() local
    724  struct iopt_pages *pages;  in iopt_unmap_iova_range() local
    954  struct iopt_pages *pages = area->pages;  in iopt_unfill_domain() local
    973  struct iopt_pages *pages = area->pages;  in iopt_unfill_domain() local
    1007  struct iopt_pages *pages = area->pages;  in iopt_fill_domain() local
    1032  struct iopt_pages *pages = area->pages;  in iopt_fill_domain() local
    [all …]
/linux/net/ceph/pagevec.c
    13  void ceph_put_page_vector(struct page **pages, int num_pages, bool dirty)  in ceph_put_page_vector()
    26  void ceph_release_page_vector(struct page **pages, int num_pages)  in ceph_release_page_vector()
    41  struct page **pages;  in ceph_alloc_page_vector() local
    58  void ceph_copy_from_page_vector(struct page **pages,  in ceph_copy_from_page_vector()
    85  void ceph_zero_page_vector_range(int off, int len, struct page **pages)  in ceph_zero_page_vector_range()
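The pagevec.c hits above are libceph's helpers for flat arrays of page pointers. A minimal sketch of how a caller typically pairs them; the function name and page count below are invented for illustration, only the ceph_*_page_vector() helpers are the ones listed:

#include <linux/ceph/libceph.h>
#include <linux/err.h>
#include <linux/gfp.h>

/* Sketch only: allocate a page vector, use it, then release it. */
static int example_page_vector(void)
{
	int num_pages = 4;
	struct page **pages;

	pages = ceph_alloc_page_vector(num_pages, GFP_KERNEL);
	if (IS_ERR(pages))
		return PTR_ERR(pages);

	/* Zero the whole vector before handing it to an OSD request. */
	ceph_zero_page_vector_range(0, num_pages * PAGE_SIZE, pages);

	/* Frees each page and the pointer array itself. */
	ceph_release_page_vector(pages, num_pages);
	return 0;
}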
/linux/mm/percpu-vm.c
    34  static struct page **pages;  in pcpu_get_pages() local
    55  struct page **pages, int page_start, int page_end)  in pcpu_free_pages()
    83  struct page **pages, int page_start, int page_end,  in pcpu_alloc_pages()
    154  struct page **pages, int page_start, int page_end)  in pcpu_unmap_pages()
    193  static int __pcpu_map_pages(unsigned long addr, struct page **pages,  in __pcpu_map_pages()
    215  struct page **pages, int page_start, int page_end)  in pcpu_map_pages()
    279  struct page **pages;  in pcpu_populate_chunk() local
    315  struct page **pages;  in pcpu_depopulate_chunk() local
/linux/mm/gup.c
    31  static inline void sanity_check_pinned_pages(struct page **pages,  in sanity_check_pinned_pages()
    284  void unpin_user_pages_dirty_lock(struct page **pages, unsigned long npages,  in unpin_user_pages_dirty_lock()
    375  static void gup_fast_unpin_user_pages(struct page **pages, unsigned long npages)  in gup_fast_unpin_user_pages()
    401  void unpin_user_pages(struct page **pages, unsigned long npages)  in unpin_user_pages()
    1356  unsigned int gup_flags, struct page **pages,  in __get_user_pages()
    1652  struct page **pages,  in __get_user_pages_locked()
    1980  unsigned long nr_pages, struct page **pages,  in __get_user_pages_locked()
    2209  struct page **pages;  member
    2437  struct page **pages)  in check_and_migrate_movable_pages()
    2449  struct page **pages)  in check_and_migrate_movable_pages()
    [all …]
/linux/mm/gup_test.c
    10  static void put_back_pages(unsigned int cmd, struct page **pages,  in put_back_pages()
    39  static void verify_dma_pinned(unsigned int cmd, struct page **pages,  in verify_dma_pinned()
    69  static void dump_pages_test(struct gup_test *gup, struct page **pages,  in dump_pages_test()
    106  struct page **pages;  in __gup_test_ioctl() local
    227  struct page **pages;  in pin_longterm_test_start() local
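The gup.c hits above are the pin/unpin side of get-user-pages. A minimal sketch of the usual calling pattern, assuming a driver that pins a user buffer for DMA; the wrapper and its parameters are invented for illustration, while pin_user_pages_fast() and unpin_user_pages_dirty_lock() are the real mm/gup.c entry points:

#include <linux/mm.h>
#include <linux/slab.h>

/* Sketch only: pin a user buffer, do DMA, then unpin it. */
static int example_pin_user_buffer(unsigned long uaddr, int nr_pages)
{
	struct page **pages;
	int pinned;

	pages = kcalloc(nr_pages, sizeof(*pages), GFP_KERNEL);
	if (!pages)
		return -ENOMEM;

	/* FOLL_LONGTERM: the pin may outlive the current syscall. */
	pinned = pin_user_pages_fast(uaddr, nr_pages,
				     FOLL_WRITE | FOLL_LONGTERM, pages);
	if (pinned < 0) {
		kfree(pages);
		return pinned;
	}

	/*
	 * ... program the pinned pages into the device and wait for I/O;
	 * a real driver must also handle pinned < nr_pages ...
	 */

	/* Unpin and mark dirty, since the device may have written to them. */
	unpin_user_pages_dirty_lock(pages, pinned, true);
	kfree(pages);
	return 0;
}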
/linux/drivers/media/common/videobuf2/frame_vector.c
    82  struct page **pages;  in put_vaddr_frames() local
    114  struct page **pages;  in frame_vector_to_pages() local
    140  struct page **pages;  in frame_vector_to_pfns() local
/linux/include/linux/balloon_compaction.h
    56  struct list_head pages; /* Pages enqueued & handled to Host */  member
    138  static inline void balloon_page_push(struct list_head *pages, struct page *page)  in balloon_page_push()
    150  static inline struct page *balloon_page_pop(struct list_head *pages)  in balloon_page_pop()
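balloon_page_push() and balloon_page_pop() above simply thread pages through page->lru on a caller-owned list. A sketch of the batching pattern, assuming balloon_page_alloc() as the allocator; the surrounding function is illustrative and a real balloon driver would hand the drained pages to the hypervisor instead of freeing them:

#include <linux/balloon_compaction.h>
#include <linux/gfp.h>
#include <linux/list.h>

/* Sketch only: build a local batch of balloon pages, then drain it. */
static void example_balloon_batch(unsigned int nr)
{
	LIST_HEAD(batch);
	struct page *page;
	unsigned int i;

	for (i = 0; i < nr; i++) {
		page = balloon_page_alloc();
		if (!page)
			break;
		balloon_page_push(&batch, page);	/* list_add(&page->lru, &batch) */
	}

	/* balloon_page_pop() returns NULL once the list is empty. */
	while ((page = balloon_page_pop(&batch)))
		__free_page(page);
}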
/linux/drivers/xen/xlate_mmu.c
    48  static void xen_for_each_gfn(struct page **pages, unsigned nr_gfn,  in xen_for_each_gfn()
    71  struct page **pages;  member
    148  struct page **pages)  in xen_xlate_remap_gfn_array()
    184  int nr, struct page **pages)  in xen_xlate_unmap_gfn_range()
    217  struct page **pages;  in xen_xlate_map_ballooned_pages() local
    267  struct page **pages;  member
/linux/drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c
    12  struct sg_table *pages)  in huge_free_pages()
    34  struct sg_table *pages;  in huge_get_pages() local
    86  struct sg_table *pages)  in huge_put_pages()
/linux/include/xen/xen-ops.h
    81  struct page **pages)  in xen_xlate_remap_gfn_array()
    87  int nr, struct page **pages)  in xen_xlate_unmap_gfn_range()
    118  struct page **pages)  in xen_remap_domain_gfn_array()
    177  struct page **pages)  in xen_remap_domain_gfn_range()
/linux/include/xen/mem-reservation.h
    38  struct page **pages,  in xenmem_reservation_va_mapping_update()
    48  struct page **pages)  in xenmem_reservation_va_mapping_reset()
/linux/include/xen/grant_table.h
    81  struct page **pages;  member
    220  struct page *pages;  member
    222  struct list_head pages;  member
    242  struct page **pages;  member
/linux/arch/s390/kernel/diag/diag310.c
    110  static int diag310_get_memtop_size(unsigned long *pages, unsigned long level)  in diag310_get_memtop_size()
    129  static int diag310_store_topology_map(void *buf, unsigned long pages, unsigned long level)  in diag310_store_topology_map()
    193  unsigned long pages;  in memtop_get_page_count() local
    229  unsigned long pages, level;  in diag310_memtop_len() local
    248  unsigned long level, pages, data_size;  in diag310_memtop_buf() local
/linux/arch/arm64/include/asm/tlbflush.h
    177  #define __TLBI_RANGE_NUM(pages, scale) \  argument
    379  #define __flush_tlb_range_op(op, start, pages, stride, \  argument
    416  #define __flush_s2_tlb_range_op(op, start, pages, stride, tlb_level) \  argument
    420  unsigned long end, unsigned long pages, unsigned long stride)  in __flush_tlb_range_limit_excess()
    441  unsigned long asid, pages;  in __flush_tlb_range_nosync() local
    490  unsigned long pages;  in flush_tlb_kernel_range() local
/linux/drivers/hwtracing/coresight/coresight-tmc-etr.c
    52  void **pages;  member
    199  enum dma_data_direction dir, void **pages)  in tmc_pages_alloc()
    296  static int tmc_alloc_data_pages(struct tmc_sg_table *sg_table, void **pages)  in tmc_alloc_data_pages()
    330  void **pages)  in tmc_alloc_sg_table()
    571  unsigned long size, void **pages)  in tmc_init_etr_sg_table()
    606  void **pages)  in tmc_etr_alloc_flat_buf()
    704  void **pages)  in tmc_etr_alloc_resrv_buf()
    774  void **pages)  in tmc_etr_alloc_sg_buf()
    890  int node, void **pages)  in tmc_etr_mode_alloc_buf()
    937  int node, void **pages)  in tmc_alloc_etr_buf()
    [all …]
/linux/arch/x86/include/asm/pgtable_32.h
    60  #define PAGE_TABLE_SIZE(pages) (((pages) / PTRS_PER_PMD) + PTRS_PER_PGD)  argument
    62  #define PAGE_TABLE_SIZE(pages) ((pages) / PTRS_PER_PGD)  argument
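The two PAGE_TABLE_SIZE() variants above are the PAE and non-PAE i386 cases. Assuming the standard i386 values (512 PTEs per PAE page table with a 4-entry PGD, 1024 PTEs per non-PAE table; these constants are not shown in the listing), mapping 1 GiB of lowmem, i.e. 262144 4 KiB pages, takes 262144 / 512 + 4 = 516 page-table pages with PAE and 262144 / 1024 = 256 without.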
/linux/drivers/gpu/drm/i915/gem/i915_gem_pages.c
    22  struct sg_table *pages)  in __i915_gem_object_set_pages()
    214  struct sg_table *pages;  in __i915_gem_object_unset_pages() local
    243  struct sg_table *pages;  in __i915_gem_object_put_pages() local
    277  struct page *stack[32], **pages = stack, *page;  in i915_gem_object_map_page() local
    361  struct page **pages;  member
    379  struct page **pages;  in i915_gem_object_panic_pages() local
/linux/drivers/block/xen-blkback/blkback.c
    246  struct page *pages[BLKIF_MAX_SEGMENTS_PER_REQUEST];  in free_persistent_gnts() local
    293  struct page *pages[BLKIF_MAX_SEGMENTS_PER_REQUEST];  in xen_blkbk_unmap_purged_grants() local
    647  struct grant_page **pages,  in xen_blkbk_unmap_prepare()
    707  struct grant_page **pages = req->segments;  in xen_blkbk_unmap_and_respond() local
    732  struct grant_page *pages[],  in xen_blkbk_unmap()
    757  struct grant_page *pages[],  in xen_blkbk_map()
    918  struct grant_page **pages = pending_req->indirect_pages;  in xen_blkbk_parse_indirect() local
    1304  struct grant_page **pages = pending_req->segments;  in dispatch_rw_block_io() local
/linux/drivers/staging/media/ipu3/ipu3-dmamap.c
    20  static void imgu_dmamap_free_buffer(struct page **pages,  in imgu_dmamap_free_buffer()
    36  struct page **pages;  in imgu_dmamap_alloc_buffer() local
    100  struct page **pages;  in imgu_dmamap_alloc() local
/linux/drivers/media/pci/intel/ipu6/ipu6-dma.c
    23  struct page **pages;  member
    62  struct page **pages;  in __alloc_buffer() local
    101  static void __free_buffer(struct page **pages, size_t size, unsigned long attrs)  in __free_buffer()
    162  struct page **pages;  in ipu6_dma_alloc() local
    252  struct page **pages;  in ipu6_dma_free() local
/linux/arch/s390/hypfs/hypfs_diag.c
    56  void *diag204_get_buffer(enum diag204_format fmt, int *pages)  in diag204_get_buffer()
    96  int pages, rc;  in diag204_probe() local
    139  int diag204_store(void *buf, int pages)  in diag204_store()
/linux/drivers/staging/media/ipu7/ipu7-dma.c
    23  struct page **pages;  member
    62  struct page **pages;  in __alloc_buffer() local
    101  static void __free_buffer(struct page **pages, size_t size, unsigned long attrs)  in __free_buffer()
    162  struct page **pages;  in ipu7_dma_alloc() local
    252  struct page **pages;  in ipu7_dma_free() local
/linux/fs/isofs/compress.c
    42  struct page **pages, unsigned poffset,  in zisofs_uncompress_block()
    207  struct page **pages)  in zisofs_fill_pages()
    312  struct page **pages;  in zisofs_read_folio() local
/linux/drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c
    302  int pages)  in amdgpu_gart_unbind()
    350  int pages, dma_addr_t *dma_addr, uint64_t flags,  in amdgpu_gart_map()
    386  int pages, dma_addr_t *dma_addr,  in amdgpu_gart_bind()