
Searched refs:page_count (Results 1 – 25 of 113) sorted by relevance


/linux/drivers/gpu/drm/nouveau/
nouveau_bo90b5.c
40 u32 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_copy() local
43 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_copy()
44 while (page_count) { in nvc0_bo_move_copy()
45 int line_count = (page_count > 8191) ? 8191 : page_count; in nvc0_bo_move_copy()
61 page_count -= line_count; in nvc0_bo_move_copy()
nouveau_bo0039.c
55 u32 page_count = PFN_UP(new_reg->size); in nv04_bo_move_m2mf() local
65 page_count = PFN_UP(new_reg->size); in nv04_bo_move_m2mf()
66 while (page_count) { in nv04_bo_move_m2mf()
67 int line_count = (page_count > 2047) ? 2047 : page_count; in nv04_bo_move_m2mf()
88 page_count -= line_count; in nv04_bo_move_m2mf()
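
Both nouveau hits above share the same shape: the buffer's total page_count is drained in chunks, with at most 8191 lines per copy command on NVC0 and 2047 on NV04. A minimal stand-alone sketch of that clamping loop, with a hypothetical submit_copy() standing in for the hardware-specific command emission:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the per-chunk hardware copy submission. */
    static void submit_copy(uint32_t line_count)
    {
        printf("copy %u lines\n", (unsigned)line_count);
    }

    /* Drain page_count in chunks of at most max_lines, as in the
     * nvc0 (8191) and nv04 (2047) move helpers. */
    static void copy_in_chunks(uint32_t page_count, uint32_t max_lines)
    {
        while (page_count) {
            uint32_t line_count = (page_count > max_lines) ? max_lines : page_count;

            submit_copy(line_count);
            page_count -= line_count;
        }
    }

    int main(void)
    {
        copy_in_chunks(20000, 8191); /* 8191 + 8191 + 3618 */
        return 0;
    }
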
/linux/drivers/hv/
mshv_root_hv_call.c
191 u64 page_count = page_struct_count; in hv_do_map_gpa_hcall() local
193 if (page_count == 0 || (pages && mmio_spa)) in hv_do_map_gpa_hcall()
200 if (!HV_PAGE_COUNT_2M_ALIGNED(page_count)) in hv_do_map_gpa_hcall()
204 page_count >>= large_shift; in hv_do_map_gpa_hcall()
207 while (done < page_count) { in hv_do_map_gpa_hcall()
208 ulong i, completed, remain = page_count - done; in hv_do_map_gpa_hcall()
268 int hv_call_map_gpa_pages(u64 partition_id, u64 gpa_target, u64 page_count, in hv_call_map_gpa_pages() argument
271 return hv_do_map_gpa_hcall(partition_id, gpa_target, page_count, in hv_call_map_gpa_pages()
294 u64 status, page_count = page_count_4k; in hv_call_unmap_gpa_pages() local
298 if (page_count in hv_call_unmap_gpa_pages()
417 hv_call_get_vp_state(u32 vp_index,u64 partition_id,struct hv_vp_state_data state_data,u64 page_count,struct page ** pages,union hv_output_get_vp_state * ret_output) hv_call_get_vp_state() argument
471 hv_call_set_vp_state(u32 vp_index,u64 partition_id,struct hv_vp_state_data state_data,u64 page_count,struct page ** pages,u32 num_bytes,u8 * bytes) hv_call_set_vp_state() argument
796 u64 page_count = page_struct_count; hv_call_modify_spa_host_access() local
[all...]
hv_proc.c
24 int i, j, page_count; in hv_call_deposit_pages() local
82 for (i = 0, page_count = 0; i < num_allocations; ++i) { in hv_call_deposit_pages()
84 for (j = 0; j < counts[i]; ++j, ++page_count) in hv_call_deposit_pages()
85 input_page->gpa_page_list[page_count] = base_pfn + j; in hv_call_deposit_pages()
88 page_count, 0, input_page, NULL); in hv_call_deposit_pages()
mshv_root_main.c
616 unsigned long user_pfn, size_t page_count, in mshv_vp_ioctl_get_set_state_pfn() argument
623 if (page_count > INT_MAX) in mshv_vp_ioctl_get_set_state_pfn()
628 * (user_pfn + (page_count - 1)) * PAGE_SIZE in mshv_vp_ioctl_get_set_state_pfn()
630 if (check_add_overflow(user_pfn, (page_count - 1), &check)) in mshv_vp_ioctl_get_set_state_pfn()
636 pages = kcalloc(page_count, sizeof(struct page *), GFP_KERNEL); in mshv_vp_ioctl_get_set_state_pfn()
640 for (completed = 0; completed < page_count; completed += ret) { in mshv_vp_ioctl_get_set_state_pfn()
642 int remaining = page_count - completed; in mshv_vp_ioctl_get_set_state_pfn()
656 state_data, page_count, pages, in mshv_vp_ioctl_get_set_state_pfn()
661 state_data, page_count, pages, in mshv_vp_ioctl_get_set_state_pfn()
746 size_t page_count in mshv_vp_ioctl_get_set_state() local
1089 mshv_region_remap_pages(struct mshv_mem_region * region,u32 map_flags,u64 page_offset,u64 page_count) mshv_region_remap_pages() argument
1115 mshv_region_evict_pages(struct mshv_mem_region * region,u64 page_offset,u64 page_count) mshv_region_evict_pages() argument
1132 mshv_region_populate_pages(struct mshv_mem_region * region,u64 page_offset,u64 page_count) mshv_region_populate_pages() argument
[all...]
mshv_root.h
259 int hv_call_map_gpa_pages(u64 partition_id, u64 gpa_target, u64 page_count,
261 int hv_call_unmap_gpa_pages(u64 partition_id, u64 gpa_target, u64 page_count,
275 u64 page_count, struct page **pages,
279 struct hv_vp_state_data state_data, u64 page_count,
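
The mshv_root_main.c hits above also show the defensive arithmetic around a caller-supplied page_count: the count is capped at INT_MAX and user_pfn + (page_count - 1) is checked for overflow before any pages are pinned. A rough user-space sketch of that guard, using the compiler builtin __builtin_add_overflow() in place of the kernel's check_add_overflow(); the function name is illustrative:

    #include <stdio.h>
    #include <limits.h>

    /* Reject a pfn range whose last element cannot be represented. */
    static int pfn_range_ok(unsigned long user_pfn, unsigned long page_count)
    {
        unsigned long last;

        if (page_count == 0 || page_count > INT_MAX)
            return 0;
        if (__builtin_add_overflow(user_pfn, page_count - 1, &last))
            return 0;
        return 1;
    }

    int main(void)
    {
        printf("%d\n", pfn_range_ok(~0UL, 2)); /* 0: last pfn would wrap */
        printf("%d\n", pfn_range_ok(100, 16)); /* 1 */
        return 0;
    }
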
/linux/drivers/firewire/
core-iso.c
31 int fw_iso_buffer_alloc(struct fw_iso_buffer *buffer, int page_count) in fw_iso_buffer_alloc() argument
35 buffer->page_count = 0; in fw_iso_buffer_alloc()
37 buffer->pages = kmalloc_array(page_count, sizeof(buffer->pages[0]), in fw_iso_buffer_alloc()
42 for (i = 0; i < page_count; i++) { in fw_iso_buffer_alloc()
47 buffer->page_count = i; in fw_iso_buffer_alloc()
48 if (i < page_count) { in fw_iso_buffer_alloc()
64 for (i = 0; i < buffer->page_count; i++) { in fw_iso_buffer_map_dma()
73 if (i < buffer->page_count) in fw_iso_buffer_map_dma()
80 int page_count, enum dma_data_direction direction) in fw_iso_buffer_init() argument
84 ret = fw_iso_buffer_alloc(buffer, page_count); in fw_iso_buffer_init()
[all...]
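
The core-iso.c excerpt is the partial-allocation pattern: buffer->page_count only ever records how many pages were actually obtained, so the failure path (and a later free) can trust it even when the loop stops early. A simplified user-space sketch, with malloc() standing in for the kernel page allocator and immediate cleanup on shortfall:

    #include <stdlib.h>

    struct iso_buffer {
        void **pages;
        int page_count; /* pages actually allocated */
    };

    static int iso_buffer_alloc(struct iso_buffer *buffer, int page_count)
    {
        int i;

        buffer->page_count = 0;
        buffer->pages = calloc(page_count, sizeof(buffer->pages[0]));
        if (!buffer->pages)
            return -1;

        for (i = 0; i < page_count; i++) {
            buffer->pages[i] = malloc(4096);
            if (!buffer->pages[i])
                break;
        }
        buffer->page_count = i; /* may be short of the request */
        if (i < page_count) {
            while (i--)
                free(buffer->pages[i]);
            free(buffer->pages);
            buffer->pages = NULL;
            buffer->page_count = 0;
            return -1;
        }
        return 0;
    }

    int main(void)
    {
        struct iso_buffer buf;

        return iso_buffer_alloc(&buf, 8) == 0 ? 0 : 1;
    }
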
/linux/drivers/char/agp/
generic.c
181 if (curr->page_count != 0) { in agp_free_memory()
186 for (i = 0; i < curr->page_count; i++) { in agp_free_memory()
191 for (i = 0; i < curr->page_count; i++) { in agp_free_memory()
210 * @page_count: size_t argument of the number of pages
219 size_t page_count, u32 type) in agp_allocate_memory() argument
230 if ((cur_memory + page_count > bridge->max_memory_agp) || in agp_allocate_memory()
231 (cur_memory + page_count < page_count)) in agp_allocate_memory()
235 new = agp_generic_alloc_user(page_count, type); in agp_allocate_memory()
242 new = bridge->driver->alloc_by_type(page_count, typ in agp_allocate_memory()
1145 agp_generic_alloc_by_type(size_t page_count,int type) agp_generic_alloc_by_type() argument
1159 agp_generic_alloc_user(size_t page_count,int type) agp_generic_alloc_user() argument
[all...]
nvidia-agp.c
211 if (mem->page_count == 0) in nvidia_insert_memory()
214 if ((pg_start + mem->page_count) > in nvidia_insert_memory()
218 for (j = pg_start; j < (pg_start + mem->page_count); j++) { in nvidia_insert_memory()
227 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in nvidia_insert_memory()
251 if (mem->page_count == 0) in nvidia_remove_memory()
254 for (i = pg_start; i < (mem->page_count + pg_start); i++) in nvidia_remove_memory()
ati-agp.c
273 if (mem->page_count == 0) in ati_insert_memory()
276 if ((pg_start + mem->page_count) > num_entries) in ati_insert_memory()
280 while (j < (pg_start + mem->page_count)) { in ati_insert_memory()
294 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in ati_insert_memory()
319 if (mem->page_count == 0) in ati_remove_memory()
322 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in ati_remove_memory()
intel-gtt.c
129 DBG("try unmapping %lu pages\n", (unsigned long)mem->page_count); in intel_gtt_unmap_memory()
219 if ((pg_start + mem->page_count) in i810_insert_dcache_entries()
226 for (i = pg_start; i < (pg_start + mem->page_count); i++) { in i810_insert_dcache_entries()
271 new->page_count = pg_count; in alloc_agpphysmem_i8xx()
282 if (curr->page_count == 4) in intel_i810_free_by_type()
946 if (mem->page_count == 0) in intel_fake_agp_insert_entries()
949 if (pg_start + mem->page_count > intel_private.gtt_total_entries) in intel_fake_agp_insert_entries()
964 ret = intel_gtt_map_memory(mem->pages, mem->page_count, &st); in intel_fake_agp_insert_entries()
972 intel_gmch_gtt_insert_pages(pg_start, mem->page_count, mem->pages, in intel_fake_agp_insert_entries()
999 if (mem->page_count in intel_fake_agp_remove_entries()
[all...]
efficeon-agp.c
238 int i, count = mem->page_count, num_entries; in efficeon_insert_memory()
246 if ((pg_start + mem->page_count) > num_entries) in efficeon_insert_memory()
287 int i, count = mem->page_count, num_entries; in efficeon_remove_memory()
293 if ((pg_start + mem->page_count) > num_entries) in efficeon_remove_memory()
uninorth-agp.c
166 if (mem->page_count == 0) in uninorth_insert_memory()
172 if ((pg_start + mem->page_count) > num_entries) in uninorth_insert_memory()
176 for (i = 0; i < mem->page_count; ++i) { in uninorth_insert_memory()
185 for (i = 0; i < mem->page_count; i++) { in uninorth_insert_memory()
215 if (mem->page_count == 0) in uninorth_remove_memory()
219 for (i = 0; i < mem->page_count; ++i) { in uninorth_remove_memory()
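
Two guard patterns recur across the AGP hits above: agp_allocate_memory() refuses a request when cur_memory + page_count exceeds the bridge limit or wraps around, and the per-driver insert paths all verify that pg_start + mem->page_count still fits within the aperture's num_entries. A compact sketch of both checks with illustrative names:

    #include <stdio.h>
    #include <stdbool.h>
    #include <stddef.h>

    static bool alloc_fits(size_t cur_memory, size_t page_count, size_t max_pages)
    {
        /* Unsigned overflow makes the sum smaller than page_count. */
        return cur_memory + page_count <= max_pages &&
               cur_memory + page_count >= page_count;
    }

    static bool insert_fits(size_t pg_start, size_t page_count, size_t num_entries)
    {
        /* The mapped range must end inside the aperture; the drivers
         * return early (success) when page_count is 0. */
        return pg_start + page_count <= num_entries;
    }

    int main(void)
    {
        /* Prints "0 0": the first sum wraps, the second overruns. */
        printf("%d %d\n", alloc_fits((size_t)-1, 2, 1024),
               insert_fits(1000, 100, 1024));
        return 0;
    }
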
/linux/fs/squashfs/
block.c
81 u64 read_start, u64 read_end, int page_count) in squashfs_bio_read_cached() argument
91 struct folio **cache_folios = kmalloc_array(page_count, in squashfs_bio_read_cached()
114 else if (idx == page_count - 1 && index + length != read_end) in squashfs_bio_read_cached()
178 for (idx = 0; idx < page_count; idx++) { in squashfs_bio_read_cached()
227 const int page_count = DIV_ROUND_UP(total_len + offset, PAGE_SIZE); in squashfs_bio_read() local
231 bio = bio_kmalloc(page_count, GFP_NOIO); in squashfs_bio_read()
234 bio_init(bio, sb->s_bdev, bio->bi_inline_vecs, page_count, REQ_OP_READ); in squashfs_bio_read()
237 for (i = 0; i < page_count; ++i) { in squashfs_bio_read()
264 page_count); in squashfs_bio_read()
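
The squashfs_bio_read() hit computes page_count as DIV_ROUND_UP(total_len + offset, PAGE_SIZE): a read that begins offset bytes into its first page still has to cover that partial page, so the full byte span is rounded up to whole pages before sizing the bio. A small worked example of that arithmetic (PAGE_SIZE assumed to be 4096):

    #include <stdio.h>

    #define PAGE_SIZE 4096
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        int total_len = 10000, offset = 300;
        int page_count = DIV_ROUND_UP(total_len + offset, PAGE_SIZE);

        /* 10300 bytes starting mid-page span 3 pages. */
        printf("page_count = %d\n", page_count);
        return 0;
    }
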
/linux/drivers/vfio/pci/pds/
dirty.c
65 "region_info[%d]: dma_base 0x%llx page_count %u page_size_log2 %u\n", in pds_vfio_print_guest_region_info()
67 le32_to_cpu(region_info[i].page_count), in pds_vfio_print_guest_region_info()
145 u32 page_count) in pds_vfio_dirty_alloc_sgl() argument
154 max_sge = DIV_ROUND_UP(page_count, PAGE_SIZE * 8); in pds_vfio_dirty_alloc_sgl()
199 u32 page_count; in pds_vfio_dirty_alloc_regions() local
201 /* page_count might be adjusted by the device */ in pds_vfio_dirty_alloc_regions()
202 page_count = le32_to_cpu(ri->page_count); in pds_vfio_dirty_alloc_regions()
204 region_size = page_count * region_page_size; in pds_vfio_dirty_alloc_regions()
207 page_count / BITS_PER_BYT in pds_vfio_dirty_alloc_regions()
301 u32 page_count; pds_vfio_dirty_enable() local
[all...]
/linux/sound/soc/sof/amd/
acp-loader.c
55 u32 dma_size, page_count; in acp_dsp_block_write() local
64 page_count = PAGE_ALIGN(size_fw) >> PAGE_SHIFT; in acp_dsp_block_write()
65 dma_size = page_count * ACP_PAGE_SIZE; in acp_dsp_block_write()
171 u32 page_count, dma_size; in acp_dsp_pre_fw_run() local
181 page_count = PAGE_ALIGN(size_fw) >> PAGE_SHIFT; in acp_dsp_pre_fw_run()
182 adata->fw_bin_page_count = page_count; in acp_dsp_pre_fw_run()
184 configure_pte_for_fw_loading(FW_BIN, page_count, adata); in acp_dsp_pre_fw_run()
193 src_addr = ACP_SYSTEM_MEMORY_WINDOW + (page_count * ACP_PAGE_SIZE); in acp_dsp_pre_fw_run()
208 (page_count * ACP_PAGE_SIZE); in acp_dsp_pre_fw_run()
/linux/fs/orangefs/
orangefs-bufmap.c
152 int page_count; member
171 unpin_user_pages(bufmap->page_array, bufmap->page_count); in orangefs_bufmap_unmap()
227 bufmap->page_count = bufmap->total_size / PAGE_SIZE; in orangefs_bufmap_alloc()
231 kcalloc(bufmap->page_count, sizeof(struct page *), GFP_KERNEL); in orangefs_bufmap_alloc()
256 bufmap->page_count, FOLL_WRITE, bufmap->page_array); in orangefs_bufmap_map()
261 if (ret != bufmap->page_count) { in orangefs_bufmap_map()
263 bufmap->page_count, ret); in orangefs_bufmap_map()
276 for (i = 0; i < bufmap->page_count; i++) in orangefs_bufmap_map()
/linux/drivers/target/
target_core_rd.c
68 u32 i, j, page_count = 0, sg_per_table; in rd_release_sgl_table() local
78 page_count++; in rd_release_sgl_table()
85 return page_count; in rd_release_sgl_table()
90 u32 page_count; in rd_release_device_space() local
95 page_count = rd_release_sgl_table(rd_dev, rd_dev->sg_table_array, in rd_release_device_space()
100 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count, in rd_release_device_space()
101 rd_dev->sg_table_count, (unsigned long)page_count * PAGE_SIZE); in rd_release_device_space()
216 u32 page_count; in rd_release_prot_space() local
221 page_count = rd_release_sgl_table(rd_dev, rd_dev->sg_prot_array, in rd_release_prot_space()
226 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count, in rd_release_prot_space()
[all...]
/linux/drivers/w1/slaves/
w1_ds2433.c
44 unsigned int page_count; /* number of 256 bits pages */ member
50 .page_count = 16,
56 .page_count = 80,
94 bitmap_zero(data->validcrc, data->cfg->page_count); in w1_f23_refresh_block()
337 if (data->cfg->page_count > W1_VALIDCRC_MAX) { in w1_f23_add_slave()
347 bitmap_zero(data->validcrc, data->cfg->page_count); in w1_f23_add_slave()
/linux/drivers/gpu/drm/i915/gem/
i915_gem_shmem.c
68 unsigned int page_count; /* restricted by sg_alloc_table */ in shmem_sg_alloc_table() local
75 if (overflows_type(size / PAGE_SIZE, page_count)) in shmem_sg_alloc_table()
78 page_count = size / PAGE_SIZE; in shmem_sg_alloc_table()
86 if (sg_alloc_table(st, page_count, GFP_KERNEL | __GFP_NOWARN)) in shmem_sg_alloc_table()
101 for (i = 0; i < page_count; i++) { in shmem_sg_alloc_table()
121 i915_gem_shrink(NULL, i915, 2 * page_count, NULL, *s++); in shmem_sg_alloc_table()
155 folio_nr_pages(folio), page_count - i); in shmem_sg_alloc_table()
/linux/drivers/virt/vboxguest/
vboxguest_utils.c
199 u32 page_count; in hgcm_call_add_pagelist_size() local
201 page_count = hgcm_call_buf_size_in_pages(buf, len); in hgcm_call_add_pagelist_size()
202 *extra += offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]); in hgcm_call_add_pagelist_size()
340 u32 i, page_count; in hgcm_call_init_linaddr() local
351 page_count = hgcm_call_buf_size_in_pages(buf, len); in hgcm_call_init_linaddr()
359 dst_pg_lst->page_count = page_count; in hgcm_call_init_linaddr()
361 for (i = 0; i < page_count; i++) { in hgcm_call_init_linaddr()
371 *off_extra += offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]); in hgcm_call_init_linaddr()
/linux/tools/testing/radix-tree/
regression2.c
61 unsigned long page_count = 0; variable
71 p->index = page_count++; in page_alloc()
/linux/drivers/media/pci/ivtv/
ivtv-yuv.c
54 if (dma->SG_length || dma->page_count) { in ivtv_yuv_prep_user_dma()
56 ("prep_user_dma: SG_length %d page_count %d still full?\n", in ivtv_yuv_prep_user_dma()
57 dma->SG_length, dma->page_count); in ivtv_yuv_prep_user_dma()
66 y_dma.page_count, &dma->map[0], 0); in ivtv_yuv_prep_user_dma()
68 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
70 uv_dma.page_count, &dma->map[y_pages], 0); in ivtv_yuv_prep_user_dma()
73 if (y_pages != y_dma.page_count || uv_pages != uv_dma.page_count) { in ivtv_yuv_prep_user_dma()
76 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
79 uv_pages, uv_dma.page_count); in ivtv_yuv_prep_user_dma()
[all...]
/linux/drivers/gpu/drm/ttm/
ttm_agp_backend.c
66 mem->page_count = 0; in ttm_agp_bind()
73 mem->pages[mem->page_count++] = page; in ttm_agp_bind()
/linux/arch/riscv/kvm/
nacl.c
20 unsigned long page_count) in __kvm_riscv_nacl_hfence() argument
50 *entp = cpu_to_lelong(page_count); in __kvm_riscv_nacl_hfence()
