Searched refs:last_pfn (Results 1 – 7 of 7) sorted by relevance

/linux/arch/sparc/mm/init_32.c
    83  unsigned long curr_pfn, last_pfn;  in calc_max_low_pfn() local
    85  last_pfn = (sp_banks[0].base_addr + sp_banks[0].num_bytes) >> PAGE_SHIFT;  in calc_max_low_pfn()
    90  if (last_pfn < tmp)  in calc_max_low_pfn()
    91  tmp = last_pfn;  in calc_max_low_pfn()
    95  last_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT;  in calc_max_low_pfn()
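
The sparc fragments compute the pfn just past each memory bank as (base_addr + num_bytes) >> PAGE_SHIFT and fold that into a running limit. Below is a minimal userspace sketch of only that end-pfn arithmetic; the struct bank layout, the PAGE_SHIFT value, and the sample banks are assumptions, and the clamping against the low-memory boundary that calc_max_low_pfn() also performs is left out.

#include <stdio.h>

#define PAGE_SHIFT 12UL			/* assumed 4 KiB pages */

struct bank {				/* stand-in for sparc's sp_banks[] entries */
	unsigned long base_addr;
	unsigned long num_bytes;
};

/* Highest pfn just past the end of any bank; the real calc_max_low_pfn()
 * additionally clamps against the low-memory boundary. */
static unsigned long max_bank_end_pfn(const struct bank *banks, int n)
{
	unsigned long last_pfn, max_pfn = 0;

	for (int i = 0; i < n; i++) {
		last_pfn = (banks[i].base_addr + banks[i].num_bytes) >> PAGE_SHIFT;
		if (last_pfn > max_pfn)
			max_pfn = last_pfn;
	}
	return max_pfn;
}

int main(void)
{
	struct bank banks[] = {
		{ 0x00000000UL, 0x10000000UL },	/* 256 MiB at 0 */
		{ 0x40000000UL, 0x08000000UL },	/* 128 MiB at 1 GiB */
	};

	printf("max end pfn = %#lx\n", max_bank_end_pfn(banks, 2));
	return 0;
}
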
/linux/arch/mips/mm/ioremap.c
    50  unsigned long offset, pfn, last_pfn;  in ioremap_prot() local
    84  last_pfn = PFN_DOWN(last_addr);  in ioremap_prot()
    85  if (walk_system_ram_range(pfn, last_pfn - pfn + 1, NULL,  in ioremap_prot()
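
The interesting detail in the mips hit is the inclusive range arithmetic: last_pfn is the frame of the mapping's last byte, so the page count passed to walk_system_ram_range() is last_pfn - pfn + 1. A small sketch of just that arithmetic, with PFN_DOWN and PAGE_SHIFT reimplemented here as assumptions:

#include <stdio.h>

#define PAGE_SHIFT	12UL			/* assumed 4 KiB pages */
#define PFN_DOWN(x)	((x) >> PAGE_SHIFT)	/* reimplemented for the sketch */

int main(void)
{
	unsigned long phys_addr = 0x1fe00010UL;	/* arbitrary example region */
	unsigned long size      = 0x2000UL;

	/* last_addr is the address of the region's final byte, so both end
	 * pfns are inclusive and the page count needs the +1. */
	unsigned long last_addr = phys_addr + size - 1;
	unsigned long pfn       = PFN_DOWN(phys_addr);
	unsigned long last_pfn  = PFN_DOWN(last_addr);
	unsigned long nr_pages  = last_pfn - pfn + 1;

	printf("pfn %#lx..%#lx -> %lu page(s)\n", pfn, last_pfn, nr_pages);
	return 0;
}

Because the example region is not page-aligned, the 0x2000-byte request spans three frames, which is exactly what the inclusive-end formula reports.
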
/linux/arch/x86/kernel/e820.c
   832  unsigned long last_pfn = 0;  in e820__end_ram_pfn() local
   850  last_pfn = limit_pfn;  in e820__end_ram_pfn()
   853  if (end_pfn > last_pfn)  in e820__end_ram_pfn()
   854  last_pfn = end_pfn;  in e820__end_ram_pfn()
   857  if (last_pfn > max_arch_pfn)  in e820__end_ram_pfn()
   858  last_pfn = max_arch_pfn;  in e820__end_ram_pfn()
   860  pr_info("last_pfn = %#lx max_arch_pfn = %#lx\n",  in e820__end_ram_pfn()
   861  last_pfn, max_arch_pfn);  in e820__end_ram_pfn()
   862  return last_pfn;  in e820__end_ram_pfn()
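
The e820 hit shows the usual scan-and-clamp shape: walk the firmware memory map, track the highest RAM end pfn, stop at a caller-supplied limit, and cap the result at the architecture maximum. The sketch below reproduces that shape in userspace; the struct ram_entry layout, PAGE_SHIFT, and the sample map are assumptions, and the real e820__end_ram_pfn() additionally filters entries by E820 type.

#include <stdio.h>

#define PAGE_SHIFT 12UL				/* assumed 4 KiB pages */

struct ram_entry {				/* stand-in for a firmware RAM entry */
	unsigned long long addr;
	unsigned long long size;
};

/* Highest RAM pfn below limit_pfn, clamped to max_arch_pfn. */
static unsigned long end_ram_pfn(const struct ram_entry *map, int n,
				 unsigned long limit_pfn,
				 unsigned long max_arch_pfn)
{
	unsigned long last_pfn = 0;

	for (int i = 0; i < n; i++) {
		unsigned long start_pfn = map[i].addr >> PAGE_SHIFT;
		unsigned long end_pfn = (map[i].addr + map[i].size) >> PAGE_SHIFT;

		if (start_pfn >= limit_pfn)	/* entirely above the limit */
			continue;
		if (end_pfn > limit_pfn)	/* straddles the limit */
			end_pfn = limit_pfn;
		if (end_pfn > last_pfn)
			last_pfn = end_pfn;
	}
	if (last_pfn > max_arch_pfn)
		last_pfn = max_arch_pfn;

	printf("last_pfn = %#lx max_arch_pfn = %#lx\n", last_pfn, max_arch_pfn);
	return last_pfn;
}

int main(void)
{
	struct ram_entry map[] = {
		{ 0x000000000ULL, 0x7ff00000ULL },	/* ~2 GiB below 4 GiB */
		{ 0x100000000ULL, 0x100000000ULL },	/* 4 GiB above 4 GiB */
	};

	end_ram_pfn(map, 2, ~0UL, 0x400000UL);		/* no limit, example arch max */
	return 0;
}
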
/linux/drivers/iommu/intel/iommu.c
   804  unsigned long last_pfn)  in dma_pte_clear_range() argument
   809  if (WARN_ON(!domain_pfn_supported(domain, last_pfn)) ||  in dma_pte_clear_range()
   810  WARN_ON(start_pfn > last_pfn))  in dma_pte_clear_range()
   825  } while (start_pfn <= last_pfn && !first_pte_in_page(pte));  in dma_pte_clear_range()
   830  } while (start_pfn && start_pfn <= last_pfn);  in dma_pte_clear_range()
   836  unsigned long last_pfn)  in dma_pte_free_level() argument
   854  last_pfn);  in dma_pte_free_level()
   862  last_pfn < level_pfn + level_size(level) - 1)) {  in dma_pte_free_level()
   869  } while (!first_pte_in_page(++pte) && pfn <= last_pfn);  in dma_pte_free_level()
   878  unsigned long last_pfn,  in dma_pte_free_pagetable() argument
   920  dma_pte_clear_level(struct dmar_domain * domain,int level,struct dma_pte * pte,unsigned long pfn,unsigned long start_pfn,unsigned long last_pfn,struct iommu_pages_list * freelist)  dma_pte_clear_level() argument
   966  domain_unmap(struct dmar_domain * domain,unsigned long start_pfn,unsigned long last_pfn,struct iommu_pages_list * freelist)  domain_unmap() argument
  3619  unsigned long start_pfn, last_pfn;  intel_iommu_unmap() local
  [all...]
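
The intel-iommu hits all clear or free page-table entries over an inclusive [start_pfn, last_pfn] range, where the inner do/while stops at a page-table page boundary (first_pte_in_page()) and the outer loop also guards against start_pfn wrapping to zero. A hedged userspace sketch of just that chunking logic follows; PTES_PER_PAGE, the printed "clear" action, and the sample range are assumptions, not the driver's code.

#include <stdio.h>

#define PTES_PER_PAGE 512UL	/* assumed: 4 KiB page-table page / 8-byte PTE */

/* Walk an inclusive pfn range one page-table page at a time: the inner work
 * would stop at the last PTE of the current page-table page, the outer loop
 * continues until the range is exhausted or start_pfn wraps to zero. */
static void clear_range(unsigned long start_pfn, unsigned long last_pfn)
{
	if (start_pfn > last_pfn)	/* mirrors the WARN_ON() sanity check */
		return;

	do {
		unsigned long idx = start_pfn % PTES_PER_PAGE;
		unsigned long chunk_end = start_pfn + (PTES_PER_PAGE - 1 - idx);

		if (chunk_end > last_pfn)
			chunk_end = last_pfn;

		printf("clear PTEs for pfn %#lx..%#lx (one PT page)\n",
		       start_pfn, chunk_end);

		start_pfn = chunk_end + 1;
	} while (start_pfn && start_pfn <= last_pfn);
}

int main(void)
{
	clear_range(0x1fe, 0x403);	/* crosses two page-table-page boundaries */
	return 0;
}

Splitting the range this way keeps each pass confined to one page-table page, which is why the range end is carried around as an inclusive last_pfn rather than a count.
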
/linux/drivers/vhost/vdpa.c
  1101  unsigned long npages, cur_base, map_pfn, last_pfn = 0;  in vhost_vdpa_pa_map() local
  1148  if (!last_pfn)  in vhost_vdpa_pa_map()
  1155  if (last_pfn && (this_pfn != last_pfn + 1)) {  in vhost_vdpa_pa_map()
  1157  csize = PFN_PHYS(last_pfn - map_pfn + 1);  in vhost_vdpa_pa_map()
  1180  last_pfn = this_pfn;  in vhost_vdpa_pa_map()
  1188  ret = vhost_vdpa_map(v, iotlb, iova, PFN_PHYS(last_pfn - map_pfn + 1),  in vhost_vdpa_pa_map()
  1204  WARN_ON(!last_pfn);  in vhost_vdpa_pa_map()
  1205  for (pfn = map_pfn; pfn <= last_pfn; pfn++)  in vhost_vdpa_pa_map()
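
The vhost-vdpa hits show the run-coalescing pattern for pinned user pages: keep extending a run while this_pfn == last_pfn + 1, and issue one mapping of PFN_PHYS(last_pfn - map_pfn + 1) bytes each time contiguity breaks, plus once at the end. Below is a hedged sketch of only that batching logic; do_map(), the pfn list, and the PFN_PHYS macro here are illustrative stand-ins, not the vhost API.

#include <stddef.h>
#include <stdio.h>

#define PAGE_SHIFT	12UL				/* assumed 4 KiB pages */
#define PFN_PHYS(pfn)	((unsigned long long)(pfn) << PAGE_SHIFT)

/* Hypothetical stand-in for the per-run mapping call (vhost_vdpa_map() in
 * the real driver takes the iotlb, iova, size, pa and more). */
static void do_map(unsigned long map_pfn, unsigned long long size)
{
	printf("map run: pfn %#lx, size %#llx\n", map_pfn, size);
}

/* Coalesce an ordered list of pinned pfns into physically contiguous runs:
 * a run is flushed whenever the next pfn is not last_pfn + 1, and once more
 * at the end.  Uses last_pfn == 0 as "no run yet", like the fragment above,
 * so pfn 0 itself is not a valid input here. */
static void map_pfn_list(const unsigned long *pfns, size_t n)
{
	unsigned long map_pfn = 0, last_pfn = 0;

	for (size_t i = 0; i < n; i++) {
		unsigned long this_pfn = pfns[i];

		if (!last_pfn)
			map_pfn = this_pfn;		/* first page of the first run */
		else if (this_pfn != last_pfn + 1) {	/* contiguity broken: flush */
			do_map(map_pfn, PFN_PHYS(last_pfn - map_pfn + 1));
			map_pfn = this_pfn;
		}
		last_pfn = this_pfn;
	}
	if (last_pfn)					/* flush the final run */
		do_map(map_pfn, PFN_PHYS(last_pfn - map_pfn + 1));
}

int main(void)
{
	unsigned long pfns[] = { 0x100, 0x101, 0x102, 0x250, 0x251, 0x400 };

	map_pfn_list(pfns, sizeof(pfns) / sizeof(pfns[0]));
	return 0;
}

For the sample input this emits three runs (3, 2, and 1 pages), which is the whole point of the batching: one mapping call per contiguous stretch instead of one per page.
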
/linux/drivers/gpu/drm/radeon/radeon_vm.c
   452  unsigned last_pfn, pt_idx;  in radeon_vm_bo_set_addr() local
   464  last_pfn = eoffset / RADEON_GPU_PAGE_SIZE;  in radeon_vm_bo_set_addr()
   465  if (last_pfn >= rdev->vm_manager.max_pfn) {  in radeon_vm_bo_set_addr()
   467  last_pfn, rdev->vm_manager.max_pfn);  in radeon_vm_bo_set_addr()
   473  eoffset = last_pfn = 0;  in radeon_vm_bo_set_addr()
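
The radeon hit converts the requested end offset into a GPU page frame number and rejects it if it is at or above the VM manager's max_pfn. A short hedged sketch of that check; the page size, limit, and return convention are assumptions:

#include <stdio.h>

#define GPU_PAGE_SIZE	4096ULL		/* assumed; stands in for RADEON_GPU_PAGE_SIZE */

/* Reject an end offset whose GPU page frame number is outside what the VM
 * manager supports; max_pfn here is an arbitrary example value. */
static int check_eoffset(unsigned long long eoffset, unsigned long long max_pfn)
{
	unsigned long long last_pfn = eoffset / GPU_PAGE_SIZE;

	if (last_pfn >= max_pfn) {
		fprintf(stderr, "va above limit (%#llx >= %#llx)\n",
			last_pfn, max_pfn);
		return -1;
	}
	return 0;
}

int main(void)
{
	printf("%d\n", check_eoffset(0x40000000ULL, 1ULL << 20));	/* in range */
	printf("%d\n", check_eoffset(0x100000000ULL, 1ULL << 20));	/* rejected */
	return 0;
}
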
/linux/mm/page_alloc.c
  7005  unsigned long last_pfn = start_pfn + nr_pages - 1;  in zone_spans_last_pfn() local
  7007  return zone_spans_pfn(zone, last_pfn);  in zone_spans_last_pfn()
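
The page_alloc hit is the simplest form of the idiom: the last pfn of an nr_pages-long range is start_pfn + nr_pages - 1, and that inclusive end is what gets tested against the zone's span. A trivial sketch with arbitrarily chosen values:

#include <stdio.h>

int main(void)
{
	/* A range of nr_pages pages starting at start_pfn ends at
	 * start_pfn + nr_pages - 1 (inclusive), which is the pfn the zone
	 * containment check is applied to. */
	unsigned long start_pfn = 0x80000UL;
	unsigned long nr_pages  = 512UL;
	unsigned long last_pfn  = start_pfn + nr_pages - 1;

	printf("range spans pfn %#lx..%#lx\n", start_pfn, last_pfn);
	return 0;
}
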