Lines Matching full:range (hits from the Linux kernel's mm/memremap.c; the leading numbers are that file's own line numbers)

66 static void pgmap_array_delete(struct range *range)  in pgmap_array_delete()  argument
68 xa_store_range(&pgmap_array, PHYS_PFN(range->start), PHYS_PFN(range->end), in pgmap_array_delete()
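The two hits above cover pgmap_array_delete(): mm/memremap.c keeps an XArray (pgmap_array) keyed by PFN, and deleting a range simply stores NULL over the same PFN span that setup populated. Below is a minimal sketch of that insert/delete pairing; example_pgmap_array, example_insert() and example_delete() are made-up names, not the kernel's.

#include <linux/gfp.h>
#include <linux/pfn.h>
#include <linux/range.h>
#include <linux/xarray.h>

static DEFINE_XARRAY(example_pgmap_array);

/* Point every PFN in the inclusive physical range at @pgmap. */
static int example_insert(struct range *range, void *pgmap)
{
        return xa_err(xa_store_range(&example_pgmap_array,
                                     PHYS_PFN(range->start),
                                     PHYS_PFN(range->end),
                                     pgmap, GFP_KERNEL));
}

/* Storing NULL over the same span erases the mapping again. */
static void example_delete(struct range *range)
{
        xa_store_range(&example_pgmap_array,
                       PHYS_PFN(range->start), PHYS_PFN(range->end),
                       NULL, GFP_KERNEL);
}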
75 struct range *range = &pgmap->ranges[range_id]; in pfn_first() local
76 unsigned long pfn = PHYS_PFN(range->start); in pfn_first()
85 const struct range *range = &pgmap->ranges[range_id]; in pfn_end() local
87 return (range->start + range_len(range)) >> PAGE_SHIFT; in pfn_end()
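pfn_first() and pfn_end() above turn a struct range into a [first, end) PFN window. struct range carries an inclusive ->end, so range_len() is end - start + 1 and the computed end PFN is one past the last valid PFN. A sketch of just that arithmetic follows; it ignores the vmem_altmap offset the real pfn_first() also applies, and the example_* names are placeholders.

#include <linux/mm.h>           /* PAGE_SHIFT */
#include <linux/pfn.h>          /* PHYS_PFN() */
#include <linux/range.h>        /* struct range, range_len() */

static unsigned long example_pfn_first(const struct range *range)
{
        return PHYS_PFN(range->start);
}

/* Exclusive bound, usable as: for (pfn = first; pfn < end; pfn++) */
static unsigned long example_pfn_end(const struct range *range)
{
        return (range->start + range_len(range)) >> PAGE_SHIFT;
}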
126 struct range *range = &pgmap->ranges[range_id]; in pageunmap_range() local
137 remove_pfn_range_from_zone(page_zone(first_page), PHYS_PFN(range->start), in pageunmap_range()
138 PHYS_PFN(range_len(range))); in pageunmap_range()
140 __remove_pages(PHYS_PFN(range->start), in pageunmap_range()
141 PHYS_PFN(range_len(range)), NULL); in pageunmap_range()
143 arch_remove_memory(nid, range->start, range_len(range), in pageunmap_range()
145 kasan_remove_zero_shadow(__va(range->start), range_len(range)); in pageunmap_range()
149 untrack_pfn(NULL, PHYS_PFN(range->start), range_len(range)); in pageunmap_range()
150 pgmap_array_delete(range); in pageunmap_range()
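Taken together, the pageunmap_range() hits show the teardown order for one range: detach the PFNs from their ZONE_DEVICE zone, drop the memmap (plus the KASAN zero shadow when arch_add_memory() was used), release the PFN tracking, and finally clear the pgmap_array entries. A condensed sketch of that sequence follows; it is not the verbatim function, it assumes memremap.c's own includes and the pgmap_array_delete() helper shown at the top of this listing, and it omits hotplug locking and per-version differences in the arch_remove_memory() signature.

static void example_pageunmap_range(struct dev_pagemap *pgmap, int range_id)
{
        struct range *range = &pgmap->ranges[range_id];
        struct page *first_page = pfn_to_page(PHYS_PFN(range->start));
        int nid = page_to_nid(first_page);

        /* 1. Pull the PFN span back out of its ZONE_DEVICE zone. */
        remove_pfn_range_from_zone(page_zone(first_page),
                                   PHYS_PFN(range->start),
                                   PHYS_PFN(range_len(range)));

        /*
         * 2. Tear down the memmap. Device-private ranges were added with
         *    add_pages(); everything else went through arch_add_memory()
         *    and also has a KASAN zero shadow to remove.
         */
        if (pgmap->type == MEMORY_DEVICE_PRIVATE) {
                __remove_pages(PHYS_PFN(range->start),
                               PHYS_PFN(range_len(range)), NULL);
        } else {
                arch_remove_memory(nid, range->start, range_len(range),
                                   pgmap_altmap(pgmap));
                kasan_remove_zero_shadow(__va(range->start), range_len(range));
        }

        /* 3. Forget the PAT/PFN tracking and the pgmap_array entries. */
        untrack_pfn(NULL, PHYS_PFN(range->start), range_len(range));
        pgmap_array_delete(range);
}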
188 struct range *range = &pgmap->ranges[range_id]; in pagemap_range() local
196 conflict_pgmap = get_dev_pagemap(PHYS_PFN(range->start), NULL); in pagemap_range()
203 conflict_pgmap = get_dev_pagemap(PHYS_PFN(range->end), NULL); in pagemap_range()
210 is_ram = region_intersects(range->start, range_len(range), in pagemap_range()
216 range->start, range->end); in pagemap_range()
220 error = xa_err(xa_store_range(&pgmap_array, PHYS_PFN(range->start), in pagemap_range()
221 PHYS_PFN(range->end), pgmap, GFP_KERNEL)); in pagemap_range()
228 error = track_pfn_remap(NULL, &params->pgprot, PHYS_PFN(range->start), 0, in pagemap_range()
229 range_len(range)); in pagemap_range()
247 error = add_pages(nid, PHYS_PFN(range->start), in pagemap_range()
248 PHYS_PFN(range_len(range)), params); in pagemap_range()
250 error = kasan_add_zero_shadow(__va(range->start), range_len(range)); in pagemap_range()
256 error = arch_add_memory(nid, range->start, range_len(range), in pagemap_range()
264 move_pfn_range_to_zone(zone, PHYS_PFN(range->start), in pagemap_range()
265 PHYS_PFN(range_len(range)), params->altmap, in pagemap_range()
278 PHYS_PFN(range->start), in pagemap_range()
279 PHYS_PFN(range_len(range)), pgmap); in pagemap_range()
285 kasan_remove_zero_shadow(__va(range->start), range_len(range)); in pagemap_range()
287 untrack_pfn(NULL, PHYS_PFN(range->start), range_len(range)); in pagemap_range()
289 pgmap_array_delete(range); in pagemap_range()
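The pagemap_range() hits are the setup mirror of the teardown above: refuse ranges whose endpoints already belong to another pgmap or that intersect System RAM, publish the range in pgmap_array, set up PFN tracking, build the memmap (add_pages() or arch_add_memory() plus KASAN shadow), then move the PFNs into ZONE_DEVICE and initialize their struct pages; the last three hits are the error unwind running the earlier steps backwards. The sketch below covers only the front-half checks and the pgmap_array insert; it assumes memremap.c's context, and example_pagemap_range_checks() is a made-up name.

static int example_pagemap_range_checks(struct dev_pagemap *pgmap, int range_id)
{
        struct range *range = &pgmap->ranges[range_id];
        struct dev_pagemap *conflict;

        /* Either endpoint already owned by another pgmap? */
        conflict = get_dev_pagemap(PHYS_PFN(range->start), NULL);
        if (conflict) {
                put_dev_pagemap(conflict);
                return -ENOMEM;
        }
        conflict = get_dev_pagemap(PHYS_PFN(range->end), NULL);
        if (conflict) {
                put_dev_pagemap(conflict);
                return -ENOMEM;
        }

        /* The span must not overlap regular System RAM. */
        if (region_intersects(range->start, range_len(range),
                              IORESOURCE_SYSTEM_RAM,
                              IORES_DESC_NONE) != REGION_DISJOINT)
                return -ENXIO;

        /* Make the range discoverable through get_dev_pagemap(). */
        return xa_err(xa_store_range(&pgmap_array, PHYS_PFN(range->start),
                                     PHYS_PFN(range->end), pgmap, GFP_KERNEL));
}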
368 * successfully processed range. This communicates how many in memremap_pages()
406 * 4/ range is expected to be a host memory range that could feasibly be
407 * treated as a "System RAM" range, i.e. not a device mmio range, but
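The comment hits above are from memremap_pages(): the first concerns tracking how many ranges were successfully processed so a failed multi-range setup can be unwound, and the kernel-doc note says each range must be host memory that could feasibly be treated as "System RAM", not device MMIO. Below is a hedged sketch of a typical caller, assuming a devdax-style MEMORY_DEVICE_GENERIC mapping; example_map_device_memory() is a made-up helper, and other pgmap types additionally require ->ops and ->owner to be filled in.

#include <linux/err.h>
#include <linux/ioport.h>
#include <linux/memremap.h>
#include <linux/slab.h>

static void *example_map_device_memory(struct resource *res, int nid)
{
        struct dev_pagemap *pgmap;
        void *addr;

        pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL);
        if (!pgmap)
                return ERR_PTR(-ENOMEM);

        pgmap->type = MEMORY_DEVICE_GENERIC;
        pgmap->range.start = res->start;
        pgmap->range.end = res->end;    /* inclusive, like struct range */
        pgmap->nr_range = 1;

        /*
         * Returns the kernel virtual address of the range on success or an
         * ERR_PTR on failure; the pgmap must stay alive until
         * memunmap_pages(pgmap) is called.
         */
        addr = memremap_pages(pgmap, nid);
        if (IS_ERR(addr))
                kfree(pgmap);
        return addr;
}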
463 if (phys >= pgmap->range.start && phys <= pgmap->range.end) in get_dev_pagemap()
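The last hit is get_dev_pagemap()'s fast path: when the caller passes in the pgmap from its previous lookup and the new physical address still falls inside pgmap->range, the already-held reference is reused instead of consulting pgmap_array. A sketch of the usual caller pattern, with example_walk() as a made-up name:

#include <linux/memremap.h>

static void example_walk(unsigned long first_pfn, unsigned long nr_pfns)
{
        struct dev_pagemap *pgmap = NULL;
        unsigned long pfn;

        for (pfn = first_pfn; pfn < first_pfn + nr_pfns; pfn++) {
                /* Reuses (or replaces) the reference held from last round. */
                pgmap = get_dev_pagemap(pfn, pgmap);
                if (!pgmap)
                        break;  /* PFN not backed by any dev_pagemap */
                /* ... treat pfn_to_page(pfn) as ZONE_DEVICE memory ... */
        }
        if (pgmap)
                put_dev_pagemap(pgmap);
}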