Searched refs:PAGES_PER_SUBSECTION (Results 1 – 5 of 5) sorted by relevance
  486  return !IS_ALIGNED(offset, nr_pages) && nr_pages > PAGES_PER_SUBSECTION;  in reuse_compound_section()
  569  if (WARN_ON_ONCE(!IS_ALIGNED(pfn, PAGES_PER_SUBSECTION) ||  in __populate_section_memmap()
  570      !IS_ALIGNED(nr_pages, PAGES_PER_SUBSECTION)))  in __populate_section_memmap()
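The two __populate_section_memmap() hits spell out the entry contract: both the starting pfn and the number of pages must be multiples of PAGES_PER_SUBSECTION, otherwise the call warns and returns. A minimal userspace sketch of that check follows, assuming the common x86_64 / 4 KiB-page geometry (2 MiB subsections, i.e. 512 pages per subsection); is_aligned() and valid_subsection_range() are local names standing in for the kernel's IS_ALIGNED() and the WARN_ON_ONCE() guard.

    #include <stdbool.h>
    #include <stdio.h>

    /* Assumed geometry: 4 KiB pages, 2 MiB subsections (x86_64 defaults). */
    #define PAGE_SHIFT            12
    #define SUBSECTION_SHIFT      21
    #define PFN_SUBSECTION_SHIFT  (SUBSECTION_SHIFT - PAGE_SHIFT)   /* 9 */
    #define PAGES_PER_SUBSECTION  (1UL << PFN_SUBSECTION_SHIFT)     /* 512 */

    /* Stand-in for the kernel's IS_ALIGNED(): x is a multiple of the
     * power-of-two a. */
    static bool is_aligned(unsigned long x, unsigned long a)
    {
        return (x & (a - 1)) == 0;
    }

    /* Mirrors the WARN_ON_ONCE() guard: a section memmap may only be
     * populated for a pfn range that starts and ends on a subsection
     * boundary. */
    static bool valid_subsection_range(unsigned long pfn, unsigned long nr_pages)
    {
        return is_aligned(pfn, PAGES_PER_SUBSECTION) &&
               is_aligned(nr_pages, PAGES_PER_SUBSECTION);
    }

    int main(void)
    {
        printf("%d\n", valid_subsection_range(0x80000, 512)); /* 1: aligned    */
        printf("%d\n", valid_subsection_range(0x80010, 512)); /* 0: bad start  */
        printf("%d\n", valid_subsection_range(0x80000, 100)); /* 0: bad length */
        return 0;
    }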
  333  min_align = PAGES_PER_SUBSECTION;  in check_pfn_span()
  438  for (; start_pfn < end_pfn; start_pfn += PAGES_PER_SUBSECTION) {  in find_smallest_section_pfn()
  463  for (; pfn >= start_pfn; pfn -= PAGES_PER_SUBSECTION) {  in find_biggest_section_pfn()
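check_pfn_span() uses PAGES_PER_SUBSECTION as the minimum alignment it accepts for hot(un)plugged ranges, and the two find_*_section_pfn() loops walk a pfn range one subsection per step. A hedged sketch of that scan pattern, reusing the assumed 512-page subsection from the sketch above; the predicate parameter is a hypothetical stand-in for the pfn_to_online_page()/pfn_valid() tests the kernel helpers actually perform.

    #include <stdbool.h>

    #define PAGES_PER_SUBSECTION  (1UL << (21 - 12))  /* assumed: 512 pfns */

    /* Sketch of the forward scan in find_smallest_section_pfn(): step
     * through [start_pfn, end_pfn) one subsection at a time and return the
     * first pfn whose subsection satisfies the caller's predicate, or
     * end_pfn if none does. */
    static unsigned long find_first_present_subsection(unsigned long start_pfn,
            unsigned long end_pfn,
            bool (*subsection_present)(unsigned long pfn))
    {
        unsigned long pfn;

        for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SUBSECTION)
            if (subsection_present(pfn))
                return pfn;

        return end_pfn;
    }

The backward walk in find_biggest_section_pfn() is the same loop started at the top of the range with pfn -= PAGES_PER_SUBSECTION.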
 1882  #define PAGES_PER_SUBSECTION (1UL << PFN_SUBSECTION_SHIFT)  macro
 1883  #define PAGE_SUBSECTION_MASK (~(PAGES_PER_SUBSECTION-1))
 1891  #define SUBSECTION_ALIGN_UP(pfn) ALIGN((pfn), PAGES_PER_SUBSECTION)
 2111  return (pfn & ~(PAGE_SECTION_MASK)) / PAGES_PER_SUBSECTION;  in subsection_map_index()
 2140  *pfn = (*pfn & PAGE_SECTION_MASK) + (bit * PAGES_PER_SUBSECTION);  in pfn_section_first_valid()
   85  self.PAGES_PER_SUBSECTION = 1 << self.PFN_SUBSECTION_SHIFT
  151  return (pfn & ~(self.PAGE_SECTION_MASK)) // self.PAGES_PER_SUBSECTION
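The three defines derive the subsection size and mask from PFN_SUBSECTION_SHIFT, and subsection_map_index() uses them to turn a pfn into a bit index within its section's subsection bitmap; the two Python hits are the gdb scripts recomputing the same values. A standalone sketch of that arithmetic, again assuming 4 KiB pages, 128 MiB sections and 2 MiB subsections (so 64 subsections per section); ALIGN_UP() is a local stand-in for the kernel's ALIGN().

    #include <stdio.h>

    /* Assumed geometry: 4 KiB pages, 128 MiB sections, 2 MiB subsections. */
    #define PAGE_SHIFT               12
    #define SECTION_SIZE_BITS        27
    #define SUBSECTION_SHIFT         21
    #define PFN_SECTION_SHIFT        (SECTION_SIZE_BITS - PAGE_SHIFT)   /* 15 */
    #define PFN_SUBSECTION_SHIFT     (SUBSECTION_SHIFT - PAGE_SHIFT)    /*  9 */
    #define PAGES_PER_SECTION        (1UL << PFN_SECTION_SHIFT)         /* 32768 */
    #define PAGES_PER_SUBSECTION     (1UL << PFN_SUBSECTION_SHIFT)      /* 512 */
    #define PAGE_SECTION_MASK        (~(PAGES_PER_SECTION - 1))
    #define PAGE_SUBSECTION_MASK     (~(PAGES_PER_SUBSECTION - 1))
    #define ALIGN_UP(x, a)           (((x) + (a) - 1) & ~((a) - 1))
    #define SUBSECTION_ALIGN_UP(pfn) ALIGN_UP((pfn), PAGES_PER_SUBSECTION)

    /* Same arithmetic as subsection_map_index(): the pfn's offset inside
     * its section, divided by the subsection size, is the bit number in
     * the section's subsection_map. */
    static unsigned long subsection_map_index(unsigned long pfn)
    {
        return (pfn & ~PAGE_SECTION_MASK) / PAGES_PER_SUBSECTION;
    }

    int main(void)
    {
        unsigned long pfn = 0x8237;  /* arbitrary example pfn */

        printf("pages per subsection: %lu\n", PAGES_PER_SUBSECTION);      /* 512 */
        printf("subsection index:     %lu\n", subsection_map_index(pfn)); /* 1 */
        printf("aligned up:           %#lx\n", SUBSECTION_ALIGN_UP(pfn)); /* 0x8400 */
        return 0;
    }

The pfn_section_first_valid() hit at 2140 runs the computation the other way: from a bit number back to the first pfn of that subsection.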
  103  for (; (unsigned long)start < vmemmap_end; start += PAGES_PER_SUBSECTION)  in vmemmap_populated()
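Judging by the cast, the cursor here is a struct page pointer, so each += PAGES_PER_SUBSECTION advances it one subsection's worth of page structs through the virtual memmap. A hedged sketch of that loop shape, with a dummy struct page and a hypothetical backing_present() probe standing in for the kernel's pfn_valid() check:

    #include <stdbool.h>

    #define PAGES_PER_SUBSECTION  (1UL << (21 - 12))  /* assumed: 512 */

    /* Dummy stand-in for the kernel's struct page, only to make the pointer
     * arithmetic concrete; the real structure is larger. */
    struct page { unsigned long flags; };

    /* Sketch of the loop in vmemmap_populated(): probe the memmap one
     * subsection of page structs at a time and report whether any probed
     * position is already backed. */
    static bool range_populated(struct page *start, unsigned long vmemmap_end,
                                bool (*backing_present)(struct page *p))
    {
        for (; (unsigned long)start < vmemmap_end; start += PAGES_PER_SUBSECTION)
            if (backing_present(start))
                return true;

        return false;
    }

Because start is a pointer, each step moves the address by PAGES_PER_SUBSECTION * sizeof(struct page) bytes, i.e. exactly one subsection of memmap.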