Lines Matching +full:1 +full:ms — whole-word identifier search; every hit below comes from the Linux kernel's mm/sparse.c. Each entry shows the file line number, the matched source line, and the enclosing function; the "argument"/"local" tags mark lines where ms is declared.

24  * 1) mem_section	- memory sections, mem_map's for valid memory
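
The rest of the listing dereferences exactly two fields of struct mem_section: ms->section_mem_map and ms->usage. A minimal sketch of those two members follows; the real definition lives in include/linux/mmzone.h and grows extra fields depending on configuration, so treat this as an orientation aid, not the kernel layout.

/* Sketch only: the two members the matches below touch. */
struct mem_section_usage;     /* holds subsection_map[] plus pageblock flags */

struct mem_section {
    /*
     * Encoded mem_map pointer with state flags packed into the low
     * bits (present, has-mem_map, online, early, ...).
     */
    unsigned long section_mem_map;
    struct mem_section_usage *usage;
};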
132 unsigned long max_sparsemem_pfn = (DIRECT_MAP_PHYSMEM_END + 1) >> PAGE_SHIFT; in mminit_validate_memmodel_limits()
142 WARN_ON_ONCE(1); in mminit_validate_memmodel_limits()
149 WARN_ON_ONCE(1); in mminit_validate_memmodel_limits()
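
The mminit_validate_memmodel_limits() matches at lines 132, 142 and 149 compute the highest pfn SPARSEMEM can describe and clamp incoming ranges to it, warning once when an architecture passes something larger. Below is a stand-alone sketch of the clamping arithmetic; DIRECT_MAP_PHYSMEM_END and PAGE_SHIFT are illustrative assumptions (a 46-bit direct map, 4 KiB pages), not values from any particular architecture.

#include <stdio.h>

#define PAGE_SHIFT              12                    /* assumed: 4 KiB pages */
#define DIRECT_MAP_PHYSMEM_END  ((1UL << 46) - 1)     /* assumed limit */

static unsigned long max_sparsemem_pfn =
        (DIRECT_MAP_PHYSMEM_END + 1) >> PAGE_SHIFT;

/* Mirror of the clamping logic: never let a range extend past the maximum. */
static void validate_limits(unsigned long *start_pfn, unsigned long *end_pfn)
{
    if (*start_pfn > max_sparsemem_pfn) {
        /* whole range is out of scope: collapse it to empty */
        *start_pfn = max_sparsemem_pfn;
        *end_pfn = max_sparsemem_pfn;
    } else if (*end_pfn > max_sparsemem_pfn) {
        /* only the tail is out of scope: truncate it */
        *end_pfn = max_sparsemem_pfn;
    }
}

int main(void)
{
    unsigned long start = 0, end = 1UL << 40;   /* deliberately too large */

    validate_limits(&start, &end);
    printf("clamped range: [%lu, %lu), max pfn %lu\n",
           start, end, max_sparsemem_pfn);
    return 0;
}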
164 static void __section_mark_present(struct mem_section *ms, in __section_mark_present() argument
170 ms->section_mem_map |= SECTION_MARKED_PRESENT; in __section_mark_present()
175 return next_present_section_nr(-1); in first_present_section_nr()
183 int end = subsection_map_index(pfn + nr_pages - 1); in subsection_mask_set()
185 bitmap_set(map, idx, end - idx + 1); in subsection_mask_set()
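
subsection_mask_set() (lines 183 and 185) turns a pfn range into an inclusive range of subsection indices within one section and sets those bits. The user-space sketch below reproduces that arithmetic; PAGE_SHIFT, SECTION_SIZE_BITS and SUBSECTION_SHIFT use common x86_64 values as assumptions (4 KiB pages, 128 MiB sections, 2 MiB subsections, i.e. 64 subsections per section, so one uint64_t stands in for the bitmap).

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT            12    /* assumed */
#define SECTION_SIZE_BITS     27    /* assumed: 128 MiB sections */
#define SUBSECTION_SHIFT      21    /* assumed: 2 MiB subsections */

#define PAGES_PER_SECTION     (1UL << (SECTION_SIZE_BITS - PAGE_SHIFT))
#define PAGES_PER_SUBSECTION  (1UL << (SUBSECTION_SHIFT - PAGE_SHIFT))

/* index of the subsection holding @pfn, relative to its own section */
static int subsection_map_index(unsigned long pfn)
{
    return (pfn & (PAGES_PER_SECTION - 1)) / PAGES_PER_SUBSECTION;
}

/* same shape as the kernel helper: set the inclusive range [idx, end] */
static void subsection_mask_set(uint64_t *map, unsigned long pfn,
                                unsigned long nr_pages)
{
    int idx = subsection_map_index(pfn);
    int end = subsection_map_index(pfn + nr_pages - 1);

    for (int i = idx; i <= end; i++)    /* bitmap_set(map, idx, end - idx + 1) */
        *map |= 1ULL << i;
}

int main(void)
{
    uint64_t map = 0;

    /* 4 MiB worth of pages starting 2 MiB into a section: sets bits 1 and 2 */
    subsection_mask_set(&map, PAGES_PER_SUBSECTION, 2 * PAGES_PER_SUBSECTION);
    printf("subsection_map = %#llx\n", (unsigned long long)map);
    return 0;
}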
190 int end_sec_nr = pfn_to_section_nr(pfn + nr_pages - 1); in subsection_map_init()
194 struct mem_section *ms; in subsection_map_init() local
199 ms = __nr_to_section(nr); in subsection_map_init()
200 subsection_mask_set(ms->usage->subsection_map, pfn, pfns); in subsection_map_init()
204 subsection_map_index(pfn + pfns - 1)); in subsection_map_init()
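
subsection_map_init() (lines 190-204) visits every section touched by [pfn, pfn + nr_pages) and hands each section only its own slice of the range. Here is a sketch of just that splitting loop, self-contained with the same assumed section geometry as above; the printout stands in for the subsection_mask_set() call.

#include <stdio.h>

#define PAGE_SHIFT           12   /* assumed */
#define SECTION_SIZE_BITS    27   /* assumed */
#define PFN_SECTION_SHIFT    (SECTION_SIZE_BITS - PAGE_SHIFT)
#define PAGES_PER_SECTION    (1UL << PFN_SECTION_SHIFT)

#define pfn_to_section_nr(pfn)  ((pfn) >> PFN_SECTION_SHIFT)
#define min(a, b)               ((a) < (b) ? (a) : (b))

/* Walk [pfn, pfn + nr_pages) one section at a time, like subsection_map_init(). */
static void walk_sections(unsigned long pfn, unsigned long nr_pages)
{
    unsigned long start_sec_nr = pfn_to_section_nr(pfn);
    unsigned long end_sec_nr = pfn_to_section_nr(pfn + nr_pages - 1);

    if (!nr_pages)
        return;

    for (unsigned long nr = start_sec_nr; nr <= end_sec_nr; nr++) {
        /* pages of this range that land in section @nr */
        unsigned long pfns = min(nr_pages,
            PAGES_PER_SECTION - (pfn & (PAGES_PER_SECTION - 1)));

        printf("section %lu: pfns %lu..%lu (%lu pages)\n",
               nr, pfn, pfn + pfns - 1, pfns);

        pfn += pfns;
        nr_pages -= pfns;
    }
}

int main(void)
{
    /* a range that straddles a section boundary */
    walk_sections(PAGES_PER_SECTION - 100, 300);
    return 0;
}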
225 struct mem_section *ms; in memory_present() local
230 ms = __nr_to_section(section_nr); in memory_present()
231 if (!ms->section_mem_map) { in memory_present()
232 ms->section_mem_map = sparse_encode_early_nid(nid) | in memory_present()
234 __section_mark_present(ms, section_nr); in memory_present()
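
Before any mem_map exists, memory_present() (lines 225-234) records which node a section belongs to by packing the node id into section_mem_map above the state-flag bits; it is read back later when sections are grouped per node. Below is a sketch of that stash-and-recover trick; the flag values and SECTION_NID_SHIFT are assumptions chosen only so the nid and the flags do not overlap.

#include <stdio.h>

/* Assumed layout: state flags in the low bits, nid stored above them. */
#define SECTION_MARKED_PRESENT  (1UL << 0)
#define SECTION_IS_ONLINE       (1UL << 2)
#define SECTION_NID_SHIFT       6

static unsigned long sparse_encode_early_nid(int nid)
{
    return (unsigned long)nid << SECTION_NID_SHIFT;
}

static int sparse_early_nid(unsigned long section_mem_map)
{
    return (int)(section_mem_map >> SECTION_NID_SHIFT);
}

int main(void)
{
    /* stash the node id, then flip state flags without disturbing it */
    unsigned long section_mem_map = sparse_encode_early_nid(3);

    section_mem_map |= SECTION_MARKED_PRESENT;   /* __section_mark_present() */
    section_mem_map |= SECTION_IS_ONLINE;        /* illustrative extra flag */

    printf("early nid = %d, present = %d, online = %d\n",
           sparse_early_nid(section_mem_map),
           !!(section_mem_map & SECTION_MARKED_PRESENT),
           !!(section_mem_map & SECTION_IS_ONLINE));
    return 0;
}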
254 align = 1 << (INTERNODE_CACHE_SHIFT); in memblocks_present()
289 static void __meminit sparse_init_one_section(struct mem_section *ms, in sparse_init_one_section() argument
293 ms->section_mem_map &= ~SECTION_MAP_MASK; in sparse_init_one_section()
294 ms->section_mem_map |= sparse_encode_mem_map(mem_map, pnum) in sparse_init_one_section()
296 ms->usage = usage; in sparse_init_one_section()
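
sparse_init_one_section() (lines 289-296) does not store the mem_map pointer as-is: sparse_encode_mem_map() biases it by the section's first pfn, and sparse_decode_mem_map() (line 852) adds that pfn back, so a pfn-to-page lookup inside the section reduces to one addition. A user-space round-trip sketch follows; the struct page stand-in and the section geometry are assumptions, and unlike the kernel no flag bits are packed into the encoded value here.

#include <stdio.h>
#include <stdlib.h>

#define PAGE_SHIFT           12   /* assumed */
#define SECTION_SIZE_BITS    27   /* assumed */
#define PFN_SECTION_SHIFT    (SECTION_SIZE_BITS - PAGE_SHIFT)
#define PAGES_PER_SECTION    (1UL << PFN_SECTION_SHIFT)

struct page { unsigned long flags; };   /* stand-in for the real struct page */

static unsigned long section_nr_to_pfn(unsigned long sec)
{
    return sec << PFN_SECTION_SHIFT;
}

/* Store mem_map biased by the section's first pfn (the biased pointer lies
 * outside the array, mirroring the kernel's trick) ... */
static unsigned long encode_mem_map(struct page *mem_map, unsigned long pnum)
{
    return (unsigned long)(mem_map - section_nr_to_pfn(pnum));
}

/* ... so decoding is just "add the first pfn back". */
static struct page *decode_mem_map(unsigned long coded, unsigned long pnum)
{
    return (struct page *)coded + section_nr_to_pfn(pnum);
}

int main(void)
{
    unsigned long pnum = 5;   /* arbitrary section number */
    struct page *mem_map = calloc(PAGES_PER_SECTION, sizeof(*mem_map));
    unsigned long coded = encode_mem_map(mem_map, pnum);

    /* the page for a pfn inside section 5 is simply coded + pfn */
    unsigned long pfn = section_nr_to_pfn(pnum) + 42;
    struct page *page = (struct page *)coded + pfn;

    printf("decode matches mem_map: %d, pfn lookup matches: %d\n",
           decode_mem_map(coded, pnum) == mem_map, page == &mem_map[42]);
    free(mem_map);
    return 0;
}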
338 limit = goal + (1UL << PA_SECTION_SHIFT); in sparse_early_usemaps_alloc_pgdat_section()
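
Line 338 is the upper bound of a one-section allocation window: sparse_early_usemaps_alloc_pgdat_section() tries to place a node's usemaps in the same memory section that already holds its pgdat, so neither allocation can pin an otherwise-removable section on its own. A sketch of the goal/limit arithmetic; PA_SECTION_SHIFT and the pgdat address are assumptions.

#include <stdio.h>

#define PA_SECTION_SHIFT  27   /* assumed: 128 MiB sections */

int main(void)
{
    /* pretend physical address of the node's pgdat */
    unsigned long pgdat_pa = 0x123456789UL;

    /* allocation window: the one section that contains the pgdat */
    unsigned long goal = pgdat_pa & ~((1UL << PA_SECTION_SHIFT) - 1);
    unsigned long limit = goal + (1UL << PA_SECTION_SHIFT);

    printf("goal  = %#lx\nlimit = %#lx\n", goal, limit);
    return 0;
}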
541 struct mem_section *ms; in sparse_init_nid() local
558 ms = __nr_to_section(pnum); in sparse_init_nid()
559 if (!preinited_vmemmap_section(ms)) { in sparse_init_nid()
584 ms = __nr_to_section(pnum); in sparse_init_nid()
585 if (!preinited_vmemmap_section(ms)) in sparse_init_nid()
586 ms->section_mem_map = 0; in sparse_init_nid()
597 unsigned long pnum_end, pnum_begin, map_count = 1; in sparse_init()
610 for_each_present_section_nr(pnum_begin + 1, pnum_end) { in sparse_init()
621 map_count = 1; in sparse_init()
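
The sparse_init() matches (lines 597, 610, 621) are its batching loop: walk present sections in order, count how many consecutive ones report the same early nid, and hand each run to sparse_init_nid() so per-node allocations happen once per run. Below is a sketch of that grouping over a plain array of node ids; the array contents are made up for the example.

#include <stdio.h>
#include <stddef.h>

/* Stand-in for sparse_init_nid(): report one run of same-node sections. */
static void init_batch(int nid, size_t pnum_begin, size_t pnum_end,
                       unsigned long map_count)
{
    printf("node %d: sections [%zu, %zu), %lu maps\n",
           nid, pnum_begin, pnum_end, map_count);
}

int main(void)
{
    /* made-up node id of each present section, in section order */
    int nid_of[] = { 0, 0, 0, 1, 1, 0, 0 };
    size_t nr = sizeof(nid_of) / sizeof(nid_of[0]);
    size_t pnum_begin = 0, pnum;
    int nid_begin = nid_of[0];
    unsigned long map_count = 1;

    for (pnum = 1; pnum < nr; pnum++) {
        if (nid_of[pnum] == nid_begin) {
            map_count++;
            continue;
        }
        /* node id changed: flush the run [pnum_begin, pnum) */
        init_batch(nid_begin, pnum_begin, pnum, map_count);
        nid_begin = nid_of[pnum];
        pnum_begin = pnum;
        map_count = 1;
    }
    init_batch(nid_begin, pnum_begin, nr, map_count);   /* last run */
    return 0;
}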
637 struct mem_section *ms; in online_mem_sections() local
643 ms = __nr_to_section(section_nr); in online_mem_sections()
644 ms->section_mem_map |= SECTION_IS_ONLINE; in online_mem_sections()
655 struct mem_section *ms; in offline_mem_sections() local
664 ms = __nr_to_section(section_nr); in offline_mem_sections()
665 ms->section_mem_map &= ~SECTION_IS_ONLINE; in offline_mem_sections()
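
online_mem_sections() and offline_mem_sections() (lines 637-665) only toggle a single state bit in section_mem_map for every section in the pfn range. A sketch of that flag bookkeeping; the bit positions are assumptions standing in for the kernel's low-bit flag layout.

#include <stdio.h>

/* Assumed flag bits in the low part of section_mem_map. */
#define SECTION_MARKED_PRESENT  (1UL << 0)
#define SECTION_HAS_MEM_MAP     (1UL << 1)
#define SECTION_IS_ONLINE       (1UL << 2)

static void online_section(unsigned long *section_mem_map)
{
    *section_mem_map |= SECTION_IS_ONLINE;
}

static void offline_section(unsigned long *section_mem_map)
{
    *section_mem_map &= ~SECTION_IS_ONLINE;
}

int main(void)
{
    unsigned long section_mem_map = SECTION_MARKED_PRESENT | SECTION_HAS_MEM_MAP;

    online_section(&section_mem_map);
    printf("online:  %#lx\n", section_mem_map);   /* 0x7 */
    offline_section(&section_mem_map);
    printf("offline: %#lx\n", section_mem_map);   /* 0x3, other flags kept */
    return 0;
}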
683 memmap_pages_add(-1L * (DIV_ROUND_UP(end - start, PAGE_SIZE))); in depopulate_section_memmap()
698 struct mem_section *ms = __pfn_to_section(pfn); in clear_subsection_map() local
699 unsigned long *subsection_map = ms->usage in clear_subsection_map()
700 ? &ms->usage->subsection_map[0] : NULL; in clear_subsection_map()
715 static bool is_subsection_map_empty(struct mem_section *ms) in is_subsection_map_empty() argument
717 return bitmap_empty(&ms->usage->subsection_map[0], in is_subsection_map_empty()
723 struct mem_section *ms = __pfn_to_section(pfn); in fill_subsection_map() local
730 subsection_map = &ms->usage->subsection_map[0]; in fill_subsection_map()
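
clear_subsection_map(), is_subsection_map_empty() and fill_subsection_map() (lines 698-730) track which subsections of a section are actually backed: filling rejects a range that overlaps what is already mapped, clearing rejects a range that is not fully mapped, and the emptiness check decides whether the whole section can be torn down. A single-word sketch of that bookkeeping follows; the kernel uses a real bitmap sized by SUBSECTIONS_PER_SECTION, and the exact error codes here are illustrative.

#include <stdio.h>
#include <stdint.h>
#include <errno.h>

/* Mask for the inclusive range of subsection indices [idx, end]. */
static uint64_t subsection_mask(int idx, int end)
{
    int nbits = end - idx + 1;

    return (nbits == 64 ? ~0ULL : (1ULL << nbits) - 1) << idx;
}

/* Refuse to map subsections that are already mapped. */
static int fill_subsections(uint64_t *map, int idx, int end)
{
    uint64_t mask = subsection_mask(idx, end);

    if (*map & mask)
        return -EEXIST;
    *map |= mask;
    return 0;
}

/* Refuse to unmap subsections that are not fully mapped. */
static int clear_subsections(uint64_t *map, int idx, int end)
{
    uint64_t mask = subsection_mask(idx, end);

    if ((*map & mask) != mask)
        return -EINVAL;
    *map &= ~mask;
    return 0;
}

int main(void)
{
    uint64_t map = 0;

    printf("fill 0-3:  %d\n", fill_subsections(&map, 0, 3));    /* 0 */
    printf("fill 2-5:  %d\n", fill_subsections(&map, 2, 5));    /* overlap: error */
    printf("clear 0-3: %d\n", clear_subsections(&map, 0, 3));   /* 0 */
    printf("empty:     %d\n", map == 0);                        /* 1 */
    return 0;
}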
792 static bool is_subsection_map_empty(struct mem_section *ms) in is_subsection_map_empty() argument
807 * 1. deactivation of a partial hot-added section (only possible in
814 * For 1, when subsection_map does not empty we will not be freeing the
822 struct mem_section *ms = __pfn_to_section(pfn); in section_deactivate() local
823 bool section_is_early = early_section(ms); in section_deactivate()
830 empty = is_subsection_map_empty(ms); in section_deactivate()
837 * ms->usage array. in section_deactivate()
839 ms->section_mem_map &= ~SECTION_HAS_MEM_MAP; in section_deactivate()
848 if (!PageReserved(virt_to_page(ms->usage))) { in section_deactivate()
849 kfree_rcu(ms->usage, rcu); in section_deactivate()
850 WRITE_ONCE(ms->usage, NULL); in section_deactivate()
852 memmap = sparse_decode_mem_map(ms->section_mem_map, section_nr); in section_deactivate()
865 ms->section_mem_map = (unsigned long)NULL; in section_deactivate()
872 struct mem_section *ms = __pfn_to_section(pfn); in section_activate() local
877 if (!ms->usage) { in section_activate()
881 ms->usage = usage; in section_activate()
887 ms->usage = NULL; in section_activate()
899 if (nr_pages < PAGES_PER_SECTION && early_section(ms)) in section_activate()
935 struct mem_section *ms; in sparse_add_section() local
954 ms = __nr_to_section(section_nr); in sparse_add_section()
956 __section_mark_present(ms, section_nr); in sparse_add_section()
961 sparse_init_one_section(ms, section_nr, memmap, ms->usage, 0); in sparse_add_section()
969 struct mem_section *ms = __pfn_to_section(pfn); in sparse_remove_section() local
971 if (WARN_ON_ONCE(!valid_section(ms))) in sparse_remove_section()