Lines Matching +full:mm +full:- +full:0

1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * address space "slices" (meta-segments) support
15 #include <linux/mm.h>
21 #include <linux/sched/mm.h>
39 (int)SLICE_NUM_LOW, &mask->low_slices); in slice_print_mask()
41 (int)SLICE_NUM_HIGH, mask->high_slices); in slice_print_mask()
44 #define slice_dbg(fmt...) do { if (_slice_debug) pr_devel(fmt); } while (0)
63 unsigned long end = start + len - 1; in slice_range_to_mask()
65 ret->low_slices = 0; in slice_range_to_mask()
67 bitmap_zero(ret->high_slices, SLICE_NUM_HIGH); in slice_range_to_mask()
71 (unsigned long)(SLICE_LOW_TOP - 1)); in slice_range_to_mask()
73 ret->low_slices = (1u << (GET_LOW_SLICE_INDEX(mend) + 1)) in slice_range_to_mask()
74 - (1u << GET_LOW_SLICE_INDEX(start)); in slice_range_to_mask()
80 unsigned long count = GET_HIGH_SLICE_INDEX(align_end) - start_index; in slice_range_to_mask()
82 bitmap_set(ret->high_slices, start_index, count); in slice_range_to_mask()
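The low_slices value built in slice_range_to_mask() above relies on the identity (1u << (hi + 1)) - (1u << lo), which sets bits lo..hi with a single subtraction. The fragment below is a minimal user-space sketch of that arithmetic, assuming 256MB low slices (shift 28) capped at a 4GB boundary as on book3s64, a start address below that boundary, and a 64-bit build; the macros are restated locally for illustration rather than taken from the kernel headers.

#include <stdio.h>

#define SLICE_LOW_SHIFT	28			/* assumed: 256MB low slices */
#define SLICE_LOW_TOP	0x100000000ul		/* assumed: low region ends at 4GB */
#define GET_LOW_SLICE_INDEX(addr)	((addr) >> SLICE_LOW_SHIFT)

/* Mask of low slices touched by [start, start + len), start below 4GB. */
static unsigned int low_slice_mask(unsigned long start, unsigned long len)
{
	unsigned long end = start + len - 1;
	unsigned long mend = end < SLICE_LOW_TOP - 1 ? end : SLICE_LOW_TOP - 1;

	/* (1 << (hi + 1)) - (1 << lo) sets bits lo..hi in one go */
	return (1u << (GET_LOW_SLICE_INDEX(mend) + 1)) -
	       (1u << GET_LOW_SLICE_INDEX(start));
}

int main(void)
{
	/* 512MB at 256MB touches slices 1 and 2, so the mask is 0x6 */
	printf("mask = %#x\n", low_slice_mask(0x10000000ul, 0x20000000ul));
	return 0;
}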
86 static int slice_area_is_free(struct mm_struct *mm, unsigned long addr, in slice_area_is_free() argument
91 if ((mm_ctx_slb_addr_limit(&mm->context) - len) < addr) in slice_area_is_free()
92 return 0; in slice_area_is_free()
93 vma = find_vma(mm, addr); in slice_area_is_free()
97 static int slice_low_has_vma(struct mm_struct *mm, unsigned long slice) in slice_low_has_vma() argument
99 return !slice_area_is_free(mm, slice << SLICE_LOW_SHIFT, in slice_low_has_vma()
103 static int slice_high_has_vma(struct mm_struct *mm, unsigned long slice) in slice_high_has_vma() argument
110 * at 4GB, not 0 */ in slice_high_has_vma()
111 if (start == 0) in slice_high_has_vma()
114 return !slice_area_is_free(mm, start, end - start); in slice_high_has_vma()
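slice_low_has_vma() and slice_high_has_vma() above translate a slice index back into an address range, with the quirk that high slice 0 is probed from 4GB rather than from 0 so that every address is owned by exactly one of the two bitmaps. Below is a small sketch of that mapping, assuming book3s64 geometry (256MB low slices, 1TB high slices) and a 64-bit build; the shift values are illustrative constants, not the kernel's definitions.

#include <stdio.h>

#define SLICE_LOW_SHIFT		28		/* assumed: 256MB low slices */
#define SLICE_HIGH_SHIFT	40		/* assumed: 1TB high slices */
#define SLICE_LOW_TOP		0x100000000ul	/* low/high boundary at 4GB */

int main(void)
{
	unsigned long addr = 0x123456789abcul;
	unsigned long start, end;

	/* An address maps to a low index below 4GB and a high index above it. */
	if (addr < SLICE_LOW_TOP)
		printf("addr %#lx -> low slice %lu\n", addr, addr >> SLICE_LOW_SHIFT);
	else
		printf("addr %#lx -> high slice %lu\n", addr, addr >> SLICE_HIGH_SHIFT);

	/*
	 * High slice 0 nominally begins at 0, but addresses below 4GB belong
	 * to the low bitmap, so the range actually probed for VMAs starts at
	 * SLICE_LOW_TOP, mirroring the start == 0 special case above.
	 */
	start = 0ul << SLICE_HIGH_SHIFT;
	end = start + (1ul << SLICE_HIGH_SHIFT);
	if (start == 0)
		start = SLICE_LOW_TOP;
	printf("high slice 0 covers [%#lx, %#lx)\n", start, end);
	return 0;
}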
117 static void slice_mask_for_free(struct mm_struct *mm, struct slice_mask *ret, in slice_mask_for_free() argument
122 ret->low_slices = 0; in slice_mask_for_free()
124 bitmap_zero(ret->high_slices, SLICE_NUM_HIGH); in slice_mask_for_free()
126 for (i = 0; i < SLICE_NUM_LOW; i++) in slice_mask_for_free()
127 if (!slice_low_has_vma(mm, i)) in slice_mask_for_free()
128 ret->low_slices |= 1u << i; in slice_mask_for_free()
130 if (slice_addr_is_low(high_limit - 1)) in slice_mask_for_free()
133 for (i = 0; i < GET_HIGH_SLICE_INDEX(high_limit); i++) in slice_mask_for_free()
134 if (!slice_high_has_vma(mm, i)) in slice_mask_for_free()
135 __set_bit(i, ret->high_slices); in slice_mask_for_free()
138 static bool slice_check_range_fits(struct mm_struct *mm, in slice_check_range_fits() argument
142 unsigned long end = start + len - 1; in slice_check_range_fits()
143 u64 low_slices = 0; in slice_check_range_fits()
147 (unsigned long)(SLICE_LOW_TOP - 1)); in slice_check_range_fits()
150 - (1u << GET_LOW_SLICE_INDEX(start)); in slice_check_range_fits()
152 if ((low_slices & available->low_slices) != low_slices) in slice_check_range_fits()
158 unsigned long count = GET_HIGH_SLICE_INDEX(align_end) - start_index; in slice_check_range_fits()
162 if (!test_bit(i, available->high_slices)) in slice_check_range_fits()
173 struct mm_struct *mm = parm; in slice_flush_segments() local
176 if (mm != current->active_mm) in slice_flush_segments()
179 copy_mm_to_paca(current->active_mm); in slice_flush_segments()
187 static void slice_convert(struct mm_struct *mm, in slice_convert() argument
197 slice_dbg("slice_convert(mm=%p, psize=%d)\n", mm, psize); in slice_convert()
200 psize_mask = slice_mask_for_size(&mm->context, psize); in slice_convert()
203 * concurrent 64k -> 4k demotion ... in slice_convert()
207 lpsizes = mm_ctx_low_slices(&mm->context); in slice_convert()
208 for (i = 0; i < SLICE_NUM_LOW; i++) { in slice_convert()
209 if (!(mask->low_slices & (1u << i))) in slice_convert()
212 mask_index = i & 0x1; in slice_convert()
216 old_psize = (lpsizes[index] >> (mask_index * 4)) & 0xf; in slice_convert()
217 old_mask = slice_mask_for_size(&mm->context, old_psize); in slice_convert()
218 old_mask->low_slices &= ~(1u << i); in slice_convert()
219 psize_mask->low_slices |= 1u << i; in slice_convert()
222 lpsizes[index] = (lpsizes[index] & ~(0xf << (mask_index * 4))) | in slice_convert()
226 hpsizes = mm_ctx_high_slices(&mm->context); in slice_convert()
227 for (i = 0; i < GET_HIGH_SLICE_INDEX(mm_ctx_slb_addr_limit(&mm->context)); i++) { in slice_convert()
228 if (!test_bit(i, mask->high_slices)) in slice_convert()
231 mask_index = i & 0x1; in slice_convert()
235 old_psize = (hpsizes[index] >> (mask_index * 4)) & 0xf; in slice_convert()
236 old_mask = slice_mask_for_size(&mm->context, old_psize); in slice_convert()
237 __clear_bit(i, old_mask->high_slices); in slice_convert()
238 __set_bit(i, psize_mask->high_slices); in slice_convert()
241 hpsizes[index] = (hpsizes[index] & ~(0xf << (mask_index * 4))) | in slice_convert()
246 (unsigned long)mm_ctx_low_slices(&mm->context), in slice_convert()
247 (unsigned long)mm_ctx_high_slices(&mm->context)); in slice_convert()
252 spu_flush_all_slbs(mm); in slice_convert()
271 return !!(available->low_slices & (1u << slice)); in slice_scan_available()
276 return !!test_bit(slice, available->high_slices); in slice_scan_available()
280 static unsigned long slice_find_area_bottomup(struct mm_struct *mm, in slice_find_area_bottomup() argument
289 .align_mask = PAGE_MASK & ((1ul << pshift) - 1), in slice_find_area_bottomup()
319 return -ENOMEM; in slice_find_area_bottomup()
322 static unsigned long slice_find_area_topdown(struct mm_struct *mm, in slice_find_area_topdown() argument
332 .align_mask = PAGE_MASK & ((1ul << pshift) - 1), in slice_find_area_topdown()
343 addr += mm_ctx_slb_addr_limit(&mm->context) - DEFAULT_MAP_WINDOW; in slice_find_area_topdown()
347 if (!slice_scan_available(addr - 1, available, 0, &addr)) in slice_find_area_topdown()
359 else if (slice_scan_available(addr - 1, available, 0, &prev)) { in slice_find_area_topdown()
372 * so fall back to the bottom-up function here. This scenario in slice_find_area_topdown()
376 return slice_find_area_bottomup(mm, TASK_UNMAPPED_BASE, len, available, psize, high_limit); in slice_find_area_topdown()
380 static unsigned long slice_find_area(struct mm_struct *mm, unsigned long len, in slice_find_area() argument
385 return slice_find_area_topdown(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
387 return slice_find_area_bottomup(mm, mm->mmap_base, len, mask, psize, high_limit); in slice_find_area()
393 dst->low_slices = src->low_slices; in slice_copy_mask()
396 bitmap_copy(dst->high_slices, src->high_slices, SLICE_NUM_HIGH); in slice_copy_mask()
403 dst->low_slices = src1->low_slices | src2->low_slices; in slice_or_mask()
406 bitmap_or(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); in slice_or_mask()
413 dst->low_slices = src1->low_slices & ~src2->low_slices; in slice_andnot_mask()
416 bitmap_andnot(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); in slice_andnot_mask()
436 struct mm_struct *mm = current->mm; in slice_get_unmapped_area() local
445 return -ENOMEM; in slice_get_unmapped_area()
446 if (len & (page_size - 1)) in slice_get_unmapped_area()
447 return -EINVAL; in slice_get_unmapped_area()
449 if (addr & (page_size - 1)) in slice_get_unmapped_area()
450 return -EINVAL; in slice_get_unmapped_area()
451 if (addr > high_limit - len) in slice_get_unmapped_area()
452 return -ENOMEM; in slice_get_unmapped_area()
455 if (high_limit > mm_ctx_slb_addr_limit(&mm->context)) { in slice_get_unmapped_area()
461 mm_ctx_set_slb_addr_limit(&mm->context, high_limit); in slice_get_unmapped_area()
463 on_each_cpu(slice_flush_segments, mm, 1); in slice_get_unmapped_area()
467 BUG_ON(mm->task_size == 0); in slice_get_unmapped_area()
468 BUG_ON(mm_ctx_slb_addr_limit(&mm->context) == 0); in slice_get_unmapped_area()
471 slice_dbg("slice_get_unmapped_area(mm=%p, psize=%d...\n", mm, psize); in slice_get_unmapped_area()
480 if (addr > high_limit - len || addr < mmap_min_addr || in slice_get_unmapped_area()
481 !slice_area_is_free(mm, addr, len)) in slice_get_unmapped_area()
482 addr = 0; in slice_get_unmapped_area()
488 maskp = slice_mask_for_size(&mm->context, psize); in slice_get_unmapped_area()
515 compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); in slice_get_unmapped_area()
529 if (addr != 0 || fixed) { in slice_get_unmapped_area()
533 if (slice_check_range_fits(mm, &good_mask, addr, len)) { in slice_get_unmapped_area()
542 newaddr = slice_find_area(mm, len, &good_mask, in slice_get_unmapped_area()
544 if (newaddr != -ENOMEM) { in slice_get_unmapped_area()
548 slice_dbg(" found area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
556 slice_mask_for_free(mm, &potential_mask, high_limit); in slice_get_unmapped_area()
560 if (addr != 0 || fixed) { in slice_get_unmapped_area()
561 if (slice_check_range_fits(mm, &potential_mask, addr, len)) { in slice_get_unmapped_area()
570 return -EBUSY; in slice_get_unmapped_area()
578 newaddr = slice_find_area(mm, len, &good_mask, in slice_get_unmapped_area()
580 if (newaddr != -ENOMEM) { in slice_get_unmapped_area()
581 slice_dbg(" found area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
589 newaddr = slice_find_area(mm, len, &potential_mask, in slice_get_unmapped_area()
592 if (IS_ENABLED(CONFIG_PPC_64K_PAGES) && newaddr == -ENOMEM && in slice_get_unmapped_area()
594 /* retry the search with 4k-page slices included */ in slice_get_unmapped_area()
596 newaddr = slice_find_area(mm, len, &potential_mask, in slice_get_unmapped_area()
600 if (newaddr == -ENOMEM) in slice_get_unmapped_area()
601 return -ENOMEM; in slice_get_unmapped_area()
604 slice_dbg(" found potential area at 0x%lx\n", newaddr); in slice_get_unmapped_area()
612 if (need_extra_context(mm, newaddr)) { in slice_get_unmapped_area()
613 if (alloc_extended_context(mm, newaddr) < 0) in slice_get_unmapped_area()
614 return -ENOMEM; in slice_get_unmapped_area()
623 slice_convert(mm, &potential_mask, psize); in slice_get_unmapped_area()
625 on_each_cpu(slice_flush_segments, mm, 1); in slice_get_unmapped_area()
630 if (need_extra_context(mm, newaddr)) { in slice_get_unmapped_area()
631 if (alloc_extended_context(mm, newaddr) < 0) in slice_get_unmapped_area()
632 return -ENOMEM; in slice_get_unmapped_area()
648 return 0; in file_to_psize()
667 psize = mm_ctx_user_psize(&current->mm->context); in arch_get_unmapped_area()
669 return slice_get_unmapped_area(addr, len, flags, psize, 0); in arch_get_unmapped_area()
687 psize = mm_ctx_user_psize(&current->mm->context); in arch_get_unmapped_area_topdown()
692 unsigned int notrace get_slice_psize(struct mm_struct *mm, unsigned long addr) in get_slice_psize() argument
700 psizes = mm_ctx_low_slices(&mm->context); in get_slice_psize()
703 psizes = mm_ctx_high_slices(&mm->context); in get_slice_psize()
706 mask_index = index & 0x1; in get_slice_psize()
707 return (psizes[index >> 1] >> (mask_index * 4)) & 0xf; in get_slice_psize()
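get_slice_psize() above, like the slice_convert() loops earlier, treats the per-slice page-size indices as packed 4-bit fields: two slices per byte, the even slice in the low nibble and the odd slice in the high nibble. The following is a minimal sketch of that packing with a throwaway array and an arbitrary psize value; get_psize() and set_psize() are hypothetical helpers written for illustration, not kernel functions.

#include <stdio.h>

/* Read the 4-bit psize index of a slice from a packed array. */
static unsigned int get_psize(const unsigned char *psizes, unsigned long slice)
{
	unsigned int mask_index = slice & 0x1;

	return (psizes[slice >> 1] >> (mask_index * 4)) & 0xf;
}

/* Write a slice's 4-bit psize index, leaving its neighbour untouched. */
static void set_psize(unsigned char *psizes, unsigned long slice, unsigned int psize)
{
	unsigned int mask_index = slice & 0x1;
	unsigned long index = slice >> 1;

	psizes[index] = (psizes[index] & ~(0xf << (mask_index * 4))) |
			(psize << (mask_index * 4));
}

int main(void)
{
	unsigned char psizes[8] = { 0 };

	set_psize(psizes, 5, 0x4);	/* arbitrary psize index for slice 5 */
	printf("slice 5 psize = %#x, packed byte 2 = %#x\n",
	       get_psize(psizes, 5), psizes[2]);
	return 0;
}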
711 void slice_init_new_context_exec(struct mm_struct *mm) in slice_init_new_context_exec() argument
717 slice_dbg("slice_init_new_context_exec(mm=%p)\n", mm); in slice_init_new_context_exec()
721 * case of fork it is just inherited from the mm being in slice_init_new_context_exec()
724 mm_ctx_set_slb_addr_limit(&mm->context, SLB_ADDR_LIMIT_DEFAULT); in slice_init_new_context_exec()
725 mm_ctx_set_user_psize(&mm->context, psize); in slice_init_new_context_exec()
730 lpsizes = mm_ctx_low_slices(&mm->context); in slice_init_new_context_exec()
733 hpsizes = mm_ctx_high_slices(&mm->context); in slice_init_new_context_exec()
739 mask = slice_mask_for_size(&mm->context, psize); in slice_init_new_context_exec()
740 mask->low_slices = ~0UL; in slice_init_new_context_exec()
742 bitmap_fill(mask->high_slices, SLICE_NUM_HIGH); in slice_init_new_context_exec()
747 struct mm_struct *mm = current->mm; in slice_setup_new_exec() local
749 slice_dbg("slice_setup_new_exec(mm=%p)\n", mm); in slice_setup_new_exec()
754 mm_ctx_set_slb_addr_limit(&mm->context, DEFAULT_MAP_WINDOW); in slice_setup_new_exec()
757 void slice_set_range_psize(struct mm_struct *mm, unsigned long start, in slice_set_range_psize() argument
765 slice_convert(mm, &mask, psize); in slice_set_range_psize()
784 * generic code will redefine that function as 0 in that. This is ok
788 int slice_is_hugepage_only_range(struct mm_struct *mm, unsigned long addr, in slice_is_hugepage_only_range() argument
792 unsigned int psize = mm_ctx_user_psize(&mm->context); in slice_is_hugepage_only_range()
796 maskp = slice_mask_for_size(&mm->context, psize); in slice_is_hugepage_only_range()
803 compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); in slice_is_hugepage_only_range()
805 return !slice_check_range_fits(mm, &available, addr, len); in slice_is_hugepage_only_range()
808 return !slice_check_range_fits(mm, maskp, addr, len); in slice_is_hugepage_only_range()
817 return 1UL << mmu_psize_to_shift(get_slice_psize(vma->vm_mm, vma->vm_start)); in vma_mmu_pagesize()
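vma_mmu_pagesize() above turns a slice's psize index into a byte count with 1UL << mmu_psize_to_shift(psize). The short sketch below restates that conversion with a stand-in shift table; the table contents and indices are made up for illustration and are not the kernel's mmu_psize_defs[].

#include <stdio.h>

/* Stand-in shift table: psize index -> log2(page size). Illustrative only. */
static const unsigned int demo_psize_shift[] = { 12, 16, 24 };	/* 4K, 64K, 16M */

static unsigned long demo_psize_to_bytes(unsigned int psize)
{
	return 1UL << demo_psize_shift[psize];
}

int main(void)
{
	printf("psize 1 -> %lu bytes\n", demo_psize_to_bytes(1));	/* 65536 */
	return 0;
}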