Lines matching "addr", "-" and "mode" (excerpt from the s390 boot-time vmem setup code):

// SPDX-License-Identifier: GPL-2.0
/* ... */
#include <asm/page-states.h>

static bool is_kasan_populate_mode(enum populate_mode mode)
{
        return mode >= POPULATE_KASAN_MAP_SHADOW;
}

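/*
 * Note: the ">=" comparison above relies on the KASAN populate modes being
 * the last entries in enum populate_mode.
 */
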
static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode);

static inline void kasan_populate(unsigned long start, unsigned long end, enum populate_mode mode)
{
        /* ... */
        boot_debug("%-17s 0x%016lx-0x%016lx >> 0x%016lx-0x%016lx\n", get_populate_mode_name(mode),
                   /* ... */);
        pgtable_populate(sha_start, sha_end, mode);
}

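/*
 * Context (not part of the matched lines): sha_start and sha_end are
 * presumably the page-aligned KASAN shadow addresses for [start, end),
 * along the lines of the following sketch, assuming the generic
 * kasan_mem_to_shadow() helper and a local __sha() wrapper:
 *
 *	#define __sha(x)	((unsigned long)kasan_mem_to_shadow((void *)(x)))
 *
 *	sha_start = PAGE_ALIGN_DOWN(__sha(start));
 *	sha_end = PAGE_ALIGN(__sha(end));
 */
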
static bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr,
                                           unsigned long end, enum populate_mode mode)
{
        if (mode == POPULATE_KASAN_ZERO_SHADOW &&
            IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) {
                /* ... */

static bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, unsigned long addr,
                                           unsigned long end, enum populate_mode mode)
{
        if (mode == POPULATE_KASAN_ZERO_SHADOW &&
            IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {
                /* ... */

static bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr,
                                           unsigned long end, enum populate_mode mode)
{
        if (mode == POPULATE_KASAN_ZERO_SHADOW &&
            IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) {
                /* ... */

static bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr,
                                           unsigned long end, enum populate_mode mode)
{
        if (mode == POPULATE_KASAN_ZERO_SHADOW &&
            IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
                /* ... */

static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode)
{
        if (mode == POPULATE_KASAN_ZERO_SHADOW) {
                /* ... */

static inline bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr,
                                                  unsigned long end, enum populate_mode mode)
{
        /* ... */

static inline bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, unsigned long addr,
                                                  unsigned long end, enum populate_mode mode)
{
        /* ... */

static inline bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr,
                                                  unsigned long end, enum populate_mode mode)
{
        /* ... */

static inline bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr,
                                                  unsigned long end, enum populate_mode mode)
{
        /* ... */

static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode)
{
        /* ... */

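/*
 * The two sets of kasan_*_populate_zero_shadow() helpers above are
 * presumably the CONFIG_KASAN and !CONFIG_KASAN variants; the #ifdef/#else
 * lines and the stub bodies do not match the search and are elided. The
 * stubs most likely just return false, so the callers always populate real
 * page tables when KASAN is disabled.
 */
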
static unsigned long resolve_pa_may_alloc(unsigned long addr, unsigned long size,
                                          enum populate_mode mode)
{
        switch (mode) {
        /* ... */
                return addr;
        /* ... */
                return __lowcore_pa(addr);
        /* ... */
                return __abs_lowcore_pa(addr);
        /* ... */
                return __kernel_pa(addr);
        /* ... */
                return __identity_pa(addr);
        /* ... */
                addr = physmem_alloc(RR_VMEM, size, size, size == PAGE_SIZE);
                if (addr) {
                        memset((void *)addr, 0, size);
                        return addr;
                }
                /* ... */

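/*
 * The case labels do not contain "addr" or "mode", so they are missing from
 * the matched lines above. A hedged reconstruction of the whole switch is
 * sketched below: only POPULATE_IDENTITY and the POPULATE_KASAN_* names are
 * confirmed by other matched lines; the remaining mode names and the
 * INVALID_PHYS_ADDR error value are assumptions.
 */
static unsigned long resolve_pa_may_alloc_sketch(unsigned long addr, unsigned long size,
                                                 enum populate_mode mode)
{
        switch (mode) {
        case POPULATE_DIRECT:                   /* assumed name */
                return addr;
        case POPULATE_LOWCORE:                  /* assumed name */
                return __lowcore_pa(addr);
        case POPULATE_ABS_LOWCORE:              /* assumed name */
                return __abs_lowcore_pa(addr);
        case POPULATE_KERNEL:                   /* assumed name */
                return __kernel_pa(addr);
        case POPULATE_IDENTITY:
                return __identity_pa(addr);
        case POPULATE_KASAN_MAP_SHADOW:
                /* shadow memory is allocated on demand and zero-filled */
                addr = physmem_alloc(RR_VMEM, size, size, size == PAGE_SIZE);
                if (addr) {
                        memset((void *)addr, 0, size);
                        return addr;
                }
                return INVALID_PHYS_ADDR;       /* assumed error value */
        default:
                return INVALID_PHYS_ADDR;       /* assumed error value */
        }
}
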
static bool large_page_mapping_allowed(enum populate_mode mode)
{
        switch (mode) {
        /* ... */

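/*
 * The case labels are elided; judging by its name and callers, this helper
 * returns true only for the populate modes where a 1 MB or 2 GB block
 * mapping is acceptable, and false for modes that must use 4 KB mappings.
 */
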
static unsigned long try_get_large_pud_pa(pud_t *pu_dir, unsigned long addr, unsigned long end,
                                          enum populate_mode mode)
{
        unsigned long pa, size = end - addr;

        if (!cpu_has_edat2() || !large_page_mapping_allowed(mode) ||
            !IS_ALIGNED(addr, PUD_SIZE) || (size < PUD_SIZE))
                /* ... */

        pa = resolve_pa_may_alloc(addr, size, mode);
        /* ... */

static unsigned long try_get_large_pmd_pa(pmd_t *pm_dir, unsigned long addr, unsigned long end,
                                          enum populate_mode mode)
{
        unsigned long pa, size = end - addr;

        if (!cpu_has_edat1() || !large_page_mapping_allowed(mode) ||
            !IS_ALIGNED(addr, PMD_SIZE) || (size < PMD_SIZE))
                /* ... */

        pa = resolve_pa_may_alloc(addr, size, mode);
        /* ... */

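/*
 * Background: on s390, EDAT-1 provides 1 MB (PMD_SIZE) segment mappings and
 * EDAT-2 provides 2 GB (PUD_SIZE) region-third mappings, which is why the
 * two helpers above are gated on cpu_has_edat1()/cpu_has_edat2(). The
 * elided tails presumably verify that resolve_pa_may_alloc() returned a
 * usable, suitably aligned physical address before the caller installs a
 * large entry.
 */
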
static void pgtable_pte_populate(pmd_t *pmd, unsigned long addr, unsigned long end,
                                 enum populate_mode mode)
{
        /* ... */
        pte = pte_offset_kernel(pmd, addr);
        for (; addr < end; addr += PAGE_SIZE, pte++) {
                /* ... */
                if (kasan_pte_populate_zero_shadow(pte, mode))
                        /* ... */
                entry = __pte(resolve_pa_may_alloc(addr, PAGE_SIZE, mode));
                /* ... */
        }
        if (mode == POPULATE_IDENTITY)
                /* ... */

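/*
 * The trailing "if (mode == POPULATE_IDENTITY)" statements here and in the
 * PMD/PUD level functions below presumably update the direct-mapping page
 * counters (4K/1M/2G) for the identity map; the updates themselves do not
 * match the search and are elided.
 */
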
static void pgtable_pmd_populate(pud_t *pud, unsigned long addr, unsigned long end,
                                 enum populate_mode mode)
{
        /* ... */
        pmd = pmd_offset(pud, addr);
        for (; addr < end; addr = next, pmd++) {
                next = pmd_addr_end(addr, end);
                /* ... */
                if (kasan_pmd_populate_zero_shadow(pmd, addr, next, mode))
                        /* ... */
                pa = try_get_large_pmd_pa(pmd, addr, next, mode);
                /* ... */
                pgtable_pte_populate(pmd, addr, next, mode);
        }
        if (mode == POPULATE_IDENTITY)
                /* ... */

static void pgtable_pud_populate(p4d_t *p4d, unsigned long addr, unsigned long end,
                                 enum populate_mode mode)
{
        /* ... */
        pud = pud_offset(p4d, addr);
        for (; addr < end; addr = next, pud++) {
                next = pud_addr_end(addr, end);
                /* ... */
                if (kasan_pud_populate_zero_shadow(pud, addr, next, mode))
                        /* ... */
                pa = try_get_large_pud_pa(pud, addr, next, mode);
                /* ... */
                pgtable_pmd_populate(pud, addr, next, mode);
        }
        if (mode == POPULATE_IDENTITY)
                /* ... */

static void pgtable_p4d_populate(pgd_t *pgd, unsigned long addr, unsigned long end,
                                 enum populate_mode mode)
{
        /* ... */
        p4d = p4d_offset(pgd, addr);
        for (; addr < end; addr = next, p4d++) {
                next = p4d_addr_end(addr, end);
                /* ... */
                if (kasan_p4d_populate_zero_shadow(p4d, addr, next, mode))
                        /* ... */
                pgtable_pud_populate(p4d, addr, next, mode);
        }
}

static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode)
{
        /* ... */
        if (!is_kasan_populate_mode(mode)) {
                boot_debug("%-17s 0x%016lx-0x%016lx -> 0x%016lx-0x%016lx\n",
                           get_populate_mode_name(mode), addr, end,
                           resolve_pa_may_alloc(addr, 0, mode),
                           resolve_pa_may_alloc(end - 1, 0, mode) + 1);
        }
        pgd = pgd_offset(&init_mm, addr);
        for (; addr < end; addr = next, pgd++) {
                next = pgd_addr_end(addr, end);
                /* ... */
                if (kasan_pgd_populate_zero_shadow(pgd, addr, next, mode))
                        /* ... */
                if (mode == POPULATE_KASAN_SHALLOW)
                        /* ... */
                pgtable_p4d_populate(pgd, addr, next, mode);
        }
}

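/*
 * Taken together, pgtable_populate() walks PGD -> P4D -> PUD -> PMD -> PTE
 * for [addr, end), installing the largest mapping the mode and hardware
 * allow (2 GB or 1 MB blocks via try_get_large_*_pa(), otherwise 4 KB PTEs),
 * wiring whole levels to the shared KASAN zero shadow for
 * POPULATE_KASAN_ZERO_SHADOW, and apparently not descending below the top
 * level for POPULATE_KASAN_SHALLOW.
 */
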
/* In setup_vmem(): */

        /*
         * Mark whole memory as no-dat. This must be done before any
         * ...
         */
        __arch_set_page_nodat((void *)start, (end - start) >> PAGE_SHIFT);

        /*
         * init_mm->pgd contains virtual address of swapper_pg_dir.
         * ...
         */
        /* ... */

        get_lowcore()->kernel_asce.val = swapper_pg_dir | asce_bits;
        get_lowcore()->user_asce = s390_invalid_asce;

        local_ctl_load(1, &get_lowcore()->kernel_asce);
        local_ctl_load(7, &get_lowcore()->user_asce);
        local_ctl_load(13, &get_lowcore()->kernel_asce);

        init_mm.context.asce = get_lowcore()->kernel_asce.val;

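/*
 * Note on the control registers: on s390, CR1 holds the primary ASCE, CR7
 * the secondary ASCE and CR13 the home space ASCE. setup_vmem() therefore
 * points CR1 and CR13 at the kernel page tables and loads the invalid user
 * ASCE into CR7, since no user address space exists at this point in boot.
 */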