Searched refs:kernel_map (Results 1 – 10 of 10) sorted by relevance
35 #define PAGE_OFFSET kernel_map.page_offset
94 * the boot process (before kernel_map.va_pa_offset is set).
120 extern struct kernel_mapping kernel_map;
125 ((x) >= kernel_map.virt_addr && (x) < (kernel_map.virt_addr + kernel_map.size))
131 #define linear_mapping_pa_to_va(x) ((void *)((unsigned long)(x) + kernel_map.va_pa_offset))
140 (void *)(_y + kernel_map.va_kernel_xip_text_pa_offset) : \
141 (void *)(_y + kernel_map.va_kernel_xip_data_pa_offset); \
144 #define kernel_mapping_pa_to_va(y) ((void *)((unsigned long)(y) + kernel_map
[all...]
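The macros in this result translate between physical addresses and the linear map using offsets cached in kernel_map. A minimal sketch of that arithmetic, using a cut-down struct with only the fields visible in these hits (page_offset, virt_addr, size, va_pa_offset); the type and helper names here are illustrative, not the kernel's:

    /* Illustrative only: same offset arithmetic as the macros above. */
    #include <stdbool.h>
    #include <stdint.h>

    struct kernel_mapping_sketch {
            unsigned long page_offset;   /* base VA of the linear map (PAGE_OFFSET) */
            unsigned long virt_addr;     /* VA where the kernel image is mapped */
            uintptr_t size;              /* size of the kernel image mapping */
            unsigned long va_pa_offset;  /* linear-map VA minus PA */
    };

    /* Mirrors the range check on line 125: VA lies inside the kernel image mapping. */
    static bool is_kernel_mapping_sketch(const struct kernel_mapping_sketch *km, uintptr_t x)
    {
            return x >= km->virt_addr && x < km->virt_addr + km->size;
    }

    /* Mirrors linear_mapping_pa_to_va() on line 131: PA plus cached offset gives the VA. */
    static void *linear_pa_to_va_sketch(const struct kernel_mapping_sketch *km, uintptr_t pa)
    {
            return (void *)(pa + km->va_pa_offset);
    }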
24 unsigned long kernel_start = kernel_map.virt_addr; in __phys_addr_symbol()
25 unsigned long kernel_end = kernel_start + kernel_map.size; in __phys_addr_symbol()
39 BUG_ON(!kernel_map.va_pa_offset); in linear_mapping_va_to_pa()
41 return ((unsigned long)(x) - kernel_map.va_pa_offset); in linear_mapping_va_to_pa()
47 BUG_ON(!kernel_map.va_pa_offset); in linear_mapping_pa_to_va()
49 return ((void *)((unsigned long)(x) + kernel_map.va_pa_offset)); in linear_mapping_pa_to_va()
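These hits show the VA-to-PA direction: refuse to translate before the offset has been initialized, then subtract it. A hedged sketch of the same pattern, with assert() standing in for BUG_ON and the parameter standing in for kernel_map.va_pa_offset:

    #include <assert.h>
    #include <stdint.h>

    /* Sketch of the linear_mapping_va_to_pa() pattern seen above. */
    static uintptr_t linear_va_to_pa_sketch(uintptr_t va, unsigned long va_pa_offset)
    {
            assert(va_pa_offset != 0);  /* mirrors BUG_ON(!kernel_map.va_pa_offset) */
            return va - va_pa_offset;
    }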
41 struct kernel_mapping kernel_map __ro_after_init;
42 EXPORT_SYMBOL(kernel_map);
44 #define kernel_map (*(struct kernel_mapping *)XIP_FIXUP(&kernel_map)) macro
164 print_ml("kernel", (unsigned long)kernel_map.virt_addr, in print_vm_layout()
260 kernel_map.va_pa_offset = PAGE_OFFSET - phys_ram_base; in setup_bootmem()
331 uintptr_t reloc_offset = kernel_map.virt_addr - KERNEL_LINK_ADDR; in relocate_kernel()
336 uintptr_t va_kernel_link_pa_offset = KERNEL_LINK_ADDR - kernel_map.phys_addr; in relocate_kernel()
516 BUG_ON((va - kernel_map.virt_addr) >> PUD_SHIFT); in alloc_pmd_early()
581 BUG_ON((va - kernel_map in alloc_pud_early()
[all...]
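Line 44 is the execute-in-place build's indirection: while code still runs from the read-only image, every use of kernel_map is routed through XIP_FIXUP() so it reaches the writable copy in RAM. A rough sketch of that redirection idea only; the base and size variables and the helper name are invented, and the real XIP_FIXUP depends on the XIP link layout:

    #include <stdint.h>

    /* If an address falls inside the read-only (flash-resident) image,
     * rebase it onto the writable copy in RAM; otherwise pass it through. */
    static uintptr_t xip_image_base;  /* start of the XIP image (illustrative) */
    static uintptr_t xip_image_size;  /* size of the region needing fixup */
    static uintptr_t ram_copy_base;   /* where the writable copy lives in RAM */

    static void *xip_fixup_sketch(void *addr)
    {
            uintptr_t a = (uintptr_t)addr;

            if (a >= xip_image_base && a < xip_image_base + xip_image_size)
                    a = a - xip_image_base + ram_copy_base;
            return (void *)a;
    }

    /* A macro like the one on line 44 then makes every use of kernel_map
     * dereference the fixed-up pointer instead of the flash copy:
     *   #define kernel_map (*(struct kernel_mapping *)xip_fixup_sketch(&kernel_map)) */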
443 address_markers[KERNEL_MAPPING_NR].start_address = kernel_map.virt_addr; in ptdump_init()
29 kernel_map.va_kernel_pa_offset); in arch_crash_save_vmcoreinfo()
392 kernel_map.virt_offset, in dump_kernel_offset()
76 la a1, kernel_map
621 struct map *kernel_map = machine__kernel_map(&rep->session->machines.host); in report__warn_kptr_restrict() local
622 struct kmap *kernel_kmap = kernel_map ? map__kmap(kernel_map) : NULL; in report__warn_kptr_restrict()
627 if (kernel_map == NULL || in report__warn_kptr_restrict()
628 (dso__hit(map__dso(kernel_map)) && in report__warn_kptr_restrict()
635 if (kernel_map && map__has_symbols(kernel_map)) { in report__warn_kptr_restrict()
346 struct map *kernel_map; in build_alloc_func_list() local
363 kernel_map = machine__kernel_map(machine); in build_alloc_func_list()
364 if (map__load(kernel_map) < 0) { in build_alloc_func_list()
369 map__for_each_symbol(kernel_map, sym, node) { in build_alloc_func_list()
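The perf hits in this result show the usual sequence for walking kernel symbols: fetch the kernel map from the machine, load it, then iterate. A condensed sketch of that flow, assuming it lives inside the perf tree where these helpers and types (machine__kernel_map, map__load, map__for_each_symbol, struct machine/map/symbol/rb_node from perf's internal headers) are available; the function name and callback body are invented:

    /* Sketch only: error handling and the loop body are placeholders. */
    static int walk_kernel_symbols_sketch(struct machine *machine)
    {
            struct map *kernel_map;
            struct symbol *sym;
            struct rb_node *node;

            kernel_map = machine__kernel_map(machine);
            if (map__load(kernel_map) < 0)
                    return -1;              /* kernel map could not be loaded */

            map__for_each_symbol(kernel_map, sym, node) {
                    /* inspect each kernel symbol here, e.g. filter on sym->name */
            }
            return 0;
    }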
541 struct map *kernel_map = machine__kernel_map(machine); in map__rip_2objdump() local
543 if (kernel_map) in map__rip_2objdump()
544 map = kernel_map; in map__rip_2objdump()