Lines matching +full:0 +full:x23 (each hit is shown with its line number in the matched source file)
41 #if (PAGE_OFFSET & 0x1fffff) != 0
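Note: 0x1fffff is 2^21 - 1, so this preprocessor check rejects any PAGE_OFFSET that is not 2 MiB aligned. These hits are consistent with the arm64 kernel's head.S. A minimal C restatement of the same check, with PAGE_OFFSET as a stand-in constant rather than the kernel's real definition:

    /* Stand-in value; the kernel derives the real PAGE_OFFSET from
     * the configured VA size. */
    #define PAGE_OFFSET 0xffff800000000000UL

    /* Same test as the #if above: fail unless the low 21 bits are 0. */
    _Static_assert((PAGE_OFFSET & 0x1fffff) == 0,
                   "PAGE_OFFSET must be at least 2MB aligned");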
70 add x13, x18, #0x16
74 .long 0 // reserved
76 .quad 0 // Image load offset from start of RAM, little-endian
79 .quad 0 // reserved
80 .quad 0 // reserved
81 .quad 0 // reserved
89 .long 0 // reserved
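Note: the odd-looking add at line 70 exists only for its encoding. "add x13, x18, #0x16" should assemble to 0x91005a4d, and stored little-endian its first two bytes are 'M' 'Z', the DOS/PE magic UEFI expects at offset 0 of the image; the zeroed .quad/.long directives around it are the load-offset and reserved fields of the arm64 Image header. A small sketch that checks the byte order (the opcode value is an assumption computed by hand, not taken from the source):

    #include <stdio.h>

    int main(void)
    {
        unsigned int insn = 0x91005a4d;   /* add x13, x18, #0x16 */

        /* AArch64 instructions are stored little-endian, so the low
         * byte comes first in the image: 0x4d 0x5a = "MZ". */
        printf("%c%c\n", (char)(insn & 0xff), (char)((insn >> 8) & 0xff));
        return 0;
    }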
100 * x23 primary_entry() .. start_kernel() physical misalignment/KASLR offset
108 adrp x23, __PHYS_OFFSET
109 and x23, x23, MIN_KIMG_ALIGN - 1 // KASLR offset, defaults to 0
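Note: the adrp/and pair at lines 108-109 seeds x23 with the image's physical misalignment: whatever sits below the 2 MiB alignment boundary of the load address becomes the initial KASLR displacement (0 for an aligned load). A C sketch, assuming arm64's MIN_KIMG_ALIGN of 2 MiB:

    #define MIN_KIMG_ALIGN 0x200000UL   /* 2 MiB, SZ_2M in the kernel */

    /* load_page is a stand-in for the page address that adrp computes
     * for __PHYS_OFFSET. */
    static unsigned long initial_kaslr_offset(unsigned long load_page)
    {
        return load_page & (MIN_KIMG_ALIGN - 1);
    }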
135 mov x1, #0x20 // 4 x 8 bytes
244 mov \count, #0
312 and x6, x6, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
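Note: the mask at line 312 isolates the 4-bit LVA (VARange) field of ID_AA64MMFR2_EL1; a nonzero value means the CPU supports 52-bit virtual addresses. A sketch, assuming the kernel's ID_AA64MMFR2_LVA_SHIFT of 16:

    #define ID_AA64MMFR2_LVA_SHIFT 16

    /* mmfr2 stands in for the value read from ID_AA64MMFR2_EL1 with
     * mrs. */
    static int supports_52bit_va(unsigned long mmfr2)
    {
        return (mmfr2 & (0xfUL << ID_AA64MMFR2_LVA_SHIFT)) != 0;
    }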
382 add x5, x5, x23 // add KASLR displacement
455 tst x23, ~(MIN_KIMG_ALIGN - 1) // already running randomized?
456 b.ne 0f
459 cbz x0, 0f // KASLR disabled? just proceed
460 orr x23, x23, x0 // record KASLR offset
463 0:
466 mov x29, #0
467 mov x30, #0
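Note: lines 455-467 are the KASLR decision in __primary_switched(): if x23 already carries a coarse (multiple-of-2-MiB) component, randomization has already happened and boot proceeds; otherwise kaslr_early_init() parses the FDT and any offset it returns is OR-ed into x23 before returning to __primary_switch() for a second pass. x29/x30 are zeroed so backtraces terminate cleanly once start_kernel() runs. A runnable C model of just this control flow (demo_* names are stand-ins; only kaslr_early_init() is a real kernel function):

    #define MIN_KIMG_ALIGN 0x200000UL

    /* Stand-in for the kernel's kaslr_early_init(): pretend the FDT
     * allowed KASLR and a fixed displacement was chosen. */
    static unsigned long demo_kaslr_early_init(void *fdt)
    {
        (void)fdt;
        return 0x1200000UL;   /* hypothetical 2 MiB-aligned offset */
    }

    /* Returns the updated offset; *rerun tells the caller to redo the
     * mapping and relocations with the new offset. */
    static unsigned long demo_primary_switched(unsigned long x23, void *fdt,
                                               int *rerun)
    {
        *rerun = 0;
        if ((x23 & ~(MIN_KIMG_ALIGN - 1)) == 0) {   /* not randomized yet */
            unsigned long off = demo_kaslr_early_init(fdt);
            if (off) {
                x23 |= off;     /* record KASLR offset */
                *rerun = 1;     /* back to __primary_switch() */
            }
        }
        return x23;
    }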
554 tbz x0, #0, 3f // and check that it sticks
593 b 7f // use EL1&0 translation.
627 mov x0, #0x33ff
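Note: 0x33ff is written to CPTR_EL2 in the source ("Disable copro. traps to EL2"). It sets bits 9:0 and 13:12 while leaving bit 10 (TFP) clear, so EL1/EL0 FP/SIMD accesses are not trapped to EL2; the set bits are essentially the register's RES1 pattern for this configuration. A sketch of the decomposition (bit positions follow the ARM ARM; treat them as an assumption here):

    #include <stdio.h>

    int main(void)
    {
        unsigned int cptr = 0x33ff;           /* bits 9:0 and 13:12 set */
        unsigned int tfp  = (cptr >> 10) & 1; /* TFP: trap FP/SIMD */

        printf("TFP=%u\n", tfp);              /* prints TFP=0 */
        return 0;
    }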
692 .quad 0
750 mov x29, #0
751 mov x30, #0
801 update_early_cpu_boot_status 0, x2, x3
829 and x0, x0, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
862 add x11, x11, x23 // actual virtual offset
866 0: cmp x9, x10
871 b.ne 0b
872 add x14, x14, x23 // relocate
873 str x14, [x12, x23]
874 b 0b
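Note: lines 862-874 are the RELA pass of the kernel's self-relocation: walk the Elf64_Rela table and, for every R_AARCH64_RELATIVE entry, store addend + displacement at the displaced target address (the str at line 873 writes through [r_offset + x23]). A C sketch of the same loop, using standard ELF types and paraphrased control flow:

    #include <stdint.h>

    #define R_AARCH64_RELATIVE 1027

    typedef struct {
        uint64_t r_offset;   /* link-time address of the word to patch */
        uint64_t r_info;     /* low 32 bits hold the relocation type */
        int64_t  r_addend;   /* link-time value of that word */
    } Elf64_Rela;

    static void apply_rela(const Elf64_Rela *rela, uint64_t count,
                           uint64_t offset)
    {
        for (uint64_t i = 0; i < count; i++) {
            if ((uint32_t)rela[i].r_info != R_AARCH64_RELATIVE)
                continue;   /* asm: cmp w13, #R_AARCH64_RELATIVE */
            *(uint64_t *)(rela[i].r_offset + offset) =
                rela[i].r_addend + offset;
        }
    }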
919 sub x15, x23, x24 // delta from previous offset
921 mov x24, x23 // save new offset
926 tbnz x11, #0, 3f // branch to handle bitmaps
927 add x13, x11, x23
936 tbz x11, #0, 5f // skip bit if not set
968 mov x24, #0 // no RELR displacement yet
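Note: lines 919-936 decode the compressed RELR relocation format, and line 968 initializes x24, the displacement applied on the previous pass, to 0. An entry with bit 0 clear is an address: the word at entry + offset gets the displacement added and the cursor moves past it. An entry with bit 0 set is a bitmap whose bits 1..63 mark which of the next 63 words also need relocating. Because the relocation code can run twice under KASLR, only the delta against the previous offset (x23 - x24 at line 919) is added to each word. A C sketch of the decoder under those assumptions:

    #include <stdint.h>

    static void apply_relr(const uint64_t *relr, uint64_t count,
                           uint64_t offset, uint64_t delta)
    {
        uint64_t *where = 0;

        if (!delta)         /* asm: nothing to do if offset unchanged */
            return;

        for (uint64_t i = 0; i < count; i++) {
            uint64_t entry = relr[i];

            if ((entry & 1) == 0) {              /* address entry */
                where = (uint64_t *)(entry + offset);
                *where++ += delta;               /* relocate, advance */
            } else {                             /* bitmap entry */
                for (uint64_t bit = 1; bit < 64; bit++)
                    if (entry & (1ULL << bit))
                        where[bit - 1] += delta;
                where += 63;  /* each bitmap covers the next 63 words */
            }
        }
    }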
977 * If we return here, we have a KASLR displacement in x23 which we need