/*
 * Common bootstrapping code to transition from 16-bit to 32-bit code, and to
 * transition from 32-bit to 64-bit code (x86-64 only)
 */

/* EFI provides its own SIPI sequence to handle relocation. */
#ifndef CONFIG_EFI
.code16
.globl rm_trampoline
rm_trampoline:

/* Store the SIPI vector code at the beginning of the trampoline. */
sipi_entry:
	/* Set CR0.PE to switch from real mode to protected mode. */
	mov %cr0, %eax
	or $1, %eax
	mov %eax, %cr0
	lgdtl ap_rm_gdt_descr - sipi_entry
	/* Far jump through the 32-bit code segment (selector 8). */
	ljmpl $8, $ap_start32
sipi_end:

.globl ap_rm_gdt_descr
ap_rm_gdt_descr:
#ifdef __i386__
	.word 0
	.long 0
#else
	.word gdt32_end - gdt32 - 1
	.long gdt32
#endif

.globl rm_trampoline_end
rm_trampoline_end:
#endif

/* The 32-bit => 64-bit trampoline is x86-64 only. */
#ifdef __x86_64__
.code32

MSR_GS_BASE = 0xc0000101

/* Point MSR_GS_BASE at the bottom of this CPU's stack page (%esp - 4096). */
.macro setup_percpu_area
	lea -4096(%esp), %eax
	mov $0, %edx
	mov $MSR_GS_BASE, %ecx
	wrmsr
.endm

/*
 * Load the flat data segment (selector 0x10) into the data segment
 * registers.  Reloading %gs clobbers the GS base, so save it with rdmsr
 * first and restore it afterwards.
 */
.macro setup_segments
	mov $MSR_GS_BASE, %ecx
	rdmsr

	mov $0x10, %bx
	mov %bx, %ds
	mov %bx, %es
	mov %bx, %fs
	mov %bx, %gs
	mov %bx, %ss

	/* restore MSR_GS_BASE */
	wrmsr
.endm

prepare_64:
	lgdt gdt_descr
	setup_segments

	/* Reset CR4 to a known state. */
	xor %eax, %eax
	mov %eax, %cr4

enter_long_mode:
	mov %cr4, %eax
	bts $5, %eax // CR4.PAE
	mov %eax, %cr4

	/* Load the root page table. */
	mov pt_root, %eax
	mov %eax, %cr3

efer = 0xc0000080
	/* Set EFER.LME. */
	mov $efer, %ecx
	rdmsr
	bts $8, %eax
	wrmsr

	/* Set CR0.PE and CR0.PG; with EFER.LME this activates long mode. */
	mov %cr0, %eax
	bts $0, %eax
	bts $31, %eax
	mov %eax, %cr0
	ret

ap_start32:
	setup_segments
	/*
	 * Atomically carve out a 4KiB stack: %esp receives the old
	 * smp_stacktop and smp_stacktop moves down by 4096.
	 */
	mov $-4096, %esp
	lock xaddl %esp, smp_stacktop
	setup_percpu_area
	call prepare_64
	ljmpl $8, $ap_start64
#endif