Lines Matching full:accel
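(Search results for the identifier accel, apparently in QEMU's x86 Hypervisor.framework backend under target/i386/hvf/. Each hit shows the source line number, the matching line, and the enclosing function; statements that wrap in the source appear truncated here at the line break. cpu->accel->fd / cs->accel->fd is the per-vCPU hv_vcpuid_t handle that every Hypervisor.framework and VMCS accessor call takes. The notes and sketches between the hit groups below are reconstructed context, not verbatim QEMU source.)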

77 #include "qemu/accel.h"
90 wreg(cpu->accel->fd, HV_X86_TPR, tpr); in vmx_update_tpr()
92 wvmcs(cpu->accel->fd, VMCS_TPR_THRESHOLD, 0); in vmx_update_tpr()
94 wvmcs(cpu->accel->fd, VMCS_TPR_THRESHOLD, (irr > tpr) ? tpr >> 4 : in vmx_update_tpr()
102 int tpr = rreg(cpu->accel->fd, HV_X86_TPR) >> 4; in update_apic_tpr()
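Hits 90–102 are the TPR sync between QEMU's APIC model and HVF: vmx_update_tpr() pushes the task-priority register into the vCPU and programs a threshold so the guest only exits when it lowers TPR below the highest pending interrupt, and update_apic_tpr() reads it back after a run. A sketch of the write side, assuming QEMU's usual APIC helpers (cpu_get_apic_tpr, apic_get_highest_priority_irr):

    static void vmx_update_tpr(CPUState *cpu)
    {
        X86CPU *x86_cpu = X86_CPU(cpu);
        int tpr = cpu_get_apic_tpr(x86_cpu->apic_state) << 4;  /* CR8 -> TPR byte */
        int irr = apic_get_highest_priority_irr(x86_cpu->apic_state);

        wreg(cpu->accel->fd, HV_X86_TPR, tpr);
        if (irr == -1) {
            /* Nothing pending: never exit on TPR writes. */
            wvmcs(cpu->accel->fd, VMCS_TPR_THRESHOLD, 0);
        } else {
            /* Exit when TPR drops below the highest pending vector's class. */
            wvmcs(cpu->accel->fd, VMCS_TPR_THRESHOLD,
                  (irr > tpr) ? tpr >> 4 : irr >> 4);
        }
    }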
220 hv_vcpu_interrupt(&cpu->accel->fd, 1); in hvf_kick_vcpu_thread()
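The & on hit 220 is not a typo: hv_vcpu_interrupt() takes an array of vCPU handles plus a count, so a single vCPU is kicked by passing the address of its handle with a count of 1. This is how qemu_cpu_kick() forces an HVF vCPU thread out of the kernel:

    /* Hypervisor.framework prototype, for reference: */
    hv_return_t hv_vcpu_interrupt(hv_vcpuid_t *vcpus, unsigned int vcpu_count);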
315 wvmcs(cpu->accel->fd, VMCS_PIN_BASED_CTLS, in hvf_arch_init_vcpu()
320 wvmcs(cpu->accel->fd, VMCS_PRI_PROC_BASED_CTLS, in hvf_arch_init_vcpu()
335 wvmcs(cpu->accel->fd, VMCS_SEC_PROC_BASED_CTLS, in hvf_arch_init_vcpu()
338 wvmcs(cpu->accel->fd, VMCS_ENTRY_CTLS, in hvf_arch_init_vcpu()
340 wvmcs(cpu->accel->fd, VMCS_EXCEPTION_BITMAP, 0); /* Double fault */ in hvf_arch_init_vcpu()
342 wvmcs(cpu->accel->fd, VMCS_TPR_THRESHOLD, 0); in hvf_arch_init_vcpu()
354 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_STAR, 1); in hvf_arch_init_vcpu()
355 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_LSTAR, 1); in hvf_arch_init_vcpu()
356 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_CSTAR, 1); in hvf_arch_init_vcpu()
357 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_FMASK, 1); in hvf_arch_init_vcpu()
358 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_FSBASE, 1); in hvf_arch_init_vcpu()
359 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_GSBASE, 1); in hvf_arch_init_vcpu()
360 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_KERNELGSBASE, 1); in hvf_arch_init_vcpu()
361 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_TSC_AUX, 1); in hvf_arch_init_vcpu()
362 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_IA32_TSC, 1); in hvf_arch_init_vcpu()
363 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_IA32_SYSENTER_CS, 1); in hvf_arch_init_vcpu()
364 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_IA32_SYSENTER_EIP, 1); in hvf_arch_init_vcpu()
365 hv_vcpu_enable_native_msr(cpu->accel->fd, MSR_IA32_SYSENTER_ESP, 1); in hvf_arch_init_vcpu()
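Hits 315–365 are the one-time vCPU setup in hvf_arch_init_vcpu(): the pin-based, primary/secondary processor-based, and entry execution controls are programmed via wvmcs(), the exception bitmap is cleared, and a block of hot MSRs (syscall/sysenter state, FS/GS bases, TSC) is handed to the hypervisor with hv_vcpu_enable_native_msr() so guest RDMSR/WRMSR on them never exits to userspace. The twelve enable calls could equally be written as a table walk; a sketch (the array is illustrative, not QEMU's code):

    static const uint32_t native_msrs[] = {
        MSR_STAR, MSR_LSTAR, MSR_CSTAR, MSR_FMASK,
        MSR_FSBASE, MSR_GSBASE, MSR_KERNELGSBASE, MSR_TSC_AUX,
        MSR_IA32_TSC, MSR_IA32_SYSENTER_CS,
        MSR_IA32_SYSENTER_EIP, MSR_IA32_SYSENTER_ESP,
    };
    for (size_t i = 0; i < ARRAY_SIZE(native_msrs); i++) {
        hv_vcpu_enable_native_msr(cpu->accel->fd, native_msrs[i], 1);
    }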
406 env->error_code = rvmcs(cpu->accel->fd, VMCS_IDT_VECTORING_ERROR); in hvf_store_events()
409 if ((rvmcs(cpu->accel->fd, VMCS_GUEST_INTERRUPTIBILITY) & in hvf_store_events()
415 if (rvmcs(cpu->accel->fd, VMCS_GUEST_INTERRUPTIBILITY) & in hvf_store_events()
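hvf_store_events() (hits 406–415) folds the VMCS interruptibility state back into CPUX86State after an exit: the STI/MOV-SS interrupt shadow and NMI blocking live in VMCS_GUEST_INTERRUPTIBILITY, not in any guest-visible register. A sketch of the pattern, assuming the bhyve-derived bit names in QEMU's vmcs.h:

    uint64_t gi = rvmcs(cpu->accel->fd, VMCS_GUEST_INTERRUPTIBILITY);

    /* One-instruction IRQ shadow after STI or MOV SS. */
    if (gi & (VMCS_INTERRUPTIBILITY_STI_BLOCKING |
              VMCS_INTERRUPTIBILITY_MOVSS_BLOCKING)) {
        env->hflags |= HF_INHIBIT_IRQ_MASK;
    } else {
        env->hflags &= ~HF_INHIBIT_IRQ_MASK;
    }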
472 RRX(env, R_EAX) = rreg(cs->accel->fd, HV_X86_RAX); in hvf_load_regs()
473 RRX(env, R_EBX) = rreg(cs->accel->fd, HV_X86_RBX); in hvf_load_regs()
474 RRX(env, R_ECX) = rreg(cs->accel->fd, HV_X86_RCX); in hvf_load_regs()
475 RRX(env, R_EDX) = rreg(cs->accel->fd, HV_X86_RDX); in hvf_load_regs()
476 RRX(env, R_ESI) = rreg(cs->accel->fd, HV_X86_RSI); in hvf_load_regs()
477 RRX(env, R_EDI) = rreg(cs->accel->fd, HV_X86_RDI); in hvf_load_regs()
478 RRX(env, R_ESP) = rreg(cs->accel->fd, HV_X86_RSP); in hvf_load_regs()
479 RRX(env, R_EBP) = rreg(cs->accel->fd, HV_X86_RBP); in hvf_load_regs()
481 RRX(env, i) = rreg(cs->accel->fd, HV_X86_RAX + i); in hvf_load_regs()
484 env->eflags = rreg(cs->accel->fd, HV_X86_RFLAGS); in hvf_load_regs()
486 env->eip = rreg(cs->accel->fd, HV_X86_RIP); in hvf_load_regs()
495 wreg(cs->accel->fd, HV_X86_RAX, RAX(env)); in hvf_store_regs()
496 wreg(cs->accel->fd, HV_X86_RBX, RBX(env)); in hvf_store_regs()
497 wreg(cs->accel->fd, HV_X86_RCX, RCX(env)); in hvf_store_regs()
498 wreg(cs->accel->fd, HV_X86_RDX, RDX(env)); in hvf_store_regs()
499 wreg(cs->accel->fd, HV_X86_RSI, RSI(env)); in hvf_store_regs()
500 wreg(cs->accel->fd, HV_X86_RDI, RDI(env)); in hvf_store_regs()
501 wreg(cs->accel->fd, HV_X86_RBP, RBP(env)); in hvf_store_regs()
502 wreg(cs->accel->fd, HV_X86_RSP, RSP(env)); in hvf_store_regs()
504 wreg(cs->accel->fd, HV_X86_RAX + i, RRX(env, i)); in hvf_store_regs()
508 wreg(cs->accel->fd, HV_X86_RFLAGS, env->eflags); in hvf_store_regs()
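hvf_load_regs()/hvf_store_regs() (hits 472–508) are mirror images: the eight legacy GPRs are transferred by name, then R8–R15 go through a loop that relies on HV_X86_R8…HV_X86_R15 sitting exactly eight enum slots after HV_X86_RAX in hv_x86_reg_t, so HV_X86_RAX + i lines up with QEMU's regs[i] for i = 8..15. The store side of that loop, reconstructed around hit 504:

    for (i = 8; i < 16; i++) {
        wreg(cs->accel->fd, HV_X86_RAX + i, RRX(env, i));
    }

RFLAGS and RIP are moved separately (hits 484/486/508) because they are distinct hv_x86_reg_t values, not part of the GPR file.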
521 val = rdtscp() + rvmcs(cs->accel->fd, VMCS_TSC_OFFSET); in hvf_simulate_rdmsr()
541 val = rvmcs(cs->accel->fd, VMCS_GUEST_IA32_EFER); in hvf_simulate_rdmsr()
544 val = rvmcs(cs->accel->fd, VMCS_GUEST_FS_BASE); in hvf_simulate_rdmsr()
547 val = rvmcs(cs->accel->fd, VMCS_GUEST_GS_BASE); in hvf_simulate_rdmsr()
550 val = rvmcs(cs->accel->fd, VMCS_HOST_FS_BASE); in hvf_simulate_rdmsr()
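In hvf_simulate_rdmsr() (hits 521–550), MSRs that shadow VMCS guest-state fields are read back out of the VMCS rather than from a cached copy, and the guest TSC is reconstructed as host rdtscp() plus the VMCS TSC offset. Note hit 550: one MSR (KERNELGSBASE, by the look of the surrounding cases) is parked in the otherwise-unused VMCS_HOST_FS_BASE field. The shape of the dispatch, as a sketch:

    switch (msr) {                       /* MSR index from guest ECX */
    case MSR_IA32_TSC:
        val = rdtscp() + rvmcs(cs->accel->fd, VMCS_TSC_OFFSET);
        break;
    case MSR_EFER:
        val = rvmcs(cs->accel->fd, VMCS_GUEST_IA32_EFER);
        break;
    case MSR_FSBASE:
        val = rvmcs(cs->accel->fd, VMCS_GUEST_FS_BASE);
        break;
    /* remaining cases omitted in this sketch */
    }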
649 wvmcs(cs->accel->fd, VMCS_GUEST_FS_BASE, data); in hvf_simulate_wrmsr()
652 wvmcs(cs->accel->fd, VMCS_GUEST_GS_BASE, data); in hvf_simulate_wrmsr()
655 wvmcs(cs->accel->fd, VMCS_HOST_FS_BASE, data); in hvf_simulate_wrmsr()
668 wvmcs(cs->accel->fd, VMCS_GUEST_IA32_EFER, data); in hvf_simulate_wrmsr()
670 hv_vcpu_invalidate_tlb(cs->accel->fd); in hvf_simulate_wrmsr()
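The write side (hits 649–670) is symmetric, with one wrinkle: after updating guest EFER in the VMCS, the TLB is explicitly invalidated, since EFER bits such as NXE change how existing page-table entries must be interpreted. Reconstructed (the NXE guard is my reading of why hit 670 sits where it does; treat it as illustrative):

    case MSR_EFER:
        wvmcs(cs->accel->fd, VMCS_GUEST_IA32_EFER, data);
        if (data & MSR_EFER_NXE) {
            /* NX enable changes PTE interpretation: flush stale entries. */
            hv_vcpu_invalidate_tlb(cs->accel->fd);
        }
        break;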
736 if (cpu->accel->dirty) { in hvf_vcpu_exec()
738 cpu->accel->dirty = false; in hvf_vcpu_exec()
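accel->dirty (hits 736–738) is the usual accelerator dirty-state flag: it is set whenever QEMU mutates CPUX86State behind the hypervisor's back, and checked on entry to the run loop so the register file is pushed into the kernel exactly once before resuming the guest:

    if (cpu->accel->dirty) {
        hvf_put_registers(cpu);      /* flush CPUX86State into the HVF vCPU */
        cpu->accel->dirty = false;
    }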
752 hv_return_t r = hv_vcpu_run_until(cpu->accel->fd, HV_DEADLINE_FOREVER); in hvf_vcpu_exec()
756 uint64_t exit_reason = rvmcs(cpu->accel->fd, VMCS_EXIT_REASON); in hvf_vcpu_exec()
757 uint64_t exit_qual = rvmcs(cpu->accel->fd, VMCS_EXIT_QUALIFICATION); in hvf_vcpu_exec()
758 uint32_t ins_len = (uint32_t)rvmcs(cpu->accel->fd, in hvf_vcpu_exec()
761 uint64_t idtvec_info = rvmcs(cpu->accel->fd, VMCS_IDT_VECTORING_INFO); in hvf_vcpu_exec()
764 rip = rreg(cpu->accel->fd, HV_X86_RIP); in hvf_vcpu_exec()
765 env->eflags = rreg(cpu->accel->fd, HV_X86_RFLAGS); in hvf_vcpu_exec()
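Hits 752–765 are the heart of the run loop: hv_vcpu_run_until() with HV_DEADLINE_FOREVER (the macOS 10.15+ entry point, which unlike plain hv_vcpu_run() does not return on every host timer tick), followed by decoding the VM exit entirely from VMCS fields: reason, qualification, instruction length, IDT-vectoring info, plus the live RIP and RFLAGS.

    /* Hypervisor.framework prototype, for reference: */
    hv_return_t hv_vcpu_run_until(hv_vcpuid_t vcpu, uint64_t deadline);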
795 uint64_t gpa = rvmcs(cpu->accel->fd, VMCS_GUEST_PHYSICAL_ADDRESS); in hvf_vcpu_exec()
840 RAX(env) = rreg(cpu->accel->fd, HV_X86_RAX); in hvf_vcpu_exec()
856 uint32_t rax = (uint32_t)rreg(cpu->accel->fd, HV_X86_RAX); in hvf_vcpu_exec()
857 uint32_t rbx = (uint32_t)rreg(cpu->accel->fd, HV_X86_RBX); in hvf_vcpu_exec()
858 uint32_t rcx = (uint32_t)rreg(cpu->accel->fd, HV_X86_RCX); in hvf_vcpu_exec()
859 uint32_t rdx = (uint32_t)rreg(cpu->accel->fd, HV_X86_RDX); in hvf_vcpu_exec()
863 env->cr[4] = rvmcs(cpu->accel->fd, VMCS_GUEST_CR4); in hvf_vcpu_exec()
867 wreg(cpu->accel->fd, HV_X86_RAX, rax); in hvf_vcpu_exec()
868 wreg(cpu->accel->fd, HV_X86_RBX, rbx); in hvf_vcpu_exec()
869 wreg(cpu->accel->fd, HV_X86_RCX, rcx); in hvf_vcpu_exec()
870 wreg(cpu->accel->fd, HV_X86_RDX, rdx); in hvf_vcpu_exec()
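The CPUID exit handler (hits 856–870) pulls EAX–EDX out of the vCPU, runs QEMU's CPUID model, and writes the four results back. Hit 863 is the subtle part: before answering leaf 1, CR4 is re-read from the VMCS because CPUID.1:ECX.OSXSAVE must mirror the live CR4.OSXSAVE bit, which the guest can flip without causing an exit. Sketch of that guard:

    if (rax == 1) {
        /* CPUID.1:ECX[27] (OSXSAVE) reflects CR4.OSXSAVE, so refresh CR4. */
        env->cr[4] = rvmcs(cpu->accel->fd, VMCS_GUEST_CR4);
    }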
876 uint32_t eax = (uint32_t)rreg(cpu->accel->fd, HV_X86_RAX); in hvf_vcpu_exec()
877 uint32_t ecx = (uint32_t)rreg(cpu->accel->fd, HV_X86_RCX); in hvf_vcpu_exec()
878 uint32_t edx = (uint32_t)rreg(cpu->accel->fd, HV_X86_RDX); in hvf_vcpu_exec()
885 wreg(cpu->accel->fd, HV_X86_XCR0, env->xcr0 | 1); in hvf_vcpu_exec()
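XSETBV (hits 876–885): the new XCR0 is assembled from EDX:EAX and written back with bit 0 forced on, since XCR0.X87 is architecturally required to be 1. Reconstructed:

    env->xcr0 = ((uint64_t)edx << 32) | eax;
    wreg(cpu->accel->fd, HV_X86_XCR0, env->xcr0 | 1);  /* XCR0.X87 must stay set */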
924 macvm_set_cr0(cpu->accel->fd, RRX(env, reg)); in hvf_vcpu_exec()
928 macvm_set_cr4(cpu->accel->fd, RRX(env, reg)); in hvf_vcpu_exec()
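MOV-to-CR exits (hits 924–928) do not write the control register directly; they go through the macvm_set_cr0()/macvm_set_cr4() helpers, which keep the VMCS guest and read-shadow CR fields consistent with each other and flush the TLB as needed.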
963 uint64_t vinfo = rvmcs(cpu->accel->fd, VMCS_IDT_VECTORING_INFO); in hvf_vcpu_exec()
976 wreg(cpu->accel->fd, HV_X86_RAX, 0); in hvf_vcpu_exec()
977 wreg(cpu->accel->fd, HV_X86_RDX, 0); in hvf_vcpu_exec()
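The tail of hvf_vcpu_exec(): hit 963 re-reads the IDT-vectoring info on a task-switch exit to recover which pending event triggered the switch, and hits 976–977 look like a stubbed RDPMC handler, returning 0 in RAX:RDX rather than emulating performance counters.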