/linux/arch/arm64/kvm/hyp/nvhe/
tlb.c
     14  struct kvm_s2_mmu *mmu;
     19  static void enter_vmid_context(struct kvm_s2_mmu *mmu,  in enter_vmid_context()
     23  struct kvm_s2_mmu *host_s2_mmu = &host_mmu.arch.mmu;  in enter_vmid_context()
    120  struct kvm_s2_mmu *mmu = cxt->mmu;  in exit_vmid_context()
    148  void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu,  in __kvm_tlb_flush_vmid_ipa()
    178  void __kvm_tlb_flush_vmid_ipa_nsh(struct kvm_s2_mmu *mmu,  in __kvm_tlb_flush_vmid_ipa_nsh()
    208  void __kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,  in __kvm_tlb_flush_vmid_range()
    235  void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu)  in __kvm_tlb_flush_vmid()
    249  void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu)  in __kvm_flush_cpu_context()
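The tlb.c hits above are the nVHE stage-2 TLB maintenance helpers; each one is bracketed by enter_vmid_context()/exit_vmid_context() so the TLBI is issued under the target MMU's VMID. A minimal sketch of that shape, assuming the mainline context type (tlb_inv_context) and barrier macros; the real body also carries errata handling, so treat this as illustrative:

void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, phys_addr_t ipa, int level)
{
	struct tlb_inv_context cxt;

	/* Load VTTBR_EL2 for @mmu so the TLBI is scoped to its VMID */
	enter_vmid_context(mmu, &cxt, false);

	/* Invalidate the stage-2 entry for this IPA at the given level... */
	__tlbi_level(ipas2e1is, ipa >> 12, level);
	dsb(ish);

	/* ...then any stage-1 entries that depended on that translation */
	__tlbi(vmalle1is);
	dsb(ish);
	isb();

	exit_vmid_context(&cxt);
}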
hyp-main.c
    387  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);  in handle___kvm_tlb_flush_vmid_ipa()
    396  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);  in handle___kvm_tlb_flush_vmid_ipa_nsh()
    406  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);  in handle___kvm_tlb_flush_vmid_range()
    415  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);  in handle___kvm_tlb_flush_vmid()
    438  DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);  in handle___kvm_flush_cpu_context()
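The hyp-main.c hits are the HVC dispatch stubs: each pulls the kvm_s2_mmu pointer (and any further arguments) out of the host register context with DECLARE_REG and forwards to the matching __kvm_tlb_* helper. A sketch of one handler, assuming register slots 2 and 3 carry the IPA and level as suggested by the kvm_asm.h prototype:

static void handle___kvm_tlb_flush_vmid_ipa(struct kvm_cpu_context *host_ctxt)
{
	DECLARE_REG(struct kvm_s2_mmu *, mmu, host_ctxt, 1);
	DECLARE_REG(phys_addr_t, ipa, host_ctxt, 2);
	DECLARE_REG(int, level, host_ctxt, 3);

	/* The host passes a kernel VA; translate it to a hyp VA first */
	__kvm_tlb_flush_vmid_ipa(kern_hyp_va(mmu), ipa, level);
}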
switch.c
    240  struct kvm_s2_mmu *mmu;  in __kvm_vcpu_run()
mem_protect.c
    145  struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu;  in kvm_host_prepare_stage2()
    259  struct kvm_s2_mmu *mmu = &vm->kvm.arch.mmu;  in kvm_guest_prepare_stage2()
    318  struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu;  in __pkvm_prot_finalize()
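The mem_protect.c hits all refer to host_mmu.arch.mmu, the stage-2 MMU that protected KVM builds for the host itself: kvm_host_prepare_stage2() populates it and __pkvm_prot_finalize() switches the host to run under it. A heavily condensed, hypothetical fragment of the finalize step; the kvm_nvhe_init_params field names are assumptions from recent mainline, and validation, cache maintenance and error paths are dropped:

static int host_stage2_enable(struct kvm_nvhe_init_params *params)
{
	struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu;

	/* Describe the host's own stage-2 in the per-CPU init parameters */
	params->vttbr = kvm_get_vttbr(mmu);
	params->vtcr = mmu->vtcr;
	params->hcr_el2 |= HCR_VM;

	/* Load VTCR/VTTBR so the host now runs under its own stage-2 */
	__load_stage2(mmu, &host_mmu.arch);
	return 0;
}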
pkvm.c
    504  struct kvm_s2_mmu *mmu = &hyp_vm->kvm.arch.mmu;  in insert_vm_table_entry()
/linux/arch/arm64/include/asm/
kvm_mmu.h
    171  void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
    173  void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
    174  void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
    177  int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type);
    179  void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu);
    303  static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu)  in kvm_get_vttbr()
    319  static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu,  in __load_stage2()
    333  static inline struct kvm *kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu)  in kvm_s2_mmu_to_kvm()
    344  static inline bool kvm_s2_mmu_valid(struct kvm_s2_mmu *mmu)  in kvm_s2_mmu_valid()
    349  static inline bool kvm_is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mm  [all...]
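The kvm_mmu.h hits around lines 303 and 319 are the helpers that turn a kvm_s2_mmu into a VTTBR_EL2 value and load it into the hardware. A sketch of both, with field and macro names taken from recent mainline and therefore approximate:

static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu)
{
	struct kvm_vmid *vmid = &mmu->vmid;
	u64 cnp = system_supports_cnp() ? VTTBR_CNP_BIT : 0;
	u64 vmid_field, baddr;

	/* BADDR comes from the stage-2 PGD, the VMID from mmu->vmid */
	baddr = mmu->pgd_phys;
	vmid_field = atomic64_read(&vmid->id) << VTTBR_VMID_SHIFT;
	vmid_field &= VTTBR_VMID_MASK(kvm_get_vmid_bits());
	return kvm_phys_to_vttbr(baddr) | vmid_field | cnp;
}

static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu,
					  struct kvm_arch *arch)
{
	write_sysreg(mmu->vtcr, vtcr_el2);
	write_sysreg(kvm_get_vttbr(mmu), vttbr_el2);

	/* Speculative-AT errata require an ISB before switching to the guest regime */
	asm(ALTERNATIVE("nop", "isb", ARM64_WORKAROUND_SPECULATIVE_AT));
}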
kvm_asm.h
    219  struct kvm_s2_mmu;
    234  extern void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu);
    235  extern void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, phys_addr_t ipa,
    237  extern void __kvm_tlb_flush_vmid_ipa_nsh(struct kvm_s2_mmu *mmu,
    240  extern void __kvm_tlb_flush_vmid_range(struct kvm_s2_mmu *mmu,
    242  extern void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu);
    244  extern int __kvm_tlbi_s1e2(struct kvm_s2_mmu *mmu, u64 va, u64 sys_encoding);
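The kvm_asm.h hits are the hypercall-facing prototypes. From kernel context they are reached through kvm_call_hyp(), which compiles to a direct call under VHE and to an HVC into the hyp-main.c handlers above under nVHE. An illustrative call site (the wrapper function is hypothetical):

static void flush_guest_stage2(struct kvm *kvm)
{
	/* Drop every stage-2 TLB entry tagged with this VM's VMID */
	kvm_call_hyp(__kvm_tlb_flush_vmid, &kvm->arch.mmu);
}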
kvm_nested.h
     70  extern void kvm_init_nested_s2_mmu(struct kvm_s2_mmu *mmu);
     71  extern struct kvm_s2_mmu *lookup_s2_mmu(struct kvm_vcpu *vcpu);
     77  void (*)(struct kvm_s2_mmu *,
    135  unsigned long compute_tlb_inval_range(struct kvm_s2_mmu *mmu, u64 val);
kvm_pkvm.h
    180  int pkvm_pgtable_stage2_init(struct kvm_pgtable *pgt, struct kvm_s2_mmu *mmu,
/linux/arch/arm64/kvm/
nested.c
     53  static int init_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu)  in init_nested_s2_mmu()
     71  struct kvm_s2_mmu *tmp;  in kvm_vcpu_init_nested()
    467  static u8 get_guest_mapping_ttl(struct kvm_s2_mmu *mmu, u64 addr)  in get_guest_mapping_ttl()
    537  unsigned long compute_tlb_inval_range(struct kvm_s2_mmu *mmu, u64 val)  in compute_tlb_inval_range()
    594  void (*tlbi_callback)(struct kvm_s2_mmu *,  in kvm_s2_mmu_iterate_by_vmid() argument
    600  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];  in kvm_s2_mmu_iterate_by_vmid()
    612  struct kvm_s2_mmu *lookup_s2_mmu(struct kvm_vcpu *vcpu)  in lookup_s2_mmu()
    642  struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];  in lookup_s2_mmu()
    661  static struct kvm_s2_mmu *get_s2_mmu_nested(struct kvm_vcpu *vcpu)  in get_s2_mmu_nested()
    664  struct kvm_s2_mmu *s2_mm  in get_s2_mmu_nested()  [all...]
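The nested.c hits are the nested-virtualisation bookkeeping: kvm->arch.nested_mmus is an array of shadow kvm_s2_mmu structures, lookup_s2_mmu()/get_s2_mmu_nested() pick or recycle one for the vEL2 guest's current VTTBR, and kvm_s2_mmu_iterate_by_vmid() applies a TLBI callback to every shadow MMU whose guest VMID matches. A simplified sketch of that iteration; the helper names, the tlb_vttbr and nested_mmus_size fields, and the dropped callback argument are assumptions based on recent mainline:

/* Hypothetical helper: the VMID lives in VTTBR_EL2[63:48] */
static u16 vttbr_to_vmid(u64 vttbr)
{
	return (u16)(vttbr >> 48);
}

static void iterate_nested_mmus_by_vmid(struct kvm *kvm, u16 vmid,
					void (*tlbi_callback)(struct kvm_s2_mmu *))
{
	for (int i = 0; i < kvm->arch.nested_mmus_size; i++) {
		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

		/* Skip slots that never had a guest stage-2 installed */
		if (!kvm_s2_mmu_valid(mmu))
			continue;

		if (vmid == vttbr_to_vmid(mmu->tlb_vttbr))
			tlbi_callback(mmu);
	}
}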
mmu.c
     61  static int stage2_apply_range(struct kvm_s2_mmu *mmu, phys_addr_t addr,  in stage2_apply_range()
    327  static void __unmap_stage2_range(struct kvm_s2_mmu *mmu, phys_addr_t start, u64 size,  in __unmap_stage2_range()
    339  void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,  in kvm_stage2_unmap_range()
    345  void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end)  in kvm_stage2_flush_range()
    874  static int kvm_init_ipa_range(struct kvm_s2_mmu *mmu, unsigned long type)  in kvm_init_ipa_range()
    917  * - when secondary kvm_s2_mmu structures are initialised for NV
    921  int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type)  in kvm_init_stage2_mmu()
   1064  void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu)  in kvm_free_stage2_pgd()
   1145  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;  in kvm_phys_addr_ioremap()
   1183  void kvm_stage2_wp_range(struct kvm_s2_mm  [all...]
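The mmu.c hits cover kernel-side stage-2 management: kvm_init_stage2_mmu() allocates the page table, and kvm_stage2_unmap_range()/kvm_stage2_flush_range()/kvm_stage2_wp_range() wrap stage2_apply_range(), which walks the range in block-sized chunks. A sketch of the unmap path; the trailing parameters are truncated in the listing, so the size/may_block arguments and the kvm_pgtable call here follow recent mainline and should be treated as approximate:

void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
			    u64 size, bool may_block)
{
	struct kvm *kvm = kvm_s2_mmu_to_kvm(mmu);
	phys_addr_t end = start + size;

	lockdep_assert_held_write(&kvm->mmu_lock);

	/*
	 * Apply kvm_pgtable_stage2_unmap() chunk by chunk over [start, end),
	 * optionally rescheduling between chunks when may_block is true.
	 */
	WARN_ON(stage2_apply_range(mmu, start, end, kvm_pgtable_stage2_unmap,
				   may_block));
}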
ptdump.c
     99  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;  in kvm_ptdump_parser_create()
    134  struct kvm_s2_mmu *mmu = &kvm->arch.mmu;  in kvm_ptdump_guest_show()
pkvm.c
    288  int pkvm_pgtable_stage2_init(struct kvm_pgtable *pgt, struct kvm_s2_mmu *mmu,  in pkvm_pgtable_stage2_init()
at.c
   1204  struct kvm_s2_mmu *mmu;  in __kvm_at_s1e01_fast()
sys_regs.c
   3635  static void s2_mmu_unmap_range(struct kvm_s2_mmu *mmu,  in s2_mmu_unmap_range()
   3717  static void s2_mmu_unmap_ipa(struct kvm_s2_mmu *mmu,  in s2_mmu_unmap_ipa()
   3763  static void s2_mmu_tlbi_s1e1(struct kvm_s2_mmu *mmu,  in s2_mmu_tlbi_s1e1()
arm.c
    576  struct kvm_s2_mmu *mmu;  in kvm_arch_vcpu_load()
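The arm.c hit is the vCPU load path: kvm_arch_vcpu_load() looks at the stage-2 MMU the vCPU will run under (vcpu->arch.hw_mmu, which under nested virt may point at one of the shadow MMUs above) and flushes stale per-CPU context when a different vCPU of the same VM last ran on this physical CPU. A hypothetical helper condensing that fragment, with field names assumed from recent mainline:

static void flush_context_if_needed(struct kvm_vcpu *vcpu)
{
	struct kvm_s2_mmu *mmu = vcpu->arch.hw_mmu;
	int *last_ran = this_cpu_ptr(mmu->last_vcpu_ran);

	/*
	 * TLB and branch predictor entries are tagged with the VMID, which
	 * all vCPUs of a VM share; flush them if another vCPU ran here last.
	 */
	if (*last_ran != vcpu->vcpu_idx) {
		kvm_call_hyp(__kvm_flush_cpu_context, mmu);
		*last_ran = vcpu->vcpu_idx;
	}
}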