Lines Matching defs:mmu
12 #include <asm/mmu.h>
150 #define kvm_phys_shift(mmu) VTCR_EL2_IPA((mmu)->vtcr)
151 #define kvm_phys_size(mmu) (_AC(1, ULL) << kvm_phys_shift(mmu))
152 #define kvm_phys_mask(mmu) (kvm_phys_size(mmu) - _AC(1, ULL))
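
The three macros above turn the IPA size encoded in the stage-2 VTCR into a shift, a total size, and an address mask. A minimal, runnable userspace sketch of the same arithmetic, assuming a hypothetical 40-bit IPA space (the real VTCR_EL2_IPA() extraction and the kvm_s2_mmu argument are stood in by a plain constant):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for kvm_phys_shift(mmu): assume a 40-bit guest IPA space. */
#define PHYS_SHIFT	40ULL
#define PHYS_SIZE	(1ULL << PHYS_SHIFT)	/* mirrors kvm_phys_size() */
#define PHYS_MASK	(PHYS_SIZE - 1ULL)	/* mirrors kvm_phys_mask() */

int main(void)
{
	/* A guest physical address is in range only if it fits under the mask. */
	uint64_t gpa = 0x12345678000ULL;

	printf("IPA size: %llu bytes\n", (unsigned long long)PHYS_SIZE);
	printf("gpa in range: %d\n", (gpa & ~PHYS_MASK) == 0);
	return 0;
}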
171 void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
173 void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
174 void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
177 int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type);
179 void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu);
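
Taken together, the prototypes above describe the stage-2 lifecycle: the table is created with kvm_init_stage2_mmu(), ranges are write-protected, flushed or unmapped while the VM runs, and kvm_free_stage2_pgd() tears it down. Below is an illustrative kernel-context sketch of that ordering, using only the signatures listed here; it assumes the usual KVM headers and an already-set-up struct kvm, and is not compile-tested standalone:

#include <linux/kvm_host.h>
#include <linux/sizes.h>
#include <asm/kvm_mmu.h>

/* Hypothetical helper: build a stage-2 table, write-protect one range
 * (as dirty logging would), then tear the whole table down again. */
static int stage2_lifecycle_demo(struct kvm *kvm, unsigned long type)
{
	struct kvm_s2_mmu *mmu = &kvm->arch.mmu;
	int ret;

	ret = kvm_init_stage2_mmu(kvm, mmu, type);	/* allocate pgd, pick VTCR */
	if (ret)
		return ret;

	kvm_stage2_wp_range(mmu, 0, SZ_2M);		/* write-protect [0, 2M) */
	kvm_stage2_flush_range(mmu, 0, SZ_2M);		/* clean the same range */

	kvm_free_stage2_pgd(mmu);			/* drop the whole table */
	return 0;
}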
303 static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu)
305 struct kvm_vmid *vmid = &mmu->vmid;
309 baddr = mmu->pgd_phys;
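
kvm_get_vttbr() packs the stage-2 pgd base address together with the VM's VMID (and the CnP hint when supported) into the value that will be written to VTTBR_EL2. A runnable userspace mimic of that packing, assuming the architectural layout (VMID in the top field starting at bit 48, CnP in bit 0) and hypothetical stand-in values for the pgd and VMID:

#include <stdint.h>
#include <stdio.h>

#define VTTBR_VMID_SHIFT	48		/* VMID field of VTTBR_EL2 */
#define VTTBR_CNP_BIT		(1ULL << 0)	/* Common-not-Private hint */

/* Illustrative stand-in for kvm_get_vttbr(): the pgd physical address,
 * VMID and CnP capability are plain parameters here. */
static uint64_t make_vttbr(uint64_t pgd_phys, uint16_t vmid, int has_cnp)
{
	uint64_t vttbr = pgd_phys;

	vttbr |= (uint64_t)vmid << VTTBR_VMID_SHIFT;
	if (has_cnp)
		vttbr |= VTTBR_CNP_BIT;
	return vttbr;
}

int main(void)
{
	printf("vttbr = %#llx\n",
	       (unsigned long long)make_vttbr(0x40001000ULL, 7, 1));
	return 0;
}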
319 static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu,
322 write_sysreg(mmu->vtcr, vtcr_el2);
323 write_sysreg(kvm_get_vttbr(mmu), vttbr_el2);
333 static inline struct kvm *kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu)
335 return container_of(mmu->arch, struct kvm, arch);
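
kvm_s2_mmu_to_kvm() works because each kvm_s2_mmu holds a pointer to the arch member embedded in its owning struct kvm, so container_of() can walk back to the enclosing structure. A runnable userspace model of that pattern, using hypothetical cut-down structure definitions:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical, cut-down mirrors of the kernel structures. */
struct kvm_arch { int dummy; };
struct kvm { int id; struct kvm_arch arch; };
struct kvm_s2_mmu { struct kvm_arch *arch; };	/* points into its owner */

static struct kvm *s2_mmu_to_kvm(struct kvm_s2_mmu *mmu)
{
	return container_of(mmu->arch, struct kvm, arch);
}

int main(void)
{
	struct kvm vm = { .id = 42 };
	struct kvm_s2_mmu mmu = { .arch = &vm.arch };

	printf("owner id = %d\n", s2_mmu_to_kvm(&mmu)->id);
	return 0;
}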
344 static inline bool kvm_s2_mmu_valid(struct kvm_s2_mmu *mmu)
346 return !(mmu->tlb_vttbr & VTTBR_CNP_BIT);
349 static inline bool kvm_is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu)
352 * Be careful, mmu may not be fully initialised so don't look at
355 return &kvm->arch.mmu != mmu;
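
The last two helpers rely on cheap encodings: kvm_s2_mmu_valid() treats a tlb_vttbr with the CnP bit set as "not in use", and kvm_is_nested_s2_mmu() reduces to a pointer comparison against the VM's canonical &kvm->arch.mmu, which is safe even before the MMU's fields are initialised. A runnable userspace model of both checks, again with hypothetical cut-down structures:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define VTTBR_CNP_BIT (1ULL << 0)

/* Hypothetical, cut-down mirrors of the kernel structures. */
struct kvm_s2_mmu { uint64_t tlb_vttbr; };
struct kvm_arch { struct kvm_s2_mmu mmu; };
struct kvm { struct kvm_arch arch; };

/* Mirrors kvm_s2_mmu_valid(): a tlb_vttbr carrying the CnP bit is unused. */
static bool s2_mmu_valid(struct kvm_s2_mmu *mmu)
{
	return !(mmu->tlb_vttbr & VTTBR_CNP_BIT);
}

/* Mirrors kvm_is_nested_s2_mmu(): anything other than the canonical
 * &kvm->arch.mmu is a shadow stage-2 used for nested virtualisation. */
static bool is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu)
{
	return &kvm->arch.mmu != mmu;
}

int main(void)
{
	struct kvm vm = { .arch = { .mmu = { .tlb_vttbr = 0 } } };
	struct kvm_s2_mmu shadow = { .tlb_vttbr = VTTBR_CNP_BIT };

	printf("host mmu valid: %d\n", s2_mmu_valid(&vm.arch.mmu));
	printf("shadow valid:   %d\n", s2_mmu_valid(&shadow));
	printf("shadow nested:  %d\n", is_nested_s2_mmu(&vm, &shadow));
	return 0;
}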