Lines Matching full:vmid
In kvm_riscv_local_hfence_gvma_vmid_gpa() (vmid is an argument):
    21: void kvm_riscv_local_hfence_gvma_vmid_gpa(unsigned long vmid,
    28: kvm_riscv_local_hfence_gvma_vmid_all(vmid);
    36: : : "r" (pos >> 2), "r" (vmid) : "memory");
    41: : : "r" (pos >> 2), "r" (vmid) : "memory");

In kvm_riscv_local_hfence_gvma_vmid_all() (vmid is an argument):
    45: void kvm_riscv_local_hfence_gvma_vmid_all(unsigned long vmid)
    47: asm volatile(HFENCE_GVMA(zero, %0) : : "r" (vmid) : "memory");
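The two HFENCE.GVMA forms matched above differ only in rs1: x0 selects all guest physical addresses for the given VMID, while a non-zero rs1 carries a single guest physical address shifted right by two. A minimal sketch of both forms, assuming a toolchain that accepts the hypervisor-extension mnemonics directly (the kernel itself goes through the HFENCE_GVMA() insn-def macro for older binutils); the sketch_* names are hypothetical:

static inline void sketch_hfence_gvma_all(unsigned long vmid)
{
	/* rs1 = x0: flush G-stage TLB entries for every guest physical
	 * address that belongs to this VMID. */
	asm volatile ("hfence.gvma zero, %0" : : "r" (vmid) : "memory");
}

static inline void sketch_hfence_gvma_gpa(unsigned long vmid, unsigned long gpa)
{
	/* rs1 carries the guest physical address shifted right by 2,
	 * which is why the matched lines pass "pos >> 2". */
	asm volatile ("hfence.gvma %0, %1"
		      : : "r" (gpa >> 2), "r" (vmid) : "memory");
}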
In kvm_riscv_local_hfence_vvma_asid_gva() (vmid is an argument):
    78: void kvm_riscv_local_hfence_vvma_asid_gva(unsigned long vmid,
    87: kvm_riscv_local_hfence_vvma_asid_all(vmid, asid);
    91: hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

In kvm_riscv_local_hfence_vvma_asid_all() (vmid is an argument):
    108: void kvm_riscv_local_hfence_vvma_asid_all(unsigned long vmid,
    113: hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

In kvm_riscv_local_hfence_vvma_gva() (vmid is an argument):
    120: void kvm_riscv_local_hfence_vvma_gva(unsigned long vmid,
    127: kvm_riscv_local_hfence_vvma_all(vmid);
    131: hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

In kvm_riscv_local_hfence_vvma_all() (vmid is an argument):
    148: void kvm_riscv_local_hfence_vvma_all(unsigned long vmid)
    152: hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);
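All four VVMA helpers above share one pattern: HFENCE.VVMA operates on whatever VMID is currently programmed in hgatp, so the target VMID is swapped in, the fence is issued, and the previous hgatp value is restored. A minimal sketch of that pattern, assuming the kernel's csr_swap()/csr_write() accessors and the CSR_HGATP/HGATP_VMID_SHIFT definitions; the function name is hypothetical:

static inline void sketch_hfence_vvma_all(unsigned long vmid)
{
	unsigned long hgatp;

	/* Install the target VMID; the PPN field of hgatp is left zero
	 * for the duration of the fence, as in the matched lines. */
	hgatp = csr_swap(CSR_HGATP, vmid << HGATP_VMID_SHIFT);

	/* rs1 = x0, rs2 = x0: flush all VS-stage translations for the
	 * VMID now held in hgatp. */
	asm volatile ("hfence.vvma zero, zero" : : : "memory");

	/* Put the original hgatp back. */
	csr_write(CSR_HGATP, hgatp);
}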
In kvm_riscv_local_tlb_sanitize() (vmid is a local variable):
    161: unsigned long vmid;
    168: * On RISC-V platforms with hardware VMID support, we share same
    169: * VMID for all VCPUs of a particular Guest/VM. This means we might
    175: * entries by VMID whenever underlying Host CPU changes for a VCPU.
    178: vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid);
    179: kvm_riscv_local_hfence_gvma_vmid_all(vmid);
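The comment fragments above give the rationale: all VCPUs of a VM share one VMID, so a VCPU that moves to a different host CPU may find stale guest TLB entries left behind there, and the whole VMID is therefore flushed on that CPU. A minimal sketch of the sanitize step, with the "did the host CPU change for this VCPU" check left abstract and the function name hypothetical:

static void sketch_tlb_sanitize(struct kvm_vcpu *vcpu)
{
	unsigned long vmid;

	/* Flush every G-stage entry for this VM's VMID on the local
	 * hart, as the matched lines show. */
	vmid = READ_ONCE(vcpu->kvm->arch.vmid.vmid);
	kvm_riscv_local_hfence_gvma_vmid_all(vmid);
}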
In kvm_riscv_hfence_gvma_vmid_all_process() (vmid is a local variable):
    190: struct kvm_vmid *v = &vcpu->kvm->arch.vmid;
    191: unsigned long vmid = READ_ONCE(v->vmid);
    194: nacl_hfence_gvma_vmid_all(nacl_shmem(), vmid);
    196: kvm_riscv_local_hfence_gvma_vmid_all(vmid);

In kvm_riscv_hfence_vvma_all_process() (vmid is a local variable):
    201: struct kvm_vmid *v = &vcpu->kvm->arch.vmid;
    202: unsigned long vmid = READ_ONCE(v->vmid);
    205: nacl_hfence_vvma_all(nacl_shmem(), vmid);
    207: kvm_riscv_local_hfence_vvma_all(vmid);
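Both *_process() helpers above follow the same split: when SBI nested acceleration (NACL) can be used, the fence request is written into the NACL shared memory, otherwise the fence is executed directly on the local hart. A minimal sketch of that split; the availability predicate is an assumption based on the NACL helpers shown, not something visible in the matches, and the function name is hypothetical:

static void sketch_gvma_vmid_all_process(struct kvm_vcpu *vcpu)
{
	struct kvm_vmid *v = &vcpu->kvm->arch.vmid;
	unsigned long vmid = READ_ONCE(v->vmid);

	if (kvm_riscv_nacl_available())		/* assumed predicate */
		nacl_hfence_gvma_vmid_all(nacl_shmem(), vmid);
	else
		kvm_riscv_local_hfence_gvma_vmid_all(vmid);
}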
In kvm_riscv_hfence_process() (vmid is a local variable):
    261: unsigned long vmid;
    263: struct kvm_vmid *v = &vcpu->kvm->arch.vmid;
    270: vmid = READ_ONCE(v->vmid);
    272: nacl_hfence_gvma_vmid(nacl_shmem(), vmid,
    275: kvm_riscv_local_hfence_gvma_vmid_gpa(vmid, d.addr,
    280: vmid = READ_ONCE(v->vmid);
    282: nacl_hfence_vvma_asid(nacl_shmem(), vmid, d.asid,
    285: kvm_riscv_local_hfence_vvma_asid_gva(vmid, d.asid, d.addr,
    290: vmid = READ_ONCE(v->vmid);
    292: nacl_hfence_vvma_asid_all(nacl_shmem(), vmid, d.asid);
    294: kvm_riscv_local_hfence_vvma_asid_all(vmid, d.asid);
    298: vmid = READ_ONCE(v->vmid);
    300: nacl_hfence_vvma(nacl_shmem(), vmid,
    303: kvm_riscv_local_hfence_vvma_gva(vmid, d.addr,
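One detail worth noting in kvm_riscv_hfence_process(): the VMID is re-read with READ_ONCE() before every fence rather than cached once for the whole function, presumably because the VM-wide VMID can be refreshed while requests are still queued. A hypothetical helper just to illustrate the single-snapshot read; it is not a kernel function:

static inline unsigned long current_guest_vmid(struct kvm_vcpu *vcpu)
{
	/* A single volatile load of kvm->arch.vmid.vmid: each dequeued
	 * HFENCE request uses exactly this one snapshot of the VMID. */
	return READ_ONCE(vcpu->kvm->arch.vmid.vmid);
}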