Lines matching full:asid

The matches below come from the Arm SMMUv3 SVA code (arm-smmu-v3-sva.c), grouped by the function they appear in.

In arm_smmu_share_asid():

	/*
	 * Check if the CPU ASID is available on the SMMU side. If a private
	 * context descriptor is using it, try to replace it.
	 */
	arm_smmu_share_asid(struct mm_struct *mm, u16 asid)

	cd = xa_load(&arm_smmu_asid_xa, asid);

	/*
	 * Race with unmap: TLB invalidations will start targeting the new ASID,
	 * which isn't assigned yet. We'll do an invalidate-all on the old ASID
	 * later, so it doesn't matter.
	 */
	cd->asid = new_asid;
	/*
	 * Update ASID and invalidate CD in all associated masters. There will
	 * be some overlap between use of both ASIDs, until we invalidate the TLB.
	 */

	arm_smmu_tlb_inv_asid(smmu, asid);
	xa_erase(&arm_smmu_asid_xa, asid);
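
The matches show only the lines containing "asid"; the flow around them is easier to follow in full. A hedged reconstruction of the function from the upstream driver follows (the xa_alloc() limits and the CD-rewrite helper are from memory and vary between kernel versions):

	static struct arm_smmu_ctx_desc *
	arm_smmu_share_asid(struct mm_struct *mm, u16 asid)
	{
		int ret;
		u32 new_asid;
		struct arm_smmu_ctx_desc *cd;
		struct arm_smmu_device *smmu;
		struct arm_smmu_domain *smmu_domain;

		/* Nobody holds this ASID on the SMMU side: free to share. */
		cd = xa_load(&arm_smmu_asid_xa, asid);
		if (!cd)
			return NULL;

		if (cd->mm) {
			if (WARN_ON(cd->mm != mm))
				return ERR_PTR(-EINVAL);
			/* All devices bound to this mm use the same shared CD. */
			refcount_inc(&cd->refs);
			return cd;
		}

		smmu_domain = container_of(cd, struct arm_smmu_domain, cd);
		smmu = smmu_domain->smmu;

		/* Move the private context onto a freshly allocated ASID. */
		ret = xa_alloc(&arm_smmu_asid_xa, &new_asid, cd,
			       XA_LIMIT(1, (1 << smmu->asid_bits) - 1),
			       GFP_KERNEL);
		if (ret)
			return ERR_PTR(-ENOSPC);

		cd->asid = new_asid;
		/* Rewrite the CD in all masters (helper name varies by version). */
		arm_smmu_write_ctx_desc(smmu_domain, 0, cd);

		/* Flush TLB entries formed under the old ASID, then release it. */
		arm_smmu_tlb_inv_asid(smmu, asid);
		xa_erase(&arm_smmu_asid_xa, asid);
		return NULL;
	}

The final invalidate-all is what makes the "race with unmap" comment safe: an unmap that races with the switch targets new_asid, which has no TLB entries yet, and any stale entries under the old ASID are wiped by the closing flush.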
In arm_smmu_alloc_shared_cd():

	u16 asid;

	/* Don't free the mm until we release the ASID */

	asid = arm64_mm_context_get(mm);
	if (!asid) {

	ret = arm_smmu_share_asid(mm, asid);

	err = xa_insert(&arm_smmu_asid_xa, asid, cd, GFP_KERNEL);

	cd->asid = asid;
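
Taken in order, these matches trace the allocation path: grab the mm, pin its CPU ASID, make sure the SMMU side can use that ASID, then publish the new CD under it. A condensed sketch under the same caveats as above (the TCR/TTBR/MAIR setup is elided and error-label names may differ by version):

	static struct arm_smmu_ctx_desc *
	arm_smmu_alloc_shared_cd(struct mm_struct *mm)
	{
		u16 asid;
		int err = 0;
		struct arm_smmu_ctx_desc *cd;
		struct arm_smmu_ctx_desc *ret = NULL;

		/* Don't free the mm until we release the ASID */
		mmgrab(mm);

		/* Pin the CPU ASID so rollover cannot reassign it. */
		asid = arm64_mm_context_get(mm);
		if (!asid) {
			err = -ESRCH;
			goto out_drop_mm;
		}

		cd = kzalloc(sizeof(*cd), GFP_KERNEL);
		if (!cd) {
			err = -ENOMEM;
			goto out_put_context;
		}
		refcount_set(&cd->refs, 1);

		mutex_lock(&arm_smmu_asid_lock);
		/* Steal the ASID from a private context, or reuse a shared CD. */
		ret = arm_smmu_share_asid(mm, asid);
		if (ret) {
			mutex_unlock(&arm_smmu_asid_lock);
			goto out_free_cd;
		}
		err = xa_insert(&arm_smmu_asid_xa, asid, cd, GFP_KERNEL);
		mutex_unlock(&arm_smmu_asid_lock);
		if (err)
			goto out_free_asid;

		/* ... fill TCR/TTBR/MAIR for stage-1 walks of this mm ... */

		cd->asid = asid;
		cd->mm = mm;
		return cd;

	out_free_asid:
		arm_smmu_free_asid(cd);
	out_free_cd:
		kfree(cd);
	out_put_context:
		arm64_mm_context_put(mm);
	out_drop_mm:
		mmdrop(mm);
		return err < 0 ? ERR_PTR(err) : ret;
	}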
In arm_smmu_free_shared_cd():

	/* Unpin ASID */
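
The unpin is the release side of the arm64_mm_context_get() pin taken in arm_smmu_alloc_shared_cd(). A minimal sketch of the free path, assuming the upstream arm_smmu_free_asid() helper that drops the CD refcount and erases the xarray entry:

	static void arm_smmu_free_shared_cd(struct arm_smmu_ctx_desc *cd)
	{
		/* Only tear down once the last reference to the CD is gone. */
		if (arm_smmu_free_asid(cd)) {
			/* Unpin ASID */
			arm64_mm_context_put(cd->mm);
			/* Pairs with the mmgrab() in arm_smmu_alloc_shared_cd() */
			mmdrop(cd->mm);
			kfree(cd);
		}
	}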
In arm_smmu_mm_arch_invalidate_secondary_tlbs():

	arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_mn->cd->asid);
	arm_smmu_tlb_inv_range_asid(start, size, smmu_mn->cd->asid,
				    PAGE_SIZE, false, smmu_domain);
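
Both fragments are the tail ends of wrapped call sites in the mmu_notifier callback: an invalidate-all when no range is given, a ranged invalidate otherwise. A hedged sketch (the size fixups and the ATC invalidation that follows are version-dependent):

	static void
	arm_smmu_mm_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn,
						   struct mm_struct *mm,
						   unsigned long start,
						   unsigned long end)
	{
		struct arm_smmu_mmu_notifier *smmu_mn = mn_to_smmu(mn);
		struct arm_smmu_domain *smmu_domain = smmu_mn->domain;
		size_t size = end - start;

		if (!size)
			arm_smmu_tlb_inv_asid(smmu_domain->smmu,
					      smmu_mn->cd->asid);
		else
			arm_smmu_tlb_inv_range_asid(start, size,
						    smmu_mn->cd->asid,
						    PAGE_SIZE, false,
						    smmu_domain);
		/* ... followed by an ATC invalidation for the same range ... */
	}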
In arm_smmu_mm_release():

	arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_mn->cd->asid);
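
On mm exit the notifier must assume DMA is still in flight, so upstream points the CD at a "quiet" descriptor before flushing. A sketch under that assumption (quiet_cd and the cleared flag are upstream names; locking and the quiet-CD write are elided):

	static void arm_smmu_mm_release(struct mmu_notifier *mn, struct mm_struct *mm)
	{
		struct arm_smmu_mmu_notifier *smmu_mn = mn_to_smmu(mn);
		struct arm_smmu_domain *smmu_domain = smmu_mn->domain;

		if (smmu_mn->cleared)
			return;

		/*
		 * DMA may still be running. Keep the CD valid to avoid
		 * C_BAD_CD events, but disable translation.
		 */
		/* ... write quiet_cd for this PASID ... */

		arm_smmu_tlb_inv_asid(smmu_domain->smmu, smmu_mn->cd->asid);
		/* ... invalidate the ATC for the whole address space ... */

		smmu_mn->cleared = true;
	}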
In arm_smmu_mmu_notifier_put():

	arm_smmu_tlb_inv_asid(smmu_domain->smmu, cd->asid);
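
This last flush only runs when the notifier is torn down without having gone through arm_smmu_mm_release(); if cleared is set, the invalidation already happened and no new TLB entries can have formed. A minimal sketch of that guard:

	static void arm_smmu_mmu_notifier_put(struct arm_smmu_mmu_notifier *smmu_mn)
	{
		struct arm_smmu_ctx_desc *cd = smmu_mn->cd;
		struct arm_smmu_domain *smmu_domain = smmu_mn->domain;

		if (!refcount_dec_and_test(&smmu_mn->refs))
			return;

		/* Skip the flush if release already invalidated everything. */
		if (!smmu_mn->cleared)
			arm_smmu_tlb_inv_asid(smmu_domain->smmu, cd->asid);

		mmu_notifier_put(&smmu_mn->mn);	/* frees smmu_mn */
		arm_smmu_free_shared_cd(cd);
	}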