Searched refs:tlb_inval (Results 1 – 11 of 11) sorted by relevance
/linux/drivers/gpu/drm/xe/
xe_tlb_inval.c
    34  if (WARN_ON_ONCE(!fence->tlb_inval))    in xe_tlb_inval_fence_fini()
    37  xe_pm_runtime_put(fence->tlb_inval->xe);    in xe_tlb_inval_fence_fini()
    38  fence->tlb_inval = NULL; /* fini() should be called once */    in xe_tlb_inval_fence_fini()
    46  lockdep_assert_held(&fence->tlb_inval->pending_lock);    in xe_tlb_inval_fence_signal()
    49  trace_xe_tlb_inval_fence_signal(fence->tlb_inval->xe, fence);    in xe_tlb_inval_fence_signal()
    59  struct xe_tlb_inval *tlb_inval = fence->tlb_inval;    in xe_tlb_inval_fence_signal_unlocked() local
    61  spin_lock_irq(&tlb_inval->pending_lock);    in xe_tlb_inval_fence_signal_unlocked()
    63  spin_unlock_irq(&tlb_inval->pending_lock);    in xe_tlb_inval_fence_signal_unlocked()
    68  struct xe_tlb_inval *tlb_inval = container_of(work, struct xe_tlb_inval,    in xe_tlb_inval_fence_timeout() local
    70  struct xe_device *xe = tlb_inval->xe;    in xe_tlb_inval_fence_timeout()
    [all …]
|
xe_tlb_inval_job.c
    24  struct xe_tlb_inval *tlb_inval;    member
    57  prl_sa = xe_page_reclaim_create_prl_bo(job->tlb_inval, &job->prl, ifence);    in xe_tlb_inval_job_run()
    62  xe_tlb_inval_range(job->tlb_inval, ifence, job->start,    in xe_tlb_inval_job_run()
    98  xe_tlb_inval_job_create(struct xe_exec_queue *q, struct xe_tlb_inval *tlb_inval,    in xe_tlb_inval_job_create() argument
   117  job->tlb_inval = tlb_inval;    in xe_tlb_inval_job_create()
   269  xe_tlb_inval_fence_init(job->tlb_inval, ifence, false);    in xe_tlb_inval_job_push()
|
xe_exec_queue.c
   146  if (q->tlb_inval[i].dep_scheduler)    in __xe_exec_queue_free()
   147  xe_dep_scheduler_fini(q->tlb_inval[i].dep_scheduler);    in __xe_exec_queue_free()
   183  wq = gt->tlb_inval.job_wq;    in alloc_dep_schedulers()
   191  q->tlb_inval[i].dep_scheduler = dep_scheduler;    in alloc_dep_schedulers()
  1580  dma_fence_put(q->tlb_inval[type].last_fence);    in xe_exec_queue_tlb_inval_last_fence_put_unlocked()
  1581  q->tlb_inval[type].last_fence = NULL;    in xe_exec_queue_tlb_inval_last_fence_put_unlocked()
  1606  if (q->tlb_inval[type].last_fence &&    in xe_exec_queue_tlb_inval_last_fence_get()
  1608  &q->tlb_inval[type].last_fence->flags))    in xe_exec_queue_tlb_inval_last_fence_get()
  1611  fence = q->tlb_inval[type].last_fence ?: dma_fence_get_stub();    in xe_exec_queue_tlb_inval_last_fence_get()
  1640  q->tlb_inval[type].last_fence = dma_fence_get(fence);    in xe_exec_queue_tlb_inval_last_fence_set()
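
The xe_exec_queue.c hits above outline a small caching scheme: each exec queue keeps the most recent TLB-invalidation fence per invalidation type, drops it once it has signalled, and hands out a stub when nothing is pending. Below is a minimal sketch of that pattern using only core dma-fence helpers; the slot struct and function names are hypothetical, and only the field accesses mirror the snippets.

#include <linux/dma-fence.h>

/* Hypothetical stand-in for one q->tlb_inval[type] slot from the hits above. */
struct tlb_inval_slot {
	struct dma_fence *last_fence;
};

/* Drop the cached reference (mirrors the *_last_fence_put_unlocked() hits). */
static void slot_last_fence_put(struct tlb_inval_slot *slot)
{
	dma_fence_put(slot->last_fence);	/* dma_fence_put() tolerates NULL */
	slot->last_fence = NULL;
}

/*
 * Return a reference to the newest pending invalidation fence, or an
 * already-signalled stub when none is pending (mirrors *_last_fence_get()).
 */
static struct dma_fence *slot_last_fence_get(struct tlb_inval_slot *slot)
{
	if (slot->last_fence &&
	    test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &slot->last_fence->flags))
		slot_last_fence_put(slot);	/* stale: it already signalled */

	if (slot->last_fence)
		return dma_fence_get(slot->last_fence);

	return dma_fence_get_stub();		/* shared, permanently signalled stub */
}

/* Remember the latest fence issued for this slot (mirrors *_last_fence_set()). */
static void slot_last_fence_set(struct tlb_inval_slot *slot, struct dma_fence *fence)
{
	slot_last_fence_put(slot);
	slot->last_fence = dma_fence_get(fence);
}

Callers would order new work after the fence returned by slot_last_fence_get() and publish the fence of the invalidation they just issued via slot_last_fence_set(), which is the shape suggested by the get/set pairs in the hits.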
|
xe_guc_tlb_inval.h
    15  struct xe_tlb_inval *tlb_inval);
|
xe_exec_queue_types.h
   212  } tlb_inval[XE_EXEC_QUEUE_TLB_INVAL_COUNT];    member
|
xe_lmtt.c
   249  xe_tlb_inval_fence_init(&gt->tlb_inval, fence, true);    in lmtt_invalidate_hw()
   250  err = xe_tlb_inval_all(&gt->tlb_inval, fence);    in lmtt_invalidate_hw()
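
The xe_lmtt.c hits show the simplest call sequence in these results: initialise a fence against a GT's invalidation state, then request a full invalidation. A minimal sketch of that shape follows, assuming an on-stack fence (suggested by the `true` argument) and a caller that waits and handles errors elsewhere; the wait helper is not visible in these results, so it is left as a comment.

/* Sketch only: full TLB invalidation on one GT, modelled on the lmtt_invalidate_hw() hits. */
static int gt_full_inval_sketch(struct xe_gt *gt)
{
	struct xe_tlb_inval_fence fence;	/* fence type name assumed from the hits */
	int err;

	/* the third argument is what the hit passes; presumably "fence lives on the stack" */
	xe_tlb_inval_fence_init(&gt->tlb_inval, &fence, true);

	err = xe_tlb_inval_all(&gt->tlb_inval, &fence);

	/* a real caller would wait on the fence here before it goes out of scope */
	return err;
}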
|
xe_gt.c
   903  xe_tlb_inval_reset(&gt->tlb_inval);    in gt_reset_worker()
  1141  xe_tlb_inval_reset(&gt->tlb_inval);    in xe_gt_declare_wedged()
|
xe_pt.c
  2501  return q->tlb_inval[XE_EXEC_QUEUE_TLB_INVAL_MEDIA_GT].dep_scheduler;    in to_dep_scheduler()
  2503  return q->tlb_inval[XE_EXEC_QUEUE_TLB_INVAL_PRIMARY_GT].dep_scheduler;    in to_dep_scheduler()
  2556  ijob = xe_tlb_inval_job_create(q, &tile->primary_gt->tlb_inval,    in xe_pt_update_ops_run()
  2582  &tile->media_gt->tlb_inval,    in xe_pt_update_ops_run()
|
xe_vm.c
  1720  xe_tlb_inval_vm(&gt->tlb_inval, vm);    in xe_vm_close()
  3983  xe_tlb_inval_fence_init(&tile->primary_gt->tlb_inval,    in xe_vm_range_tilemask_tlb_inval()
  3986  err = xe_tlb_inval_range(&tile->primary_gt->tlb_inval,    in xe_vm_range_tilemask_tlb_inval()
  3996  xe_tlb_inval_fence_init(&tile->media_gt->tlb_inval,    in xe_vm_range_tilemask_tlb_inval()
  3999  err = xe_tlb_inval_range(&tile->media_gt->tlb_inval,    in xe_vm_range_tilemask_tlb_inval()
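
The xe_vm.c hits repeat the same init-then-invalidate pairing once for the primary GT and once for the media GT of a tile, but with a range-based request. A sketch of that per-tile loop is below; the trailing parameters of xe_tlb_inval_range() are truncated in the hits, so the end address and ASID arguments here, like the fence handling, are assumptions for illustration only.

/* Sketch only: invalidate a VA range on both GTs of a tile, modelled on the
 * xe_vm_range_tilemask_tlb_inval() hits.  Only the (tlb_inval, fence, start)
 * prefix of each xe_tlb_inval_range() call is grounded in the results. */
static int tile_range_inval_sketch(struct xe_tile *tile, u64 start, u64 end, u32 asid)
{
	struct xe_gt *gts[] = { tile->primary_gt, tile->media_gt };
	struct xe_tlb_inval_fence fence[ARRAY_SIZE(gts)];
	int err = 0, i;

	for (i = 0; i < ARRAY_SIZE(gts); i++) {
		if (!gts[i])		/* not every tile has a media GT */
			continue;

		xe_tlb_inval_fence_init(&gts[i]->tlb_inval, &fence[i], true);
		err = xe_tlb_inval_range(&gts[i]->tlb_inval, &fence[i],
					 start, end, asid);	/* trailing args assumed */
		if (err)
			break;
	}

	/* the real code waits on every issued fence before returning */
	return err;
}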
|
xe_ggtt.c
   518  err = xe_tlb_inval_ggtt(&gt->tlb_inval);    in ggtt_invalidate_gt_tlb()
|
xe_gt_sriov_vf.c
  1242  xe_tlb_inval_reset(&gt->tlb_inval);    in vf_post_migration_shutdown()
|