Lines matching full:w (identifier search: uses of the local variable w in the i915 clear-pages code)
117 struct clear_pages_work *w = container_of(fence, typeof(*w), dma); in clear_pages_work_release() local
119 destroy_sleeve(w->sleeve); in clear_pages_work_release()
121 i915_sw_fence_fini(&w->wait); in clear_pages_work_release()
123 BUILD_BUG_ON(offsetof(typeof(*w), dma)); in clear_pages_work_release()
124 dma_fence_free(&w->dma); in clear_pages_work_release()
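These first hits are the fence release callback. A reconstruction of the surrounding function, closely following the matched lines; the comments are inferred rather than shown by the search, and destroy_sleeve() is a local helper visible only through the hit on line 119:

static void clear_pages_work_release(struct dma_fence *fence)
{
        /* Recover the work item from its embedded dma_fence. */
        struct clear_pages_work *w = container_of(fence, typeof(*w), dma);

        destroy_sleeve(w->sleeve);      /* free the vma/pages sleeve */

        i915_sw_fence_fini(&w->wait);   /* tear down the embedded sw fence */

        /*
         * dma_fence_free() releases only the dma_fence allocation, so the
         * build-time assert guarantees 'dma' sits at offset 0 and the free
         * therefore covers the whole struct clear_pages_work.
         */
        BUILD_BUG_ON(offsetof(typeof(*w), dma));
        dma_fence_free(&w->dma);
}

The BUILD_BUG_ON(offsetof(...)) idiom fails compilation if the offset is nonzero, which is what makes freeing the whole work item through &w->dma safe.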
135 struct clear_pages_work *w = container_of(work, typeof(*w), irq_work); in clear_pages_signal_irq_worker() local
137 dma_fence_signal(&w->dma); in clear_pages_signal_irq_worker()
138 dma_fence_put(&w->dma); in clear_pages_signal_irq_worker()
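The irq_work handler is where the exported fence is finally signalled. A minimal sketch, assuming irq_work is the struct irq_work member named in the container_of() above:

static void clear_pages_signal_irq_worker(struct irq_work *work)
{
        struct clear_pages_work *w = container_of(work, typeof(*w), irq_work);

        dma_fence_signal(&w->dma);      /* complete the exported fence */
        dma_fence_put(&w->dma);         /* drop the reference held for signalling */
}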
144 struct clear_pages_work *w = container_of(cb, typeof(*w), cb); in clear_pages_dma_fence_cb() local
147 dma_fence_set_error(&w->dma, fence->error); in clear_pages_dma_fence_cb()
153 irq_work_queue(&w->irq_work); in clear_pages_dma_fence_cb()
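This dma_fence callback runs under the signalling fence's spinlock, so it cannot safely signal w->dma inline; instead it records any error and punts the signal to the irq_work above. A sketch, where the if (fence->error) guard is an assumption consistent with dma_fence_set_error() only accepting a negative errno:

static void clear_pages_dma_fence_cb(struct dma_fence *fence,
                                     struct dma_fence_cb *cb)
{
        struct clear_pages_work *w = container_of(cb, typeof(*w), cb);

        /* Propagate a failed request's error to the exported fence. */
        if (fence->error)
                dma_fence_set_error(&w->dma, fence->error);

        /* Defer the signal to irq_work to stay out of the fence spinlock. */
        irq_work_queue(&w->irq_work);
}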
158 struct clear_pages_work *w = container_of(work, typeof(*w), work); in clear_pages_worker() local
159 struct drm_i915_gem_object *obj = w->sleeve->vma->obj; in clear_pages_worker()
160 struct i915_vma *vma = w->sleeve->vma; in clear_pages_worker()
164 int err = w->dma.error; in clear_pages_worker()
171 drm_clflush_sg(w->sleeve->pages); in clear_pages_worker()
178 intel_engine_pm_get(w->ce->engine); in clear_pages_worker()
180 err = intel_context_pin_ww(w->ce, &ww); in clear_pages_worker()
184 batch = intel_emit_vma_fill_blt(w->ce, vma, &ww, w->value); in clear_pages_worker()
190 rq = i915_request_create(w->ce); in clear_pages_worker()
197 if (dma_fence_add_callback(&rq->fence, &w->cb, in clear_pages_worker()
205 if (w->ce->engine->emit_init_breadcrumb) { in clear_pages_worker()
206 err = w->ce->engine->emit_init_breadcrumb(rq); in clear_pages_worker()
212 * w->dma is already exported via (vma|obj)->resv we need only in clear_pages_worker()
214 * propagate the signal from the request to w->dma. in clear_pages_worker()
220 err = w->ce->engine->emit_bb_start(rq, in clear_pages_worker()
231 intel_emit_vma_release(w->ce, batch); in clear_pages_worker()
233 intel_context_unpin(w->ce); in clear_pages_worker()
242 i915_vma_unpin(w->sleeve->vma); in clear_pages_worker()
243 intel_engine_pm_put(w->ce->engine); in clear_pages_worker()
246 dma_fence_set_error(&w->dma, err); in clear_pages_worker()
247 dma_fence_signal(&w->dma); in clear_pages_worker()
248 dma_fence_put(&w->dma); in clear_pages_worker()
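The clear_pages_worker() hits trace a complete blitter submission: flush CPU caches, wake the engine, pin the context, emit a fill batch, create a request, chain w->dma to the request fence, then emit the breadcrumb and batch start. A condensed sketch of that flow; the ww backoff/retry on -EDEADLK is elided, and i915_request_set_error_once(), i915_request_add() and the emit_bb_start() arguments are assumptions from the usual i915 submission pattern rather than from the hits:

static void clear_pages_worker(struct work_struct *work)
{
        struct clear_pages_work *w = container_of(work, typeof(*w), work);
        struct drm_i915_gem_object *obj = w->sleeve->vma->obj;
        struct i915_vma *vma = w->sleeve->vma;
        struct i915_gem_ww_ctx ww;
        struct i915_request *rq;
        struct i915_vma *batch;
        int err = w->dma.error;

        if (unlikely(err))      /* the async wait already failed: just report it */
                goto out_signal;

        if (obj->cache_dirty)
                drm_clflush_sg(w->sleeve->pages);       /* flush stale CPU cachelines */

        i915_gem_ww_ctx_init(&ww, false);
        intel_engine_pm_get(w->ce->engine);     /* keep the engine awake */

        err = intel_context_pin_ww(w->ce, &ww);
        if (err)
                goto out_pm;

        /* Build a batch buffer that fills the vma with w->value. */
        batch = intel_emit_vma_fill_blt(w->ce, vma, &ww, w->value);
        if (IS_ERR(batch)) {
                err = PTR_ERR(batch);
                goto out_ctx;
        }

        rq = i915_request_create(w->ce);
        if (IS_ERR(rq)) {
                err = PTR_ERR(rq);
                goto out_batch;
        }

        /* Hand signalling over to the request; see clear_pages_dma_fence_cb(). */
        dma_fence_add_callback(&rq->fence, &w->cb, clear_pages_dma_fence_cb);

        if (w->ce->engine->emit_init_breadcrumb)
                err = w->ce->engine->emit_init_breadcrumb(rq);

        /*
         * w->dma is already exported via (vma|obj)->resv we need only
         * propagate the signal from the request to w->dma.
         */
        if (!err)
                err = w->ce->engine->emit_bb_start(rq, batch->node.start,
                                                   batch->node.size, 0);
        if (err)
                i915_request_set_error_once(rq, err);   /* reaches w->dma via the cb */
        i915_request_add(rq);
        err = 0;        /* from here the request owns signalling of w->dma */

out_batch:
        intel_emit_vma_release(w->ce, batch);
out_ctx:
        intel_context_unpin(w->ce);
out_pm:
        i915_gem_ww_ctx_fini(&ww);
        i915_vma_unpin(w->sleeve->vma);
        intel_engine_pm_put(w->ce->engine);
out_signal:
        if (unlikely(err)) {
                dma_fence_set_error(&w->dma, err);
                dma_fence_signal(&w->dma);
                dma_fence_put(&w->dma);
        }
}

On success, err is cleared once the request is submitted, so the out_signal block at the end (the hits on lines 246-248) only fires on paths where the request callback was never installed.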
252 static int pin_wait_clear_pages_work(struct clear_pages_work *w, in pin_wait_clear_pages_work() argument
255 struct i915_vma *vma = w->sleeve->vma; in pin_wait_clear_pages_work()
269 err = i915_sw_fence_await_reservation(&w->wait, in pin_wait_clear_pages_work()
275 dma_resv_add_excl_fence(vma->obj->base.resv, &w->dma); in pin_wait_clear_pages_work()
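pin_wait_clear_pages_work() sets up the asynchronous wait: pin the vma, queue w->wait behind every fence already tracked in the object's reservation, then publish w->dma as the new exclusive fence so later users of the object order behind the clear. A sketch with the locking/pinning retry loop elided; the exclude, write, timeout and gfp arguments to i915_sw_fence_await_reservation() are assumptions:

static int pin_wait_clear_pages_work(struct clear_pages_work *w,
                                     struct intel_context *ce)
{
        struct i915_vma *vma = w->sleeve->vma;
        int err;

        /* ... lock vma->obj and pin the vma (ww retry loop elided) ... */

        /*
         * Queue w->wait behind every fence currently tracked in the
         * object's reservation; clear_pages_work_notify() fires once
         * they have all signalled.
         */
        err = i915_sw_fence_await_reservation(&w->wait,
                                              vma->obj->base.resv, NULL,
                                              true, 0, I915_FENCE_GFP);
        if (err)
                goto err_unpin;

        /* Publish w->dma so later users of the object wait for the clear. */
        dma_resv_add_excl_fence(vma->obj->base.resv, &w->dma);
        return 0;

err_unpin:
        i915_vma_unpin(vma);
        return err;
}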
294 struct clear_pages_work *w = container_of(fence, typeof(*w), wait); in clear_pages_work_notify() local
298 schedule_work(&w->work); in clear_pages_work_notify()
302 dma_fence_put(&w->dma); in clear_pages_work_notify()
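clear_pages_work_notify() is the i915_sw_fence notify callback: when the awaited reservation fences complete it schedules the worker, and when the sw fence is freed it drops the reference on w->dma. A sketch following the matched lines, with the switch structure assumed from the standard i915_sw_fence_notify contract (FENCE_COMPLETE / FENCE_FREE):

static int __i915_sw_fence_call
clear_pages_work_notify(struct i915_sw_fence *fence,
                        enum i915_sw_fence_notify state)
{
        struct clear_pages_work *w = container_of(fence, typeof(*w), wait);

        switch (state) {
        case FENCE_COMPLETE:
                /* Every awaited fence has signalled: run the clear. */
                schedule_work(&w->work);
                break;

        case FENCE_FREE:
                dma_fence_put(&w->dma); /* drop the sw fence's reference */
                break;
        }

        return NOTIFY_DONE;
}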