
Searched refs:sched_job (Results 1 – 21 of 21) sorted by relevance

/linux/drivers/gpu/drm/scheduler/
gpu_scheduler_trace.h
58 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
59 TP_ARGS(sched_job, entity),
61 __string(name, sched_job->sched->name)
64 __string(dev, dev_name(sched_job->sched->dev))
74 &sched_job->sched->credit_count);
76 __entry->fence_context = sched_job->s_fence->finished.context;
77 __entry->fence_seqno = sched_job->s_fence->finished.seqno;
78 __entry->client_id = sched_job->s_fence->drm_client_id;
87 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
88 TP_ARGS(sched_job, entity),
[all...]
sched_entity.c
456 struct drm_sched_job *sched_job; in drm_sched_entity_pop_job() local
458 sched_job = drm_sched_entity_queue_peek(entity); in drm_sched_entity_pop_job()
459 if (!sched_job) in drm_sched_entity_pop_job()
463 drm_sched_job_dependency(sched_job, entity))) { in drm_sched_entity_pop_job()
465 trace_drm_sched_job_unschedulable(sched_job, entity->dependency); in drm_sched_entity_pop_job()
472 dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED); in drm_sched_entity_pop_job()
476 dma_fence_get(&sched_job->s_fence->finished)); in drm_sched_entity_pop_job()
512 sched_job->entity = NULL; in drm_sched_entity_pop_job()
514 return sched_job; in drm_sched_entity_pop_job()
560 * @sched_job: job to submit
567 drm_sched_entity_push_job(struct drm_sched_job * sched_job) drm_sched_entity_push_job() argument
[all...]
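
The sched_entity.c hits above are the pop side: drm_sched_entity_pop_job() dequeues what a driver previously handed over with drm_sched_entity_push_job(). As a hedged sketch of the push side, assuming a hypothetical driver job struct my_job that embeds struct drm_sched_job as "base" and has already been through drm_sched_job_init() (whose exact signature has varied across kernel versions):

	/* Arm the job (this allocates the scheduled/finished fences seen
	 * as s_fence in the hits above), take a reference on the finished
	 * fence to hand back to userspace, then queue the job on the
	 * entity. my_job and done are hypothetical names. */
	struct dma_fence *done;

	drm_sched_job_arm(&my_job->base);
	done = dma_fence_get(&my_job->base.s_fence->finished);
	drm_sched_entity_push_job(&my_job->base);
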
/linux/drivers/gpu/drm/etnaviv/
etnaviv_sched.c
21 static struct dma_fence *etnaviv_sched_run_job(struct drm_sched_job *sched_job) in etnaviv_sched_run_job() argument
23 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_run_job()
26 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
35 *sched_job) in etnaviv_sched_timedout_job()
37 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_timedout_job()
77 drm_sched_stop(&gpu->sched, sched_job); in etnaviv_sched_timedout_job()
79 if (sched_job) in etnaviv_sched_timedout_job()
80 drm_sched_increase_karma(sched_job); in etnaviv_sched_timedout_job()
92 static void etnaviv_sched_free_job(struct drm_sched_job *sched_job) in etnaviv_sched_free_job() argument
94 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_free_job()
34 etnaviv_sched_timedout_job(struct drm_sched_job * sched_job) etnaviv_sched_timedout_job() argument
[all...]
etnaviv_sched.h
14 struct etnaviv_gem_submit *to_etnaviv_submit(struct drm_sched_job *sched_job) in to_etnaviv_submit() argument
16 return container_of(sched_job, struct etnaviv_gem_submit, sched_job); in to_etnaviv_submit()
etnaviv_gem_submit.c
188 ret = drm_sched_job_add_implicit_dependencies(&submit->sched_job, in submit_fence_sync()
535 ret = drm_sched_job_init(&submit->sched_job, in etnaviv_ioctl_gem_submit()
559 ret = drm_sched_job_add_dependency(&submit->sched_job, in etnaviv_ioctl_gem_submit()
607 * the sched_job must not be cleaned up. in etnaviv_ioctl_gem_submit()
619 drm_sched_job_cleanup(&submit->sched_job); in etnaviv_ioctl_gem_submit()
etnaviv_gem.h
93 struct drm_sched_job sched_job; member
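
The etnaviv hits show the idiom nearly every driver in these results uses: embed struct drm_sched_job by value in the driver's own job struct (etnaviv_gem.h line 93) and recover the outer struct with container_of() inside the scheduler callbacks (etnaviv_sched.h line 14). A minimal sketch of the same shape, with hypothetical names:

#include <drm/gpu_scheduler.h>

struct my_job {
	struct drm_sched_job base;	/* must be embedded, not a pointer */
	/* driver-specific submit state ... */
};

static inline struct my_job *to_my_job(struct drm_sched_job *sched_job)
{
	return container_of(sched_job, struct my_job, base);
}
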
/linux/drivers/gpu/drm/v3d/
v3d_sched.c
33 to_v3d_job(struct drm_sched_job *sched_job) in to_v3d_job() argument
35 return container_of(sched_job, struct v3d_job, base); in to_v3d_job()
39 to_bin_job(struct drm_sched_job *sched_job) in to_bin_job() argument
41 return container_of(sched_job, struct v3d_bin_job, base.base); in to_bin_job()
45 to_render_job(struct drm_sched_job *sched_job) in to_render_job() argument
47 return container_of(sched_job, struct v3d_render_job, base.base); in to_render_job()
51 to_tfu_job(struct drm_sched_job *sched_job) in to_tfu_job() argument
53 return container_of(sched_job, struct v3d_tfu_job, base.base); in to_tfu_job()
57 to_csd_job(struct drm_sched_job *sched_job) in to_csd_job() argument
59 return container_of(sched_job, struc in to_csd_job()
63 to_cpu_job(struct drm_sched_job * sched_job) to_cpu_job() argument
69 v3d_sched_job_free(struct drm_sched_job * sched_job) v3d_sched_job_free() argument
107 v3d_cpu_job_free(struct drm_sched_job * sched_job) v3d_cpu_job_free() argument
225 v3d_bin_job_run(struct drm_sched_job * sched_job) v3d_bin_job_run() argument
285 v3d_render_job_run(struct drm_sched_job * sched_job) v3d_render_job_run() argument
333 v3d_tfu_job_run(struct drm_sched_job * sched_job) v3d_tfu_job_run() argument
380 v3d_csd_job_run(struct drm_sched_job * sched_job) v3d_csd_job_run() argument
685 v3d_cpu_job_run(struct drm_sched_job * sched_job) v3d_cpu_job_run() argument
707 v3d_cache_clean_job_run(struct drm_sched_job * sched_job) v3d_cache_clean_job_run() argument
722 v3d_gpu_reset_for_timeout(struct v3d_dev * v3d,struct drm_sched_job * sched_job) v3d_gpu_reset_for_timeout() argument
752 v3d_cl_job_timedout(struct drm_sched_job * sched_job,enum v3d_queue q,u32 * timedout_ctca,u32 * timedout_ctra) v3d_cl_job_timedout() argument
776 v3d_bin_job_timedout(struct drm_sched_job * sched_job) v3d_bin_job_timedout() argument
785 v3d_render_job_timedout(struct drm_sched_job * sched_job) v3d_render_job_timedout() argument
794 v3d_generic_job_timedout(struct drm_sched_job * sched_job) v3d_generic_job_timedout() argument
802 v3d_csd_job_timedout(struct drm_sched_job * sched_job) v3d_csd_job_timedout() argument
[all...]
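
v3d layers the same idiom one level deeper: each queue-specific job embeds a common struct v3d_job (which itself embeds drm_sched_job), so the casts above use container_of(sched_job, ..., base.base). A hedged sketch of that two-level shape, again with hypothetical names:

struct my_base_job {
	struct drm_sched_job base;	/* shared scheduler state */
	/* state common to all queues ... */
};

struct my_bin_job {
	struct my_base_job base;	/* drm_sched_job ends up at base.base */
	/* binner-queue-specific state ... */
};

static struct my_bin_job *to_my_bin_job(struct drm_sched_job *sched_job)
{
	return container_of(sched_job, struct my_bin_job, base.base);
}
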
/linux/drivers/gpu/drm/scheduler/tests/
mock_scheduler.c
164 static struct dma_fence *mock_sched_run_job(struct drm_sched_job *sched_job) in mock_sched_run_job() argument
167 drm_sched_to_mock_sched(sched_job->sched); in mock_sched_run_job()
168 struct drm_mock_sched_job *job = drm_sched_job_to_mock_job(sched_job); in mock_sched_run_job()
215 mock_sched_timedout_job(struct drm_sched_job *sched_job) in mock_sched_timedout_job() argument
217 struct drm_mock_scheduler *sched = drm_sched_to_mock_sched(sched_job->sched); in mock_sched_timedout_job()
218 struct drm_mock_sched_job *job = drm_sched_job_to_mock_job(sched_job); in mock_sched_timedout_job()
236 drm_sched_job_cleanup(sched_job); in mock_sched_timedout_job()
242 static void mock_sched_free_job(struct drm_sched_job *sched_job) in mock_sched_free_job() argument
244 struct drm_mock_sched_job *job = drm_sched_job_to_mock_job(sched_job); in mock_sched_free_job()
247 drm_sched_job_cleanup(sched_job); in mock_sched_free_job()
252 mock_sched_cancel_job(struct drm_sched_job * sched_job) mock_sched_cancel_job() argument
[all...]
sched_tests.h
127 drm_sched_job_to_mock_job(struct drm_sched_job *sched_job) in drm_sched_job_to_mock_job() argument
129 return container_of(sched_job, struct drm_mock_sched_job, base); in drm_sched_job_to_mock_job()
/linux/include/drm/
gpu_scheduler.h
423 struct dma_fence *(*prepare_job)(struct drm_sched_job *sched_job,
430 * @sched_job: the job to run
458 struct dma_fence *(*run_job)(struct drm_sched_job *sched_job);
464 * @sched_job: The job that has timed out
511 enum drm_gpu_sched_stat (*timedout_job)(struct drm_sched_job *sched_job);
517 void (*free_job)(struct drm_sched_job *sched_job);
535 void (*cancel_job)(struct drm_sched_job *sched_job);
660 void drm_sched_entity_push_job(struct drm_sched_job *sched_job);
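
The gpu_scheduler.h hits above are the drm_sched_backend_ops callbacks that every driver in these results implements. As a hedged sketch of wiring them up, reusing the hypothetical to_my_job() helper from earlier: my_hw_submit() is an invented placeholder, and the enum drm_gpu_sched_stat value returned from .timedout_job has been renamed across kernel versions, so treat this as a shape, not a complete driver:

static struct dma_fence *my_run_job(struct drm_sched_job *sched_job)
{
	/* Kick the job off on the hardware and return the fence that
	 * signals completion (or an ERR_PTR on failure). */
	return my_hw_submit(to_my_job(sched_job));	/* hypothetical */
}

static enum drm_gpu_sched_stat my_timedout_job(struct drm_sched_job *sched_job)
{
	/* Stop the scheduler, reset the hardware, resubmit or cancel;
	 * compare the etnaviv and nouveau timedout handlers above. */
	return DRM_GPU_SCHED_STAT_NOMINAL;	/* name varies by version */
}

static void my_free_job(struct drm_sched_job *sched_job)
{
	drm_sched_job_cleanup(sched_job);	/* release scheduler fences */
	kfree(to_my_job(sched_job));
}

static const struct drm_sched_backend_ops my_sched_ops = {
	.run_job	= my_run_job,
	.timedout_job	= my_timedout_job,
	.free_job	= my_free_job,
};
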
/linux/drivers/gpu/drm/nouveau/
nouveau_sched.c
361 nouveau_sched_run_job(struct drm_sched_job *sched_job) in nouveau_sched_run_job() argument
363 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_run_job()
369 nouveau_sched_timedout_job(struct drm_sched_job *sched_job) in nouveau_sched_timedout_job() argument
371 struct drm_gpu_scheduler *sched = sched_job->sched; in nouveau_sched_timedout_job()
372 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_timedout_job()
375 drm_sched_stop(sched, sched_job); in nouveau_sched_timedout_job()
388 nouveau_sched_free_job(struct drm_sched_job *sched_job) in nouveau_sched_free_job() argument
390 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_free_job()
396 nouveau_sched_cancel_job(struct drm_sched_job *sched_job) in nouveau_sched_cancel_job() argument
401 job = to_nouveau_job(sched_job); in nouveau_sched_cancel_job()
[all...]
nouveau_sched.h
13 #define to_nouveau_job(sched_job) \ argument
14 container_of((sched_job), struct nouveau_job, base)
/linux/include/trace/events/
amdxdna.h
33 TP_PROTO(struct drm_sched_job *sched_job, const char *name, const char *str, u64 seq),
35 TP_ARGS(sched_job, name, str, seq),
45 __entry->fence_context = sched_job->s_fence->finished.context;
46 __entry->fence_seqno = sched_job->s_fence->finished.seqno;
/linux/drivers/accel/amdxdna/
aie2_ctx.c
296 aie2_sched_job_run(struct drm_sched_job *sched_job) in aie2_sched_job_run() argument
298 struct amdxdna_sched_job *job = drm_job_to_xdna_job(sched_job); in aie2_sched_job_run()
331 trace_xdna_job(sched_job, hwctx->name, "sent to device", job->seq); in aie2_sched_job_run()
336 static void aie2_sched_job_free(struct drm_sched_job *sched_job) in aie2_sched_job_free() argument
338 struct amdxdna_sched_job *job = drm_job_to_xdna_job(sched_job); in aie2_sched_job_free()
341 trace_xdna_job(sched_job, hwctx->name, "job free", job->seq); in aie2_sched_job_free()
345 drm_sched_job_cleanup(sched_job); in aie2_sched_job_free()
350 aie2_sched_job_timedout(struct drm_sched_job *sched_job) in aie2_sched_job_timedout() argument
352 struct amdxdna_sched_job *job = drm_job_to_xdna_job(sched_job); in aie2_sched_job_timedout()
357 trace_xdna_job(sched_job, hwct in aie2_sched_job_timedout()
[all...]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.h
39 #define to_amdgpu_job(sched_job) \ argument
40 container_of((sched_job), struct amdgpu_job, base)
amdgpu_job.c
340 amdgpu_job_prepare_job(struct drm_sched_job *sched_job, in amdgpu_job_prepare_job() argument
344 struct amdgpu_job *job = to_amdgpu_job(sched_job); in amdgpu_job_prepare_job()
378 static struct dma_fence *amdgpu_job_run(struct drm_sched_job *sched_job) in amdgpu_job_run() argument
380 struct amdgpu_ring *ring = to_amdgpu_ring(sched_job->sched); in amdgpu_job_run()
386 job = to_amdgpu_job(sched_job); in amdgpu_job_run()
amdgpu_trace.h
546 TP_PROTO(struct amdgpu_job *sched_job, struct dma_fence *fence),
547 TP_ARGS(sched_job, fence),
549 __string(ring, sched_job->base.sched->name)
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c
2201 panthor_vm_bind_run_job(struct drm_sched_job *sched_job) in panthor_vm_bind_run_job() argument
2203 struct panthor_vm_bind_job *job = container_of(sched_job, struct panthor_vm_bind_job, base); in panthor_vm_bind_run_job()
2233 * @sched_job: Job to release the reference on.
2235 void panthor_vm_bind_job_put(struct drm_sched_job *sched_job) in panthor_vm_bind_job_put() argument
2238 container_of(sched_job, struct panthor_vm_bind_job, base); in panthor_vm_bind_job_put()
2240 if (sched_job) in panthor_vm_bind_job_put()
2245 panthor_vm_bind_free_job(struct drm_sched_job *sched_job) in panthor_vm_bind_free_job() argument
2248 container_of(sched_job, struct panthor_vm_bind_job, base); in panthor_vm_bind_free_job()
2250 drm_sched_job_cleanup(sched_job); in panthor_vm_bind_free_job()
2259 panthor_vm_bind_timedout_job(struct drm_sched_job *sched_job) in panthor_vm_bind_timedout_job() argument
2526 panthor_vm_bind_job_prepare_resvs(struct drm_exec * exec,struct drm_sched_job * sched_job) panthor_vm_bind_job_prepare_resvs() argument
2552 panthor_vm_bind_job_update_resvs(struct drm_exec * exec,struct drm_sched_job * sched_job) panthor_vm_bind_job_update_resvs() argument
[all...]
panthor_sched.c
3113 queue_run_job(struct drm_sched_job *sched_job) in queue_run_job() argument
3115 struct panthor_job *job = container_of(sched_job, struct panthor_job, base); in queue_run_job()
3213 queue_timedout_job(struct drm_sched_job *sched_job) in queue_timedout_job() argument
3215 struct panthor_job *job = container_of(sched_job, struct panthor_job, base); in queue_timedout_job()
3247 static void queue_free_job(struct drm_sched_job *sched_job) in queue_free_job() argument
3249 drm_sched_job_cleanup(sched_job); in queue_free_job()
3250 panthor_job_put(sched_job); in queue_free_job()
3706 struct drm_sched_job *panthor_job_get(struct drm_sched_job *sched_job) in panthor_job_get() argument
3708 if (sched_job) { in panthor_job_get()
3709 struct panthor_job *job = container_of(sched_job, struct panthor_job, base); in panthor_job_get()
3717 panthor_job_put(struct drm_sched_job * sched_job) panthor_job_put() argument
3725 panthor_job_vm(struct drm_sched_job * sched_job) panthor_job_vm() argument
3819 panthor_job_update_resvs(struct drm_exec * exec,struct drm_sched_job * sched_job) panthor_job_update_resvs() argument
[all...]
panthor_sched.h
35 struct panthor_vm *panthor_job_vm(struct drm_sched_job *sched_job);
/linux/drivers/gpu/drm/imagination/
pvr_queue.c
498 * @sched_job: The job to query the next internal dependency on
505 pvr_queue_prepare_job(struct drm_sched_job *sched_job, in pvr_queue_prepare_job() argument
508 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_prepare_job()
691 * @sched_job: The job to submit.
696 static struct dma_fence *pvr_queue_run_job(struct drm_sched_job *sched_job) in pvr_queue_run_job() argument
698 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_run_job()
862 * @sched_job: Job object to free.
864 static void pvr_queue_free_job(struct drm_sched_job *sched_job) in pvr_queue_free_job() argument
866 struct pvr_job *job = container_of(sched_job, struct pvr_job, base); in pvr_queue_free_job()
868 drm_sched_job_cleanup(sched_job); in pvr_queue_free_job()
[all...]