Home
last modified time | relevance | path

Searched refs:s_job (Results 1 – 4 of 4) sorted by relevance

/OK3568_Linux_fs/kernel/drivers/gpu/drm/scheduler/
sched_main.c 270 static void drm_sched_job_begin(struct drm_sched_job *s_job) in drm_sched_job_begin() argument
272 struct drm_gpu_scheduler *sched = s_job->sched; in drm_sched_job_begin()
275 list_add_tail(&s_job->node, &sched->ring_mirror_list); in drm_sched_job_begin()
379 struct drm_sched_job *s_job, *tmp; in drm_sched_stop() local
403 list_for_each_entry_safe_reverse(s_job, tmp, &sched->ring_mirror_list, node) { in drm_sched_stop()
404 if (s_job->s_fence->parent && in drm_sched_stop()
405 dma_fence_remove_callback(s_job->s_fence->parent, in drm_sched_stop()
406 &s_job->cb)) { in drm_sched_stop()
414 list_del_init(&s_job->node); in drm_sched_stop()
423 dma_fence_wait(&s_job->s_fence->finished, false); in drm_sched_stop()
[all …]
/OK3568_Linux_fs/kernel/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c 31 static void amdgpu_job_timedout(struct drm_sched_job *s_job) in amdgpu_job_timedout() argument
33 struct amdgpu_ring *ring = to_amdgpu_ring(s_job->sched); in amdgpu_job_timedout()
34 struct amdgpu_job *job = to_amdgpu_job(s_job); in amdgpu_job_timedout()
41 amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) { in amdgpu_job_timedout()
43 s_job->sched->name); in amdgpu_job_timedout()
124 static void amdgpu_job_free_cb(struct drm_sched_job *s_job) in amdgpu_job_free_cb() argument
126 struct amdgpu_job *job = to_amdgpu_job(s_job); in amdgpu_job_free_cb()
128 drm_sched_job_cleanup(s_job); in amdgpu_job_free_cb()
249 struct drm_sched_job *s_job; in amdgpu_job_stop_all_jobs_on_sched() local
262 while ((s_job = to_drm_sched_job(spsc_queue_pop(&s_entity->job_queue)))) { in amdgpu_job_stop_all_jobs_on_sched()
[all …]
amdgpu_debugfs.c 1392 struct drm_sched_job *s_job; in amdgpu_ib_preempt_job_recovery() local
1396 list_for_each_entry(s_job, &sched->ring_mirror_list, node) { in amdgpu_ib_preempt_job_recovery()
1397 fence = sched->ops->run_job(s_job); in amdgpu_ib_preempt_job_recovery()
1406 struct drm_sched_job *s_job, *tmp; in amdgpu_ib_preempt_mark_partial_job() local
1428 list_for_each_entry_safe(s_job, tmp, &sched->ring_mirror_list, node) { in amdgpu_ib_preempt_mark_partial_job()
1429 if (dma_fence_is_signaled(&s_job->s_fence->finished)) { in amdgpu_ib_preempt_mark_partial_job()
1431 list_del_init(&s_job->node); in amdgpu_ib_preempt_mark_partial_job()
1432 sched->ops->free_job(s_job); in amdgpu_ib_preempt_mark_partial_job()
1435 job = to_amdgpu_job(s_job); in amdgpu_ib_preempt_mark_partial_job()
/OK3568_Linux_fs/kernel/include/drm/
gpu_scheduler.h 203 static inline bool drm_sched_invalidate_job(struct drm_sched_job *s_job, in drm_sched_invalidate_job() argument
206 return (s_job && atomic_inc_return(&s_job->karma) > threshold); in drm_sched_invalidate_job()
315 void drm_sched_job_kickout(struct drm_sched_job *s_job);