Lines matching refs: sched (each hit shows the source line number, the matching code, and the enclosing function)
1150 if (!ring || !ring->sched.thread) in amdgpu_debugfs_test_ib_show()
1152 kthread_park(ring->sched.thread); in amdgpu_debugfs_test_ib_show()
1166 if (!ring || !ring->sched.thread) in amdgpu_debugfs_test_ib_show()
1168 kthread_unpark(ring->sched.thread); in amdgpu_debugfs_test_ib_show()
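
The two park/unpark hits above bracket the IB ring test: the debugfs handler stops every ring's scheduler kthread before submitting test IBs, then restarts them all afterwards so normal submission resumes. A minimal sketch of that pattern, assuming the kthread-based DRM scheduler these hits reflect; the m->private dereference and the seq_printf summary are assumptions about the surrounding handler, not taken from the hits themselves:

#include <linux/kthread.h>
#include <linux/seq_file.h>

static int amdgpu_debugfs_test_ib_show(struct seq_file *m, void *unused)
{
	struct amdgpu_device *adev = m->private;	/* assumption: wired up by the debugfs show-attribute boilerplate */
	int i, r;

	/* park every scheduler kthread so no new jobs race the test IBs */
	for (i = 0; i < AMDGPU_MAX_RINGS; i++) {
		struct amdgpu_ring *ring = adev->rings[i];

		if (!ring || !ring->sched.thread)
			continue;
		kthread_park(ring->sched.thread);
	}

	r = amdgpu_ib_ring_tests(adev);

	/* let the schedulers run again */
	for (i = 0; i < AMDGPU_MAX_RINGS; i++) {
		struct amdgpu_ring *ring = adev->rings[i];

		if (!ring || !ring->sched.thread)
			continue;
		kthread_unpark(ring->sched.thread);
	}

	seq_printf(m, "ib ring tests %s\n", r ? "failed" : "passed");
	return r;
}

Parking rather than stopping the kthread is the lighter-weight choice: the scheduler's pending list survives, and kthread_unpark() resumes submission exactly where it left off.
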
1302 static void amdgpu_ib_preempt_job_recovery(struct drm_gpu_scheduler *sched) in amdgpu_ib_preempt_job_recovery() argument
1307 spin_lock(&sched->job_list_lock); in amdgpu_ib_preempt_job_recovery()
1308 list_for_each_entry(s_job, &sched->pending_list, list) { in amdgpu_ib_preempt_job_recovery()
1309 fence = sched->ops->run_job(s_job); in amdgpu_ib_preempt_job_recovery()
1312 spin_unlock(&sched->job_list_lock); in amdgpu_ib_preempt_job_recovery()
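
amdgpu_ib_preempt_job_recovery() takes the whole scheduler rather than a single ring and resubmits everything still on pending_list through the run_job op, holding job_list_lock so the list cannot change under the walk. A sketch consistent with the hits above; the dma_fence_put() on run_job()'s return is an assumption following the usual run_job contract, where the returned hardware-fence reference belongs to the caller:

#include <drm/gpu_scheduler.h>

static void amdgpu_ib_preempt_job_recovery(struct drm_gpu_scheduler *sched)
{
	struct drm_sched_job *s_job;
	struct dma_fence *fence;

	spin_lock(&sched->job_list_lock);
	list_for_each_entry(s_job, &sched->pending_list, list) {
		/* re-run each job that was still pending when the ring was preempted */
		fence = sched->ops->run_job(s_job);
		dma_fence_put(fence);
	}
	spin_unlock(&sched->job_list_lock);
}
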
1322 struct drm_gpu_scheduler *sched = &ring->sched; in amdgpu_ib_preempt_mark_partial_job() local
1339 spin_lock(&sched->job_list_lock); in amdgpu_ib_preempt_mark_partial_job()
1340 list_for_each_entry_safe(s_job, tmp, &sched->pending_list, list) { in amdgpu_ib_preempt_mark_partial_job()
1344 sched->ops->free_job(s_job); in amdgpu_ib_preempt_mark_partial_job()
1352 spin_unlock(&sched->job_list_lock); in amdgpu_ib_preempt_mark_partial_job()
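
amdgpu_ib_preempt_mark_partial_job() does the complementary bookkeeping before recovery runs: it walks pending_list with the _safe iterator because it unlinks entries, retiring jobs whose finished fence has already signaled via the free_job op and leaving the preempted, half-executed ones in place for resubmission. A trimmed sketch of that walk; per the "local" hit above the real function keeps sched as a local, and it additionally flags the partially executed jobs, which is elided here:

static void amdgpu_ib_preempt_mark_partial_job(struct amdgpu_ring *ring)
{
	struct drm_gpu_scheduler *sched = &ring->sched;
	struct drm_sched_job *s_job, *tmp;

	spin_lock(&sched->job_list_lock);
	list_for_each_entry_safe(s_job, tmp, &sched->pending_list, list) {
		if (dma_fence_is_signaled(&s_job->s_fence->finished)) {
			/* finished before the preempt hit; retire it now */
			list_del_init(&s_job->list);
			sched->ops->free_job(s_job);
			continue;
		}
		/*
		 * Still unfinished: it was cut short on the hardware, so it
		 * stays on pending_list for amdgpu_ib_preempt_job_recovery().
		 */
	}
	spin_unlock(&sched->job_list_lock);
}
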
1367 if (!ring || !ring->funcs->preempt_ib || !ring->sched.thread) in amdgpu_debugfs_ib_preempt()
1385 kthread_park(ring->sched.thread); in amdgpu_debugfs_ib_preempt()
1410 amdgpu_ib_preempt_job_recovery(&ring->sched); in amdgpu_debugfs_ib_preempt()
1421 kthread_unpark(ring->sched.thread); in amdgpu_debugfs_ib_preempt()
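
Taken together, the hits in amdgpu_debugfs_ib_preempt() trace a park/preempt/recover/unpark sequence: reject rings that cannot preempt or have no scheduler thread, park the kthread so nothing new is submitted, ask the hardware to preempt, sort the pending jobs, resubmit the survivors, and unpark. A condensed sketch of that control flow; error handling, fence swapping, and the reset locking of the full handler are omitted, and amdgpu_ring_preempt_ib() is the driver's usual wrapper around ring->funcs->preempt_ib:

static int amdgpu_debugfs_ib_preempt(void *data, u64 val)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)data;
	struct amdgpu_ring *ring;
	int r;

	if (val >= AMDGPU_MAX_RINGS)
		return -EINVAL;

	ring = adev->rings[val];
	if (!ring || !ring->funcs->preempt_ib || !ring->sched.thread)
		return -EINVAL;

	/* stop the scheduler so pending_list stays stable */
	kthread_park(ring->sched.thread);

	r = amdgpu_ring_preempt_ib(ring);
	if (r)
		goto out;

	/* retire finished jobs, keep the preempted ones on pending_list */
	amdgpu_ib_preempt_mark_partial_job(ring);

	/* resubmit whatever was cut short */
	amdgpu_ib_preempt_job_recovery(&ring->sched);

out:
	/* restart the scheduler */
	kthread_unpark(ring->sched.thread);
	return r;
}

The ordering matters: the partial-job pass must run while the kthread is parked and before recovery, otherwise the scheduler could retire or resubmit jobs concurrently with the list walk.
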