• Home
  • Raw
  • Download

Lines matching references to `cbe_spu_info`

147 		mutex_lock(&cbe_spu_info[node].list_mutex);  in spu_update_sched_info()
149 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_update_sched_info()
191 mutex_lock(&cbe_spu_info[node].list_mutex); in do_notify_spus_active()
192 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) { in do_notify_spus_active()
201 mutex_unlock(&cbe_spu_info[node].list_mutex); in do_notify_spus_active()
217 atomic_inc(&cbe_spu_info[spu->node].reserved_spus); in spu_bind_context()
253 BUG_ON(!mutex_is_locked(&cbe_spu_info[spu->node].list_mutex)); in sched_spu()
320 mutex_lock(&cbe_spu_info[node].list_mutex); in aff_ref_location()
321 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) { in aff_ref_location()
328 mutex_unlock(&cbe_spu_info[node].list_mutex); in aff_ref_location()
332 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) { in aff_ref_location()
335 mutex_unlock(&cbe_spu_info[node].list_mutex); in aff_ref_location()
339 mutex_unlock(&cbe_spu_info[node].list_mutex); in aff_ref_location()
433 atomic_dec(&cbe_spu_info[spu->node].reserved_spus); in spu_unbind_context()
574 mutex_lock(&cbe_spu_info[node].list_mutex); in spu_get_idle()
578 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_get_idle()
591 mutex_lock(&cbe_spu_info[node].list_mutex); in spu_get_idle()
592 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) { in spu_get_idle()
596 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_get_idle()
605 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_get_idle()
639 mutex_lock(&cbe_spu_info[node].list_mutex); in find_victim()
640 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) { in find_victim()
651 mutex_unlock(&cbe_spu_info[node].list_mutex); in find_victim()
685 mutex_lock(&cbe_spu_info[node].list_mutex); in find_victim()
686 cbe_spu_info[node].nr_active--; in find_victim()
688 mutex_unlock(&cbe_spu_info[node].list_mutex); in find_victim()
712 mutex_lock(&cbe_spu_info[node].list_mutex); in __spu_schedule()
715 cbe_spu_info[node].nr_active++; in __spu_schedule()
719 mutex_unlock(&cbe_spu_info[node].list_mutex); in __spu_schedule()
755 mutex_lock(&cbe_spu_info[node].list_mutex); in spu_unschedule()
756 cbe_spu_info[node].nr_active--; in spu_unschedule()
762 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_unschedule()
960 nr_active += cbe_spu_info[node].nr_active; in count_active_contexts()
1003 struct mutex *mtx = &cbe_spu_info[node].list_mutex; in spusched_thread()
1006 list_for_each_entry(spu, &cbe_spu_info[node].spus, in spusched_thread()
1055 atomic_dec(&cbe_spu_info[node].busy_spus); in spuctx_switch_state()
1057 atomic_inc(&cbe_spu_info[node].busy_spus); in spuctx_switch_state()
1138 mutex_lock(&cbe_spu_info[node].list_mutex); in spu_sched_exit()
1139 list_for_each_entry(spu, &cbe_spu_info[node].spus, cbe_list) in spu_sched_exit()
1142 mutex_unlock(&cbe_spu_info[node].list_mutex); in spu_sched_exit()