Lines matching refs: e
Identifier cross-reference hits for the scheduler-class pointer e (local variable or argument, as marked per hit) across the cxgb4 t4_sched_* traffic scheduling code. Each hit shows the source line number, the matching code, and the enclosing function.
48 struct sched_class *e; in t4_sched_class_fw_cmd() local
51 e = &s->tab[p->u.params.class]; in t4_sched_class_fw_cmd()
58 p->u.params.channel, e->idx, in t4_sched_class_fw_cmd()
115 struct sched_class *e, *end; in t4_sched_queue_lookup() local
121 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_queue_lookup()
125 if (e->state == SCHED_STATE_UNUSED) in t4_sched_queue_lookup()
128 list_for_each_entry(qe, &e->queue_list, list) { in t4_sched_queue_lookup()
130 found = e; in t4_sched_queue_lookup()
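t4_sched_queue_lookup() walks the whole class table, skips unused slots, and scans each class's queue_list for a queue with the given context id, reporting the entry's list position through an out-parameter (consumed by unbind below). A sketch of the loop; s, qid, and index come from the enclosing function, and the cntxt_id field name on sched_queue_entry is an assumption:

    struct sched_class *e, *end;
    struct sched_class *found = NULL;
    int i;

    end = &s->tab[s->sched_size];            /* one past the last slot */
    for (e = &s->tab[0]; e != end; ++e) {
        struct sched_queue_entry *qe;

        i = 0;
        if (e->state == SCHED_STATE_UNUSED)  /* skip empty slots */
            continue;

        list_for_each_entry(qe, &e->queue_list, list) {
            if (qe->cntxt_id == qid) {       /* assumed field name */
                found = e;
                if (index)
                    *index = i;              /* position in queue_list */
                break;
            }
            i++;
        }
        if (found)
            break;
    }
    return found;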
148 struct sched_class *e; in t4_sched_queue_unbind() local
162 e = t4_sched_queue_lookup(pi, qid, &index); in t4_sched_queue_unbind()
163 if (e && index >= 0) { in t4_sched_queue_unbind()
166 spin_lock(&e->lock); in t4_sched_queue_unbind()
167 list_for_each_entry(qe, &e->queue_list, list) { in t4_sched_queue_unbind()
175 spin_unlock(&e->lock); in t4_sched_queue_unbind()
181 if (atomic_dec_and_test(&e->refcnt)) { in t4_sched_queue_unbind()
182 e->state = SCHED_STATE_UNUSED; in t4_sched_queue_unbind()
183 memset(&e->info, 0, sizeof(e->info)); in t4_sched_queue_unbind()
185 spin_unlock(&e->lock); in t4_sched_queue_unbind()
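Unbind resolves the queue to its class and list position, then under e->lock re-walks queue_list to that index, detaches the entry, and recycles the class slot once the last reference is dropped (lines 181-183). Sketch; the firmware detach helper named here (t4_sched_bind_unbind_op) is an assumption not present in the hits:

    e = t4_sched_queue_lookup(pi, qid, &index);
    if (e && index >= 0) {
        struct sched_queue_entry *qe;
        int i = 0;

        spin_lock(&e->lock);
        list_for_each_entry(qe, &e->queue_list, list) {
            if (i == index)          /* stop on the entry lookup reported */
                break;
            i++;
        }

        /* assumed helper: tell firmware to detach the queue */
        err = t4_sched_bind_unbind_op(pi, (void *)qe, SCHED_QUEUE, false);
        if (err) {
            spin_unlock(&e->lock);
            return err;
        }

        list_del(&qe->list);
        kvfree(qe);
        if (atomic_dec_and_test(&e->refcnt)) {
            /* last bound queue gone: return the slot to the free pool */
            e->state = SCHED_STATE_UNUSED;
            memset(&e->info, 0, sizeof(e->info));
        }
        spin_unlock(&e->lock);
    }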
195 struct sched_class *e; in t4_sched_queue_bind() local
223 e = &s->tab[qe->param.class]; in t4_sched_queue_bind()
224 spin_lock(&e->lock); in t4_sched_queue_bind()
228 spin_unlock(&e->lock); in t4_sched_queue_bind()
232 list_add_tail(&qe->list, &e->queue_list); in t4_sched_queue_bind()
233 atomic_inc(&e->refcnt); in t4_sched_queue_bind()
234 spin_unlock(&e->lock); in t4_sched_queue_bind()
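Bind is the mirror image: the request's class selects the slot, and under e->lock the new queue entry goes onto queue_list with a refcount bump, so unbind can later tell when the class has emptied. Sketch, again assuming the t4_sched_bind_unbind_op() helper:

    e = &s->tab[qe->param.class];
    spin_lock(&e->lock);
    err = t4_sched_bind_unbind_op(pi, (void *)qe, SCHED_QUEUE, true);
    if (err) {
        kvfree(qe);
        spin_unlock(&e->lock);
        return err;
    }

    list_add_tail(&qe->list, &e->queue_list);  /* remember the binding */
    atomic_inc(&e->refcnt);                    /* one more queue on e */
    spin_unlock(&e->lock);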
240 struct sched_class *e, in t4_sched_class_unbind_all() argument
243 if (!e) in t4_sched_class_unbind_all()
250 list_for_each_entry(qe, &e->queue_list, list) in t4_sched_class_unbind_all()
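t4_sched_class_unbind_all() guards against a NULL class and then unbinds every entry by its saved parameters. If each unbind unlinks the entry from the list (as the refcount drop at lines 181-183 suggests), the plain list_for_each_entry() at line 250 is fragile; the _safe variant used in this sketch is the defensive form:

    if (!e)
        return;

    switch (type) {
    case SCHED_QUEUE: {
        struct sched_queue_entry *qe, *tmp;

        /* _safe variant because each unbind unlinks qe from the list */
        list_for_each_entry_safe(qe, tmp, &e->queue_list, list)
            t4_sched_queue_unbind(pi, &qe->param);
        break;
    }
    default:
        break;
    }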
383 struct sched_class *e, *end; in t4_sched_class_lookup() local
389 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_class_lookup()
390 if (e->state == SCHED_STATE_UNUSED) { in t4_sched_class_lookup()
391 found = e; in t4_sched_class_lookup()
405 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_class_lookup()
406 if (e->state == SCHED_STATE_UNUSED) in t4_sched_class_lookup()
409 memcpy(&info, &e->info, sizeof(info)); in t4_sched_class_lookup()
416 found = e; in t4_sched_class_lookup()
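t4_sched_class_lookup() has two modes. With a NULL parameter block (lines 389-391) it returns the first unused slot; with parameters (lines 405-416) it scans active slots and compares a scratch copy of e->info against the request, the copy at line 409 existing so the class id can be masked out of the comparison. Sketch; the SCHED_CLS_NONE sentinel is an assumption:

    end = &s->tab[s->sched_size];

    if (!p) {
        /* mode 1: hand back any unused slot */
        for (e = &s->tab[0]; e != end; ++e) {
            if (e->state == SCHED_STATE_UNUSED) {
                found = e;
                break;
            }
        }
    } else {
        struct ch_sched_params info, tp;

        memcpy(&tp, p, sizeof(tp));
        tp.u.params.class = SCHED_CLS_NONE;   /* ignore class id in match */

        /* mode 2: find an active slot with identical parameters */
        for (e = &s->tab[0]; e != end; ++e) {
            if (e->state == SCHED_STATE_UNUSED)
                continue;

            memcpy(&info, &e->info, sizeof(info));
            info.u.params.class = SCHED_CLS_NONE;

            if (info.type == tp.type &&
                !memcmp(&info.u.params, &tp.u.params,
                        sizeof(info.u.params))) {
                found = e;
                break;
            }
        }
    }
    return found;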
429 struct sched_class *e; in t4_sched_class_alloc() local
448 e = t4_sched_class_lookup(pi, p); in t4_sched_class_alloc()
449 if (!e) { in t4_sched_class_alloc()
453 e = t4_sched_class_lookup(pi, NULL); in t4_sched_class_alloc()
454 if (!e) in t4_sched_class_alloc()
458 np.u.params.class = e->idx; in t4_sched_class_alloc()
460 spin_lock(&e->lock); in t4_sched_class_alloc()
464 spin_unlock(&e->lock); in t4_sched_class_alloc()
465 e = NULL; in t4_sched_class_alloc()
468 memcpy(&e->info, &np, sizeof(e->info)); in t4_sched_class_alloc()
469 atomic_set(&e->refcnt, 0); in t4_sched_class_alloc()
470 e->state = SCHED_STATE_ACTIVE; in t4_sched_class_alloc()
471 spin_unlock(&e->lock); in t4_sched_class_alloc()
476 return e; in t4_sched_class_alloc()
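t4_sched_class_alloc() ties the pieces together: reuse an exact parameter match if one exists (line 448), otherwise claim an unused slot (line 453), rewrite the request's class field with the slot's hardware index (line 458), and only mark the slot active after firmware accepts it. Sketch; the SCHED_FW_OP_ADD opcode name is an assumption:

    e = t4_sched_class_lookup(pi, p);        /* reuse an exact match */
    if (!e) {
        struct ch_sched_params np;

        e = t4_sched_class_lookup(pi, NULL); /* else grab an unused slot */
        if (!e)
            goto out;

        memcpy(&np, p, sizeof(np));
        np.u.params.class = e->idx;          /* slot's hardware index */

        spin_lock(&e->lock);
        err = t4_sched_class_fw_cmd(pi, &np, SCHED_FW_OP_ADD);
        if (err) {
            spin_unlock(&e->lock);
            e = NULL;                        /* firmware rejected it */
            goto out;
        }
        memcpy(&e->info, &np, sizeof(e->info));
        atomic_set(&e->refcnt, 0);           /* no queues bound yet */
        e->state = SCHED_STATE_ACTIVE;
        spin_unlock(&e->lock);
    }
out:
    return e;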
505 static void t4_sched_class_free(struct port_info *pi, struct sched_class *e) in t4_sched_class_free() argument
507 t4_sched_class_unbind_all(pi, e, SCHED_QUEUE); in t4_sched_class_free()
543 struct sched_class *e; in t4_cleanup_sched() local
546 e = &s->tab[i]; in t4_cleanup_sched()
547 if (e->state == SCHED_STATE_ACTIVE) in t4_cleanup_sched()
548 t4_sched_class_free(pi, e); in t4_cleanup_sched()
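Teardown: t4_sched_class_free() (line 507) just delegates to t4_sched_class_unbind_all(pi, e, SCHED_QUEUE), and t4_cleanup_sched() sweeps every slot, freeing whatever is still active. A sketch of the sweep; for_each_port(), netdev2pinfo(), and the final kvfree() of the table are assumptions drawn from the usual cxgb4 idiom:

    void t4_cleanup_sched(struct adapter *adap)
    {
        unsigned int port, i;

        for_each_port(adap, port) {
            struct port_info *pi = netdev2pinfo(adap->port[port]);
            struct sched_table *s = pi->sched_tbl;

            for (i = 0; i < s->sched_size; i++) {
                struct sched_class *e;

                e = &s->tab[i];
                if (e->state == SCHED_STATE_ACTIVE)
                    t4_sched_class_free(pi, e);  /* unbinds all queues */
            }
            kvfree(s);  /* assumed: table freed along with the port */
        }
    }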