
Lines matching references to the identifier e in the cxgb4 scheduler class code (sched.c)

47 struct sched_class *e; in t4_sched_class_fw_cmd() local
50 e = &s->tab[p->u.params.class]; in t4_sched_class_fw_cmd()
58 p->u.params.channel, e->idx, in t4_sched_class_fw_cmd()
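The helper at lines 47-58 resolves the caller-named class slot from the per-port table before issuing the firmware command. The standalone model below is reconstructed from the excerpts on this page: the *_sketch names are illustrative, the struct layouts are trimmed to the fields these excerpts actually touch, and the firmware plumbing is elided. The later sketches on this page reuse these definitions.

    #include <linux/types.h>
    #include <linux/list.h>
    #include <linux/atomic.h>
    #include <linux/string.h>

    enum sched_state { SCHED_STATE_UNUSED, SCHED_STATE_ACTIVE };
    enum sched_bind_type { SCHED_QUEUE, SCHED_FLOWC };

    struct ch_sched_params {              /* trimmed: only fields used here */
        u8 type;
        union {
            struct {
                u8 channel;               /* scheduling channel */
                u8 class;                 /* class slot index */
                u32 maxrate;              /* stand-in for the rate fields */
            } params;
        } u;
    };

    struct ch_sched_queue {               /* queue-to-class binding request */
        s8 queue;
        s8 class;
    };

    struct sched_queue_entry {
        struct list_head list;            /* link on sched_class::entry_list */
        unsigned int cntxt_id;            /* queue context id */
        struct ch_sched_queue param;
    };

    struct sched_class {
        u8 state;                         /* SCHED_STATE_* */
        u8 idx;                           /* slot index in sched_table::tab */
        struct ch_sched_params info;      /* parameters programmed so far */
        enum sched_bind_type bind_type;   /* what kind of entries are bound */
        struct list_head entry_list;      /* entries bound to this class */
        atomic_t refcnt;                  /* one reference per bound entry */
    };

    struct sched_table {                  /* per-port class table */
        u8 sched_size;                    /* number of class slots */
        struct sched_class tab[];
    };

    /* Resolve the class named by the request; the real helper then sends a
     * firmware command carrying p->u.params.channel and e->idx (elided). */
    static int t4_sched_class_fw_cmd_sketch(struct sched_table *s,
                                            struct ch_sched_params *p)
    {
        struct sched_class *e = &s->tab[p->u.params.class];

        (void)e;    /* FW_SCHED command elided in this sketch */
        return 0;
    }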
125 struct sched_class *e, *end; in t4_sched_entry_lookup() local
130 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_entry_lookup()
131 if (e->state == SCHED_STATE_UNUSED || in t4_sched_entry_lookup()
132 e->bind_type != type) in t4_sched_entry_lookup()
139 list_for_each_entry(qe, &e->entry_list, list) { in t4_sched_entry_lookup()
150 list_for_each_entry(fe, &e->entry_list, list) { in t4_sched_entry_lookup()
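Lines 125-150 show a two-level scan: class slots that are unused or hold the wrong bind type are skipped, then the surviving class's entry_list is walked for a matching queue entry (qe) or FLOWC entry (fe). A sketch of the queue side, reusing the model above; matching on cntxt_id is an assumption about what the elided comparison checks.

    /* Find the class, if any, that queue @qid is currently bound to. */
    static struct sched_class *
    t4_sched_entry_lookup_sketch(struct sched_table *s,
                                 enum sched_bind_type type, unsigned int qid)
    {
        struct sched_class *e, *end = &s->tab[s->sched_size];

        for (e = &s->tab[0]; e != end; ++e) {
            struct sched_queue_entry *qe;

            /* Only active classes holding entries of @type qualify. */
            if (e->state == SCHED_STATE_UNUSED || e->bind_type != type)
                continue;

            list_for_each_entry(qe, &e->entry_list, list)
                if (qe->cntxt_id == qid)    /* assumed match criterion */
                    return e;
        }
        return NULL;
    }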
190 struct sched_class *e; in t4_sched_queue_unbind() local
206 e = &pi->sched_tbl->tab[qe->param.class]; in t4_sched_queue_unbind()
209 if (atomic_dec_and_test(&e->refcnt)) in t4_sched_queue_unbind()
210 cxgb4_sched_class_free(adap->port[pi->port_id], e->idx); in t4_sched_queue_unbind()
221 struct sched_class *e; in t4_sched_queue_bind() local
244 e = &s->tab[qe->param.class]; in t4_sched_queue_bind()
249 list_add_tail(&qe->list, &e->entry_list); in t4_sched_queue_bind()
250 e->bind_type = SCHED_QUEUE; in t4_sched_queue_bind()
251 atomic_inc(&e->refcnt); in t4_sched_queue_bind()
263 struct sched_class *e; in t4_sched_flowc_unbind() local
277 e = &pi->sched_tbl->tab[fe->param.class]; in t4_sched_flowc_unbind()
280 if (atomic_dec_and_test(&e->refcnt)) in t4_sched_flowc_unbind()
281 cxgb4_sched_class_free(adap->port[pi->port_id], e->idx); in t4_sched_flowc_unbind()
291 struct sched_class *e; in t4_sched_flowc_bind() local
309 e = &s->tab[fe->param.class]; in t4_sched_flowc_bind()
314 list_add_tail(&fe->list, &e->entry_list); in t4_sched_flowc_bind()
315 e->bind_type = SCHED_FLOWC; in t4_sched_flowc_bind()
316 atomic_inc(&e->refcnt); in t4_sched_flowc_bind()
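The queue pair (lines 190-251) and the FLOWC pair (lines 263-316) share one lifecycle: bind appends the entry to the class's entry_list, records the bind type, and takes a reference; unbind drops the reference, and the atomic_dec_and_test() at lines 209 and 280 frees the class when the last entry leaves. A sketch of the queue side; the free helper is defined in the release sketch further down.

    static void cxgb4_sched_class_free_sketch(struct sched_table *s, u8 classid);

    /* Bind: attach @qe to @e and account for it in the refcount. */
    static void t4_sched_queue_bind_sketch(struct sched_class *e,
                                           struct sched_queue_entry *qe)
    {
        list_add_tail(&qe->list, &e->entry_list);
        e->bind_type = SCHED_QUEUE;
        atomic_inc(&e->refcnt);
    }

    /* Unbind: the last entry to leave releases the class itself. */
    static void t4_sched_queue_unbind_sketch(struct sched_table *s,
                                             struct sched_class *e,
                                             struct sched_queue_entry *qe)
    {
        list_del(&qe->list);
        if (atomic_dec_and_test(&e->refcnt))
            cxgb4_sched_class_free_sketch(s, e->idx);
    }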
325 struct sched_class *e, in t4_sched_class_unbind_all() argument
328 if (!e) in t4_sched_class_unbind_all()
335 list_for_each_entry(qe, &e->entry_list, list) in t4_sched_class_unbind_all()
342 list_for_each_entry(fe, &e->entry_list, list) in t4_sched_class_unbind_all()
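t4_sched_class_unbind_all (lines 325-342) bails out on a NULL class, then detaches every entry still on the class's list, dispatching on the bind type. Because each unbind deletes the entry it visits, this sketch uses the deletion-safe list iterator; queue case only.

    /* Detach everything still bound to @e. */
    static void t4_sched_class_unbind_all_sketch(struct sched_table *s,
                                                 struct sched_class *e)
    {
        struct sched_queue_entry *qe, *next;

        if (!e)
            return;

        /* _safe variant: each unbind removes @qe from the list. */
        list_for_each_entry_safe(qe, next, &e->entry_list, list)
            t4_sched_queue_unbind_sketch(s, e, qe);
    }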
484 struct sched_class *e, *end; in t4_sched_class_lookup() local
489 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_class_lookup()
490 if (e->state == SCHED_STATE_UNUSED) { in t4_sched_class_lookup()
491 found = e; in t4_sched_class_lookup()
505 for (e = &s->tab[0]; e != end; ++e) { in t4_sched_class_lookup()
506 if (e->state == SCHED_STATE_UNUSED) in t4_sched_class_lookup()
509 memcpy(&info, &e->info, sizeof(info)); in t4_sched_class_lookup()
516 found = e; in t4_sched_class_lookup()
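t4_sched_class_lookup (lines 484-516) runs one of two scans over the same table: with no requested parameters it returns the first unused slot, otherwise it snapshots each active class's info and returns a class whose programmed parameters already match the request. In this sketch memcmp() stands in for the driver's field-by-field comparison.

    static struct sched_class *
    t4_sched_class_lookup_sketch(struct sched_table *s,
                                 const struct ch_sched_params *p)
    {
        struct sched_class *e, *end = &s->tab[s->sched_size];

        if (!p) {
            /* Caller wants a fresh slot: the first unused one wins. */
            for (e = &s->tab[0]; e != end; ++e)
                if (e->state == SCHED_STATE_UNUSED)
                    return e;
        } else {
            /* Caller wants a match among active classes. */
            for (e = &s->tab[0]; e != end; ++e) {
                struct ch_sched_params info;

                if (e->state == SCHED_STATE_UNUSED)
                    continue;

                memcpy(&info, &e->info, sizeof(info));
                /* memcmp is a stand-in for the per-field compare */
                if (!memcmp(&info.u.params, &p->u.params,
                            sizeof(info.u.params)))
                    return e;
            }
        }
        return NULL;
    }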
528 struct sched_class *e = NULL; in t4_sched_class_alloc() local
548 e = t4_sched_class_lookup(pi, p); in t4_sched_class_alloc()
550 if (!e) { in t4_sched_class_alloc()
554 e = t4_sched_class_lookup(pi, NULL); in t4_sched_class_alloc()
555 if (!e) in t4_sched_class_alloc()
559 np.u.params.class = e->idx; in t4_sched_class_alloc()
564 memcpy(&e->info, &np, sizeof(e->info)); in t4_sched_class_alloc()
565 atomic_set(&e->refcnt, 0); in t4_sched_class_alloc()
566 e->state = SCHED_STATE_ACTIVE; in t4_sched_class_alloc()
569 return e; in t4_sched_class_alloc()
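t4_sched_class_alloc (lines 528-569) first tries to share an existing class with identical parameters; only when that fails does it claim an unused slot, write the slot index into the request (np.u.params.class = e->idx), program the firmware, and activate the slot with a zeroed refcount. Reusing the sketches above:

    static struct sched_class *
    t4_sched_class_alloc_sketch(struct sched_table *s, struct ch_sched_params *p)
    {
        struct sched_class *e;
        struct ch_sched_params np;

        /* Share an active class whose parameters already match. */
        e = t4_sched_class_lookup_sketch(s, p);
        if (!e) {
            /* No match: claim the first free slot instead. */
            e = t4_sched_class_lookup_sketch(s, NULL);
            if (!e)
                return NULL;

            memcpy(&np, p, sizeof(np));
            np.u.params.class = e->idx;    /* tell firmware which slot */
            if (t4_sched_class_fw_cmd_sketch(s, &np))
                return NULL;

            memcpy(&e->info, &np, sizeof(e->info));
            atomic_set(&e->refcnt, 0);     /* nothing bound yet */
            e->state = SCHED_STATE_ACTIVE;
        }
        return e;
    }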
610 struct sched_class *e; in cxgb4_sched_class_free() local
614 e = &s->tab[classid]; in cxgb4_sched_class_free()
615 if (!atomic_read(&e->refcnt) && e->state != SCHED_STATE_UNUSED) { in cxgb4_sched_class_free()
621 memcpy(&p, &e->info, sizeof(p)); in cxgb4_sched_class_free()
638 e->state = SCHED_STATE_UNUSED; in cxgb4_sched_class_free()
639 memset(&e->info, 0, sizeof(e->info)); in cxgb4_sched_class_free()
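cxgb4_sched_class_free (lines 610-639) refuses to tear down a class that still has bound entries (nonzero refcnt) or was never in use; otherwise it snapshots the programmed parameters, which upstream uses to reset the class's rate in firmware, and returns the slot to the unused pool.

    static void cxgb4_sched_class_free_sketch(struct sched_table *s, u8 classid)
    {
        struct sched_class *e = &s->tab[classid];

        /* Only an idle, in-use class may be released. */
        if (!atomic_read(&e->refcnt) && e->state != SCHED_STATE_UNUSED) {
            struct ch_sched_params p;

            /* Snapshot before wiping; the driver feeds this back to
             * firmware to zero out the class's rate. */
            memcpy(&p, &e->info, sizeof(p));
            (void)p;    /* firmware call elided in this sketch */

            e->state = SCHED_STATE_UNUSED;
            memset(&e->info, 0, sizeof(e->info));
        }
    }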
643 static void t4_sched_class_free(struct net_device *dev, struct sched_class *e) in t4_sched_class_free() argument
647 t4_sched_class_unbind_all(pi, e, e->bind_type); in t4_sched_class_free()
648 cxgb4_sched_class_free(dev, e->idx); in t4_sched_class_free()
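t4_sched_class_free (lines 643-648) is the forced variant: it severs every binding of the class's recorded bind_type and then releases the slot. If the last unbind already released the class, the explicit free degrades to a no-op thanks to the state/refcnt guard in the release helper above.

    static void t4_sched_class_free_sketch(struct sched_table *s,
                                           struct sched_class *e)
    {
        t4_sched_class_unbind_all_sketch(s, e);
        cxgb4_sched_class_free_sketch(s, e->idx);
    }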
685 struct sched_class *e; in t4_cleanup_sched() local
687 e = &s->tab[i]; in t4_cleanup_sched()
688 if (e->state == SCHED_STATE_ACTIVE) in t4_cleanup_sched()
689 t4_sched_class_free(adap->port[j], e); in t4_cleanup_sched()
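Finally, t4_cleanup_sched (lines 685-689) sweeps the table at driver teardown and force-frees every class still active; upstream does this per port, while the sketch takes a single table.

    static void t4_cleanup_sched_sketch(struct sched_table *s)
    {
        unsigned int i;

        for (i = 0; i < s->sched_size; i++) {
            struct sched_class *e = &s->tab[i];

            if (e->state == SCHED_STATE_ACTIVE)
                t4_sched_class_free_sketch(s, e);
        }
    }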