Lines matching references to the identifier cl (struct drr_class *) in the DRR packet scheduler, net/sched/sch_drr.c:
53 static void drr_purge_queue(struct drr_class *cl) in drr_purge_queue() argument
55 unsigned int len = cl->qdisc->q.qlen; in drr_purge_queue()
56 unsigned int backlog = cl->qdisc->qstats.backlog; in drr_purge_queue()
58 qdisc_reset(cl->qdisc); in drr_purge_queue()
59 qdisc_tree_reduce_backlog(cl->qdisc, len, backlog); in drr_purge_queue()
71 struct drr_class *cl = (struct drr_class *)*arg; in drr_change_class() local
95 if (cl != NULL) { in drr_change_class()
97 err = gen_replace_estimator(&cl->bstats, NULL, in drr_change_class()
98 &cl->rate_est, in drr_change_class()
110 cl->quantum = quantum; in drr_change_class()
116 cl = kzalloc(sizeof(struct drr_class), GFP_KERNEL); in drr_change_class()
117 if (cl == NULL) in drr_change_class()
120 cl->common.classid = classid; in drr_change_class()
121 cl->quantum = quantum; in drr_change_class()
122 cl->qdisc = qdisc_create_dflt(sch->dev_queue, in drr_change_class()
125 if (cl->qdisc == NULL) in drr_change_class()
126 cl->qdisc = &noop_qdisc; in drr_change_class()
128 qdisc_hash_add(cl->qdisc, true); in drr_change_class()
131 err = gen_replace_estimator(&cl->bstats, NULL, &cl->rate_est, in drr_change_class()
137 qdisc_destroy(cl->qdisc); in drr_change_class()
138 kfree(cl); in drr_change_class()
144 qdisc_class_hash_insert(&q->clhash, &cl->common); in drr_change_class()
149 *arg = (unsigned long)cl; in drr_change_class()
153 static void drr_destroy_class(struct Qdisc *sch, struct drr_class *cl) in drr_destroy_class() argument
155 gen_kill_estimator(&cl->rate_est); in drr_destroy_class()
156 qdisc_destroy(cl->qdisc); in drr_destroy_class()
157 kfree(cl); in drr_destroy_class()
163 struct drr_class *cl = (struct drr_class *)arg; in drr_delete_class() local
165 if (cl->filter_cnt > 0) in drr_delete_class()
170 drr_purge_queue(cl); in drr_delete_class()
171 qdisc_class_hash_remove(&q->clhash, &cl->common); in drr_delete_class()
175 drr_destroy_class(sch, cl); in drr_delete_class()
184 static struct tcf_block *drr_tcf_block(struct Qdisc *sch, unsigned long cl, in drr_tcf_block() argument
189 if (cl) { in drr_tcf_block()
200 struct drr_class *cl = drr_find_class(sch, classid); in drr_bind_tcf() local
202 if (cl != NULL) in drr_bind_tcf()
203 cl->filter_cnt++; in drr_bind_tcf()
205 return (unsigned long)cl; in drr_bind_tcf()
210 struct drr_class *cl = (struct drr_class *)arg; in drr_unbind_tcf() local
212 cl->filter_cnt--; in drr_unbind_tcf()
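
The drr_bind_tcf()/drr_unbind_tcf() lines above, together with the filter_cnt test shown earlier in drr_delete_class(), form a plain use-count guard: a class cannot be deleted while any tc filter still resolves to it. Below is a minimal userspace sketch of that pattern, not kernel code; the toy_* names are illustrative only.

#include <stdbool.h>
#include <stdio.h>

struct toy_class {
	unsigned int classid;
	unsigned int filter_cnt;	/* how many filters point at this class */
};

static void toy_bind(struct toy_class *cl)   { cl->filter_cnt++; }
static void toy_unbind(struct toy_class *cl) { cl->filter_cnt--; }

/* Deletion is refused while any filter is still bound, mirroring the
 * filter_cnt > 0 check in drr_delete_class() above. */
static bool toy_delete(struct toy_class *cl)
{
	if (cl->filter_cnt > 0)
		return false;		/* still referenced, keep the class */
	/* free the class here */
	return true;
}

int main(void)
{
	struct toy_class c = { .classid = 0x10001, .filter_cnt = 0 };

	toy_bind(&c);
	printf("delete while bound:  %s\n", toy_delete(&c) ? "ok" : "refused");
	toy_unbind(&c);
	printf("delete after unbind: %s\n", toy_delete(&c) ? "ok" : "refused");
	return 0;
}

In the kernel, the refused delete surfaces to the netlink caller as -EBUSY.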
219 struct drr_class *cl = (struct drr_class *)arg; in drr_graft_class() local
223 cl->common.classid, NULL); in drr_graft_class()
228 *old = qdisc_replace(sch, new, &cl->qdisc); in drr_graft_class()
234 struct drr_class *cl = (struct drr_class *)arg; in drr_class_leaf() local
236 return cl->qdisc; in drr_class_leaf()
241 struct drr_class *cl = (struct drr_class *)arg; in drr_qlen_notify() local
243 list_del(&cl->alist); in drr_qlen_notify()
249 struct drr_class *cl = (struct drr_class *)arg; in drr_dump_class() local
253 tcm->tcm_handle = cl->common.classid; in drr_dump_class()
254 tcm->tcm_info = cl->qdisc->handle; in drr_dump_class()
259 if (nla_put_u32(skb, TCA_DRR_QUANTUM, cl->quantum)) in drr_dump_class()
271 struct drr_class *cl = (struct drr_class *)arg; in drr_dump_class_stats() local
272 __u32 qlen = cl->qdisc->q.qlen; in drr_dump_class_stats()
277 xstats.deficit = cl->deficit; in drr_dump_class_stats()
280 d, NULL, &cl->bstats) < 0 || in drr_dump_class_stats()
281 gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 || in drr_dump_class_stats()
282 gnet_stats_copy_queue(d, NULL, &cl->qdisc->qstats, qlen) < 0) in drr_dump_class_stats()
291 struct drr_class *cl; in drr_walk() local
298 hlist_for_each_entry(cl, &q->clhash.hash[i], common.hnode) { in drr_walk()
303 if (arg->fn(sch, (unsigned long)cl, arg) < 0) { in drr_walk()
316 struct drr_class *cl; in drr_classify() local
322 cl = drr_find_class(sch, skb->priority); in drr_classify()
323 if (cl != NULL) in drr_classify()
324 return cl; in drr_classify()
342 cl = (struct drr_class *)res.class; in drr_classify()
343 if (cl == NULL) in drr_classify()
344 cl = drr_find_class(sch, res.classid); in drr_classify()
345 return cl; in drr_classify()
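
The drr_classify() fragments above show the lookup order: the packet's priority is first tried directly as a classid, and only on a miss is the tc filter chain consulted, whose result carries either a ready class pointer or a classid still to be resolved. (In the kernel the priority shortcut is additionally gated on the priority's major number matching the qdisc handle, which the matched lines do not show.) Below is a minimal userspace sketch of that order with a stubbed filter chain; the toy_* names are illustrative, not kernel API.

#include <stdio.h>
#include <stddef.h>

struct toy_class { unsigned int classid; };

static struct toy_class classes[] = { { 0x10001 }, { 0x10002 } };

static struct toy_class *toy_find_class(unsigned int classid)
{
	for (size_t i = 0; i < sizeof(classes) / sizeof(classes[0]); i++)
		if (classes[i].classid == classid)
			return &classes[i];
	return NULL;
}

/* Stand-in for the filter chain result (cf. struct tcf_result). */
struct toy_result {
	struct toy_class *class;	/* may be NULL */
	unsigned int classid;
};

/* Stubbed filter chain: returns only a classid, as most tc filters do. */
static struct toy_result toy_filters_classify(void)
{
	return (struct toy_result){ .class = NULL, .classid = 0x10002 };
}

static struct toy_class *toy_classify(unsigned int skb_priority)
{
	/* 1. Direct hit: skb->priority already names one of our classes. */
	struct toy_class *cl = toy_find_class(skb_priority);
	if (cl != NULL)
		return cl;

	/* 2. Fall back to the filter chain: prefer its class pointer,
	 *    otherwise resolve the classid it handed back. */
	struct toy_result res = toy_filters_classify();
	cl = res.class;
	if (cl == NULL)
		cl = toy_find_class(res.classid);
	return cl;
}

int main(void)
{
	printf("priority 0x10001 -> class %#x\n", toy_classify(0x10001)->classid);
	printf("priority 0x42    -> class %#x\n", toy_classify(0x42)->classid);
	return 0;
}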
354 struct drr_class *cl; in drr_enqueue() local
357 cl = drr_classify(skb, sch, &err); in drr_enqueue()
358 if (cl == NULL) { in drr_enqueue()
365 err = qdisc_enqueue(skb, cl->qdisc, to_free); in drr_enqueue()
368 cl->qstats.drops++; in drr_enqueue()
374 if (cl->qdisc->q.qlen == 1) { in drr_enqueue()
375 list_add_tail(&cl->alist, &q->active); in drr_enqueue()
376 cl->deficit = cl->quantum; in drr_enqueue()
387 struct drr_class *cl; in drr_dequeue() local
394 cl = list_first_entry(&q->active, struct drr_class, alist); in drr_dequeue()
395 skb = cl->qdisc->ops->peek(cl->qdisc); in drr_dequeue()
397 qdisc_warn_nonwc(__func__, cl->qdisc); in drr_dequeue()
402 if (len <= cl->deficit) { in drr_dequeue()
403 cl->deficit -= len; in drr_dequeue()
404 skb = qdisc_dequeue_peeked(cl->qdisc); in drr_dequeue()
407 if (cl->qdisc->q.qlen == 0) in drr_dequeue()
408 list_del(&cl->alist); in drr_dequeue()
410 bstats_update(&cl->bstats, skb); in drr_dequeue()
417 cl->deficit += cl->quantum; in drr_dequeue()
418 list_move_tail(&cl->alist, &q->active); in drr_dequeue()
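
Taken together, the drr_enqueue() and drr_dequeue() fragments above are the whole deficit round robin mechanism: a class whose queue goes from empty to non-empty is appended to the active list with its deficit refilled to quantum; dequeue serves the head of the active list as long as the head packet fits within the remaining deficit, and otherwise adds another quantum and rotates the class to the tail. Below is a minimal userspace sketch of that accounting, with packets reduced to byte lengths; the toy_* names are illustrative, not kernel code.

#include <stdio.h>

#define MAXPKT 8
#define NCLASS 2

struct toy_class {
	const char *name;
	unsigned int quantum;		/* cf. cl->quantum */
	unsigned int deficit;		/* cf. cl->deficit */
	unsigned int pkt[MAXPKT];	/* pending packet lengths, FIFO */
	unsigned int head, cnt;
};

/* "Active list": classes with packets queued, in round robin order. */
static struct toy_class *active[NCLASS];
static unsigned int nactive;

static void activate(struct toy_class *cl)
{
	active[nactive++] = cl;
	cl->deficit = cl->quantum;	/* cf. cl->deficit = cl->quantum */
}

static void deactivate_head(void)	/* cf. list_del(&cl->alist) */
{
	for (unsigned int i = 1; i < nactive; i++)
		active[i - 1] = active[i];
	nactive--;
}

static void move_head_to_tail(void)	/* cf. list_move_tail(&cl->alist, &q->active) */
{
	struct toy_class *cl = active[0];

	deactivate_head();
	active[nactive++] = cl;
}

static void toy_enqueue(struct toy_class *cl, unsigned int len)
{
	cl->pkt[(cl->head + cl->cnt++) % MAXPKT] = len;
	if (cl->cnt == 1)		/* queue went 0 -> 1: class becomes active */
		activate(cl);
}

static int toy_dequeue(const char **who)
{
	while (nactive) {
		struct toy_class *cl = active[0];
		unsigned int len = cl->pkt[cl->head % MAXPKT];	/* peek */

		if (len <= cl->deficit) {
			cl->deficit -= len;
			cl->head++; cl->cnt--;			/* actually dequeue */
			if (cl->cnt == 0)
				deactivate_head();
			*who = cl->name;
			return (int)len;
		}
		/* Head packet does not fit: refill and go to the back of the round. */
		cl->deficit += cl->quantum;
		move_head_to_tail();
	}
	return -1;	/* nothing queued */
}

int main(void)
{
	struct toy_class a = { .name = "A", .quantum = 1500 };
	struct toy_class b = { .name = "B", .quantum = 1500 };
	const char *who;
	int len;

	toy_enqueue(&a, 4000);		/* one large packet */
	toy_enqueue(&b, 1000);		/* two small ones */
	toy_enqueue(&b, 1000);

	while ((len = toy_dequeue(&who)) >= 0)
		printf("sent %4d bytes from class %s\n", len, who);
	return 0;
}

With a quantum of 1500 bytes, class A's 4000-byte packet is held back until A has accumulated three quanta of deficit, while class B's small packets are sent in the meantime; that per-round byte budget is the fairness property DRR provides.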
443 struct drr_class *cl; in drr_reset_qdisc() local
447 hlist_for_each_entry(cl, &q->clhash.hash[i], common.hnode) { in drr_reset_qdisc()
448 if (cl->qdisc->q.qlen) in drr_reset_qdisc()
449 list_del(&cl->alist); in drr_reset_qdisc()
450 qdisc_reset(cl->qdisc); in drr_reset_qdisc()
460 struct drr_class *cl; in drr_destroy_qdisc() local
467 hlist_for_each_entry_safe(cl, next, &q->clhash.hash[i], in drr_destroy_qdisc()
469 drr_destroy_class(sch, cl); in drr_destroy_qdisc()