Lines matching references to 'cl' in net/sched/sch_hfsc.c (the HFSC packet scheduler). Each entry shows the original source line number, the matching line, and the enclosing function; lines of the file that do not mention 'cl' are elided.
186 eltree_insert(struct hfsc_class *cl) in eltree_insert() argument
188 struct rb_node **p = &cl->sched->eligible.rb_node; in eltree_insert()
195 if (cl->cl_e >= cl1->cl_e) in eltree_insert()
200 rb_link_node(&cl->el_node, parent, p); in eltree_insert()
201 rb_insert_color(&cl->el_node, &cl->sched->eligible); in eltree_insert()
205 eltree_remove(struct hfsc_class *cl) in eltree_remove() argument
207 rb_erase(&cl->el_node, &cl->sched->eligible); in eltree_remove()
211 eltree_update(struct hfsc_class *cl) in eltree_update() argument
213 eltree_remove(cl); in eltree_update()
214 eltree_insert(cl); in eltree_update()
221 struct hfsc_class *p, *cl = NULL; in eltree_get_mindl() local
228 if (cl == NULL || p->cl_d < cl->cl_d) in eltree_get_mindl()
229 cl = p; in eltree_get_mindl()
231 return cl; in eltree_get_mindl()
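The eligible tree holds every backlogged class that has a real-time service curve, keyed by eligible time cl_e; eltree_get_mindl() then scans only the classes whose cl_e has already passed and returns the one with the smallest deadline cl_d. All three rbtrees in this file (eligible, vt, cf) share one insert pattern, sketched below in hedged userspace C: ties descend to the right so classes with equal keys keep their insertion order, and the kernel's rb_insert_color() rebalancing is omitted. The struct and names are illustrative, not the kernel's.

#include <stdint.h>
#include <stddef.h>

/* Toy stand-in for a class linked into one of the rbtrees; 'key'
 * plays the role of cl_e (eligible), cl_vt (vt) or cl_f (cf). */
struct tnode {
        uint64_t key;
        struct tnode *left, *right;
};

/* Same descend rule as eltree_insert()/vttree_insert()/cftree_insert():
 * '>=' sends equal keys right, preserving FIFO order among ties.
 * Rebalancing (rb_insert_color() in the kernel) is left out. */
static void tree_insert(struct tnode **root, struct tnode *n)
{
        struct tnode **p = root;

        while (*p) {
                if (n->key >= (*p)->key)
                        p = &(*p)->right;
                else
                        p = &(*p)->left;
        }
        n->left = n->right = NULL;
        *p = n;
}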
251 vttree_insert(struct hfsc_class *cl) in vttree_insert() argument
253 struct rb_node **p = &cl->cl_parent->vt_tree.rb_node; in vttree_insert()
260 if (cl->cl_vt >= cl1->cl_vt) in vttree_insert()
265 rb_link_node(&cl->vt_node, parent, p); in vttree_insert()
266 rb_insert_color(&cl->vt_node, &cl->cl_parent->vt_tree); in vttree_insert()
270 vttree_remove(struct hfsc_class *cl) in vttree_remove() argument
272 rb_erase(&cl->vt_node, &cl->cl_parent->vt_tree); in vttree_remove()
276 vttree_update(struct hfsc_class *cl) in vttree_update() argument
278 vttree_remove(cl); in vttree_update()
279 vttree_insert(cl); in vttree_update()
283 vttree_firstfit(struct hfsc_class *cl, u64 cur_time) in vttree_firstfit() argument
288 for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) { in vttree_firstfit()
300 vttree_get_minvt(struct hfsc_class *cl, u64 cur_time) in vttree_get_minvt() argument
303 if (cl->cl_cfmin > cur_time) in vttree_get_minvt()
306 while (cl->level > 0) { in vttree_get_minvt()
307 cl = vttree_firstfit(cl, cur_time); in vttree_get_minvt()
308 if (cl == NULL) in vttree_get_minvt()
313 if (cl->cl_parent->cl_cvtmin < cl->cl_vt) in vttree_get_minvt()
314 cl->cl_parent->cl_cvtmin = cl->cl_vt; in vttree_get_minvt()
316 return cl; in vttree_get_minvt()
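vttree_get_minvt() walks from an inner class down to a leaf. The cl_cfmin gate at the top rejects a whole subtree whose upper limit has not yet arrived; below that, vttree_firstfit() scans the children in virtual-time order and takes the first one whose fit time cl_f has passed, and the parent's cl_cvtmin is dragged up to the chosen child's vt on the way out. A minimal sketch of the first-fit scan, with a sorted array standing in for the rbtree walk (illustrative names):

#include <stdint.h>
#include <stddef.h>

struct vchild {
        uint64_t vt;    /* virtual time, the sort key */
        uint64_t f;     /* fit time: earliest allowed service */
};

/* First child in vt order whose fit time has passed, as in
 * vttree_firstfit(); NULL means no child may be served yet. */
static struct vchild *first_fit(struct vchild *v, size_t n, uint64_t now)
{
        size_t i;

        for (i = 0; i < n; i++)         /* v[] sorted ascending by vt */
                if (v[i].f <= now)
                        return &v[i];
        return NULL;
}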
320 cftree_insert(struct hfsc_class *cl) in cftree_insert() argument
322 struct rb_node **p = &cl->cl_parent->cf_tree.rb_node; in cftree_insert()
329 if (cl->cl_f >= cl1->cl_f) in cftree_insert()
334 rb_link_node(&cl->cf_node, parent, p); in cftree_insert()
335 rb_insert_color(&cl->cf_node, &cl->cl_parent->cf_tree); in cftree_insert()
339 cftree_remove(struct hfsc_class *cl) in cftree_remove() argument
341 rb_erase(&cl->cf_node, &cl->cl_parent->cf_tree); in cftree_remove()
345 cftree_update(struct hfsc_class *cl) in cftree_update() argument
347 cftree_remove(cl); in cftree_update()
348 cftree_insert(cl); in cftree_update()
610 init_ed(struct hfsc_class *cl, unsigned int next_len) in init_ed() argument
615 rtsc_min(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul); in init_ed()
622 cl->cl_eligible = cl->cl_deadline; in init_ed()
623 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) { in init_ed()
624 cl->cl_eligible.dx = 0; in init_ed()
625 cl->cl_eligible.dy = 0; in init_ed()
629 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul); in init_ed()
630 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in init_ed()
632 eltree_insert(cl); in init_ed()
636 update_ed(struct hfsc_class *cl, unsigned int next_len) in update_ed() argument
638 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul); in update_ed()
639 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in update_ed()
641 eltree_update(cl); in update_ed()
645 update_d(struct hfsc_class *cl, unsigned int next_len) in update_d() argument
647 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in update_d()
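rtsc_y2x() inverts a two-segment service curve: given an amount of cumulative service y, it returns the earliest time x at which the curve reaches y. init_ed()/update_ed() use it to derive the class's eligible time (from cl_cumul, the real-time service already received) and its deadline (from cl_cumul plus the length of the head packet); update_d() refreshes only the deadline. A hedged sketch of the inversion in plain doubles; the kernel works in fixed point with precomputed inverse slopes and special-cases a zero-length first segment:

/* Two-segment service curve: slope sm1 over the first (dx, dy)
 * piece, then slope sm2; (x, y) is the curve's origin. Field names
 * mirror the kernel's runtime_sc, the arithmetic does not. */
struct curve {
        double x, y;
        double sm1, dx, dy;     /* first segment */
        double sm2;             /* long-term slope */
};

/* Earliest time at which cumulative service 'yy' is reached, i.e.
 * the y-to-x inversion done by rtsc_y2x(). Assumes sm1, sm2 > 0;
 * the kernel handles dy == 0 explicitly. */
static double y2x(const struct curve *sc, double yy)
{
        if (yy <= sc->y)
                return sc->x;
        if (yy <= sc->y + sc->dy)
                return sc->x + (yy - sc->y) / sc->sm1;
        return sc->x + sc->dx + (yy - sc->y - sc->dy) / sc->sm2;
}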
651 update_cfmin(struct hfsc_class *cl) in update_cfmin() argument
653 struct rb_node *n = rb_first(&cl->cf_tree); in update_cfmin()
657 cl->cl_cfmin = 0; in update_cfmin()
661 cl->cl_cfmin = p->cl_f; in update_cfmin()
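The cf tree orders a parent's children by fit time cl_f, so update_cfmin() only has to read rb_first(): the parent's cl_cfmin is the smallest fit time among its children, or 0 when none are queued. In toy form (hypothetical helper over a sorted array):

#include <stdint.h>
#include <stddef.h>

/* cl_cfmin as computed by update_cfmin(): the head of the cf tree,
 * i.e. the minimum child fit time, or 0 with no active children. */
static uint64_t cfmin(const uint64_t *f_sorted, size_t n)
{
        return n ? f_sorted[0] : 0;
}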
665 init_vf(struct hfsc_class *cl, unsigned int len) in init_vf() argument
674 for (; cl->cl_parent != NULL; cl = cl->cl_parent) { in init_vf()
675 if (go_active && cl->cl_nactive++ == 0) in init_vf()
681 n = rb_last(&cl->cl_parent->vt_tree); in init_vf()
690 if (cl->cl_parent->cl_cvtmin != 0) in init_vf()
691 vt = (cl->cl_parent->cl_cvtmin + vt)/2; in init_vf()
693 if (cl->cl_parent->cl_vtperiod != in init_vf()
694 cl->cl_parentperiod || vt > cl->cl_vt) in init_vf()
695 cl->cl_vt = vt; in init_vf()
703 cl->cl_vt = cl->cl_parent->cl_cvtoff; in init_vf()
704 cl->cl_parent->cl_cvtmin = 0; in init_vf()
708 rtsc_min(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total); in init_vf()
709 cl->cl_vtadj = 0; in init_vf()
711 cl->cl_vtperiod++; /* increment vt period */ in init_vf()
712 cl->cl_parentperiod = cl->cl_parent->cl_vtperiod; in init_vf()
713 if (cl->cl_parent->cl_nactive == 0) in init_vf()
714 cl->cl_parentperiod++; in init_vf()
715 cl->cl_f = 0; in init_vf()
717 vttree_insert(cl); in init_vf()
718 cftree_insert(cl); in init_vf()
720 if (cl->cl_flags & HFSC_USC) { in init_vf()
726 rtsc_min(&cl->cl_ulimit, &cl->cl_usc, cur_time, in init_vf()
727 cl->cl_total); in init_vf()
729 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit, in init_vf()
730 cl->cl_total); in init_vf()
734 f = max(cl->cl_myf, cl->cl_cfmin); in init_vf()
735 if (f != cl->cl_f) { in init_vf()
736 cl->cl_f = f; in init_vf()
737 cftree_update(cl); in init_vf()
739 update_cfmin(cl->cl_parent); in init_vf()
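When a class becomes active and its parent already has active children, init_vf() does not simply hand it the largest sibling vt: it pulls that value halfway back toward the parent's cl_cvtmin, so the newcomer neither starves its siblings nor gets an unfair head start. When it is the first child to go active, it instead restarts from the parent's cl_cvtoff and resets cl_cvtmin. A sketch of the first case (illustrative names):

#include <stdint.h>

/* vt chosen in init_vf() when siblings are already active: start
 * from the maximum sibling vt and average it with the parent's
 * cl_cvtmin when that is known. */
static uint64_t pick_vt(uint64_t max_sibling_vt, uint64_t cvtmin)
{
        uint64_t vt = max_sibling_vt;

        if (cvtmin != 0)
                vt = (cvtmin + vt) / 2;
        return vt;
}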
744 update_vf(struct hfsc_class *cl, unsigned int len, u64 cur_time) in update_vf() argument
749 if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC) in update_vf()
752 for (; cl->cl_parent != NULL; cl = cl->cl_parent) { in update_vf()
753 cl->cl_total += len; in update_vf()
755 if (!(cl->cl_flags & HFSC_FSC) || cl->cl_nactive == 0) in update_vf()
758 if (go_passive && --cl->cl_nactive == 0) in update_vf()
764 cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total) + cl->cl_vtadj; in update_vf()
771 if (cl->cl_vt < cl->cl_parent->cl_cvtmin) { in update_vf()
772 cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt; in update_vf()
773 cl->cl_vt = cl->cl_parent->cl_cvtmin; in update_vf()
780 if (cl->cl_vt > cl->cl_parent->cl_cvtoff) in update_vf()
781 cl->cl_parent->cl_cvtoff = cl->cl_vt; in update_vf()
784 vttree_remove(cl); in update_vf()
786 cftree_remove(cl); in update_vf()
787 update_cfmin(cl->cl_parent); in update_vf()
793 vttree_update(cl); in update_vf()
796 if (cl->cl_flags & HFSC_USC) { in update_vf()
797 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit, cl->cl_total); in update_vf()
799 cl->cl_myf = cl->cl_myfadj + rtsc_y2x(&cl->cl_ulimit, in update_vf()
800 cl->cl_total); in update_vf()
814 if (cl->cl_myf < myf_bound) { in update_vf()
815 delta = cur_time - cl->cl_myf; in update_vf()
816 cl->cl_myfadj += delta; in update_vf()
817 cl->cl_myf += delta; in update_vf()
822 f = max(cl->cl_myf, cl->cl_cfmin); in update_vf()
823 if (f != cl->cl_f) { in update_vf()
824 cl->cl_f = f; in update_vf()
825 cftree_update(cl); in update_vf()
826 update_cfmin(cl->cl_parent); in update_vf()
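update_vf() recomputes vt from the virtual curve after each transmission and, for link-sharing fairness, never lets a class's vt fall behind its parent's cl_cvtmin; the shortfall is accumulated in cl_vtadj and re-applied on every update. When the class goes passive, its final vt is recorded as the parent's cl_cvtoff (kept as a running maximum) so a later init_vf() can resume from there. The clamp, sketched standalone:

#include <stdint.h>

struct vstate {
        uint64_t vt;    /* current virtual time */
        uint64_t vtadj; /* accumulated upward adjustment */
};

/* The cl_cvtmin clamp from update_vf(): remember how far vt had to
 * be pushed so later rtsc_y2x() results get the same offset. */
static void clamp_vt(struct vstate *c, uint64_t parent_cvtmin)
{
        if (c->vt < parent_cvtmin) {
                c->vtadj += parent_cvtmin - c->vt;
                c->vt = parent_cvtmin;
        }
}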
848 hfsc_adjust_levels(struct hfsc_class *cl) in hfsc_adjust_levels() argument
855 list_for_each_entry(p, &cl->children, siblings) { in hfsc_adjust_levels()
859 cl->level = level; in hfsc_adjust_levels()
860 } while ((cl = cl->cl_parent) != NULL); in hfsc_adjust_levels()
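hfsc_adjust_levels() re-derives levels bottom-up after the class tree changes: a class's level is one more than its deepest child (leaves are level 0), and the recomputation repeats for each ancestor up to the root. The same walk with an array standing in for the kernel's sibling list (illustrative types):

#include <stddef.h>

struct tclass {
        unsigned int level;
        unsigned int nchildren;
        struct tclass **children;
        struct tclass *parent;
};

/* Bottom-up level fixup, as in hfsc_adjust_levels(). */
static void adjust_levels(struct tclass *cl)
{
        do {
                unsigned int i, level = 0;

                for (i = 0; i < cl->nchildren; i++)
                        if (cl->children[i]->level >= level)
                                level = cl->children[i]->level + 1;
                cl->level = level;
        } while ((cl = cl->parent) != NULL);
}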
876 hfsc_change_rsc(struct hfsc_class *cl, struct tc_service_curve *rsc, in hfsc_change_rsc() argument
879 sc2isc(rsc, &cl->cl_rsc); in hfsc_change_rsc()
880 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul); in hfsc_change_rsc()
881 cl->cl_eligible = cl->cl_deadline; in hfsc_change_rsc()
882 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) { in hfsc_change_rsc()
883 cl->cl_eligible.dx = 0; in hfsc_change_rsc()
884 cl->cl_eligible.dy = 0; in hfsc_change_rsc()
886 cl->cl_flags |= HFSC_RSC; in hfsc_change_rsc()
890 hfsc_change_fsc(struct hfsc_class *cl, struct tc_service_curve *fsc) in hfsc_change_fsc() argument
892 sc2isc(fsc, &cl->cl_fsc); in hfsc_change_fsc()
893 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total); in hfsc_change_fsc()
894 cl->cl_flags |= HFSC_FSC; in hfsc_change_fsc()
898 hfsc_change_usc(struct hfsc_class *cl, struct tc_service_curve *usc, in hfsc_change_usc() argument
901 sc2isc(usc, &cl->cl_usc); in hfsc_change_usc()
902 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, cur_time, cl->cl_total); in hfsc_change_usc()
903 cl->cl_flags |= HFSC_USC; in hfsc_change_usc()
907 hfsc_upgrade_rt(struct hfsc_class *cl) in hfsc_upgrade_rt() argument
909 cl->cl_fsc = cl->cl_rsc; in hfsc_upgrade_rt()
910 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total); in hfsc_upgrade_rt()
911 cl->cl_flags |= HFSC_FSC; in hfsc_upgrade_rt()
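Both init_ed() and hfsc_change_rsc() derive the eligible curve from the deadline curve the same way: a concave curve (sm1 > sm2, a burst segment followed by a lower long-term slope) keeps its two-segment shape, while a convex one (sm1 <= sm2) drops the first segment so eligibility follows the single long-term slope. Sketched with the struct curve from the rtsc_y2x example above:

/* Eligible curve from the deadline curve, as on the sm1 <= sm2
 * branches in init_ed() and hfsc_change_rsc(). */
static void make_eligible(struct curve *el, const struct curve *dl)
{
        *el = *dl;
        if (dl->sm1 <= dl->sm2) {
                el->dx = 0;     /* convex: no burst segment */
                el->dy = 0;
        }
}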
926 struct hfsc_class *cl = (struct hfsc_class *)*arg; in hfsc_change_class() local
960 if (cl != NULL) { in hfsc_change_class()
964 if (cl->cl_parent && in hfsc_change_class()
965 cl->cl_parent->cl_common.classid != parentid) in hfsc_change_class()
967 if (cl->cl_parent == NULL && parentid != TC_H_ROOT) in hfsc_change_class()
973 err = gen_replace_estimator(&cl->bstats, NULL, in hfsc_change_class()
974 &cl->rate_est, in hfsc_change_class()
983 old_flags = cl->cl_flags; in hfsc_change_class()
986 hfsc_change_rsc(cl, rsc, cur_time); in hfsc_change_class()
988 hfsc_change_fsc(cl, fsc); in hfsc_change_class()
990 hfsc_change_usc(cl, usc, cur_time); in hfsc_change_class()
992 if (cl->qdisc->q.qlen != 0) { in hfsc_change_class()
993 int len = qdisc_peek_len(cl->qdisc); in hfsc_change_class()
995 if (cl->cl_flags & HFSC_RSC) { in hfsc_change_class()
997 update_ed(cl, len); in hfsc_change_class()
999 init_ed(cl, len); in hfsc_change_class()
1002 if (cl->cl_flags & HFSC_FSC) { in hfsc_change_class()
1004 update_vf(cl, 0, cur_time); in hfsc_change_class()
1006 init_vf(cl, len); in hfsc_change_class()
1032 cl = kzalloc(sizeof(struct hfsc_class), GFP_KERNEL); in hfsc_change_class()
1033 if (cl == NULL) in hfsc_change_class()
1036 err = tcf_block_get(&cl->block, &cl->filter_list, sch, extack); in hfsc_change_class()
1038 kfree(cl); in hfsc_change_class()
1043 err = gen_new_estimator(&cl->bstats, NULL, &cl->rate_est, in hfsc_change_class()
1048 tcf_block_put(cl->block); in hfsc_change_class()
1049 kfree(cl); in hfsc_change_class()
1055 hfsc_change_rsc(cl, rsc, 0); in hfsc_change_class()
1057 hfsc_change_fsc(cl, fsc); in hfsc_change_class()
1059 hfsc_change_usc(cl, usc, 0); in hfsc_change_class()
1061 cl->cl_common.classid = classid; in hfsc_change_class()
1062 cl->sched = q; in hfsc_change_class()
1063 cl->cl_parent = parent; in hfsc_change_class()
1064 cl->qdisc = qdisc_create_dflt(sch->dev_queue, &pfifo_qdisc_ops, in hfsc_change_class()
1066 if (cl->qdisc == NULL) in hfsc_change_class()
1067 cl->qdisc = &noop_qdisc; in hfsc_change_class()
1069 qdisc_hash_add(cl->qdisc, true); in hfsc_change_class()
1070 INIT_LIST_HEAD(&cl->children); in hfsc_change_class()
1071 cl->vt_tree = RB_ROOT; in hfsc_change_class()
1072 cl->cf_tree = RB_ROOT; in hfsc_change_class()
1081 qdisc_class_hash_insert(&q->clhash, &cl->cl_common); in hfsc_change_class()
1082 list_add_tail(&cl->siblings, &parent->children); in hfsc_change_class()
1090 *arg = (unsigned long)cl; in hfsc_change_class()
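hfsc_change_class() serves both modification (cl != NULL on entry) and creation. On the modification path, if the class is backlogged when its curves change, the visible branches re-prime the scheduling state so the new curves take effect at once: update if the class was already under that curve type (per the old_flags snapshot taken above), initialize otherwise. A hedged reconstruction of that fixup, inferred from the visible update_ed/init_ed and update_vf/init_vf branches and using this file's real helper names:

/* Assumed shape of the backlogged-class fixup in hfsc_change_class();
 * 'old_flags' is the pre-change cl->cl_flags snapshot and 'len' the
 * qdisc_peek_len() result from the visible lines. */
static void reinit_backlogged(struct hfsc_class *cl, unsigned int old_flags,
                              int len, u64 cur_time)
{
        if (cl->cl_flags & HFSC_RSC) {
                if (old_flags & HFSC_RSC)
                        update_ed(cl, len);
                else
                        init_ed(cl, len);
        }
        if (cl->cl_flags & HFSC_FSC) {
                if (old_flags & HFSC_FSC)
                        update_vf(cl, 0, cur_time);
                else
                        init_vf(cl, len);
        }
}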
1095 hfsc_destroy_class(struct Qdisc *sch, struct hfsc_class *cl) in hfsc_destroy_class() argument
1099 tcf_block_put(cl->block); in hfsc_destroy_class()
1100 qdisc_put(cl->qdisc); in hfsc_destroy_class()
1101 gen_kill_estimator(&cl->rate_est); in hfsc_destroy_class()
1102 if (cl != &q->root) in hfsc_destroy_class()
1103 kfree(cl); in hfsc_destroy_class()
1111 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_delete_class() local
1113 if (cl->level > 0 || cl->filter_cnt > 0 || cl == &q->root) in hfsc_delete_class()
1118 list_del(&cl->siblings); in hfsc_delete_class()
1119 hfsc_adjust_levels(cl->cl_parent); in hfsc_delete_class()
1121 qdisc_purge_queue(cl->qdisc); in hfsc_delete_class()
1122 qdisc_class_hash_remove(&q->clhash, &cl->cl_common); in hfsc_delete_class()
1126 hfsc_destroy_class(sch, cl); in hfsc_delete_class()
1134 struct hfsc_class *head, *cl; in hfsc_classify() local
1140 (cl = hfsc_find_class(skb->priority, sch)) != NULL) in hfsc_classify()
1141 if (cl->level == 0) in hfsc_classify()
1142 return cl; in hfsc_classify()
1159 cl = (struct hfsc_class *)res.class; in hfsc_classify()
1160 if (!cl) { in hfsc_classify()
1161 cl = hfsc_find_class(res.classid, sch); in hfsc_classify()
1162 if (!cl) in hfsc_classify()
1164 if (cl->level >= head->level) in hfsc_classify()
1168 if (cl->level == 0) in hfsc_classify()
1169 return cl; /* hit leaf class */ in hfsc_classify()
1172 tcf = rcu_dereference_bh(cl->filter_list); in hfsc_classify()
1173 head = cl; in hfsc_classify()
1177 cl = hfsc_find_class(TC_H_MAKE(TC_H_MAJ(sch->handle), q->defcls), sch); in hfsc_classify()
1178 if (cl == NULL || cl->level > 0) in hfsc_classify()
1181 return cl; in hfsc_classify()
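hfsc_classify() tries skb->priority as a direct leaf classid first, then walks the tc filter chain: each verdict must point strictly downwards (the cl->level >= head->level check rejects sideways or upward verdicts, which could loop), inner classes get to run their own filter lists, and only a level-0 leaf is returned. If classification fails, the qdisc's default class (q->defcls under the qdisc's major handle) is used, provided it is a leaf. The descent in toy form, where NULL means "fall back to the default class"; the structures are illustrative, not the kernel filter API:

#include <stddef.h>

struct tcls {
        unsigned int level;     /* 0 for leaves */
        struct tcls *verdict;   /* class this node's filters select */
};

/* Downward-only descent as in hfsc_classify(). */
static struct tcls *classify(struct tcls *head)
{
        struct tcls *cl;

        for (cl = head->verdict; cl != NULL; cl = cl->verdict) {
                if (cl->level >= head->level)
                        return NULL;    /* filters may only point down */
                if (cl->level == 0)
                        return cl;      /* hit leaf class */
                head = cl;              /* descend, consult its filters */
        }
        return NULL;
}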
1188 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_graft_class() local
1190 if (cl->level > 0) in hfsc_graft_class()
1194 cl->cl_common.classid, NULL); in hfsc_graft_class()
1199 *old = qdisc_replace(sch, new, &cl->qdisc); in hfsc_graft_class()
1206 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_class_leaf() local
1208 if (cl->level == 0) in hfsc_class_leaf()
1209 return cl->qdisc; in hfsc_class_leaf()
1217 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_qlen_notify() local
1222 update_vf(cl, 0, 0); in hfsc_qlen_notify()
1223 if (cl->cl_flags & HFSC_RSC) in hfsc_qlen_notify()
1224 eltree_remove(cl); in hfsc_qlen_notify()
1237 struct hfsc_class *cl = hfsc_find_class(classid, sch); in hfsc_bind_tcf() local
1239 if (cl != NULL) { in hfsc_bind_tcf()
1240 if (p != NULL && p->level <= cl->level) in hfsc_bind_tcf()
1242 cl->filter_cnt++; in hfsc_bind_tcf()
1245 return (unsigned long)cl; in hfsc_bind_tcf()
1251 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_unbind_tcf() local
1253 cl->filter_cnt--; in hfsc_unbind_tcf()
1260 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_tcf_block() local
1262 if (cl == NULL) in hfsc_tcf_block()
1263 cl = &q->root; in hfsc_tcf_block()
1265 return cl->block; in hfsc_tcf_block()
1286 hfsc_dump_curves(struct sk_buff *skb, struct hfsc_class *cl) in hfsc_dump_curves() argument
1288 if ((cl->cl_flags & HFSC_RSC) && in hfsc_dump_curves()
1289 (hfsc_dump_sc(skb, TCA_HFSC_RSC, &cl->cl_rsc) < 0)) in hfsc_dump_curves()
1292 if ((cl->cl_flags & HFSC_FSC) && in hfsc_dump_curves()
1293 (hfsc_dump_sc(skb, TCA_HFSC_FSC, &cl->cl_fsc) < 0)) in hfsc_dump_curves()
1296 if ((cl->cl_flags & HFSC_USC) && in hfsc_dump_curves()
1297 (hfsc_dump_sc(skb, TCA_HFSC_USC, &cl->cl_usc) < 0)) in hfsc_dump_curves()
1310 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_dump_class() local
1313 tcm->tcm_parent = cl->cl_parent ? cl->cl_parent->cl_common.classid : in hfsc_dump_class()
1315 tcm->tcm_handle = cl->cl_common.classid; in hfsc_dump_class()
1316 if (cl->level == 0) in hfsc_dump_class()
1317 tcm->tcm_info = cl->qdisc->handle; in hfsc_dump_class()
1322 if (hfsc_dump_curves(skb, cl) < 0) in hfsc_dump_class()
1335 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_dump_class_stats() local
1339 qdisc_qstats_qlen_backlog(cl->qdisc, &qlen, &cl->qstats.backlog); in hfsc_dump_class_stats()
1340 xstats.level = cl->level; in hfsc_dump_class_stats()
1341 xstats.period = cl->cl_vtperiod; in hfsc_dump_class_stats()
1342 xstats.work = cl->cl_total; in hfsc_dump_class_stats()
1343 xstats.rtwork = cl->cl_cumul; in hfsc_dump_class_stats()
1345 if (gnet_stats_copy_basic(qdisc_root_sleeping_running(sch), d, NULL, &cl->bstats) < 0 || in hfsc_dump_class_stats()
1346 gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 || in hfsc_dump_class_stats()
1347 gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0) in hfsc_dump_class_stats()
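The xstats block exported by hfsc_dump_class_stats() is small but tells the two service curves apart: 'work' counts all service received under the link-sharing curve (cl_total), 'rtwork' counts only real-time service (cl_cumul), and 'period' is the vt backlog-period counter. Field layout sketched to mirror the UAPI struct, with names taken from the assignments above; treat the exact struct as an assumption:

#include <stdint.h>

/* Mirror of the stats filled in hfsc_dump_class_stats(); assumed
 * to match struct tc_hfsc_stats in the pkt_sched UAPI. */
struct hfsc_xstats_view {
        uint64_t work;          /* total work done: cl_total    */
        uint64_t rtwork;        /* real-time work: cl_cumul     */
        uint32_t period;        /* backlog periods: cl_vtperiod */
        uint32_t level;         /* class depth, 0 for leaves    */
};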
1359 struct hfsc_class *cl; in hfsc_walk() local
1366 hlist_for_each_entry(cl, &q->clhash.hash[i], in hfsc_walk()
1372 if (arg->fn(sch, (unsigned long)cl, arg) < 0) { in hfsc_walk()
1385 struct hfsc_class *cl; in hfsc_schedule_watchdog() local
1388 cl = eltree_get_minel(q); in hfsc_schedule_watchdog()
1389 if (cl) in hfsc_schedule_watchdog()
1390 next_time = cl->cl_e; in hfsc_schedule_watchdog()
1460 hfsc_reset_class(struct hfsc_class *cl) in hfsc_reset_class() argument
1462 cl->cl_total = 0; in hfsc_reset_class()
1463 cl->cl_cumul = 0; in hfsc_reset_class()
1464 cl->cl_d = 0; in hfsc_reset_class()
1465 cl->cl_e = 0; in hfsc_reset_class()
1466 cl->cl_vt = 0; in hfsc_reset_class()
1467 cl->cl_vtadj = 0; in hfsc_reset_class()
1468 cl->cl_cvtmin = 0; in hfsc_reset_class()
1469 cl->cl_cvtoff = 0; in hfsc_reset_class()
1470 cl->cl_vtperiod = 0; in hfsc_reset_class()
1471 cl->cl_parentperiod = 0; in hfsc_reset_class()
1472 cl->cl_f = 0; in hfsc_reset_class()
1473 cl->cl_myf = 0; in hfsc_reset_class()
1474 cl->cl_cfmin = 0; in hfsc_reset_class()
1475 cl->cl_nactive = 0; in hfsc_reset_class()
1477 cl->vt_tree = RB_ROOT; in hfsc_reset_class()
1478 cl->cf_tree = RB_ROOT; in hfsc_reset_class()
1479 qdisc_reset(cl->qdisc); in hfsc_reset_class()
1481 if (cl->cl_flags & HFSC_RSC) in hfsc_reset_class()
1482 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, 0, 0); in hfsc_reset_class()
1483 if (cl->cl_flags & HFSC_FSC) in hfsc_reset_class()
1484 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, 0, 0); in hfsc_reset_class()
1485 if (cl->cl_flags & HFSC_USC) in hfsc_reset_class()
1486 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, 0, 0); in hfsc_reset_class()
1493 struct hfsc_class *cl; in hfsc_reset_qdisc() local
1497 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) in hfsc_reset_qdisc()
1498 hfsc_reset_class(cl); in hfsc_reset_qdisc()
1509 struct hfsc_class *cl; in hfsc_destroy_qdisc() local
1513 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) { in hfsc_destroy_qdisc()
1514 tcf_block_put(cl->block); in hfsc_destroy_qdisc()
1515 cl->block = NULL; in hfsc_destroy_qdisc()
1519 hlist_for_each_entry_safe(cl, next, &q->clhash.hash[i], in hfsc_destroy_qdisc()
1521 hfsc_destroy_class(sch, cl); in hfsc_destroy_qdisc()
1548 struct hfsc_class *cl; in hfsc_enqueue() local
1552 cl = hfsc_classify(skb, sch, &err); in hfsc_enqueue()
1553 if (cl == NULL) { in hfsc_enqueue()
1560 first = !cl->qdisc->q.qlen; in hfsc_enqueue()
1561 err = qdisc_enqueue(skb, cl->qdisc, to_free); in hfsc_enqueue()
1564 cl->qstats.drops++; in hfsc_enqueue()
1571 if (cl->cl_flags & HFSC_RSC) in hfsc_enqueue()
1572 init_ed(cl, len); in hfsc_enqueue()
1573 if (cl->cl_flags & HFSC_FSC) in hfsc_enqueue()
1574 init_vf(cl, len); in hfsc_enqueue()
1580 if (cl->cl_flags & HFSC_RSC) in hfsc_enqueue()
1581 cl->qdisc->ops->peek(cl->qdisc); in hfsc_enqueue()
1595 struct hfsc_class *cl; in hfsc_dequeue() local
1611 cl = eltree_get_mindl(q, cur_time); in hfsc_dequeue()
1612 if (cl) { in hfsc_dequeue()
1619 cl = vttree_get_minvt(&q->root, cur_time); in hfsc_dequeue()
1620 if (cl == NULL) { in hfsc_dequeue()
1627 skb = qdisc_dequeue_peeked(cl->qdisc); in hfsc_dequeue()
1629 qdisc_warn_nonwc("HFSC", cl->qdisc); in hfsc_dequeue()
1633 bstats_update(&cl->bstats, skb); in hfsc_dequeue()
1634 update_vf(cl, qdisc_pkt_len(skb), cur_time); in hfsc_dequeue()
1636 cl->cl_cumul += qdisc_pkt_len(skb); in hfsc_dequeue()
1638 if (cl->cl_flags & HFSC_RSC) { in hfsc_dequeue()
1639 if (cl->qdisc->q.qlen != 0) { in hfsc_dequeue()
1641 next_len = qdisc_peek_len(cl->qdisc); in hfsc_dequeue()
1643 update_ed(cl, next_len); in hfsc_dequeue()
1645 update_d(cl, next_len); in hfsc_dequeue()
1648 eltree_remove(cl); in hfsc_dequeue()
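hfsc_dequeue() embodies HFSC's dual criteria: the real-time test runs first (eltree_get_mindl() returns an already-eligible class with the earliest deadline, which is what guarantees the rt curves), and only when no class is eligible does the link-sharing test pick the minimum-vt leaf under the root (vttree_get_minvt()). Only real-time service advances cl_cumul, which is why 'rtwork' above can lag 'work'. The selection, sketched with hypothetical stand-ins for the two lookups:

#include <stdint.h>
#include <stddef.h>

struct leaf;

/* Hypothetical stand-ins for eltree_get_mindl()/vttree_get_minvt(). */
struct leaf *min_deadline_eligible(uint64_t now);
struct leaf *min_vt_fit(uint64_t now);

/* Real-time criterion first, link-sharing second, as at the top of
 * hfsc_dequeue(); *realtime tells the caller whether cl_cumul (the
 * real-time work counter) should be advanced for this packet. */
static struct leaf *pick_class(uint64_t now, int *realtime)
{
        struct leaf *cl = min_deadline_eligible(now);

        *realtime = (cl != NULL);
        return cl ? cl : min_vt_fit(now);
}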