/net/sched/ |
D | sch_atm.c |
    50  u32 classid;  /* x:y type ID */    member
    82  static inline struct atm_flow_data *lookup_flow(struct Qdisc *sch, u32 classid)    in lookup_flow()  argument
    88  if (flow->classid == classid)    in lookup_flow()
   120  static unsigned long atm_tc_get(struct Qdisc *sch, u32 classid)    in atm_tc_get()  argument
   125  pr_debug("atm_tc_get(sch %p,[qdisc %p],classid %x)\n", sch, p, classid);    in atm_tc_get()
   126  flow = lookup_flow(sch, classid);    in atm_tc_get()
   134  unsigned long parent, u32 classid)    in atm_tc_bind_filter()  argument
   136  return atm_tc_get(sch, classid);    in atm_tc_bind_filter()
   204  static int atm_tc_change(struct Qdisc *sch, u32 classid, u32 parent,    in atm_tc_change()  argument
   217  "flow %p,opt %p)\n", sch, p, classid, parent, flow, opt);    in atm_tc_change()
   [all …]
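The "x:y type ID" comment at line 50 above is the key to every entry in this listing: a classid is a single 32-bit handle with the major (qdisc) number in the upper 16 bits and the minor (class) number in the lower 16, and helpers such as lookup_flow()/atm_tc_get() resolve such a handle to the in-kernel class object. A minimal user-space sketch of the encoding, with the TC_H_* helpers copied so it compiles on its own (in-kernel code gets them from include/linux/pkt_sched.h):

#include <stdio.h>
#include <stdint.h>

/* Mirrors the handle macros from include/linux/pkt_sched.h: a classid is a
 * 32-bit "major:minor" pair, major in the top 16 bits, minor in the bottom. */
#define TC_H_MAJ_MASK 0xFFFF0000U
#define TC_H_MIN_MASK 0x0000FFFFU
#define TC_H_MAJ(h)   ((h) & TC_H_MAJ_MASK)
#define TC_H_MIN(h)   ((h) & TC_H_MIN_MASK)
#define TC_H_MAKE(maj, min) (((maj) & TC_H_MAJ_MASK) | ((min) & TC_H_MIN_MASK))

int main(void)
{
	uint32_t classid = TC_H_MAKE(0x00010000U, 5);	/* tc's "1:5" */

	printf("classid 0x%08x -> major %u, minor %u\n",
	       classid, TC_H_MAJ(classid) >> 16, TC_H_MIN(classid));
	return 0;
}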
|
D | sch_ingress.c |
    36  static unsigned long ingress_get(struct Qdisc *sch, u32 classid)    in ingress_get()  argument
    38  return TC_H_MIN(classid) + 1;    in ingress_get()
    42  unsigned long parent, u32 classid)    in ingress_bind_filter()  argument
    44  return ingress_get(sch, classid);    in ingress_bind_filter()
    51  static int ingress_change(struct Qdisc *sch, u32 classid, u32 parent,    in ingress_change()  argument
    92  skb->tc_index = TC_H_MIN(res.classid);    in ingress_enqueue()
|
D | cls_cgroup.c |
    24  u32 classid;    member
    48  cs->classid = cgrp_cls_state(cgrp->parent)->classid;    in cgrp_create()
    60  return cgrp_cls_state(cgrp)->classid;    in read_classid()
    68  cgrp_cls_state(cgrp)->classid = (u32) value;    in write_classid()
   125  if (cs->classid && tcf_em_tree_match(skb, &head->ematches, NULL)) {    in cls_cgroup_classify()
   126  res->classid = cs->classid;    in cls_cgroup_classify()
|
D | sch_drr.c |
    41  static struct drr_class *drr_find_class(struct Qdisc *sch, u32 classid)    in drr_find_class()  argument
    46  clc = qdisc_class_find(&q->clhash, classid);    in drr_find_class()
    64  static int drr_change_class(struct Qdisc *sch, u32 classid, u32 parentid,    in drr_change_class()  argument
   110  cl->common.classid = classid;    in drr_change_class()
   113  &pfifo_qdisc_ops, classid);    in drr_change_class()
   165  static unsigned long drr_get_class(struct Qdisc *sch, u32 classid)    in drr_get_class()  argument
   167  struct drr_class *cl = drr_find_class(sch, classid);    in drr_get_class()
   194  u32 classid)    in drr_bind_tcf()  argument
   196  struct drr_class *cl = drr_find_class(sch, classid);    in drr_bind_tcf()
   218  &pfifo_qdisc_ops, cl->common.classid);    in drr_graft_class()
   [all …]
|
D | sch_htb.c |
   216  if (res.classid == sch->handle)    in htb_classify()
   218  if ((cl = htb_find(res.classid, sch)) == NULL)    in htb_classify()
   250  if (cl->common.classid > c->common.classid)    in htb_add_to_id_tree()
   414  p->un.inner.last_ptr_id[prio] = cl->common.classid;    in htb_deactivate_prios()
   703  if (id > cl->common.classid) {    in htb_id_find_next_upper()
   705  } else if (id < cl->common.classid) {    in htb_id_find_next_upper()
   815  cl->common.classid);    in htb_dequeue_tree()
  1051  tcm->tcm_parent = cl->parent ? cl->parent->common.classid : TC_H_ROOT;    in htb_dump_class()
  1052  tcm->tcm_handle = cl->common.classid;    in htb_dump_class()
  1108  cl->common.classid))    in htb_graft()
   [all …]
|
D | sch_dsmark.c |
    86  static unsigned long dsmark_get(struct Qdisc *sch, u32 classid)    in dsmark_get()  argument
    89  sch, qdisc_priv(sch), classid);    in dsmark_get()
    91  return TC_H_MIN(classid) + 1;    in dsmark_get()
    95  unsigned long parent, u32 classid)    in dsmark_bind_filter()  argument
    97  return dsmark_get(sch, classid);    in dsmark_bind_filter()
   112  static int dsmark_change(struct Qdisc *sch, u32 classid, u32 parent,    in dsmark_change()  argument
   122  "arg 0x%lx\n", sch, p, classid, parent, *arg);    in dsmark_change()
   233  pr_debug("result %d class 0x%04x\n", result, res.classid);    in dsmark_enqueue()
   246  skb->tc_index = TC_H_MIN(res.classid);    in dsmark_enqueue()
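Both the ingress hit at line 92 above and the dsmark hit at line 246 store only TC_H_MIN(res.classid) in skb->tc_index, so just the minor half of the classifier's verdict survives the hand-off. A small standalone sketch of that truncation (the verdict value is made up):

#include <stdio.h>
#include <stdint.h>

#define TC_H_MIN(h) ((h) & 0x0000FFFFU)

int main(void)
{
	uint32_t res_classid = 0x00010007U;		/* classifier said "1:7" */
	uint16_t tc_index = TC_H_MIN(res_classid);	/* what reaches skb->tc_index */

	printf("res.classid 0x%08x -> tc_index %u\n", res_classid, tc_index);
	return 0;
}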
|
D | sch_cbq.c |
   180  cbq_class_lookup(struct cbq_sched_data *q, u32 classid)    in cbq_class_lookup()  argument
   184  clc = qdisc_class_find(&q->clhash, classid);    in cbq_class_lookup()
   246  if (TC_H_MAJ(res.classid))    in cbq_classify()
   247  cl = cbq_class_lookup(q, res.classid);    in cbq_classify()
   248  else if ((cl = defmap[res.classid&TC_PRIO_MAX]) == NULL)    in cbq_classify()
  1058  …printk(KERN_WARNING "CBQ: class %08x has bad quantum==%ld, repaired.\n", cl->common.classid, cl->q…    in cbq_normalize_quanta()
  1109  splitid = split->common.classid;    in cbq_change_defmap()
  1112  if (split == NULL || split->common.classid != splitid) {    in cbq_change_defmap()
  1114  if (split->common.classid == splitid)    in cbq_change_defmap()
  1379  q->link.common.classid = sch->handle;    in cbq_init()
   [all …]
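The cbq_classify() hits at lines 246-248 show the two interpretations CBQ gives a classifier verdict: a classid with a non-zero major number is looked up as a full class handle, while a bare value is used as a priority index into the split class's defmap. A standalone sketch of just that branch (the handle values are invented, and the lookup is reduced to a string so the example runs in user space):

#include <stdio.h>
#include <stdint.h>

#define TC_H_MAJ(h)  ((h) & 0xFFFF0000U)
#define TC_PRIO_MAX  15	/* as in include/linux/pkt_sched.h */

static const char *pick(uint32_t classid)
{
	/* Non-zero major: the verdict names a class directly (cbq_class_lookup()).
	 * Zero major: the low bits index the split class's defmap[]. */
	if (TC_H_MAJ(classid))
		return "cbq_class_lookup(q, res.classid)";
	return "defmap[res.classid & TC_PRIO_MAX]";
}

int main(void)
{
	printf("0x%08x -> %s\n", 0x00010002U, pick(0x00010002U));	/* "1:2" */
	printf("0x%08x -> %s\n", 0x00000003U, pick(0x00000003U));	/* bare prio 3 */
	return 0;
}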
|
D | cls_flow.c |
   332  u32 classid;    in flow_classify()  local
   351  classid = jhash2(keys, f->nkeys, f->hashrnd);    in flow_classify()
   353  classid = keys[0];    in flow_classify()
   354  classid = (classid & f->mask) ^ f->xor;    in flow_classify()
   355  classid = (classid >> f->rshift) + f->addend;    in flow_classify()
   359  classid %= f->divisor;    in flow_classify()
   362  res->classid = TC_H_MAKE(f->baseclass, f->baseclass + classid);    in flow_classify()
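Lines 351-362 above are the entire classid derivation in flow_classify(): hash or copy the selected keys, apply the configured mask, xor, rshift and addend, optionally reduce modulo the divisor, and offset the result from baseclass with TC_H_MAKE(). The same arithmetic as a standalone sketch, with invented parameter values and a fixed stand-in for the kernel's jhash2():

#include <stdio.h>
#include <stdint.h>

#define TC_H_MAJ_MASK 0xFFFF0000U
#define TC_H_MIN_MASK 0x0000FFFFU
#define TC_H_MAKE(maj, min) (((maj) & TC_H_MAJ_MASK) | ((min) & TC_H_MIN_MASK))

int main(void)
{
	/* Invented filter parameters, standing in for the flow filter's options. */
	uint32_t mask = 0xffffffffU, xor_val = 0, rshift = 0, addend = 0;
	uint32_t divisor = 1024;		/* spread flows over 1024 classes */
	uint32_t baseclass = 0x00010001U;	/* start handing out ids at "1:1" */

	uint32_t classid = 0x8a3f92c1U;		/* stand-in for jhash2(keys, nkeys, hashrnd) */

	classid = (classid & mask) ^ xor_val;
	classid = (classid >> rshift) + addend;
	if (divisor)
		classid %= divisor;

	printf("flow hashed to res.classid 0x%08x\n",
	       TC_H_MAKE(baseclass, baseclass + classid));
	return 0;
}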
|
D | cls_rsvp.h |
   197  tunnelid = f->res.classid;    in rsvp_classify()
   374  data->tgenerator = f->res.classid;    in tunnel_recycle()
   442  f->res.classid = nla_get_u32(tb[TCA_RSVP_CLASSID-1]);    in rsvp_change()
   473  f->res.classid = nla_get_u32(tb[TCA_RSVP_CLASSID-1]);    in rsvp_change()
   484  if (f->res.classid > 255)    in rsvp_change()
   488  if (f->res.classid == 0 &&    in rsvp_change()
   489  (f->res.classid = gen_tunnel(data)) == 0)    in rsvp_change()
   615  if (f->res.classid)    in rsvp_dump()
   616  NLA_PUT_U32(skb, TCA_RSVP_CLASSID, f->res.classid);    in rsvp_dump()
|
D | sch_hfsc.c |
   925  hfsc_find_class(u32 classid, struct Qdisc *sch)    in hfsc_find_class()  argument
   930  clc = qdisc_class_find(&q->clhash, classid);    in hfsc_find_class()
   974  hfsc_change_class(struct Qdisc *sch, u32 classid, u32 parentid,    in hfsc_change_class()  argument
  1014  cl->cl_parent->cl_common.classid != parentid)    in hfsc_change_class()
  1058  if (classid == 0 || TC_H_MAJ(classid ^ sch->handle) != 0)    in hfsc_change_class()
  1060  if (hfsc_find_class(classid, sch))    in hfsc_change_class()
  1087  cl->cl_common.classid = classid;    in hfsc_change_class()
  1092  &pfifo_qdisc_ops, classid);    in hfsc_change_class()
  1177  if ((cl = hfsc_find_class(res.classid, sch)) == NULL)    in hfsc_classify()
  1209  cl->cl_common.classid);    in hfsc_graft_class()
   [all …]
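The hfsc_change_class() hit at line 1058 is the usual sanity check on a caller-supplied classid: it must be non-zero and its major number must match the qdisc's own handle, which the code tests with a single XOR. A standalone sketch with made-up handles:

#include <stdio.h>
#include <stdint.h>

#define TC_H_MAJ(h) ((h) & 0xFFFF0000U)

/* Returns 1 if 'classid' may belong to the qdisc whose handle is 'sch_handle':
 * XOR-ing the two cancels the major bits exactly when they are equal. */
static int classid_ok(uint32_t classid, uint32_t sch_handle)
{
	return classid != 0 && TC_H_MAJ(classid ^ sch_handle) == 0;
}

int main(void)
{
	uint32_t sch_handle = 0x00010000U;	/* qdisc "1:" */

	printf("1:7  -> %s\n", classid_ok(0x00010007U, sch_handle) ? "ok" : "rejected");
	printf("2:7  -> %s\n", classid_ok(0x00020007U, sch_handle) ? "ok" : "rejected");
	printf("zero -> %s\n", classid_ok(0x00000000U, sch_handle) ? "ok" : "rejected");
	return 0;
}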
|
D | cls_fw.c |
   108  res->classid = id;    in fw_classify()
   210  f->res.classid = nla_get_u32(tb[TCA_FW_CLASSID]);    in fw_change_attrs()
   343  if (!f->res.classid && !tcf_exts_is_available(&f->exts))    in fw_dump()
   350  if (f->res.classid)    in fw_dump()
   351  NLA_PUT_U32(skb, TCA_FW_CLASSID, f->res.classid);    in fw_dump()
|
D | sch_prio.c |
    58  band = res.classid;    in prio_classify()
   293  static unsigned long prio_get(struct Qdisc *sch, u32 classid)    in prio_get()  argument
   296  unsigned long band = TC_H_MIN(classid);    in prio_get()
   303  static unsigned long prio_bind(struct Qdisc *sch, unsigned long parent, u32 classid)    in prio_bind()  argument
   305  return prio_get(sch, classid);    in prio_bind()
|
D | cls_tcindex.c |
    60  return tcf_exts_is_predicative(&r->exts) || r->res.classid;    in tcindex_filter_is_set()
    95  res->classid = TC_H_MAKE(TC_H_MAJ(tp->q->handle), key);    in tcindex_classify()
    97  pr_debug("alg 0x%x\n", res->classid);    in tcindex_classify()
   101  pr_debug("map 0x%x\n", res->classid);    in tcindex_classify()
   296  cr.res.classid = nla_get_u32(tb[TCA_TCINDEX_CLASSID]);    in tcindex_set_parms()
   463  NLA_PUT_U32(skb, TCA_TCINDEX_CLASSID, r->res.classid);    in tcindex_dump()
|
D | cls_basic.c |
   154  f->res.classid = nla_get_u32(tb[TCA_BASIC_CLASSID]);    in basic_set_parms()
   262  if (f->res.classid)    in basic_dump()
   263  NLA_PUT_U32(skb, TCA_BASIC_CLASSID, f->res.classid);    in basic_dump()
|
D | sch_multiq.c |
   329  static unsigned long multiq_get(struct Qdisc *sch, u32 classid)    in multiq_get()  argument
   332  unsigned long band = TC_H_MIN(classid);    in multiq_get()
   340  u32 classid)    in multiq_bind()  argument
   342  return multiq_get(sch, classid);    in multiq_bind()
|
D | cls_route.c |
   190  res->classid = id;    in route4_classify()
   413  f->res.classid = nla_get_u32(tb[TCA_ROUTE4_CLASSID]);    in route4_set_parms()
   576  if (f->res.classid)    in route4_dump()
   577  NLA_PUT_U32(skb, TCA_ROUTE4_CLASSID, f->res.classid);    in route4_dump()
|
D | sch_sfq.c |
   186  if (TC_H_MIN(res.classid) <= SFQ_HASH_DIVISOR)    in sfq_classify()
   187  return TC_H_MIN(res.classid);    in sfq_classify()
   499  static int sfq_change_class(struct Qdisc *sch, u32 classid, u32 parentid,    in sfq_change_class()  argument
   505  static unsigned long sfq_get(struct Qdisc *sch, u32 classid)    in sfq_get()  argument
|
D | sch_api.c |
   239  static struct Qdisc *qdisc_leaf(struct Qdisc *p, u32 classid)    in qdisc_leaf()  argument
   247  cl = cops->get(p, classid);    in qdisc_leaf()
   539  h = qdisc_class_hash(cl->classid, nmask);    in qdisc_class_hash_grow()
   578  h = qdisc_class_hash(cl->classid, clhash->hashmask);    in qdisc_class_hash_insert()
   682  struct sk_buff *skb, struct nlmsghdr *n, u32 classid,    in qdisc_graft()  argument
   712  notify_and_destroy(skb, n, classid, old, new);    in qdisc_graft()
   723  unsigned long cl = cops->get(parent, classid);    in qdisc_graft()
   730  notify_and_destroy(skb, n, classid, old, new);    in qdisc_graft()
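qdisc_class_hash_grow() and qdisc_class_hash_insert() above both bucket classes by qdisc_class_hash(cl->classid, mask). In kernels of this generation the helper is, to the best of my recollection, a short XOR fold of the classid; the sketch below reproduces that fold in user space so the bucketing can be inspected, and should be read as an assumption about the exact helper body rather than a quote of it:

#include <stdio.h>
#include <stdint.h>

/* XOR fold over the classid; an assumed reconstruction of qdisc_class_hash()
 * in net/sched/sch_api.c, reproduced here only for illustration. */
static unsigned int qdisc_class_hash(uint32_t id, uint32_t mask)
{
	id ^= id >> 8;
	id ^= id >> 4;
	return id & mask;
}

int main(void)
{
	uint32_t hashmask = 0x0F;	/* a 16-bucket table, kept small for the demo */

	for (uint32_t minor = 1; minor <= 4; minor++) {
		uint32_t classid = 0x00010000U | minor;	/* "1:1" .. "1:4" */
		printf("classid 0x%08x -> bucket %u\n",
		       classid, qdisc_class_hash(classid, hashmask));
	}
	return 0;
}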
|
D | sch_red.c |
   301  static unsigned long red_get(struct Qdisc *sch, u32 classid)    in red_get()  argument
   311  static int red_change_class(struct Qdisc *sch, u32 classid, u32 parentid,    in red_change_class()  argument
|
D | sch_tbf.c |
   409  static unsigned long tbf_get(struct Qdisc *sch, u32 classid)    in tbf_get()  argument
   418  static int tbf_change_class(struct Qdisc *sch, u32 classid, u32 parentid,    in tbf_change_class()  argument
|
D | cls_u32.c |
   504  n->res.classid = nla_get_u32(tb[TCA_U32_CLASSID]);    in u32_set_parms()
   718  if (n->res.classid)    in u32_dump()
   719  NLA_PUT_U32(skb, TCA_U32_CLASSID, n->res.classid);    in u32_dump()
|
D | cls_api.c |
   260  tp->classid = parent;    in tc_ctl_tfilter()
   342  tcm->tcm_parent = tp->classid;    in tcf_fill_node()
|