/net/sched/ |
D | sch_pie.c |
      26  struct pie_vars vars;                                     member
      34  struct pie_vars *vars, u32 backlog, u32 packet_size)      in pie_drop_early() argument
      37  u64 local_prob = vars->prob;                              in pie_drop_early()
      41  if (vars->burst_time > 0)                                 in pie_drop_early()
      47  if ((vars->qdelay < params->target / 2) &&                in pie_drop_early()
      48  (vars->prob < MAX_PROB / 5))                              in pie_drop_early()
      63  local_prob = vars->prob;                                  in pie_drop_early()
      66  vars->accu_prob = 0;                                      in pie_drop_early()
      68  vars->accu_prob += local_prob;                            in pie_drop_early()
      70  if (vars->accu_prob < (MAX_PROB / 100) * 85)              in pie_drop_early()
      [all …]
|
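The sch_pie.c hits above trace the early-drop decision PIE makes on enqueue: no drops while burst allowance remains, no drops while the delay is under half the target and the probability is still small, and otherwise an accumulated-probability gate before the random drop. The userspace C sketch below mirrors that control flow only; the struct layouts, the MAX_PROB scale and the caller-supplied random value are simplifications of mine, not the kernel's definitions.

#include <stdbool.h>
#include <stdint.h>

#define MAX_PROB UINT64_MAX             /* assumed scale: 1.0 == UINT64_MAX */

struct pie_vars_sketch {                /* hypothetical stand-in for struct pie_vars */
        uint64_t prob;                  /* current drop probability */
        uint64_t accu_prob;             /* probability accumulated since the last drop */
        uint32_t burst_time;            /* remaining burst allowance */
        uint64_t qdelay;                /* latest measured queue delay */
};

struct pie_params_sketch {              /* hypothetical stand-in for struct pie_params */
        uint64_t target;                /* target queue delay */
};

/* Early-drop decision in the style of pie_drop_early(); rnd is a
 * caller-supplied 64-bit random value. */
static bool pie_drop_early_sketch(const struct pie_params_sketch *params,
                                  struct pie_vars_sketch *vars, uint64_t rnd)
{
        uint64_t local_prob = vars->prob;

        /* While burst allowance remains, never drop early. */
        if (vars->burst_time > 0)
                return false;

        /* If delay is well under target and the probability is still
         * small, leave the queue alone. */
        if (vars->qdelay < params->target / 2 && vars->prob < MAX_PROB / 5)
                return false;

        /* De-randomisation: accumulate probability and hold off until
         * roughly 0.85 expected drops have built up. */
        vars->accu_prob += local_prob;
        if (vars->accu_prob < (MAX_PROB / 100) * 85)
                return false;

        /* Finally gate on the random draw; reset the accumulator on a drop. */
        if (rnd < local_prob) {
                vars->accu_prob = 0;
                return true;
        }
        return false;
}

The accu_prob gate is PIE's de-randomisation step: drops are suppressed until enough probability mass has accumulated, which avoids clustered drops at moderate probabilities.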
D | sch_choke.c |
      56  struct red_vars vars;                                          member
     221  q->vars.qavg = red_calc_qavg(p, &q->vars, sch->q.qlen);        in choke_enqueue()
     222  if (red_is_idling(&q->vars))                                   in choke_enqueue()
     223  red_end_of_idle_period(&q->vars);                              in choke_enqueue()
     226  if (q->vars.qavg <= p->qth_min)                                in choke_enqueue()
     227  q->vars.qcount = -1;                                           in choke_enqueue()
     239  if (q->vars.qavg > p->qth_max) {                               in choke_enqueue()
     240  q->vars.qcount = -1;                                           in choke_enqueue()
     250  } else if (++q->vars.qcount) {                                 in choke_enqueue()
     251  if (red_mark_probability(p, &q->vars, q->vars.qavg)) {         in choke_enqueue()
      [all …]
|
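choke_enqueue() leans on the generic RED helpers: red_calc_qavg() refreshes the average, which is then compared against qth_min/qth_max, with qcount tracking packets since the last mark. Below is a compressed sketch of that three-way verdict, using invented types in place of red_parms/red_vars and a caller-supplied result standing in for red_mark_probability().

#include <stdbool.h>
#include <stdint.h>

enum red_verdict_sketch { RED_DONT_MARK, RED_PROB_MARK, RED_HARD_MARK };

struct red_state_sketch {               /* hypothetical mix of red_parms and red_vars */
        uint32_t qth_min;               /* lower average-queue threshold */
        uint32_t qth_max;               /* upper average-queue threshold */
        uint32_t qavg;                  /* smoothed average queue length */
        int      qcount;                /* packets since the last (possible) mark */
};

/* Three-way RED verdict as consulted from choke_enqueue(): below qth_min
 * nothing is marked, above qth_max everything is, and in between the
 * caller marks probabilistically (prob_hit stands in for
 * red_mark_probability()). */
static enum red_verdict_sketch red_action_sketch(struct red_state_sketch *s,
                                                 bool prob_hit)
{
        if (s->qavg <= s->qth_min) {
                s->qcount = -1;
                return RED_DONT_MARK;
        }

        if (s->qavg > s->qth_max) {
                s->qcount = -1;
                return RED_HARD_MARK;
        }

        if (++s->qcount && prob_hit) {
                s->qcount = 0;
                return RED_PROB_MARK;
        }
        return RED_DONT_MARK;
}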
D | sch_codel.c |
      60  struct codel_vars vars;                                                    member
      69  static struct sk_buff *dequeue_func(struct codel_vars *vars, void *ctx)    in dequeue_func() argument
      94  skb = codel_dequeue(sch, &sch->qstats.backlog, &q->params, &q->vars,       in codel_qdisc_dequeue()
     197  codel_vars_init(&q->vars);                                                 in codel_init()
     250  .count = q->vars.count,                                                    in codel_dump_stats()
     251  .lastcount = q->vars.lastcount,                                            in codel_dump_stats()
     253  .ldelay = codel_time_to_us(q->vars.ldelay),                                in codel_dump_stats()
     254  .dropping = q->vars.dropping,                                              in codel_dump_stats()
     259  if (q->vars.dropping) {                                                    in codel_dump_stats()
     260  codel_tdiff_t delta = q->vars.drop_next - codel_get_time();                in codel_dump_stats()
      [all …]
|
D | sch_cake.c |
     372  static void cobalt_newton_step(struct cobalt_vars *vars)           in cobalt_newton_step() argument
     377  invsqrt = vars->rec_inv_sqrt;                                      in cobalt_newton_step()
     379  val = (3LL << 32) - ((u64)vars->count * invsqrt2);                 in cobalt_newton_step()
     384  vars->rec_inv_sqrt = val;                                          in cobalt_newton_step()
     387  static void cobalt_invsqrt(struct cobalt_vars *vars)               in cobalt_invsqrt() argument
     389  if (vars->count < REC_INV_SQRT_CACHE)                              in cobalt_invsqrt()
     390  vars->rec_inv_sqrt = cobalt_rec_inv_sqrt_cache[vars->count];       in cobalt_invsqrt()
     392  cobalt_newton_step(vars);                                          in cobalt_invsqrt()
     423  static void cobalt_vars_init(struct cobalt_vars *vars)             in cobalt_vars_init() argument
     425  memset(vars, 0, sizeof(*vars));                                    in cobalt_vars_init()
      [all …]
|
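The sch_cake.c hits are COBALT's square-root schedule: rec_inv_sqrt holds 1/sqrt(count) in Q0.32 fixed point, refreshed either from a small cache (cobalt_rec_inv_sqrt_cache) for low counts or by a single Newton-Raphson step, x' = x * (3 - count * x^2) / 2. A standalone userspace version of that step follows; the type names and the demo in main() are mine, and the cache is left out.

#include <stdint.h>
#include <stdio.h>

struct cobalt_vars_sketch {             /* stand-in for struct cobalt_vars */
        uint32_t count;                 /* consecutive-drop counter */
        uint32_t rec_inv_sqrt;          /* 1/sqrt(count) in Q0.32 fixed point */
};

/* One Newton-Raphson refinement of x ~= 1/sqrt(count):
 *   x' = x * (3 - count * x^2) / 2, everything in Q0.32. */
static void cobalt_newton_step_sketch(struct cobalt_vars_sketch *vars)
{
        uint32_t invsqrt = vars->rec_inv_sqrt;
        uint32_t invsqrt2 = ((uint64_t)invsqrt * invsqrt) >> 32;       /* x^2 */
        uint64_t val = (3ULL << 32) - (uint64_t)vars->count * invsqrt2;

        val >>= 2;                              /* pre-shift so the multiply fits in 64 bits */
        val = (val * invsqrt) >> (32 - 2 + 1);  /* times x, divided by 2 */

        vars->rec_inv_sqrt = (uint32_t)val;
}

int main(void)
{
        struct cobalt_vars_sketch v = { .count = 0, .rec_inv_sqrt = UINT32_MAX };
        int i;

        /* Walk count upwards and reuse the previous estimate as the seed,
         * the same way the kernel builds cobalt_rec_inv_sqrt_cache[]. */
        for (v.count = 1; v.count <= 9; v.count++)
                for (i = 0; i < 4; i++)
                        cobalt_newton_step_sketch(&v);

        /* For count == 9, expect roughly 1/3. */
        printf("1/sqrt(9) ~= %f\n", v.rec_inv_sqrt / 4294967296.0);
        return 0;
}

Seeding each count with the previous estimate is what makes one or a few steps per update sufficient; starting cold from 1.0 would not converge for larger counts.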
D | sch_gred.c |
      43  struct red_vars vars;                                  member
     130  q->vars.qavg = table->wred_set.qavg;                   in gred_load_wred_set()
     131  q->vars.qidlestart = table->wred_set.qidlestart;       in gred_load_wred_set()
     137  table->wred_set.qavg = q->vars.qavg;                   in gred_store_wred_set()
     138  table->wred_set.qidlestart = q->vars.qidlestart;       in gred_store_wred_set()
     199  !red_is_idling(&t->tab[i]->vars))                      in gred_enqueue()
     200  qavg += t->tab[i]->vars.qavg;                          in gred_enqueue()
     211  q->vars.qavg = red_calc_qavg(&q->parms,                in gred_enqueue()
     212  &q->vars,                                              in gred_enqueue()
     215  if (red_is_idling(&q->vars))                           in gred_enqueue()
      [all …]
|
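GRED gives each virtual queue its own red_vars, but in WRED mode a single averaged state is shared through the table's wred_set: it is loaded before the RED computation and stored back afterwards, and gred_enqueue() can also sum qavg across non-idling virtual queues. A compact sketch of that load/sum/store pattern, with hypothetical types and queue count:

#include <stddef.h>
#include <stdint.h>

#define GRED_VQS_SKETCH 16              /* hypothetical number of virtual queues */

struct red_vars_sketch {                /* stand-in for struct red_vars */
        uint32_t qavg;                  /* smoothed average backlog */
        int64_t  qidlestart;            /* time the queue went idle; 0 == not idling */
};

struct gred_vq_sketch {                 /* one virtual queue (drop precedence) */
        struct red_vars_sketch vars;
};

struct gred_table_sketch {
        struct gred_vq_sketch *tab[GRED_VQS_SKETCH];
        struct red_vars_sketch wred_set;        /* state shared in WRED mode */
};

/* WRED mode: every virtual queue computes on the same averaged state. */
static void gred_load_wred_set_sketch(const struct gred_table_sketch *t,
                                      struct gred_vq_sketch *q)
{
        q->vars.qavg = t->wred_set.qavg;
        q->vars.qidlestart = t->wred_set.qidlestart;
}

static void gred_store_wred_set_sketch(struct gred_table_sketch *t,
                                       const struct gred_vq_sketch *q)
{
        t->wred_set.qavg = q->vars.qavg;
        t->wred_set.qidlestart = q->vars.qidlestart;
}

/* RIO mode sums the averages of the other non-idling virtual queues; the
 * kernel additionally filters on priority, which this sketch skips. */
static uint32_t gred_total_qavg_sketch(const struct gred_table_sketch *t)
{
        uint32_t qavg = 0;
        size_t i;

        for (i = 0; i < GRED_VQS_SKETCH; i++)
                if (t->tab[i] && t->tab[i]->vars.qidlestart == 0)
                        qavg += t->tab[i]->vars.qavg;
        return qavg;
}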
D | sch_red.c |
      46  struct red_vars vars;                                       member
      78  q->vars.qavg = red_calc_qavg(&q->parms,                     in red_enqueue()
      79  &q->vars,                                                   in red_enqueue()
      82  if (red_is_idling(&q->vars))                                in red_enqueue()
      83  red_end_of_idle_period(&q->vars);                           in red_enqueue()
      85  switch (red_action(&q->parms, &q->vars, q->vars.qavg)) {    in red_enqueue()
     162  if (!red_is_idling(&q->vars))                               in red_dequeue()
     163  red_start_of_idle_period(&q->vars);                         in red_dequeue()
     181  red_restart(&q->vars);                                      in red_reset()
     296  red_set_vars(&q->vars);                                     in __red_change()
      [all …]
|
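sch_red.c is the plain user of the same helpers: enqueue refreshes the exponentially weighted moving average and closes any idle period, while the dequeue and reset paths open one. A minimal sketch of the EWMA update behind red_calc_qavg(), qavg <- qavg + (backlog - qavg) / 2^Wlog; note the kernel keeps qavg pre-scaled by 2^Wlog and decays it across idle periods, both of which are simplified away here.

#include <stdbool.h>
#include <stdint.h>

struct red_sketch {                     /* condensed stand-in for red_parms + red_vars */
        uint32_t qavg;                  /* smoothed average backlog (unscaled here) */
        uint8_t  Wlog;                  /* EWMA weight is 2^-Wlog */
        bool     idling;                /* set while the queue sits empty */
};

/* EWMA update in the style of red_calc_qavg():
 *   qavg <- qavg + (backlog - qavg) / 2^Wlog */
static void red_update_qavg_sketch(struct red_sketch *s, uint32_t backlog)
{
        int64_t diff = (int64_t)backlog - (int64_t)s->qavg;

        if (s->idling)
                s->idling = false;      /* red_end_of_idle_period() equivalent;
                                         * the idle-time decay is elided here */

        s->qavg = (uint32_t)(s->qavg + diff / ((int64_t)1 << s->Wlog));
}

/* The dequeue and reset paths mark the start of an idle period. */
static void red_start_idle_sketch(struct red_sketch *s)
{
        s->idling = true;
}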
D | sch_fq_pie.c |
      45  struct pie_vars vars;                                                   member
     163  if (!pie_drop_early(sch, &q->p_params, &sel_flow->vars,                 in fq_pie_qdisc_enqueue()
     167  sel_flow->vars.prob <= (MAX_PROB / 100) * q->ecn_prob &&                in fq_pie_qdisc_enqueue()
     199  sel_flow->vars.accu_prob = 0;                                           in fq_pie_qdisc_enqueue()
     280  pie_process_dequeue(skb, &q->p_params, &flow->vars, flow->backlog);     in fq_pie_qdisc_dequeue()
     394  &q->flows[q->flows_cursor].vars,                                        in fq_pie_timer()
     451  pie_vars_init(&flow->vars);                                             in fq_pie_init()
     539  pie_vars_init(&flow->vars);                                             in fq_pie_reset()
|
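fq_pie pushes the same PIE state down to each flow: every flow embeds its own pie_vars, (re)initialised in fq_pie_init()/fq_pie_reset() and handed to pie_drop_early() for whichever flow a packet hashes to; sch_sfq.c below does the equivalent per slot with red_vars. A minimal sketch of that per-flow embedding, with hypothetical types and flow count:

#include <stdint.h>
#include <stdlib.h>

struct pie_vars_sketch {                /* stand-in for struct pie_vars */
        uint64_t prob;
        uint64_t accu_prob;
};

struct fq_pie_flow_sketch {             /* one hashed flow with its own AQM state */
        struct pie_vars_sketch vars;
        uint32_t backlog;
};

struct fq_pie_sched_sketch {
        struct fq_pie_flow_sketch *flows;
        unsigned int flows_cnt;
};

static void pie_vars_init_sketch(struct pie_vars_sketch *vars)
{
        vars->prob = 0;
        vars->accu_prob = 0;
}

/* init (and, in the same spirit, reset) walks every flow and clears its
 * private PIE state; enqueue and dequeue then touch only the selected
 * flow's vars. */
static int fq_pie_init_sketch(struct fq_pie_sched_sketch *q, unsigned int flows)
{
        unsigned int i;

        q->flows = calloc(flows, sizeof(*q->flows));
        if (!q->flows)
                return -1;
        q->flows_cnt = flows;

        for (i = 0; i < q->flows_cnt; i++)
                pie_vars_init_sketch(&q->flows[i].vars);
        return 0;
}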
D | sch_sfq.c |
     110  struct red_vars vars;                                          member
     375  red_set_vars(&slot->vars);                                     in sfq_enqueue()
     379  slot->vars.qavg = red_calc_qavg_no_idle_time(q->red_parms,     in sfq_enqueue()
     380  &slot->vars,                                                   in sfq_enqueue()
     383  &slot->vars,                                                   in sfq_enqueue()
     384  slot->vars.qavg)) {                                            in sfq_enqueue()
     557  red_set_vars(&slot->vars);                                     in sfq_rehash()
     585  slot->vars.qavg = red_calc_qavg(q->red_parms,                  in sfq_rehash()
     586  &slot->vars,                                                   in sfq_rehash()
|
D | sch_fq_codel.c |
     255  static struct sk_buff *dequeue_func(struct codel_vars *vars, void *ctx)    in dequeue_func() argument
     262  flow = container_of(vars, struct fq_codel_flow, cvars);                    in dequeue_func()
|
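The two sch_fq_codel.c hits capture a common kernel idiom: the CoDel core hands the callback only a pointer to the embedded codel_vars, and dequeue_func() recovers the enclosing per-flow structure with container_of(). A userspace re-creation of the idiom follows; the flow layout is invented and container_of is spelled out with offsetof since we are outside the kernel.

#include <stddef.h>
#include <stdio.h>

/* Userspace spelling of the kernel's container_of(): map a pointer to an
 * embedded member back to the structure containing it. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct codel_vars_sketch {              /* stand-in for struct codel_vars */
        unsigned int count;
};

struct fq_codel_flow_sketch {           /* per-flow state embedding the AQM vars */
        int deficit;
        struct codel_vars_sketch cvars;
};

/* Callback in the style of dequeue_func(): given only the vars pointer,
 * climb back up to the owning flow. */
static int flow_deficit_from_vars(struct codel_vars_sketch *vars)
{
        struct fq_codel_flow_sketch *flow =
                container_of(vars, struct fq_codel_flow_sketch, cvars);

        return flow->deficit;
}

int main(void)
{
        struct fq_codel_flow_sketch flow = { .deficit = 1514, .cvars = { 0 } };

        printf("deficit = %d\n", flow_deficit_from_vars(&flow.cvars));
        return 0;
}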