
Searched refs:backlog (Results 1 – 25 of 65) sorted by relevance


/net/sched/
sch_pie.c
34 struct pie_vars *vars, u32 backlog, u32 packet_size) in pie_drop_early() argument
54 if (backlog < 2 * mtu) in pie_drop_early()
96 if (!pie_drop_early(sch, &q->params, &q->vars, sch->qstats.backlog, in pie_qdisc_enqueue()
209 struct pie_vars *vars, u32 backlog) in pie_process_dequeue() argument
225 if (backlog == 0) in pie_process_dequeue()
238 if (backlog >= QUEUE_THRESHOLD && vars->dq_count == DQCOUNT_INVALID) { in pie_process_dequeue()
277 if (backlog < QUEUE_THRESHOLD) { in pie_process_dequeue()
301 u32 backlog) in pie_calculate_probability() argument
316 qdelay = (backlog << PIE_SCALE) / vars->avg_dq_rate; in pie_calculate_probability()
327 if (qdelay == 0 && backlog != 0) in pie_calculate_probability()
[all …]
sch_gred.c
39 u32 backlog; /* bytes on the virtualQ */ member
117 return sch->qstats.backlog; in gred_backlog()
119 return q->backlog; in gred_backlog()
181 if (likely(sch->qstats.backlog + qdisc_pkt_len(skb) <= in gred_enqueue()
247 q->backlog += qdisc_pkt_len(skb); in gred_enqueue()
275 q->backlog -= qdisc_pkt_len(skb); in gred_dequeue()
278 if (!sch->qstats.backlog) in gred_dequeue()
281 if (!q->backlog) in gred_dequeue()
306 q->backlog = 0; in gred_reset()
344 opt.set.tab[i].backlog = &q->backlog; in gred_offload()
[all …]
sch_fq_pie.c
47 u32 backlog; member
164 sel_flow->backlog, skb->len)) { in fq_pie_qdisc_enqueue()
183 sch->qstats.backlog += pkt_len; in fq_pie_qdisc_enqueue()
191 sel_flow->backlog = 0; in fq_pie_qdisc_enqueue()
194 sel_flow->backlog += pkt_len; in fq_pie_qdisc_enqueue()
262 sch->qstats.backlog -= pkt_len; in fq_pie_qdisc_dequeue()
278 flow->backlog -= pkt_len; in fq_pie_qdisc_dequeue()
280 pie_process_dequeue(skb, &q->p_params, &flow->vars, flow->backlog); in fq_pie_qdisc_dequeue()
395 q->flows[q->flows_cursor].backlog); in fq_pie_timer()
sch_sfq.c
109 unsigned int backlog; member
308 slot->backlog -= len; in sfq_drop()
374 slot->backlog = 0; /* should already be 0 anyway... */ in sfq_enqueue()
381 slot->backlog); in sfq_enqueue()
432 sch->qstats.backlog -= delta; in sfq_enqueue()
433 slot->backlog -= delta; in sfq_enqueue()
443 slot->backlog += qdisc_pkt_len(skb); in sfq_enqueue()
504 slot->backlog -= qdisc_pkt_len(skb); in sfq_dequeue()
556 slot->backlog = 0; in sfq_rehash()
587 slot->backlog); in sfq_rehash()
[all …]
sch_skbprio.c
85 q->qstats[prio].backlog += qdisc_pkt_len(skb); in skbprio_enqueue()
108 q->qstats[prio].backlog += qdisc_pkt_len(skb); in skbprio_enqueue()
117 q->qstats[lp].backlog -= qdisc_pkt_len(to_drop); in skbprio_enqueue()
152 q->qstats[q->highest_prio].backlog -= qdisc_pkt_len(skb); in skbprio_dequeue()
sch_fifo.c
22 if (likely(sch->qstats.backlog + qdisc_pkt_len(skb) <= sch->limit)) in bfifo_enqueue()
45 prev_backlog = sch->qstats.backlog; in pfifo_tail_enqueue()
51 qdisc_tree_reduce_backlog(sch, 0, prev_backlog - sch->qstats.backlog); in pfifo_tail_enqueue()
sch_fq_codel.c
179 sch->qstats.backlog -= len; in fq_codel_drop()
220 prev_backlog = sch->qstats.backlog; in fq_codel_enqueue()
233 prev_backlog -= sch->qstats.backlog; in fq_codel_enqueue()
268 sch->qstats.backlog -= qdisc_pkt_len(skb); in dequeue_func()
303 skb = codel_dequeue(sch, &sch->qstats.backlog, &q->cparams, in fq_codel_dequeue()
657 qs.backlog = q->backlogs[idx]; in fq_codel_dump_class_stats()
sch_hhf.c
405 prev_backlog = sch->qstats.backlog; in hhf_enqueue()
414 qdisc_tree_reduce_backlog(sch, 1, prev_backlog - sch->qstats.backlog); in hhf_enqueue()
564 prev_backlog = sch->qstats.backlog; in hhf_change()
571 prev_backlog - sch->qstats.backlog); in hhf_change()
sch_mqprio.c
483 sch->qstats.backlog += qdisc->qstats.backlog; in mqprio_dump()
613 qstats.backlog += qdisc->qstats.backlog; in mqprio_dump_class_stats()
sch_codel.c
75 sch->qstats.backlog -= qdisc_pkt_len(skb); in dequeue_func()
94 skb = codel_dequeue(sch, &sch->qstats.backlog, &q->params, &q->vars, in codel_qdisc_dequeue()
sch_mq.c
185 sch->qstats.backlog += qdisc->qstats.backlog; in mq_dump()
sch_sfb.c
407 sch->qstats.backlog += len; in sfb_enqueue()
583 sch->qstats.backlog = q->qdisc->qstats.backlog; in sfb_dump()
sch_plug.c
92 if (likely(sch->qstats.backlog + skb->len <= q->limit)) { in plug_enqueue()
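
The /net/sched/ results above share one byte-accounting convention: a qdisc adds qdisc_pkt_len(skb) to sch->qstats.backlog when it accepts a packet, subtracts it when the packet leaves, and checks the running total against a byte limit before admitting more. A minimal sketch of that pattern, modelled on the bfifo/plug hits above (not a complete, registrable qdisc):

/*
 * Sketch only: byte-mode backlog accounting as seen in the qdisc
 * results above. "toy_" names are hypothetical.
 */
#include <net/sch_generic.h>

static int toy_enqueue(struct sk_buff *skb, struct Qdisc *sch,
		       struct sk_buff **to_free)
{
	unsigned int len = qdisc_pkt_len(skb);

	/* Byte-mode limit check, as in bfifo_enqueue()/plug_enqueue(). */
	if (unlikely(sch->qstats.backlog + len > sch->limit))
		return qdisc_drop(skb, sch, to_free);

	__qdisc_enqueue_tail(skb, &sch->q);
	sch->qstats.backlog += len;	/* bytes now held by this qdisc */
	return NET_XMIT_SUCCESS;
}

static struct sk_buff *toy_dequeue(struct Qdisc *sch)
{
	struct sk_buff *skb = __qdisc_dequeue_head(&sch->q);

	if (skb)
		sch->qstats.backlog -= qdisc_pkt_len(skb);	/* release bytes */
	return skb;
}

When a qdisc drops packets it has already accepted (the tail-drop paths in sch_fifo.c, sch_fq_codel.c and sch_hhf.c above), it additionally reports the freed packets and bytes upward with qdisc_tree_reduce_backlog() so parent qdiscs stay consistent.
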
/net/core/
gen_stats.c
295 qstats->backlog += qcpu->backlog; in __gnet_stats_copy_queue_cpu()
311 qstats->backlog = q->backlog; in __gnet_stats_copy_queue()
347 d->tc_stats.backlog = qstats.backlog; in gnet_stats_copy_queue()
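
The gen_stats.c hits show where per-queue counters are aggregated for dumping: if a qdisc keeps per-CPU queue stats the backlog is summed across CPUs, otherwise it is copied straight from the qdisc, and the total ends up in tc_stats.backlog. An illustrative sketch of the per-CPU fold, with a hypothetical helper name:

/* Sketch of summing per-CPU queue stats, in the spirit of
 * __gnet_stats_copy_queue_cpu() above.
 */
#include <linux/percpu.h>
#include <net/gen_stats.h>

static void toy_sum_queue_stats(struct gnet_stats_queue *qstats,
				const struct gnet_stats_queue __percpu *q)
{
	int i;

	for_each_possible_cpu(i) {
		const struct gnet_stats_queue *qcpu = per_cpu_ptr(q, i);

		qstats->backlog    += qcpu->backlog;	/* queued bytes */
		qstats->drops      += qcpu->drops;
		qstats->requeues   += qcpu->requeues;
		qstats->overlimits += qcpu->overlimits;
	}
}
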
/net/rxrpc/
call_accept.c
165 struct rxrpc_backlog *b = rx->backlog; in rxrpc_service_prealloc()
171 rx->backlog = b; in rxrpc_service_prealloc()
182 struct rxrpc_backlog *b = rx->backlog; in rxrpc_discard_prealloc()
188 rx->backlog = NULL; in rxrpc_discard_prealloc()
267 struct rxrpc_backlog *b = rx->backlog; in rxrpc_alloc_incoming_call()
450 struct rxrpc_backlog *b = rx->backlog; in rxrpc_user_charge_accept()
482 struct rxrpc_backlog *b = rx->backlog; in rxrpc_kernel_charge_accept()
af_rxrpc.c
209 static int rxrpc_listen(struct socket *sock, int backlog) in rxrpc_listen() argument
216 _enter("%p,%d", rx, backlog); in rxrpc_listen()
229 if (backlog == INT_MAX) in rxrpc_listen()
230 backlog = max; in rxrpc_listen()
231 else if (backlog < 0 || backlog > max) in rxrpc_listen()
234 sk->sk_max_ack_backlog = backlog; in rxrpc_listen()
242 if (backlog == 0) { in rxrpc_listen()
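
In rxrpc, "backlog" names both the preallocation ring for incoming calls (call_accept.c) and the listen() argument that sizes it (af_rxrpc.c). The listen path follows a small validation convention, sketched below with a hypothetical helper; max stands in for the ceiling the real code derives from the rx->backlog preallocation ring:

/* Sketch of the backlog validation seen in rxrpc_listen() above:
 * INT_MAX means "use the maximum", out-of-range values are rejected,
 * anything else caps the accept backlog.
 */
#include <linux/errno.h>
#include <linux/limits.h>
#include <net/sock.h>

static int toy_set_listen_backlog(struct sock *sk, int backlog, int max)
{
	if (backlog == INT_MAX)
		backlog = max;
	else if (backlog < 0 || backlog > max)
		return -EINVAL;

	sk->sk_max_ack_backlog = backlog;
	return 0;
}
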
/net/sunrpc/
stats.c
153 ktime_t backlog, execute, now; in rpc_count_iostats_metrics() local
169 backlog = 0; in rpc_count_iostats_metrics()
171 backlog = ktime_sub(req->rq_xtime, task->tk_start); in rpc_count_iostats_metrics()
172 op_metrics->om_queue = ktime_add(op_metrics->om_queue, backlog); in rpc_count_iostats_metrics()
184 trace_rpc_stats_latency(req->rq_task, backlog, req->rq_rtt, execute); in rpc_count_iostats_metrics()
xprt.c
1564 xprt->stat.bklog_u += xprt->backlog.qlen; in xprt_request_transmit()
1629 rpc_sleep_on(&xprt->backlog, task, xprt_complete_request_init); in xprt_add_backlog()
1647 if (rpc_wake_up_first(&xprt->backlog, __xprt_set_rq, req) == NULL) { in xprt_wake_up_backlog()
2013 rpc_init_priority_wait_queue(&xprt->backlog, "xprt_backlog"); in xprt_init()
2077 rpc_destroy_wait_queue(&xprt->backlog); in xprt_destroy_cb()
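
In SUNRPC the word shifts meaning again: stats.c measures backlog as the time a request waited between being started and being transmitted, while xprt.c uses xprt->backlog as a wait queue for tasks that could not get a transport slot. A sketch of the latency-style accounting, with illustrative structure and field names:

/* Sketch of "backlog as queueing delay", in the spirit of
 * rpc_count_iostats_metrics() above.
 */
#include <linux/ktime.h>

struct toy_op_metrics {
	ktime_t om_queue;	/* total time requests sat in the backlog */
};

static void toy_count_backlog(struct toy_op_metrics *m,
			      ktime_t queued_at, ktime_t sent_at)
{
	ktime_t backlog = 0;

	if (ktime_after(sent_at, queued_at))
		backlog = ktime_sub(sent_at, queued_at);

	m->om_queue = ktime_add(m->om_queue, backlog);
}
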
/net/tipc/
link.c
193 } backlog[5]; member
943 avail[imp] = l->backlog[imp].limit - l->backlog[imp].len; in link_prepare_wakeup()
1001 l->backlog[imp].len = 0; in tipc_link_reset()
1002 l->backlog[imp].target_bskb = NULL; in tipc_link_reset()
1068 if (unlikely(l->backlog[imp].len >= l->backlog[imp].limit)) { in tipc_link_xmit()
1103 if (tipc_msg_try_bundle(l->backlog[imp].target_bskb, &skb, in tipc_link_xmit()
1107 l->backlog[imp].target_bskb = skb; in tipc_link_xmit()
1108 l->backlog[imp].len++; in tipc_link_xmit()
1119 l->backlog[imp].target_bskb = NULL; in tipc_link_xmit()
1120 l->backlog[imp].len += (1 + skb_queue_len(list)); in tipc_link_xmit()
[all …]
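
TIPC keeps a small per-importance backlog array on each link: every importance level has its own length and limit, senders are blocked once their level is full, and link_prepare_wakeup() uses limit - len to decide how many waiters to release. A simplified sketch of that structure (names and types simplified):

#include <linux/types.h>

struct toy_backlog {
	u16 len;	/* messages currently backlogged at this level */
	u16 limit;	/* admission limit for this level */
};

struct toy_link {
	struct toy_backlog backlog[5];	/* one slot per importance level */
};

/* True if a message of importance 'imp' must wait (link congested). */
static bool toy_link_congested(const struct toy_link *l, int imp)
{
	return l->backlog[imp].len >= l->backlog[imp].limit;
}

/* How many more messages this level can absorb (used when waking senders). */
static u16 toy_link_avail(const struct toy_link *l, int imp)
{
	return l->backlog[imp].limit - l->backlog[imp].len;
}
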
/net/dccp/
proto.c
247 static inline int dccp_listen_start(struct sock *sk, int backlog) in dccp_listen_start() argument
255 return inet_csk_listen_start(sk, backlog); in dccp_listen_start()
929 int inet_dccp_listen(struct socket *sock, int backlog) in inet_dccp_listen() argument
945 WRITE_ONCE(sk->sk_max_ack_backlog, backlog); in inet_dccp_listen()
954 err = dccp_listen_start(sk, backlog); in inet_dccp_listen()
/net/atm/
svc.c
282 static int svc_listen(struct socket *sock, int backlog) in svc_listen() argument
315 sk->sk_max_ack_backlog = backlog > 0 ? backlog : ATM_BACKLOG_DEFAULT; in svc_listen()
/net/llc/
af_llc.c
551 static int llc_ui_listen(struct socket *sock, int backlog) in llc_ui_listen() argument
566 if (!(unsigned int)backlog) /* BSDism */ in llc_ui_listen()
567 backlog = 1; in llc_ui_listen()
568 sk->sk_max_ack_backlog = backlog; in llc_ui_listen()
/net/
socket.c
1735 int __sys_listen(int fd, int backlog) in __sys_listen() argument
1744 if ((unsigned int)backlog > somaxconn) in __sys_listen()
1745 backlog = somaxconn; in __sys_listen()
1747 err = security_socket_listen(sock, backlog); in __sys_listen()
1749 err = sock->ops->listen(sock, backlog); in __sys_listen()
1756 SYSCALL_DEFINE2(listen, int, fd, int, backlog) in SYSCALL_DEFINE2() argument
1758 return __sys_listen(fd, backlog); in SYSCALL_DEFINE2()
3421 int kernel_listen(struct socket *sock, int backlog) in kernel_listen() argument
3423 return sock->ops->listen(sock, backlog); in kernel_listen()
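
net/socket.c holds the syscall-level handling: __sys_listen() clamps the requested backlog to net.core.somaxconn and then defers to the protocol's listen handler, which typically stores the value in sk->sk_max_ack_backlog (as the rxrpc, dccp, atm, llc, rfcomm and nfc results in this listing do). A sketch of the clamp; the somaxconn parameter is assumed to have already been read from the socket's network namespace:

/* Sketch of the clamp in __sys_listen() above. The cast to unsigned int
 * means a negative request is clamped to somaxconn rather than rejected.
 */
#include <linux/net.h>

static int toy_listen(struct socket *sock, int backlog, unsigned int somaxconn)
{
	if ((unsigned int)backlog > somaxconn)
		backlog = somaxconn;

	return sock->ops->listen(sock, backlog);
}
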
/net/bluetooth/rfcomm/
sock.c
429 static int rfcomm_sock_listen(struct socket *sock, int backlog) in rfcomm_sock_listen() argument
434 BT_DBG("sk %p backlog %d", sk, backlog); in rfcomm_sock_listen()
469 sk->sk_max_ack_backlog = backlog; in rfcomm_sock_listen()
/net/nfc/
llcp_sock.c
199 static int llcp_sock_listen(struct socket *sock, int backlog) in llcp_sock_listen() argument
204 pr_debug("sk %p backlog %d\n", sk, backlog); in llcp_sock_listen()
214 sk->sk_max_ack_backlog = backlog; in llcp_sock_listen()
