Searched refs:sk_receive_queue (Results 1 – 25 of 88) sorted by relevance


/kernel/linux/linux-5.10/net/unix/
garbage.c:97 spin_lock(&x->sk_receive_queue.lock); in scan_inflight()
98 skb_queue_walk_safe(&x->sk_receive_queue, skb, next) { in scan_inflight()
125 __skb_unlink(skb, &x->sk_receive_queue); in scan_inflight()
130 spin_unlock(&x->sk_receive_queue.lock); in scan_inflight()
147 spin_lock(&x->sk_receive_queue.lock); in scan_children()
148 skb_queue_walk_safe(&x->sk_receive_queue, skb, next) { in scan_children()
157 spin_unlock(&x->sk_receive_queue.lock); in scan_children()
310 skb_queue_tail(&skb->sk->sk_receive_queue, skb); in unix_gc()
diag.c:68 spin_lock(&sk->sk_receive_queue.lock); in sk_diag_dump_icons()
71 sk->sk_receive_queue.qlen * sizeof(u32)); in sk_diag_dump_icons()
77 skb_queue_walk(&sk->sk_receive_queue, skb) { in sk_diag_dump_icons()
91 spin_unlock(&sk->sk_receive_queue.lock); in sk_diag_dump_icons()
97 spin_unlock(&sk->sk_receive_queue.lock); in sk_diag_dump_icons()
106 rql.udiag_rqueue = sk->sk_receive_queue.qlen; in sk_diag_show_rqlen()
af_unix.c:194 return skb_queue_len(&sk->sk_receive_queue) > sk->sk_max_ack_backlog; in unix_recvq_full()
199 return skb_queue_len_lockless(&sk->sk_receive_queue) > in unix_recvq_full_lockless()
477 if (!skb_queue_empty(&sk->sk_receive_queue)) { in unix_dgram_disconnected()
478 skb_queue_purge(&sk->sk_receive_queue); in unix_dgram_disconnected()
496 skb_queue_purge(&sk->sk_receive_queue); in unix_sock_destructor()
551 if (!skb_queue_empty(&sk->sk_receive_queue) || embrion) in unix_release_sock()
564 while ((skb = skb_dequeue(&sk->sk_receive_queue)) != NULL) { in unix_release_sock()
1418 spin_lock(&other->sk_receive_queue.lock); in unix_stream_connect()
1419 __skb_queue_tail(&other->sk_receive_queue, skb); in unix_stream_connect()
1420 spin_unlock(&other->sk_receive_queue.lock); in unix_stream_connect()
[all …]
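
The garbage.c hits above all follow one pattern: take sk_receive_queue.lock, walk the queue with skb_queue_walk_safe() so entries may be unlinked mid-iteration, and collect the detached skbs on a private list. A minimal sketch of that pattern, assuming a caller-supplied match() predicate and hitlist (both illustrative, not from the kernel source):

#include <linux/skbuff.h>
#include <net/sock.h>

/* Walk a socket's receive queue under its lock and move matching skbs
 * to a caller-owned hitlist; sketch of the scan_inflight() shape seen
 * in net/unix/garbage.c.
 */
static void scan_receive_queue(struct sock *sk,
                               bool (*match)(struct sk_buff *skb),
                               struct sk_buff_head *hitlist)
{
        struct sk_buff *skb, *next;

        spin_lock(&sk->sk_receive_queue.lock);
        skb_queue_walk_safe(&sk->sk_receive_queue, skb, next) {
                if (match(skb)) {
                        /* __skb_unlink() requires the queue lock to be held */
                        __skb_unlink(skb, &sk->sk_receive_queue);
                        __skb_queue_tail(hitlist, skb);
                }
        }
        spin_unlock(&sk->sk_receive_queue.lock);
}
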
/kernel/linux/linux-5.10/net/atm/
signaling.c:33 skb_queue_tail(&sk_atm(sigd)->sk_receive_queue, skb); in sigd_put_skb()
113 skb_queue_tail(&sk->sk_receive_queue, skb); in sigd_send()
204 if (skb_peek(&sk_atm(vcc)->sk_receive_queue)) in sigd_close()
206 skb_queue_purge(&sk_atm(vcc)->sk_receive_queue); in sigd_close()
ioctl.c:82 spin_lock_irq(&sk->sk_receive_queue.lock); in do_vcc_ioctl()
83 skb = skb_peek(&sk->sk_receive_queue); in do_vcc_ioctl()
85 spin_unlock_irq(&sk->sk_receive_queue.lock); in do_vcc_ioctl()
raw.c:28 skb_queue_tail(&sk->sk_receive_queue, skb); in atm_push_raw()
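
The atm hits are the producer and teardown side: skb_queue_tail() appends under the queue lock it takes internally, the reader is woken via sk_data_ready, and skb_queue_purge() drops whatever was never consumed when the signalling socket goes away. A rough sketch (deliver_to_sock() and drop_pending() are illustrative names):

#include <linux/printk.h>
#include <linux/skbuff.h>
#include <net/sock.h>

/* Queue an skb on the receive queue and wake the reader; sketch of
 * the sigd_put_skb()/sigd_send() style delivery.
 */
static void deliver_to_sock(struct sock *sk, struct sk_buff *skb)
{
        skb_queue_tail(&sk->sk_receive_queue, skb);     /* locks internally */
        sk->sk_data_ready(sk);                          /* wake blocked readers */
}

/* On teardown, drop anything the reader never consumed, as
 * sigd_close() does.
 */
static void drop_pending(struct sock *sk)
{
        if (skb_peek(&sk->sk_receive_queue))
                pr_debug("dropping unread skbs\n");
        skb_queue_purge(&sk->sk_receive_queue);
}
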
/kernel/linux/linux-5.10/net/bluetooth/
af_bluetooth.c:316 if (!skb_queue_empty(&sk->sk_receive_queue)) in bt_sock_data_wait()
359 skb = skb_dequeue(&sk->sk_receive_queue); in bt_sock_stream_recvmsg()
385 skb_queue_head(&sk->sk_receive_queue, skb); in bt_sock_stream_recvmsg()
424 skb_queue_head(&sk->sk_receive_queue, skb); in bt_sock_stream_recvmsg()
431 skb_queue_head(&sk->sk_receive_queue, skb); in bt_sock_stream_recvmsg()
479 if (!skb_queue_empty_lockless(&sk->sk_receive_queue)) in bt_sock_poll()
524 skb = skb_peek(&sk->sk_receive_queue); in bt_sock_ioctl()
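
bt_sock_stream_recvmsg() shows the stream consumer pattern: skb_dequeue() pops the head, and if the copy fails or data is left over the skb is pushed back with skb_queue_head() so the remaining bytes stay first in line. A condensed sketch that ignores MSG_PEEK, blocking and the socket-lock handling of the real function (stream_recv_one() is an illustrative name):

#include <linux/errno.h>
#include <linux/skbuff.h>
#include <linux/socket.h>
#include <net/sock.h>

/* Copy up to len bytes from the head skb of the receive queue and
 * requeue the skb if bytes remain (condensed stream-recvmsg shape,
 * not the real bt_sock_stream_recvmsg()).
 */
static int stream_recv_one(struct sock *sk, struct msghdr *msg, size_t len)
{
        struct sk_buff *skb;
        int chunk;

        skb = skb_dequeue(&sk->sk_receive_queue);
        if (!skb)
                return 0;                       /* nothing queued */

        chunk = min_t(unsigned int, skb->len, len);
        if (memcpy_to_msg(msg, skb->data, chunk)) {
                /* copy failed: put the skb back at the head */
                skb_queue_head(&sk->sk_receive_queue, skb);
                return -EFAULT;
        }

        skb_pull(skb, chunk);
        if (skb->len)
                skb_queue_head(&sk->sk_receive_queue, skb);     /* leftover stays first */
        else
                kfree_skb(skb);

        return chunk;
}
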
/kernel/linux/linux-5.10/net/sctp/
ulpqueue.c:138 &sk->sk_receive_queue); in sctp_clear_pd()
155 __skb_queue_tail(&sk->sk_receive_queue, in sctp_clear_pd()
216 queue = &sk->sk_receive_queue; in sctp_ulpq_tail_event()
230 queue = &sk->sk_receive_queue; in sctp_ulpq_tail_event()
239 queue = &sk->sk_receive_queue; in sctp_ulpq_tail_event()
254 if (queue == &sk->sk_receive_queue && !sp->data_ready_signalled) { in sctp_ulpq_tail_event()
1085 if (skb_queue_empty(&asoc->base.sk->sk_receive_queue)) { in sctp_ulpq_renege()
1129 __skb_queue_tail(&sk->sk_receive_queue, sctp_event2skb(ev)); in sctp_ulpq_abort_pd()
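
sctp's ulpqueue appends with the lock-free __skb_queue_tail() because it runs with the socket lock held and the other sk_receive_queue users in sctp honour the same discipline; it then signals the reader via sk_data_ready (the data_ready_signalled flag seen in the hits suppresses duplicate wakeups). A simplified sketch of that idea; queue_event_locked() is an illustrative name, and the single-lock assumption only holds if every queue user follows it:

#include <linux/skbuff.h>
#include <net/sock.h>

/* Append an event skb while the socket lock is already held, so the
 * lock-free __skb_queue_tail() is safe; then wake the reader.
 */
static void queue_event_locked(struct sock *sk, struct sk_buff *event)
{
        /* precondition: caller holds lock_sock(sk) and all other users
         * of sk_receive_queue do the same (as sctp arranges)
         */
        __skb_queue_tail(&sk->sk_receive_queue, event);
        sk->sk_data_ready(sk);
}
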
/kernel/linux/linux-5.10/net/caif/
caif_socket.c:128 struct sk_buff_head *list = &sk->sk_receive_queue; in caif_queue_rcv_skb()
317 if (!skb_queue_empty(&sk->sk_receive_queue) || in caif_stream_data_wait()
381 skb = skb_dequeue(&sk->sk_receive_queue); in caif_stream_recvmsg()
426 skb_queue_head(&sk->sk_receive_queue, skb); in caif_stream_recvmsg()
440 skb_queue_head(&sk->sk_receive_queue, skb); in caif_stream_recvmsg()
450 skb_queue_head(&sk->sk_receive_queue, skb); in caif_stream_recvmsg()
914 spin_lock_bh(&sk->sk_receive_queue.lock); in caif_release()
916 spin_unlock_bh(&sk->sk_receive_queue.lock); in caif_release()
957 if (!skb_queue_empty_lockless(&sk->sk_receive_queue) || in caif_poll()
/kernel/linux/linux-5.10/net/phonet/
datagram.c:39 skb = skb_peek(&sk->sk_receive_queue); in pn_ioctl()
64 skb_queue_purge(&sk->sk_receive_queue); in pn_destruct()
pep.c:405 queue = &sk->sk_receive_queue; in pipe_do_rcv()
464 skb_queue_purge(&sk->sk_receive_queue); in pipe_destruct()
577 skb_queue_tail(&sk->sk_receive_queue, skb); in pipe_handler_do_rcv()
684 skb_queue_head(&sk->sk_receive_queue, skb); in pep_do_rcv()
936 else if (!skb_queue_empty(&sk->sk_receive_queue)) in pep_ioctl()
937 answ = skb_peek(&sk->sk_receive_queue)->len; in pep_ioctl()
1233 struct sk_buff *skb = skb_dequeue(&sk->sk_receive_queue); in pep_read()
/kernel/linux/linux-5.10/net/kcm/
kcmproc.c:119 kcm->sk.sk_receive_queue.qlen, in kcm_format_sock()
149 psock->sk->sk_receive_queue.qlen, in kcm_format_psock()
167 if (psock->sk->sk_receive_queue.qlen) { in kcm_format_psock()
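
kcmproc.c only reads sk_receive_queue.qlen to report backlog in /proc; an unlocked read is acceptable there because the value is just a racy snapshot. A tiny sketch using the annotated helper (report_rx_backlog() is an illustrative name):

#include <linux/skbuff.h>
#include <net/sock.h>

/* Racy but harmless snapshot of the receive backlog, as the kcm
 * /proc output does with ->qlen; the _lockless helper just adds the
 * READ_ONCE() annotation.
 */
static unsigned int report_rx_backlog(const struct sock *sk)
{
        return skb_queue_len_lockless(&sk->sk_receive_queue);
}
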
/kernel/linux/linux-5.10/net/rxrpc/
af_rxrpc.c:813 spin_lock_bh(&sk->sk_receive_queue.lock); in rxrpc_shutdown()
820 spin_unlock_bh(&sk->sk_receive_queue.lock); in rxrpc_shutdown()
835 rxrpc_purge_queue(&sk->sk_receive_queue); in rxrpc_sock_destructor()
873 spin_lock_bh(&sk->sk_receive_queue.lock); in rxrpc_release_sock()
875 spin_unlock_bh(&sk->sk_receive_queue.lock); in rxrpc_release_sock()
887 rxrpc_purge_queue(&sk->sk_receive_queue); in rxrpc_release_sock()
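
rxrpc takes the queue lock with the _bh variants because the same lock is also taken from softirq context on the packet-delivery path; disabling bottom halves on the process-context side avoids a self-deadlock. A sketch of that locking choice (has_pending_rx() is an illustrative name):

#include <linux/skbuff.h>
#include <linux/spinlock.h>
#include <net/sock.h>

/* Check for pending data from process context when the same queue
 * lock is also taken in softirq context: the _bh variants keep the
 * softirq path from deadlocking against us.
 */
static bool has_pending_rx(struct sock *sk)
{
        bool pending;

        spin_lock_bh(&sk->sk_receive_queue.lock);
        pending = !skb_queue_empty(&sk->sk_receive_queue);
        spin_unlock_bh(&sk->sk_receive_queue.lock);

        return pending;
}
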
/kernel/linux/linux-5.10/net/tipc/
socket.c:270 kfree_skb(__skb_dequeue(&sk->sk_receive_queue)); in tsk_advance_rx_queue()
299 while ((skb = __skb_dequeue(&sk->sk_receive_queue))) in tsk_rej_rx_queue()
556 skb = skb_peek(&sk->sk_receive_queue); in __tipc_shutdown()
558 __skb_unlink(skb, &sk->sk_receive_queue); in __tipc_shutdown()
574 skb = __skb_dequeue(&sk->sk_receive_queue); in __tipc_shutdown()
576 __skb_queue_purge(&sk->sk_receive_queue); in __tipc_shutdown()
592 __skb_queue_purge(&sk->sk_receive_queue); in __tipc_shutdown()
790 if (!skb_queue_empty_lockless(&sk->sk_receive_queue)) in tipc_poll()
798 if (skb_queue_empty_lockless(&sk->sk_receive_queue)) in tipc_poll()
1843 if (timeo && skb_queue_empty(&sk->sk_receive_queue)) { in tipc_wait_for_rcvmsg()
[all …]
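
The tipc poll hits use skb_queue_empty_lockless(): a poll handler only needs a best-effort answer, so it checks emptiness without taking the queue lock. A sketch of the readable-mask side (rx_poll_mask() is an illustrative name):

#include <linux/poll.h>
#include <linux/skbuff.h>
#include <net/sock.h>

/* Best-effort readable check for a poll handler; no lock is taken,
 * mirroring the tipc_poll()/datagram_poll() hits.
 */
static __poll_t rx_poll_mask(struct sock *sk)
{
        __poll_t mask = 0;

        if (!skb_queue_empty_lockless(&sk->sk_receive_queue))
                mask |= EPOLLIN | EPOLLRDNORM;

        return mask;
}
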
/kernel/linux/linux-5.10/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls_cm.h:175 __skb_unlink(skb, &sk->sk_receive_queue); in chtls_free_skb()
182 __skb_unlink(skb, &sk->sk_receive_queue); in chtls_kfree_skb()
chtls_io.c:1464 skb = skb_peek(&sk->sk_receive_queue); in chtls_pt_recvmsg()
1521 __skb_unlink(skb, &sk->sk_receive_queue); in chtls_pt_recvmsg()
1601 next_skb = skb_peek(&sk->sk_receive_queue); in chtls_pt_recvmsg()
1643 skb_queue_walk(&sk->sk_receive_queue, skb) { in peekmsg()
1760 skb_queue_empty_lockless(&sk->sk_receive_queue) && in chtls_recvmsg()
1791 skb = skb_peek(&sk->sk_receive_queue); in chtls_recvmsg()
1903 !skb_peek(&sk->sk_receive_queue)) in chtls_recvmsg()
/kernel/linux/linux-5.10/drivers/xen/
pvcalls-back.c:116 spin_lock_irqsave(&map->sock->sk->sk_receive_queue.lock, flags); in pvcalls_conn_back_read()
117 if (skb_queue_empty(&map->sock->sk->sk_receive_queue)) { in pvcalls_conn_back_read()
119 spin_unlock_irqrestore(&map->sock->sk->sk_receive_queue.lock, in pvcalls_conn_back_read()
123 spin_unlock_irqrestore(&map->sock->sk->sk_receive_queue.lock, flags); in pvcalls_conn_back_read()
148 spin_lock_irqsave(&map->sock->sk->sk_receive_queue.lock, flags); in pvcalls_conn_back_read()
149 if (ret > 0 && !skb_queue_empty(&map->sock->sk->sk_receive_queue)) in pvcalls_conn_back_read()
151 spin_unlock_irqrestore(&map->sock->sk->sk_receive_queue.lock, flags); in pvcalls_conn_back_read()
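
pvcalls-back.c uses spin_lock_irqsave()/irqrestore() on the queue lock, the most conservative variant, which is correct regardless of whether the caller already runs with interrupts disabled. A sketch of that usage (rx_queue_is_empty() is an illustrative name):

#include <linux/skbuff.h>
#include <linux/spinlock.h>
#include <net/sock.h>

/* Conservative queue-lock use: irqsave/irqrestore is correct no
 * matter what context the caller runs in.
 */
static bool rx_queue_is_empty(struct sock *sk)
{
        unsigned long flags;
        bool empty;

        spin_lock_irqsave(&sk->sk_receive_queue.lock, flags);
        empty = skb_queue_empty(&sk->sk_receive_queue);
        spin_unlock_irqrestore(&sk->sk_receive_queue.lock, flags);

        return empty;
}
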
/kernel/linux/linux-5.10/net/packet/
af_packet.c:649 spin_lock(&po->sk.sk_receive_queue.lock); in prb_retire_rx_blk_timer_expired()
711 spin_unlock(&po->sk.sk_receive_queue.lock); in prb_retire_rx_blk_timer_expired()
2170 spin_lock(&sk->sk_receive_queue.lock); in packet_rcv()
2173 __skb_queue_tail(&sk->sk_receive_queue, skb); in packet_rcv()
2174 spin_unlock(&sk->sk_receive_queue.lock); in packet_rcv()
2316 spin_lock(&sk->sk_receive_queue.lock); in tpacket_rcv()
2353 __skb_queue_tail(&sk->sk_receive_queue, copy_skb); in tpacket_rcv()
2355 spin_unlock(&sk->sk_receive_queue.lock); in tpacket_rcv()
2442 spin_lock(&sk->sk_receive_queue.lock); in tpacket_rcv()
2445 spin_unlock(&sk->sk_receive_queue.lock); in tpacket_rcv()
[all …]
/kernel/linux/linux-5.10/net/l2tp/
l2tp_ip.c:573 spin_lock_bh(&sk->sk_receive_queue.lock); in l2tp_ioctl()
574 skb = skb_peek(&sk->sk_receive_queue); in l2tp_ioctl()
576 spin_unlock_bh(&sk->sk_receive_queue.lock); in l2tp_ioctl()
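
l2tp_ioctl() is the classic SIOCINQ shape: lock the queue, peek at the head skb, report its length or zero, unlock. A condensed sketch returning the value that would be copied to user space (rx_first_len() is an illustrative name):

#include <linux/skbuff.h>
#include <linux/spinlock.h>
#include <net/sock.h>

/* SIOCINQ-style query: bytes available in the first queued datagram,
 * following the l2tp_ioctl()/pn_ioctl() shape.
 */
static int rx_first_len(struct sock *sk)
{
        struct sk_buff *skb;
        int amount = 0;

        spin_lock_bh(&sk->sk_receive_queue.lock);
        skb = skb_peek(&sk->sk_receive_queue);
        if (skb)
                amount = skb->len;
        spin_unlock_bh(&sk->sk_receive_queue.lock);

        return amount;
}
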
/kernel/linux/linux-5.10/net/nfc/
llcp_sock.c:567 if (!skb_queue_empty_lockless(&sk->sk_receive_queue)) in llcp_sock_poll()
836 skb_queue_empty(&sk->sk_receive_queue)) { in llcp_sock_recvmsg()
863 skb_queue_head(&sk->sk_receive_queue, skb); in llcp_sock_recvmsg()
894 skb_queue_head(&sk->sk_receive_queue, skb); in llcp_sock_recvmsg()
959 skb_queue_purge(&sk->sk_receive_queue); in llcp_sock_destruct()
/kernel/linux/linux-5.10/net/mptcp/
protocol.c:272 __skb_unlink(skb, &ssk->sk_receive_queue); in __mptcp_move_skb()
299 tail = skb_peek_tail(&sk->sk_receive_queue); in __mptcp_move_skb()
304 __skb_queue_tail(&sk->sk_receive_queue, skb); in __mptcp_move_skb()
485 skb = skb_peek(&ssk->sk_receive_queue); in __mptcp_move_skbs_from_subflow()
572 tail = skb_peek_tail(&sk->sk_receive_queue); in mptcp_ofo_queue()
581 __skb_queue_tail(&sk->sk_receive_queue, skb); in mptcp_ofo_queue()
1347 while ((skb = skb_peek(&sk->sk_receive_queue)) != NULL) { in __mptcp_recvmsg_mskq()
1367 __skb_unlink(skb, &sk->sk_receive_queue); in __mptcp_recvmsg_mskq()
1533 if (skb_queue_empty(&sk->sk_receive_queue) && in mptcp_recvmsg()
1582 if (skb_queue_empty(&sk->sk_receive_queue)) { in mptcp_recvmsg()
[all …]
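
The mptcp hits move data from a subflow's receive queue onto the parent socket's queue: unlink from the source, append to the destination, coalescing where possible. The sketch below is a heavily simplified version that uses the self-locking helpers instead of the caller-held locks and skb coalescing the real __mptcp_move_skb() relies on; move_rx_queue() is an illustrative name:

#include <linux/skbuff.h>
#include <net/sock.h>

/* Move every queued skb from one socket's receive queue to another's,
 * then wake the destination reader. Both helpers take the respective
 * queue locks internally.
 */
static void move_rx_queue(struct sock *from, struct sock *to)
{
        struct sk_buff *skb;

        while ((skb = skb_dequeue(&from->sk_receive_queue)) != NULL)
                skb_queue_tail(&to->sk_receive_queue, skb);

        to->sk_data_ready(to);
}
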
/kernel/linux/linux-5.10/net/core/
datagram.c:317 return __skb_recv_datagram(sk, &sk->sk_receive_queue, in skb_recv_datagram()
398 int err = __sk_queue_drop_skb(sk, &sk->sk_receive_queue, skb, flags, in skb_kill_datagram()
817 if (!skb_queue_empty_lockless(&sk->sk_receive_queue)) in datagram_poll()
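
net/core/datagram.c provides the generic consumers most protocols reuse; skb_recv_datagram() waits (unless noblock), dequeues from sk_receive_queue and handles MSG_PEEK. A minimal recvmsg-style sketch, assuming the linux-5.10 signature skb_recv_datagram(sk, flags, noblock, &err); simple_recvmsg() is an illustrative name:

#include <linux/skbuff.h>
#include <linux/socket.h>
#include <net/sock.h>

/* Minimal datagram receive built on the generic helper: copy one
 * datagram (possibly truncated) into the user's msghdr and free it.
 */
static int simple_recvmsg(struct sock *sk, struct msghdr *msg, size_t len,
                          int noblock, int flags)
{
        struct sk_buff *skb;
        int err, copied;

        skb = skb_recv_datagram(sk, flags, noblock, &err);
        if (!skb)
                return err;

        copied = min_t(int, skb->len, len);
        err = skb_copy_datagram_msg(skb, 0, msg, copied);

        skb_free_datagram(sk, skb);
        return err ? err : copied;
}
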
/kernel/linux/linux-5.10/net/rose/
af_rose.c:343 while ((skb = skb_dequeue(&sk->sk_receive_queue)) != NULL) { in rose_destroy_socket()
911 skb = skb_dequeue(&sk->sk_receive_queue); in rose_accept()
1047 skb_queue_head(&sk->sk_receive_queue, skb); in rose_rx_call_request()
1303 spin_lock_irq(&sk->sk_receive_queue.lock); in rose_ioctl()
1304 if ((skb = skb_peek(&sk->sk_receive_queue)) != NULL) in rose_ioctl()
1306 spin_unlock_irq(&sk->sk_receive_queue.lock); in rose_ioctl()
/kernel/linux/linux-5.10/net/llc/
af_llc.c:710 if (skb_queue_empty(&sk->sk_receive_queue)) { in llc_ui_accept()
717 skb = skb_dequeue(&sk->sk_receive_queue); in llc_ui_accept()
803 skb = skb_peek(&sk->sk_receive_queue); in llc_ui_recvmsg()
887 skb_unlink(skb, &sk->sk_receive_queue); in llc_ui_recvmsg()
909 skb_unlink(skb, &sk->sk_receive_queue); in llc_ui_recvmsg()
/kernel/linux/linux-5.10/net/dccp/
proto.c:284 __skb_queue_purge(&sk->sk_receive_queue); in dccp_disconnect()
391 skb = skb_peek(&sk->sk_receive_queue); in dccp_ioctl()
821 struct sk_buff *skb = skb_peek(&sk->sk_receive_queue); in dccp_recvmsg()
1005 while ((skb = __skb_dequeue(&sk->sk_receive_queue)) != NULL) { in dccp_close()
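
The dccp hits show teardown: __skb_queue_purge() on disconnect and a dequeue loop in dccp_close() so each unread skb can be examined before it is freed (the real code also counts the bytes it discards). A short sketch of the close-time drain; drain_on_close() is an illustrative name:

#include <linux/skbuff.h>
#include <net/sock.h>

/* Drain the receive queue at close time, dropping every unread skb. */
static void drain_on_close(struct sock *sk)
{
        struct sk_buff *skb;

        /* precondition: socket lock held and no new skbs can arrive */
        while ((skb = __skb_dequeue(&sk->sk_receive_queue)) != NULL)
                kfree_skb(skb);
}
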
