Lines Matching refs:base (net/sctp/ulpqueue.c)
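For context: the base member matched below is the struct sctp_ep_common embedded at the start of struct sctp_association, and base.sk is the back-pointer to the socket that owns the association. A heavily abridged sketch of the relevant fields (the real definitions in include/net/sctp/structs.h carry many more members):

/* Abridged sketch only -- not the full kernel definitions. */
struct sctp_ep_common {
	struct sock *sk;		/* socket this endpoint/association belongs to */
	struct net *net;		/* cached network namespace */
	/* ... hash linkage, refcount, inqueue, bind_addr ... */
};

struct sctp_association {
	struct sctp_ep_common base;	/* common endpoint state, including base.sk */
	/* ... peer, streams, ulpq, and the rest of the association ... */
};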
168 struct sctp_sock *sp = sctp_sk(ulpq->asoc->base.sk); in sctp_ulpq_set_pd()
179 return sctp_clear_pd(ulpq->asoc->base.sk, ulpq->asoc); in sctp_ulpq_clear_pd()
184 struct sock *sk = ulpq->asoc->base.sk; in sctp_ulpq_tail_event()
482 if (!sctp_sk(asoc->base.sk)->frag_interleave && in sctp_ulpq_retrieve_reassembled()
483 atomic_read(&sctp_sk(asoc->base.sk)->pd_mode)) in sctp_ulpq_retrieve_reassembled()
487 pd_point = sctp_sk(asoc->base.sk)->pd_point; in sctp_ulpq_retrieve_reassembled()
489 retval = sctp_make_reassembled_event(sock_net(asoc->base.sk), in sctp_ulpq_retrieve_reassembled()
500 retval = sctp_make_reassembled_event(sock_net(ulpq->asoc->base.sk), in sctp_ulpq_retrieve_reassembled()
566 retval = sctp_make_reassembled_event(sock_net(ulpq->asoc->base.sk), in sctp_ulpq_retrieve_partial()
667 retval = sctp_make_reassembled_event(sock_net(ulpq->asoc->base.sk), in sctp_ulpq_retrieve_first()
1036 sp = sctp_sk(asoc->base.sk); in sctp_ulpq_partial_delivery()
1086 if (skb_queue_empty(&asoc->base.sk->sk_receive_queue)) { in sctp_ulpq_renege()
1092 if (sk_rmem_schedule(asoc->base.sk, chunk->skb, needed) && in sctp_ulpq_renege()
1105 sk_mem_reclaim(asoc->base.sk); in sctp_ulpq_renege()
1122 sk = ulpq->asoc->base.sk; in sctp_ulpq_abort_pd()
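Every match above follows the same pattern: the ULP queue keeps a pointer to its association, and the code reaches the owning socket through ulpq->asoc->base.sk (or asoc->base.sk), then derives what it actually needs from that socket: sctp_sk() for the SCTP-specific socket state (frag_interleave, pd_mode, pd_point in the partial-delivery checks at lines 482-487 and 1036), sock_net() for the namespace passed to sctp_make_reassembled_event() (lines 489, 500, 566, 667), or the plain struct sock for the receive-queue and memory-accounting calls in sctp_ulpq_renege(). Below is a minimal user-space sketch of that access pattern; the types are stand-ins, not the real kernel headers.

#include <stdio.h>

/* Stand-in types.  In the kernel these come from include/net/sctp/structs.h;
 * only the fields needed to show the asoc->base.sk hop are kept here. */
struct net { int id; };
struct sock { struct net *sk_net; };

/* struct sctp_sock begins with the generic socket (via struct inet_sock in
 * the kernel), which is what lets sctp_sk() be a simple cast. */
struct sctp_sock {
	struct sock sk;
	int pd_mode;		/* stand-in for the kernel's atomic_t pd_mode */
	unsigned int pd_point;
};

struct sctp_ep_common { struct sock *sk; };
struct sctp_association { struct sctp_ep_common base; };
struct sctp_ulpq { struct sctp_association *asoc; };

static struct sctp_sock *sctp_sk(struct sock *sk)
{
	return (struct sctp_sock *)sk;
}

static struct net *sock_net(const struct sock *sk)
{
	return sk->sk_net;
}

/* The hop used throughout ulpqueue.c: queue -> association -> owning socket. */
static void show_pattern(struct sctp_ulpq *ulpq)
{
	struct sock *sk = ulpq->asoc->base.sk;
	struct sctp_sock *sp = sctp_sk(sk);
	struct net *net = sock_net(sk);

	printf("pd_mode=%d pd_point=%u netns=%d\n",
	       sp->pd_mode, sp->pd_point, net->id);
}

int main(void)
{
	struct net ns = { .id = 1 };
	struct sctp_sock ssk = { .sk = { .sk_net = &ns }, .pd_mode = 0, .pd_point = 4096 };
	struct sctp_association asoc = { .base = { .sk = &ssk.sk } };
	struct sctp_ulpq ulpq = { .asoc = &asoc };

	show_pattern(&ulpq);
	return 0;
}

The stand-ins mirror the real layout only as far as needed for the sketch: the SCTP socket starts with the generic socket so the cast in sctp_sk() is safe, and the association carries its owning socket inside the embedded base.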