Lines matching refs:tp (references to the struct tcp_sock pointer tp)

145 struct tcp_sock *tp; in assign_rxopt() local
148 tp = tcp_sk(sk); in assign_rxopt()
151 tp->tcp_header_len = sizeof(struct tcphdr); in assign_rxopt()
152 tp->rx_opt.mss_clamp = cdev->mtus[TCPOPT_MSS_G(opt)] - 40; in assign_rxopt()
153 tp->mss_cache = tp->rx_opt.mss_clamp; in assign_rxopt()
154 tp->rx_opt.tstamp_ok = TCPOPT_TSTAMP_G(opt); in assign_rxopt()
155 tp->rx_opt.snd_wscale = TCPOPT_SACK_G(opt); in assign_rxopt()
156 tp->rx_opt.wscale_ok = TCPOPT_WSCALE_OK_G(opt); in assign_rxopt()
157 SND_WSCALE(tp) = TCPOPT_SND_WSCALE_G(opt); in assign_rxopt()
158 if (!tp->rx_opt.wscale_ok) in assign_rxopt()
159 tp->rx_opt.rcv_wscale = 0; in assign_rxopt()
160 if (tp->rx_opt.tstamp_ok) { in assign_rxopt()
161 tp->tcp_header_len += TCPOLEN_TSTAMP_ALIGNED; in assign_rxopt()
162 tp->rx_opt.mss_clamp -= TCPOLEN_TSTAMP_ALIGNED; in assign_rxopt()
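
The assign_rxopt() lines above copy the hardware-reported TCP options into the socket: the MSS clamp is the negotiated MTU-table entry minus 40 bytes of IPv4 and TCP headers, and when timestamps were agreed on, the 12-byte aligned option (TCPOLEN_TSTAMP_ALIGNED) is charged against both tcp_header_len and mss_clamp. The user-space sketch below models only that arithmetic; the struct, function name and constants are stand-ins, not the driver's definitions.

    #include <stdio.h>

    /* Illustrative constants; the real values come from the kernel headers. */
    #define IP4_PLUS_TCP_HDR   40   /* 20-byte IPv4 header + 20-byte TCP header */
    #define TSTAMP_OPT_ALIGNED 12   /* TCPOLEN_TSTAMP_ALIGNED in Linux          */

    struct rx_opt_model {
        unsigned int mss_clamp;
        unsigned int tcp_header_len;
    };

    /* Model of the assign_rxopt() arithmetic: derive the payload MSS from the
     * negotiated MTU and shrink it again if the timestamp option was agreed. */
    static void assign_rxopt_model(struct rx_opt_model *o, unsigned int mtu,
                                   int tstamp_ok)
    {
        o->tcp_header_len = 20;                 /* sizeof(struct tcphdr) */
        o->mss_clamp = mtu - IP4_PLUS_TCP_HDR;
        if (tstamp_ok) {
            o->tcp_header_len += TSTAMP_OPT_ALIGNED;
            o->mss_clamp -= TSTAMP_OPT_ALIGNED;
        }
    }

    int main(void)
    {
        struct rx_opt_model o;

        assign_rxopt_model(&o, 1500, 1);
        printf("mtu 1500 + timestamps: mss_clamp=%u hdr=%u\n",
               o.mss_clamp, o.tcp_header_len);  /* 1448 / 32 */
        assign_rxopt_model(&o, 1500, 0);
        printf("mtu 1500, no options:  mss_clamp=%u hdr=%u\n",
               o.mss_clamp, o.tcp_header_len);  /* 1460 / 20 */
        return 0;
    }
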
227 struct tcp_sock *tp; in chtls_send_abort() local
230 tp = tcp_sk(sk); in chtls_send_abort()
238 req->rsvd0 = htonl(tp->snd_nxt); in chtls_send_abort()
242 send_or_defer(sk, tp, skb, mode == CPL_ABORT_SEND_RST); in chtls_send_abort()
257 struct tcp_sock *tp = tcp_sk(sk); in chtls_send_reset() local
259 if (send_tx_flowc_wr(sk, 0, tp->snd_nxt, tp->rcv_nxt) < 0) in chtls_send_reset()
288 struct tcp_sock *tp = tcp_sk(sk); in tcp_uncork() local
290 if (tp->nonagle & TCP_NAGLE_CORK) { in tcp_uncork()
291 tp->nonagle &= ~TCP_NAGLE_CORK; in tcp_uncork()
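
tcp_uncork() above acts only when the cork bit is set in tp->nonagle: it clears TCP_NAGLE_CORK and pushes whatever was held back while the socket was corked. A stand-alone model of that flag handling follows; sock_model and push_pending() are invented for the sketch, only the TCP_NAGLE_CORK test comes from the listing.

    #include <stdio.h>

    #define TCP_NAGLE_CORK 2    /* socket is corked, as in include/net/tcp.h */

    struct sock_model {
        unsigned int nonagle;   /* Nagle/cork flag bits                */
        int queued;             /* pretend-bytes held back by corking  */
    };

    static void push_pending(struct sock_model *sk)
    {
        if (sk->queued)
            printf("flushing %d queued bytes\n", sk->queued);
        sk->queued = 0;
    }

    /* Model of tcp_uncork(): only if the cork bit was set do we clear it
     * and push the data that accumulated while the socket was corked. */
    static void tcp_uncork_model(struct sock_model *sk)
    {
        if (sk->nonagle & TCP_NAGLE_CORK) {
            sk->nonagle &= ~TCP_NAGLE_CORK;
            push_pending(sk);
        }
    }

    int main(void)
    {
        struct sock_model sk = { .nonagle = TCP_NAGLE_CORK, .queued = 512 };

        tcp_uncork_model(&sk);  /* clears the bit and flushes */
        tcp_uncork_model(&sk);  /* second call is a no-op     */
        return 0;
    }
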
448 struct tcp_sock *tp; in chtls_disconnect() local
451 tp = tcp_sk(sk); in chtls_disconnect()
465 tp->max_window = 0xFFFF << (tp->rx_opt.snd_wscale); in chtls_disconnect()
951 struct tcp_sock *tp; in chtls_select_mss() local
959 tp = tcp_sk(sk); in chtls_select_mss()
971 tp->advmss = dst_metric_advmss(dst); in chtls_select_mss()
972 if (USER_MSS(tp) && tp->advmss > USER_MSS(tp)) in chtls_select_mss()
973 tp->advmss = USER_MSS(tp); in chtls_select_mss()
974 if (tp->advmss > pmtu - iphdrsz) in chtls_select_mss()
975 tp->advmss = pmtu - iphdrsz; in chtls_select_mss()
976 if (mss && tp->advmss > mss) in chtls_select_mss()
977 tp->advmss = mss; in chtls_select_mss()
979 tp->advmss = cxgb4_best_aligned_mtu(cdev->lldi->mtus, in chtls_select_mss()
981 tp->advmss - tcpoptsz, in chtls_select_mss()
983 tp->advmss -= iphdrsz; in chtls_select_mss()
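
chtls_select_mss() clamps tp->advmss in stages: the route's advertised MSS, a user-set MSS if any, the path MTU minus the protocol headers, and an MSS limit carried in the connection request (presumably the peer's MSS option), after which the value is snapped to an entry of the adapter's MTU table by cxgb4_best_aligned_mtu(). The sketch below models the clamping chain only, assuming IPv4 header sizes; the hardware rounding step is reduced to a comment.

    #include <stdio.h>

    /* Model of the advmss clamping chain in chtls_select_mss(); the
     * final cxgb4_best_aligned_mtu() rounding is only hinted at. */
    static unsigned int clamp_advmss(unsigned int dst_advmss,
                                     unsigned int user_mss,
                                     unsigned int pmtu,
                                     unsigned int peer_mss)
    {
        const unsigned int iphdrsz = 20 + 20;   /* struct iphdr + struct tcphdr */
        unsigned int advmss = dst_advmss;

        if (user_mss && advmss > user_mss)
            advmss = user_mss;                  /* setsockopt(TCP_MAXSEG)  */
        if (advmss > pmtu - iphdrsz)
            advmss = pmtu - iphdrsz;            /* path MTU                */
        if (peer_mss && advmss > peer_mss)
            advmss = peer_mss;                  /* MSS from the request    */

        /* The driver then snaps this to the nearest adapter MTU-table
         * entry and subtracts the protocol headers again to get an MSS. */
        return advmss;
    }

    int main(void)
    {
        printf("advmss = %u\n", clamp_advmss(1460, 0, 1500, 1400)); /* 1400 */
        printf("advmss = %u\n", clamp_advmss(8960, 0, 1500, 0));    /* 1460 */
        return 0;
    }
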
1013 const struct tcp_sock *tp; in chtls_pass_accept_rpl() local
1021 tp = tcp_sk(sk); in chtls_pass_accept_rpl()
1035 WND_SCALE_V(RCV_WSCALE(tp)) | in chtls_pass_accept_rpl()
1038 NAGLE_V(!(tp->nonagle & TCP_NAGLE_OFF)) | in chtls_pass_accept_rpl()
1043 RCV_BUFSIZ_V(min(tp->rcv_wnd >> 10, RCV_BUFSIZ_M)); in chtls_pass_accept_rpl()
1063 opt2 |= WND_SCALE_EN_V(WSCALE_OK(tp)); in chtls_pass_accept_rpl()
1123 struct tcp_sock *tp; in chtls_recv_sock() local
1187 tp = tcp_sk(newsk); in chtls_recv_sock()
1227 tp->rcv_wnd = csk->rcv_win; in chtls_recv_sock()
1238 RCV_WSCALE(tp) = select_rcv_wscale(tcp_full_space(newsk), in chtls_recv_sock()
1241 tp->window_clamp); in chtls_recv_sock()
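
In chtls_recv_sock() the child socket's receive window is seeded from the per-connection receive window, and select_rcv_wscale() derives a window-scale shift from the socket's full receive space and the window clamp (plus a scaling-enabled flag not visible in this listing). The sketch below shows the usual way such a shift is chosen, i.e. the smallest shift that lets the space fit in TCP's 16-bit window field, capped at 14; the driver's helper may differ in detail.

    #include <stdio.h>

    /* Sketch of receive window-scale selection: the smallest shift that
     * lets the available buffer space be advertised in TCP's 16-bit
     * window field, capped at the protocol maximum of 14. */
    static int select_rcv_wscale_model(int space, int wscale_ok, int win_clamp)
    {
        int wscale = 0;

        if (win_clamp && win_clamp < space)
            space = win_clamp;
        if (!wscale_ok)
            return 0;
        while (wscale < 14 && (65535 << wscale) < space)
            wscale++;
        return wscale;
    }

    int main(void)
    {
        printf("%d\n", select_rcv_wscale_model(4 * 1024 * 1024, 1, 0)); /* 7 */
        printf("%d\n", select_rcv_wscale_model(60 * 1024, 1, 0));       /* 0 */
        return 0;
    }
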
1464 struct tcp_sock *tp = tcp_sk(sk); in make_established() local
1466 tp->pushed_seq = snd_isn; in make_established()
1467 tp->write_seq = snd_isn; in make_established()
1468 tp->snd_nxt = snd_isn; in make_established()
1469 tp->snd_una = snd_isn; in make_established()
1473 if (tp->rcv_wnd > (RCV_BUFSIZ_M << 10)) in make_established()
1474 tp->rcv_wup -= tp->rcv_wnd - (RCV_BUFSIZ_M << 10); in make_established()
1639 struct tcp_sock *tp = tcp_sk(sk); in handle_urg_ptr() local
1642 if (tp->urg_data && !after(urg_seq, tp->urg_seq)) in handle_urg_ptr()
1646 if (tp->urg_seq == tp->copied_seq && tp->urg_data && in handle_urg_ptr()
1648 tp->copied_seq != tp->rcv_nxt) { in handle_urg_ptr()
1651 tp->copied_seq++; in handle_urg_ptr()
1652 if (skb && tp->copied_seq - ULP_SKB_CB(skb)->seq >= skb->len) in handle_urg_ptr()
1656 tp->urg_data = TCP_URG_NOTYET; in handle_urg_ptr()
1657 tp->urg_seq = urg_seq; in handle_urg_ptr()
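
handle_urg_ptr() and the urgent-data checks further down compare 32-bit sequence numbers that wrap, so they rely on the kernel's before()/after() helpers rather than plain relational operators. A self-contained illustration of that wrap-safe comparison, with the helpers reproduced as defined in include/net/tcp.h:

    #include <stdio.h>
    #include <stdint.h>

    /* Wrap-safe sequence comparisons: the subtraction is interpreted
     * as a signed 32-bit distance between the two sequence numbers. */
    static int before(uint32_t seq1, uint32_t seq2)
    {
        return (int32_t)(seq1 - seq2) < 0;
    }

    static int after(uint32_t seq1, uint32_t seq2)
    {
        return before(seq2, seq1);
    }

    int main(void)
    {
        uint32_t urg_seq = 0xfffffff0u;   /* just before the wrap */
        uint32_t rcv_nxt = 0x00000010u;   /* just after the wrap  */

        /* A plain '<' gets the ordering backwards across the wrap;
         * before()/after() get it right. */
        printf("before(urg_seq, rcv_nxt) = %d\n", before(urg_seq, rcv_nxt)); /* 1 */
        printf("after(rcv_nxt, urg_seq)  = %d\n", after(rcv_nxt, urg_seq));  /* 1 */
        printf("plain urg_seq < rcv_nxt  = %d\n", urg_seq < rcv_nxt);        /* 0 */
        return 0;
    }
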
1685 struct tcp_sock *tp; in chtls_recv_data() local
1688 tp = tcp_sk(sk); in chtls_recv_data()
1705 handle_urg_ptr(sk, tp->rcv_nxt + ntohs(hdr->urg)); in chtls_recv_data()
1706 if (unlikely(tp->urg_data == TCP_URG_NOTYET && in chtls_recv_data()
1707 tp->urg_seq - tp->rcv_nxt < skb->len)) in chtls_recv_data()
1708 tp->urg_data = TCP_URG_VALID | in chtls_recv_data()
1709 skb->data[tp->urg_seq - tp->rcv_nxt]; in chtls_recv_data()
1713 csk->delack_seq = tp->rcv_nxt; in chtls_recv_data()
1717 tp->rcv_nxt += skb->len; in chtls_recv_data()
1748 struct tcp_sock *tp; in chtls_recv_pdu() local
1752 tp = tcp_sk(sk); in chtls_recv_pdu()
1769 if (unlikely(tp->urg_data == TCP_URG_NOTYET && tp->urg_seq - in chtls_recv_pdu()
1770 tp->rcv_nxt < skb->len)) in chtls_recv_pdu()
1771 tp->urg_data = TCP_URG_VALID | in chtls_recv_pdu()
1772 skb->data[tp->urg_seq - tp->rcv_nxt]; in chtls_recv_pdu()
1810 struct tcp_sock *tp; in chtls_rx_hdr() local
1815 tp = tcp_sk(sk); in chtls_rx_hdr()
1828 tp->rcv_nxt += in chtls_rx_hdr()
1867 struct tcp_sock *tp = tcp_sk(sk); in chtls_timewait() local
1869 tp->rcv_nxt++; in chtls_timewait()
1870 tp->rx_opt.ts_recent_stamp = ktime_get_seconds(); in chtls_timewait()
1871 tp->srtt_us = 0; in chtls_timewait()
1922 struct tcp_sock *tp; in chtls_close_con_rpl() local
1929 tp = tcp_sk(sk); in chtls_close_con_rpl()
1931 tp->snd_una = ntohl(rpl->snd_nxt) - 1; /* exclude FIN */ in chtls_close_con_rpl()
2124 struct tcp_sock *tp = tcp_sk(sk); in chtls_abort_req_rss() local
2126 if (send_tx_flowc_wr(sk, 0, tp->snd_nxt, tp->rcv_nxt) < 0) in chtls_abort_req_rss()
2227 struct tcp_sock *tp = tcp_sk(sk); in chtls_rx_ack() local
2256 if (unlikely(before(snd_una, tp->snd_una))) { in chtls_rx_ack()
2261 if (tp->snd_una != snd_una) { in chtls_rx_ack()
2262 tp->snd_una = snd_una; in chtls_rx_ack()
2263 tp->rcv_tstamp = tcp_jiffies32; in chtls_rx_ack()
2264 if (tp->snd_una == tp->snd_nxt && in chtls_rx_ack()
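
chtls_rx_ack() applies the same wrap-safe test before moving snd_una: an ACK older than the current snd_una is stale and dropped; otherwise snd_una advances, the receive timestamp is refreshed, and snd_una == snd_nxt marks the point where everything in flight has been acknowledged. A small stand-alone model of that update; struct ack_state and the printf reporting are invented for the sketch.

    #include <stdio.h>
    #include <stdint.h>

    struct ack_state {
        uint32_t snd_una;           /* oldest unacknowledged byte */
        uint32_t snd_nxt;           /* next byte to be sent       */
        unsigned long rcv_tstamp;   /* time of the last good ACK  */
    };

    static int before(uint32_t seq1, uint32_t seq2)
    {
        return (int32_t)(seq1 - seq2) < 0;
    }

    /* Model of the snd_una update in chtls_rx_ack(): drop ACKs older
     * than what is already acknowledged, otherwise advance and note
     * when everything in flight has been acknowledged. */
    static void rx_ack_model(struct ack_state *tp, uint32_t snd_una,
                             unsigned long now)
    {
        if (before(snd_una, tp->snd_una)) {
            printf("stale ACK %u ignored\n", (unsigned)snd_una);
            return;
        }
        if (tp->snd_una != snd_una) {
            tp->snd_una = snd_una;
            tp->rcv_tstamp = now;
            if (tp->snd_una == tp->snd_nxt)
                printf("all outstanding data acked at %u\n", (unsigned)snd_una);
        }
    }

    int main(void)
    {
        struct ack_state tp = { .snd_una = 1000, .snd_nxt = 1500 };

        rx_ack_model(&tp, 1200, 1);   /* advances snd_una   */
        rx_ack_model(&tp, 1100, 2);   /* stale, ignored     */
        rx_ack_model(&tp, 1500, 3);   /* everything acked   */
        return 0;
    }
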