
Searched refs:mss (Results 1 – 25 of 28) sorted by relevance


/net/ipv4/
tcp_offload.c
17 unsigned int seq, unsigned int mss) in tcp_gso_tstamp() argument
20 if (before(ts_seq, seq + mss)) { in tcp_gso_tstamp()
27 seq += mss; in tcp_gso_tstamp()
65 unsigned int mss; in tcp_gso_segment() local
85 mss = skb_shinfo(skb)->gso_size; in tcp_gso_segment()
86 if (unlikely(skb->len <= mss)) in tcp_gso_segment()
92 skb_shinfo(skb)->gso_segs = DIV_ROUND_UP(skb->len, mss); in tcp_gso_segment()
115 mss *= skb_shinfo(segs)->gso_segs; in tcp_gso_segment()
117 delta = (__force __wsum)htonl(oldlen + thlen + mss); in tcp_gso_segment()
124 tcp_gso_tstamp(segs, skb_shinfo(gso_skb)->tskey, seq, mss); in tcp_gso_segment()
[all …]
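
The tcp_gso_segment() hits above hinge on one relationship: the payload is split into gso_segs pieces of at most mss bytes, and each segment's sequence number sits mss past the previous one (which is also how tcp_gso_tstamp() finds the segment carrying the timestamp key). A minimal userspace sketch of that arithmetic; div_round_up() and every value below are made up for illustration, not taken from kernel headers:

    #include <stdio.h>

    /* same rounding as the kernel's DIV_ROUND_UP() macro */
    static unsigned int div_round_up(unsigned int n, unsigned int d)
    {
        return (n + d - 1) / d;
    }

    int main(void)
    {
        unsigned int payload_len = 4000;  /* hypothetical skb->len payload     */
        unsigned int mss = 1448;          /* hypothetical gso_size             */
        unsigned int seq = 1000;          /* hypothetical starting sequence nr */
        unsigned int segs = div_round_up(payload_len, mss);

        printf("gso_segs = %u\n", segs);
        /* each emitted segment starts mss bytes after the previous one */
        for (unsigned int i = 0; i < segs; i++, seq += mss)
            printf("segment %u starts at seq %u\n", i, seq);
        return 0;
    }
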
tcp_recovery.c
225 u32 mss; in tcp_newreno_mark_lost() local
230 mss = tcp_skb_mss(skb); in tcp_newreno_mark_lost()
231 if (tcp_skb_pcount(skb) > 1 && skb->len > mss) in tcp_newreno_mark_lost()
233 mss, mss, GFP_ATOMIC); in tcp_newreno_mark_lost()
syncookies.c
157 const __u16 mss = *mssp; in __cookie_v4_init_sequence() local
160 if (mss >= msstab[mssind]) in __cookie_v4_init_sequence()
330 int full_space, mss; in cookie_v4_check() local
343 mss = __cookie_v4_check(ip_hdr(skb), th, cookie); in cookie_v4_check()
344 if (mss == 0) { in cookie_v4_check()
377 req->mss = mss; in cookie_v4_check()
435 tcp_select_initial_window(sk, full_space, req->mss, in cookie_v4_check()
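
A syncookie cannot carry the peer's exact MSS, so __cookie_v4_init_sequence() stores only an index into a small table, keeping the largest entry that does not exceed what the peer advertised. A sketch of that selection; the table values below are illustrative, not the kernel's msstab[]:

    #include <stdio.h>

    /* illustrative table only; the kernel defines its own msstab[] */
    static const unsigned short msstab[] = { 536, 1300, 1440, 1460 };

    static unsigned int mss_to_index(unsigned short mss)
    {
        unsigned int i;

        /* walk from the largest entry down, keep the first one <= mss */
        for (i = sizeof(msstab) / sizeof(msstab[0]) - 1; i > 0; i--)
            if (mss >= msstab[i])
                break;
        return i; /* index 0 is the conservative fallback */
    }

    int main(void)
    {
        printf("mss 1460 -> index %u\n", mss_to_index(1460)); /* picks 1460 */
        printf("mss 1400 -> index %u\n", mss_to_index(1400)); /* picks 1300 */
        return 0;
    }
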
udp_offload.c
254 unsigned int mss = skb_shinfo(skb)->gso_size; in __udp_gso_segment_list() local
260 udp_hdr(skb)->len = htons(sizeof(struct udphdr) + mss); in __udp_gso_segment_list()
272 unsigned int mss; in __udp_gso_segment() local
277 mss = skb_shinfo(gso_skb)->gso_size; in __udp_gso_segment()
278 if (gso_skb->len <= sizeof(*uh) + mss) in __udp_gso_segment()
289 mss); in __udp_gso_segment()
315 mss *= skb_shinfo(segs)->gso_segs; in __udp_gso_segment()
326 newlen = htons(sizeof(*uh) + mss); in __udp_gso_segment()
385 unsigned int mss; in udp4_ufo_fragment() local
406 mss = skb_shinfo(skb)->gso_size; in udp4_ufo_fragment()
[all …]
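
In the UDP GSO paths mss is the per-segment payload size, so every full segment's UDP length field becomes the 8-byte header plus mss, and the segment count is a rounded-up division just as in the TCP case. A small sketch of that header fix-up; the sizes are made up and htons() comes from the ordinary userspace socket headers:

    #include <stdio.h>
    #include <stdint.h>
    #include <arpa/inet.h>

    #define UDP_HDR_LEN 8   /* sizeof(struct udphdr) */

    int main(void)
    {
        unsigned int mss = 1432;                        /* hypothetical gso_size    */
        unsigned int payload = 4000;                    /* hypothetical UDP payload */
        unsigned int segs = (payload + mss - 1) / mss;

        /* every full segment advertises header + mss in its length field */
        uint16_t udp_len_field = htons(UDP_HDR_LEN + mss);
        printf("%u segments, per-segment UDP len field 0x%04x (%u bytes)\n",
               segs, udp_len_field, UDP_HDR_LEN + mss);
        return 0;
    }
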
tcp_output.c
126 int mss = tp->advmss; in tcp_advertise_mss() local
131 if (metric < mss) { in tcp_advertise_mss()
132 mss = metric; in tcp_advertise_mss()
133 tp->advmss = mss; in tcp_advertise_mss()
137 return (__u16)mss; in tcp_advertise_mss()
206 void tcp_select_initial_window(const struct sock *sk, int __space, __u32 mss, in tcp_select_initial_window() argument
220 if (space > mss) in tcp_select_initial_window()
221 space = rounddown(space, mss); in tcp_select_initial_window()
237 *rcv_wnd = min(*rcv_wnd, init_rcv_wnd * mss); in tcp_select_initial_window()
451 u16 mss; /* 0 to disable */ member
[all …]
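
tcp_select_initial_window() and tcp_advertise_mss() both trim values to whole segments: the receive-buffer space is rounded down to a multiple of mss and the initial window is capped at a fixed number of full-sized segments. A standalone sketch of those two clamps; the buffer size and the 10-segment initial window are illustrative stand-ins, not kernel defaults being asserted:

    #include <stdio.h>

    int main(void)
    {
        unsigned int space = 87380;       /* hypothetical receive buffer space   */
        unsigned int mss = 1460;
        unsigned int init_rcv_wnd = 10;   /* initial window, counted in segments */
        unsigned int rcv_wnd;

        /* advertise only whole segments: rounddown(space, mss) */
        if (space > mss)
            space -= space % mss;

        /* cap the initial window at init_rcv_wnd full segments */
        rcv_wnd = space;
        if (rcv_wnd > init_rcv_wnd * mss)
            rcv_wnd = init_rcv_wnd * mss;

        printf("rounded space %u, initial rcv_wnd %u\n", space, rcv_wnd);
        return 0;
    }
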
tcp_timer.c
162 int mss; in tcp_mtu_probing() local
172 mss = tcp_mtu_to_mss(sk, icsk->icsk_mtup.search_low) >> 1; in tcp_mtu_probing()
173 mss = min(READ_ONCE(net->ipv4.sysctl_tcp_base_mss), mss); in tcp_mtu_probing()
174 mss = max(mss, READ_ONCE(net->ipv4.sysctl_tcp_mtu_probe_floor)); in tcp_mtu_probing()
175 mss = max(mss, READ_ONCE(net->ipv4.sysctl_tcp_min_snd_mss)); in tcp_mtu_probing()
176 icsk->icsk_mtup.search_low = tcp_mss_to_mtu(sk, mss); in tcp_mtu_probing()
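
When retransmissions keep failing, tcp_mtu_probing() backs off: it halves the MSS implied by the current search_low MTU and clamps the result against three sysctls before converting back to an MTU. A sketch of just that clamp chain; the sysctl values below are stand-ins, not the real defaults:

    #include <stdio.h>

    static unsigned int min_u(unsigned int a, unsigned int b) { return a < b ? a : b; }
    static unsigned int max_u(unsigned int a, unsigned int b) { return a > b ? a : b; }

    int main(void)
    {
        unsigned int search_low_mss = 1400; /* MSS equivalent of icsk_mtup.search_low  */
        unsigned int base_mss = 1024;       /* stand-in for sysctl_tcp_base_mss        */
        unsigned int probe_floor = 48;      /* stand-in for sysctl_tcp_mtu_probe_floor */
        unsigned int min_snd_mss = 48;      /* stand-in for sysctl_tcp_min_snd_mss     */

        unsigned int mss = search_low_mss >> 1;  /* back off: halve it       */
        mss = min_u(base_mss, mss);              /* never go above base_mss  */
        mss = max_u(mss, probe_floor);           /* respect the probe floor  */
        mss = max_u(mss, min_snd_mss);           /* and the minimum send MSS */

        printf("new probing MSS: %u\n", mss);
        return 0;
    }
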
tcp_metrics.c
29 u16 mss; member
134 tm->tcpm_fastopen.mss = 0; in tcpm_suck_dst()
561 void tcp_fastopen_cache_get(struct sock *sk, u16 *mss, in tcp_fastopen_cache_get() argument
574 if (tfom->mss) in tcp_fastopen_cache_get()
575 *mss = tfom->mss; in tcp_fastopen_cache_get()
584 void tcp_fastopen_cache_set(struct sock *sk, u16 mss, in tcp_fastopen_cache_set() argument
599 if (mss) in tcp_fastopen_cache_set()
600 tfom->mss = mss; in tcp_fastopen_cache_set()
717 if (tfom->mss && in tcp_metrics_fill_info()
719 tfom->mss) < 0) in tcp_metrics_fill_info()
tcp_input.c
1063 const u32 mss = tp->mss_cache; in tcp_check_sack_reordering() local
1071 if ((metric > tp->reordering * mss) && mss) { in tcp_check_sack_reordering()
1080 tp->reordering = min_t(u32, (metric + mss - 1) / mss, in tcp_check_sack_reordering()
1329 unsigned int mss; in tcp_match_skb_to_sack() local
1336 mss = tcp_skb_mss(skb); in tcp_match_skb_to_sack()
1341 if (pkt_len < mss) in tcp_match_skb_to_sack()
1342 pkt_len = mss; in tcp_match_skb_to_sack()
1345 if (pkt_len < mss) in tcp_match_skb_to_sack()
1352 if (pkt_len > mss) { in tcp_match_skb_to_sack()
1353 unsigned int new_len = (pkt_len / mss) * mss; in tcp_match_skb_to_sack()
[all …]
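
tcp_check_sack_reordering() measures reordering in units of mss: the observed byte distance is converted to segments with a rounded-up division, and tp->reordering only grows when that distance exceeds the current estimate. A compact sketch of the update; the 300-segment cap stands in for the sysctl limit and is illustrative:

    #include <stdio.h>

    int main(void)
    {
        unsigned int mss = 1460;            /* tp->mss_cache                 */
        unsigned int reordering = 3;        /* current estimate, in segments */
        unsigned int metric = 10000;        /* observed distance, in bytes   */
        unsigned int max_reordering = 300;  /* illustrative upper bound      */

        if (metric > reordering * mss && mss) {
            unsigned int segs = (metric + mss - 1) / mss; /* round up to segments */
            reordering = segs < max_reordering ? segs : max_reordering;
        }
        printf("reordering estimate: %u segments\n", reordering);
        return 0;
    }
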
tcp_minisocks.c
395 int mss; in tcp_openreq_init_rwin() local
397 mss = tcp_mss_clamp(tp, dst_metric_advmss(dst)); in tcp_openreq_init_rwin()
410 else if (full_space < rcv_wnd * mss) in tcp_openreq_init_rwin()
411 full_space = rcv_wnd * mss; in tcp_openreq_init_rwin()
415 mss - (ireq->tstamp_ok ? TCPOLEN_TSTAMP_ALIGNED : 0), in tcp_openreq_init_rwin()
570 newtp->rx_opt.mss_clamp = req->mss; in tcp_create_openreq_child()
tcp_fastopen.c
414 bool tcp_fastopen_cookie_check(struct sock *sk, u16 *mss, in tcp_fastopen_cookie_check() argument
419 tcp_fastopen_cache_get(sk, mss, cookie); in tcp_fastopen_cookie_check()
450 u16 mss; in tcp_fastopen_defer_connect() local
453 if (tcp_fastopen_cookie_check(sk, &mss, &cookie)) { in tcp_fastopen_defer_connect()
tcp_bbr.c
246 unsigned int mss = tcp_sk(sk)->mss_cache; in bbr_rate_bytes_per_sec() local
248 rate *= mss; in bbr_rate_bytes_per_sec()
tcp_ipv4.c
1691 u16 mss = 0; in tcp_v4_get_syncookie() local
1693 mss = tcp_get_syncookie_mss(&tcp_request_sock_ops, in tcp_v4_get_syncookie()
1695 if (mss) { in tcp_v4_get_syncookie()
1696 *cookie = __cookie_v4_init_sequence(iph, th, &mss); in tcp_v4_get_syncookie()
1700 return mss; in tcp_v4_get_syncookie()
/net/ipv6/
syncookies.c
96 const __u16 mss = *mssp; in __cookie_v6_init_sequence() local
99 if (mss >= msstab[mssind]) in __cookie_v6_init_sequence()
139 int full_space, mss; in cookie_v6_check() local
151 mss = __cookie_v6_check(ipv6_hdr(skb), th, cookie); in cookie_v6_check()
152 if (mss == 0) { in cookie_v6_check()
183 req->mss = mss; in cookie_v6_check()
253 tcp_select_initial_window(sk, full_space, req->mss, in cookie_v6_check()
udp_offload.c
23 unsigned int mss; in udp6_ufo_fragment() local
49 mss = skb_shinfo(skb)->gso_size; in udp6_ufo_fragment()
50 if (unlikely(skb->len <= mss)) in udp6_ufo_fragment()
tcp_ipv6.c
1150 u16 mss = 0; in tcp_v6_get_syncookie() local
1152 mss = tcp_get_syncookie_mss(&tcp6_request_sock_ops, in tcp_v6_get_syncookie()
1154 if (mss) { in tcp_v6_get_syncookie()
1155 *cookie = __cookie_v6_init_sequence(iph, th, &mss); in tcp_v6_get_syncookie()
1159 return mss; in tcp_v6_get_syncookie()
/net/netfilter/
nfnetlink_osf.c
73 u16 mss = 0; in nf_osf_match_one() local
106 mss = ctx->optp[3]; in nf_osf_match_one()
107 mss <<= 8; in nf_osf_match_one()
108 mss |= ctx->optp[2]; in nf_osf_match_one()
110 mss = ntohs((__force __be16)mss); in nf_osf_match_one()
141 if (ctx->window == f->wss.val * mss || in nf_osf_match_one()
147 if (ctx->window == f->wss.val * (mss + 40) || in nf_osf_match_one()
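
nf_osf_match_one() reads the 16-bit MSS out of the TCP MSS option (kind 2, length 4) and then checks whether the advertised window is a fingerprint-defined multiple of that MSS, or of MSS + 40 (MSS plus minimal IPv4 and TCP headers). A userspace sketch of the parse and the check, written endian-safely instead of copying the excerpt's byte shuffling; the window and multiplier values are made up:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        /* TCP MSS option as seen on the wire: kind=2, len=4, MSS=1460 (0x05b4) */
        const uint8_t opt[4] = { 2, 4, 0x05, 0xb4 };
        unsigned int window = 64240;        /* hypothetical advertised window */
        unsigned int wss_multiplier = 44;   /* hypothetical fingerprint value */

        unsigned int mss = (opt[2] << 8) | opt[3];  /* network order -> host */

        if (window == wss_multiplier * mss)
            printf("window is %u * MSS (%u)\n", wss_multiplier, mss);
        else if (window == wss_multiplier * (mss + 40))
            printf("window is %u * (MSS + 40)\n", wss_multiplier);
        else
            printf("no window/MSS relationship matched\n");
        return 0;
    }
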
nf_synproxy_core.c
455 u16 mss = opts->mss_encode; in synproxy_send_client_synack() local
472 nth->seq = htonl(__cookie_v4_init_sequence(iph, th, &mss)); in synproxy_send_client_synack()
618 int mss; in synproxy_recv_client_ack() local
620 mss = __cookie_v4_check(ip_hdr(skb), th, ntohl(th->ack_seq) - 1); in synproxy_recv_client_ack()
621 if (mss == 0) { in synproxy_recv_client_ack()
627 opts->mss_option = mss; in synproxy_recv_client_ack()
868 u16 mss = opts->mss_encode; in synproxy_send_client_synack_ipv6() local
885 nth->seq = htonl(nf_ipv6_cookie_init_sequence(iph, th, &mss)); in synproxy_send_client_synack_ipv6()
1035 int mss; in synproxy_recv_client_ack_ipv6() local
1037 mss = nf_cookie_v6_check(ipv6_hdr(skb), th, ntohl(th->ack_seq) - 1); in synproxy_recv_client_ack_ipv6()
[all …]
xt_TCPMSS.c
102 if (info->mss == XT_TCPMSS_CLAMP_PMTU) { in tcpmss_mangle_packet()
114 newmss = info->mss; in tcpmss_mangle_packet()
269 if (info->mss == XT_TCPMSS_CLAMP_PMTU && in tcpmss_tg4_check()
293 if (info->mss == XT_TCPMSS_CLAMP_PMTU && in tcpmss_tg6_check()
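
The TCPMSS target either writes a fixed MSS or, with XT_TCPMSS_CLAMP_PMTU, derives one from the path MTU minus the IP and TCP header lengths, and it is only meant to lower the value found in the SYN. A hedged sketch of that decision; the sentinel value, the 40-byte header assumption, and the only-decrease rule are stated here as assumptions about the target's intent rather than copied from the kernel:

    #include <stdio.h>

    #define CLAMP_PMTU 0xffffU  /* stand-in sentinel for XT_TCPMSS_CLAMP_PMTU */

    /* MSS that should end up in the SYN, per the clamp-to-PMTU / fixed-value
     * logic sketched from the excerpt above (assumption: never raise it). */
    static unsigned int clamp_mss(unsigned int configured, unsigned int pmtu,
                                  unsigned int hdr_len, unsigned int oldmss)
    {
        unsigned int newmss;

        if (configured == CLAMP_PMTU)
            newmss = pmtu - hdr_len;  /* payload room left on this path */
        else
            newmss = configured;

        return oldmss < newmss ? oldmss : newmss;
    }

    int main(void)
    {
        /* 1400-byte path MTU, 40 bytes of IPv4+TCP headers, peer offered 1460 */
        printf("clamped MSS: %u\n", clamp_mss(CLAMP_PMTU, 1400, 40, 1460));
        return 0;
    }
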
nft_synproxy.c
35 opts->mss_option = info->mss; in nft_synproxy_tcp_options()
161 priv->info.mss = ntohs(nla_get_be16(tb[NFTA_SYNPROXY_MSS])); in nft_synproxy_do_init()
230 if (nla_put_be16(skb, NFTA_SYNPROXY_MSS, htons(priv->info.mss)) || in nft_synproxy_do_dump()
/net/batman-adv/
tp_meter.c
143 static void batadv_tp_update_cwnd(struct batadv_tp_vars *tp_vars, u32 mss) in batadv_tp_update_cwnd() argument
150 tp_vars->cwnd = batadv_tp_cwnd(tp_vars->cwnd, mss, mss); in batadv_tp_update_cwnd()
157 ((mss * mss) << 6) / (tp_vars->cwnd << 3)); in batadv_tp_update_cwnd()
158 if (tp_vars->dec_cwnd < (mss << 3)) { in batadv_tp_update_cwnd()
163 tp_vars->cwnd = batadv_tp_cwnd(tp_vars->cwnd, mss, mss); in batadv_tp_update_cwnd()
635 size_t packet_len, mss; in batadv_tp_recv_ack() local
639 mss = BATADV_TP_PLEN; in batadv_tp_recv_ack()
704 tp_vars->cwnd = batadv_tp_cwnd(tp_vars->ss_threshold, 3 * mss, in batadv_tp_recv_ack()
705 mss); in batadv_tp_recv_ack()
731 mss, mss); in batadv_tp_recv_ack()
[all …]
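
batadv_tp_update_cwnd() grows the window with a fixed-point accumulator so that, in congestion avoidance, cwnd rises by roughly one mss per window of ACKs (the classic mss*mss/cwnd increase per ACK, scaled by 8 to avoid losing the remainder). A simplified sketch of the accumulator; slow start, the threshold handling, and the kernel's clamping helper are left out, and the sizes are made up:

    #include <stdio.h>

    int main(void)
    {
        unsigned int mss = 1000;       /* hypothetical probe packet size */
        unsigned int cwnd = 10 * mss;  /* current window, in bytes       */
        unsigned int acc = 0;          /* fixed-point credit, 1/8 bytes  */

        /* simulate a dozen ACKs in congestion avoidance */
        for (int ack = 0; ack < 12; ack++) {
            acc += ((mss * mss) << 6) / (cwnd << 3); /* 8 * mss^2 / cwnd         */
            if (acc >= (mss << 3)) {                 /* one full mss accumulated */
                cwnd += mss;
                acc -= mss << 3;
            }
        }
        printf("cwnd after those ACKs: %u bytes\n", cwnd);
        return 0;
    }
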
/net/tipc/
msg.c
206 int mss, struct sk_buff_head *txq) in tipc_msg_append() argument
218 if (!skb || skb->len >= mss) { in tipc_msg_append()
219 skb = tipc_buf_acquire(mss, GFP_KERNEL); in tipc_msg_append()
234 cpy = min_t(size_t, rem, mss - mlen); in tipc_msg_append()
520 bool tipc_msg_try_bundle(struct sk_buff *tskb, struct sk_buff **skb, u32 mss, in tipc_msg_try_bundle() argument
534 if (mss <= INT_H_SIZE + msg_size(msg)) in tipc_msg_try_bundle()
549 if (unlikely(mss < BUF_ALIGN(INT_H_SIZE + tsz) + msg_size(msg))) in tipc_msg_try_bundle()
551 if (unlikely(pskb_expand_head(tskb, INT_H_SIZE, mss - tsz - INT_H_SIZE, in tipc_msg_try_bundle()
565 if (likely(tipc_msg_bundle(tskb, msg, mss))) { in tipc_msg_try_bundle()
msg.h
1166 bool tipc_msg_try_bundle(struct sk_buff *tskb, struct sk_buff **skb, u32 mss,
1174 int mss, struct sk_buff_head *txq);
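
The TIPC bundling helpers merge a new message into the tail buffer only when the bundle header, the already-queued (aligned) payload, and the new message all fit within the link MSS. A sketch of that fit test; the 40-byte INT_H_SIZE and the 4-byte alignment are assumptions made for illustration, as are the message sizes:

    #include <stdio.h>

    #define INT_H_SIZE   40                    /* assumed internal header size */
    #define BUF_ALIGN(x) (((x) + 3u) & ~3u)    /* assumed 4-byte alignment     */

    /* Can a new message of new_len bytes be bundled behind cur_len bytes
     * already queued, given the link MSS?  Pure-arithmetic version of the
     * checks visible in the tipc_msg_try_bundle() excerpt. */
    static int fits_in_bundle(unsigned int mss, unsigned int cur_len,
                              unsigned int new_len)
    {
        if (mss <= INT_H_SIZE + new_len)  /* too big to be worth bundling */
            return 0;
        return mss >= BUF_ALIGN(INT_H_SIZE + cur_len) + new_len;
    }

    int main(void)
    {
        printf("bundle 200B after 800B at mss 1400: %d\n", fits_in_bundle(1400, 800, 200));
        printf("bundle 600B after 800B at mss 1400: %d\n", fits_in_bundle(1400, 800, 600));
        return 0;
    }
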
/net/ipv4/netfilter/
ipt_SYNPROXY.c
40 opts.mss_option = info->mss; in synproxy_tg4()
/net/ipv6/netfilter/
ip6t_SYNPROXY.c
40 opts.mss_option = info->mss; in synproxy_tg6()
/net/core/
skbuff.c
4450 unsigned int mss = skb_shinfo(head_skb)->gso_size; in skb_segment() local
4466 mss != GSO_BY_FRAGS && mss != skb_headlen(head_skb)) { in skb_segment()
4494 if (sg && csum && (mss != GSO_BY_FRAGS)) { in skb_segment()
4530 partial_segs = min(len, GSO_BY_FRAGS - 1U) / mss; in skb_segment()
4532 mss *= partial_segs; in skb_segment()
4554 if (unlikely(mss == GSO_BY_FRAGS)) { in skb_segment()
4558 if (len > mss) in skb_segment()
4559 len = mss; in skb_segment()
4698 pos, mss); in skb_segment()
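
For partial GSO, skb_segment() packs several mss-sized pieces into each output skb: it computes how many full segments fit (partial_segs) and scales mss by that count so the rest of the loop works on the larger unit, with GSO_BY_FRAGS reserved as a sentinel gso_size. A sketch of just that sizing step; the payload length is illustrative:

    #include <stdio.h>

    #define GSO_BY_FRAGS 0xffffu  /* sentinel gso_size: "segment by frag boundaries" */

    int main(void)
    {
        unsigned int mss = 1448;   /* gso_size of the head skb */
        unsigned int len = 60000;  /* payload left to segment  */

        if (mss != GSO_BY_FRAGS) {
            /* how many full mss units can be packed into one output skb */
            unsigned int cap = len < GSO_BY_FRAGS - 1u ? len : GSO_BY_FRAGS - 1u;
            unsigned int partial_segs = cap / mss;

            if (partial_segs > 1)
                mss *= partial_segs;  /* work in "super segment" units */
            printf("partial_segs=%u, effective mss=%u\n", partial_segs, mss);
        }
        return 0;
    }
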
