/net/dccp/ |
D | ackvec.c |
    51    int dccp_ackvec_update_records(struct dccp_ackvec *av, u64 seqno, u8 nonce_sum)    in dccp_ackvec_update_records() argument
    59    avr->avr_ack_seqno = seqno;    in dccp_ackvec_update_records()
    132   u64 seqno, enum dccp_ackvec_states state)    in dccp_ackvec_update_old() argument
    163   (unsigned long long)seqno, state);    in dccp_ackvec_update_old()
    197   u64 seqno, enum dccp_ackvec_states state)    in dccp_ackvec_add_new() argument
    237   av->av_buf_ackno = seqno;    in dccp_ackvec_add_new()
    250   u64 seqno = DCCP_SKB_CB(skb)->dccpd_seq;    in dccp_ackvec_input() local
    254   dccp_ackvec_add_new(av, 1, seqno, state);    in dccp_ackvec_input()
    255   av->av_tail_ackno = seqno;    in dccp_ackvec_input()
    258   s64 num_packets = dccp_delta_seqno(av->av_buf_ackno, seqno);    in dccp_ackvec_input()
    [all …]
|
D | input.c |
    189   u64 lswl, lawl, seqno = DCCP_SKB_CB(skb)->dccpd_seq,    in dccp_check_seqno() local
    207   dccp_delta_seqno(dp->dccps_swl, seqno) >= 0)    in dccp_check_seqno()
    208   dccp_update_gsr(sk, seqno);    in dccp_check_seqno()
    234   if (between48(seqno, lswl, dp->dccps_swh) &&    in dccp_check_seqno()
    237   dccp_update_gsr(sk, seqno);    in dccp_check_seqno()
    265   (unsigned long long) lswl, (unsigned long long) seqno,    in dccp_check_seqno()
    275   seqno = dp->dccps_gsr;    in dccp_check_seqno()
    276   dccp_send_sync(sk, seqno, DCCP_PKT_SYNC);    in dccp_check_seqno()
|
D | dccp.h |
    111   static inline void dccp_inc_seqno(u64 *seqno)    in dccp_inc_seqno() argument
    113   *seqno = ADD48(*seqno, 1);    in dccp_inc_seqno()
|
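The dccp.h entry above is the core convention for this subsystem: sequence numbers are 48-bit values carried in a u64 and advanced mod 2^48 with ADD48(), while dccp_delta_seqno() (used in ackvec.c, input.c, and ccid2.c below) yields a signed mod-2^48 distance. A minimal userspace sketch of that arithmetic, consistent with the calls shown here but otherwise illustrative:

    /* Sketch: DCCP-style 48-bit sequence arithmetic (mod 2^48). */
    #include <stdint.h>
    #include <stdio.h>

    #define UINT48_MAX 0xFFFFFFFFFFFFULL            /* 2^48 - 1 */
    #define ADD48(a, b) (((a) + (b)) & UINT48_MAX)

    static void inc_seqno(uint64_t *seqno)
    {
            *seqno = ADD48(*seqno, 1);              /* wraps 2^48 - 1 -> 0 */
    }

    /* Signed mod-2^48 distance: positive iff s2 is "after" s1. */
    static int64_t delta_seqno(uint64_t s1, uint64_t s2)
    {
            uint64_t d = (s2 - s1) & UINT48_MAX;

            return (d & (1ULL << 47)) ? (int64_t)d - (1LL << 48) : (int64_t)d;
    }

    int main(void)
    {
            uint64_t s = UINT48_MAX;

            inc_seqno(&s);
            printf("wrapped: %llu\n", (unsigned long long)s);    /* 0 */
            printf("delta:   %lld\n",
                   (long long)delta_seqno(UINT48_MAX, 0));       /* 1 */
            return 0;
    }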
/net/batman-adv/ |
D | fragmentation.c |
    108   u16 seqno)    in batadv_frag_init_chain() argument
    112   if (chain->seqno == seqno)    in batadv_frag_init_chain()
    119   chain->seqno = seqno;    in batadv_frag_init_chain()
    146   u16 seqno, hdr_size = sizeof(struct batadv_frag_packet);    in batadv_frag_insert_packet() local
    157   seqno = ntohs(frag_packet->seqno);    in batadv_frag_insert_packet()
    158   bucket = seqno % BATADV_FRAG_BUFFER_COUNT;    in batadv_frag_insert_packet()
    173   if (batadv_frag_init_chain(chain, seqno)) {    in batadv_frag_insert_packet()
    493   frag_header.seqno = htons(atomic_inc_return(&bat_priv->frag_seqno));    in batadv_frag_send_packet()
|
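fragmentation.c above keys reassembly state on the packet's 16-bit seqno: the value selects a bucket (seqno % BATADV_FRAG_BUFFER_COUNT), and batadv_frag_init_chain() recycles a slot as soon as a different seqno claims it. A stripped-down sketch of that bucketing; the struct layout and constant are illustrative stand-ins, not batman-adv's own:

    /* Sketch: seqno-keyed fragment buckets. */
    #include <stdbool.h>
    #include <stdint.h>

    #define FRAG_BUFFER_COUNT 8

    struct frag_chain {
            uint16_t seqno;          /* seqno currently owning this slot */
            int      nfrags;         /* fragments collected so far */
    };

    static struct frag_chain buffer[FRAG_BUFFER_COUNT];

    /* Returns true if the slot had to be reset for a new seqno. */
    static bool frag_init_chain(struct frag_chain *chain, uint16_t seqno)
    {
            if (chain->seqno == seqno)
                    return false;    /* same packet: keep collecting */

            chain->nfrags = 0;       /* a different packet takes the slot */
            chain->seqno = seqno;
            return true;
    }

    static void frag_insert(uint16_t seqno)
    {
            struct frag_chain *chain = &buffer[seqno % FRAG_BUFFER_COUNT];

            if (frag_init_chain(chain, seqno)) {
                    /* slot recycled: any old fragments were discarded */
            }
            chain->nfrags++;
    }

Since several packets can hash to one bucket, a seqno collision simply restarts the chain; the kernel version additionally tracks timestamps and accumulated size before declaring a packet complete.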
D | tp_meter.c |
    579    u32 seqno, size_t len, const u8 *session,    in batadv_tp_send_msg() argument
    606    icmp->seqno = htonl(seqno);    in batadv_tp_send_msg()
    654    if (batadv_seq_before(ntohl(icmp->seqno),    in batadv_tp_recv_ack()
    674    recv_ack = ntohl(icmp->seqno);    in batadv_tp_recv_ack()
    1194   icmp->seqno = htonl(seq);    in batadv_tp_send_ack()
    1237   new->seqno = ntohl(icmp->seqno);    in batadv_tp_handle_out_of_order()
    1257   if (new->seqno == un->seqno) {    in batadv_tp_handle_out_of_order()
    1266   if (batadv_seq_before(new->seqno, un->seqno))    in batadv_tp_handle_out_of_order()
    1307   if (batadv_seq_before(tp_vars->last_recv, un->seqno))    in batadv_tp_ack_unordered()
    1310   to_ack = un->seqno + un->len - tp_vars->last_recv;    in batadv_tp_ack_unordered()
    [all …]
|
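tp_meter.c orders its 32-bit seqnos with batadv_seq_before(), which has to keep working after the counter wraps. The standard idiom, assumed here to be what that macro reduces to, reads the unsigned difference as a signed number:

    /* Sketch: wraparound-safe ordering of u32 seqnos. */
    #include <stdbool.h>
    #include <stdint.h>

    static bool seq_before(uint32_t x, uint32_t y)
    {
            /* x precedes y iff the mod-2^32 difference, read signed, is < 0 */
            return (int32_t)(x - y) < 0;
    }

    /* seq_before(0xFFFFFFFFu, 0) is true: 0 is one step past the wrap. */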
D | bat_v_ogm.c |
    299   ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno));    in batadv_v_ogm_send_softif()
    339   ogm_packet->orig, ntohl(ogm_packet->seqno),    in batadv_v_ogm_send_softif()
    551   if (orig_ifinfo->last_seqno_forwarded == ntohl(ogm_received->seqno))    in batadv_v_ogm_forward()
    554   orig_ifinfo->last_seqno_forwarded = ntohl(ogm_received->seqno);    in batadv_v_ogm_forward()
    626   seq_diff = ntohl(ogm2->seqno) - orig_ifinfo->last_real_seqno;    in batadv_v_ogm_metric_update()
    652   orig_ifinfo->last_real_seqno = ntohl(ogm2->seqno);    in batadv_v_ogm_metric_update()
    663   neigh_ifinfo->bat_v.last_seqno = ntohl(ogm2->seqno);    in batadv_v_ogm_metric_update()
    875   ntohl(ogm_packet->seqno), ogm_throughput, ogm_packet->ttl,    in batadv_v_ogm_process()
|
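bat_v_ogm.c computes seq_diff against last_real_seqno before crediting an OGM, which is how a node shields itself from stale or replayed originator messages. A hedged sketch of such a window check; the window size and the accept/update policy here are illustrative, not batman-adv's exact rules:

    /* Sketch: seqno window protection for OGMs (illustrative policy). */
    #include <stdbool.h>
    #include <stdint.h>

    #define LOCAL_WINDOW_SIZE 64    /* assumed constant, for illustration */

    static bool accept_ogm(uint32_t rx_seqno, uint32_t *last_real_seqno)
    {
            int32_t seq_diff = (int32_t)(rx_seqno - *last_real_seqno);

            if (seq_diff < -LOCAL_WINDOW_SIZE)
                    return false;    /* far in the past: protected, drop */

            if (seq_diff > 0)
                    *last_real_seqno = rx_seqno;    /* newest seqno wins */
            return true;
    }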
D | bat_v_elp.c |
    283   elp_packet->seqno = htonl(atomic_read(&hard_iface->bat_v.elp_seqno));    in batadv_v_elp_periodic_work()
    475   seqno_diff = ntohl(elp_packet->seqno) - elp_latest_seqno;    in batadv_v_elp_neigh_update()
    485   hardif_neigh->bat_v.elp_latest_seqno = ntohl(elp_packet->seqno);    in batadv_v_elp_neigh_update()
    532   ethhdr->h_source, ntohl(elp_packet->seqno),    in batadv_v_elp_packet_recv()
|
D | bat_iv_ogm.c |
    373    ntohl(batadv_ogm_packet->seqno),    in batadv_iv_ogm_send_to_if()
    787    u32 seqno;    in batadv_iv_ogm_schedule_buff() local
    822    seqno = (u32)atomic_read(&hard_iface->bat_iv.ogm_seqno);    in batadv_iv_ogm_schedule_buff()
    823    batadv_ogm_packet->seqno = htonl(seqno);    in batadv_iv_ogm_schedule_buff()
    1221   u32 seqno = ntohl(batadv_ogm_packet->seqno);    in batadv_iv_ogm_update_seqnos() local
    1237   seq_diff = seqno - orig_ifinfo->last_real_seqno;    in batadv_iv_ogm_update_seqnos()
    1258   seqno);    in batadv_iv_ogm_update_seqnos()
    1287   orig_ifinfo->last_real_seqno, seqno);    in batadv_iv_ogm_update_seqnos()
    1288   orig_ifinfo->last_real_seqno = seqno;    in batadv_iv_ogm_update_seqnos()
    1431   sameseq = orig_ifinfo->last_real_seqno == ntohl(ogm_packet->seqno);    in batadv_iv_ogm_process_per_outif()
    [all …]
|
D | routing.c |
    1174   u32 seqno;    in batadv_recv_bcast_packet() local
    1211   seqno = ntohl(bcast_packet->seqno);    in batadv_recv_bcast_packet()
    1214   seqno))    in batadv_recv_bcast_packet()
    1217   seq_diff = seqno - orig_node->last_bcast_seqno;    in batadv_recv_bcast_packet()
    1229   orig_node->last_bcast_seqno = seqno;    in batadv_recv_bcast_packet()
|
D | soft-interface.c |
    200   u32 seqno;    in batadv_interface_tx() local
    351   seqno = atomic_inc_return(&bat_priv->bcast_seqno);    in batadv_interface_tx()
    352   bcast_packet->seqno = htonl(seqno);    in batadv_interface_tx()
|
D | types.h |
    294    u16 seqno;    member
    1351   u32 seqno;    member
|
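One discipline runs through every batman-adv listing above: seqno fields live in network byte order inside packet structs (see the htons()/htonl() stores and ntohs()/ntohl() loads), while all arithmetic happens on host-order copies. A minimal sketch of that boundary, with an illustrative struct:

    /* Sketch: big-endian seqno on the wire, host order everywhere else. */
    #include <arpa/inet.h>
    #include <stdint.h>

    struct wire_packet {
            uint32_t seqno;                  /* network byte order on the wire */
    };

    static uint32_t tx_seqno;                /* host-order counter */

    static void fill_packet(struct wire_packet *p)
    {
            p->seqno = htonl(++tx_seqno);    /* convert once, on transmit */
    }

    static uint32_t read_seqno(const struct wire_packet *p)
    {
            return ntohl(p->seqno);          /* convert once, on receive */
    }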
/net/dccp/ccids/lib/ |
D | packet_history.h |
    38   u64 seqno;    member
    43   tfrc_tx_hist_find_entry(struct tfrc_tx_hist_entry *head, u64 seqno)    in tfrc_tx_hist_find_entry() argument
    45   while (head != NULL && head->seqno != seqno)    in tfrc_tx_hist_find_entry()
    50   int tfrc_tx_hist_add(struct tfrc_tx_hist_entry **headp, u64 seqno);
|
D | packet_history.c |
    49   int tfrc_tx_hist_add(struct tfrc_tx_hist_entry **headp, u64 seqno)    in tfrc_tx_hist_add() argument
    55   entry->seqno = seqno;    in tfrc_tx_hist_add()
|
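packet_history.[ch] above keep the TFRC TX history as a singly linked list keyed by the 64-bit seqno: tfrc_tx_hist_find_entry() walks until the seqno matches, and tfrc_tx_hist_add() prepends a new entry. A self-contained sketch, assuming new entries are pushed at the head; malloc() stands in for the kernel's slab cache:

    /* Sketch: seqno-keyed TX history list, newest entry first. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct tx_hist_entry {
            struct tx_hist_entry *next;
            uint64_t seqno;
    };

    static struct tx_hist_entry *tx_hist_find(struct tx_hist_entry *head,
                                              uint64_t seqno)
    {
            while (head != NULL && head->seqno != seqno)
                    head = head->next;
            return head;                 /* NULL if seqno was never recorded */
    }

    static int tx_hist_add(struct tx_hist_entry **headp, uint64_t seqno)
    {
            struct tx_hist_entry *entry = malloc(sizeof(*entry));

            if (entry == NULL)
                    return -1;
            entry->seqno = seqno;
            entry->next = *headp;        /* push at the head */
            *headp = entry;
            return 0;
    }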
/net/tipc/ |
D | name_distr.c |
    159   u32 dnode, struct list_head *pls, u16 seqno)    in named_distribute() argument
    205   msg_set_named_seqno(hdr, seqno);    in named_distribute()
    219   u16 seqno;    in tipc_named_node_up() local
    225   seqno = nt->snd_nxt;    in tipc_named_node_up()
    229   named_distribute(net, &head, dnode, &nt->cluster_scope, seqno);    in tipc_named_node_up()
    326   u16 seqno;    in tipc_named_dequeue() local
    336   seqno = msg_named_seqno(hdr);    in tipc_named_dequeue()
    338   *rcv_nxt = seqno;    in tipc_named_dequeue()
    348   if (*open && (*rcv_nxt == seqno)) {    in tipc_named_dequeue()
    355   if (less(seqno, *rcv_nxt)) {    in tipc_named_dequeue()
|
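name_distr.c stamps every outgoing name-table bundle with a 16-bit seqno (msg_set_named_seqno()), and tipc_named_dequeue() applies a bundle only when it matches the expected rcv_nxt, dropping anything that compares less(). A sketch of that gate; the helper names and the explicit three-way outcome are illustrative:

    /* Sketch: in-order application of seqno-stamped bundles. */
    #include <stdint.h>

    enum named_action { NAMED_APPLY, NAMED_DROP, NAMED_DEFER };

    /* u16 serial arithmetic: a precedes b iff (a - b) mod 2^16 is "negative" */
    static int less(uint16_t a, uint16_t b)
    {
            return (int16_t)(a - b) < 0;
    }

    static enum named_action named_gate(uint16_t seqno, uint16_t *rcv_nxt)
    {
            if (seqno == *rcv_nxt) {     /* the expected bundle: apply it */
                    (*rcv_nxt)++;
                    return NAMED_APPLY;
            }
            if (less(seqno, *rcv_nxt))
                    return NAMED_DROP;   /* already seen: stale duplicate */
            return NAMED_DEFER;          /* ahead: wait for the gap to fill */
    }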
D | link.c |
    1044   u16 seqno = l->snd_nxt;    in tipc_link_xmit() local
    1085   msg_set_seqno(hdr, seqno);    in tipc_link_xmit()
    1100   seqno++;    in tipc_link_xmit()
    1124   l->snd_nxt = seqno;    in tipc_link_xmit()
    1177   u16 seqno = l->snd_nxt;    in tipc_link_advance_backlog() local
    1200   msg_set_seqno(hdr, seqno);    in tipc_link_advance_backlog()
    1205   seqno++;    in tipc_link_advance_backlog()
    1207   l->snd_nxt = seqno;    in tipc_link_advance_backlog()
    1373   u16 seqno;    in tipc_link_tnl_rcv() local
    1408   seqno = buf_seqno(iskb);    in tipc_link_tnl_rcv()
    [all …]
|
D | msg.c |
    819   bool __tipc_skb_queue_sorted(struct sk_buff_head *list, u16 seqno,    in __tipc_skb_queue_sorted() argument
    824   if (skb_queue_empty(list) || less(seqno, buf_seqno(skb_peek(list)))) {    in __tipc_skb_queue_sorted()
    829   if (more(seqno, buf_seqno(skb_peek_tail(list)))) {    in __tipc_skb_queue_sorted()
    835   if (more(seqno, buf_seqno(_skb)))    in __tipc_skb_queue_sorted()
    837   if (seqno == buf_seqno(_skb))    in __tipc_skb_queue_sorted()
|
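__tipc_skb_queue_sorted() in msg.c keeps the receive queue ordered by u16 seqno, with fast paths for a new head or tail, a scan for the middle, and rejection of exact duplicates. The sketch below folds the fast paths into a single scan over a plain array; the comparison is the signed-difference serial arithmetic assumed equivalent to TIPC's less()/more():

    /* Sketch: duplicate-rejecting sorted insert keyed by u16 seqno.
     * Caller guarantees q has room for one more element.
     */
    #include <stdbool.h>
    #include <stdint.h>

    static bool less(uint16_t a, uint16_t b) { return (int16_t)(a - b) < 0; }

    static bool queue_sorted_insert(uint16_t *q, int *len, uint16_t seqno)
    {
            int i, pos = *len;           /* default: append at the tail */

            for (i = 0; i < *len; i++) {
                    if (seqno == q[i])
                            return false;    /* duplicate: reject */
                    if (less(seqno, q[i])) {
                            pos = i;         /* first element that follows us */
                            break;
                    }
            }
            for (i = *len; i > pos; i--)
                    q[i] = q[i - 1];         /* shift the tail right */
            q[pos] = seqno;
            (*len)++;
            return true;
    }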
D | crypto.c |
    164    atomic64_t seqno ____cacheline_aligned;
    606    atomic64_set(&tmp->seqno, 0);    in tipc_aead_init()
    660    atomic64_set(&aead->seqno, 0);    in tipc_aead_clone()
    809    memcpy(iv + 4, (u8 *)&ehdr->seqno, 8);    in tipc_aead_encrypt()
    934    memcpy(iv + 4, (u8 *)&ehdr->seqno, 8);    in tipc_aead_decrypt()
    1048   u64 seqno;    in tipc_ehdr_build() local
    1061   seqno = atomic64_inc_return(&aead->seqno);    in tipc_ehdr_build()
    1063   seqno = atomic64_inc_return(&__rx->sndnxt);    in tipc_ehdr_build()
    1066   if (unlikely(!seqno))    in tipc_ehdr_build()
    1070   ehdr->seqno = cpu_to_be64(seqno);    in tipc_ehdr_build()
|
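crypto.c draws a fresh 64-bit seqno from an atomic counter for every packet (atomic64_inc_return()), stores it big-endian in the header, and splices it into the AEAD IV at offset 4 (the memcpy(iv + 4, &ehdr->seqno, 8) lines). That suggests a salt-plus-seqno nonce; the sketch below assumes a 4-byte per-key salt in iv[0..3], which is an inference from the offset, not something the listing confirms:

    /* Sketch: 12-byte AEAD nonce = assumed 4-byte salt + be64 seqno. */
    #include <stdint.h>
    #include <string.h>

    static void build_iv(uint8_t iv[12], const uint8_t salt[4],
                         const uint8_t seqno_be[8])
    {
            memcpy(iv, salt, 4);             /* iv[0..3]: per-key salt (assumed) */
            memcpy(iv + 4, seqno_be, 8);     /* iv[4..11]: big-endian seqno */
    }

Because the counter is bumped atomically per packet (and the `if (unlikely(!seqno))` at line 1066 appears to special-case a zero value), each message gets a unique nonce under a given key, which is what an AEAD mode requires.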
D | crypto.h |
    147   __be64 seqno;    member
|
D | msg.h |
    1203   bool __tipc_skb_queue_sorted(struct sk_buff_head *list, u16 seqno,
    1322   u16 seqno)    in __tipc_skb_dequeue() argument
    1326   if (skb && less_eq(buf_seqno(skb), seqno)) {    in __tipc_skb_dequeue()
|
D | group.c |
    670   u32 event, u16 seqno,    in tipc_group_create_event() argument
    697   msg_set_grp_bc_seqno(hdr, seqno);    in tipc_group_create_event()
|
/net/smc/ |
D | smc_cdc.c |
    112   conn->local_tx_ctrl.seqno = conn->tx_cdc_seq;    in smc_cdc_msg_send()
    124   conn->local_tx_ctrl.seqno = conn->tx_cdc_seq;    in smc_cdc_msg_send()
    144   peer->seqno = htons(conn->tx_cdc_seq_fin); /* seqno last compl. tx */    in smcr_cdc_msg_send_validation()
    292   u16 recv_seq = ntohs(cdc->seqno);    in smc_cdc_msg_validate()
    296   diff = conn->local_rx_ctrl.seqno - recv_seq;    in smc_cdc_msg_validate()
    448   if (smc_cdc_before(ntohs(cdc->seqno),    in smc_cdc_rx_handler()
    449   conn->local_rx_ctrl.seqno))    in smc_cdc_rx_handler()
|
D | smc_cdc.h |
    44    __be16 seqno;    member
    214   peer->seqno = htons(local->seqno);    in smc_host_msg_to_cdc()
    247   local->seqno = ntohs(peer->seqno);    in smcr_cdc_msg_to_host()
|
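The SMC CDC listings carry a 16-bit connection seqno (a __be16 on the wire, converted with htons()/ntohs() at the boundary), and smc_cdc_rx_handler() drops messages whose seqno is smc_cdc_before() the last one seen. A sketch of that staleness check using the same signed-difference idiom; the accept helper is illustrative:

    /* Sketch: dropping stale CDC messages by 16-bit seqno. */
    #include <stdbool.h>
    #include <stdint.h>

    static bool cdc_before(uint16_t seq1, uint16_t seq2)
    {
            return (int16_t)(seq1 - seq2) < 0;   /* seq1 older than seq2? */
    }

    static bool cdc_accept(uint16_t recv_seq, uint16_t *last_seq)
    {
            if (cdc_before(recv_seq, *last_seq))
                    return false;                /* stale or reordered: ignore */
            *last_seq = recv_seq;
            return true;
    }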
D | smc.h |
    115   u16 seqno; /* connection seq # */    member
|
/net/dccp/ccids/ |
D | ccid2.c |
    517   u64 ackno, seqno;    in ccid2_hc_tx_packet_recv() local
    523   seqno = DCCP_SKB_CB(skb)->dccpd_seq;    in ccid2_hc_tx_packet_recv()
    532   hc->tx_rpseq = seqno;    in ccid2_hc_tx_packet_recv()
    535   if (dccp_delta_seqno(hc->tx_rpseq, seqno) == 1)    in ccid2_hc_tx_packet_recv()
    536   hc->tx_rpseq = seqno;    in ccid2_hc_tx_packet_recv()
    538   else if (after48(seqno, hc->tx_rpseq)) {    in ccid2_hc_tx_packet_recv()
|
/net/ipv4/ |
D | esp4_offload.c |
    317   esp.seqno = cpu_to_be64(seq + ((u64)xo->seq.hi << 32));    in esp_xmit()
|
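esp_xmit() reassembles the 64-bit ESP extended sequence number (ESN): the low 32 bits travel in the packet while the high 32 bits are tracked locally in xo->seq.hi, and the combined value is handed to the device big-endian via cpu_to_be64(). A one-line sketch of the same assembly, with htobe64() standing in for cpu_to_be64():

    /* Sketch: combining the ESN halves for a hardware descriptor. */
    #include <endian.h>
    #include <stdint.h>

    static uint64_t esn_wire(uint32_t seq_lo, uint32_t seq_hi)
    {
            /* low half from the header, high half from local state */
            return htobe64((uint64_t)seq_lo + ((uint64_t)seq_hi << 32));
    }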