/net/core/ |
D | dst.c
      69  struct dst_entry *last = &head;  in dst_gc_task() local
      80  last->next = dst;  in dst_gc_task()
      81  last = dst;  in dst_gc_task()
     113  last->next = NULL;  in dst_gc_task()
     376  struct dst_entry *dst, *last = NULL;  in dst_dev_event() local
     383  last = dst;  in dst_dev_event()
     392  if (last)  in dst_dev_event()
     393  last->next = dst;  in dst_dev_event()
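
The dst_gc_task() hits above are the classic tail-pointer rebuild: while the garbage collector walks its entries, last always points at the tail of the list being rebuilt, so each survivor is appended in O(1) and the list is terminated exactly once at the end. A minimal user-space sketch of that shape; struct entry, keep_even() and the demo values are invented for the example and are not the dst code itself:

    #include <stdio.h>
    #include <stdlib.h>

    struct entry {
        int value;
        struct entry *next;
    };

    static int keep_even(int v) { return (v & 1) == 0; }

    /* Rebuild a singly linked list keeping only entries that pass keep():
     * a dummy head plus a "last" tail pointer lets each survivor be
     * appended in O(1), and the list is terminated once after the walk. */
    static struct entry *filter_list(struct entry *in, int (*keep)(int))
    {
        struct entry head = { 0, NULL };
        struct entry *last = &head;

        while (in) {
            struct entry *next = in->next;

            if (keep(in->value)) {
                last->next = in;   /* hang the survivor off the current tail */
                last = in;         /* it becomes the new tail */
            } else {
                free(in);
            }
            in = next;
        }
        last->next = NULL;         /* terminate the rebuilt list */
        return head.next;
    }

    int main(void)
    {
        struct entry *list = NULL;

        for (int i = 6; i >= 1; i--) {       /* build the list 1..6 */
            struct entry *e = malloc(sizeof(*e));

            if (!e)
                return 1;
            e->value = i;
            e->next = list;
            list = e;
        }
        for (struct entry *e = filter_list(list, keep_even); e; e = e->next)
            printf("%d ", e->value);         /* prints: 2 4 6 */
        printf("\n");
        return 0;
    }
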
|
D | datagram.c
     165  struct sk_buff *skb, *last;  in __skb_recv_datagram() local
     188  last = (struct sk_buff *)queue;  in __skb_recv_datagram()
     191  last = skb;  in __skb_recv_datagram()
     215  } while (!wait_for_more_packets(sk, err, &timeo, last));  in __skb_recv_datagram()
|
D | fib_rules.c
     312  struct fib_rule *rule, *r, *last = NULL;  in fib_nl_newrule() local
     426  last = r;  in fib_nl_newrule()
     431  if (last)  in fib_nl_newrule()
     432  list_add_rcu(&rule->list, &last->list);  in fib_nl_newrule()
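
In fib_nl_newrule(), last remembers the final existing rule that should still come before the new one, and the new rule is then spliced in right after it with list_add_rcu(), or at the head when last stays NULL. A simplified, non-RCU sketch of that insertion over a plain singly linked list; struct rule and insert_sorted() are invented for the example:

    #include <stddef.h>

    struct rule {
        int pref;                 /* priority; the list stays sorted by pref */
        struct rule *next;
    };

    /* Insert new_rule after the last existing rule whose pref is not larger,
     * so the list stays sorted and a new rule lands behind existing rules of
     * equal preference. */
    static void insert_sorted(struct rule **head, struct rule *new_rule)
    {
        struct rule *r, *last = NULL;

        for (r = *head; r; r = r->next) {
            if (r->pref > new_rule->pref)
                break;
            last = r;             /* r still belongs in front of new_rule */
        }

        if (last) {               /* splice in behind the tracked predecessor */
            new_rule->next = last->next;
            last->next = new_rule;
        } else {                  /* smallest pref so far: new list head */
            new_rule->next = *head;
            *head = new_rule;
        }
    }

Tracking the predecessor rather than the first larger element mirrors the shape of the kernel loop shown above: the list stays ordered by preference and the head case needs no scan of its own.
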
|
/net/netfilter/ipvs/ |
D | ip_vs_wrr.c
     167  struct ip_vs_dest *dest, *last, *stop = NULL;  in ip_vs_wrr_schedule() local
     178  last = dest;  in ip_vs_wrr_schedule()
     200  &last->n_list == &svc->destinations)  in ip_vs_wrr_schedule()
     206  &last->n_list != &svc->destinations) {  in ip_vs_wrr_schedule()
     211  stop = last;  in ip_vs_wrr_schedule()
|
D | ip_vs_rr.c
      61  struct ip_vs_dest *dest, *last;  in ip_vs_rr_schedule() local
      68  last = dest = list_entry(p, struct ip_vs_dest, n_list);  in ip_vs_rr_schedule()
      78  if (dest == last)  in ip_vs_rr_schedule()
|
/net/ipv4/ |
D | ip_input.c
     156  struct sock *last = NULL;  in ip_call_ra_chain() local
     173  if (last) {  in ip_call_ra_chain()
     176  raw_rcv(last, skb2);  in ip_call_ra_chain()
     178  last = sk;  in ip_call_ra_chain()
     182  if (last) {  in ip_call_ra_chain()
     183  raw_rcv(last, skb);  in ip_call_ra_chain()
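
ip_call_ra_chain() uses the deliver-to-many idiom: last trails the loop by one socket, so every receiver except the final one gets a clone of the skb, and the original buffer is handed to the last receiver instead of being copied one more time. A user-space sketch of the same shape; struct listener, deliver() and fan_out() are inventions, with deliver() standing in for raw_rcv() and taking ownership of the buffer it is given:

    #define _POSIX_C_SOURCE 200809L
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct listener { const char *name; struct listener *next; };

    /* Stand-in for raw_rcv(): consumes (frees) the buffer it is handed. */
    static void deliver(struct listener *l, char *buf)
    {
        if (buf)
            printf("%s got: %s\n", l->name, buf);
        free(buf);                          /* free(NULL) is a no-op */
    }

    /* Hand a copy of msg to every listener, but give the original buffer to
     * the last one: "last" trails the iteration by one step, so only n - 1
     * copies are made for n receivers. */
    static void fan_out(struct listener *list, char *msg)
    {
        struct listener *l, *last = NULL;

        for (l = list; l; l = l->next) {
            if (last)
                deliver(last, strdup(msg)); /* a copy for all but the final one */
            last = l;
        }
        if (last)
            deliver(last, msg);             /* last listener consumes the original */
        else
            free(msg);                      /* nobody was listening */
    }

    int main(void)
    {
        struct listener c = { "c", NULL }, b = { "b", &c }, a = { "a", &b };
        char *msg = strdup("hello");

        if (msg)
            fan_out(&a, msg);
        return 0;
    }
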
|
D | ipconfig.c
     206  struct ic_device *d, **last;  in ic_open_devs() local
     211  last = &ic_first_dev;  in ic_open_devs()
     246  *last = d;  in ic_open_devs()
     247  last = &d->next;  in ic_open_devs()
     287  *last = NULL;  in ic_open_devs()
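
ic_open_devs() builds its device list with a pointer-to-pointer tail: last always holds the address of the slot where the next node has to go, so appending needs no special case for the empty list, and the final *last = NULL terminates whatever was built. A small sketch of the idiom; struct dev and build_list() are invented for the example:

    #include <stdlib.h>
    #include <string.h>

    struct dev {
        char name[16];
        struct dev *next;
    };

    /* Build a list in input order using a pointer-to-pointer tail ("last"):
     * *last is always the slot the next node must be written into, first the
     * head pointer itself and then each node's next field in turn. */
    static struct dev *build_list(const char *const names[], int n)
    {
        struct dev *head = NULL;
        struct dev **last = &head;

        for (int i = 0; i < n; i++) {
            struct dev *d = calloc(1, sizeof(*d));

            if (!d)
                break;                 /* keep whatever was built so far */
            strncpy(d->name, names[i], sizeof(d->name) - 1);
            *last = d;                 /* link into the current tail slot */
            last = &d->next;           /* the tail slot is now d's next field */
        }
        *last = NULL;                  /* terminate the list */
        return head;
    }

Because last addresses a pointer slot rather than a node, the head update and every later append go through exactly the same two lines.
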
|
/net/rds/ |
D | bind.c
     114  u16 rover, last;  in rds_add_bound() local
     118  last = rover;  in rds_add_bound()
     121  last = rover - 1;  in rds_add_bound()
     136  } while (rover++ != last);  in rds_add_bound()
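
rds_add_bound() probes the 16-bit port space starting at a random rover; when no explicit port was requested, last is set to one slot before the starting point, so the do/while (rover++ != last) loop visits every value exactly once before giving up. A user-space sketch of that wrap-around search; port_is_free() is a made-up predicate standing in for the real bind attempt:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical predicate for the sketch: is this port free to bind? */
    static bool port_is_free(uint16_t port)
    {
        return port >= 2000 && port <= 2002;   /* pretend only these are free */
    }

    /* Scan the whole 16-bit space starting at a random point and wrapping
     * around: "last" is one before the starting rover, so the loop condition
     * rover++ != last stops exactly after every value has been tried once. */
    static uint16_t pick_port(void)
    {
        uint16_t rover = (uint16_t)(rand() & 0xffff);
        uint16_t last = rover - 1;             /* wraps via unsigned conversion */

        do {
            if (rover == 0)
                continue;                      /* port 0 is never used */
            if (port_is_free(rover))
                return rover;
        } while (rover++ != last);

        return 0;                              /* nothing free */
    }

    int main(void)
    {
        printf("picked port %u\n", pick_port());
        return 0;
    }
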
|
/net/wireless/ |
D | lib80211_crypt_ccmp.c
     221  int data_len, i, blocks, last, len;  in lib80211_ccmp_encrypt() local
     242  last = data_len % AES_BLOCK_LEN;  in lib80211_ccmp_encrypt()
     245  len = (i == blocks && last) ? last : AES_BLOCK_LEN;  in lib80211_ccmp_encrypt()
     294  int i, blocks, last, len;  in lib80211_ccmp_decrypt() local
     348  last = data_len % AES_BLOCK_LEN;  in lib80211_ccmp_decrypt()
     351  len = (i == blocks && last) ? last : AES_BLOCK_LEN;  in lib80211_ccmp_decrypt()
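
Both CCMP paths use last for the length of the final, possibly short block: blocks counts full or partial blocks, and only the last iteration works on last bytes instead of a full AES_BLOCK_LEN. A sketch of the same block-walking arithmetic; the per-block operation here is a placeholder XOR, not AES or CCMP:

    #include <stddef.h>

    #define BLOCK_LEN 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    /* Walk a buffer in BLOCK_LEN chunks; "last" holds the size of the final
     * partial block (0 if the length is an exact multiple), and only the
     * final iteration uses it instead of a full block. */
    static void process_blocks(unsigned char *data, size_t data_len)
    {
        size_t blocks = DIV_ROUND_UP(data_len, BLOCK_LEN);
        size_t last = data_len % BLOCK_LEN;
        unsigned char *pos = data;

        for (size_t i = 1; i <= blocks; i++) {
            size_t len = (i == blocks && last) ? last : BLOCK_LEN;

            for (size_t j = 0; j < len; j++)
                pos[j] ^= 0x5a;            /* stand-in for the real cipher step */
            pos += len;
        }
    }

With data_len = 37 and BLOCK_LEN = 16, for example, the loop runs three times with len equal to 16, 16 and 5.
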
|
/net/netfilter/ipset/ |
D | pfxlen.c
     298  u32 last;  in ip_set_range_to_cidr() local
     304  last = from | ~ip_set_hostmask(i);  in ip_set_range_to_cidr()
     305  if (!after(last, to)) {  in ip_set_range_to_cidr()
     307  return last;  in ip_set_range_to_cidr()
|
D | ip_set_hash_net.c
     169  u32 ip = 0, ip_to, last;  in hash_net4_uadt() local
     220  last = ip_set_range_to_cidr(ip, ip_to, &e.cidr);  in hash_net4_uadt()
     226  ip = last + 1;  in hash_net4_uadt()
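
ip_set_range_to_cidr() (the pfxlen.c hits above) returns the last address of the largest CIDR block that starts at from and still fits below to, and the hash_net4_uadt()/hash_netport4_uadt() loops then advance with ip = last + 1 until the whole range has been covered. A user-space sketch of both halves; prefix_mask(), range_to_cidr() and the sample addresses are inventions for the example:

    #include <stdint.h>
    #include <stdio.h>

    /* Highest "cidr" bits set, e.g. prefix_mask(24) == 0xffffff00. */
    static uint32_t prefix_mask(unsigned cidr)
    {
        return cidr == 0 ? 0 : (uint32_t)~0u << (32 - cidr);
    }

    /* Find the largest CIDR block that starts at "from" and does not run past
     * "to"; return its last address and store the prefix length. */
    static uint32_t range_to_cidr(uint32_t from, uint32_t to, unsigned *cidr)
    {
        for (unsigned i = 1; i < 32; i++) {
            if ((from & prefix_mask(i)) != from)
                continue;                     /* from is not aligned to a /i block */
            uint32_t last = from | ~prefix_mask(i);

            if (last <= to) {                 /* the whole /i block fits the range */
                *cidr = i;
                return last;
            }
        }
        *cidr = 32;                           /* fall back to a single address */
        return from;
    }

    /* Walk an arbitrary [ip, ip_to] range as a sequence of CIDR blocks,
     * advancing with ip = last + 1 after each block. */
    int main(void)
    {
        uint32_t ip = 0x0a000003, ip_to = 0x0a000120;   /* 10.0.0.3 - 10.0.1.32 */

        while (ip <= ip_to) {
            unsigned cidr;
            uint32_t last = range_to_cidr(ip, ip_to, &cidr);

            printf("%u.%u.%u.%u/%u\n",
                   (unsigned)(ip >> 24), (unsigned)((ip >> 16) & 0xff),
                   (unsigned)((ip >> 8) & 0xff), (unsigned)(ip & 0xff), cidr);
            if (last == 0xffffffffu)
                break;                        /* avoid wrapping past the top */
            ip = last + 1;
        }
        return 0;
    }
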
|
D | ip_set_hash_netport.c
     190  u32 port, port_to, p = 0, ip = 0, ip_to, last;  in hash_netport4_uadt() local
     272  last = ip_set_range_to_cidr(ip, ip_to, &cidr);  in hash_netport4_uadt()
     285  ip = last + 1;  in hash_netport4_uadt()
|
/net/sctp/ |
D | ulpqueue.c
     336  struct sk_buff *pnext, *last;  in sctp_make_reassembled_event() local
     346  for (last = list; list; last = list, list = list->next);  in sctp_make_reassembled_event()
     351  if (last)  in sctp_make_reassembled_event()
     352  last->next = pos;  in sctp_make_reassembled_event()
     979  struct sk_buff *skb, *flist, *last;  in sctp_ulpq_renege_list() local
     999  for (last = flist; flist; flist = flist->next) {  in sctp_ulpq_renege_list()
    1000  last = flist;  in sctp_ulpq_renege_list()
    1001  freed += skb_headlen(last);  in sctp_ulpq_renege_list()
    1003  if (last)  in sctp_ulpq_renege_list()
    1004  last_tsn = sctp_skb2event(last)->tsn;  in sctp_ulpq_renege_list()
|
/net/netfilter/ |
D | nfnetlink_acct.c
     141  struct nf_acct *cur, *last;  in nfnl_acct_dump() local
     146  last = (struct nf_acct *)cb->args[1];  in nfnl_acct_dump()
     152  if (last) {  in nfnl_acct_dump()
     153  if (cur != last)  in nfnl_acct_dump()
     156  last = NULL;  in nfnl_acct_dump()
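
nfnl_acct_dump() and the very similar dumpers below (cttimeout, conntrack, cthelper) all resume a partially sent dump the same way: the object that did not fit is stashed in cb->args[1] as last, and the next invocation skips entries until it meets last again, clears it, and retries that entry first. A much simplified user-space sketch of that resume logic; struct item, dump_state and the page buffer are inventions, and there is no netlink here:

    #include <stdio.h>
    #include <string.h>

    struct item { int id; struct item *next; };

    struct dump_state {
        const struct item *last;   /* entry that did not fit last time */
        int done;                  /* set once the whole list has been walked */
    };

    /* Fill "page" with as many entries as fit, resuming where the previous
     * call stopped: skip entries until the saved "last" is seen again, clear
     * it, and retry that entry first.  Returns bytes written, 0 when done. */
    static size_t dump_items(const struct item *list, struct dump_state *st,
                             char *page, size_t page_len)
    {
        const struct item *cur, *last = st->last;
        size_t used = 0;

        if (st->done)
            return 0;

        for (cur = list; cur; cur = cur->next) {
            char entry[32];
            int n;

            if (last) {
                if (cur != last)
                    continue;          /* already emitted by an earlier call */
                last = NULL;           /* found the resume point, emit it now */
            }
            n = snprintf(entry, sizeof(entry), "item %d\n", cur->id);
            if (used + (size_t)n > page_len) {
                st->last = cur;        /* did not fit: remember it for next time */
                return used;
            }
            memcpy(page + used, entry, (size_t)n);
            used += (size_t)n;
        }
        st->done = 1;                  /* reached the end of the list */
        return used;
    }

    int main(void)
    {
        struct item c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
        struct dump_state st = { NULL, 0 };
        char page[16];
        size_t n;

        while ((n = dump_items(&a, &st, page, sizeof(page))) > 0)
            fwrite(page, 1, n, stdout);
        return 0;
    }

Keeping an object pointer alive across calls is also why ctnetlink_dump_table() below has to drop its hold on last with nf_ct_put() when it is finished; the plain pointer in this sketch has no such lifetime concern.
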
|
D | nfnetlink_cttimeout.c
     212  struct ctnl_timeout *cur, *last;  in ctnl_timeout_dump() local
     217  last = (struct ctnl_timeout *)cb->args[1];  in ctnl_timeout_dump()
     223  if (last) {  in ctnl_timeout_dump()
     224  if (cur != last)  in ctnl_timeout_dump()
     227  last = NULL;  in ctnl_timeout_dump()
|
D | nf_conntrack_expect.c
     349  struct nf_conntrack_expect *exp, *last = NULL;  in evict_oldest_expect() local
     353  last = exp;  in evict_oldest_expect()
     356  if (last && del_timer(&last->timeout)) {  in evict_oldest_expect()
     357  nf_ct_unlink_expect(last);  in evict_oldest_expect()
     358  nf_ct_expect_put(last);  in evict_oldest_expect()
|
D | nf_conntrack_netlink.c
     763  struct nf_conn *ct, *last;  in ctnetlink_dump_table() local
     774  last = (struct nf_conn *)cb->args[1];  in ctnetlink_dump_table()
     788  if (ct != last)  in ctnetlink_dump_table()
     818  if (last)  in ctnetlink_dump_table()
     819  nf_ct_put(last);  in ctnetlink_dump_table()
    1151  struct nf_conn *ct, *last;  in ctnetlink_dump_list() local
    1162  last = (struct nf_conn *)cb->args[1];  in ctnetlink_dump_list()
    1169  if (ct != last)  in ctnetlink_dump_list()
    1192  if (last)  in ctnetlink_dump_list()
    1193  nf_ct_put(last);  in ctnetlink_dump_list()
    [all …]
|
D | nfnetlink_cthelper.c
     453  struct nf_conntrack_helper *cur, *last;  in nfnl_cthelper_dump_table() local
     456  last = (struct nf_conntrack_helper *)cb->args[1];  in nfnl_cthelper_dump_table()
     467  if (cur != last)  in nfnl_cthelper_dump_table()
|
/net/ax25/ |
D | ax25_out.c
     248  int last = 1;  in ax25_kick() local
     292  last = (next == end);  in ax25_kick()
     302  ax25_send_iframe(ax25, skbn, (last) ? AX25_POLLON : AX25_POLLOFF);  in ax25_kick()
     319  } while (!last && (skb = skb_dequeue(&ax25->write_queue)) != NULL);  in ax25_kick()
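
ax25_kick() computes last per fragment from whether the cursor has reached the end of the frame, so only the final fragment is sent with the poll bit. A sketch of that last-fragment flag over a plain buffer; FRAG_LEN and send_fragment() are made up, with the final flag playing the role of AX25_POLLON:

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    #define FRAG_LEN 8

    /* Hypothetical send hook for the sketch; "final" marks the last piece. */
    static void send_fragment(const char *data, size_t len, bool final)
    {
        printf("frag len=%zu final=%d: %.*s\n", len, final, (int)len, data);
    }

    /* Chop a payload into FRAG_LEN pieces; "last" is true only for the piece
     * that reaches the end, and only that piece gets the final marker. */
    static void send_all(const char *data, size_t total)
    {
        size_t off = 0;

        do {
            size_t len = total - off < FRAG_LEN ? total - off : FRAG_LEN;
            bool last = (off + len == total);

            send_fragment(data + off, len, last);
            off += len;
        } while (off < total);
    }

    int main(void)
    {
        const char msg[] = "the quick brown fox";

        send_all(msg, strlen(msg));        /* 8 + 8 + 3 bytes, last flag on the 3 */
        return 0;
    }
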
|
/net/rfkill/ |
D | input.c
     143  static unsigned long rfkill_ratelimit(const unsigned long last)  in rfkill_ratelimit() argument
     146  return time_after(jiffies, last + delay) ? 0 : delay;  in rfkill_ratelimit()
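
rfkill_ratelimit() leans on the kernel's wraparound-safe time_after() comparison: if the current jiffies value is past last + delay the caller may act immediately, otherwise it should wait delay. A user-space sketch of the same arithmetic over a 32-bit tick counter; RATELIMIT_TICKS and the sample tick values are invented:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Wraparound-safe "a is after b" for a free-running 32-bit counter: the
     * subtraction is done in unsigned arithmetic and the sign of the result
     * decides the order, the same idea as the kernel's time_after(). */
    static bool time_after32(uint32_t a, uint32_t b)
    {
        return (int32_t)(b - a) < 0;
    }

    #define RATELIMIT_TICKS 200u

    /* Like rfkill_ratelimit(): given the tick of the last event, return 0 if
     * enough time has passed to act now, otherwise the delay to wait. */
    static uint32_t ratelimit(uint32_t now, uint32_t last)
    {
        return time_after32(now, last + RATELIMIT_TICKS) ? 0 : RATELIMIT_TICKS;
    }

    int main(void)
    {
        printf("%u\n", ratelimit(1000, 900));          /* too soon: prints 200 */
        printf("%u\n", ratelimit(1201, 900));          /* old enough: prints 0 */
        printf("%u\n", ratelimit(250, 0xffffffb0u));   /* 80 ticks before the wrap
                                                          plus 250 after: prints 0 */
        return 0;
    }
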
|
/net/sched/ |
D | sch_mqprio.c
      72  unsigned int last = qopt->offset[i] + qopt->count[i];  in mqprio_parse_opt() local
      79  last > dev->real_num_tx_queues)  in mqprio_parse_opt()
      84  if (last > qopt->offset[j])  in mqprio_parse_opt()
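
mqprio_parse_opt() validates the traffic-class-to-queue map: last is one past the final queue of class i, and it must stay within the device's queue count and must not reach into any later class's offset. A sketch of those checks over a plain struct; tc_map and the limits are invented, and like the kernel code it assumes classes are listed in increasing offset order:

    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_TC 4

    struct tc_map {
        unsigned int count[MAX_TC];    /* queues per traffic class */
        unsigned int offset[MAX_TC];   /* first queue of each class */
    };

    /* Check that every class maps to a non-empty, in-range, non-overlapping
     * queue range: "last" is one past the final queue of class i, and no
     * later class may start before it. */
    static bool tc_map_valid(const struct tc_map *m, unsigned int num_tc,
                             unsigned int num_tx_queues)
    {
        for (unsigned int i = 0; i < num_tc; i++) {
            unsigned int last = m->offset[i] + m->count[i];

            if (!m->count[i] ||
                m->offset[i] >= num_tx_queues ||
                last > num_tx_queues)
                return false;                 /* empty class or out of range */

            for (unsigned int j = i + 1; j < num_tc; j++)
                if (last > m->offset[j])
                    return false;             /* class i runs into class j */
        }
        return true;
    }

    int main(void)
    {
        struct tc_map ok  = { { 2, 2 }, { 0, 2 } };   /* queues 0-1 and 2-3 */
        struct tc_map bad = { { 3, 2 }, { 0, 2 } };   /* 0-2 overlaps 2-3 */

        printf("%d %d\n", tc_map_valid(&ok, 2, 4), tc_map_valid(&bad, 2, 4));
        return 0;
    }
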
|
D | sch_netem.c
      96  u32 last;  member
     145  state->last = net_random();  in init_crandom()
     162  answer = (value * ((1ull<<32) - rho) + state->last * rho) >> 32;  in get_crandom()
     163  state->last = answer;  in get_crandom()
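
get_crandom() in sch_netem produces correlated random values: each output is a 32-bit fixed-point blend of a fresh random number and the previous output stored in last, weighted by rho. A user-space sketch of the same formula; rand32() stands in for net_random(), and the rho value in main() is arbitrary:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Correlated pseudo-random source: each output mixes a fresh value with
     * the previous output ("last"), with rho (a fraction of 2^32) controlling
     * how sticky the sequence is. */
    struct crand {
        uint64_t rho;     /* correlation, as a fraction of 2^32 */
        uint64_t last;    /* previous output */
    };

    static uint32_t rand32(void)
    {
        return ((uint32_t)(rand() & 0xffff) << 16) | (uint32_t)(rand() & 0xffff);
    }

    static uint32_t get_crandom(struct crand *st)
    {
        if (st->rho == 0)
            return rand32();                 /* no correlation at all */

        uint64_t value = rand32();
        uint64_t answer =
            (value * ((1ull << 32) - st->rho) + st->last * st->rho) >> 32;

        st->last = answer;
        return (uint32_t)answer;
    }

    int main(void)
    {
        struct crand st = { (uint64_t)(0.9 * 4294967296.0), rand32() };

        for (int i = 0; i < 5; i++)
            printf("%u\n", get_crandom(&st));   /* successive values drift slowly */
        return 0;
    }
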
|
/net/8021q/ |
D | vlan.c
     350  bool last = false;  in vlan_device_event() local
     445  last = true;  in vlan_device_event()
     448  if (last)  in vlan_device_event()
|
/net/mac80211/ |
D | rc80211_minstrel_ht.c
     471  bool last, update = false;  in minstrel_ht_tx_status() local
     500  last = !minstrel_ht_txstat_valid(mp, &ar[0]);  in minstrel_ht_tx_status()
     501  for (i = 0; !last; i++) {  in minstrel_ht_tx_status()
     502  last = (i == IEEE80211_TX_MAX_RATES - 1) ||  in minstrel_ht_tx_status()
     507  if (last)  in minstrel_ht_tx_status()
|
/net/irda/ |
D | Kconfig
      50  bool "Cache last LSAP"
      53  Say Y here if you want IrLMP to cache the last LSAP used. This
|