
Searched refs:idx (Results 1 – 25 of 126) sorted by relevance


/net/netfilter/
nft_set_bitmap.c
53 u32 *idx, u32 *off) in nft_bitmap_location() argument
63 *idx = k / BITS_PER_BYTE; in nft_bitmap_location()
71 nft_bitmap_active(const u8 *bitmap, u32 idx, u32 off, u8 genmask) in nft_bitmap_active() argument
73 return (bitmap[idx] & (0x3 << off)) & (genmask << off); in nft_bitmap_active()
82 u32 idx, off; in nft_bitmap_lookup() local
84 nft_bitmap_location(set, key, &idx, &off); in nft_bitmap_lookup()
86 return nft_bitmap_active(priv->bitmap, idx, off, genmask); in nft_bitmap_lookup()
131 u32 idx, off; in nft_bitmap_insert() local
139 nft_bitmap_location(set, nft_set_ext_key(&new->ext), &idx, &off); in nft_bitmap_insert()
141 priv->bitmap[idx] |= (genmask << off); in nft_bitmap_insert()
[all …]
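The nft_set_bitmap.c hits above show the set type's indexing scheme: each element owns two generation bits, so a key is mapped to a byte index plus a bit offset, and membership is tested against a generation mask. A minimal userspace sketch of that split, with the helper names and the fixed 64-byte bitmap invented for illustration:

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_BYTE 8

/* Two bits per element: key k lands in byte k*2/8 at offset k*2%8,
 * mirroring nft_bitmap_location(). */
static void bitmap_location(uint16_t key, uint32_t *idx, uint32_t *off)
{
    uint32_t k = (uint32_t)key << 1;

    *idx = k / BITS_PER_BYTE;
    *off = k % BITS_PER_BYTE;
}

/* Check the element's two generation bits against the genmask,
 * as nft_bitmap_active() does. */
static int bitmap_active(const uint8_t *bitmap, uint32_t idx, uint32_t off,
                         uint8_t genmask)
{
    return (bitmap[idx] & (0x3 << off)) & (genmask << off);
}

int main(void)
{
    uint8_t bitmap[64] = { 0 };
    uint32_t idx, off;
    uint8_t genmask = 0x1;              /* current generation only */

    bitmap_location(9, &idx, &off);
    bitmap[idx] |= genmask << off;      /* insert key 9 */
    printf("key 9 -> byte %u, offset %u, active=%d\n", (unsigned)idx,
           (unsigned)off, bitmap_active(bitmap, idx, off, genmask) != 0);
    return 0;
}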
/net/wireless/
wext-compat.c
412 int idx, struct key_params *params) in __cfg80211_set_encryption() argument
449 if (idx < 4 || idx > 5) in __cfg80211_set_encryption()
451 } else if (idx < 0 || idx > 3) in __cfg80211_set_encryption()
463 if (idx == wdev->wext.default_key && in __cfg80211_set_encryption()
473 err = rdev_del_key(rdev, dev, -1, idx, pairwise, in __cfg80211_set_encryption()
484 if (!addr && idx < 4) { in __cfg80211_set_encryption()
485 memset(wdev->wext.keys->data[idx], 0, in __cfg80211_set_encryption()
486 sizeof(wdev->wext.keys->data[idx])); in __cfg80211_set_encryption()
487 wdev->wext.keys->params[idx].key_len = 0; in __cfg80211_set_encryption()
488 wdev->wext.keys->params[idx].cipher = 0; in __cfg80211_set_encryption()
[all …]
/net/ipv6/netfilter/
ip6t_NPT.c
43 unsigned int i, idx; in ip6t_npt_map_pfx() local
54 idx = i / 32; in ip6t_npt_map_pfx()
55 addr->s6_addr32[idx] &= mask; in ip6t_npt_map_pfx()
56 addr->s6_addr32[idx] |= ~mask & npt->dst_pfx.in6.s6_addr32[idx]; in ip6t_npt_map_pfx()
60 idx = 3; in ip6t_npt_map_pfx()
62 for (idx = 4; idx < ARRAY_SIZE(addr->s6_addr16); idx++) { in ip6t_npt_map_pfx()
63 if ((__force __sum16)addr->s6_addr16[idx] != in ip6t_npt_map_pfx()
67 if (idx == ARRAY_SIZE(addr->s6_addr16)) in ip6t_npt_map_pfx()
71 sum = ~csum_fold(csum_add(csum_unfold((__force __sum16)addr->s6_addr16[idx]), in ip6t_npt_map_pfx()
75 *(__force __sum16 *)&addr->s6_addr16[idx] = sum; in ip6t_npt_map_pfx()
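The ip6t_NPT.c matches rewrite an IPv6 prefix one 32-bit word at a time: idx = i / 32 selects the word, a mask preserves the host bits, and the destination prefix is ORed in. A simplified sketch of just that word walk, using host-order uint32_t words instead of the kernel's __be32 and leaving out the checksum-neutral adjustment done afterwards:

#include <stdint.h>
#include <stdio.h>

/* Replace the top pfx_len bits of a 128-bit address (four words, prefix
 * bits in the most significant end of each word) with bits from dst. */
static void map_prefix(uint32_t addr[4], const uint32_t dst[4],
                       unsigned int pfx_len)
{
    for (unsigned int i = 0; i < pfx_len; i += 32) {
        unsigned int idx = i / 32;
        uint32_t mask;

        if (pfx_len - i >= 32)
            mask = 0;                                 /* whole word replaced */
        else
            mask = (1U << (32 - (pfx_len - i))) - 1;  /* keep host bits */

        addr[idx] &= mask;
        addr[idx] |= ~mask & dst[idx];
    }
}

int main(void)
{
    uint32_t addr[4] = { 0x20010db8, 0x11112222, 0, 1 };
    uint32_t dst[4]  = { 0x20010db9, 0x33330000, 0, 0 };

    map_prefix(addr, dst, 48);  /* rewrites word 0 and the top half of word 1 */
    printf("%08x %08x %08x %08x\n", addr[0], addr[1], addr[2], addr[3]);
    return 0;
}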
/net/nfc/
netlink.c
73 if (nla_put_u32(msg, NFC_ATTR_TARGET_INDEX, target->idx) || in nfc_genl_send_target()
111 u32 idx; in __get_device_from_cb() local
116 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); in __get_device_from_cb()
118 dev = nfc_get_device(idx); in __get_device_from_cb()
186 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) in nfc_genl_targets_found()
243 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) in nfc_genl_tm_activated()
274 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) in nfc_genl_tm_deactivated()
292 nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || in nfc_genl_setup_device_added()
343 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) in nfc_genl_device_removed()
377 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) in nfc_genl_llc_send_sdres()
[all …]
core.c
273 if (dev->targets[i].idx == target_idx) in nfc_find_target()
461 if (dev->active_target->idx != target_idx) { in nfc_deactivate_target()
505 if (dev->active_target->idx != target_idx) { in nfc_data_exchange()
539 if (se->idx == se_idx) in nfc_find_se()
776 targets[i].idx = dev->target_next_idx++; in nfc_targets_found()
836 if (tg->idx == target_idx) in nfc_target_lost()
886 se->idx = se_idx; in nfc_add_se()
913 if (se->idx == se_idx) { in nfc_remove_se()
973 nfc_genl_se_removed(dev, se->idx); in nfc_release()
978 ida_free(&nfc_index_ida, dev->idx); in nfc_release()
[all …]
/net/mac80211/
rc80211_minstrel_ht_debugfs.c
36 minstrel_ht_is_sample_rate(struct minstrel_ht_sta *mi, int idx) in minstrel_ht_is_sample_rate() argument
42 if (mi->sample[type].cur_sample_rates[i] == idx) in minstrel_ht_is_sample_rate()
71 int idx = MI_RATE(i, j); in minstrel_ht_stats_dump() local
94 *(p++) = (idx == mi->max_tp_rate[0]) ? 'A' : ' '; in minstrel_ht_stats_dump()
95 *(p++) = (idx == mi->max_tp_rate[1]) ? 'B' : ' '; in minstrel_ht_stats_dump()
96 *(p++) = (idx == mi->max_tp_rate[2]) ? 'C' : ' '; in minstrel_ht_stats_dump()
97 *(p++) = (idx == mi->max_tp_rate[3]) ? 'D' : ' '; in minstrel_ht_stats_dump()
98 *(p++) = (idx == mi->max_prob_rate) ? 'P' : ' '; in minstrel_ht_stats_dump()
99 *(p++) = minstrel_ht_is_sample_rate(mi, idx) ? 'S' : ' '; in minstrel_ht_stats_dump()
116 p += sprintf(p, " %3u ", idx); in minstrel_ht_stats_dump()
[all …]
rc80211_minstrel_ht.c
332 return GROUP_IDX((rate->idx / 8) + 1, in minstrel_ht_get_group_idx()
373 int group, idx; in minstrel_ht_get_stats() local
377 idx = rate->idx % 8; in minstrel_ht_get_stats()
383 idx = ieee80211_rate_get_vht_mcs(rate); in minstrel_ht_get_stats()
388 for (idx = 0; idx < ARRAY_SIZE(mp->cck_rates); idx++) { in minstrel_ht_get_stats()
389 if (!(mi->supported[group] & BIT(idx))) in minstrel_ht_get_stats()
392 if (rate->idx != mp->cck_rates[idx]) in minstrel_ht_get_stats()
396 if ((mi->supported[group] & BIT(idx + 4)) && in minstrel_ht_get_stats()
398 idx += 4; in minstrel_ht_get_stats()
403 for (idx = 0; idx < ARRAY_SIZE(mp->ofdm_rates[0]); idx++) in minstrel_ht_get_stats()
[all …]
key.c
356 int idx, bool uni, bool multi) in __ieee80211_set_default_key() argument
363 if (idx >= 0 && idx < NUM_DEFAULT_KEYS) { in __ieee80211_set_default_key()
364 key = key_mtx_dereference(sdata->local, sdata->keys[idx]); in __ieee80211_set_default_key()
366 key = key_mtx_dereference(sdata->local, link->gtk[idx]); in __ieee80211_set_default_key()
373 drv_set_default_unicast_key(sdata->local, sdata, idx); in __ieee80211_set_default_key()
382 void ieee80211_set_default_key(struct ieee80211_link_data *link, int idx, in ieee80211_set_default_key() argument
386 __ieee80211_set_default_key(link, idx, uni, multi); in ieee80211_set_default_key()
391 __ieee80211_set_default_mgmt_key(struct ieee80211_link_data *link, int idx) in __ieee80211_set_default_mgmt_key() argument
398 if (idx >= NUM_DEFAULT_KEYS && in __ieee80211_set_default_mgmt_key()
399 idx < NUM_DEFAULT_KEYS + NUM_DEFAULT_MGMT_KEYS) in __ieee80211_set_default_mgmt_key()
[all …]
rate.c
324 rate->idx = 0; in rc_send_low_basicrate()
330 if (rate->idx < 0) in rc_send_low_basicrate()
332 if (basic_rates & (1 << rate->idx)) in rc_send_low_basicrate()
335 for (i = rate->idx + 1; i <= sband->n_bitrates; i++) { in rc_send_low_basicrate()
337 rate->idx = i; in rc_send_low_basicrate()
357 info->control.rates[0].idx = 0; in __rate_control_send_low()
365 info->control.rates[0].idx = 0; in __rate_control_send_low()
376 info->control.rates[0].idx = i; in __rate_control_send_low()
410 info->control.rates[0].idx = mcast_rate - 1; in rate_control_send_low()
637 rate = &sband->bitrates[rates[0].idx]; in rate_fixup_ratelist()
[all …]
/net/xdp/
xsk_queue.h
117 u32 idx = cached_cons & q->ring_mask; in __xskq_cons_read_addr_unchecked() local
119 *addr = ring->desc[idx]; in __xskq_cons_read_addr_unchecked()
197 u32 idx = q->cached_cons & q->ring_mask; in xskq_cons_read_desc() local
199 *desc = ring->desc[idx]; in xskq_cons_read_desc()
222 u32 idx = cached_cons & q->ring_mask; in xskq_cons_read_desc_batch() local
224 descs[nb_entries] = ring->desc[idx]; in xskq_cons_read_desc_batch()
373 u32 idx; in xskq_prod_reserve_desc() local
379 idx = q->cached_prod++ & q->ring_mask; in xskq_prod_reserve_desc()
380 ring->desc[idx].addr = addr; in xskq_prod_reserve_desc()
381 ring->desc[idx].len = len; in xskq_prod_reserve_desc()
[all …]
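Every xsk_queue.h hit reduces a monotonically increasing producer or consumer counter to a ring slot with '& q->ring_mask', which only works because the ring size is a power of two. A self-contained sketch of that masking; struct demo_ring and its sizes are invented, and the kernel additionally checks how many entries are free or filled before touching desc[]:

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8                     /* must be a power of two */

struct demo_ring {
    uint64_t desc[RING_SIZE];
    uint32_t ring_mask;                 /* RING_SIZE - 1 */
    uint32_t cached_prod;
    uint32_t cached_cons;
};

/* Producer: the growing counter wraps naturally; '& ring_mask' picks the
 * slot, the same trick xskq_prod_reserve_desc() uses. */
static void ring_push(struct demo_ring *q, uint64_t addr)
{
    uint32_t idx = q->cached_prod++ & q->ring_mask;

    q->desc[idx] = addr;
}

/* Consumer side, mirroring __xskq_cons_read_addr_unchecked(). */
static uint64_t ring_pop(struct demo_ring *q)
{
    uint32_t idx = q->cached_cons++ & q->ring_mask;

    return q->desc[idx];
}

int main(void)
{
    struct demo_ring q = { .ring_mask = RING_SIZE - 1 };

    for (uint64_t a = 1; a <= 5; a++)
        ring_push(&q, a * 0x1000);
    printf("first entry read back: 0x%llx\n",
           (unsigned long long)ring_pop(&q));   /* 0x1000 */
    return 0;
}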
/net/netfilter/ipvs/
ip_vs_ctl.c
1614 int idx; in ip_vs_flush() local
1621 for(idx = 0; idx < IP_VS_SVC_TAB_SIZE; idx++) { in ip_vs_flush()
1622 hlist_for_each_entry_safe(svc, n, &ip_vs_svc_table[idx], in ip_vs_flush()
1632 for(idx = 0; idx < IP_VS_SVC_TAB_SIZE; idx++) { in ip_vs_flush()
1633 hlist_for_each_entry_safe(svc, n, &ip_vs_svc_fwm_table[idx], in ip_vs_flush()
1693 unsigned int idx; in ip_vs_dst_event() local
1700 for (idx = 0; idx < IP_VS_SVC_TAB_SIZE; idx++) { in ip_vs_dst_event()
1701 hlist_for_each_entry(svc, &ip_vs_svc_table[idx], s_list) { in ip_vs_dst_event()
1710 hlist_for_each_entry(svc, &ip_vs_svc_fwm_table[idx], f_list) { in ip_vs_dst_event()
1747 int idx; in ip_vs_zero_all() local
[all …]
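ip_vs_ctl.c scans its fixed-size service hash tables with an outer loop over bucket indexes and an inner walk of each chain; the _safe iterator is what lets entries be unlinked and freed mid-walk. A plain-C sketch of the same flush pattern over a toy chained table, with all names and sizes made up:

#include <stdio.h>
#include <stdlib.h>

#define SVC_TAB_SIZE 16                 /* stand-in for IP_VS_SVC_TAB_SIZE */

struct svc {
    int id;
    struct svc *next;
};

static struct svc *svc_table[SVC_TAB_SIZE];

/* Visit every bucket, then every chained entry, saving the next pointer
 * before freeing, which is what hlist_for_each_entry_safe() provides. */
static void flush_all(void)
{
    for (int idx = 0; idx < SVC_TAB_SIZE; idx++) {
        struct svc *svc = svc_table[idx], *n;

        while (svc) {
            n = svc->next;
            free(svc);
            svc = n;
        }
        svc_table[idx] = NULL;
    }
}

int main(void)
{
    for (int i = 0; i < 32; i++) {
        struct svc *s = malloc(sizeof(*s));

        s->id = i;
        s->next = svc_table[i % SVC_TAB_SIZE];
        svc_table[i % SVC_TAB_SIZE] = s;
    }
    flush_all();
    printf("table flushed\n");
    return 0;
}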
ip_vs_conn.c
1054 int idx; in ip_vs_conn_array() local
1058 for (idx = 0; idx < ip_vs_conn_tab_size; idx++) { in ip_vs_conn_array()
1059 hlist_for_each_entry_rcu(cp, &ip_vs_conn_tab[idx], c_list) { in ip_vs_conn_array()
1064 iter->l = &ip_vs_conn_tab[idx]; in ip_vs_conn_array()
1090 int idx; in ip_vs_conn_seq_next() local
1101 idx = l - ip_vs_conn_tab; in ip_vs_conn_seq_next()
1102 while (++idx < ip_vs_conn_tab_size) { in ip_vs_conn_seq_next()
1103 hlist_for_each_entry_rcu(cp, &ip_vs_conn_tab[idx], c_list) { in ip_vs_conn_seq_next()
1104 iter->l = &ip_vs_conn_tab[idx]; in ip_vs_conn_seq_next()
1303 int idx; in ip_vs_random_dropentry() local
[all …]
ip_vs_pe_sip.c
15 int *idx) in ip_vs_dbg_callid() argument
18 size_t len = min3(max_len, callid_len, buf_len - *idx - 1); in ip_vs_dbg_callid()
19 memcpy(buf + *idx, callid, len); in ip_vs_dbg_callid()
20 buf[*idx+len] = '\0'; in ip_vs_dbg_callid()
21 *idx += len + 1; in ip_vs_dbg_callid()
22 return buf + *idx - len; in ip_vs_dbg_callid()
/net/smc/
smc_wr.c
45 u32 idx; member
171 static inline int smc_wr_tx_get_free_slot_index(struct smc_link *link, u32 *idx) in smc_wr_tx_get_free_slot_index() argument
173 *idx = link->wr_tx_cnt; in smc_wr_tx_get_free_slot_index()
176 for_each_clear_bit(*idx, link->wr_tx_mask, link->wr_tx_cnt) { in smc_wr_tx_get_free_slot_index()
177 if (!test_and_set_bit(*idx, link->wr_tx_mask)) in smc_wr_tx_get_free_slot_index()
180 *idx = link->wr_tx_cnt; in smc_wr_tx_get_free_slot_index()
203 u32 idx = link->wr_tx_cnt; in smc_wr_tx_get_free_slot() local
211 rc = smc_wr_tx_get_free_slot_index(link, &idx); in smc_wr_tx_get_free_slot()
219 (smc_wr_tx_get_free_slot_index(link, &idx) != -EBUSY), in smc_wr_tx_get_free_slot()
226 if (idx == link->wr_tx_cnt) in smc_wr_tx_get_free_slot()
[all …]
/net/ipv4/netfilter/
ipt_CLUSTERIP.c
710 struct clusterip_seq_position *idx; in clusterip_seq_start() local
718 idx = kmalloc(sizeof(struct clusterip_seq_position), GFP_KERNEL); in clusterip_seq_start()
719 if (!idx) in clusterip_seq_start()
722 idx->pos = *pos; in clusterip_seq_start()
723 idx->weight = weight; in clusterip_seq_start()
724 idx->bit = ffs(local_nodes); in clusterip_seq_start()
725 idx->val = local_nodes; in clusterip_seq_start()
726 clear_bit(idx->bit - 1, &idx->val); in clusterip_seq_start()
728 return idx; in clusterip_seq_start()
733 struct clusterip_seq_position *idx = v; in clusterip_seq_next() local
[all …]
/net/ethtool/
bitset.c
383 int ret, idx; in ethnl_parse_bit() local
393 idx = nla_get_u32(tb[ETHTOOL_A_BITSET_BIT_INDEX]); in ethnl_parse_bit()
394 if (idx >= nbits) { in ethnl_parse_bit()
400 name = names ? names[idx] : NULL; in ethnl_parse_bit()
409 idx = ethnl_name_to_idx(names, nbits, in ethnl_parse_bit()
411 if (idx < 0) { in ethnl_parse_bit()
423 *index = idx; in ethnl_parse_bit()
456 unsigned int idx; in ethnl_update_bitset32_verbose() local
463 ret = ethnl_parse_bit(&idx, &new_val, nbits, bit_attr, no_mask, in ethnl_update_bitset32_verbose()
467 old_val = bitmap[idx / 32] & ((u32)1 << (idx % 32)); in ethnl_update_bitset32_verbose()
[all …]
/net/netlabel/
netlabel_kapi.c
611 u32 idx; in netlbl_catmap_walk() local
620 idx = offset / NETLBL_CATMAP_MAPSIZE; in netlbl_catmap_walk()
623 idx = 0; in netlbl_catmap_walk()
626 bitmap = iter->bitmap[idx] >> bit; in netlbl_catmap_walk()
635 (NETLBL_CATMAP_MAPSIZE * idx) + bit; in netlbl_catmap_walk()
637 if (++idx >= NETLBL_CATMAP_MAPCNT) { in netlbl_catmap_walk()
640 idx = 0; in netlbl_catmap_walk()
644 bitmap = iter->bitmap[idx]; in netlbl_catmap_walk()
667 u32 idx; in netlbl_catmap_walkrng() local
677 idx = offset / NETLBL_CATMAP_MAPSIZE; in netlbl_catmap_walkrng()
[all …]
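netlbl_catmap_walk() turns a category offset into a word index (offset / NETLBL_CATMAP_MAPSIZE) plus a bit position, then scans forward for the next set bit. A sketch of that "next set bit at or after offset" walk over a flat uint64_t array, with the kernel's chained catmap nodes collapsed into a single array for brevity:

#include <stdint.h>
#include <stdio.h>

#define MAPSIZE 64                      /* bits per word */
#define MAPCNT  4                       /* words in this toy map */

/* Return the first set bit at or after 'offset', or -1 if none. */
static int catmap_walk(const uint64_t *map, uint32_t offset)
{
    uint32_t idx = offset / MAPSIZE;
    uint32_t bit = offset % MAPSIZE;

    while (idx < MAPCNT) {
        uint64_t word = map[idx] >> bit;

        while (word) {
            if (word & 1)
                return (int)(idx * MAPSIZE + bit);
            word >>= 1;
            bit++;
        }
        idx++;
        bit = 0;
    }
    return -1;
}

int main(void)
{
    uint64_t map[MAPCNT] = { 0, 1ULL << 5, 0, 0 };

    printf("next category >= 0:  %d\n", catmap_walk(map, 0));   /* 69 */
    printf("next category >= 70: %d\n", catmap_walk(map, 70));  /* -1 */
    return 0;
}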
/net/caif/
cfmuxl.c
108 int idx = phyid % DN_CACHE_SIZE; in cfmuxl_remove_dnlayer() local
111 RCU_INIT_POINTER(muxl->dn_cache[idx], NULL); in cfmuxl_remove_dnlayer()
126 int idx = id % UP_CACHE_SIZE; in get_up() local
127 up = rcu_dereference(muxl->up_cache[idx]); in get_up()
131 rcu_assign_pointer(muxl->up_cache[idx], up); in get_up()
140 int idx = dev_info->id % DN_CACHE_SIZE; in get_dn() local
141 dn = rcu_dereference(muxl->dn_cache[idx]); in get_dn()
145 rcu_assign_pointer(muxl->dn_cache[idx], dn); in get_dn()
155 int idx = id % UP_CACHE_SIZE; in cfmuxl_remove_uplayer() local
167 RCU_INIT_POINTER(muxl->up_cache[idx], NULL); in cfmuxl_remove_uplayer()
/net/batman-adv/
bat_v.c
186 int idx = 0; in batadv_v_neigh_dump_hardif() local
190 if (idx++ < *idx_s) in batadv_v_neigh_dump_hardif()
194 *idx_s = idx - 1; in batadv_v_neigh_dump_hardif()
219 int idx = cb->args[1]; in batadv_v_neigh_dump() local
228 &idx) == 0) in batadv_v_neigh_dump()
242 &idx)) { in batadv_v_neigh_dump()
251 cb->args[1] = idx; in batadv_v_neigh_dump()
394 int idx = 0; in batadv_v_orig_dump_bucket() local
398 if (idx++ < *idx_s) in batadv_v_orig_dump_bucket()
404 *idx_s = idx - 1; in batadv_v_orig_dump_bucket()
[all …]
/net/dccp/
feat.c
191 int idx = dccp_feat_index(feat_num); in dccp_feat_type() local
193 if (idx < 0) in dccp_feat_type()
195 return dccp_feat_table[idx].reconciliation; in dccp_feat_type()
200 int idx = dccp_feat_index(feat_num); in dccp_feat_default_value() local
205 DCCP_BUG_ON(idx < 0); in dccp_feat_default_value()
207 return idx < 0 ? 0 : dccp_feat_table[idx].default_value; in dccp_feat_default_value()
305 static int __dccp_feat_activate(struct sock *sk, const int idx, in __dccp_feat_activate() argument
311 if (idx < 0 || idx >= DCCP_FEAT_SUPPORTED_MAX) in __dccp_feat_activate()
313 if (dccp_feat_table[idx].activation_hdlr == NULL) in __dccp_feat_activate()
317 val = dccp_feat_table[idx].default_value; in __dccp_feat_activate()
[all …]
/net/sched/
sch_fq_pie.c
138 u32 idx; in fq_pie_qdisc_enqueue() local
141 idx = fq_pie_classify(skb, sch, &ret); in fq_pie_qdisc_enqueue()
142 if (idx == 0) { in fq_pie_qdisc_enqueue()
148 idx--; in fq_pie_qdisc_enqueue()
150 sel_flow = &q->flows[idx]; in fq_pie_qdisc_enqueue()
414 u32 idx; in fq_pie_init() local
446 for (idx = 0; idx < q->flows_cnt; idx++) { in fq_pie_init()
447 struct fq_pie_flow *flow = q->flows + idx; in fq_pie_init()
526 u32 idx; in fq_pie_reset() local
530 for (idx = 0; idx < q->flows_cnt; idx++) { in fq_pie_reset()
[all …]
ematch.c
171 struct tcf_ematch *em, struct nlattr *nla, int idx) in tcf_em_validate() argument
198 if (ref <= idx) in tcf_em_validate()
308 int idx, list_len, matches_len, err; in tcf_em_tree_validate() local
350 for (idx = 0; nla_ok(rt_match, list_len); idx++) { in tcf_em_tree_validate()
353 if (rt_match->nla_type != (idx + 1)) in tcf_em_tree_validate()
356 if (idx >= tree_hdr->nmatches) in tcf_em_tree_validate()
362 em = tcf_em_get_match(tree, idx); in tcf_em_tree_validate()
364 err = tcf_em_validate(tp, tree_hdr, em, rt_match, idx); in tcf_em_tree_validate()
376 if (idx != tree_hdr->nmatches) { in tcf_em_tree_validate()
sch_fq_codel.c
142 unsigned int maxbacklog = 0, idx = 0, i, len; in fq_codel_drop() local
157 idx = i; in fq_codel_drop()
164 flow = &q->flows[idx]; in fq_codel_drop()
176 q->backlogs[idx] -= len; in fq_codel_drop()
181 return idx; in fq_codel_drop()
188 unsigned int idx, prev_backlog, prev_qlen; in fq_codel_enqueue() local
194 idx = fq_codel_classify(skb, sch, &ret); in fq_codel_enqueue()
195 if (idx == 0) { in fq_codel_enqueue()
201 idx--; in fq_codel_enqueue()
204 flow = &q->flows[idx]; in fq_codel_enqueue()
[all …]
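In sch_fq_codel.c, as in sch_fq_pie.c above it, the classifier returns a 1-based flow number so that 0 can signal "drop the packet"; the enqueue path then decrements idx before indexing the flow array. A small sketch of that convention; classify() here is a stand-in, not the kernel's fq_codel_classify():

#include <stdint.h>
#include <stdio.h>

#define FLOWS_CNT 1024

/* Returns 1..FLOWS_CNT for a usable flow, 0 to signal a drop. */
static uint32_t classify(uint32_t hash)
{
    if (hash == 0)                      /* pretend this packet is rejected */
        return 0;
    return (hash % FLOWS_CNT) + 1;      /* 1-based flow number */
}

int main(void)
{
    uint32_t backlog[FLOWS_CNT] = { 0 };
    uint32_t hashes[] = { 0, 7, 1031, 7 };

    for (size_t i = 0; i < sizeof(hashes) / sizeof(hashes[0]); i++) {
        uint32_t idx = classify(hashes[i]);

        if (idx == 0) {                 /* 0 means drop, not flow 0 */
            printf("packet %zu dropped\n", i);
            continue;
        }
        idx--;                          /* convert to an array index */
        backlog[idx]++;
    }
    printf("flow 7 backlog: %u packets\n", (unsigned)backlog[7]);
    return 0;
}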
/net/mctp/
neigh.c
246 int rc, idx, req_ifindex; in mctp_rtm_getneigh() local
250 int idx; in mctp_rtm_getneigh() member
256 idx = 0; in mctp_rtm_getneigh()
259 if (idx < cbctx->idx) in mctp_rtm_getneigh()
271 idx++; in mctp_rtm_getneigh()
275 cbctx->idx = idx; in mctp_rtm_getneigh()
device.c
24 int idx; member
123 int idx = 0, rc; in mctp_dump_addrinfo() local
130 for (; mcb->h < NETDEV_HASHENTRIES; mcb->h++, mcb->idx = 0) { in mctp_dump_addrinfo()
131 idx = 0; in mctp_dump_addrinfo()
134 if (idx >= mcb->idx && in mctp_dump_addrinfo()
147 idx++; in mctp_dump_addrinfo()
154 mcb->idx = idx; in mctp_dump_addrinfo()
