/net/batman-adv/
gateway_common.c
  136  struct batadv_tvlv_gateway_data gw;  in batadv_gw_tvlv_container_update() (local)
  140  gw_mode = atomic_read(&bat_priv->gw.mode);  in batadv_gw_tvlv_container_update()
  148  down = atomic_read(&bat_priv->gw.bandwidth_down);  in batadv_gw_tvlv_container_update()
  149  up = atomic_read(&bat_priv->gw.bandwidth_up);  in batadv_gw_tvlv_container_update()
  150  gw.bandwidth_down = htonl(down);  in batadv_gw_tvlv_container_update()
  151  gw.bandwidth_up = htonl(up);  in batadv_gw_tvlv_container_update()
  153  &gw, sizeof(gw));  in batadv_gw_tvlv_container_update()
  177  down_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_down);  in batadv_gw_bandwidth_set()
  178  up_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_up);  in batadv_gw_bandwidth_set()
  202  atomic_set(&bat_priv->gw.bandwidth_down, down_new);  in batadv_gw_bandwidth_set()
  [all …]
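The gateway_common.c hits above are batman-adv reading its advertised gateway bandwidth out of atomics and converting it to network byte order with htonl() before handing the struct to the TVLV container code. A minimal userspace sketch of that packing step, with simplified stand-in types (the real struct batadv_tvlv_gateway_data and kernel atomic_t are not used here):

/* Illustrative userspace model of the packing done around lines 148-153 of
 * gateway_common.c: read host-order bandwidth values and store them in a
 * TVLV-style container in network byte order. Types are simplified stand-ins,
 * not the kernel's definitions. */
#include <arpa/inet.h>   /* htonl() */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct gw_tvlv {                 /* stand-in for struct batadv_tvlv_gateway_data */
    uint32_t bandwidth_down;     /* network byte order once packed */
    uint32_t bandwidth_up;
};

static atomic_uint cfg_down;     /* kbit/s, host order (models gw.bandwidth_down) */
static atomic_uint cfg_up;       /* kbit/s, host order (models gw.bandwidth_up) */

static void gw_container_update(struct gw_tvlv *out)
{
    uint32_t down = atomic_load(&cfg_down);
    uint32_t up   = atomic_load(&cfg_up);

    out->bandwidth_down = htonl(down); /* convert before handing to the TVLV layer */
    out->bandwidth_up   = htonl(up);
}

int main(void)
{
    struct gw_tvlv gw;

    atomic_store(&cfg_down, 10000);
    atomic_store(&cfg_up, 2000);
    gw_container_update(&gw);
    printf("down=0x%08x up=0x%08x (network byte order)\n",
           (unsigned)gw.bandwidth_down, (unsigned)gw.bandwidth_up);
    return 0;
}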
gateway_client.c
  85   gw_node = rcu_dereference(bat_priv->gw.curr_gw);  in batadv_gw_get_selected_gw_node()
  134  spin_lock_bh(&bat_priv->gw.list_lock);  in batadv_gw_select()
  139  curr_gw_node = rcu_replace_pointer(bat_priv->gw.curr_gw, new_gw_node,  in batadv_gw_select()
  145  spin_unlock_bh(&bat_priv->gw.list_lock);  in batadv_gw_select()
  161  atomic_set(&bat_priv->gw.reselect, 1);  in batadv_gw_reselect()
  176  if (atomic_read(&bat_priv->gw.mode) != BATADV_GW_MODE_CLIENT)  in batadv_gw_check_client_stop()
  208  if (atomic_read(&bat_priv->gw.mode) != BATADV_GW_MODE_CLIENT)  in batadv_gw_election()
  211  if (!bat_priv->algo_ops->gw.get_best_gw_node)  in batadv_gw_election()
  216  if (!batadv_atomic_dec_not_zero(&bat_priv->gw.reselect) && curr_gw)  in batadv_gw_election()
  223  next_gw = bat_priv->algo_ops->gw.get_best_gw_node(bat_priv);  in batadv_gw_election()
  [all …]
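Several gateway_client.c hits revolve around the gw.reselect flag: batadv_gw_reselect() sets it to 1 (line 161) and batadv_gw_election() consumes it with batadv_atomic_dec_not_zero() (line 216), so a pending reselection request is acted on exactly once. A rough userspace model of such a decrement-if-not-zero helper, built on a C11 compare-and-swap loop (the helper name is mine, not the kernel's):

/* Model of an atomic "decrement if not zero" consume step, as used for the
 * gw.reselect flag in batadv_gw_election(). Hypothetical userspace names. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_int reselect; /* models bat_priv->gw.reselect */

/* Returns true if the counter was non-zero and has been decremented. */
static bool atomic_dec_not_zero(atomic_int *v)
{
    int cur = atomic_load(v);

    while (cur != 0) {
        if (atomic_compare_exchange_weak(v, &cur, cur - 1))
            return true;
        /* cur was refreshed by the failed CAS; retry with the new value */
    }
    return false;
}

int main(void)
{
    atomic_store(&reselect, 1);  /* what batadv_gw_reselect() effectively does */
    printf("first election sees reselect: %d\n", atomic_dec_not_zero(&reselect));
    printf("second election sees reselect: %d\n", atomic_dec_not_zero(&reselect));
    return 0;
}

The CAS loop retries with the refreshed value after a failed exchange, which is what keeps the consume step correct when several callers race for the same flag.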
sysfs.c
  464  if (!bat_priv->algo_ops->gw.get_best_gw_node ||  in batadv_show_gw_mode()
  465  !bat_priv->algo_ops->gw.is_eligible)  in batadv_show_gw_mode()
  468  switch (atomic_read(&bat_priv->gw.mode)) {  in batadv_show_gw_mode()
  500  if (!bat_priv->algo_ops->gw.get_best_gw_node ||  in batadv_store_gw_mode()
  501  !bat_priv->algo_ops->gw.is_eligible)  in batadv_store_gw_mode()
  526  if (atomic_read(&bat_priv->gw.mode) == gw_mode_tmp)  in batadv_store_gw_mode()
  529  switch (atomic_read(&bat_priv->gw.mode)) {  in batadv_store_gw_mode()
  558  atomic_set(&bat_priv->gw.mode, (unsigned int)gw_mode_tmp);  in batadv_store_gw_mode()
  576  if (!bat_priv->algo_ops->gw.get_best_gw_node ||  in batadv_show_gw_sel_class()
  577  !bat_priv->algo_ops->gw.is_eligible)  in batadv_show_gw_sel_class()
  [all …]
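The sysfs.c hits are the show/store handlers for gw_mode and gw_sel_class: the store path compares the requested mode against the current gw.mode atomic and only then publishes the new value (lines 526-558). A sketch of that parse-then-atomically-store pattern, assuming the off/client/server mode names batman-adv uses and inventing the helper names:

/* Userspace sketch of parsing a gateway mode string and publishing it with an
 * atomic store, loosely modelled on batadv_store_gw_mode(). Helper names are
 * hypothetical. */
#include <stdatomic.h>
#include <stdio.h>
#include <string.h>

enum gw_mode { GW_MODE_OFF, GW_MODE_CLIENT, GW_MODE_SERVER };

static atomic_int cur_gw_mode; /* models bat_priv->gw.mode */

static int store_gw_mode(const char *buf)
{
    int mode;

    if (!strcmp(buf, "off"))
        mode = GW_MODE_OFF;
    else if (!strcmp(buf, "client"))
        mode = GW_MODE_CLIENT;
    else if (!strcmp(buf, "server"))
        mode = GW_MODE_SERVER;
    else
        return -1;                    /* unknown mode string */

    if (atomic_load(&cur_gw_mode) == mode)
        return 0;                     /* nothing to change */

    atomic_store(&cur_gw_mode, mode); /* readers pick this up without a lock */
    return 0;
}

int main(void)
{
    store_gw_mode("client");
    printf("gw_mode=%d\n", atomic_load(&cur_gw_mode));
    return 0;
}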
bat_v.c
  666   atomic_set(&bat_priv->gw.sel_class, 50);  in batadv_v_init_sel_class()
  679   old_class = atomic_read(&bat_priv->gw.sel_class);  in batadv_v_store_sel_class()
  680   atomic_set(&bat_priv->gw.sel_class, class);  in batadv_v_store_sel_class()
  690   u32 class = atomic_read(&bat_priv->gw.sel_class);  in batadv_v_show_sel_class()
  751   hlist_for_each_entry_rcu(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_v_gw_get_best_gw_node()
  792   threshold = atomic_read(&bat_priv->gw.sel_class);  in batadv_v_gw_is_eligible()
  890   hlist_for_each_entry_rcu(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_v_gw_print()
  1015  spin_lock_bh(&bat_priv->gw.list_lock);  in batadv_v_gw_dump()
  1016  cb->seq = bat_priv->gw.generation << 1 | 1;  in batadv_v_gw_dump()
  1018  hlist_for_each_entry(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_v_gw_dump()
  [all …]
netlink.c
  310  atomic_read(&bat_priv->gw.bandwidth_down)))  in batadv_netlink_mesh_fill()
  314  atomic_read(&bat_priv->gw.bandwidth_up)))  in batadv_netlink_mesh_fill()
  318  atomic_read(&bat_priv->gw.mode)))  in batadv_netlink_mesh_fill()
  321  if (bat_priv->algo_ops->gw.get_best_gw_node &&  in batadv_netlink_mesh_fill()
  322  bat_priv->algo_ops->gw.is_eligible) {  in batadv_netlink_mesh_fill()
  327  atomic_read(&bat_priv->gw.sel_class)))  in batadv_netlink_mesh_fill()
  508  atomic_set(&bat_priv->gw.bandwidth_down, nla_get_u32(attr));  in batadv_netlink_set_mesh()
  515  atomic_set(&bat_priv->gw.bandwidth_up, nla_get_u32(attr));  in batadv_netlink_set_mesh()
  543  atomic_set(&bat_priv->gw.mode, gw_mode);  in batadv_netlink_set_mesh()
  549  bat_priv->algo_ops->gw.get_best_gw_node &&  in batadv_netlink_set_mesh()
  [all …]
main.c
  168  spin_lock_init(&bat_priv->gw.list_lock);  in batadv_mesh_init()
  180  INIT_HLIST_HEAD(&bat_priv->gw.gateway_list);  in batadv_mesh_init()
  197  bat_priv->gw.generation = 0;  in batadv_mesh_init()
  238  atomic_set(&bat_priv->gw.reselect, 0);  in batadv_mesh_init()
bat_iv_ogm.c
  2410  atomic_set(&bat_priv->gw.sel_class, 20);  in batadv_iv_init_sel_class()
  2426  hlist_for_each_entry_rcu(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_iv_gw_get_best_gw_node()
  2442  switch (atomic_read(&bat_priv->gw.sel_class)) {  in batadv_iv_gw_get_best_gw_node()
  2505  if (atomic_read(&bat_priv->gw.sel_class) <= 2)  in batadv_iv_gw_is_eligible()
  2540  if ((atomic_read(&bat_priv->gw.sel_class) > 3) &&  in batadv_iv_gw_is_eligible()
  2541  (orig_tq_avg - gw_tq_avg < atomic_read(&bat_priv->gw.sel_class)))  in batadv_iv_gw_is_eligible()
  2614  hlist_for_each_entry_rcu(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_iv_gw_print()
  2719  spin_lock_bh(&bat_priv->gw.list_lock);  in batadv_iv_gw_dump()
  2720  cb->seq = bat_priv->gw.generation << 1 | 1;  in batadv_iv_gw_dump()
  2722  hlist_for_each_entry(gw_node, &bat_priv->gw.gateway_list, list) {  in batadv_iv_gw_dump()
  [all …]
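In the B.A.T.M.A.N. IV hits, gw.sel_class also acts as a switching threshold: lines 2540-2541 only allow leaving the current gateway when the candidate's TQ beats it by at least sel_class (for classes above 3), which damps gateway flapping. A small model of that hysteresis check, with illustrative variable names:

/* Model of the TQ hysteresis from bat_iv_ogm.c lines 2540-2541: for
 * sel_class > 3, a candidate only clears the switch threshold when its TQ
 * exceeds the current gateway's TQ by at least sel_class. Lower classes use
 * other criteria in the kernel (see lines 2442 and 2505). */
#include <stdbool.h>
#include <stdio.h>

static bool clears_switch_threshold(int orig_tq_avg, int gw_tq_avg, int sel_class)
{
    /* margin large enough to justify switching gateways */
    return orig_tq_avg - gw_tq_avg >= sel_class;
}

int main(void)
{
    printf("%d\n", clears_switch_threshold(200, 190, 20)); /* 0: margin too small */
    printf("%d\n", clears_switch_threshold(230, 190, 20)); /* 1: margin reached */
    return 0;
}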
soft-interface.c
  275  gw_mode = atomic_read(&bat_priv->gw.mode);  in batadv_interface_tx()
  805  atomic_set(&bat_priv->gw.mode, BATADV_GW_MODE_OFF);  in batadv_softif_init_late()
  806  atomic_set(&bat_priv->gw.bandwidth_down, 100);  in batadv_softif_init_late()
  807  atomic_set(&bat_priv->gw.bandwidth_up, 20);  in batadv_softif_init_late()
bridge_loop_avoidance.c
  87  const struct batadv_bla_backbone_gw *gw;  in batadv_choose_backbone_gw() (local)
  90  gw = (struct batadv_bla_backbone_gw *)data;  in batadv_choose_backbone_gw()
  91  hash = jhash(&gw->orig, sizeof(gw->orig), hash);  in batadv_choose_backbone_gw()
  92  hash = jhash(&gw->vid, sizeof(gw->vid), hash);  in batadv_choose_backbone_gw()
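batadv_choose_backbone_gw() picks a hash bucket for a backbone gateway by feeding its originator address and VLAN id through jhash() (lines 91-92). A self-contained userspace model of the same idea; FNV-1a stands in for jhash, and the reduced struct and bucket count are assumptions:

/* Model of hashing a (originator MAC, VLAN id) pair into a bucket index, as
 * batadv_choose_backbone_gw() does with jhash(). FNV-1a is used purely as a
 * self-contained stand-in for the kernel's jhash(). */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define BUCKETS 64   /* assumed table size */

static uint32_t fnv1a(const void *data, size_t len, uint32_t hash)
{
    const uint8_t *p = data;

    while (len--) {
        hash ^= *p++;
        hash *= 16777619u;
    }
    return hash;
}

struct backbone_gw {      /* reduced stand-in for struct batadv_bla_backbone_gw */
    uint8_t orig[6];      /* originator MAC address */
    uint16_t vid;         /* VLAN identifier */
};

static uint32_t choose_backbone_gw(const struct backbone_gw *gw)
{
    uint32_t hash = 2166136261u;                     /* FNV offset basis as seed */

    hash = fnv1a(&gw->orig, sizeof(gw->orig), hash); /* mix in the MAC */
    hash = fnv1a(&gw->vid, sizeof(gw->vid), hash);   /* then the VLAN id */
    return hash % BUCKETS;
}

int main(void)
{
    struct backbone_gw gw = { .orig = { 0x02, 0, 0, 0, 0, 1 }, .vid = 42 };

    printf("bucket=%u\n", (unsigned)choose_backbone_gw(&gw));
    return 0;
}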
types.h
  1743  struct batadv_priv_gw gw;  (member)
  2332  struct batadv_algo_gw_ops gw;  (member)
/net/ipv6/netfilter/
nf_dup_ipv6.c
  20  const struct in6_addr *gw, int oif)  in nf_dup_ipv6_route() (argument)
  30  fl6.daddr = *gw;  in nf_dup_ipv6_route()
  48  const struct in6_addr *gw, int oif)  in nf_dup_ipv6() (argument)
  65  if (nf_dup_ipv6_route(net, skb, gw, oif)) {  in nf_dup_ipv6()
nft_dup_ipv6.c
  25  struct in6_addr *gw = (struct in6_addr *)&regs->data[priv->sreg_addr];  in nft_dup_ipv6_eval() (local)
  28  nf_dup_ipv6(nft_net(pkt), pkt->skb, nft_hook(pkt), gw, oif);  in nft_dup_ipv6_eval()
/net/ipv4/netfilter/
nf_dup_ipv4.c
  24  const struct in_addr *gw, int oif)  in nf_dup_ipv4_route() (argument)
  34  fl4.daddr = gw->s_addr;  in nf_dup_ipv4_route()
  51  const struct in_addr *gw, int oif)  in nf_dup_ipv4() (argument)
  86  if (nf_dup_ipv4_route(net, skb, gw, oif)) {  in nf_dup_ipv4()
nft_dup_ipv4.c
  25  struct in_addr gw = {  in nft_dup_ipv4_eval() (local)
  30  nf_dup_ipv4(nft_net(pkt), pkt->skb, nft_hook(pkt), &gw, oif);  in nft_dup_ipv4_eval()
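Both duplication helpers build a fresh flow key for the copied packet and put the configured gateway into its destination (fl6.daddr = *gw at nf_dup_ipv6.c line 30, fl4.daddr = gw->s_addr at nf_dup_ipv4.c line 34), so the duplicate is routed toward that gateway rather than the packet's own destination; xt_TEE.c further below rejects an all-zero gateway at rule-check time. A toy model of that flow-key setup, with invented names rather than kernel API:

/* Toy model of steering a duplicated IPv4 packet: the route lookup key's
 * destination is set to the configured gateway, mirroring
 * "fl4.daddr = gw->s_addr" in nf_dup_ipv4_route(). All names here are
 * illustrative; the all-zero gateway case is filtered out earlier, as
 * tee_tg_check() does in xt_TEE.c. */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>

struct flow_key {             /* reduced stand-in for struct flowi4 */
    struct in_addr daddr;     /* where the duplicate gets routed */
    int oif;                  /* optional output interface index */
};

static void build_dup_flow(struct flow_key *fl, const struct in_addr *gw, int oif)
{
    memset(fl, 0, sizeof(*fl));
    fl->daddr = *gw;          /* route the copy toward the gateway */
    fl->oif = oif;
}

int main(void)
{
    struct in_addr gw;
    struct flow_key fl;
    char buf[INET_ADDRSTRLEN];

    inet_pton(AF_INET, "198.51.100.1", &gw);
    build_dup_flow(&fl, &gw, 0);
    printf("duplicate routed via %s\n",
           inet_ntop(AF_INET, &fl.daddr, buf, sizeof(buf)));
    return 0;
}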
/net/can/
Makefile
  16  obj-$(CONFIG_CAN_GW) += can-gw.o
  17  can-gw-y := gw.o
/net/netfilter/
xt_TEE.c
  42  nf_dup_ipv4(xt_net(par), skb, xt_hooknum(par), &info->gw.in, oif);  in tee_tg4()
  54  nf_dup_ipv6(xt_net(par), skb, xt_hooknum(par), &info->gw.in6, oif);  in tee_tg6()
  99  if (memcmp(&info->gw, &tee_zero_address,  in tee_tg_check()
/net/ipv4/
fib_semantics.c
  442  int ip_fib_check_default(__be32 gw, struct net_device *dev)  in ip_fib_check_default() (argument)
  453  nh->fib_nh_gw4 == gw &&  in ip_fib_check_default()
  676  static int fib_gw_from_attr(__be32 *gw, struct nlattr *nla,  in fib_gw_from_attr() (argument)
  679  if (nla_len(nla) < sizeof(*gw)) {  in fib_gw_from_attr()
  684  *gw = nla_get_in_addr(nla);  in fib_gw_from_attr()
  959  __be32 gw;  in fib_nh_match() (local)
  961  err = fib_gw_from_attr(&gw, nla, extack);  in fib_nh_match()
  966  gw != nh->fib_nh_gw4)  in fib_nh_match()
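fib_gw_from_attr() is a defensive netlink-attribute reader: it refuses to extract a gateway when the attribute payload is shorter than an IPv4 address (line 679) and only then reads it out (line 684). A self-contained model of that length check over a simplified TLV; the struct layout and helper name are assumptions, not the real netlink format:

/* Model of validating attribute length before reading a 4-byte gateway, in the
 * spirit of fib_gw_from_attr(). The TLV struct below is a simplified stand-in
 * for struct nlattr, not the real netlink layout. */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct tlv {
    uint16_t len;              /* length of payload[] in bytes */
    uint16_t type;
    unsigned char payload[8];
};

static int gw_from_attr(uint32_t *gw, const struct tlv *attr)
{
    if (attr->len < sizeof(*gw)) {
        fprintf(stderr, "gateway attribute too short\n");
        return -EINVAL;                          /* reject, never read past it */
    }
    memcpy(gw, attr->payload, sizeof(*gw));      /* still network byte order */
    return 0;
}

int main(void)
{
    struct tlv attr = { .len = 4, .type = 5 /* hypothetical gateway type */ };
    uint32_t gw = 0;

    memcpy(attr.payload, "\xc6\x33\x64\x01", 4); /* 198.51.100.1, big endian */
    if (gw_from_attr(&gw, &attr) == 0)
        printf("gw=0x%08x (network byte order)\n", (unsigned)gw);
    return 0;
}

The point mirrored here is simply that the length check happens before the copy, so a malformed attribute can never cause an out-of-bounds read of the gateway.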
nexthop.c
  1304  .fc_gw4 = cfg->gw.ipv4,  in nh_create_ipv4()
  1305  .fc_gw_family = cfg->gw.ipv4 ? AF_INET : 0,  in nh_create_ipv4()
  1344  .fc_gateway = cfg->gw.ipv6,  in nh_create_ipv6()
  1353  if (!ipv6_addr_any(&cfg->gw.ipv6))  in nh_create_ipv6()
  1604  cfg->gw.ipv4 = nla_get_be32(gwa);  in rtm_to_nh_config()
  1611  cfg->gw.ipv6 = nla_get_in6_addr(gwa);  in rtm_to_nh_config()
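The nexthop.c hits show the gateway being carried in a family-agnostic union (cfg->gw.ipv4 / cfg->gw.ipv6) with the address family derived from what was actually configured: AF_INET only if an IPv4 gateway is non-zero (line 1305), and the IPv6 path checking !ipv6_addr_any() (line 1353). A compact model of that union-plus-family pattern, with made-up struct names:

/* Model of carrying a gateway as an address-family union and deriving the
 * family from what was actually configured, similar to how nh_create_ipv4()
 * and nh_create_ipv6() consume cfg->gw. Struct names are made up. */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>

struct nh_cfg {
    union {
        struct in_addr  ipv4;
        struct in6_addr ipv6;
    } gw;
};

static int gw_family(const struct nh_cfg *cfg, int cfg_family)
{
    if (cfg_family == AF_INET)
        /* only claim an IPv4 gateway if one was actually set */
        return cfg->gw.ipv4.s_addr ? AF_INET : AF_UNSPEC;

    /* IPv6: the unspecified (all-zero) address means "no gateway" */
    return IN6_IS_ADDR_UNSPECIFIED(&cfg->gw.ipv6) ? AF_UNSPEC : AF_INET6;
}

int main(void)
{
    struct nh_cfg cfg;

    memset(&cfg, 0, sizeof(cfg));
    inet_pton(AF_INET6, "2001:db8::1", &cfg.gw.ipv6);
    printf("gateway family: %d (AF_INET6=%d)\n", gw_family(&cfg, AF_INET6), AF_INET6);
    return 0;
}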
route.c
  668   __be32 gw, u32 pmtu, bool lock,  in update_or_create_fnhe() (argument)
  704   if (gw)  in update_or_create_fnhe()
  705   fnhe->fnhe_gw = gw;  in update_or_create_fnhe()
  736   fnhe->fnhe_gw = gw;  in update_or_create_fnhe()
  953   __be32 gw = rt_nexthop(rt, ip_hdr(skb)->daddr);  in ip_rt_send_redirect() (local)
  955   icmp_send(skb, ICMP_REDIRECT, ICMP_REDIR_HOST, gw);  in ip_rt_send_redirect()
  963   &ip_hdr(skb)->daddr, &gw);  in ip_rt_send_redirect()
  1862  __be32 gw;  in __mkroute_input() (local)
  1864  gw = nhc->nhc_gw_family == AF_INET ? nhc->nhc_gw.ipv4 : 0;  in __mkroute_input()
  1866  inet_addr_onlink(out_dev, saddr, gw))  in __mkroute_input()
fib_trie.c
  2953  __be32 gw = 0;  in fib_route_seq_show() (local)
  2956  gw = nhc->nhc_gw.ipv4;  in fib_route_seq_show()
  2962  prefix, gw, flags, 0, 0,  in fib_route_seq_show()
/net/bluetooth/
6lowpan.c
  33   struct in6_addr gw;  (member)
  173  if (ipv6_addr_any(&lowpan_cb(skb)->gw)) {  in peer_lookup_dst()
  181  nexthop = &lowpan_cb(skb)->gw;  in peer_lookup_dst()
  190  memcpy(&lowpan_cb(skb)->gw, nexthop, sizeof(struct in6_addr));  in peer_lookup_dst()
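The Bluetooth 6LoWPAN code caches the chosen IPv6 next hop in the skb control block: peer_lookup_dst() computes it only while lowpan_cb(skb)->gw is still the unspecified address and memcpy()s the result back for reuse (lines 173-190). A small model of that compute-once, cache-in-the-per-packet-scratch-area pattern; the packet/cb layout and helpers are invented:

/* Model of caching a computed IPv6 next hop in a per-packet control block,
 * loosely following peer_lookup_dst() and lowpan_cb(skb)->gw. The layout and
 * helpers here are invented for illustration. */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>

struct pkt_cb {
    struct in6_addr gw;   /* cached next hop; all-zero means "not resolved yet" */
};

/* stand-in for a routing decision; a real stack would consult the FIB */
static struct in6_addr resolve_nexthop(const struct in6_addr *daddr)
{
    return *daddr;        /* pretend the destination is on-link */
}

static const struct in6_addr *lookup_nexthop(struct pkt_cb *cb,
                                             const struct in6_addr *daddr)
{
    if (IN6_IS_ADDR_UNSPECIFIED(&cb->gw)) {
        struct in6_addr nh = resolve_nexthop(daddr);

        memcpy(&cb->gw, &nh, sizeof(struct in6_addr));  /* cache it */
    }
    return &cb->gw;       /* later callers reuse the cached gateway */
}

int main(void)
{
    struct pkt_cb cb;
    struct in6_addr dst;
    char buf[INET6_ADDRSTRLEN];

    memset(&cb, 0, sizeof(cb));
    inet_pton(AF_INET6, "2001:db8::42", &dst);
    printf("next hop: %s\n",
           inet_ntop(AF_INET6, lookup_nexthop(&cb, &dst), buf, sizeof(buf)));
    return 0;
}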
/net/ipv6/
route.c
  202   struct neighbour *ip6_neigh_lookup(const struct in6_addr *gw,  in ip6_neigh_lookup() (argument)
  209   daddr = choose_neigh_daddr(gw, skb, daddr);  in ip6_neigh_lookup()
  1935  const struct in6_addr *gw;  (member)
  1945  (arg->gw && !nh->fib_nh_gw_family) ||  in fib6_nh_find_match()
  1946  (!arg->gw && nh->fib_nh_gw_family) ||  in fib6_nh_find_match()
  1947  (arg->gw && !ipv6_addr_equal(arg->gw, &nh->fib_nh_gw6)))  in fib6_nh_find_match()
  1970  .gw = &rt->rt6i_gateway,  in rt6_update_exception_stamp_rt()
  2766  .gw = &rt6->rt6i_gateway,  in __ip6_rt_update_pmtu()
  2864  const struct in6_addr *gw,  in ip6_redirect_nh_match() (argument)
  2878  if (!ipv6_addr_equal(gw, &nh->fib_nh_gw6)) {  in ip6_redirect_nh_match()
  [all …]
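In fib6_nh_find_match() (lines 1945-1947) the gateway states must agree: a gateway-less lookup argument cannot match a gatewayed nexthop and vice versa, and when both carry one the addresses must be equal. A userspace model of that three-way check; memcmp stands in for ipv6_addr_equal() and the structs are illustrative:

/* Model of the gateway comparison in fib6_nh_find_match(): mismatch if exactly
 * one side has a gateway, or if both have one but the addresses differ.
 * Struct names are illustrative, not the kernel's. */
#include <arpa/inet.h>
#include <netinet/in.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>

struct nh {                   /* reduced stand-in for struct fib6_nh */
    bool has_gw;              /* models fib_nh_gw_family != 0 */
    struct in6_addr gw6;      /* models fib_nh_gw6 */
};

static bool gw_matches(const struct in6_addr *arg_gw, const struct nh *nh)
{
    if (arg_gw && !nh->has_gw)
        return false;         /* caller expects a gateway, nexthop has none */
    if (!arg_gw && nh->has_gw)
        return false;         /* nexthop routes via a gateway, caller does not */
    if (arg_gw && memcmp(arg_gw, &nh->gw6, sizeof(*arg_gw)) != 0)
        return false;         /* both have one, but the addresses differ */
    return true;
}

int main(void)
{
    struct nh nh = { .has_gw = true };
    struct in6_addr gw;

    inet_pton(AF_INET6, "fe80::1", &gw);
    nh.gw6 = gw;
    printf("match=%d\n", gw_matches(&gw, &nh));
    printf("match without gw=%d\n", gw_matches(NULL, &nh));
    return 0;
}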